提交 5084dccf 编写于 作者: C coleenp

8003635: NPG: AsynchGetCallTrace broken by Method* virtual call

Summary: Make Metaspace::contains lock-free and use it to check whether something is in metaspace; also validate a Method* by comparing its vtbl pointer.
Reviewed-by: dholmes, sspitsyn, dcubed, jmasa
上级 91e7a3d5
......@@ -648,7 +648,7 @@ bool frame::is_interpreted_frame_valid(JavaThread* thread) const {
Method* m = *interpreter_frame_method_addr();
// validate the method we'd find in this potential sender
if (!Universe::heap()->is_valid_method(m)) return false;
if (!m->is_valid_method()) return false;
// stack frames shouldn't be much larger than max_stack elements
......
......@@ -534,7 +534,7 @@ bool frame::is_interpreted_frame_valid(JavaThread* thread) const {
Method* m = *interpreter_frame_method_addr();
// validate the method we'd find in this potential sender
if (!Universe::heap()->is_valid_method(m)) return false;
if (!m->is_valid_method()) return false;
// stack frames shouldn't be much larger than max_stack elements
......
......@@ -289,11 +289,6 @@ class CollectedHeap : public CHeapObj<mtInternal> {
// (A scavenge is a GC which is not a full GC.)
virtual bool is_scavengable(const void *p) = 0;
// Returns "TRUE" if "p" is a method oop in the
// current heap, with high probability. This predicate
// is not stable, in general.
bool is_valid_method(Method* p) const;
void set_gc_cause(GCCause::Cause v) {
if (UsePerfData) {
_gc_lastcause = _gc_cause;
......
......@@ -242,36 +242,6 @@ oop CollectedHeap::array_allocate_nozero(KlassHandle klass,
return (oop)obj;
}
// Returns "TRUE" if "p" is a method oop in the
// current heap with high probability. NOTE: The main
// current consumers of this interface are Forte::
// and ThreadProfiler::. In these cases, the
// interpreter frame from which "p" came, may be
// under construction when sampled asynchronously, so
// the clients want to check that it represents a
// valid method before using it. Nonetheless since
// the clients do not typically lock out GC, the
// predicate is_valid_method() is not stable, so
// it is possible that by the time "p" is used, it
// is no longer valid.
// Best-effort plausibility check that "p" is a live Method*. This predicate
// is NOT stable: GC may invalidate "p" between this check and its use, so a
// "true" result is only a probabilistic answer (see the comment block above).
inline bool CollectedHeap::is_valid_method(Method* p) const {
return
p != NULL &&
// Check whether "method" is metadata
p->is_metadata() &&
// See if GC is active; however, there is still an
// apparently unavoidable window after this call
// and before the client of this interface uses "p".
// If the client chooses not to lock out GC, then
// it's a risk the client must accept.
!is_gc_active() &&
// Check that p is a Method*.
p->is_method();
}
inline void CollectedHeap::oop_iterate_no_header(OopClosure* cl) {
NoHeaderExtendedOopClosure no_header_cl(cl);
oop_iterate(&no_header_cl);
......
......@@ -66,10 +66,17 @@ bool MetaspaceObj::is_shared() const {
}
// Cheap (but imprecise) metadata test: anything that does not lie inside the
// Java heap's reserved region is assumed to be metadata. Compare with the
// precise-but-slower is_metaspace_object() below.
bool MetaspaceObj::is_metadata() const {
// ClassLoaderDataGraph::contains((address)this); has lock inversion problems
// GC Verify checks use this in guarantees.
// TODO: either replace them with is_metaspace_object() or remove them.
// is_metaspace_object() is slower than this test. This test doesn't
// seem very useful for metaspace objects anymore though.
return !Universe::heap()->is_in_reserved(this);
}
// Precise test: true iff "this" lies within Metaspace (including the shared
// CDS space). Delegates to the lock-free Metaspace::contains().
bool MetaspaceObj::is_metaspace_object() const {
return Metaspace::contains((void*)this);
}
// Print this object's address on "st" as " {0x...}" for debugging output.
void MetaspaceObj::print_address_on(outputStream* st) const {
st->print(" {"INTPTR_FORMAT"}", this);
}
......
......@@ -245,6 +245,7 @@ class ClassLoaderData;
class MetaspaceObj {
public:
bool is_metadata() const;
bool is_metaspace_object() const; // more specific test but slower
bool is_shared() const;
void print_address_on(outputStream* st) const; // nonvirtual address printing
......
......@@ -36,6 +36,7 @@
#include "memory/universe.hpp"
#include "runtime/globals.hpp"
#include "runtime/mutex.hpp"
#include "runtime/orderAccess.hpp"
#include "services/memTracker.hpp"
#include "utilities/copy.hpp"
#include "utilities/debug.hpp"
......@@ -1007,6 +1008,8 @@ bool VirtualSpaceList::grow_vs(size_t vs_word_size) {
delete new_entry;
return false;
} else {
// ensure lock-free iteration sees fully initialized node
OrderAccess::storestore();
link_vs(new_entry, vs_word_size);
return true;
}
......@@ -1096,7 +1099,6 @@ void VirtualSpaceList::print_on(outputStream* st) const {
}
}
#ifndef PRODUCT
bool VirtualSpaceList::contains(const void *ptr) {
VirtualSpaceNode* list = virtual_space_list();
VirtualSpaceListIterator iter(list);
......@@ -1108,7 +1110,6 @@ bool VirtualSpaceList::contains(const void *ptr) {
}
return false;
}
#endif // PRODUCT
// MetaspaceGC methods
......@@ -2739,15 +2740,17 @@ void Metaspace::print_on(outputStream* out) const {
}
}
#ifndef PRODUCT
bool Metaspace::contains(const void * ptr) const {
// Lock-free check whether "ptr" points into any metaspace: the shared (CDS)
// space, the regular space list, or the class space list. Made static and
// lock-free so it can be called from asynchronous sampling code
// (AsyncGetCallTrace, see the commit summary) without risking lock
// inversion or deadlock.
bool Metaspace::contains(const void * ptr) {
if (MetaspaceShared::is_in_shared_space(ptr)) {
return true;
}
// This is checked while unlocked. As long as the virtualspaces are added
// at the end, the pointer will be in one of them. The virtual spaces
// aren't deleted presently. When they are, some sort of locking might
// be needed. Note, locking this can cause inversion problems with the
// caller in MetaspaceObj::is_metadata() function.
return space_list()->contains(ptr) || class_space_list()->contains(ptr);
}
#endif
void Metaspace::verify() {
vsm()->verify();
......
......@@ -135,11 +135,7 @@ class Metaspace : public CHeapObj<mtClass> {
static bool is_initialized() { return _class_space_list != NULL; }
#ifndef PRODUCT
bool contains(const void *ptr) const;
bool contains_class(const void *ptr) const;
#endif
static bool contains(const void *ptr);
void dump(outputStream* const out) const;
void print_on(outputStream* st) const;
......
......@@ -425,14 +425,10 @@ void Universe::genesis(TRAPS) {
// from MetaspaceObj, because the latter does not have virtual functions.
// If the metadata type has a vtable, it cannot be shared in the read-only
// section of the CDS archive, because the vtable pointer is patched.
// Read the first word stored at "addr" (for a C++ object this is its vptr,
// assuming the vtable pointer occupies the first word).
static inline void* dereference(void* addr) {
  void** first_word = static_cast<void**>(addr);
  return *first_word;
}
static inline void add_vtable(void** list, int* n, void* o, int count) {
guarantee((*n) < count, "vtable list too small");
void* vtable = dereference(o);
assert(dereference(vtable) != NULL, "invalid vtable");
void* vtable = dereference_vptr(o);
assert(*(void**)(vtable) != NULL, "invalid vtable");
list[(*n)++] = vtable;
}
......
......@@ -48,8 +48,8 @@ void CompiledICHolder::print_value_on(outputStream* st) const {
// Verification
void CompiledICHolder::verify_on(outputStream* st) {
guarantee(holder_method()->is_metadata(), "should be in permspace");
guarantee(holder_method()->is_metadata(), "should be in metaspace");
guarantee(holder_method()->is_method(), "should be method");
guarantee(holder_klass()->is_metadata(), "should be in permspace");
guarantee(holder_klass()->is_metadata(), "should be in metaspace");
guarantee(holder_klass()->is_klass(), "should be klass");
}
......@@ -1814,6 +1814,23 @@ void Method::clear_jmethod_ids(ClassLoaderData* loader_data) {
loader_data->jmethod_ids()->clear_all_methods();
}
// Check that this pointer is valid by checking that the vtbl pointer matches
// that of a freshly default-constructed Method, after first verifying the
// pointer actually lies within metaspace. Used by asynchronous samplers
// (e.g. AsyncGetCallTrace) that may hold stale or garbage Method*.
bool Method::is_valid_method() const {
// NOTE(review): comparing "this" against NULL is technically undefined
// behavior when the member function was invoked through a null pointer;
// ideally callers would null-check before the call. Kept here as a
// defensive check against asynchronously-sampled garbage pointers.
if (this == NULL) {
return false;
} else if (!is_metaspace_object()) {
// Not inside metaspace, so it cannot be a Method.
return false;
} else {
Method m;
// This assumes that the vtbl pointer is the first word of a C++ object.
// This assumption is also in universe.cpp patch_klass_vtables
void* vtbl2 = dereference_vptr((void*)&m);
void* this_vtbl = dereference_vptr((void*)this);
return vtbl2 == this_vtbl;
}
}
#ifndef PRODUCT
void Method::print_jmethod_ids(ClassLoaderData* loader_data, outputStream* out) {
out->print_cr("jni_method_id count = %d", loader_data->jmethod_ids()->count_methods());
......@@ -1935,7 +1952,7 @@ void Method::verify_on(outputStream* st) {
guarantee(constMethod()->is_metadata(), "should be metadata");
MethodData* md = method_data();
guarantee(md == NULL ||
md->is_metadata(), "should be in permspace");
md->is_metadata(), "should be metadata");
guarantee(md == NULL ||
md->is_methodData(), "should be method data");
}
......@@ -169,7 +169,8 @@ class Method : public Metadata {
ConstMethod::MethodType method_type,
TRAPS);
Method() { assert(DumpSharedSpaces || UseSharedSpaces, "only for CDS"); }
// CDS and vtbl checking can create an empty Method to get vtbl pointer.
Method(){}
// The Method vtable is restored by this call when the Method is in the
// shared archive. See patch_klass_vtables() in metaspaceShared.cpp for
......@@ -812,6 +813,9 @@ class Method : public Metadata {
const char* internal_name() const { return "{method}"; }
// Check for valid method pointer
bool is_valid_method() const;
// Verify
void verify() { verify_on(tty); }
void verify_on(outputStream* st);
......
......@@ -216,10 +216,7 @@ static bool is_decipherable_interpreted_frame(JavaThread* thread,
// not yet valid.
*method_p = method;
// See if gc may have invalidated method since we validated frame
if (!Universe::heap()->is_valid_method(method)) return false;
if (!method->is_valid_method()) return false;
intptr_t bcx = fr->interpreter_frame_bcx();
......@@ -394,19 +391,11 @@ static void forte_fill_call_trace_given_top(JavaThread* thd,
bool fully_decipherable = find_initial_Java_frame(thd, &top_frame, &initial_Java_frame, &method, &bci);
// The frame might not be walkable but still recovered a method
// (e.g. an nmethod with no scope info for the pc
// (e.g. an nmethod with no scope info for the pc)
if (method == NULL) return;
CollectedHeap* ch = Universe::heap();
// The method is not stored GC safe so see if GC became active
// after we entered AsyncGetCallTrace() and before we try to
// use the Method*.
// Yes, there is still a window after this check and before
// we use Method* below, but we can't lock out GC so that
// has to be an acceptable risk.
if (!ch->is_valid_method(method)) {
if (!method->is_valid_method()) {
trace->num_frames = ticks_GC_active; // -2
return;
}
......@@ -440,13 +429,7 @@ static void forte_fill_call_trace_given_top(JavaThread* thd,
bci = st.bci();
method = st.method();
// The method is not stored GC safe so see if GC became active
// after we entered AsyncGetCallTrace() and before we try to
// use the Method*.
// Yes, there is still a window after this check and before
// we use Method* below, but we can't lock out GC so that
// has to be an acceptable risk.
if (!ch->is_valid_method(method)) {
if (!method->is_valid_method()) {
// we throw away everything we've gathered in this sample since
// none of it is safe
trace->num_frames = ticks_GC_active; // -2
......
......@@ -1280,4 +1280,12 @@ inline int build_int_from_shorts( jushort low, jushort high ) {
#define ARRAY_SIZE(array) (sizeof(array)/sizeof((array)[0]))
// Dereference vptr
// All C++ compilers that we know of have the vtbl pointer in the first
// word. If there are exceptions, this function needs to be made compiler
// specific.
// Return the first pointer-sized word of the object at "addr" — i.e. its
// vtbl pointer under the first-word-vptr assumption documented above.
static inline void* dereference_vptr(void* addr) {
  return *static_cast<void**>(addr);
}
#endif // SHARE_VM_UTILITIES_GLOBALDEFINITIONS_HPP
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册