Commit 9e24ba9c authored by kvn

Merge

@@ -54,72 +54,72 @@ jprt.sync.push=false
 # Define the Solaris platforms we want for the various releases
 jprt.my.solaris.sparc.jdk8=solaris_sparc_5.10
 jprt.my.solaris.sparc.jdk7=solaris_sparc_5.10
-jprt.my.solaris.sparc.jdk7u4=${jprt.my.solaris.sparc.jdk7}
+jprt.my.solaris.sparc.jdk7u6=${jprt.my.solaris.sparc.jdk7}
 jprt.my.solaris.sparc=${jprt.my.solaris.sparc.${jprt.tools.default.release}}
 jprt.my.solaris.sparcv9.jdk8=solaris_sparcv9_5.10
 jprt.my.solaris.sparcv9.jdk7=solaris_sparcv9_5.10
-jprt.my.solaris.sparcv9.jdk7u4=${jprt.my.solaris.sparcv9.jdk7}
+jprt.my.solaris.sparcv9.jdk7u6=${jprt.my.solaris.sparcv9.jdk7}
 jprt.my.solaris.sparcv9=${jprt.my.solaris.sparcv9.${jprt.tools.default.release}}
 jprt.my.solaris.i586.jdk8=solaris_i586_5.10
 jprt.my.solaris.i586.jdk7=solaris_i586_5.10
-jprt.my.solaris.i586.jdk7u4=${jprt.my.solaris.i586.jdk7}
+jprt.my.solaris.i586.jdk7u6=${jprt.my.solaris.i586.jdk7}
 jprt.my.solaris.i586=${jprt.my.solaris.i586.${jprt.tools.default.release}}
 jprt.my.solaris.x64.jdk8=solaris_x64_5.10
 jprt.my.solaris.x64.jdk7=solaris_x64_5.10
-jprt.my.solaris.x64.jdk7u4=${jprt.my.solaris.x64.jdk7}
+jprt.my.solaris.x64.jdk7u6=${jprt.my.solaris.x64.jdk7}
 jprt.my.solaris.x64=${jprt.my.solaris.x64.${jprt.tools.default.release}}
 jprt.my.linux.i586.jdk8=linux_i586_2.6
 jprt.my.linux.i586.jdk7=linux_i586_2.6
-jprt.my.linux.i586.jdk7u4=${jprt.my.linux.i586.jdk7}
+jprt.my.linux.i586.jdk7u6=${jprt.my.linux.i586.jdk7}
 jprt.my.linux.i586=${jprt.my.linux.i586.${jprt.tools.default.release}}
 jprt.my.linux.x64.jdk8=linux_x64_2.6
 jprt.my.linux.x64.jdk7=linux_x64_2.6
-jprt.my.linux.x64.jdk7u4=${jprt.my.linux.x64.jdk7}
+jprt.my.linux.x64.jdk7u6=${jprt.my.linux.x64.jdk7}
 jprt.my.linux.x64=${jprt.my.linux.x64.${jprt.tools.default.release}}
 jprt.my.linux.ppc.jdk8=linux_ppc_2.6
 jprt.my.linux.ppc.jdk7=linux_ppc_2.6
-jprt.my.linux.ppc.jdk7u4=${jprt.my.linux.ppc.jdk7}
+jprt.my.linux.ppc.jdk7u6=${jprt.my.linux.ppc.jdk7}
 jprt.my.linux.ppc=${jprt.my.linux.ppc.${jprt.tools.default.release}}
 jprt.my.linux.ppcv2.jdk8=linux_ppcv2_2.6
 jprt.my.linux.ppcv2.jdk7=linux_ppcv2_2.6
-jprt.my.linux.ppcv2.jdk7u4=${jprt.my.linux.ppcv2.jdk7}
+jprt.my.linux.ppcv2.jdk7u6=${jprt.my.linux.ppcv2.jdk7}
 jprt.my.linux.ppcv2=${jprt.my.linux.ppcv2.${jprt.tools.default.release}}
 jprt.my.linux.ppcsflt.jdk8=linux_ppcsflt_2.6
 jprt.my.linux.ppcsflt.jdk7=linux_ppcsflt_2.6
-jprt.my.linux.ppcsflt.jdk7u4=${jprt.my.linux.ppcsflt.jdk7}
+jprt.my.linux.ppcsflt.jdk7u6=${jprt.my.linux.ppcsflt.jdk7}
 jprt.my.linux.ppcsflt=${jprt.my.linux.ppcsflt.${jprt.tools.default.release}}
 jprt.my.linux.armvfp.jdk8=linux_armvfp_2.6
 jprt.my.linux.armvfp.jdk7=linux_armvfp_2.6
-jprt.my.linux.armvfp.jdk7u4=${jprt.my.linux.armvfp.jdk7}
+jprt.my.linux.armvfp.jdk7u6=${jprt.my.linux.armvfp.jdk7}
 jprt.my.linux.armvfp=${jprt.my.linux.armvfp.${jprt.tools.default.release}}
 jprt.my.linux.armsflt.jdk8=linux_armsflt_2.6
 jprt.my.linux.armsflt.jdk7=linux_armsflt_2.6
-jprt.my.linux.armsflt.jdk7u4=${jprt.my.linux.armsflt.jdk7}
+jprt.my.linux.armsflt.jdk7u6=${jprt.my.linux.armsflt.jdk7}
 jprt.my.linux.armsflt=${jprt.my.linux.armsflt.${jprt.tools.default.release}}
 jprt.my.macosx.x64.jdk8=macosx_x64_10.7
 jprt.my.macosx.x64.jdk7=macosx_x64_10.7
-jprt.my.macosx.x64.jdk7u4=${jprt.my.macosx.x64.jdk7}
+jprt.my.macosx.x64.jdk7u6=${jprt.my.macosx.x64.jdk7}
 jprt.my.macosx.x64=${jprt.my.macosx.x64.${jprt.tools.default.release}}
 jprt.my.windows.i586.jdk8=windows_i586_5.1
 jprt.my.windows.i586.jdk7=windows_i586_5.1
-jprt.my.windows.i586.jdk7u4=${jprt.my.windows.i586.jdk7}
+jprt.my.windows.i586.jdk7u6=${jprt.my.windows.i586.jdk7}
 jprt.my.windows.i586=${jprt.my.windows.i586.${jprt.tools.default.release}}
 jprt.my.windows.x64.jdk8=windows_x64_5.2
 jprt.my.windows.x64.jdk7=windows_x64_5.2
-jprt.my.windows.x64.jdk7u4=${jprt.my.windows.x64.jdk7}
+jprt.my.windows.x64.jdk7u6=${jprt.my.windows.x64.jdk7}
 jprt.my.windows.x64=${jprt.my.windows.x64.${jprt.tools.default.release}}
 # Standard list of jprt build targets for this source tree
@@ -154,7 +154,7 @@ jprt.build.targets.all=${jprt.build.targets.standard}, \
 jprt.build.targets.jdk8=${jprt.build.targets.all}
 jprt.build.targets.jdk7=${jprt.build.targets.all}
-jprt.build.targets.jdk7u4=${jprt.build.targets.all}
+jprt.build.targets.jdk7u6=${jprt.build.targets.all}
 jprt.build.targets=${jprt.build.targets.${jprt.tools.default.release}}
 # Subset lists of test targets for this source tree
@@ -447,7 +447,7 @@ jprt.test.targets.embedded= \
 jprt.test.targets.jdk8=${jprt.test.targets.standard}
 jprt.test.targets.jdk7=${jprt.test.targets.standard}
-jprt.test.targets.jdk7u4=${jprt.test.targets.jdk7}
+jprt.test.targets.jdk7u6=${jprt.test.targets.jdk7}
 jprt.test.targets=${jprt.test.targets.${jprt.tools.default.release}}
 # The default test/Makefile targets that should be run
@@ -507,7 +507,7 @@ jprt.make.rule.test.targets.embedded = \
 jprt.make.rule.test.targets.jdk8=${jprt.make.rule.test.targets.standard}
 jprt.make.rule.test.targets.jdk7=${jprt.make.rule.test.targets.standard}
-jprt.make.rule.test.targets.jdk7u4=${jprt.make.rule.test.targets.jdk7}
+jprt.make.rule.test.targets.jdk7u6=${jprt.make.rule.test.targets.jdk7}
 jprt.make.rule.test.targets=${jprt.make.rule.test.targets.${jprt.tools.default.release}}
 # 7155453: Work-around to prevent popups on OSX from blocking test completion
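Editor's note: the properties above resolve through nested ${...} interpolation; for example, jprt.my.linux.x64 first expands ${jprt.tools.default.release} and then looks up the resulting per-release key. A minimal Java sketch of that substitution semantics (not JPRT's actual resolver; the map contents below are illustrative):

```java
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Minimal sketch of nested ${...} property resolution, assuming innermost-first expansion.
class NestedPropertyResolution {
    static String resolve(Map<String, String> props, String value) {
        Pattern inner = Pattern.compile("\\$\\{([^${}]+)\\}"); // innermost ${...} first
        Matcher m;
        while ((m = inner.matcher(value)).find()) {
            value = value.substring(0, m.start()) + props.get(m.group(1)) + value.substring(m.end());
        }
        return value;
    }

    public static void main(String[] args) {
        Map<String, String> props = Map.of(
            "jprt.tools.default.release", "jdk7u6",
            "jprt.my.linux.x64.jdk7u6", "${jprt.my.linux.x64.jdk7}",
            "jprt.my.linux.x64.jdk7", "linux_x64_2.6");
        // Resolves to linux_x64_2.6 via jdk7u6 -> jdk7 indirection.
        System.out.println(resolve(props, "${jprt.my.linux.x64.${jprt.tools.default.release}}"));
    }
}
```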
......
@@ -42,6 +42,11 @@ void Canonicalizer::set_canonical(Value x) {
   // the instruction stream (because the instruction list is embedded
   // in the instructions).
   if (canonical() != x) {
+#ifndef PRODUCT
+    if (!x->has_printable_bci()) {
+      x->set_printable_bci(bci());
+    }
+#endif
     if (PrintCanonicalization) {
       PrintValueVisitor do_print_value;
       canonical()->input_values_do(&do_print_value);
@@ -451,6 +456,28 @@ void Canonicalizer::do_Intrinsic (Intrinsic* x) {
       }
       break;
     }
+  case vmIntrinsics::_isInstance : {
+    assert(x->number_of_arguments() == 2, "wrong type");
+    InstanceConstant* c = x->argument_at(0)->type()->as_InstanceConstant();
+    if (c != NULL && !c->value()->is_null_object()) {
+      // ciInstance::java_mirror_type() returns non-NULL only for Java mirrors
+      ciType* t = c->value()->as_instance()->java_mirror_type();
+      if (t->is_klass()) {
+        // substitute cls.isInstance(obj) of a constant Class into
+        // an InstanceOf instruction
+        InstanceOf* i = new InstanceOf(t->as_klass(), x->argument_at(1), x->state_before());
+        set_canonical(i);
+        // and try to canonicalize even further
+        do_InstanceOf(i);
+      } else {
+        assert(t->is_primitive_type(), "should be a primitive type");
+        // cls.isInstance(obj) always returns false for primitive classes
+        set_constant(0);
+      }
+    }
+    break;
+  }
   }
 }
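Editor's note: in Java terms, the new canonicalizer case rewrites Class.isInstance on a compile-time-constant Class receiver into the same check that instanceof performs, and folds it to a constant false for primitive mirrors. A hedged sketch of the source shapes involved (class names are made up):

```java
// Illustration only; the rewrite happens inside C1, not in source code.
class IsInstanceExample {
    static boolean viaIsInstance(Object x) {
        // Receiver is a constant Class, so C1 can canonicalize this call...
        return String.class.isInstance(x);
    }

    static boolean viaInstanceOf(Object x) {
        // ...into the same check the instanceof bytecode performs.
        return x instanceof String;
    }

    static boolean primitiveMirror(Object x) {
        // For a primitive mirror the answer is always false (set_constant(0) above).
        return int.class.isInstance(x);
    }

    public static void main(String[] args) {
        System.out.println(viaIsInstance("hi"));   // true
        System.out.println(viaInstanceOf("hi"));   // true
        System.out.println(primitiveMirror("hi")); // false
    }
}
```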
@@ -677,8 +704,8 @@ void Canonicalizer::do_If(If* x) {
           return;
         }
       }
-      set_canonical(canon);
       set_bci(cmp->state_before()->bci());
+      set_canonical(canon);
     }
   }
 } else if (l->as_InstanceOf() != NULL) {
......
@@ -3170,6 +3170,7 @@ bool GraphBuilder::try_inline_intrinsics(ciMethod* callee) {
       break;
     case vmIntrinsics::_getClass :
+    case vmIntrinsics::_isInstance :
       if (!InlineClassNatives) return false;
       preserves_state = true;
       break;
......
@@ -302,8 +302,6 @@ class Instruction: public CompilationResourceObj {
   void update_exception_state(ValueStack* state);
-  bool has_printable_bci() const { return NOT_PRODUCT(_printable_bci != -99) PRODUCT_ONLY(false); }
 protected:
   void set_type(ValueType* type) {
     assert(type != NULL, "type must exist");
@@ -392,8 +390,9 @@ class Instruction: public CompilationResourceObj {
   // accessors
   int id() const { return _id; }
 #ifndef PRODUCT
+  bool has_printable_bci() const { return _printable_bci != -99; }
   int printable_bci() const { assert(has_printable_bci(), "_printable_bci should have been set"); return _printable_bci; }
-  void set_printable_bci(int bci) { NOT_PRODUCT(_printable_bci = bci;) }
+  void set_printable_bci(int bci) { _printable_bci = bci; }
 #endif
   int use_count() const { return _use_count; }
   int pin_state() const { return _pin_state; }
@@ -576,6 +575,7 @@ LEAF(Phi, Instruction)
   , _block(b)
   , _index(index)
   {
+    NOT_PRODUCT(set_printable_bci(Value(b)->printable_bci()));
     if (type->is_illegal()) {
       make_illegal();
     }
@@ -631,7 +631,9 @@ LEAF(Local, Instruction)
   : Instruction(type)
   , _java_index(index)
   , _declared_type(declared)
-  {}
+  {
+    NOT_PRODUCT(set_printable_bci(-1));
+  }
   // accessors
   int java_index() const { return _java_index; }
......
@@ -1242,6 +1242,36 @@ void LIRGenerator::do_Reference_get(Intrinsic* x) {
               NULL /* info */);
 }
+// Example: clazz.isInstance(object)
+void LIRGenerator::do_isInstance(Intrinsic* x) {
+  assert(x->number_of_arguments() == 2, "wrong type");
+  // TODO could try to substitute this node with an equivalent InstanceOf
+  // if clazz is known to be a constant Class. This will pick up newly found
+  // constants after HIR construction. I'll leave this to a future change.
+  // as a first cut, make a simple leaf call to runtime to stay platform independent.
+  // could follow the aastore example in a future change.
+  LIRItem clazz(x->argument_at(0), this);
+  LIRItem object(x->argument_at(1), this);
+  clazz.load_item();
+  object.load_item();
+  LIR_Opr result = rlock_result(x);
+  // need to perform null check on clazz
+  if (x->needs_null_check()) {
+    CodeEmitInfo* info = state_for(x);
+    __ null_check(clazz.result(), info);
+  }
+  LIR_Opr call_result = call_runtime(clazz.value(), object.value(),
+                                     CAST_FROM_FN_PTR(address, Runtime1::is_instance_of),
+                                     x->type(),
+                                     NULL); // NULL CodeEmitInfo results in a leaf call
+  __ move(call_result, result);
+}
 // Example: object.getClass ()
 void LIRGenerator::do_getClass(Intrinsic* x) {
   assert(x->number_of_arguments() == 1, "wrong type");
@@ -2951,6 +2981,7 @@ void LIRGenerator::do_Intrinsic(Intrinsic* x) {
     break;
   case vmIntrinsics::_Object_init:    do_RegisterFinalizer(x); break;
+  case vmIntrinsics::_isInstance:     do_isInstance(x);        break;
   case vmIntrinsics::_getClass:       do_getClass(x);          break;
   case vmIntrinsics::_currentThread:  do_currentThread(x);     break;
......
@@ -238,6 +238,7 @@ class LIRGenerator: public InstructionVisitor, public BlockClosure {
   LIR_Opr getThreadPointer();
   void do_RegisterFinalizer(Intrinsic* x);
+  void do_isInstance(Intrinsic* x);
   void do_getClass(Intrinsic* x);
   void do_currentThread(Intrinsic* x);
   void do_MathIntrinsic(Intrinsic* x);
......
@@ -294,6 +294,7 @@ const char* Runtime1::name_for_address(address entry) {
   FUNCTION_CASE(entry, SharedRuntime::lrem);
   FUNCTION_CASE(entry, SharedRuntime::dtrace_method_entry);
   FUNCTION_CASE(entry, SharedRuntime::dtrace_method_exit);
+  FUNCTION_CASE(entry, is_instance_of);
   FUNCTION_CASE(entry, trace_block_entry);
 #ifdef TRACE_HAVE_INTRINSICS
   FUNCTION_CASE(entry, TRACE_TIME_METHOD);
@@ -1270,6 +1271,19 @@ JRT_LEAF(void, Runtime1::oop_arraycopy(HeapWord* src, HeapWord* dst, int num))
 JRT_END
+JRT_LEAF(int, Runtime1::is_instance_of(oopDesc* mirror, oopDesc* obj))
+  // had to return int instead of bool, otherwise there may be a mismatch
+  // between the C calling convention and the Java one.
+  // e.g., on x86, GCC may clear only %al when returning a bool false, but
+  // JVM takes the whole %eax as the return value, which may misinterpret
+  // the return value as a boolean true.
+  assert(mirror != NULL, "should null-check on mirror before calling");
+  klassOop k = java_lang_Class::as_klassOop(mirror);
+  return (k != NULL && obj != NULL && obj->is_a(k)) ? 1 : 0;
+JRT_END
 #ifndef PRODUCT
 void Runtime1::print_statistics() {
   tty->print_cr("C1 Runtime statistics:");
......
@@ -186,6 +186,7 @@ class Runtime1: public AllStatic {
   static int arraycopy(oopDesc* src, int src_pos, oopDesc* dst, int dst_pos, int length);
   static void primitive_arraycopy(HeapWord* src, HeapWord* dst, int length);
   static void oop_arraycopy(HeapWord* src, HeapWord* dst, int length);
+  static int is_instance_of(oopDesc* mirror, oopDesc* obj);
   static void print_statistics() PRODUCT_RETURN;
 };
......
@@ -141,8 +141,11 @@ class ValueNumberingVisitor: public InstructionVisitor {
   // visitor functions
   void do_StoreField     (StoreField*      x) {
-    if (x->is_init_point()) {
-      // putstatic is an initialization point so treat it as a wide kill
+    if (x->is_init_point() ||  // putstatic is an initialization point so treat it as a wide kill
+                               // This is actually too strict and the JMM doesn't require
+                               // this in all cases (e.g. load a; volatile store b; load a)
+                               // but possible future optimizations might require this.
+        x->field()->is_volatile()) {
       kill_memory();
     } else {
       kill_field(x->field());
@@ -160,8 +163,8 @@ class ValueNumberingVisitor: public InstructionVisitor {
   void do_Local          (Local*           x) { /* nothing to do */ }
   void do_Constant       (Constant*        x) { /* nothing to do */ }
   void do_LoadField      (LoadField*       x) {
-    if (x->is_init_point()) {
-      // getstatic is an initialization point so treat it as a wide kill
+    if (x->is_init_point() ||        // getstatic is an initialization point so treat it as a wide kill
+        x->field()->is_volatile()) { // the JMM requires this
       kill_memory();
     }
   }
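Editor's note: the value-numbering change conservatively treats volatile field accesses as wide memory kills, so a plain load is not commoned across a volatile access; the Java Memory Model lets volatile reads and writes act as acquire/release points. A hedged Java sketch of the pattern involved (field names are made up):

```java
// Illustration only: why C1 value numbering must not reuse cached loads across volatile accesses.
class VolatileKillExample {
    int a;           // plain field
    volatile int b;  // volatile field

    int reader() {
        int x = a;   // load a
        int y = b;   // volatile load: acquire; may observe a publication that also updated a
        int z = a;   // must be re-loaded; reusing x here could miss the concurrent update
        return x + y + z;
    }

    void writer() {
        a = 1;       // plain store
        b = 2;       // volatile store: release; publishes the write to a
    }
}
```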
......
@@ -527,6 +527,9 @@ class Parse : public GraphKit {
   int     repush_if_args();
   void    adjust_map_after_if(BoolTest::mask btest, Node* c, float prob,
                               Block* path, Block* other_path);
+  void    sharpen_type_after_if(BoolTest::mask btest,
+                                Node* con, const Type* tcon,
+                                Node* val, const Type* tval);
   IfNode* jump_if_fork_int(Node* a, Node* b, BoolTest::mask mask);
   Node*   jump_if_join(Node* iffalse, Node* iftrue);
   void    jump_if_true_fork(IfNode *ifNode, int dest_bci_if_true, int prof_table_index);
......
@@ -1233,6 +1233,71 @@ void Parse::adjust_map_after_if(BoolTest::mask btest, Node* c, float prob,
   if (!have_con)  // remaining adjustments need a con
     return;
+  sharpen_type_after_if(btest, con, tcon, val, tval);
+}
+
+static Node* extract_obj_from_klass_load(PhaseGVN* gvn, Node* n) {
+  Node* ldk;
+  if (n->is_DecodeN()) {
+    if (n->in(1)->Opcode() != Op_LoadNKlass) {
+      return NULL;
+    } else {
+      ldk = n->in(1);
+    }
+  } else if (n->Opcode() != Op_LoadKlass) {
+    return NULL;
+  } else {
+    ldk = n;
+  }
+  assert(ldk != NULL && ldk->is_Load(), "should have found a LoadKlass or LoadNKlass node");
+  Node* adr = ldk->in(MemNode::Address);
+  intptr_t off = 0;
+  Node* obj = AddPNode::Ideal_base_and_offset(adr, gvn, off);
+  if (obj == NULL || off != oopDesc::klass_offset_in_bytes()) // loading oopDesc::_klass?
+    return NULL;
+  const TypePtr* tp = gvn->type(obj)->is_ptr();
+  if (tp == NULL || !(tp->isa_instptr() || tp->isa_aryptr())) // is obj a Java object ptr?
+    return NULL;
+  return obj;
+}
+
+void Parse::sharpen_type_after_if(BoolTest::mask btest,
+                                  Node* con, const Type* tcon,
+                                  Node* val, const Type* tval) {
+  // Look for opportunities to sharpen the type of a node
+  // whose klass is compared with a constant klass.
+  if (btest == BoolTest::eq && tcon->isa_klassptr()) {
+    Node* obj = extract_obj_from_klass_load(&_gvn, val);
+    const TypeOopPtr* con_type = tcon->isa_klassptr()->as_instance_type();
+    if (obj != NULL && (con_type->isa_instptr() || con_type->isa_aryptr())) {
+      // Found:
+      //   Bool(CmpP(LoadKlass(obj._klass), ConP(Foo.klass)), [eq])
+      // or the narrowOop equivalent.
+      const Type* obj_type = _gvn.type(obj);
+      const TypeOopPtr* tboth = obj_type->join(con_type)->isa_oopptr();
+      if (tboth != NULL && tboth != obj_type && tboth->higher_equal(obj_type)) {
+        // obj has to be of the exact type Foo if the CmpP succeeds.
+        assert(tboth->klass_is_exact(), "klass should be exact");
+        int obj_in_map = map()->find_edge(obj);
+        JVMState* jvms = this->jvms();
+        if (obj_in_map >= 0 &&
+            (jvms->is_loc(obj_in_map) || jvms->is_stk(obj_in_map))) {
+          TypeNode* ccast = new (C, 2) CheckCastPPNode(control(), obj, tboth);
+          const Type* tcc = ccast->as_Type()->type();
+          assert(tcc != obj_type && tcc->higher_equal(obj_type), "must improve");
+          // Delay transform() call to allow recovery of pre-cast value
+          // at the control merge.
+          _gvn.set_type_bottom(ccast);
+          record_for_igvn(ccast);
+          // Here's the payoff.
+          replace_in_map(obj, ccast);
+        }
+      }
+    }
+  }
   int val_in_map = map()->find_edge(val);
   if (val_in_map < 0)  return;          // replace_in_map would be useless
@@ -1265,6 +1330,7 @@ void Parse::adjust_map_after_if(BoolTest::mask btest, Node* c, float prob,
   // Exclude tests vs float/double 0 as these could be
   // either +0 or -0.  Just because you are equal to +0
   // doesn't mean you ARE +0!
+  // Note, following code also replaces Long and Oop values.
   if ((!tf || tf->_f != 0.0) &&
       (!td || td->_d != 0.0))
     cast = con;                   // Replace non-constant val by con.
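Editor's note: sharpen_type_after_if pays off on code that compares getClass() against a constant Class and then casts: on the taken branch the object is known to have exactly that type, so the checkcast needs no runtime type check. A hypothetical Java shape it targets (names are made up):

```java
// Hypothetical shape that benefits from the type sharpening above; illustration only.
class SharpenExample {
    static int length(Object x) {
        if (x.getClass() == String.class) {
            // In this branch the compiler now treats x as exactly String,
            // so the cast below carries no runtime check.
            String s = (String) x;
            return s.length();
        }
        return -1;
    }

    public static void main(String[] args) {
        System.out.println(length("abcd")); // 4
        System.out.println(length(42));     // -1
    }
}
```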
......
@@ -702,12 +702,84 @@ const Type *CmpPNode::sub( const Type *t1, const Type *t2 ) const {
   return TypeInt::CC;
 }
+
+static inline Node* isa_java_mirror_load(PhaseGVN* phase, Node* n) {
+  // Return the klass node for
+  //   LoadP(AddP(foo:Klass, #java_mirror))
+  // or NULL if not matching.
+  if (n->Opcode() != Op_LoadP) return NULL;
+  const TypeInstPtr* tp = phase->type(n)->isa_instptr();
+  if (!tp || tp->klass() != phase->C->env()->Class_klass()) return NULL;
+  Node* adr = n->in(MemNode::Address);
+  intptr_t off = 0;
+  Node* k = AddPNode::Ideal_base_and_offset(adr, phase, off);
+  if (k == NULL) return NULL;
+  const TypeKlassPtr* tkp = phase->type(k)->isa_klassptr();
+  if (!tkp || off != in_bytes(Klass::java_mirror_offset())) return NULL;
+  // We've found the klass node of a Java mirror load.
+  return k;
+}
+
+static inline Node* isa_const_java_mirror(PhaseGVN* phase, Node* n) {
+  // for ConP(Foo.class) return ConP(Foo.klass)
+  // otherwise return NULL
+  if (!n->is_Con()) return NULL;
+  const TypeInstPtr* tp = phase->type(n)->isa_instptr();
+  if (!tp) return NULL;
+  ciType* mirror_type = tp->java_mirror_type();
+  // TypeInstPtr::java_mirror_type() returns non-NULL for compile-
+  // time Class constants only.
+  if (!mirror_type) return NULL;
+  // x.getClass() == int.class can never be true (for all primitive types)
+  // Return a ConP(NULL) node for this case.
+  if (mirror_type->is_classless()) {
+    return phase->makecon(TypePtr::NULL_PTR);
+  }
+  // return the ConP(Foo.klass)
+  assert(mirror_type->is_klass(), "mirror_type should represent a klassOop");
+  return phase->makecon(TypeKlassPtr::make(mirror_type->as_klass()));
+}
+
 //------------------------------Ideal------------------------------------------
-// Check for the case of comparing an unknown klass loaded from the primary
+// Normalize comparisons between Java mirror loads to compare the klass instead.
+//
+// Also check for the case of comparing an unknown klass loaded from the primary
 // super-type array vs a known klass with no subtypes.  This amounts to
 // checking to see an unknown klass subtypes a known klass with no subtypes;
 // this only happens on an exact match.  We can shorten this test by 1 load.
 Node *CmpPNode::Ideal( PhaseGVN *phase, bool can_reshape ) {
+  // Normalize comparisons between Java mirrors into comparisons of the low-
+  // level klass, where a dependent load could be shortened.
+  //
+  // The new pattern has a nice effect of matching the same pattern used in the
+  // fast path of instanceof/checkcast/Class.isInstance(), which allows a
+  // redundant exact type check to be optimized away by GVN.
+  // For example, in
+  //   if (x.getClass() == Foo.class) {
+  //     Foo foo = (Foo) x;
+  //     // ... use a ...
+  //   }
+  // a CmpPNode could be shared between if_acmpne and checkcast
+  {
+    Node* k1 = isa_java_mirror_load(phase, in(1));
+    Node* k2 = isa_java_mirror_load(phase, in(2));
+    Node* conk2 = isa_const_java_mirror(phase, in(2));
+    if (k1 && (k2 || conk2)) {
+      Node* lhs = k1;
+      Node* rhs = (k2 != NULL) ? k2 : conk2;
+      this->set_req(1, lhs);
+      this->set_req(2, rhs);
+      return this;
+    }
+  }
+
   // Constant pointer on right?
   const TypeKlassPtr* t2 = phase->type(in(2))->isa_klassptr();
   if (t2 == NULL || !t2->klass_is_exact())
......
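Editor's note: besides the constant-Class case shown in the comment above, the normalization also applies when both CmpP inputs are mirror loads, so a getClass()-to-getClass() comparison becomes a klass compare that GVN can share with the instanceof/checkcast fast path. A hedged Java illustration (names are made up; the statements about the generated compares follow the comments above, not measured output):

```java
// Illustration only: both comparisons below are candidates for the mirror-compare normalization.
class MirrorCompareExample {
    static boolean sameRuntimeType(Object a, Object b) {
        // Two mirror loads: normalized to a compare of the underlying klass pointers.
        return a.getClass() == b.getClass();
    }

    static boolean isExactlyString(Object a) {
        // Mirror load vs. constant Class: normalized to a compare against String's klass.
        return a.getClass() == String.class;
    }

    public static void main(String[] args) {
        System.out.println(sameRuntimeType("x", "y")); // true
        System.out.println(sameRuntimeType("x", 1));   // false
        System.out.println(isExactlyString("x"));      // true
    }
}
```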