Commit a04e00b5 authored by S shade

8014447: Object.hashCode intrinsic breaks inline caches

Summary: Try to inline as normal method first, then fall back to intrinsic.
Reviewed-by: kvn, twisti
Parent 8c82fe4f
@@ -65,6 +65,8 @@ class CallGenerator : public ResourceObj {
   virtual bool is_predicted() const { return false; }
   // is_trap: Does not return to the caller.  (E.g., uncommon trap.)
   virtual bool is_trap() const { return false; }
+  // does_virtual_dispatch: Should try inlining as normal method first.
+  virtual bool does_virtual_dispatch() const { return false; }
   // is_late_inline: supports conversion of call into an inline
   virtual bool is_late_inline() const { return false; }
......
@@ -110,6 +110,7 @@ CallGenerator* Compile::call_generator(ciMethod* callee, int vtable_index, bool
   // then we return it as the inlined version of the call.
   // We do this before the strict f.p. check below because the
   // intrinsics handle strict f.p. correctly.
+  CallGenerator* cg_intrinsic = NULL;
   if (allow_inline && allow_intrinsics) {
     CallGenerator* cg = find_intrinsic(callee, call_does_dispatch);
     if (cg != NULL) {
@@ -121,9 +122,18 @@ CallGenerator* Compile::call_generator(ciMethod* callee, int vtable_index, bool
           cg = CallGenerator::for_predicted_intrinsic(cg, inline_cg);
         }
       }
-      return cg;
+
+      // If intrinsic does the virtual dispatch, we try to use the type profile
+      // first, and hopefully inline it as the regular virtual call below.
+      // We will retry the intrinsic if nothing had claimed it afterwards.
+      if (cg->does_virtual_dispatch()) {
+        cg_intrinsic = cg;
+        cg = NULL;
+      } else {
+        return cg;
+      }
     }
   }

   // Do method handle calls.
   // NOTE: This must happen before normal inlining logic below since
@@ -266,6 +276,13 @@ CallGenerator* Compile::call_generator(ciMethod* callee, int vtable_index, bool
     }
   }

+  // Nothing claimed the intrinsic, we go with straight-forward inlining
+  // for already discovered intrinsic.
+  if (allow_inline && allow_intrinsics && cg_intrinsic != NULL) {
+    assert(cg_intrinsic->does_virtual_dispatch(), "sanity");
+    return cg_intrinsic;
+  }
+
   // There was no special inlining tactic, or it bailed out.
   // Use a more generic tactic, like a simple call.
   if (call_does_dispatch) {
......
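
The Compile::call_generator hunks above implement the new selection order. As a rough, standalone illustration only (the types and helper names below are invented for this sketch and are not HotSpot's real API), the logic amounts to: find the intrinsic first; if it reports does_virtual_dispatch(), stash it and give the type-profile-driven inlining of the normal virtual call the first chance; return the stashed intrinsic only if nothing else claims the call.

```cpp
#include <cstdio>

// Toy stand-ins for the real HotSpot call generators (hypothetical, for illustration only).
struct CallGenerator {
  virtual ~CallGenerator() {}
  virtual bool does_virtual_dispatch() const { return false; }
  virtual const char* name() const = 0;
};

struct HashCodeIntrinsic : CallGenerator {
  // Mirrors the new flag: this intrinsic emits its own receiver dispatch.
  bool does_virtual_dispatch() const override { return true; }
  const char* name() const override { return "hashCode intrinsic"; }
};

struct ProfileGuidedInline : CallGenerator {
  const char* name() const override { return "profile-guided inline"; }
};

// Simplified selection order after the fix: a non-dispatching intrinsic still
// wins outright, but a dispatching one is deferred in favor of the profile path.
CallGenerator* select(CallGenerator* intrinsic, CallGenerator* profile_guided) {
  CallGenerator* cg_intrinsic = nullptr;
  if (intrinsic != nullptr) {
    if (intrinsic->does_virtual_dispatch()) {
      cg_intrinsic = intrinsic;   // stash it; retry later if nothing claims the call
    } else {
      return intrinsic;
    }
  }
  if (profile_guided != nullptr) {
    return profile_guided;        // inline as a regular (virtual) call
  }
  return cg_intrinsic;            // nothing claimed the intrinsic: fall back to it
}

int main() {
  HashCodeIntrinsic intrinsic;
  ProfileGuidedInline inlined;
  std::printf("with profile:    %s\n", select(&intrinsic, &inlined)->name());
  std::printf("without profile: %s\n", select(&intrinsic, nullptr)->name());
}
```

Run as-is, the first call prefers the profile-guided inline while the second falls back to the stashed intrinsic, mirroring the "nothing claimed the intrinsic" path in the last hunk.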
@@ -47,19 +47,22 @@ class LibraryIntrinsic : public InlineCallGenerator {
  private:
   bool _is_virtual;
   bool _is_predicted;
+  bool _does_virtual_dispatch;
   vmIntrinsics::ID _intrinsic_id;

  public:
-  LibraryIntrinsic(ciMethod* m, bool is_virtual, bool is_predicted, vmIntrinsics::ID id)
+  LibraryIntrinsic(ciMethod* m, bool is_virtual, bool is_predicted, bool does_virtual_dispatch, vmIntrinsics::ID id)
     : InlineCallGenerator(m),
       _is_virtual(is_virtual),
       _is_predicted(is_predicted),
+      _does_virtual_dispatch(does_virtual_dispatch),
       _intrinsic_id(id)
   {
   }
   virtual bool is_intrinsic() const { return true; }
   virtual bool is_virtual() const { return _is_virtual; }
   virtual bool is_predicted() const { return _is_predicted; }
+  virtual bool does_virtual_dispatch() const { return _does_virtual_dispatch; }
   virtual JVMState* generate(JVMState* jvms);
   virtual Node* generate_predicate(JVMState* jvms);
   vmIntrinsics::ID intrinsic_id() const { return _intrinsic_id; }
@@ -355,6 +358,7 @@ CallGenerator* Compile::make_vm_intrinsic(ciMethod* m, bool is_virtual) {
   }

   bool is_predicted = false;
+  bool does_virtual_dispatch = false;

   switch (id) {
   case vmIntrinsics::_compareTo:
@@ -381,8 +385,10 @@ CallGenerator* Compile::make_vm_intrinsic(ciMethod* m, bool is_virtual) {
     break;
   case vmIntrinsics::_hashCode:
     if (!InlineObjectHash) return NULL;
+    does_virtual_dispatch = true;
     break;
   case vmIntrinsics::_clone:
+    does_virtual_dispatch = true;
   case vmIntrinsics::_copyOf:
   case vmIntrinsics::_copyOfRange:
     if (!InlineObjectCopy) return NULL;
@@ -541,7 +547,7 @@ CallGenerator* Compile::make_vm_intrinsic(ciMethod* m, bool is_virtual) {
     if (!InlineUnsafeOps) return NULL;
   }

-  return new LibraryIntrinsic(m, is_virtual, is_predicted, (vmIntrinsics::ID) id);
+  return new LibraryIntrinsic(m, is_virtual, is_predicted, does_virtual_dispatch, (vmIntrinsics::ID) id);
 }

 //----------------------register_library_intrinsics-----------------------
......
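
The LibraryIntrinsic and make_vm_intrinsic hunks above plumb the flag from the intrinsic table into the generator object: the factory decides per intrinsic ID (only _hashCode and _clone opt in here, as they correspond to Object methods that subclasses commonly override), the constructor stores the flag, and the virtual accessor exposes it to Compile::call_generator. A standalone miniature of that pattern, with invented names, could look like this:

```cpp
#include <cstdio>

// Invented miniature of the LibraryIntrinsic flag plumbing (illustration only).
enum class IntrinsicId { HashCode, Clone, CompareTo };

class Intrinsic {
  bool _does_virtual_dispatch;
  IntrinsicId _id;
 public:
  Intrinsic(bool does_virtual_dispatch, IntrinsicId id)
    : _does_virtual_dispatch(does_virtual_dispatch), _id(id) {}
  bool does_virtual_dispatch() const { return _does_virtual_dispatch; }
  IntrinsicId id() const { return _id; }
};

// Factory mirroring the shape of make_vm_intrinsic: the flag is decided per
// intrinsic ID, then baked into the generator object that is returned.
Intrinsic* make_intrinsic(IntrinsicId id) {
  bool does_virtual_dispatch = false;
  switch (id) {
    case IntrinsicId::HashCode:   // the intrinsic must handle overridden hashCode
    case IntrinsicId::Clone:      // likewise for clone
      does_virtual_dispatch = true;
      break;
    default:
      break;
  }
  return new Intrinsic(does_virtual_dispatch, id);
}

int main() {
  Intrinsic* hash = make_intrinsic(IntrinsicId::HashCode);
  Intrinsic* cmp  = make_intrinsic(IntrinsicId::CompareTo);
  std::printf("hashCode dispatches virtually: %d\n", hash->does_virtual_dispatch());
  std::printf("compareTo dispatches virtually: %d\n", cmp->does_virtual_dispatch());
  delete hash;
  delete cmp;
}
```

Keeping the decision in the factory means call sites only ever query the generator object, which is exactly what the new does_virtual_dispatch() hook in the first hunk provides.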