diff --git a/src/share/vm/opto/callGenerator.hpp b/src/share/vm/opto/callGenerator.hpp
index a1616de4dc73e179a13ead4c5df53b019067a818..956f227c32f3edc083abfaf21c2ac945bdd0a923 100644
--- a/src/share/vm/opto/callGenerator.hpp
+++ b/src/share/vm/opto/callGenerator.hpp
@@ -65,6 +65,8 @@ class CallGenerator : public ResourceObj {
   virtual bool is_predicted() const { return false; }
   // is_trap: Does not return to the caller. (E.g., uncommon trap.)
   virtual bool is_trap() const { return false; }
+  // does_virtual_dispatch: Should try inlining as normal method first.
+  virtual bool does_virtual_dispatch() const { return false; }

   // is_late_inline: supports conversion of call into an inline
   virtual bool is_late_inline() const { return false; }
diff --git a/src/share/vm/opto/doCall.cpp b/src/share/vm/opto/doCall.cpp
index 8784bbe2dc5ba63a7e25eaba79eb809e5a504c67..9558d6040683bf1a87c4094e0884a115b478a9bc 100644
--- a/src/share/vm/opto/doCall.cpp
+++ b/src/share/vm/opto/doCall.cpp
@@ -110,6 +110,7 @@ CallGenerator* Compile::call_generator(ciMethod* callee, int vtable_index, bool
   // then we return it as the inlined version of the call.
   // We do this before the strict f.p. check below because the
   // intrinsics handle strict f.p. correctly.
+  CallGenerator* cg_intrinsic = NULL;
   if (allow_inline && allow_intrinsics) {
     CallGenerator* cg = find_intrinsic(callee, call_does_dispatch);
     if (cg != NULL) {
@@ -121,7 +122,16 @@ CallGenerator* Compile::call_generator(ciMethod* callee, int vtable_index, bool
           cg = CallGenerator::for_predicted_intrinsic(cg, inline_cg);
         }
       }
-      return cg;
+
+      // If intrinsic does the virtual dispatch, we try to use the type profile
+      // first, and hopefully inline it as the regular virtual call below.
+      // We will retry the intrinsic if nothing had claimed it afterwards.
+      if (cg->does_virtual_dispatch()) {
+        cg_intrinsic = cg;
+        cg = NULL;
+      } else {
+        return cg;
+      }
     }
   }
@@ -266,6 +276,13 @@ CallGenerator* Compile::call_generator(ciMethod* callee, int vtable_index, bool
     }
   }

+  // Nothing claimed the intrinsic, we go with straight-forward inlining
+  // for already discovered intrinsic.
+  if (allow_inline && allow_intrinsics && cg_intrinsic != NULL) {
+    assert(cg_intrinsic->does_virtual_dispatch(), "sanity");
+    return cg_intrinsic;
+  }
+
   // There was no special inlining tactic, or it bailed out.
   // Use a more generic tactic, like a simple call.
   if (call_does_dispatch) {
diff --git a/src/share/vm/opto/library_call.cpp b/src/share/vm/opto/library_call.cpp
index 903726001905374c500463977bdb0dc69061cae9..45b8f33379926900e21d117866d13b13b984fe8a 100644
--- a/src/share/vm/opto/library_call.cpp
+++ b/src/share/vm/opto/library_call.cpp
@@ -47,19 +47,22 @@ class LibraryIntrinsic : public InlineCallGenerator {
  private:
   bool _is_virtual;
   bool _is_predicted;
+  bool _does_virtual_dispatch;
   vmIntrinsics::ID _intrinsic_id;

  public:
-  LibraryIntrinsic(ciMethod* m, bool is_virtual, bool is_predicted, vmIntrinsics::ID id)
+  LibraryIntrinsic(ciMethod* m, bool is_virtual, bool is_predicted, bool does_virtual_dispatch, vmIntrinsics::ID id)
     : InlineCallGenerator(m),
       _is_virtual(is_virtual),
       _is_predicted(is_predicted),
+      _does_virtual_dispatch(does_virtual_dispatch),
       _intrinsic_id(id)
   {
   }
   virtual bool is_intrinsic() const { return true; }
   virtual bool is_virtual() const { return _is_virtual; }
   virtual bool is_predicted() const { return _is_predicted; }
+  virtual bool does_virtual_dispatch() const { return _does_virtual_dispatch; }
   virtual JVMState* generate(JVMState* jvms);
   virtual Node* generate_predicate(JVMState* jvms);
   vmIntrinsics::ID intrinsic_id() const { return _intrinsic_id; }
@@ -355,6 +358,7 @@ CallGenerator* Compile::make_vm_intrinsic(ciMethod* m, bool is_virtual) {
   }

   bool is_predicted = false;
+  bool does_virtual_dispatch = false;

   switch (id) {
   case vmIntrinsics::_compareTo:
@@ -381,8 +385,10 @@ CallGenerator* Compile::make_vm_intrinsic(ciMethod* m, bool is_virtual) {
     break;
   case vmIntrinsics::_hashCode:
     if (!InlineObjectHash) return NULL;
+    does_virtual_dispatch = true;
     break;
   case vmIntrinsics::_clone:
+    does_virtual_dispatch = true;
   case vmIntrinsics::_copyOf:
   case vmIntrinsics::_copyOfRange:
     if (!InlineObjectCopy) return NULL;
@@ -541,7 +547,7 @@ CallGenerator* Compile::make_vm_intrinsic(ciMethod* m, bool is_virtual) {
     if (!InlineUnsafeOps) return NULL;
   }

-  return new LibraryIntrinsic(m, is_virtual, is_predicted, (vmIntrinsics::ID) id);
+  return new LibraryIntrinsic(m, is_virtual, is_predicted, does_virtual_dispatch, (vmIntrinsics::ID) id);
 }

 //----------------------register_library_intrinsics-----------------------
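
Reviewer note: the doCall.cpp change works by stashing a virtually-dispatching intrinsic in cg_intrinsic instead of returning it immediately, so the type-profile-based inlining tactics later in call_generator() get a chance to devirtualize and inline the call first; the stashed intrinsic is only retried if nothing claimed the call site. Below is a minimal standalone C++ sketch of that control flow, not HotSpot source; select_call_generator() and try_profile_guided_inline() are hypothetical stand-ins for Compile::call_generator() and its profile-based tactics.

// Standalone sketch (not HotSpot source) of the deferral pattern the
// doCall.cpp hunks implement. All names here are illustrative stand-ins.
#include <cstdio>

struct CallGenerator {
  bool        is_virtual_dispatch;   // mirrors does_virtual_dispatch()
  const char* name;
  bool does_virtual_dispatch() const { return is_virtual_dispatch; }
};

// Hypothetical stand-in: yields a generator when the type profile is
// monomorphic enough to devirtualize and inline, nullptr otherwise.
static CallGenerator* try_profile_guided_inline(bool profile_is_monomorphic) {
  static CallGenerator inlined = { false, "inlined virtual call" };
  return profile_is_monomorphic ? &inlined : nullptr;
}

static CallGenerator* select_call_generator(CallGenerator* intrinsic,
                                            bool profile_is_monomorphic) {
  CallGenerator* cg_intrinsic = nullptr;
  if (intrinsic != nullptr) {
    if (intrinsic->does_virtual_dispatch()) {
      // Defer the intrinsic: let the type profile try to inline the call
      // as a regular virtual call first (the new doCall.cpp logic).
      cg_intrinsic = intrinsic;
    } else {
      return intrinsic;  // non-dispatching intrinsics still win outright
    }
  }

  if (CallGenerator* cg = try_profile_guided_inline(profile_is_monomorphic)) {
    return cg;           // the profile claimed the call site
  }

  // Nothing claimed the call: retry the stashed intrinsic.
  if (cg_intrinsic != nullptr) {
    return cg_intrinsic;
  }
  return nullptr;        // caller falls back to a plain virtual call
}

int main() {
  CallGenerator hash_intrinsic = { true, "hashCode intrinsic" };
  // Monomorphic profile: the virtual call is inlined, bypassing the intrinsic.
  std::printf("%s\n", select_call_generator(&hash_intrinsic, true)->name);
  // Unhelpful profile: fall back to the deferred intrinsic.
  std::printf("%s\n", select_call_generator(&hash_intrinsic, false)->name);
  return 0;
}

This also motivates the library_call.cpp hunks: _hashCode and _clone are exactly the intrinsics that embed their own virtual dispatch, so they are flagged with does_virtual_dispatch = true (note that _clone sets the flag and then deliberately falls through to the _copyOf/_copyOfRange cases to keep the InlineObjectCopy check).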