Commit f739219e authored by J jiangli

Merge

/*
* Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "code/compiledIC.hpp"
#include "code/icBuffer.hpp"
#include "code/nmethod.hpp"
#include "memory/resourceArea.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/safepoint.hpp"
#ifdef COMPILER2
#include "opto/matcher.hpp"
#endif

// Release the CompiledICHolder* associated with this call site if there is one.
void CompiledIC::cleanup_call_site(virtual_call_Relocation* call_site) {
  // This call site might have become stale so inspect it carefully.
  NativeCall* call = nativeCall_at(call_site->addr());
  if (is_icholder_entry(call->destination())) {
    NativeMovConstReg* value = nativeMovConstReg_at(call_site->cached_value());
    InlineCacheBuffer::queue_for_release((CompiledICHolder*)value->data());
  }
}

bool CompiledIC::is_icholder_call_site(virtual_call_Relocation* call_site) {
  // This call site might have become stale so inspect it carefully.
  NativeCall* call = nativeCall_at(call_site->addr());
  return is_icholder_entry(call->destination());
}
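
// Background, summarized (hedged) from the inline-cache state notes in
// code/compiledIC.hpp: while a call site is bound to interpreted code, its
// cached-value operand holds a heap-allocated CompiledICHolder* rather than
// a bare Klass*/Method*. The two helpers above detect that case so the
// holder can be queued on the InlineCacheBuffer for deferred release
// instead of leaking when the call site is cleaned or flushed.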

//-----------------------------------------------------------------------------
// High-level access to an inline cache. Guaranteed to be MT-safe.

CompiledIC::CompiledIC(nmethod* nm, NativeCall* call)
  : _ic_call(call)
{
  address ic_call = call->instruction_address();

  assert(ic_call != NULL, "ic_call address must be set");
  assert(nm != NULL, "must pass nmethod");
  assert(nm->contains(ic_call), "must be in nmethod");

  // Search for the ic_call at the given address.
  RelocIterator iter(nm, ic_call, ic_call+1);
  bool ret = iter.next();
  assert(ret == true, "relocInfo must exist at this address");
  assert(iter.addr() == ic_call, "must find ic_call");
  if (iter.type() == relocInfo::virtual_call_type) {
    virtual_call_Relocation* r = iter.virtual_call_reloc();
    _is_optimized = false;
    _value = nativeMovConstReg_at(r->cached_value());
  } else {
    assert(iter.type() == relocInfo::opt_virtual_call_type, "must be a virtual call");
    _is_optimized = true;
    _value = NULL;
  }
}
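
// Illustrative only, not part of this file: a hedged sketch of how callers
// typically reach this constructor, via the CompiledIC_at() helper declared
// in code/compiledIC.hpp. Callers must hold CompiledIC_lock or be at a
// safepoint for the result to be stable.
//
//   MutexLocker ml(CompiledIC_lock);
//   CompiledIC* ic = CompiledIC_at(nm, call_addr);  // call_addr inside nm
//   if (!ic->is_optimized()) {
//     void* cached = ic->cached_value();  // Klass* or CompiledICHolder*
//   }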

// ----------------------------------------------------------------------------

#define __ _masm.
void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
#ifdef COMPILER2
  // Stub is fixed up when the corresponding call is converted from calling
  // compiled code to calling interpreted code.
  // set (empty), G5
  // jmp -1

  address mark = cbuf.insts_mark();  // Get mark within main instrs section.

  MacroAssembler _masm(&cbuf);

  address base =
  __ start_a_stub(to_interp_stub_size()*2);
  if (base == NULL) return;  // CodeBuffer::expand failed.

  // Static stub relocation stores the instruction address of the call.
  __ relocate(static_stub_Relocation::spec(mark));

  __ set_metadata(NULL, as_Register(Matcher::inline_cache_reg_encode()));

  __ set_inst_mark();
  AddressLiteral addrlit(-1);
  __ JUMP(addrlit, G3, 0);

  __ delayed()->nop();

  // Update current stubs pointer and restore code_end.
  __ end_a_stub();
#else
  ShouldNotReachHere();
#endif
}
#undef __

int CompiledStaticCall::to_interp_stub_size() {
  // This doesn't need to be accurate but it must be larger than or equal to
  // the real size of the stub.
  return (NativeMovConstReg::instruction_size +  // sethi/setlo;
          NativeJump::instruction_size +         // sethi; jmp; nop
          (TraceJumps ? 20 * BytesPerInstWord : 0) );
}

// Relocation entries for call stub, compiled java to interpreter.
int CompiledStaticCall::reloc_to_interp_stub() {
  return 10;  // 4 in emit_to_interp_stub + 1 in Java_Static_Call
}

void CompiledStaticCall::set_to_interpreted(methodHandle callee, address entry) {
  address stub = find_stub();
  guarantee(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  instruction_address(),
                  callee->name_and_sig_as_C_string());
  }

  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

  assert(method_holder->data() == 0 || method_holder->data() == (intptr_t)callee(),
         "a) MT-unsafe modification of inline cache");
  assert(jump->jump_destination() == (address)-1 || jump->jump_destination() == entry,
         "b) MT-unsafe modification of inline cache");

  // Update stub.
  method_holder->set_data((intptr_t)callee());
  jump->set_jump_destination(entry);

  // Update jump to call.
  set_destination_mt_safe(stub);
}
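
// Hedged sketch of the stub this routine patches (layout inferred from
// emit_to_interp_stub() above; G5 is SPARC's inline-cache register and the
// exact instruction selection belongs to the assembler):
//
//   sethi %hi(Method*), G5          \  NativeMovConstReg: rewritten by
//   add   G5, %lo(Method*), G5      /  method_holder->set_data()
//   sethi %hi(entry), G3            \  NativeJump: rewritten by
//   jmpl  G3 + %lo(entry), %g0      /  jump->set_jump_destination()
//   nop                                (branch delay slot)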

void CompiledStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
  assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
  // Reset stub.
  address stub = static_stub->addr();
  assert(stub != NULL, "stub not found");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
  method_holder->set_data(0);
  jump->set_jump_destination((address)-1);
}

//-----------------------------------------------------------------------------
// Non-product mode code.
#ifndef PRODUCT

void CompiledStaticCall::verify() {
  // Verify call.
  NativeCall::verify();
  if (os::is_MP()) {
    verify_alignment();
  }

  // Verify stub.
  address stub = find_stub();
  assert(stub != NULL, "no stub found for static call");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

  // Verify state.
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
}

#endif // !PRODUCT
@@ -1655,53 +1655,6 @@ uint BoxLockNode::size(PhaseRegAlloc *ra_) const {
   return ra_->C->scratch_emit_size(this);
 }
 
-//=============================================================================
-
-// emit call stub, compiled java to interpretor
-void emit_java_to_interp(CodeBuffer &cbuf ) {
-  // Stub is fixed up when the corresponding call is converted from calling
-  // compiled code to calling interpreted code.
-  // set (empty), G5
-  // jmp -1
-
-  address mark = cbuf.insts_mark();  // get mark within main instrs section
-
-  MacroAssembler _masm(&cbuf);
-
-  address base =
-  __ start_a_stub(Compile::MAX_stubs_size);
-  if (base == NULL)  return;  // CodeBuffer::expand failed
-
-  // static stub relocation stores the instruction address of the call
-  __ relocate(static_stub_Relocation::spec(mark));
-
-  __ set_metadata(NULL, reg_to_register_object(Matcher::inline_cache_reg_encode()));
-
-  __ set_inst_mark();
-  AddressLiteral addrlit(-1);
-  __ JUMP(addrlit, G3, 0);
-
-  __ delayed()->nop();
-
-  // Update current stubs pointer and restore code_end.
-  __ end_a_stub();
-}
-
-// size of call stub, compiled java to interpretor
-uint size_java_to_interp() {
-  // This doesn't need to be accurate but it must be larger or equal to
-  // the real size of the stub.
-  return (NativeMovConstReg::instruction_size +  // sethi/setlo;
-          NativeJump::instruction_size +         // sethi; jmp; nop
-          (TraceJumps ? 20 * BytesPerInstWord : 0) );
-}
-
-// relocation entries for call stub, compiled java to interpretor
-uint reloc_java_to_interp() {
-  return 10;  // 4 in emit_java_to_interp + 1 in Java_Static_Call
-}
 //=============================================================================
 
 #ifndef PRODUCT
 void MachUEPNode::format( PhaseRegAlloc *ra_, outputStream *st ) const {
@@ -2576,15 +2529,15 @@ encode %{
   enc_class Java_Static_Call (method meth) %{    // JAVA STATIC CALL
     // CALL to fixup routine.  Fixup routine uses ScopeDesc info to determine
     // who we intended to call.
-    if ( !_method ) {
+    if (!_method) {
       emit_call_reloc(cbuf, $meth$$method, relocInfo::runtime_call_type);
     } else if (_optimized_virtual) {
       emit_call_reloc(cbuf, $meth$$method, relocInfo::opt_virtual_call_type);
     } else {
       emit_call_reloc(cbuf, $meth$$method, relocInfo::static_call_type);
     }
-    if( _method ) {  // Emit stub for static call
-      emit_java_to_interp(cbuf);
+    if (_method) {  // Emit stub for static call.
+      CompiledStaticCall::emit_to_interp_stub(cbuf);
     }
   %}
...
/*
* Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "code/compiledIC.hpp"
#include "code/icBuffer.hpp"
#include "code/nmethod.hpp"
#include "memory/resourceArea.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/safepoint.hpp"

// Release the CompiledICHolder* associated with this call site if there is one.
void CompiledIC::cleanup_call_site(virtual_call_Relocation* call_site) {
  // This call site might have become stale so inspect it carefully.
  NativeCall* call = nativeCall_at(call_site->addr());
  if (is_icholder_entry(call->destination())) {
    NativeMovConstReg* value = nativeMovConstReg_at(call_site->cached_value());
    InlineCacheBuffer::queue_for_release((CompiledICHolder*)value->data());
  }
}

bool CompiledIC::is_icholder_call_site(virtual_call_Relocation* call_site) {
  // This call site might have become stale so inspect it carefully.
  NativeCall* call = nativeCall_at(call_site->addr());
  return is_icholder_entry(call->destination());
}

//-----------------------------------------------------------------------------
// High-level access to an inline cache. Guaranteed to be MT-safe.

CompiledIC::CompiledIC(nmethod* nm, NativeCall* call)
  : _ic_call(call)
{
  address ic_call = call->instruction_address();

  assert(ic_call != NULL, "ic_call address must be set");
  assert(nm != NULL, "must pass nmethod");
  assert(nm->contains(ic_call), "must be in nmethod");

  // Search for the ic_call at the given address.
  RelocIterator iter(nm, ic_call, ic_call+1);
  bool ret = iter.next();
  assert(ret == true, "relocInfo must exist at this address");
  assert(iter.addr() == ic_call, "must find ic_call");
  if (iter.type() == relocInfo::virtual_call_type) {
    virtual_call_Relocation* r = iter.virtual_call_reloc();
    _is_optimized = false;
    _value = nativeMovConstReg_at(r->cached_value());
  } else {
    assert(iter.type() == relocInfo::opt_virtual_call_type, "must be a virtual call");
    _is_optimized = true;
    _value = NULL;
  }
}

// ----------------------------------------------------------------------------

#define __ _masm.
void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
  // Stub is fixed up when the corresponding call is converted from
  // calling compiled code to calling interpreted code.
  // movq rbx, 0
  // jmp -5 # to self

  address mark = cbuf.insts_mark();  // Get mark within main instrs section.

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  address base =
  __ start_a_stub(to_interp_stub_size()*2);
  if (base == NULL) return;  // CodeBuffer::expand failed.
  // Static stub relocation stores the instruction address of the call.
  __ relocate(static_stub_Relocation::spec(mark), Assembler::imm_operand);
  // Static stub relocation also tags the Method* in the code-stream.
  __ mov_metadata(rbx, (Metadata*) NULL);  // Method is zapped till fixup time.
  // This is recognized as unresolved by relocs/nativeinst/ic code.
  __ jump(RuntimeAddress(__ pc()));

  // Update current stubs pointer and restore insts_end.
  __ end_a_stub();
}
#undef __

int CompiledStaticCall::to_interp_stub_size() {
  return NOT_LP64(10)    // movl; jmp
         LP64_ONLY(15);  // movq (1+1+8); jmp (1+4)
}

// Relocation entries for call stub, compiled java to interpreter.
int CompiledStaticCall::reloc_to_interp_stub() {
  return 4;  // 3 in emit_to_interp_stub + 1 in emit_call
}

void CompiledStaticCall::set_to_interpreted(methodHandle callee, address entry) {
  address stub = find_stub();
  guarantee(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  instruction_address(),
                  callee->name_and_sig_as_C_string());
  }

  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

  assert(method_holder->data() == 0 || method_holder->data() == (intptr_t)callee(),
         "a) MT-unsafe modification of inline cache");
  assert(jump->jump_destination() == (address)-1 || jump->jump_destination() == entry,
         "b) MT-unsafe modification of inline cache");

  // Update stub.
  method_holder->set_data((intptr_t)callee());
  jump->set_jump_destination(entry);

  // Update jump to call.
  set_destination_mt_safe(stub);
}

void CompiledStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
  assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
  // Reset stub.
  address stub = static_stub->addr();
  assert(stub != NULL, "stub not found");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
  method_holder->set_data(0);
  jump->set_jump_destination((address)-1);
}
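
// Hedged sketch of the x86 stub states implied by the code above (layout
// inferred from emit_to_interp_stub(); "clean" matches the asserts in
// set_to_interpreted()):
//
//   mov rbx, <Method*>   ; NativeMovConstReg: data is 0 while clean
//   jmp <entry>          ; NativeJump: destination -1 (jump to self) while clean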

//-----------------------------------------------------------------------------
// Non-product mode code.
#ifndef PRODUCT

void CompiledStaticCall::verify() {
  // Verify call.
  NativeCall::verify();
  if (os::is_MP()) {
    verify_alignment();
  }

  // Verify stub.
  address stub = find_stub();
  assert(stub != NULL, "no stub found for static call");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

  // Verify state.
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
}

#endif // !PRODUCT
@@ -1256,43 +1256,6 @@ uint BoxLockNode::size(PhaseRegAlloc *ra_) const {
   }
 }
 
-//=============================================================================
-
-// emit call stub, compiled java to interpreter
-void emit_java_to_interp(CodeBuffer &cbuf ) {
-  // Stub is fixed up when the corresponding call is converted from calling
-  // compiled code to calling interpreted code.
-  // mov rbx,0
-  // jmp -1
-
-  address mark = cbuf.insts_mark();  // get mark within main instrs section
-
-  // Note that the code buffer's insts_mark is always relative to insts.
-  // That's why we must use the macroassembler to generate a stub.
-  MacroAssembler _masm(&cbuf);
-
-  address base =
-  __ start_a_stub(Compile::MAX_stubs_size);
-  if (base == NULL)  return;  // CodeBuffer::expand failed
-
-  // static stub relocation stores the instruction address of the call
-  __ relocate(static_stub_Relocation::spec(mark), RELOC_IMM32);
-  // static stub relocation also tags the Method* in the code-stream.
-  __ mov_metadata(rbx, (Metadata*)NULL);  // method is zapped till fixup time
-  // This is recognized as unresolved by relocs/nativeInst/ic code
-  __ jump(RuntimeAddress(__ pc()));
-
-  __ end_a_stub();
-  // Update current stubs pointer and restore insts_end.
-}
-
-// size of call stub, compiled java to interpretor
-uint size_java_to_interp() {
-  return 10;  // movl; jmp
-}
-
-// relocation entries for call stub, compiled java to interpretor
-uint reloc_java_to_interp() {
-  return 4;  // 3 in emit_java_to_interp + 1 in Java_Static_Call
-}
 //=============================================================================
 
 #ifndef PRODUCT
 void MachUEPNode::format( PhaseRegAlloc *ra_, outputStream* st ) const {
@@ -1909,8 +1872,8 @@ encode %{
       emit_d32_reloc(cbuf, ($meth$$method - (int)(cbuf.insts_end()) - 4),
                      static_call_Relocation::spec(), RELOC_IMM32 );
     }
-    if (_method) {  // Emit stub for static call
-      emit_java_to_interp(cbuf);
+    if (_method) {  // Emit stub for static call.
+      CompiledStaticCall::emit_to_interp_stub(cbuf);
     }
   %}
...
@@ -1387,48 +1387,6 @@ uint BoxLockNode::size(PhaseRegAlloc *ra_) const
   return (offset < 0x80) ? 5 : 8;  // REX
 }
 
-//=============================================================================
-
-// emit call stub, compiled java to interpreter
-void emit_java_to_interp(CodeBuffer& cbuf)
-{
-  // Stub is fixed up when the corresponding call is converted from
-  // calling compiled code to calling interpreted code.
-  // movq rbx, 0
-  // jmp -5 # to self
-
-  address mark = cbuf.insts_mark();  // get mark within main instrs section
-
-  // Note that the code buffer's insts_mark is always relative to insts.
-  // That's why we must use the macroassembler to generate a stub.
-  MacroAssembler _masm(&cbuf);
-
-  address base =
-  __ start_a_stub(Compile::MAX_stubs_size);
-  if (base == NULL)  return;  // CodeBuffer::expand failed
-
-  // static stub relocation stores the instruction address of the call
-  __ relocate(static_stub_Relocation::spec(mark), RELOC_IMM64);
-  // static stub relocation also tags the Method* in the code-stream.
-  __ mov_metadata(rbx, (Metadata*) NULL);  // method is zapped till fixup time
-  // This is recognized as unresolved by relocs/nativeinst/ic code
-  __ jump(RuntimeAddress(__ pc()));
-
-  // Update current stubs pointer and restore insts_end.
-  __ end_a_stub();
-}
-
-// size of call stub, compiled java to interpretor
-uint size_java_to_interp()
-{
-  return 15;  // movq (1+1+8); jmp (1+4)
-}
-
-// relocation entries for call stub, compiled java to interpretor
-uint reloc_java_to_interp()
-{
-  return 4;  // 3 in emit_java_to_interp + 1 in Java_Static_Call
-}
 //=============================================================================
 
 #ifndef PRODUCT
 void MachUEPNode::format(PhaseRegAlloc* ra_, outputStream* st) const
@@ -2078,8 +2036,8 @@ encode %{
                      RELOC_DISP32);
     }
     if (_method) {
-      // Emit stub for static call
-      emit_java_to_interp(cbuf);
+      // Emit stub for static call.
+      CompiledStaticCall::emit_to_interp_stub(cbuf);
     }
   %}
...
/*
* Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#include "precompiled.hpp"
#include "classfile/systemDictionary.hpp"
#include "code/codeCache.hpp"
#include "code/compiledIC.hpp"
#include "code/icBuffer.hpp"
#include "code/nmethod.hpp"
#include "code/vtableStubs.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/linkResolver.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/oopFactory.hpp"
#include "oops/method.hpp"
#include "oops/oop.inline.hpp"
#include "oops/symbol.hpp"
#include "runtime/icache.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/stubRoutines.hpp"
#include "utilities/events.hpp"

// Release the CompiledICHolder* associated with this call site if there is one.
void CompiledIC::cleanup_call_site(virtual_call_Relocation* call_site) {
  // This call site might have become stale so inspect it carefully.
  NativeCall* call = nativeCall_at(call_site->addr());
  if (is_icholder_entry(call->destination())) {
    NativeMovConstReg* value = nativeMovConstReg_at(call_site->cached_value());
    InlineCacheBuffer::queue_for_release((CompiledICHolder*)value->data());
  }
}

bool CompiledIC::is_icholder_call_site(virtual_call_Relocation* call_site) {
  // This call site might have become stale so inspect it carefully.
  NativeCall* call = nativeCall_at(call_site->addr());
  return is_icholder_entry(call->destination());
}

//-----------------------------------------------------------------------------
// High-level access to an inline cache. Guaranteed to be MT-safe.

CompiledIC::CompiledIC(nmethod* nm, NativeCall* call)
  : _ic_call(call)
{
  address ic_call = call->instruction_address();

  assert(ic_call != NULL, "ic_call address must be set");
  assert(nm != NULL, "must pass nmethod");
  assert(nm->contains(ic_call), "must be in nmethod");

  // Search for the ic_call at the given address.
  RelocIterator iter(nm, ic_call, ic_call+1);
  bool ret = iter.next();
  assert(ret == true, "relocInfo must exist at this address");
  assert(iter.addr() == ic_call, "must find ic_call");
  if (iter.type() == relocInfo::virtual_call_type) {
    virtual_call_Relocation* r = iter.virtual_call_reloc();
    _is_optimized = false;
    _value = nativeMovConstReg_at(r->cached_value());
  } else {
    assert(iter.type() == relocInfo::opt_virtual_call_type, "must be a virtual call");
    _is_optimized = true;
    _value = NULL;
  }
}

// ----------------------------------------------------------------------------

void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
  ShouldNotReachHere();  // Only needed for COMPILER2.
}

int CompiledStaticCall::to_interp_stub_size() {
  ShouldNotReachHere();  // Only needed for COMPILER2.
  return 0;
}

// Relocation entries for call stub, compiled java to interpreter.
int CompiledStaticCall::reloc_to_interp_stub() {
  ShouldNotReachHere();  // Only needed for COMPILER2.
  return 0;
}

void CompiledStaticCall::set_to_interpreted(methodHandle callee, address entry) {
  ShouldNotReachHere();  // Only needed for COMPILER2.
}

void CompiledStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
  ShouldNotReachHere();  // Only needed for COMPILER2.
}

//-----------------------------------------------------------------------------
// Non-product mode code.
#ifndef PRODUCT

void CompiledStaticCall::verify() {
  ShouldNotReachHere();  // Only needed for COMPILER2.
}

#endif // !PRODUCT
@@ -213,6 +213,7 @@ int main(int argc, char *argv[])
   AD.addInclude(AD._CPP_file, "adfiles", get_basename(AD._HPP_file._name));
   AD.addInclude(AD._CPP_file, "memory/allocation.inline.hpp");
   AD.addInclude(AD._CPP_file, "asm/macroAssembler.inline.hpp");
+  AD.addInclude(AD._CPP_file, "code/compiledIC.hpp");
   AD.addInclude(AD._CPP_file, "code/vmreg.hpp");
   AD.addInclude(AD._CPP_file, "gc_interface/collectedHeap.inline.hpp");
   AD.addInclude(AD._CPP_file, "oops/compiledICHolder.hpp");
...
@@ -45,25 +45,6 @@
 // Every time a compiled IC is changed or its type is being accessed,
 // either the CompiledIC_lock must be set or we must be at a safe point.
 
-// Release the CompiledICHolder* associated with this call site is there is one.
-void CompiledIC::cleanup_call_site(virtual_call_Relocation* call_site) {
-  // This call site might have become stale so inspect it carefully.
-  NativeCall* call = nativeCall_at(call_site->addr());
-  if (is_icholder_entry(call->destination())) {
-    NativeMovConstReg* value = nativeMovConstReg_at(call_site->cached_value());
-    InlineCacheBuffer::queue_for_release((CompiledICHolder*)value->data());
-  }
-}
-
-bool CompiledIC::is_icholder_call_site(virtual_call_Relocation* call_site) {
-  // This call site might have become stale so inspect it carefully.
-  NativeCall* call = nativeCall_at(call_site->addr());
-  return is_icholder_entry(call->destination());
-}
-
 //-----------------------------------------------------------------------------
 // Low-level access to an inline cache. Private, since they might not be
 // MT-safe to use.
@@ -488,33 +469,6 @@ bool CompiledIC::is_icholder_entry(address entry) {
   return (cb != NULL && cb->is_adapter_blob());
 }
 
-CompiledIC::CompiledIC(nmethod* nm, NativeCall* call)
-  : _ic_call(call)
-{
-  address ic_call = call->instruction_address();
-
-  assert(ic_call != NULL, "ic_call address must be set");
-  assert(nm != NULL, "must pass nmethod");
-  assert(nm->contains(ic_call), "must be in nmethod");
-
-  // search for the ic_call at the given address
-  RelocIterator iter(nm, ic_call, ic_call+1);
-  bool ret = iter.next();
-  assert(ret == true, "relocInfo must exist at this address");
-  assert(iter.addr() == ic_call, "must find ic_call");
-  if (iter.type() == relocInfo::virtual_call_type) {
-    virtual_call_Relocation* r = iter.virtual_call_reloc();
-    _is_optimized = false;
-    _value = nativeMovConstReg_at(r->cached_value());
-  } else {
-    assert(iter.type() == relocInfo::opt_virtual_call_type, "must be a virtual call");
-    _is_optimized = true;
-    _value = NULL;
-  }
-}
-
 // ----------------------------------------------------------------------------
 
 void CompiledStaticCall::set_to_clean() {
@@ -549,33 +503,6 @@ bool CompiledStaticCall::is_call_to_interpreted() const {
   return nm->stub_contains(destination());
 }
 
-void CompiledStaticCall::set_to_interpreted(methodHandle callee, address entry) {
-  address stub=find_stub();
-  guarantee(stub != NULL, "stub not found");
-
-  if (TraceICs) {
-    ResourceMark rm;
-    tty->print_cr("CompiledStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
-                  instruction_address(),
-                  callee->name_and_sig_as_C_string());
-  }
-
-  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);  // creation also verifies the object
-  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
-
-  assert(method_holder->data() == 0 || method_holder->data() == (intptr_t)callee(), "a) MT-unsafe modification of inline cache");
-  assert(jump->jump_destination() == (address)-1 || jump->jump_destination() == entry, "b) MT-unsafe modification of inline cache");
-
-  // Update stub
-  method_holder->set_data((intptr_t)callee());
-  jump->set_jump_destination(entry);
-
-  // Update jump to call
-  set_destination_mt_safe(stub);
-}
-
 void CompiledStaticCall::set(const StaticCallInfo& info) {
   assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
   MutexLockerEx pl(Patching_lock, Mutex::_no_safepoint_check_flag);
@@ -618,19 +545,6 @@ void CompiledStaticCall::compute_entry(methodHandle m, StaticCallInfo& info) {
   }
 }
 
-void CompiledStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
-  assert (CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
-  // Reset stub
-  address stub = static_stub->addr();
-  assert(stub!=NULL, "stub not found");
-  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);  // creation also verifies the object
-  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
-  method_holder->set_data(0);
-  jump->set_jump_destination((address)-1);
-}
-
 address CompiledStaticCall::find_stub() {
   // Find reloc. information containing this call-site
   RelocIterator iter((nmethod*)NULL, instruction_address());
@@ -668,19 +582,16 @@ void CompiledIC::verify() {
          || is_optimized() || is_megamorphic(), "sanity check");
 }
 
-
 void CompiledIC::print() {
   print_compiled_ic();
   tty->cr();
 }
 
-
 void CompiledIC::print_compiled_ic() {
   tty->print("Inline cache at " INTPTR_FORMAT ", calling %s " INTPTR_FORMAT " cached_value " INTPTR_FORMAT,
              instruction_address(), is_call_to_interpreted() ? "interpreted " : "", ic_destination(), is_optimized() ? NULL : cached_value());
 }
 
-
 void CompiledStaticCall::print() {
   tty->print("static call at " INTPTR_FORMAT " -> ", instruction_address());
   if (is_clean()) {
@@ -693,21 +604,4 @@ void CompiledStaticCall::print() {
   tty->cr();
 }
 
-void CompiledStaticCall::verify() {
-  // Verify call
-  NativeCall::verify();
-  if (os::is_MP()) {
-    verify_alignment();
-  }
-
-  // Verify stub
-  address stub = find_stub();
-  assert(stub != NULL, "no stub found for static call");
-  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);  // creation also verifies the object
-  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
-
-  // Verify state
-  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
-}
-
-#endif
+#endif // !PRODUCT
...
@@ -304,6 +304,11 @@ class CompiledStaticCall: public NativeCall {
   friend CompiledStaticCall* compiledStaticCall_at(address native_call);
   friend CompiledStaticCall* compiledStaticCall_at(Relocation* call_site);
 
+  // Code
+  static void emit_to_interp_stub(CodeBuffer &cbuf);
+  static int  to_interp_stub_size();
+  static int  reloc_to_interp_stub();
+
   // State
   bool is_clean() const;
   bool is_call_to_compiled() const;
...
@@ -24,6 +24,7 @@
 
 #include "precompiled.hpp"
 #include "asm/assembler.inline.hpp"
+#include "code/compiledIC.hpp"
 #include "code/debugInfo.hpp"
 #include "code/debugInfoRec.hpp"
 #include "compiler/compileBroker.hpp"
@@ -41,8 +42,6 @@
 #include "runtime/handles.inline.hpp"
 #include "utilities/xmlstream.hpp"
 
-extern uint size_java_to_interp();
-extern uint reloc_java_to_interp();
 extern uint size_exception_handler();
 extern uint size_deopt_handler();
 
@@ -389,15 +388,15 @@ void Compile::shorten_branches(uint* blk_starts, int& code_size, int& reloc_size
       MachNode *mach = nj->as_Mach();
       blk_size += (mach->alignment_required() - 1) * relocInfo::addr_unit(); // assume worst case padding
       reloc_size += mach->reloc();
-      if( mach->is_MachCall() ) {
+      if (mach->is_MachCall()) {
         MachCallNode *mcall = mach->as_MachCall();
         // This destination address is NOT PC-relative
 
         mcall->method_set((intptr_t)mcall->entry_point());
 
-        if( mcall->is_MachCallJava() && mcall->as_MachCallJava()->_method ) {
-          stub_size  += size_java_to_interp();
-          reloc_size += reloc_java_to_interp();
+        if (mcall->is_MachCallJava() && mcall->as_MachCallJava()->_method) {
+          stub_size  += CompiledStaticCall::to_interp_stub_size();
+          reloc_size += CompiledStaticCall::reloc_to_interp_stub();
         }
       } else if (mach->is_MachSafePoint()) {
         // If call/safepoint are adjacent, account for possible
...
@@ -2224,6 +2224,55 @@ jint Arguments::parse_vm_init_args(const JavaVMInitArgs* args) {
   return JNI_OK;
 }
 
+// Checks if name in command-line argument -agent{lib,path}:name[=options]
+// represents a valid HPROF or JDWP agent. is_path==true denotes that we
+// are dealing with -agentpath (case where name is a path), otherwise with
+// -agentlib.
+bool valid_hprof_or_jdwp_agent(char *name, bool is_path) {
+  char *_name;
+  const char *_hprof = "hprof", *_jdwp = "jdwp";
+  size_t _len_hprof, _len_jdwp, _len_prefix;
+
+  if (is_path) {
+    if ((_name = strrchr(name, (int) *os::file_separator())) == NULL) {
+      return false;
+    }
+
+    _name++;  // skip past last path separator
+    _len_prefix = strlen(JNI_LIB_PREFIX);
+
+    if (strncmp(_name, JNI_LIB_PREFIX, _len_prefix) != 0) {
+      return false;
+    }
+
+    _name += _len_prefix;
+    _len_hprof = strlen(_hprof);
+    _len_jdwp = strlen(_jdwp);
+
+    if (strncmp(_name, _hprof, _len_hprof) == 0) {
+      _name += _len_hprof;
+    }
+    else if (strncmp(_name, _jdwp, _len_jdwp) == 0) {
+      _name += _len_jdwp;
+    }
+    else {
+      return false;
+    }
+
+    if (strcmp(_name, JNI_LIB_SUFFIX) != 0) {
+      return false;
+    }
+
+    return true;
+  }
+
+  if (strcmp(name, _hprof) == 0 || strcmp(name, _jdwp) == 0) {
+    return true;
+  }
+
+  return false;
+}
+
 jint Arguments::parse_each_vm_init_arg(const JavaVMInitArgs* args,
                                        SysClassPath* scp_p,
                                        bool* scp_assembly_required_p,
@@ -2322,7 +2371,7 @@ jint Arguments::parse_each_vm_init_arg(const JavaVMInitArgs* args,
       options = strcpy(NEW_C_HEAP_ARRAY(char, strlen(pos + 1) + 1, mtInternal), pos + 1);
     }
 #if !INCLUDE_JVMTI
-    if ((strcmp(name, "hprof") == 0) || (strcmp(name, "jdwp") == 0)) {
+    if (valid_hprof_or_jdwp_agent(name, is_absolute_path)) {
       jio_fprintf(defaultStream::error_stream(),
                   "Profiling and debugging agents are not supported in this VM\n");
       return JNI_ERR;
...
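
A brief worked illustration of the agent-name check added above. This is a sketch, not output from the actual VM, and it assumes a Linux build where JNI_LIB_PREFIX is "lib" and JNI_LIB_SUFFIX is ".so":

  // Illustrative inputs for valid_hprof_or_jdwp_agent() (options already
  // stripped from name by the caller before the check runs):
  valid_hprof_or_jdwp_agent("hprof",             false);  // true:  -agentlib:hprof
  valid_hprof_or_jdwp_agent("jdwp",              false);  // true:  -agentlib:jdwp=...
  valid_hprof_or_jdwp_agent("/tmp/libjdwp.so",   true);   // true:  prefix + "jdwp" + suffix
  valid_hprof_or_jdwp_agent("/tmp/libjdwp.so.1", true);   // false: trailing ".1" after suffix
  valid_hprof_or_jdwp_agent("/tmp/libfoo.so",    true);   // false: neither hprof nor jdwp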