openanolis / dragonwell8_hotspot
Commit f739219e
Authored on May 02, 2013 by jiangli
Merge: parents 05ea7b01, b7c54ca2

Showing 11 changed files with 564 additions and 247 deletions (+564, -247)
src/cpu/sparc/vm/compiledIC_sparc.cpp   +193  -0
src/cpu/sparc/vm/sparc.ad                 +3  -50
src/cpu/x86/vm/compiledIC_x86.cpp       +180  -0
src/cpu/x86/vm/x86_32.ad                  +2  -39
src/cpu/x86/vm/x86_64.ad                  +2  -44
src/cpu/zero/vm/compiledIC_zero.cpp     +122  -0
src/share/vm/adlc/main.cpp                +1  -0
src/share/vm/code/compiledIC.cpp          +1  -107
src/share/vm/code/compiledIC.hpp          +5  -0
src/share/vm/opto/output.cpp              +5  -6
src/share/vm/runtime/arguments.cpp       +50  -1
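Taken together, the change set behind this merge moves the compiled-to-interpreted call stub out of the per-CPU .ad files and behind a small platform-independent interface on CompiledStaticCall, with each CPU port supplying its own definitions in a new compiledIC_<cpu>.cpp. A sketch of that interface, condensed from the compiledIC.hpp and opto/output.cpp hunks further down (the comments are explanatory and not part of the patch):

class CompiledStaticCall: public NativeCall {
  // ...
  // Emit the java-to-interpreter call stub for the current call site into cbuf.
  static void emit_to_interp_stub(CodeBuffer &cbuf);
  // Upper bound on the stub's size and its relocation entry count; C2's output
  // phase (Compile::shorten_branches) uses these to reserve stub and reloc space.
  static int to_interp_stub_size();
  static int reloc_to_interp_stub();
  // ...
};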
src/cpu/sparc/vm/compiledIC_sparc.cpp (new file, mode 100644)
/*
* Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "code/compiledIC.hpp"
#include "code/icBuffer.hpp"
#include "code/nmethod.hpp"
#include "memory/resourceArea.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/safepoint.hpp"
#ifdef COMPILER2
#include "opto/matcher.hpp"
#endif
// Release the CompiledICHolder* associated with this call site is there is one.
void CompiledIC::cleanup_call_site(virtual_call_Relocation* call_site) {
  // This call site might have become stale so inspect it carefully.
  NativeCall* call = nativeCall_at(call_site->addr());
  if (is_icholder_entry(call->destination())) {
    NativeMovConstReg* value = nativeMovConstReg_at(call_site->cached_value());
    InlineCacheBuffer::queue_for_release((CompiledICHolder*)value->data());
  }
}

bool CompiledIC::is_icholder_call_site(virtual_call_Relocation* call_site) {
  // This call site might have become stale so inspect it carefully.
  NativeCall* call = nativeCall_at(call_site->addr());
  return is_icholder_entry(call->destination());
}

//-----------------------------------------------------------------------------
// High-level access to an inline cache. Guaranteed to be MT-safe.

CompiledIC::CompiledIC(nmethod* nm, NativeCall* call)
  : _ic_call(call)
{
  address ic_call = call->instruction_address();

  assert(ic_call != NULL, "ic_call address must be set");
  assert(nm != NULL, "must pass nmethod");
  assert(nm->contains(ic_call), "must be in nmethod");

  // Search for the ic_call at the given address.
  RelocIterator iter(nm, ic_call, ic_call+1);
  bool ret = iter.next();
  assert(ret == true, "relocInfo must exist at this address");
  assert(iter.addr() == ic_call, "must find ic_call");
  if (iter.type() == relocInfo::virtual_call_type) {
    virtual_call_Relocation* r = iter.virtual_call_reloc();
    _is_optimized = false;
    _value = nativeMovConstReg_at(r->cached_value());
  } else {
    assert(iter.type() == relocInfo::opt_virtual_call_type, "must be a virtual call");
    _is_optimized = true;
    _value = NULL;
  }
}

// ----------------------------------------------------------------------------

#define __ _masm.
void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
#ifdef COMPILER2
  // Stub is fixed up when the corresponding call is converted from calling
  // compiled code to calling interpreted code.
  // set (empty), G5
  // jmp -1

  address mark = cbuf.insts_mark();  // Get mark within main instrs section.

  MacroAssembler _masm(&cbuf);

  address base = __ start_a_stub(to_interp_stub_size()*2);
  if (base == NULL) return;  // CodeBuffer::expand failed.

  // Static stub relocation stores the instruction address of the call.
  __ relocate(static_stub_Relocation::spec(mark));

  __ set_metadata(NULL, as_Register(Matcher::inline_cache_reg_encode()));

  __ set_inst_mark();
  AddressLiteral addrlit(-1);
  __ JUMP(addrlit, G3, 0);

  __ delayed()->nop();

  // Update current stubs pointer and restore code_end.
  __ end_a_stub();
#else
  ShouldNotReachHere();
#endif
}
#undef __

int CompiledStaticCall::to_interp_stub_size() {
  // This doesn't need to be accurate but it must be larger or equal to
  // the real size of the stub.
  return (NativeMovConstReg::instruction_size +  // sethi/setlo;
          NativeJump::instruction_size +         // sethi; jmp; nop
          (TraceJumps ? 20 * BytesPerInstWord : 0));
}

// Relocation entries for call stub, compiled java to interpreter.
int CompiledStaticCall::reloc_to_interp_stub() {
  return 10;  // 4 in emit_java_to_interp + 1 in Java_Static_Call
}

void CompiledStaticCall::set_to_interpreted(methodHandle callee, address entry) {
  address stub = find_stub();
  guarantee(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  instruction_address(),
                  callee->name_and_sig_as_C_string());
  }

  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

  assert(method_holder->data() == 0 || method_holder->data() == (intptr_t)callee(),
         "a) MT-unsafe modification of inline cache");
  assert(jump->jump_destination() == (address)-1 || jump->jump_destination() == entry,
         "b) MT-unsafe modification of inline cache");

  // Update stub.
  method_holder->set_data((intptr_t)callee());
  jump->set_jump_destination(entry);

  // Update jump to call.
  set_destination_mt_safe(stub);
}

void CompiledStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
  assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
  // Reset stub.
  address stub = static_stub->addr();
  assert(stub != NULL, "stub not found");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
  method_holder->set_data(0);
  jump->set_jump_destination((address)-1);
}

//-----------------------------------------------------------------------------
// Non-product mode code
#ifndef PRODUCT

void CompiledStaticCall::verify() {
  // Verify call.
  NativeCall::verify();
  if (os::is_MP()) {
    verify_alignment();
  }

  // Verify stub.
  address stub = find_stub();
  assert(stub != NULL, "no stub found for static call");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

  // Verify state.
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
}
#endif // !PRODUCT
src/cpu/sparc/vm/sparc.ad
@@ -1655,53 +1655,6 @@ uint BoxLockNode::size(PhaseRegAlloc *ra_) const {
return ra_->C->scratch_emit_size(this);
}
//=============================================================================
// emit call stub, compiled java to interpretor
void emit_java_to_interp(CodeBuffer &cbuf ) {
// Stub is fixed up when the corresponding call is converted from calling
// compiled code to calling interpreted code.
// set (empty), G5
// jmp -1
address mark = cbuf.insts_mark(); // get mark within main instrs section
MacroAssembler _masm(&cbuf);
address base =
__ start_a_stub(Compile::MAX_stubs_size);
if (base == NULL) return; // CodeBuffer::expand failed
// static stub relocation stores the instruction address of the call
__ relocate(static_stub_Relocation::spec(mark));
__ set_metadata(NULL, reg_to_register_object(Matcher::inline_cache_reg_encode()));
__ set_inst_mark();
AddressLiteral addrlit(-1);
__ JUMP(addrlit, G3, 0);
__ delayed()->nop();
// Update current stubs pointer and restore code_end.
__ end_a_stub();
}
// size of call stub, compiled java to interpretor
uint size_java_to_interp() {
// This doesn't need to be accurate but it must be larger or equal to
// the real size of the stub.
return (NativeMovConstReg::instruction_size + // sethi/setlo;
NativeJump::instruction_size + // sethi; jmp; nop
(TraceJumps ? 20 * BytesPerInstWord : 0) );
}
// relocation entries for call stub, compiled java to interpretor
uint reloc_java_to_interp() {
return 10; // 4 in emit_java_to_interp + 1 in Java_Static_Call
}
//=============================================================================
#ifndef PRODUCT
void MachUEPNode::format( PhaseRegAlloc *ra_, outputStream *st ) const {
@@ -2576,15 +2529,15 @@ encode %{
  enc_class Java_Static_Call (method meth) %{    // JAVA STATIC CALL
    // CALL to fixup routine.  Fixup routine uses ScopeDesc info to determine
    // who we intended to call.
    if (!_method) {
      emit_call_reloc(cbuf, $meth$$method, relocInfo::runtime_call_type);
    } else if (_optimized_virtual) {
      emit_call_reloc(cbuf, $meth$$method, relocInfo::opt_virtual_call_type);
    } else {
      emit_call_reloc(cbuf, $meth$$method, relocInfo::static_call_type);
    }
-   if ( _method ) {  // Emit stub for static call
-     emit_java_to_interp(cbuf);
+   if (_method) {  // Emit stub for static call.
+     CompiledStaticCall::emit_to_interp_stub(cbuf);
    }
  %}
src/cpu/x86/vm/compiledIC_x86.cpp (new file, mode 100644)
/*
* Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "code/compiledIC.hpp"
#include "code/icBuffer.hpp"
#include "code/nmethod.hpp"
#include "memory/resourceArea.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/safepoint.hpp"
// Release the CompiledICHolder* associated with this call site is there is one.
void CompiledIC::cleanup_call_site(virtual_call_Relocation* call_site) {
  // This call site might have become stale so inspect it carefully.
  NativeCall* call = nativeCall_at(call_site->addr());
  if (is_icholder_entry(call->destination())) {
    NativeMovConstReg* value = nativeMovConstReg_at(call_site->cached_value());
    InlineCacheBuffer::queue_for_release((CompiledICHolder*)value->data());
  }
}

bool CompiledIC::is_icholder_call_site(virtual_call_Relocation* call_site) {
  // This call site might have become stale so inspect it carefully.
  NativeCall* call = nativeCall_at(call_site->addr());
  return is_icholder_entry(call->destination());
}

//-----------------------------------------------------------------------------
// High-level access to an inline cache. Guaranteed to be MT-safe.

CompiledIC::CompiledIC(nmethod* nm, NativeCall* call)
  : _ic_call(call)
{
  address ic_call = call->instruction_address();

  assert(ic_call != NULL, "ic_call address must be set");
  assert(nm != NULL, "must pass nmethod");
  assert(nm->contains(ic_call), "must be in nmethod");

  // Search for the ic_call at the given address.
  RelocIterator iter(nm, ic_call, ic_call+1);
  bool ret = iter.next();
  assert(ret == true, "relocInfo must exist at this address");
  assert(iter.addr() == ic_call, "must find ic_call");
  if (iter.type() == relocInfo::virtual_call_type) {
    virtual_call_Relocation* r = iter.virtual_call_reloc();
    _is_optimized = false;
    _value = nativeMovConstReg_at(r->cached_value());
  } else {
    assert(iter.type() == relocInfo::opt_virtual_call_type, "must be a virtual call");
    _is_optimized = true;
    _value = NULL;
  }
}

// ----------------------------------------------------------------------------

#define __ _masm.
void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
  // Stub is fixed up when the corresponding call is converted from
  // calling compiled code to calling interpreted code.
  // movq rbx, 0
  // jmp -5 # to self

  address mark = cbuf.insts_mark();  // Get mark within main instrs section.

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  address base = __ start_a_stub(to_interp_stub_size()*2);
  if (base == NULL) return;  // CodeBuffer::expand failed.
  // Static stub relocation stores the instruction address of the call.
  __ relocate(static_stub_Relocation::spec(mark), Assembler::imm_operand);
  // Static stub relocation also tags the Method* in the code-stream.
  __ mov_metadata(rbx, (Metadata*) NULL);  // Method is zapped till fixup time.
  // This is recognized as unresolved by relocs/nativeinst/ic code.
  __ jump(RuntimeAddress(__ pc()));

  // Update current stubs pointer and restore insts_end.
  __ end_a_stub();
}
#undef __

int CompiledStaticCall::to_interp_stub_size() {
  return NOT_LP64(10)    // movl; jmp
         LP64_ONLY(15);  // movq (1+1+8); jmp (1+4)
}

// Relocation entries for call stub, compiled java to interpreter.
int CompiledStaticCall::reloc_to_interp_stub() {
  return 4;  // 3 in emit_to_interp_stub + 1 in emit_call
}

void CompiledStaticCall::set_to_interpreted(methodHandle callee, address entry) {
  address stub = find_stub();
  guarantee(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  instruction_address(),
                  callee->name_and_sig_as_C_string());
  }

  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

  assert(method_holder->data() == 0 || method_holder->data() == (intptr_t)callee(),
         "a) MT-unsafe modification of inline cache");
  assert(jump->jump_destination() == (address)-1 || jump->jump_destination() == entry,
         "b) MT-unsafe modification of inline cache");

  // Update stub.
  method_holder->set_data((intptr_t)callee());
  jump->set_jump_destination(entry);

  // Update jump to call.
  set_destination_mt_safe(stub);
}

void CompiledStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
  assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
  // Reset stub.
  address stub = static_stub->addr();
  assert(stub != NULL, "stub not found");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
  method_holder->set_data(0);
  jump->set_jump_destination((address)-1);
}

//-----------------------------------------------------------------------------
// Non-product mode code
#ifndef PRODUCT

void CompiledStaticCall::verify() {
  // Verify call.
  NativeCall::verify();
  if (os::is_MP()) {
    verify_alignment();
  }

  // Verify stub.
  address stub = find_stub();
  assert(stub != NULL, "no stub found for static call");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

  // Verify state.
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
}
#endif // !PRODUCT
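For orientation, the x86 stub emitted above has just two parts: a mov of a Method* into rbx and a jump. The lifecycle sketched below is inferred from the code in this file; the comments are illustrative and not part of the patch.

// emit_to_interp_stub():   mov rbx, (Metadata*) NULL   ; Method* zapped till fixup time
//                          jmp <unresolved>            ; recognized as unresolved by IC code
//
// set_to_interpreted(callee, entry):
//                          mov rbx, <callee Method*>   ; method_holder->set_data(...)
//                          jmp <entry>                 ; jump->set_jump_destination(entry)
//                          then set_destination_mt_safe(stub) points the compiled
//                          call site at this stub.
//
// set_stub_to_clean():     resets the Method* to 0 and the jump to (address)-1.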
src/cpu/x86/vm/x86_32.ad
@@ -1256,43 +1256,6 @@ uint BoxLockNode::size(PhaseRegAlloc *ra_) const {
}
}
//=============================================================================
// emit call stub, compiled java to interpreter
void emit_java_to_interp(CodeBuffer &cbuf ) {
// Stub is fixed up when the corresponding call is converted from calling
// compiled code to calling interpreted code.
// mov rbx,0
// jmp -1
address mark = cbuf.insts_mark(); // get mark within main instrs section
// Note that the code buffer's insts_mark is always relative to insts.
// That's why we must use the macroassembler to generate a stub.
MacroAssembler _masm(&cbuf);
address base =
__ start_a_stub(Compile::MAX_stubs_size);
if (base == NULL) return; // CodeBuffer::expand failed
// static stub relocation stores the instruction address of the call
__ relocate(static_stub_Relocation::spec(mark), RELOC_IMM32);
// static stub relocation also tags the Method* in the code-stream.
__ mov_metadata(rbx, (Metadata*)NULL); // method is zapped till fixup time
// This is recognized as unresolved by relocs/nativeInst/ic code
__ jump(RuntimeAddress(__ pc()));
__ end_a_stub();
// Update current stubs pointer and restore insts_end.
}
// size of call stub, compiled java to interpretor
uint size_java_to_interp() {
return 10; // movl; jmp
}
// relocation entries for call stub, compiled java to interpretor
uint reloc_java_to_interp() {
return 4; // 3 in emit_java_to_interp + 1 in Java_Static_Call
}
//=============================================================================
#ifndef PRODUCT
void MachUEPNode::format( PhaseRegAlloc *ra_, outputStream* st ) const {
@@ -1909,8 +1872,8 @@ encode %{
emit_d32_reloc(cbuf, ($meth$$method - (int)(cbuf.insts_end()) - 4),
static_call_Relocation::spec(), RELOC_IMM32 );
}
-   if (_method) {  // Emit stub for static call
-     emit_java_to_interp(cbuf);
+   if (_method) {  // Emit stub for static call.
+     CompiledStaticCall::emit_to_interp_stub(cbuf);
    }
  %}
src/cpu/x86/vm/x86_64.ad
@@ -1387,48 +1387,6 @@ uint BoxLockNode::size(PhaseRegAlloc *ra_) const
return (offset < 0x80) ? 5 : 8; // REX
}
//=============================================================================
// emit call stub, compiled java to interpreter
void emit_java_to_interp(CodeBuffer& cbuf)
{
// Stub is fixed up when the corresponding call is converted from
// calling compiled code to calling interpreted code.
// movq rbx, 0
// jmp -5 # to self
address mark = cbuf.insts_mark(); // get mark within main instrs section
// Note that the code buffer's insts_mark is always relative to insts.
// That's why we must use the macroassembler to generate a stub.
MacroAssembler _masm(&cbuf);
address base =
__ start_a_stub(Compile::MAX_stubs_size);
if (base == NULL) return; // CodeBuffer::expand failed
// static stub relocation stores the instruction address of the call
__ relocate(static_stub_Relocation::spec(mark), RELOC_IMM64);
// static stub relocation also tags the Method* in the code-stream.
__ mov_metadata(rbx, (Metadata*) NULL); // method is zapped till fixup time
// This is recognized as unresolved by relocs/nativeinst/ic code
__ jump(RuntimeAddress(__ pc()));
// Update current stubs pointer and restore insts_end.
__ end_a_stub();
}
// size of call stub, compiled java to interpretor
uint size_java_to_interp()
{
return 15; // movq (1+1+8); jmp (1+4)
}
// relocation entries for call stub, compiled java to interpretor
uint reloc_java_to_interp()
{
return 4; // 3 in emit_java_to_interp + 1 in Java_Static_Call
}
//=============================================================================
#ifndef PRODUCT
void MachUEPNode::format(PhaseRegAlloc* ra_, outputStream* st) const
@@ -2078,8 +2036,8 @@ encode %{
RELOC_DISP32);
}
    if (_method) {
-     // Emit stub for static call
-     emit_java_to_interp(cbuf);
+     // Emit stub for static call.
+     CompiledStaticCall::emit_to_interp_stub(cbuf);
    }
  %}
src/cpu/zero/vm/compiledIC_zero.cpp (new file, mode 100644)
/*
* Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#include "precompiled.hpp"
#include "classfile/systemDictionary.hpp"
#include "code/codeCache.hpp"
#include "code/compiledIC.hpp"
#include "code/icBuffer.hpp"
#include "code/nmethod.hpp"
#include "code/vtableStubs.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/linkResolver.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/oopFactory.hpp"
#include "oops/method.hpp"
#include "oops/oop.inline.hpp"
#include "oops/symbol.hpp"
#include "runtime/icache.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/stubRoutines.hpp"
#include "utilities/events.hpp"
// Release the CompiledICHolder* associated with this call site is there is one.
void CompiledIC::cleanup_call_site(virtual_call_Relocation* call_site) {
  // This call site might have become stale so inspect it carefully.
  NativeCall* call = nativeCall_at(call_site->addr());
  if (is_icholder_entry(call->destination())) {
    NativeMovConstReg* value = nativeMovConstReg_at(call_site->cached_value());
    InlineCacheBuffer::queue_for_release((CompiledICHolder*)value->data());
  }
}

bool CompiledIC::is_icholder_call_site(virtual_call_Relocation* call_site) {
  // This call site might have become stale so inspect it carefully.
  NativeCall* call = nativeCall_at(call_site->addr());
  return is_icholder_entry(call->destination());
}

//-----------------------------------------------------------------------------
// High-level access to an inline cache. Guaranteed to be MT-safe.

CompiledIC::CompiledIC(nmethod* nm, NativeCall* call)
  : _ic_call(call)
{
  address ic_call = call->instruction_address();

  assert(ic_call != NULL, "ic_call address must be set");
  assert(nm != NULL, "must pass nmethod");
  assert(nm->contains(ic_call), "must be in nmethod");

  // Search for the ic_call at the given address.
  RelocIterator iter(nm, ic_call, ic_call+1);
  bool ret = iter.next();
  assert(ret == true, "relocInfo must exist at this address");
  assert(iter.addr() == ic_call, "must find ic_call");
  if (iter.type() == relocInfo::virtual_call_type) {
    virtual_call_Relocation* r = iter.virtual_call_reloc();
    _is_optimized = false;
    _value = nativeMovConstReg_at(r->cached_value());
  } else {
    assert(iter.type() == relocInfo::opt_virtual_call_type, "must be a virtual call");
    _is_optimized = true;
    _value = NULL;
  }
}

// ----------------------------------------------------------------------------

void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
  ShouldNotReachHere();  // Only needed for COMPILER2.
}

int CompiledStaticCall::to_interp_stub_size() {
  ShouldNotReachHere();  // Only needed for COMPILER2.
  return 0;
}

// Relocation entries for call stub, compiled java to interpreter.
int CompiledStaticCall::reloc_to_interp_stub() {
  ShouldNotReachHere();  // Only needed for COMPILER2.
  return 0;
}

void CompiledStaticCall::set_to_interpreted(methodHandle callee, address entry) {
  ShouldNotReachHere();  // Only needed for COMPILER2.
}

void CompiledStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
  ShouldNotReachHere();  // Only needed for COMPILER2.
}

//-----------------------------------------------------------------------------
// Non-product mode code.
#ifndef PRODUCT

void CompiledStaticCall::verify() {
  ShouldNotReachHere();  // Only needed for COMPILER2.
}
#endif // !PRODUCT
src/share/vm/adlc/main.cpp
@@ -213,6 +213,7 @@ int main(int argc, char *argv[])
  AD.addInclude(AD._CPP_file, "adfiles", get_basename(AD._HPP_file._name));
  AD.addInclude(AD._CPP_file, "memory/allocation.inline.hpp");
  AD.addInclude(AD._CPP_file, "asm/macroAssembler.inline.hpp");
  AD.addInclude(AD._CPP_file, "code/compiledIC.hpp");
  AD.addInclude(AD._CPP_file, "code/vmreg.hpp");
  AD.addInclude(AD._CPP_file, "gc_interface/collectedHeap.inline.hpp");
  AD.addInclude(AD._CPP_file, "oops/compiledICHolder.hpp");
src/share/vm/code/compiledIC.cpp
@@ -45,25 +45,6 @@
// Every time a compiled IC is changed or its type is being accessed,
// either the CompiledIC_lock must be set or we must be at a safe point.
// Release the CompiledICHolder* associated with this call site is there is one.
void CompiledIC::cleanup_call_site(virtual_call_Relocation* call_site) {
  // This call site might have become stale so inspect it carefully.
  NativeCall* call = nativeCall_at(call_site->addr());
  if (is_icholder_entry(call->destination())) {
    NativeMovConstReg* value = nativeMovConstReg_at(call_site->cached_value());
    InlineCacheBuffer::queue_for_release((CompiledICHolder*)value->data());
  }
}

bool CompiledIC::is_icholder_call_site(virtual_call_Relocation* call_site) {
  // This call site might have become stale so inspect it carefully.
  NativeCall* call = nativeCall_at(call_site->addr());
  return is_icholder_entry(call->destination());
}
//-----------------------------------------------------------------------------
// Low-level access to an inline cache. Private, since they might not be
// MT-safe to use.
@@ -488,33 +469,6 @@ bool CompiledIC::is_icholder_entry(address entry) {
  return (cb != NULL && cb->is_adapter_blob());
}

CompiledIC::CompiledIC(nmethod* nm, NativeCall* call)
  : _ic_call(call)
{
  address ic_call = call->instruction_address();

  assert(ic_call != NULL, "ic_call address must be set");
  assert(nm != NULL, "must pass nmethod");
  assert(nm->contains(ic_call), "must be in nmethod");

  // search for the ic_call at the given address
  RelocIterator iter(nm, ic_call, ic_call+1);
  bool ret = iter.next();
  assert(ret == true, "relocInfo must exist at this address");
  assert(iter.addr() == ic_call, "must find ic_call");
  if (iter.type() == relocInfo::virtual_call_type) {
    virtual_call_Relocation* r = iter.virtual_call_reloc();
    _is_optimized = false;
    _value = nativeMovConstReg_at(r->cached_value());
  } else {
    assert(iter.type() == relocInfo::opt_virtual_call_type, "must be a virtual call");
    _is_optimized = true;
    _value = NULL;
  }
}

// ----------------------------------------------------------------------------

void CompiledStaticCall::set_to_clean() {
@@ -549,33 +503,6 @@ bool CompiledStaticCall::is_call_to_interpreted() const {
  return nm->stub_contains(destination());
}

void CompiledStaticCall::set_to_interpreted(methodHandle callee, address entry) {
  address stub = find_stub();
  guarantee(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  instruction_address(),
                  callee->name_and_sig_as_C_string());
  }

  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);   // creation also verifies the object
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

  assert(method_holder->data() == 0 || method_holder->data() == (intptr_t)callee(),
         "a) MT-unsafe modification of inline cache");
  assert(jump->jump_destination() == (address)-1 || jump->jump_destination() == entry,
         "b) MT-unsafe modification of inline cache");

  // Update stub
  method_holder->set_data((intptr_t)callee());
  jump->set_jump_destination(entry);

  // Update jump to call
  set_destination_mt_safe(stub);
}

void CompiledStaticCall::set(const StaticCallInfo& info) {
  assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
  MutexLockerEx pl(Patching_lock, Mutex::_no_safepoint_check_flag);
@@ -618,19 +545,6 @@ void CompiledStaticCall::compute_entry(methodHandle m, StaticCallInfo& info) {
  }
}

void CompiledStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
  assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "mt unsafe call");
  // Reset stub
  address stub = static_stub->addr();
  assert(stub != NULL, "stub not found");
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);   // creation also verifies the object
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());
  method_holder->set_data(0);
  jump->set_jump_destination((address)-1);
}

address CompiledStaticCall::find_stub() {
  // Find reloc. information containing this call-site
  RelocIterator iter((nmethod*)NULL, instruction_address());
@@ -668,19 +582,16 @@ void CompiledIC::verify() {
         || is_optimized() || is_megamorphic(), "sanity check");
}

void CompiledIC::print() {
  print_compiled_ic();
  tty->cr();
}

void CompiledIC::print_compiled_ic() {
  tty->print("Inline cache at " INTPTR_FORMAT ", calling %s " INTPTR_FORMAT " cached_value " INTPTR_FORMAT,
             instruction_address(), is_call_to_interpreted() ? "interpreted " : "", ic_destination(), is_optimized() ? NULL : cached_value());
}

void CompiledStaticCall::print() {
  tty->print("static call at " INTPTR_FORMAT " -> ", instruction_address());
  if (is_clean()) {
@@ -693,21 +604,4 @@ void CompiledStaticCall::print() {
  tty->cr();
}

void CompiledStaticCall::verify() {
  // Verify call
  NativeCall::verify();
  if (os::is_MP()) {
    verify_alignment();
  }

  // Verify stub
  address stub = find_stub();
  assert(stub != NULL, "no stub found for static call");
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);   // creation also verifies the object
  NativeJump*        jump          = nativeJump_at(method_holder->next_instruction_address());

  // Verify state
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
}

-#endif
+#endif // !PRODUCT
src/share/vm/code/compiledIC.hpp
@@ -304,6 +304,11 @@ class CompiledStaticCall: public NativeCall {
  friend CompiledStaticCall* compiledStaticCall_at(address native_call);
  friend CompiledStaticCall* compiledStaticCall_at(Relocation* call_site);

  // Code
  static void emit_to_interp_stub(CodeBuffer &cbuf);
  static int  to_interp_stub_size();
  static int  reloc_to_interp_stub();

  // State
  bool is_clean() const;
  bool is_call_to_compiled() const;
src/share/vm/opto/output.cpp
@@ -24,6 +24,7 @@
#include "precompiled.hpp"
#include "asm/assembler.inline.hpp"
#include "code/compiledIC.hpp"
#include "code/debugInfo.hpp"
#include "code/debugInfoRec.hpp"
#include "compiler/compileBroker.hpp"
@@ -41,8 +42,6 @@
#include "runtime/handles.inline.hpp"
#include "utilities/xmlstream.hpp"
extern uint size_java_to_interp();
extern uint reloc_java_to_interp();
extern uint size_exception_handler();
extern uint size_deopt_handler();
@@ -389,15 +388,15 @@ void Compile::shorten_branches(uint* blk_starts, int& code_size, int& reloc_size
        MachNode *mach = nj->as_Mach();
        blk_size += (mach->alignment_required() - 1) * relocInfo::addr_unit(); // assume worst case padding
        reloc_size += mach->reloc();
        if (mach->is_MachCall()) {
          MachCallNode *mcall = mach->as_MachCall();
          // This destination address is NOT PC-relative

          mcall->method_set((intptr_t)mcall->entry_point());

-         if (mcall->is_MachCallJava() && mcall->as_MachCallJava()->_method) {
-           stub_size  += size_java_to_interp();
-           reloc_size += reloc_java_to_interp();
+         if (mcall->is_MachCallJava() && mcall->as_MachCallJava()->_method) {
+           stub_size  += CompiledStaticCall::to_interp_stub_size();
+           reloc_size += CompiledStaticCall::reloc_to_interp_stub();
          }
        } else if (mach->is_MachSafePoint()) {
          // If call/safepoint are adjacent, account for possible
src/share/vm/runtime/arguments.cpp
@@ -2224,6 +2224,55 @@ jint Arguments::parse_vm_init_args(const JavaVMInitArgs* args) {
  return JNI_OK;
}

// Checks if name in command-line argument -agent{lib,path}:name[=options]
// represents a valid HPROF of JDWP agent.  is_path==true denotes that we
// are dealing with -agentpath (case where name is a path), otherwise with
// -agentlib
bool valid_hprof_or_jdwp_agent(char *name, bool is_path) {
  char *_name;
  const char *_hprof = "hprof", *_jdwp = "jdwp";
  size_t _len_hprof, _len_jdwp, _len_prefix;

  if (is_path) {
    if ((_name = strrchr(name, (int) *os::file_separator())) == NULL) {
      return false;
    }

    _name++;  // skip past last path separator
    _len_prefix = strlen(JNI_LIB_PREFIX);

    if (strncmp(_name, JNI_LIB_PREFIX, _len_prefix) != 0) {
      return false;
    }

    _name += _len_prefix;
    _len_hprof = strlen(_hprof);
    _len_jdwp = strlen(_jdwp);

    if (strncmp(_name, _hprof, _len_hprof) == 0) {
      _name += _len_hprof;
    }
    else if (strncmp(_name, _jdwp, _len_jdwp) == 0) {
      _name += _len_jdwp;
    }
    else {
      return false;
    }

    if (strcmp(_name, JNI_LIB_SUFFIX) != 0) {
      return false;
    }

    return true;
  }

  if (strcmp(name, _hprof) == 0 || strcmp(name, _jdwp) == 0) {
    return true;
  }

  return false;
}

jint Arguments::parse_each_vm_init_arg(const JavaVMInitArgs* args,
                                       SysClassPath* scp_p,
                                       bool* scp_assembly_required_p,
@@ -2322,7 +2371,7 @@ jint Arguments::parse_each_vm_init_arg(const JavaVMInitArgs* args,
        options = strcpy(NEW_C_HEAP_ARRAY(char, strlen(pos + 1) + 1, mtInternal), pos + 1);
      }
#if !INCLUDE_JVMTI
-     if ((strcmp(name, "hprof") == 0) || (strcmp(name, "jdwp") == 0)) {
+     if (valid_hprof_or_jdwp_agent(name, is_absolute_path)) {
        jio_fprintf(defaultStream::error_stream(),
          "Profiling and debugging agents are not supported in this VM\n");
        return JNI_ERR;
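To illustrate what the new check means on a build without JVMTI: valid_hprof_or_jdwp_agent() returns true only for the HPROF and JDWP agents, whether named via -agentlib or via a full -agentpath, and those are then refused with JNI_ERR. A few sample inputs (a sketch; JNI_LIB_PREFIX and JNI_LIB_SUFFIX are platform macros, shown here with their usual Linux values "lib" and ".so"):

// -agentlib:hprof=heap=sites            name == "hprof"            -> true  -> JNI_ERR
// -agentlib:jdwp=transport=dt_socket    name == "jdwp"             -> true  -> JNI_ERR
// -agentpath:/opt/tools/libjdwp.so      basename "libjdwp.so"      -> true  -> JNI_ERR
// -agentpath:/opt/tools/libmyagent.so   neither hprof nor jdwp     -> false -> not rejected by this check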