Commit c1ee843b authored by: D duke

Merge

......@@ -238,3 +238,4 @@ d086227bfc45d124f09b3bd72a07956b4073bf71 jdk8-b111
4f2011496393a26dcfd7b1f7787a3673ddd32599 jdk8-b114
763ada2a1d8c5962bc8c3d297e57c562d2e95338 jdk8-b115
cbfe5da942c63ef865cab4a7159e01eff7d7fcf5 jdk8-b116
a4afb0a8d55ef75aef5b0d77b434070468fb89f8 jdk8-b117
......@@ -395,3 +395,5 @@ f6962730bbde82f279a0ae3a1c14bc5e58096c6e jdk8-b111
e510dfdec6dd701410f3398ed86ebcdff0cca63a hs25-b58
52b076e6ffae247c1c7d8b7aba995195be2b6fc2 jdk8-b116
c78d517c7ea47501b456e707afd4b78e7b5b202e hs25-b59
f573d00213b7170c2ff856f9cd83cd148437f5b9 jdk8-b117
abad3b2d905d9e1ad767c94baa94aba6ed5b207b hs25-b60
......@@ -24,8 +24,9 @@
package sun.jvm.hotspot.tools;
import sun.jvm.hotspot.runtime.*;
import sun.jvm.hotspot.debugger.JVMDebugger;
import sun.jvm.hotspot.runtime.Arguments;
import sun.jvm.hotspot.runtime.VM;
public class JInfo extends Tool {
public JInfo() {
......@@ -138,14 +139,33 @@ public class JInfo extends Tool {
}
private void printVMFlags() {
VM.Flag[] flags = VM.getVM().getCommandLineFlags();
System.out.print("Non-default VM flags: ");
for (VM.Flag flag : flags) {
if (flag.getOrigin() == 0) {
// only print flags which aren't their defaults
continue;
}
if (flag.isBool()) {
String onoff = flag.getBool() ? "+" : "-";
System.out.print("-XX:" + onoff + flag.getName() + " ");
} else {
System.out.print("-XX:" + flag.getName() + "="
+ flag.getValue() + " ");
}
}
System.out.println();
System.out.print("Command line: ");
String str = Arguments.getJVMFlags();
if (str != null) {
System.out.println(str);
System.out.print(str + " ");
}
str = Arguments.getJVMArgs();
if (str != null) {
System.out.println(str);
System.out.print(str);
}
System.out.println();
}
private int mode;
......
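
Note on the JInfo hunk above: it now prints only flags whose origin is non-default, formatting boolean flags as -XX:+Name / -XX:-Name and everything else as -XX:Name=value. Below is a minimal, hedged sketch of the same formatting done from ordinary Java code (illustrative class name; it goes through the com.sun.management JMX bean, which only exposes the writeable diagnostic/manageable flags, so it approximates rather than reproduces the SA-based JInfo output):

import java.lang.management.ManagementFactory;
import com.sun.management.HotSpotDiagnosticMXBean;
import com.sun.management.VMOption;

// Approximates the "Non-default VM flags:" line printed by the patched JInfo,
// but only for the writeable diagnostic/manageable options reachable via JMX.
public class NonDefaultFlags {
    public static void main(String[] args) {
        HotSpotDiagnosticMXBean bean =
                ManagementFactory.getPlatformMXBean(HotSpotDiagnosticMXBean.class);
        StringBuilder sb = new StringBuilder("Non-default VM flags: ");
        for (VMOption opt : bean.getDiagnosticOptions()) {
            if (opt.getOrigin() == VMOption.Origin.DEFAULT) {
                continue; // skip flags still at their default value, like JInfo does
            }
            String v = opt.getValue();
            // VMOption does not expose the flag type, so treat "true"/"false"
            // values as boolean flags; this is an approximation.
            if ("true".equals(v) || "false".equals(v)) {
                sb.append("-XX:").append("true".equals(v) ? "+" : "-")
                  .append(opt.getName()).append(' ');
            } else {
                sb.append("-XX:").append(opt.getName()).append('=').append(v).append(' ');
            }
        }
        System.out.println(sb);
    }
}

Starting the JVM with a manageable flag set explicitly, for example -XX:+HeapDumpOnOutOfMemoryError, should make that flag show up in the printed line.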
......@@ -25,11 +25,11 @@
package sun.jvm.hotspot.tools;
import java.io.PrintStream;
import java.util.Hashtable;
import sun.jvm.hotspot.*;
import sun.jvm.hotspot.runtime.*;
import sun.jvm.hotspot.debugger.*;
import sun.jvm.hotspot.HotSpotAgent;
import sun.jvm.hotspot.debugger.DebuggerException;
import sun.jvm.hotspot.debugger.JVMDebugger;
import sun.jvm.hotspot.runtime.VM;
// generic command line or GUI tool.
// override run & code main as shown below.
......@@ -147,6 +147,7 @@ public abstract class Tool implements Runnable {
}
PrintStream err = System.err;
PrintStream out = System.out;
int pid = 0;
String coreFileName = null;
......@@ -180,18 +181,18 @@ public abstract class Tool implements Runnable {
try {
switch (debugeeType) {
case DEBUGEE_PID:
err.println("Attaching to process ID " + pid + ", please wait...");
out.println("Attaching to process ID " + pid + ", please wait...");
agent.attach(pid);
break;
case DEBUGEE_CORE:
err.println("Attaching to core " + coreFileName +
out.println("Attaching to core " + coreFileName +
" from executable " + executableName + ", please wait...");
agent.attach(executableName, coreFileName);
break;
case DEBUGEE_REMOTE:
err.println("Attaching to remote server " + remoteServer + ", please wait...");
out.println("Attaching to remote server " + remoteServer + ", please wait...");
agent.attach(remoteServer);
break;
}
......@@ -218,7 +219,7 @@ public abstract class Tool implements Runnable {
return 1;
}
err.println("Debugger attached successfully.");
out.println("Debugger attached successfully.");
startInternal();
return 0;
}
......@@ -237,14 +238,14 @@ public abstract class Tool implements Runnable {
// Remains of the start mechanism, common to both start methods.
private void startInternal() {
PrintStream err = System.err;
PrintStream out = System.out;
VM vm = VM.getVM();
if (vm.isCore()) {
err.println("Core build detected.");
out.println("Core build detected.");
} else if (vm.isClientCompiler()) {
err.println("Client compiler detected.");
out.println("Client compiler detected.");
} else if (vm.isServerCompiler()) {
err.println("Server compiler detected.");
out.println("Server compiler detected.");
} else {
throw new RuntimeException("Fatal error: "
+ "should have been able to detect core/C1/C2 build");
......@@ -252,8 +253,8 @@ public abstract class Tool implements Runnable {
String version = vm.getVMRelease();
if (version != null) {
err.print("JVM version is ");
err.println(version);
out.print("JVM version is ");
out.println(version);
}
run();
......
......@@ -35,7 +35,7 @@ HOTSPOT_VM_COPYRIGHT=Copyright 2013
HS_MAJOR_VER=25
HS_MINOR_VER=0
HS_BUILD_NUMBER=59
HS_BUILD_NUMBER=60
JDK_MAJOR_VER=1
JDK_MINOR_VER=8
......
......@@ -3001,6 +3001,10 @@ void SharedRuntime::generate_deopt_blob() {
// sp should be pointing at the return address to the caller (3)
// Pick up the initial fp we should save
// restore rbp before stack bang because if stack overflow is thrown it needs to be pushed (and preserved)
__ movptr(rbp, Address(rdi, Deoptimization::UnrollBlock::initial_info_offset_in_bytes()));
// Stack bang to make sure there's enough room for these interpreter frames.
if (UseStackBanging) {
__ movl(rbx, Address(rdi ,Deoptimization::UnrollBlock::total_frame_sizes_offset_in_bytes()));
......@@ -3020,9 +3024,6 @@ void SharedRuntime::generate_deopt_blob() {
__ movl(rbx, Address(rdi, Deoptimization::UnrollBlock::number_of_frames_offset_in_bytes()));
__ movl(counter, rbx);
// Pick up the initial fp we should save
__ movptr(rbp, Address(rdi, Deoptimization::UnrollBlock::initial_info_offset_in_bytes()));
// Now adjust the caller's stack to make up for the extra locals
// but record the original sp so that we can save it in the skeletal interpreter
// frame and the stack walking of interpreter_sender will get the unextended sp
......@@ -3220,6 +3221,10 @@ void SharedRuntime::generate_uncommon_trap_blob() {
// sp should be pointing at the return address to the caller (3)
// Pick up the initial fp we should save
// restore rbp before stack bang because if stack overflow is thrown it needs to be pushed (and preserved)
__ movptr(rbp, Address(rdi, Deoptimization::UnrollBlock::initial_info_offset_in_bytes()));
// Stack bang to make sure there's enough room for these interpreter frames.
if (UseStackBanging) {
__ movl(rbx, Address(rdi ,Deoptimization::UnrollBlock::total_frame_sizes_offset_in_bytes()));
......@@ -3240,9 +3245,6 @@ void SharedRuntime::generate_uncommon_trap_blob() {
__ movl(rbx, Address(rdi, Deoptimization::UnrollBlock::number_of_frames_offset_in_bytes()));
__ movl(counter, rbx);
// Pick up the initial fp we should save
__ movptr(rbp, Address(rdi, Deoptimization::UnrollBlock::initial_info_offset_in_bytes()));
// Now adjust the caller's stack to make up for the extra locals
// but record the original sp so that we can save it in the skeletal interpreter
// frame and the stack walking of interpreter_sender will get the unextended sp
......
......@@ -3471,6 +3471,10 @@ void SharedRuntime::generate_deopt_blob() {
// rsp should be pointing at the return address to the caller (3)
// Pick up the initial fp we should save
// restore rbp before stack bang because if stack overflow is thrown it needs to be pushed (and preserved)
__ movptr(rbp, Address(rdi, Deoptimization::UnrollBlock::initial_info_offset_in_bytes()));
// Stack bang to make sure there's enough room for these interpreter frames.
if (UseStackBanging) {
__ movl(rbx, Address(rdi, Deoptimization::UnrollBlock::total_frame_sizes_offset_in_bytes()));
......@@ -3489,9 +3493,6 @@ void SharedRuntime::generate_deopt_blob() {
// Load counter into rdx
__ movl(rdx, Address(rdi, Deoptimization::UnrollBlock::number_of_frames_offset_in_bytes()));
// Pick up the initial fp we should save
__ movptr(rbp, Address(rdi, Deoptimization::UnrollBlock::initial_info_offset_in_bytes()));
// Now adjust the caller's stack to make up for the extra locals
// but record the original sp so that we can save it in the skeletal interpreter
// frame and the stack walking of interpreter_sender will get the unextended sp
......@@ -3663,6 +3664,10 @@ void SharedRuntime::generate_uncommon_trap_blob() {
// rsp should be pointing at the return address to the caller (3)
// Pick up the initial fp we should save
// restore rbp before stack bang because if stack overflow is thrown it needs to be pushed (and preserved)
__ movptr(rbp, Address(rdi, Deoptimization::UnrollBlock::initial_info_offset_in_bytes()));
// Stack bang to make sure there's enough room for these interpreter frames.
if (UseStackBanging) {
__ movl(rbx, Address(rdi ,Deoptimization::UnrollBlock::total_frame_sizes_offset_in_bytes()));
......@@ -3670,27 +3675,16 @@ void SharedRuntime::generate_uncommon_trap_blob() {
}
// Load address of array of frame pcs into rcx (address*)
__ movptr(rcx,
Address(rdi,
Deoptimization::UnrollBlock::frame_pcs_offset_in_bytes()));
__ movptr(rcx, Address(rdi, Deoptimization::UnrollBlock::frame_pcs_offset_in_bytes()));
// Trash the return pc
__ addptr(rsp, wordSize);
// Load address of array of frame sizes into rsi (intptr_t*)
__ movptr(rsi, Address(rdi,
Deoptimization::UnrollBlock::
frame_sizes_offset_in_bytes()));
__ movptr(rsi, Address(rdi, Deoptimization::UnrollBlock:: frame_sizes_offset_in_bytes()));
// Counter
__ movl(rdx, Address(rdi,
Deoptimization::UnrollBlock::
number_of_frames_offset_in_bytes())); // (int)
// Pick up the initial fp we should save
__ movptr(rbp,
Address(rdi,
Deoptimization::UnrollBlock::initial_info_offset_in_bytes()));
__ movl(rdx, Address(rdi, Deoptimization::UnrollBlock:: number_of_frames_offset_in_bytes())); // (int)
// Now adjust the caller's stack to make up for the extra locals but
// record the original sp so that we can save it in the skeletal
......@@ -3700,9 +3694,7 @@ void SharedRuntime::generate_uncommon_trap_blob() {
const Register sender_sp = r8;
__ mov(sender_sp, rsp);
__ movl(rbx, Address(rdi,
Deoptimization::UnrollBlock::
caller_adjustment_offset_in_bytes())); // (int)
__ movl(rbx, Address(rdi, Deoptimization::UnrollBlock:: caller_adjustment_offset_in_bytes())); // (int)
__ subptr(rsp, rbx);
// Push interpreter frames in a loop
......
......@@ -1003,21 +1003,15 @@ void ciEnv::register_method(ciMethod* target,
// Free codeBlobs
code_buffer->free_blob();
if (nm == NULL) {
// The CodeCache is full. Print out warning and disable compilation.
record_failure("code cache is full");
{
MutexUnlocker ml(Compile_lock);
MutexUnlocker locker(MethodCompileQueue_lock);
CompileBroker::handle_full_code_cache();
}
} else {
if (nm != NULL) {
nm->set_has_unsafe_access(has_unsafe_access);
nm->set_has_wide_vectors(has_wide_vectors);
// Record successful registration.
// (Put nm into the task handle *before* publishing to the Java heap.)
if (task() != NULL) task()->set_code(nm);
if (task() != NULL) {
task()->set_code(nm);
}
if (entry_bci == InvocationEntryBci) {
if (TieredCompilation) {
......@@ -1055,12 +1049,16 @@ void ciEnv::register_method(ciMethod* target,
method->method_holder()->add_osr_nmethod(nm);
}
}
}
// JVMTI -- compiled method notification (must be done outside lock)
} // safepoints are allowed again
if (nm != NULL) {
// JVMTI -- compiled method notification (must be done outside lock)
nm->post_compiled_method_load_event();
} else {
// The CodeCache is full. Print out warning and disable compilation.
record_failure("code cache is full");
CompileBroker::handle_full_code_cache();
}
}
......
......@@ -30,6 +30,7 @@
#include "prims/jvmtiImpl.hpp"
#include "runtime/synchronizer.hpp"
#include "runtime/thread.hpp"
#include "services/threadService.hpp"
#include "utilities/growableArray.hpp"
......@@ -50,6 +51,7 @@ MetadataOnStackMark::MetadataOnStackMark() {
CodeCache::alive_nmethods_do(nmethod::mark_on_stack);
CompileBroker::mark_on_stack();
JvmtiCurrentBreakpoints::metadata_do(Metadata::mark_on_stack);
ThreadService::metadata_do(Metadata::mark_on_stack);
}
MetadataOnStackMark::~MetadataOnStackMark() {
......
......@@ -141,7 +141,6 @@ class SymbolPropertyTable;
/* NOTE: needed too early in bootstrapping process to have checks based on JDK version */ \
/* Universe::is_gte_jdk14x_version() is not set up by this point. */ \
/* It's okay if this turns out to be NULL in non-1.4 JDKs. */ \
do_klass(lambda_MagicLambdaImpl_klass, java_lang_invoke_MagicLambdaImpl, Opt ) \
do_klass(reflect_MagicAccessorImpl_klass, sun_reflect_MagicAccessorImpl, Opt ) \
do_klass(reflect_MethodAccessorImpl_klass, sun_reflect_MethodAccessorImpl, Opt_Only_JDK14NewRef) \
do_klass(reflect_ConstructorAccessorImpl_klass, sun_reflect_ConstructorAccessorImpl, Opt_Only_JDK14NewRef) \
......
......@@ -188,10 +188,8 @@ bool Verifier::verify(instanceKlassHandle klass, Verifier::Mode mode, bool shoul
bool Verifier::is_eligible_for_verification(instanceKlassHandle klass, bool should_verify_class) {
Symbol* name = klass->name();
Klass* refl_magic_klass = SystemDictionary::reflect_MagicAccessorImpl_klass();
Klass* lambda_magic_klass = SystemDictionary::lambda_MagicLambdaImpl_klass();
bool is_reflect = refl_magic_klass != NULL && klass->is_subtype_of(refl_magic_klass);
bool is_lambda = lambda_magic_klass != NULL && klass->is_subtype_of(lambda_magic_klass);
return (should_verify_for(klass->class_loader(), should_verify_class) &&
// return if the class is a bootstrapping class
......@@ -215,9 +213,7 @@ bool Verifier::is_eligible_for_verification(instanceKlassHandle klass, bool shou
// NOTE: this is called too early in the bootstrapping process to be
// guarded by Universe::is_gte_jdk14x_version()/UseNewReflection.
// Also for lambda generated code, gte jdk8
(!is_reflect || VerifyReflectionBytecodes) &&
(!is_lambda || VerifyLambdaBytecodes)
);
(!is_reflect || VerifyReflectionBytecodes));
}
Symbol* Verifier::inference_verify(
......
......@@ -273,7 +273,6 @@
template(java_lang_invoke_Stable_signature, "Ljava/lang/invoke/Stable;") \
template(java_lang_invoke_LambdaForm_Compiled_signature, "Ljava/lang/invoke/LambdaForm$Compiled;") \
template(java_lang_invoke_LambdaForm_Hidden_signature, "Ljava/lang/invoke/LambdaForm$Hidden;") \
template(java_lang_invoke_MagicLambdaImpl, "java/lang/invoke/MagicLambdaImpl") \
/* internal up-calls made only by the JVM, via class sun.invoke.MethodHandleNatives: */ \
template(findMethodHandleType_name, "findMethodHandleType") \
template(findMethodHandleType_signature, "(Ljava/lang/Class;[Ljava/lang/Class;)Ljava/lang/invoke/MethodType;") \
......
......@@ -70,12 +70,14 @@ void Rewriter::compute_index_maps() {
}
// Unrewrite the bytecodes if an error occurs.
void Rewriter::restore_bytecodes(TRAPS) {
void Rewriter::restore_bytecodes() {
int len = _methods->length();
bool invokespecial_error = false;
for (int i = len-1; i >= 0; i--) {
Method* method = _methods->at(i);
scan_method(method, true, CHECK);
scan_method(method, true, &invokespecial_error);
assert(!invokespecial_error, "reversing should not get an invokespecial error");
}
}
......@@ -160,22 +162,21 @@ void Rewriter::rewrite_member_reference(address bcp, int offset, bool reverse) {
// These cannot share cpCache entries. It's unclear if all invokespecial to
// InterfaceMethodrefs would resolve to the same thing so a new cpCache entry
// is created for each one. This was added with lambda.
void Rewriter::rewrite_invokespecial(address bcp, int offset, bool reverse, TRAPS) {
static int count = 0;
void Rewriter::rewrite_invokespecial(address bcp, int offset, bool reverse, bool* invokespecial_error) {
address p = bcp + offset;
if (!reverse) {
int cp_index = Bytes::get_Java_u2(p);
if (_pool->tag_at(cp_index).is_interface_method()) {
int cache_index = add_invokespecial_cp_cache_entry(cp_index);
if (cache_index != (int)(jushort) cache_index) {
THROW_MSG(vmSymbols::java_lang_InternalError(),
"This classfile overflows invokespecial for interfaces "
"and cannot be loaded");
*invokespecial_error = true;
}
Bytes::put_native_u2(p, cache_index);
} else {
int cache_index = Bytes::get_native_u2(p);
int cp_index = cp_cache_entry_pool_index(cache_index);
Bytes::put_Java_u2(p, cp_index);
rewrite_member_reference(bcp, offset, reverse);
}
} else {
rewrite_member_reference(bcp, offset, reverse);
}
}
......@@ -329,7 +330,7 @@ void Rewriter::maybe_rewrite_ldc(address bcp, int offset, bool is_wide,
// Rewrites a method given the index_map information
void Rewriter::scan_method(Method* method, bool reverse, TRAPS) {
void Rewriter::scan_method(Method* method, bool reverse, bool* invokespecial_error) {
int nof_jsrs = 0;
bool has_monitor_bytecodes = false;
......@@ -391,15 +392,7 @@ void Rewriter::scan_method(Method* method, bool reverse, TRAPS) {
}
case Bytecodes::_invokespecial : {
int offset = prefix_length + 1;
address p = bcp + offset;
int cp_index = Bytes::get_Java_u2(p);
// InterfaceMethodref
if (_pool->tag_at(cp_index).is_interface_method()) {
rewrite_invokespecial(bcp, offset, reverse, CHECK);
} else {
rewrite_member_reference(bcp, offset, reverse);
}
rewrite_invokespecial(bcp, prefix_length+1, reverse, invokespecial_error);
break;
}
......@@ -496,11 +489,20 @@ Rewriter::Rewriter(instanceKlassHandle klass, constantPoolHandle cpool, Array<Me
// rewrite methods, in two passes
int len = _methods->length();
bool invokespecial_error = false;
for (int i = len-1; i >= 0; i--) {
Method* method = _methods->at(i);
scan_method(method, false, CHECK); // If you get an error here,
// there is no reversing bytecodes
scan_method(method, false, &invokespecial_error);
if (invokespecial_error) {
// If you get an error here, there is no reversing bytecodes
// This exception is stored for this class and no further attempt is
// made at verifying or rewriting.
THROW_MSG(vmSymbols::java_lang_InternalError(),
"This classfile overflows invokespecial for interfaces "
"and cannot be loaded");
return;
}
}
// May have to fix invokedynamic bytecodes if invokestatic/InterfaceMethodref
......@@ -513,7 +515,7 @@ Rewriter::Rewriter(instanceKlassHandle klass, constantPoolHandle cpool, Array<Me
// Restore bytecodes to their unrewritten state if there are exceptions
// rewriting bytecodes or allocating the cpCache
if (HAS_PENDING_EXCEPTION) {
restore_bytecodes(CATCH);
restore_bytecodes();
return;
}
......@@ -530,7 +532,7 @@ Rewriter::Rewriter(instanceKlassHandle klass, constantPoolHandle cpool, Array<Me
// relocating bytecodes. If some are relocated, that is ok because that
// doesn't affect constant pool to cpCache rewriting.
if (HAS_PENDING_EXCEPTION) {
restore_bytecodes(CATCH);
restore_bytecodes();
return;
}
// Method might have gotten rewritten.
......
......@@ -189,18 +189,18 @@ class Rewriter: public StackObj {
void compute_index_maps();
void make_constant_pool_cache(TRAPS);
void scan_method(Method* m, bool reverse, TRAPS);
void scan_method(Method* m, bool reverse, bool* invokespecial_error);
void rewrite_Object_init(methodHandle m, TRAPS);
void rewrite_member_reference(address bcp, int offset, bool reverse);
void maybe_rewrite_invokehandle(address opc, int cp_index, int cache_index, bool reverse);
void rewrite_invokedynamic(address bcp, int offset, bool reverse);
void maybe_rewrite_ldc(address bcp, int offset, bool is_wide, bool reverse);
void rewrite_invokespecial(address bcp, int offset, bool reverse, TRAPS);
void rewrite_invokespecial(address bcp, int offset, bool reverse, bool* invokespecial_error);
void patch_invokedynamic_bytecodes();
// Revert bytecodes in case of an exception.
void restore_bytecodes(TRAPS);
void restore_bytecodes();
static methodHandle rewrite_jsrs(methodHandle m, TRAPS);
public:
......
......@@ -321,6 +321,8 @@ void Flag::print_kind(outputStream* st) {
{ KIND_PRODUCT, "product" },
{ KIND_MANAGEABLE, "manageable" },
{ KIND_DIAGNOSTIC, "diagnostic" },
{ KIND_EXPERIMENTAL, "experimental" },
{ KIND_COMMERCIAL, "commercial" },
{ KIND_NOT_PRODUCT, "notproduct" },
{ KIND_DEVELOP, "develop" },
{ KIND_LP64_PRODUCT, "lp64_product" },
......
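
Note on the Flag::print_kind hunk above: the two new labels show up as {experimental} and {commercial} in the kind column of flag listings such as -XX:+PrintFlagsFinal. A hedged sketch follows (illustrative class name; it assumes a java launcher on the PATH, and the exact PrintFlagsFinal column layout can vary between builds) that spawns a child JVM and echoes only the experimental flag lines:

import java.io.BufferedReader;
import java.io.InputStreamReader;

// Runs "java -XX:+UnlockExperimentalVMOptions -XX:+PrintFlagsFinal -version"
// in a child process and prints the flag lines whose kind column says
// "experimental". Assumes a "java" launcher is available on the PATH.
public class ListExperimentalFlags {
    public static void main(String[] args) throws Exception {
        Process p = new ProcessBuilder("java",
                "-XX:+UnlockExperimentalVMOptions",
                "-XX:+PrintFlagsFinal",
                "-version")
                .redirectErrorStream(true)
                .start();
        try (BufferedReader r = new BufferedReader(
                new InputStreamReader(p.getInputStream()))) {
            String line;
            while ((line = r.readLine()) != null) {
                if (line.contains("{experimental}")) {
                    System.out.println(line.trim());
                }
            }
        }
        p.waitFor();
    }
}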
......@@ -3622,9 +3622,6 @@ class CommandLineFlags {
"Temporary flag for transition to AbstractMethodError wrapped " \
"in InvocationTargetException. See 6531596") \
\
develop(bool, VerifyLambdaBytecodes, false, \
"Force verification of jdk 8 lambda metafactory bytecodes") \
\
develop(intx, FastSuperclassLimit, 8, \
"Depth of hardwired instanceof accelerator array") \
\
......
......@@ -470,12 +470,6 @@ bool Reflection::verify_class_access(Klass* current_class, Klass* new_class, boo
return true;
}
// Also allow all accesses from
// java/lang/invoke/MagicLambdaImpl subclasses to succeed trivially.
if (current_class->is_subclass_of(SystemDictionary::lambda_MagicLambdaImpl_klass())) {
return true;
}
return can_relax_access_check_for(current_class, new_class, classloader_only);
}
......@@ -570,12 +564,6 @@ bool Reflection::verify_field_access(Klass* current_class,
return true;
}
// Also allow all accesses from
// java/lang/invoke/MagicLambdaImpl subclasses to succeed trivially.
if (current_class->is_subclass_of(SystemDictionary::lambda_MagicLambdaImpl_klass())) {
return true;
}
return can_relax_access_check_for(
current_class, field_class, classloader_only);
}
......
......@@ -84,6 +84,7 @@
// Shared stub locations
RuntimeStub* SharedRuntime::_wrong_method_blob;
RuntimeStub* SharedRuntime::_wrong_method_abstract_blob;
RuntimeStub* SharedRuntime::_ic_miss_blob;
RuntimeStub* SharedRuntime::_resolve_opt_virtual_call_blob;
RuntimeStub* SharedRuntime::_resolve_virtual_call_blob;
......@@ -101,11 +102,12 @@ UncommonTrapBlob* SharedRuntime::_uncommon_trap_blob;
//----------------------------generate_stubs-----------------------------------
void SharedRuntime::generate_stubs() {
_wrong_method_blob = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method), "wrong_method_stub");
_ic_miss_blob = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_ic_miss), "ic_miss_stub");
_resolve_opt_virtual_call_blob = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_opt_virtual_call_C), "resolve_opt_virtual_call");
_resolve_virtual_call_blob = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_virtual_call_C), "resolve_virtual_call");
_resolve_static_call_blob = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_static_call_C), "resolve_static_call");
_wrong_method_blob = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method), "wrong_method_stub");
_wrong_method_abstract_blob = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_abstract), "wrong_method_abstract_stub");
_ic_miss_blob = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::handle_wrong_method_ic_miss), "ic_miss_stub");
_resolve_opt_virtual_call_blob = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_opt_virtual_call_C), "resolve_opt_virtual_call");
_resolve_virtual_call_blob = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_virtual_call_C), "resolve_virtual_call");
_resolve_static_call_blob = generate_resolve_blob(CAST_FROM_FN_PTR(address, SharedRuntime::resolve_static_call_C), "resolve_static_call");
#ifdef COMPILER2
// Vectors are generated only by C2.
......@@ -1345,6 +1347,11 @@ JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* thread))
return callee_method->verified_code_entry();
JRT_END
// Handle abstract method call
JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_abstract(JavaThread* thread))
return StubRoutines::throw_AbstractMethodError_entry();
JRT_END
// resolve a static call and patch code
JRT_BLOCK_ENTRY(address, SharedRuntime::resolve_static_call_C(JavaThread *thread ))
......@@ -2341,12 +2348,13 @@ void AdapterHandlerLibrary::initialize() {
// Create a special handler for abstract methods. Abstract methods
// are never compiled so an i2c entry is somewhat meaningless, but
// fill it in with something appropriate just in case. Pass handle
// wrong method for the c2i transitions.
address wrong_method = SharedRuntime::get_handle_wrong_method_stub();
// throw AbstractMethodError just in case.
// Pass wrong_method_abstract for the c2i transitions to return
// AbstractMethodError for invalid invocations.
address wrong_method_abstract = SharedRuntime::get_handle_wrong_method_abstract_stub();
_abstract_method_handler = AdapterHandlerLibrary::new_entry(new AdapterFingerPrint(0, NULL),
StubRoutines::throw_AbstractMethodError_entry(),
wrong_method, wrong_method);
wrong_method_abstract, wrong_method_abstract);
}
AdapterHandlerEntry* AdapterHandlerLibrary::new_entry(AdapterFingerPrint* fingerprint,
......
......@@ -56,6 +56,7 @@ class SharedRuntime: AllStatic {
// Shared stub locations
static RuntimeStub* _wrong_method_blob;
static RuntimeStub* _wrong_method_abstract_blob;
static RuntimeStub* _ic_miss_blob;
static RuntimeStub* _resolve_opt_virtual_call_blob;
static RuntimeStub* _resolve_virtual_call_blob;
......@@ -206,6 +207,11 @@ class SharedRuntime: AllStatic {
return _wrong_method_blob->entry_point();
}
static address get_handle_wrong_method_abstract_stub() {
assert(_wrong_method_abstract_blob!= NULL, "oops");
return _wrong_method_abstract_blob->entry_point();
}
#ifdef COMPILER2
static void generate_uncommon_trap_blob(void);
static UncommonTrapBlob* uncommon_trap_blob() { return _uncommon_trap_blob; }
......@@ -481,6 +487,7 @@ class SharedRuntime: AllStatic {
// handle ic miss with caller being compiled code
// wrong method handling (inline cache misses, zombie methods)
static address handle_wrong_method(JavaThread* thread);
static address handle_wrong_method_abstract(JavaThread* thread);
static address handle_wrong_method_ic_miss(JavaThread* thread);
#ifndef PRODUCT
......
......@@ -231,7 +231,8 @@ void NMethodSweeper::mark_active_nmethods() {
*/
void NMethodSweeper::possibly_sweep() {
assert(JavaThread::current()->thread_state() == _thread_in_vm, "must run in vm mode");
if (!MethodFlushing || !sweep_in_progress()) {
// Only compiler threads are allowed to sweep
if (!MethodFlushing || !sweep_in_progress() || !Thread::current()->is_Compiler_thread()) {
return;
}
......
......@@ -200,6 +200,12 @@ void ThreadService::oops_do(OopClosure* f) {
}
}
void ThreadService::metadata_do(void f(Metadata*)) {
for (ThreadDumpResult* dump = _threaddump_list; dump != NULL; dump = dump->next()) {
dump->metadata_do(f);
}
}
void ThreadService::add_thread_dump(ThreadDumpResult* dump) {
MutexLocker ml(Management_lock);
if (_threaddump_list == NULL) {
......@@ -451,9 +457,16 @@ void ThreadDumpResult::oops_do(OopClosure* f) {
}
}
void ThreadDumpResult::metadata_do(void f(Metadata*)) {
for (ThreadSnapshot* ts = _snapshots; ts != NULL; ts = ts->next()) {
ts->metadata_do(f);
}
}
StackFrameInfo::StackFrameInfo(javaVFrame* jvf, bool with_lock_info) {
_method = jvf->method();
_bci = jvf->bci();
_class_holder = _method->method_holder()->klass_holder();
_locked_monitors = NULL;
if (with_lock_info) {
ResourceMark rm;
......@@ -477,6 +490,11 @@ void StackFrameInfo::oops_do(OopClosure* f) {
f->do_oop((oop*) _locked_monitors->adr_at(i));
}
}
f->do_oop(&_class_holder);
}
void StackFrameInfo::metadata_do(void f(Metadata*)) {
f(_method);
}
void StackFrameInfo::print_on(outputStream* st) const {
......@@ -620,6 +638,14 @@ void ThreadStackTrace::oops_do(OopClosure* f) {
}
}
void ThreadStackTrace::metadata_do(void f(Metadata*)) {
int length = _frames->length();
for (int i = 0; i < length; i++) {
_frames->at(i)->metadata_do(f);
}
}
ConcurrentLocksDump::~ConcurrentLocksDump() {
if (_retain_map_on_free) {
return;
......@@ -823,6 +849,13 @@ void ThreadSnapshot::oops_do(OopClosure* f) {
}
}
void ThreadSnapshot::metadata_do(void f(Metadata*)) {
if (_stack_trace != NULL) {
_stack_trace->metadata_do(f);
}
}
DeadlockCycle::DeadlockCycle() {
_is_deadlock = false;
_threads = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<JavaThread*>(INITIAL_ARRAY_SIZE, true);
......
......@@ -113,6 +113,7 @@ public:
// GC support
static void oops_do(OopClosure* f);
static void metadata_do(void f(Metadata*));
};
// Per-thread Statistics for synchronization
......@@ -242,6 +243,7 @@ public:
void dump_stack_at_safepoint(int max_depth, bool with_locked_monitors);
void set_concurrent_locks(ThreadConcurrentLocks* l) { _concurrent_locks = l; }
void oops_do(OopClosure* f);
void metadata_do(void f(Metadata*));
};
class ThreadStackTrace : public CHeapObj<mtInternal> {
......@@ -265,6 +267,7 @@ class ThreadStackTrace : public CHeapObj<mtInternal> {
void dump_stack_at_safepoint(int max_depth);
Handle allocate_fill_stack_trace_element_array(TRAPS);
void oops_do(OopClosure* f);
void metadata_do(void f(Metadata*));
GrowableArray<oop>* jni_locked_monitors() { return _jni_locked_monitors; }
int num_jni_locked_monitors() { return (_jni_locked_monitors != NULL ? _jni_locked_monitors->length() : 0); }
......@@ -280,6 +283,9 @@ class StackFrameInfo : public CHeapObj<mtInternal> {
Method* _method;
int _bci;
GrowableArray<oop>* _locked_monitors; // list of object monitors locked by this frame
// We need to save the mirrors in the backtrace to keep the class
// from being unloaded while we still have this stack trace.
oop _class_holder;
public:
......@@ -289,9 +295,10 @@ class StackFrameInfo : public CHeapObj<mtInternal> {
delete _locked_monitors;
}
};
Method* method() const { return _method; }
Method* method() const { return _method; }
int bci() const { return _bci; }
void oops_do(OopClosure* f);
void metadata_do(void f(Metadata*));
int num_locked_monitors() { return (_locked_monitors != NULL ? _locked_monitors->length() : 0); }
GrowableArray<oop>* locked_monitors() { return _locked_monitors; }
......@@ -354,6 +361,7 @@ class ThreadDumpResult : public StackObj {
int num_snapshots() { return _num_snapshots; }
ThreadSnapshot* snapshots() { return _snapshots; }
void oops_do(OopClosure* f);
void metadata_do(void f(Metadata*));
};
class DeadlockCycle : public CHeapObj<mtInternal> {
......
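
Note on the ThreadService hunks above: the new metadata_do walk lets retained ThreadDumpResult/ThreadSnapshot/StackFrameInfo objects report the Method* and holder classes they reference, complementing the new _class_holder oop, so a live thread dump cannot be left pointing at unloaded metadata. On the Java side such snapshots are produced by JMX thread dumps; a minimal sketch (illustrative class name) that requests a full dump with locked monitors and synchronizers:

import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;

// Takes a full thread dump via JMX; in HotSpot the dump is backed by the
// ThreadSnapshot/StackFrameInfo structures that the patch above teaches to
// report their metadata during class unloading.
public class DumpThreads {
    public static void main(String[] args) {
        ThreadMXBean bean = ManagementFactory.getThreadMXBean();
        // lockedMonitors = true, lockedSynchronizers = true
        for (ThreadInfo info : bean.dumpAllThreads(true, true)) {
            System.out.println(info);
        }
    }
}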
......@@ -172,7 +172,6 @@ public class ConcurrentClassLoadingTest {
"java.lang.invoke.LambdaConversionException",
"java.lang.invoke.LambdaForm",
"java.lang.invoke.LambdaMetafactory",
"java.lang.invoke.MagicLambdaImpl",
"java.lang.invoke.MemberName",
"java.lang.invoke.MethodHandle",
"java.lang.invoke.MethodHandleImpl",
......
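
Note on the MagicLambdaImpl removals above (system dictionary, verifier, vmSymbols, globals, reflection, and this test's class list): JDK 8 lambda proxies are plain VM-generated classes produced through the LambdaMetafactory rather than subclasses of java.lang.invoke.MagicLambdaImpl. A small sketch (illustrative class name; the $$Lambda$ naming is an implementation detail that varies between builds) showing what a lambda's runtime class looks like:

// Prints the runtime class of a lambda proxy. With the MagicLambdaImpl-based
// scheme gone, the superclass is plain java.lang.Object and the class name is
// a JVM-generated one such as LambdaShape$$Lambda$1 (exact form varies).
public class LambdaShape {
    public static void main(String[] args) {
        Runnable r = () -> System.out.println("hello");
        Class<?> c = r.getClass();
        System.out.println("class:      " + c.getName());
        System.out.println("superclass: " + c.getSuperclass());
        System.out.println("interfaces: " + java.util.Arrays.toString(c.getInterfaces()));
        r.run();
    }
}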
/*
* Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* @test
* @bug 8028308
* @summary rbp not restored when stack overflow is thrown from deopt/uncommon trap blobs
* @run main/othervm -XX:-BackgroundCompilation -XX:CompileCommand=dontinline,TestStackBangRbp::m1 -XX:CompileCommand=exclude,TestStackBangRbp::m2 -Xss256K -XX:-UseOnStackReplacement TestStackBangRbp
*
*/
public class TestStackBangRbp {
static class UnloadedClass1 {
}
static class UnloadedClass2 {
}
static Object m1(boolean deopt) {
long l0, l1, l2, l3, l4, l5, l6, l7, l8, l9, l10, l11, l12,
l13, l14, l15, l16, l17, l18, l19, l20, l21, l22, l23, l24,
l25, l26, l27, l28, l29, l30, l31, l32, l33, l34, l35, l36,
l37, l38, l39, l40, l41, l42, l43, l44, l45, l46, l47, l48,
l49, l50, l51, l52, l53, l54, l55, l56, l57, l58, l59, l60,
l61, l62, l63, l64, l65, l66, l67, l68, l69, l70, l71, l72,
l73, l74, l75, l76, l77, l78, l79, l80, l81, l82, l83, l84,
l85, l86, l87, l88, l89, l90, l91, l92, l93, l94, l95, l96,
l97, l98, l99, l100, l101, l102, l103, l104, l105, l106, l107,
l108, l109, l110, l111, l112, l113, l114, l115, l116, l117,
l118, l119, l120, l121, l122, l123, l124, l125, l126, l127,
l128, l129, l130, l131, l132, l133, l134, l135, l136, l137,
l138, l139, l140, l141, l142, l143, l144, l145, l146, l147,
l148, l149, l150, l151, l152, l153, l154, l155, l156, l157,
l158, l159, l160, l161, l162, l163, l164, l165, l166, l167,
l168, l169, l170, l171, l172, l173, l174, l175, l176, l177,
l178, l179, l180, l181, l182, l183, l184, l185, l186, l187,
l188, l189, l190, l191, l192, l193, l194, l195, l196, l197,
l198, l199, l200, l201, l202, l203, l204, l205, l206, l207,
l208, l209, l210, l211, l212, l213, l214, l215, l216, l217,
l218, l219, l220, l221, l222, l223, l224, l225, l226, l227,
l228, l229, l230, l231, l232, l233, l234, l235, l236, l237,
l238, l239, l240, l241, l242, l243, l244, l245, l246, l247,
l248, l249, l250, l251, l252, l253, l254, l255, l256, l257,
l258, l259, l260, l261, l262, l263, l264, l265, l266, l267,
l268, l269, l270, l271, l272, l273, l274, l275, l276, l277,
l278, l279, l280, l281, l282, l283, l284, l285, l286, l287,
l288, l289, l290, l291, l292, l293, l294, l295, l296, l297,
l298, l299, l300, l301, l302, l303, l304, l305, l306, l307,
l308, l309, l310, l311, l312, l313, l314, l315, l316, l317,
l318, l319, l320, l321, l322, l323, l324, l325, l326, l327,
l328, l329, l330, l331, l332, l333, l334, l335, l336, l337,
l338, l339, l340, l341, l342, l343, l344, l345, l346, l347,
l348, l349, l350, l351, l352, l353, l354, l355, l356, l357,
l358, l359, l360, l361, l362, l363, l364, l365, l366, l367,
l368, l369, l370, l371, l372, l373, l374, l375, l376, l377,
l378, l379, l380, l381, l382, l383, l384, l385, l386, l387,
l388, l389, l390, l391, l392, l393, l394, l395, l396, l397,
l398, l399, l400, l401, l402, l403, l404, l405, l406, l407,
l408, l409, l410, l411, l412, l413, l414, l415, l416, l417,
l418, l419, l420, l421, l422, l423, l424, l425, l426, l427,
l428, l429, l430, l431, l432, l433, l434, l435, l436, l437,
l438, l439, l440, l441, l442, l443, l444, l445, l446, l447,
l448, l449, l450, l451, l452, l453, l454, l455, l456, l457,
l458, l459, l460, l461, l462, l463, l464, l465, l466, l467,
l468, l469, l470, l471, l472, l473, l474, l475, l476, l477,
l478, l479, l480, l481, l482, l483, l484, l485, l486, l487,
l488, l489, l490, l491, l492, l493, l494, l495, l496, l497,
l498, l499, l500, l501, l502, l503, l504, l505, l506, l507,
l508, l509, l510, l511;
long ll0, ll1, ll2, ll3, ll4, ll5, ll6, ll7, ll8, ll9, ll10, ll11, ll12,
ll13, ll14, ll15, ll16, ll17, ll18, ll19, ll20, ll21, ll22, ll23, ll24,
ll25, ll26, ll27, ll28, ll29, ll30, ll31, ll32, ll33, ll34, ll35, ll36,
ll37, ll38, ll39, ll40, ll41, ll42, ll43, ll44, ll45, ll46, ll47, ll48,
ll49, ll50, ll51, ll52, ll53, ll54, ll55, ll56, ll57, ll58, ll59, ll60,
ll61, ll62, ll63, ll64, ll65, ll66, ll67, ll68, ll69, ll70, ll71, ll72,
ll73, ll74, ll75, ll76, ll77, ll78, ll79, ll80, ll81, ll82, ll83, ll84,
ll85, ll86, ll87, ll88, ll89, ll90, ll91, ll92, ll93, ll94, ll95, ll96,
ll97, ll98, ll99, ll100, ll101, ll102, ll103, ll104, ll105, ll106, ll107,
ll108, ll109, ll110, ll111, ll112, ll113, ll114, ll115, ll116, ll117,
ll118, ll119, ll120, ll121, ll122, ll123, ll124, ll125, ll126, ll127,
ll128, ll129, ll130, ll131, ll132, ll133, ll134, ll135, ll136, ll137,
ll138, ll139, ll140, ll141, ll142, ll143, ll144, ll145, ll146, ll147,
ll148, ll149, ll150, ll151, ll152, ll153, ll154, ll155, ll156, ll157,
ll158, ll159, ll160, ll161, ll162, ll163, ll164, ll165, ll166, ll167,
ll168, ll169, ll170, ll171, ll172, ll173, ll174, ll175, ll176, ll177,
ll178, ll179, ll180, ll181, ll182, ll183, ll184, ll185, ll186, ll187,
ll188, ll189, ll190, ll191, ll192, ll193, ll194, ll195, ll196, ll197,
ll198, ll199, ll200, ll201, ll202, ll203, ll204, ll205, ll206, ll207,
ll208, ll209, ll210, ll211, ll212, ll213, ll214, ll215, ll216, ll217,
ll218, ll219, ll220, ll221, ll222, ll223, ll224, ll225, ll226, ll227,
ll228, ll229, ll230, ll231, ll232, ll233, ll234, ll235, ll236, ll237,
ll238, ll239, ll240, ll241, ll242, ll243, ll244, ll245, ll246, ll247,
ll248, ll249, ll250, ll251, ll252, ll253, ll254, ll255, ll256, ll257,
ll258, ll259, ll260, ll261, ll262, ll263, ll264, ll265, ll266, ll267,
ll268, ll269, ll270, ll271, ll272, ll273, ll274, ll275, ll276, ll277,
ll278, ll279, ll280, ll281, ll282, ll283, ll284, ll285, ll286, ll287,
ll288, ll289, ll290, ll291, ll292, ll293, ll294, ll295, ll296, ll297,
ll298, ll299, ll300, ll301, ll302, ll303, ll304, ll305, ll306, ll307,
ll308, ll309, ll310, ll311, ll312, ll313, ll314, ll315, ll316, ll317,
ll318, ll319, ll320, ll321, ll322, ll323, ll324, ll325, ll326, ll327,
ll328, ll329, ll330, ll331, ll332, ll333, ll334, ll335, ll336, ll337,
ll338, ll339, ll340, ll341, ll342, ll343, ll344, ll345, ll346, ll347,
ll348, ll349, ll350, ll351, ll352, ll353, ll354, ll355, ll356, ll357,
ll358, ll359, ll360, ll361, ll362, ll363, ll364, ll365, ll366, ll367,
ll368, ll369, ll370, ll371, ll372, ll373, ll374, ll375, ll376, ll377,
ll378, ll379, ll380, ll381, ll382, ll383, ll384, ll385, ll386, ll387,
ll388, ll389, ll390, ll391, ll392, ll393, ll394, ll395, ll396, ll397,
ll398, ll399, ll400, ll401, ll402, ll403, ll404, ll405, ll406, ll407,
ll408, ll409, ll410, ll411, ll412, ll413, ll414, ll415, ll416, ll417,
ll418, ll419, ll420, ll421, ll422, ll423, ll424, ll425, ll426, ll427,
ll428, ll429, ll430, ll431, ll432, ll433, ll434, ll435, ll436, ll437,
ll438, ll439, ll440, ll441, ll442, ll443, ll444, ll445, ll446, ll447,
ll448, ll449, ll450, ll451, ll452, ll453, ll454, ll455, ll456, ll457,
ll458, ll459, ll460, ll461, ll462, ll463, ll464, ll465, ll466, ll467,
ll468, ll469, ll470, ll471, ll472, ll473, ll474, ll475, ll476, ll477,
ll478, ll479, ll480, ll481, ll482, ll483, ll484, ll485, ll486, ll487,
ll488, ll489, ll490, ll491, ll492, ll493, ll494, ll495, ll496, ll497,
ll498, ll499, ll500, ll501, ll502, ll503, ll504, ll505, ll506, ll507,
ll508, ll509, ll510, ll511;
int i1 = TestStackBangRbp.i1;
int i2 = TestStackBangRbp.i2;
int i3 = TestStackBangRbp.i3;
int i4 = TestStackBangRbp.i4;
int i5 = TestStackBangRbp.i5;
int i6 = TestStackBangRbp.i6;
int i7 = TestStackBangRbp.i7;
int i8 = TestStackBangRbp.i8;
int i9 = TestStackBangRbp.i9;
int i10 = TestStackBangRbp.i10;
int i11 = TestStackBangRbp.i11;
int i12 = TestStackBangRbp.i12;
int i13 = TestStackBangRbp.i13;
int i14 = TestStackBangRbp.i14;
int i15 = TestStackBangRbp.i15;
int i16 = TestStackBangRbp.i16;
TestStackBangRbp.i1 = i1;
TestStackBangRbp.i2 = i2;
TestStackBangRbp.i3 = i3;
TestStackBangRbp.i4 = i4;
TestStackBangRbp.i5 = i5;
TestStackBangRbp.i6 = i6;
TestStackBangRbp.i7 = i7;
TestStackBangRbp.i8 = i8;
TestStackBangRbp.i9 = i9;
TestStackBangRbp.i10 = i10;
TestStackBangRbp.i11 = i11;
TestStackBangRbp.i12 = i12;
TestStackBangRbp.i13 = i13;
TestStackBangRbp.i14 = i14;
TestStackBangRbp.i15 = i15;
TestStackBangRbp.i16 = i16;
if (deopt) {
// deoptimize with integer in rbp
UnloadedClass1 res = new UnloadedClass1(); // forces deopt with c2
return res;
}
return null;
}
static boolean m2(boolean deopt) {
// call m2 recursively until stack overflow. Then call m3 that
// will call m1 and trigger and deopt in m1 while keeping a
// lot of objects live in registers at the call to m1
long l0, l1, l2, l3, l4, l5, l6, l7, l8, l9, l10, l11, l12,
l13, l14, l15, l16, l17, l18, l19, l20, l21, l22, l23, l24,
l25, l26, l27, l28, l29, l30, l31, l32, l33, l34, l35, l36,
l37, l38, l39, l40, l41, l42, l43, l44, l45, l46, l47, l48,
l49, l50, l51, l52, l53, l54, l55, l56, l57, l58, l59, l60,
l61, l62, l63, l64, l65, l66, l67, l68, l69, l70, l71, l72,
l73, l74, l75, l76, l77, l78, l79, l80, l81, l82, l83, l84,
l85, l86, l87, l88, l89, l90, l91, l92, l93, l94, l95, l96,
l97, l98, l99, l100, l101, l102, l103, l104, l105, l106, l107,
l108, l109, l110, l111, l112, l113, l114, l115, l116, l117,
l118, l119, l120, l121, l122, l123, l124, l125, l126, l127,
l128, l129, l130, l131, l132, l133, l134, l135, l136, l137,
l138, l139, l140, l141, l142, l143, l144, l145, l146, l147,
l148, l149, l150, l151, l152, l153, l154, l155, l156, l157,
l158, l159, l160, l161, l162, l163, l164, l165, l166, l167,
l168, l169, l170, l171, l172, l173, l174, l175, l176, l177,
l178, l179, l180, l181, l182, l183, l184, l185, l186, l187,
l188, l189, l190, l191, l192, l193, l194, l195, l196, l197,
l198, l199, l200, l201, l202, l203, l204, l205, l206, l207,
l208, l209, l210, l211, l212, l213, l214, l215, l216, l217,
l218, l219, l220, l221, l222, l223, l224, l225, l226, l227,
l228, l229, l230, l231, l232, l233, l234, l235, l236, l237,
l238, l239, l240, l241, l242, l243, l244, l245, l246, l247,
l248, l249, l250, l251, l252, l253, l254, l255, l256, l257,
l258, l259, l260, l261, l262, l263, l264, l265, l266, l267,
l268, l269, l270, l271, l272, l273, l274, l275, l276, l277,
l278, l279, l280, l281, l282, l283, l284, l285, l286, l287,
l288, l289, l290, l291, l292, l293, l294, l295, l296, l297,
l298, l299, l300, l301, l302, l303, l304, l305, l306, l307,
l308, l309, l310, l311, l312, l313, l314, l315, l316, l317,
l318, l319, l320, l321, l322, l323, l324, l325, l326, l327,
l328, l329, l330, l331, l332, l333, l334, l335, l336, l337,
l338, l339, l340, l341, l342, l343, l344, l345, l346, l347,
l348, l349, l350, l351, l352, l353, l354, l355, l356, l357,
l358, l359, l360, l361, l362, l363, l364, l365, l366, l367,
l368, l369, l370, l371, l372, l373, l374, l375, l376, l377,
l378, l379, l380, l381, l382, l383, l384, l385, l386, l387,
l388, l389, l390, l391, l392, l393, l394, l395, l396, l397,
l398, l399, l400, l401, l402, l403, l404, l405, l406, l407,
l408, l409, l410, l411, l412, l413, l414, l415, l416, l417,
l418, l419, l420, l421, l422, l423, l424, l425, l426, l427,
l428, l429, l430, l431, l432, l433, l434, l435, l436, l437,
l438, l439, l440, l441, l442, l443, l444, l445, l446, l447,
l448, l449, l450, l451, l452, l453, l454, l455, l456, l457,
l458, l459, l460, l461, l462, l463, l464, l465, l466, l467,
l468, l469, l470, l471, l472, l473, l474, l475, l476, l477,
l478, l479, l480, l481, l482, l483, l484, l485, l486, l487,
l488, l489, l490, l491, l492, l493, l494, l495, l496, l497,
l498, l499, l500, l501, l502, l503, l504, l505, l506, l507,
l508, l509, l510, l511;
boolean do_m3 = false;
try {
do_m3 = m2(deopt);
} catch (StackOverflowError e) {
return true;
}
if (do_m3) {
m3(deopt);
}
return false;
}
static volatile Object o1 = new Object();
static volatile int i1 = 1;
static volatile int i2 = 2;
static volatile int i3 = 3;
static volatile int i4 = 4;
static volatile int i5 = 5;
static volatile int i6 = 6;
static volatile int i7 = 7;
static volatile int i8 = 8;
static volatile int i9 = 9;
static volatile int i10 = 10;
static volatile int i11 = 11;
static volatile int i12 = 12;
static volatile int i13 = 13;
static volatile int i14 = 14;
static volatile int i15 = 15;
static volatile int i16 = 16;
static void m3(boolean deopt) {
Object o1 = TestStackBangRbp.o1;
TestStackBangRbp.o1 = o1;
try {
m1(deopt);
} catch (StackOverflowError e) {
// deoptimize again. rbp holds an integer. It should have an object.
UnloadedClass2 res = new UnloadedClass2(); // forces deopt with c2
}
TestStackBangRbp.o1 = o1;
}
static public void main(String[] args) {
// get m1 & m3 compiled
for (int i = 0; i < 20000; i++) {
m1(false);
m3(false);
}
m2(true);
System.out.println("TEST PASSED");
}
}