Commit 77f85fe6 authored by: roland

6986046: C1 valuestack cleanup

Summary: fixes an historical oddity in C1 with inlining where all of the expression stacks are kept in the topmost ValueStack instead of being in their respective ValueStacks.
Reviewed-by: never
Contributed-by: Christian Wimmer <cwimmer@uci.edu>
Parent: 9491724a
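As a rough illustration of what the summary means (not the HotSpot sources: SimpleValueStack and its members below are hypothetical stand-ins for C1's ValueStack), the sketch shows each inlined scope keeping its own expression stack and chaining to its caller's state, instead of the topmost state holding one flattened stack for all scopes. The hunks that follow make the real ValueStack/GraphBuilder code work this way (copy_state_before(), copy_state_for_exception(), caller_state(), etc.).

// Illustrative sketch only -- a simplified stand-in for C1's ValueStack, to show
// the shape of the cleanup: each scope owns its expression stack and links to its
// caller's state, rather than one flat stack living in the topmost state.
#include <cstddef>
#include <cstdio>
#include <vector>

typedef int Value;  // stand-in for C1's Instruction*

struct SimpleValueStack {
  SimpleValueStack* caller_state;  // state of the inlining caller, NULL for the outermost scope
  std::vector<Value> stack;        // this scope's own expression stack

  explicit SimpleValueStack(SimpleValueStack* caller) : caller_state(caller) {}

  void push(Value v)      { stack.push_back(v); }
  int  stack_size() const { return (int)stack.size(); }

  // Depth across the whole inlining chain -- what used to live flattened
  // in the topmost ValueStack before this cleanup.
  int total_stack_size() const {
    int n = 0;
    for (const SimpleValueStack* s = this; s != NULL; s = s->caller_state) n += s->stack_size();
    return n;
  }
};

int main() {
  SimpleValueStack caller(NULL);     // caller scope with two values pushed
  caller.push(1); caller.push(2);
  SimpleValueStack callee(&caller);  // inlined scope keeps its own stack
  callee.push(3);
  std::printf("callee stack: %d, total: %d\n", callee.stack_size(), callee.total_stack_size());
  return 0;
}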
@@ -32,6 +32,7 @@ RangeCheckStub::RangeCheckStub(CodeEmitInfo* info, LIR_Opr index,
   : _throw_index_out_of_bounds_exception(throw_index_out_of_bounds_exception)
   , _index(index)
 {
+  assert(info != NULL, "must have info");
   _info = new CodeEmitInfo(info);
 }
...
@@ -311,7 +311,7 @@ void LIRGenerator::store_stack_parameter (LIR_Opr item, ByteSize offset_from_sp)
 void LIRGenerator::do_StoreIndexed(StoreIndexed* x) {
-  assert(x->is_root(),"");
+  assert(x->is_pinned(),"");
   bool needs_range_check = true;
   bool use_length = x->length() != NULL;
   bool obj_store = x->elt_type() == T_ARRAY || x->elt_type() == T_OBJECT;
@@ -386,7 +386,7 @@ void LIRGenerator::do_StoreIndexed(StoreIndexed* x) {
 void LIRGenerator::do_MonitorEnter(MonitorEnter* x) {
-  assert(x->is_root(),"");
+  assert(x->is_pinned(),"");
   LIRItem obj(x->obj(), this);
   obj.load_item();
@@ -398,7 +398,7 @@ void LIRGenerator::do_MonitorEnter(MonitorEnter* x) {
   CodeEmitInfo* info_for_exception = NULL;
   if (x->needs_null_check()) {
-    info_for_exception = state_for(x, x->lock_stack_before());
+    info_for_exception = state_for(x);
   }
   // this CodeEmitInfo must not have the xhandlers because here the
@@ -409,7 +409,7 @@ void LIRGenerator::do_MonitorEnter(MonitorEnter* x) {
 void LIRGenerator::do_MonitorExit(MonitorExit* x) {
-  assert(x->is_root(),"");
+  assert(x->is_pinned(),"");
   LIRItem obj(x->obj(), this);
   obj.dont_load_item();
@@ -871,10 +871,11 @@ void LIRGenerator::do_NewInstance(NewInstance* x) {
   // This instruction can be deoptimized in the slow path : use
   // O0 as result register.
   const LIR_Opr reg = result_register_for(x->type());
+#ifndef PRODUCT
   if (PrintNotLoaded && !x->klass()->is_loaded()) {
-    tty->print_cr(" ###class not loaded at new bci %d", x->bci());
+    tty->print_cr(" ###class not loaded at new bci %d", x->printable_bci());
   }
+#endif
   CodeEmitInfo* info = state_for(x, x->state());
   LIR_Opr tmp1 = FrameMap::G1_oop_opr;
   LIR_Opr tmp2 = FrameMap::G3_oop_opr;
@@ -1018,7 +1019,7 @@ void LIRGenerator::do_CheckCast(CheckCast* x) {
   obj.load_item();
   LIR_Opr out_reg = rlock_result(x);
   CodeStub* stub;
-  CodeEmitInfo* info_for_exception = state_for(x, x->state()->copy_locks());
+  CodeEmitInfo* info_for_exception = state_for(x);
   if (x->is_incompatible_class_change_check()) {
     assert(patching_info == NULL, "can't patch this");
...
@@ -83,7 +83,8 @@ RangeCheckStub::RangeCheckStub(CodeEmitInfo* info, LIR_Opr index,
   : _throw_index_out_of_bounds_exception(throw_index_out_of_bounds_exception)
   , _index(index)
 {
-  _info = info == NULL ? NULL : new CodeEmitInfo(info);
+  assert(info != NULL, "must have info");
+  _info = new CodeEmitInfo(info);
 }
...
@@ -107,7 +107,7 @@ bool LIRGenerator::can_store_as_constant(Value v, BasicType type) const {
     return false;
   }
   Constant* c = v->as_Constant();
-  if (c && c->state() == NULL) {
+  if (c && c->state_before() == NULL) {
     // constants of any type can be stored directly, except for
     // unloaded object constants.
     return true;
@@ -250,7 +250,7 @@ void LIRGenerator::store_stack_parameter (LIR_Opr item, ByteSize offset_from_sp)
 void LIRGenerator::do_StoreIndexed(StoreIndexed* x) {
-  assert(x->is_root(),"");
+  assert(x->is_pinned(),"");
   bool needs_range_check = true;
   bool use_length = x->length() != NULL;
   bool obj_store = x->elt_type() == T_ARRAY || x->elt_type() == T_OBJECT;
@@ -325,7 +325,7 @@ void LIRGenerator::do_StoreIndexed(StoreIndexed* x) {
 void LIRGenerator::do_MonitorEnter(MonitorEnter* x) {
-  assert(x->is_root(),"");
+  assert(x->is_pinned(),"");
   LIRItem obj(x->obj(), this);
   obj.load_item();
@@ -341,7 +341,7 @@ void LIRGenerator::do_MonitorEnter(MonitorEnter* x) {
   CodeEmitInfo* info_for_exception = NULL;
   if (x->needs_null_check()) {
-    info_for_exception = state_for(x, x->lock_stack_before());
+    info_for_exception = state_for(x);
   }
   // this CodeEmitInfo must not have the xhandlers because here the
   // object is already locked (xhandlers expect object to be unlocked)
@@ -352,7 +352,7 @@ void LIRGenerator::do_MonitorEnter(MonitorEnter* x) {
 void LIRGenerator::do_MonitorExit(MonitorExit* x) {
-  assert(x->is_root(),"");
+  assert(x->is_pinned(),"");
   LIRItem obj(x->obj(), this);
   obj.dont_load_item();
@@ -984,9 +984,11 @@ void LIRGenerator::do_Convert(Convert* x) {
 void LIRGenerator::do_NewInstance(NewInstance* x) {
+#ifndef PRODUCT
   if (PrintNotLoaded && !x->klass()->is_loaded()) {
-    tty->print_cr(" ###class not loaded at new bci %d", x->bci());
+    tty->print_cr(" ###class not loaded at new bci %d", x->printable_bci());
   }
+#endif
   CodeEmitInfo* info = state_for(x, x->state());
   LIR_Opr reg = result_register_for(x->type());
   LIR_Opr klass_reg = new_register(objectType);
@@ -1127,7 +1129,7 @@ void LIRGenerator::do_CheckCast(CheckCast* x) {
   obj.load_item();
   // info for exceptions
-  CodeEmitInfo* info_for_exception = state_for(x, x->state()->copy_locks());
+  CodeEmitInfo* info_for_exception = state_for(x);
   CodeStub* stub;
   if (x->is_incompatible_class_change_check()) {
...
@@ -174,31 +174,6 @@ void CFGPrinterOutput::print_state(BlockBegin* block) {
   int index;
   Value value;
-  if (state->stack_size() > 0) {
-    print_begin("stack");
-    print("size %d", state->stack_size());
-    for_each_stack_value(state, index, value) {
-      ip.print_phi(index, value, block);
-      print_operand(value);
-      output()->cr();
-    }
-    print_end("stack");
-  }
-  if (state->locks_size() > 0) {
-    print_begin("locks");
-    print("size %d", state->locks_size());
-    for_each_lock_value(state, index, value) {
-      ip.print_phi(index, value, block);
-      print_operand(value);
-      output()->cr();
-    }
-    print_end("locks");
-  }
   for_each_state(state) {
     print_begin("locals");
     print("size %d", state->locals_size());
@@ -210,6 +185,33 @@ void CFGPrinterOutput::print_state(BlockBegin* block) {
       output()->cr();
     }
     print_end("locals");
+    if (state->stack_size() > 0) {
+      print_begin("stack");
+      print("size %d", state->stack_size());
+      print("method \"%s\"", method_name(state->scope()->method()));
+      for_each_stack_value(state, index, value) {
+        ip.print_phi(index, value, block);
+        print_operand(value);
+        output()->cr();
+      }
+      print_end("stack");
+    }
+    if (state->locks_size() > 0) {
+      print_begin("locks");
+      print("size %d", state->locks_size());
+      print("method \"%s\"", method_name(state->scope()->method()));
+      for_each_lock_value(state, index, value) {
+        ip.print_phi(index, value, block);
+        print_operand(value);
+        output()->cr();
+      }
+      print_end("locks");
+    }
   }
   print_end("states");
@@ -230,7 +232,8 @@ void CFGPrinterOutput::print_HIR(Value instr) {
   if (instr->is_pinned()) {
     output()->put('.');
   }
-  output()->print("%d %d ", instr->bci(), instr->use_count());
+  output()->print("%d %d ", instr->printable_bci(), instr->use_count());
   print_operand(instr);
@@ -271,7 +274,7 @@ void CFGPrinterOutput::print_block(BlockBegin* block) {
   print("name \"B%d\"", block->block_id());
   print("from_bci %d", block->bci());
-  print("to_bci %d", (block->end() == NULL ? -1 : block->end()->bci()));
+  print("to_bci %d", (block->end() == NULL ? -1 : block->end()->printable_bci()));
   output()->indent();
   output()->print("predecessors ");
...
@@ -205,7 +205,7 @@ void Canonicalizer::do_StoreField (StoreField* x) {
     // limit this optimization to current block
     if (value != NULL && in_current_block(conv)) {
       set_canonical(new StoreField(x->obj(), x->offset(), x->field(), value, x->is_static(),
-                                   x->lock_stack(), x->state_before(), x->is_loaded(), x->is_initialized()));
+                                   x->state_before(), x->is_loaded(), x->is_initialized()));
       return;
     }
   }
@@ -256,7 +256,7 @@ void Canonicalizer::do_StoreIndexed (StoreIndexed* x) {
     // limit this optimization to current block
     if (value != NULL && in_current_block(conv)) {
       set_canonical(new StoreIndexed(x->array(), x->index(), x->length(),
-                                     x->elt_type(), value, x->lock_stack()));
+                                     x->elt_type(), value, x->state_before()));
       return;
     }
   }
@@ -667,7 +667,7 @@ void Canonicalizer::do_If(If* x) {
         }
       }
       set_canonical(canon);
-      set_bci(cmp->bci());
+      set_bci(cmp->state_before()->bci());
     }
   }
 } else if (l->as_InstanceOf() != NULL) {
@@ -685,7 +685,7 @@ void Canonicalizer::do_If(If* x) {
     set_canonical(new Goto(is_inst_sux, x->state_before(), x->is_safepoint()));
   } else {
     // successors differ => simplify to: IfInstanceOf
-    set_canonical(new IfInstanceOf(inst->klass(), inst->obj(), true, inst->bci(), is_inst_sux, no_inst_sux));
+    set_canonical(new IfInstanceOf(inst->klass(), inst->obj(), true, inst->state_before()->bci(), is_inst_sux, no_inst_sux));
   }
 }
 } else if (rt == objectNull && (l->as_NewInstance() || l->as_NewArray())) {
...
@@ -22,7 +22,6 @@
  *
  */
-class BlockBegin;
 class CompilationResourceObj;
 class XHandlers;
 class ExceptionInfo;
...
@@ -659,7 +659,6 @@ GraphBuilder::ScopeData::ScopeData(ScopeData* parent)
   , _jsr_xhandlers(NULL)
   , _caller_stack_size(-1)
   , _continuation(NULL)
-  , _continuation_state(NULL)
   , _num_returns(0)
   , _cleanup_block(NULL)
   , _cleanup_return_prev(NULL)
@@ -795,14 +794,6 @@ void GraphBuilder::sort_top_into_worklist(BlockList* worklist, BlockBegin* top)
   if (i >= -1) worklist->at_put(i + 1, top);
 }
-int GraphBuilder::ScopeData::caller_stack_size() const {
-  ValueStack* state = scope()->caller_state();
-  if (state == NULL) {
-    return 0;
-  }
-  return state->stack_size();
-}
 BlockBegin* GraphBuilder::ScopeData::remove_from_work_list() {
   if (is_work_list_empty()) {
@@ -880,7 +871,7 @@ void GraphBuilder::load_constant() {
   ciObject* obj = con.as_object();
   if (!obj->is_loaded()
       || (PatchALot && obj->klass() != ciEnv::current()->String_klass())) {
-    patch_state = state()->copy();
+    patch_state = copy_state_before();
     t = new ObjectConstant(obj);
   } else {
     assert(!obj->is_klass(), "must be java_mirror of klass");
@@ -902,7 +893,8 @@ void GraphBuilder::load_constant() {
 void GraphBuilder::load_local(ValueType* type, int index) {
-  Value x = state()->load_local(index);
+  Value x = state()->local_at(index);
+  assert(x != NULL && !x->type()->is_illegal(), "access of illegal local variable");
   push(type, x);
 }
@@ -942,19 +934,21 @@ void GraphBuilder::store_local(ValueStack* state, Value x, ValueType* type, int
 void GraphBuilder::load_indexed(BasicType type) {
+  ValueStack* state_before = copy_state_for_exception();
   Value index = ipop();
   Value array = apop();
   Value length = NULL;
   if (CSEArrayLength ||
       (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
       (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant())) {
-    length = append(new ArrayLength(array, lock_stack()));
+    length = append(new ArrayLength(array, state_before));
   }
-  push(as_ValueType(type), append(new LoadIndexed(array, index, length, type, lock_stack())));
+  push(as_ValueType(type), append(new LoadIndexed(array, index, length, type, state_before)));
 }

 void GraphBuilder::store_indexed(BasicType type) {
+  ValueStack* state_before = copy_state_for_exception();
   Value value = pop(as_ValueType(type));
   Value index = ipop();
   Value array = apop();
@@ -962,9 +956,9 @@ void GraphBuilder::store_indexed(BasicType type) {
   if (CSEArrayLength ||
       (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
       (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant())) {
-    length = append(new ArrayLength(array, lock_stack()));
+    length = append(new ArrayLength(array, state_before));
   }
-  StoreIndexed* result = new StoreIndexed(array, index, length, type, value, lock_stack());
+  StoreIndexed* result = new StoreIndexed(array, index, length, type, value, state_before);
   append(result);
   _memory->store_value(value);
@@ -1063,12 +1057,12 @@ void GraphBuilder::stack_op(Bytecodes::Code code) {
 }
-void GraphBuilder::arithmetic_op(ValueType* type, Bytecodes::Code code, ValueStack* stack) {
+void GraphBuilder::arithmetic_op(ValueType* type, Bytecodes::Code code, ValueStack* state_before) {
   Value y = pop(type);
   Value x = pop(type);
   // NOTE: strictfp can be queried from current method since we don't
   // inline methods with differing strictfp bits
-  Value res = new ArithmeticOp(code, x, y, method()->is_strict(), stack);
+  Value res = new ArithmeticOp(code, x, y, method()->is_strict(), state_before);
   // Note: currently single-precision floating-point rounding on Intel is handled at the LIRGenerator level
   res = append(res);
   if (method()->is_strict()) {
@@ -1132,7 +1126,7 @@ void GraphBuilder::logic_op(ValueType* type, Bytecodes::Code code) {
 void GraphBuilder::compare_op(ValueType* type, Bytecodes::Code code) {
-  ValueStack* state_before = state()->copy();
+  ValueStack* state_before = copy_state_before();
   Value y = pop(type);
   Value x = pop(type);
   ipush(append(new CompareOp(code, x, y, state_before)));
@@ -1217,7 +1211,7 @@ void GraphBuilder::if_node(Value x, If::Condition cond, Value y, ValueStack* sta
 void GraphBuilder::if_zero(ValueType* type, If::Condition cond) {
   Value y = append(new Constant(intZero));
-  ValueStack* state_before = state()->copy();
+  ValueStack* state_before = copy_state_before();
   Value x = ipop();
   if_node(x, cond, y, state_before);
 }
@@ -1225,14 +1219,14 @@ void GraphBuilder::if_zero(ValueType* type, If::Condition cond) {
 void GraphBuilder::if_null(ValueType* type, If::Condition cond) {
   Value y = append(new Constant(objectNull));
-  ValueStack* state_before = state()->copy();
+  ValueStack* state_before = copy_state_before();
   Value x = apop();
   if_node(x, cond, y, state_before);
 }

 void GraphBuilder::if_same(ValueType* type, If::Condition cond) {
-  ValueStack* state_before = state()->copy();
+  ValueStack* state_before = copy_state_before();
   Value y = pop(type);
   Value x = pop(type);
   if_node(x, cond, y, state_before);
@@ -1282,7 +1276,7 @@ void GraphBuilder::table_switch() {
     BlockBegin* tsux = block_at(bci() + switch_->dest_offset_at(0));
     BlockBegin* fsux = block_at(bci() + switch_->default_offset());
     bool is_bb = tsux->bci() < bci() || fsux->bci() < bci();
-    ValueStack* state_before = is_bb ? state() : NULL;
+    ValueStack* state_before = is_bb ? copy_state_before() : NULL;
     append(new If(ipop(), If::eql, true, key, tsux, fsux, state_before, is_bb));
   } else {
     // collect successors
@@ -1295,7 +1289,7 @@ void GraphBuilder::table_switch() {
     }
     // add default successor
     sux->at_put(i, block_at(bci() + switch_->default_offset()));
-    ValueStack* state_before = has_bb ? state() : NULL;
+    ValueStack* state_before = has_bb ? copy_state_before() : NULL;
     append(new TableSwitch(ipop(), sux, switch_->low_key(), state_before, has_bb));
   }
 }
@@ -1314,7 +1308,7 @@ void GraphBuilder::lookup_switch() {
     BlockBegin* tsux = block_at(bci() + pair->offset());
     BlockBegin* fsux = block_at(bci() + switch_->default_offset());
     bool is_bb = tsux->bci() < bci() || fsux->bci() < bci();
-    ValueStack* state_before = is_bb ? state() : NULL;
+    ValueStack* state_before = is_bb ? copy_state_before() : NULL;
     append(new If(ipop(), If::eql, true, key, tsux, fsux, state_before, is_bb));
   } else {
     // collect successors & keys
@@ -1330,7 +1324,7 @@ void GraphBuilder::lookup_switch() {
     }
     // add default successor
     sux->at_put(i, block_at(bci() + switch_->default_offset()));
-    ValueStack* state_before = has_bb ? state() : NULL;
+    ValueStack* state_before = has_bb ? copy_state_before() : NULL;
     append(new LookupSwitch(ipop(), sux, keys, state_before, has_bb));
   }
 }
@@ -1340,7 +1334,7 @@ void GraphBuilder::call_register_finalizer() {
   // the registration on return.
   // Gather some type information about the receiver
-  Value receiver = state()->load_local(0);
+  Value receiver = state()->local_at(0);
   assert(receiver != NULL, "must have a receiver");
   ciType* declared_type = receiver->declared_type();
   ciType* exact_type = receiver->exact_type();
@@ -1373,10 +1367,11 @@ void GraphBuilder::call_register_finalizer() {
   if (needs_check) {
     // Perform the registration of finalizable objects.
+    ValueStack* state_before = copy_state_for_exception();
     load_local(objectType, 0);
     append_split(new Intrinsic(voidType, vmIntrinsics::_Object_init,
                                state()->pop_arguments(1),
-                               true, lock_stack(), true));
+                               true, state_before, true));
   }
 }
@@ -1395,12 +1390,14 @@ void GraphBuilder::method_return(Value x) {
     // If the inlined method is synchronized, the monitor must be
     // released before we jump to the continuation block.
     if (method()->is_synchronized()) {
-      int i = state()->caller_state()->locks_size();
-      assert(state()->locks_size() == i + 1, "receiver must be locked here");
-      monitorexit(state()->lock_at(i), SynchronizationEntryBCI);
+      assert(state()->locks_size() == 1, "receiver must be locked here");
+      monitorexit(state()->lock_at(0), SynchronizationEntryBCI);
     }
-    state()->truncate_stack(caller_stack_size());
+    // State at end of inlined method is the state of the caller
+    // without the method parameters on stack, including the
+    // return value, if any, of the inlined method on operand stack.
+    set_state(state()->caller_state()->copy_for_parsing());
     if (x != NULL) {
       state()->push(x->type(), x);
     }
@@ -1412,14 +1409,6 @@ void GraphBuilder::method_return(Value x) {
       set_inline_cleanup_info(_block, _last, state());
     }
-    // State at end of inlined method is the state of the caller
-    // without the method parameters on stack, including the
-    // return value, if any, of the inlined method on operand stack.
-    set_state(scope_data()->continuation_state()->copy());
-    if (x) {
-      state()->push(x->type(), x);
-    }
     // The current bci() is in the wrong scope, so use the bci() of
     // the continuation point.
     append_with_bci(goto_callee, scope_data()->continuation()->bci());
@@ -1455,11 +1444,11 @@ void GraphBuilder::access_field(Bytecodes::Code code) {
   field->will_link(method()->holder(), code);
   const bool is_initialized = is_loaded && holder->is_initialized();
-  ValueStack* state_copy = NULL;
+  ValueStack* state_before = NULL;
   if (!is_initialized || PatchALot) {
     // save state before instruction for debug info when
     // deoptimization happens during patching
-    state_copy = state()->copy();
+    state_before = copy_state_before();
   }
   Value obj = NULL;
@@ -1468,9 +1457,9 @@ void GraphBuilder::access_field(Bytecodes::Code code) {
     // fully initialized and resolved in this constant pool. The will_link test
     // above essentially checks if this class is resolved in this constant pool
     // so, the is_initialized flag should be suffiect.
-    if (state_copy != NULL) {
+    if (state_before != NULL) {
       // build a patching constant
-      obj = new Constant(new ClassConstant(holder), state_copy);
+      obj = new Constant(new ClassConstant(holder), state_before);
     } else {
       obj = new Constant(new ClassConstant(holder));
     }
@@ -1499,25 +1488,32 @@ void GraphBuilder::access_field(Bytecodes::Code code) {
       }
       if (constant != NULL) {
        push(type, append(constant));
-        state_copy = NULL; // Not a potential deoptimization point (see set_state_before logic below)
       } else {
+        if (state_before == NULL) {
+          state_before = copy_state_for_exception();
+        }
        push(type, append(new LoadField(append(obj), offset, field, true,
-                                        lock_stack(), state_copy, is_loaded, is_initialized)));
+                                        state_before, is_loaded, is_initialized)));
       }
       break;
     }
   case Bytecodes::_putstatic:
     { Value val = pop(type);
-      append(new StoreField(append(obj), offset, field, val, true, lock_stack(), state_copy, is_loaded, is_initialized));
+      if (state_before == NULL) {
+        state_before = copy_state_for_exception();
+      }
+      append(new StoreField(append(obj), offset, field, val, true, state_before, is_loaded, is_initialized));
     }
     break;
   case Bytecodes::_getfield :
     {
-      LoadField* load = new LoadField(apop(), offset, field, false, lock_stack(), state_copy, is_loaded, true);
+      if (state_before == NULL) {
+        state_before = copy_state_for_exception();
+      }
+      LoadField* load = new LoadField(apop(), offset, field, false, state_before, is_loaded, true);
       Value replacement = is_loaded ? _memory->load(load) : load;
       if (replacement != load) {
-        assert(replacement->bci() != -99 || replacement->as_Phi() || replacement->as_Local(),
-               "should already by linked");
+        assert(replacement->is_linked() || !replacement->can_be_linked(), "should already by linked");
         push(type, replacement);
       } else {
         push(type, append(load));
@@ -1527,7 +1523,10 @@ void GraphBuilder::access_field(Bytecodes::Code code) {
   case Bytecodes::_putfield :
     { Value val = pop(type);
-      StoreField* store = new StoreField(apop(), offset, field, val, false, lock_stack(), state_copy, is_loaded, true);
+      if (state_before == NULL) {
+        state_before = copy_state_for_exception();
+      }
+      StoreField* store = new StoreField(apop(), offset, field, val, false, state_before, is_loaded, true);
       if (is_loaded) store = _memory->store(store);
       if (store != NULL) {
         append(store);
@@ -1647,7 +1646,7 @@ void GraphBuilder::invoke(Bytecodes::Code code) {
       actual_recv = target->holder();
       // insert a check it's really the expected class.
-      CheckCast* c = new CheckCast(klass, receiver, NULL);
+      CheckCast* c = new CheckCast(klass, receiver, copy_state_for_exception());
       c->set_incompatible_class_change_check();
       c->set_direct_compare(klass->is_final());
       append_split(c);
@@ -1732,7 +1731,7 @@ void GraphBuilder::invoke(Bytecodes::Code code) {
   // We require the debug info to be the "state before" because
   // invokedynamics may deoptimize.
-  ValueStack* state_before = is_invokedynamic ? state()->copy() : NULL;
+  ValueStack* state_before = is_invokedynamic ? copy_state_before() : copy_state_exhandling();
   Values* args = state()->pop_arguments(target->arg_size_no_receiver());
   Value recv = has_receiver ? apop() : NULL;
@@ -1795,24 +1794,26 @@ void GraphBuilder::invoke(Bytecodes::Code code) {
 void GraphBuilder::new_instance(int klass_index) {
+  ValueStack* state_before = copy_state_exhandling();
   bool will_link;
   ciKlass* klass = stream()->get_klass(will_link);
   assert(klass->is_instance_klass(), "must be an instance klass");
-  NewInstance* new_instance = new NewInstance(klass->as_instance_klass());
+  NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before);
   _memory->new_instance(new_instance);
   apush(append_split(new_instance));
 }

 void GraphBuilder::new_type_array() {
-  apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index())));
+  ValueStack* state_before = copy_state_exhandling();
+  apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before)));
 }

 void GraphBuilder::new_object_array() {
   bool will_link;
   ciKlass* klass = stream()->get_klass(will_link);
-  ValueStack* state_before = !klass->is_loaded() || PatchALot ? state()->copy() : NULL;
+  ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
   NewArray* n = new NewObjectArray(klass, ipop(), state_before);
   apush(append_split(n));
 }
@@ -1838,7 +1839,7 @@ bool GraphBuilder::direct_compare(ciKlass* k) {
 void GraphBuilder::check_cast(int klass_index) {
   bool will_link;
   ciKlass* klass = stream()->get_klass(will_link);
-  ValueStack* state_before = !klass->is_loaded() || PatchALot ? state()->copy() : NULL;
+  ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_for_exception();
   CheckCast* c = new CheckCast(klass, apop(), state_before);
   apush(append_split(c));
   c->set_direct_compare(direct_compare(klass));
@@ -1859,7 +1860,7 @@ void GraphBuilder::check_cast(int klass_index) {
 void GraphBuilder::instance_of(int klass_index) {
   bool will_link;
   ciKlass* klass = stream()->get_klass(will_link);
-  ValueStack* state_before = !klass->is_loaded() || PatchALot ? state()->copy() : NULL;
+  ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
   InstanceOf* i = new InstanceOf(klass, apop(), state_before);
   ipush(append_split(i));
   i->set_direct_compare(direct_compare(klass));
@@ -1879,25 +1880,13 @@ void GraphBuilder::instance_of(int klass_index) {
 void GraphBuilder::monitorenter(Value x, int bci) {
   // save state before locking in case of deoptimization after a NullPointerException
-  ValueStack* lock_stack_before = lock_stack();
-  append_with_bci(new MonitorEnter(x, state()->lock(scope(), x), lock_stack_before), bci);
+  ValueStack* state_before = copy_state_for_exception_with_bci(bci);
+  append_with_bci(new MonitorEnter(x, state()->lock(x), state_before), bci);
   kill_all();
 }

 void GraphBuilder::monitorexit(Value x, int bci) {
-  // Note: the comment below is only relevant for the case where we do
-  // not deoptimize due to asynchronous exceptions (!(DeoptC1 &&
-  // DeoptOnAsyncException), which is not used anymore)
-  // Note: Potentially, the monitor state in an exception handler
-  // can be wrong due to wrong 'initialization' of the handler
-  // via a wrong asynchronous exception path. This can happen,
-  // if the exception handler range for asynchronous exceptions
-  // is too long (see also java bug 4327029, and comment in
-  // GraphBuilder::handle_exception()). This may cause 'under-
-  // flow' of the monitor stack => bailout instead.
-  if (state()->locks_size() < 1) BAILOUT("monitor stack underflow");
   append_with_bci(new MonitorExit(x, state()->unlock()), bci);
   kill_all();
 }
@@ -1906,7 +1895,7 @@ void GraphBuilder::monitorexit(Value x, int bci) {
 void GraphBuilder::new_multi_array(int dimensions) {
   bool will_link;
   ciKlass* klass = stream()->get_klass(will_link);
-  ValueStack* state_before = !klass->is_loaded() || PatchALot ? state()->copy() : NULL;
+  ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
   Values* dims = new Values(dimensions, NULL);
   // fill in all dimensions
@@ -1921,8 +1910,10 @@ void GraphBuilder::new_multi_array(int dimensions) {
 void GraphBuilder::throw_op(int bci) {
   // We require that the debug info for a Throw be the "state before"
   // the Throw (i.e., exception oop is still on TOS)
-  ValueStack* state_before = state()->copy();
+  ValueStack* state_before = copy_state_before_with_bci(bci);
   Throw* t = new Throw(apop(), state_before);
+  // operand stack not needed after a throw
+  state()->truncate_stack(0);
   append_with_bci(t, bci);
 }
@@ -1947,60 +1938,62 @@ Value GraphBuilder::round_fp(Value fp_value) {
 Instruction* GraphBuilder::append_with_bci(Instruction* instr, int bci) {
   Canonicalizer canon(compilation(), instr, bci);
   Instruction* i1 = canon.canonical();
-  if (i1->bci() != -99) {
+  if (i1->is_linked() || !i1->can_be_linked()) {
     // Canonicalizer returned an instruction which was already
     // appended so simply return it.
     return i1;
-  } else if (UseLocalValueNumbering) {
+  }
+
+  if (UseLocalValueNumbering) {
     // Lookup the instruction in the ValueMap and add it to the map if
     // it's not found.
     Instruction* i2 = vmap()->find_insert(i1);
     if (i2 != i1) {
       // found an entry in the value map, so just return it.
-      assert(i2->bci() != -1, "should already be linked");
+      assert(i2->is_linked(), "should already be linked");
       return i2;
     }
     ValueNumberingEffects vne(vmap());
     i1->visit(&vne);
   }

-  if (i1->as_Phi() == NULL && i1->as_Local() == NULL) {
-    // i1 was not eliminated => append it
-    assert(i1->next() == NULL, "shouldn't already be linked");
-    _last = _last->set_next(i1, canon.bci());
+  // i1 was not eliminated => append it
+  assert(i1->next() == NULL, "shouldn't already be linked");
+  _last = _last->set_next(i1, canon.bci());

-    if (++_instruction_count >= InstructionCountCutoff
-        && !bailed_out()) {
-      // set the bailout state but complete normal processing. We
-      // might do a little more work before noticing the bailout so we
-      // want processing to continue normally until it's noticed.
-      bailout("Method and/or inlining is too large");
-    }
+  if (++_instruction_count >= InstructionCountCutoff && !bailed_out()) {
+    // set the bailout state but complete normal processing. We
+    // might do a little more work before noticing the bailout so we
+    // want processing to continue normally until it's noticed.
+    bailout("Method and/or inlining is too large");
+  }

 #ifndef PRODUCT
-    if (PrintIRDuringConstruction) {
-      InstructionPrinter ip;
-      ip.print_line(i1);
-      if (Verbose) {
-        state()->print();
-      }
+  if (PrintIRDuringConstruction) {
+    InstructionPrinter ip;
+    ip.print_line(i1);
+    if (Verbose) {
+      state()->print();
     }
+  }
 #endif
-    assert(_last == i1, "adjust code below");
-    StateSplit* s = i1->as_StateSplit();
-    if (s != NULL && i1->as_BlockEnd() == NULL) {
-      if (EliminateFieldAccess) {
-        Intrinsic* intrinsic = s->as_Intrinsic();
-        if (s->as_Invoke() != NULL || (intrinsic && !intrinsic->preserves_state())) {
-          _memory->kill();
-        }
+
+  // save state after modification of operand stack for StateSplit instructions
+  StateSplit* s = i1->as_StateSplit();
+  if (s != NULL) {
+    if (EliminateFieldAccess) {
+      Intrinsic* intrinsic = s->as_Intrinsic();
+      if (s->as_Invoke() != NULL || (intrinsic && !intrinsic->preserves_state())) {
+        _memory->kill();
       }
-      s->set_state(state()->copy());
     }
+    s->set_state(state()->copy(ValueStack::StateAfter, canon.bci()));
+  }

-    // set up exception handlers for this instruction if necessary
-    if (i1->can_trap()) {
-      assert(exception_state() != NULL || !has_handler(), "must have setup exception state");
-      i1->set_exception_handlers(handle_exception(bci));
-    }
+  // set up exception handlers for this instruction if necessary
+  if (i1->can_trap()) {
+    i1->set_exception_handlers(handle_exception(i1));
+    assert(i1->exception_state() != NULL || !i1->needs_exception_state() || bailed_out(), "handle_exception must set exception state");
   }

   return i1;
 }
@@ -2032,26 +2025,30 @@ void GraphBuilder::null_check(Value value) {
       }
     }
   }
-  append(new NullCheck(value, lock_stack()));
+  append(new NullCheck(value, copy_state_for_exception()));
 }

-XHandlers* GraphBuilder::handle_exception(int cur_bci) {
-  // fast path if it is guaranteed that no exception handlers are present
-  if (!has_handler()) {
-    // TODO: check if return NULL is possible (avoids empty lists)
+XHandlers* GraphBuilder::handle_exception(Instruction* instruction) {
+  if (!has_handler() && (!instruction->needs_exception_state() || instruction->exception_state() != NULL)) {
+    assert(instruction->exception_state() == NULL
+           || instruction->exception_state()->kind() == ValueStack::EmptyExceptionState
+           || (instruction->exception_state()->kind() == ValueStack::ExceptionState && _compilation->env()->jvmti_can_access_local_variables()),
+           "exception_state should be of exception kind");
     return new XHandlers();
   }

   XHandlers* exception_handlers = new XHandlers();
   ScopeData* cur_scope_data = scope_data();
-  ValueStack* s = exception_state();
+  ValueStack* cur_state = instruction->state_before();
+  ValueStack* prev_state = NULL;
   int scope_count = 0;

-  assert(s != NULL, "exception state must be set");
+  assert(cur_state != NULL, "state_before must be set");
   do {
-    assert(cur_scope_data->scope() == s->scope(), "scopes do not match");
+    int cur_bci = cur_state->bci();
+    assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
     assert(cur_bci == SynchronizationEntryBCI || cur_bci == cur_scope_data->stream()->cur_bci(), "invalid bci");

     // join with all potential exception handlers
@@ -2075,10 +2072,15 @@ XHandlers* GraphBuilder::handle_exception(int cur_bci) {
       // previously this was a BAILOUT, but this is not necessary
       // now because asynchronous exceptions are not handled this way.
-      assert(entry->state() == NULL || s->locks_size() == entry->state()->locks_size(), "locks do not match");
+      assert(entry->state() == NULL || cur_state->total_locks_size() == entry->state()->total_locks_size(), "locks do not match");

       // xhandler start with an empty expression stack
-      s->truncate_stack(cur_scope_data->caller_stack_size());
+      if (cur_state->stack_size() != 0) {
+        cur_state = cur_state->copy(ValueStack::ExceptionState, cur_state->bci());
+      }
+      if (instruction->exception_state() == NULL) {
+        instruction->set_exception_state(cur_state);
+      }

       // Note: Usually this join must work. However, very
       // complicated jsr-ret structures where we don't ret from
@@ -2087,12 +2089,12 @@ XHandlers* GraphBuilder::handle_exception(int cur_bci) {
       // The only test case we've seen so far which exhibits this
       // problem is caught by the infinite recursion test in
       // GraphBuilder::jsr() if the join doesn't work.
-      if (!entry->try_merge(s)) {
+      if (!entry->try_merge(cur_state)) {
         BAILOUT_("error while joining with exception handler, prob. due to complicated jsr/rets", exception_handlers);
       }

       // add current state for correct handling of phi functions at begin of xhandler
-      int phi_operand = entry->add_exception_state(s);
+      int phi_operand = entry->add_exception_state(cur_state);

       // add entry to the list of xhandlers of this block
       _block->add_exception_handler(entry);
@@ -2119,26 +2121,39 @@ XHandlers* GraphBuilder::handle_exception(int cur_bci) {
       }
     }

+    if (exception_handlers->length() == 0) {
+      // This scope and all callees do not handle exceptions, so the local
+      // variables of this scope are not needed. However, the scope itself is
+      // required for a correct exception stack trace -> clear out the locals.
+      if (_compilation->env()->jvmti_can_access_local_variables()) {
+        cur_state = cur_state->copy(ValueStack::ExceptionState, cur_state->bci());
+      } else {
+        cur_state = cur_state->copy(ValueStack::EmptyExceptionState, cur_state->bci());
+      }
+      if (prev_state != NULL) {
+        prev_state->set_caller_state(cur_state);
+      }
+      if (instruction->exception_state() == NULL) {
+        instruction->set_exception_state(cur_state);
+      }
+    }
+
     // Set up iteration for next time.
     // If parsing a jsr, do not grab exception handlers from the
     // parent scopes for this method (already got them, and they
     // needed to be cloned)
-    if (cur_scope_data->parsing_jsr()) {
-      IRScope* tmp_scope = cur_scope_data->scope();
-      while (cur_scope_data->parent() != NULL &&
-             cur_scope_data->parent()->scope() == tmp_scope) {
-        cur_scope_data = cur_scope_data->parent();
-      }
-    }
-    if (cur_scope_data != NULL) {
-      if (cur_scope_data->parent() != NULL) {
-        // must use pop_scope instead of caller_state to preserve all monitors
-        s = s->pop_scope();
-      }
-      cur_bci = cur_scope_data->scope()->caller_bci();
-      cur_scope_data = cur_scope_data->parent();
-      scope_count++;
-    }
+    while (cur_scope_data->parsing_jsr()) {
+      cur_scope_data = cur_scope_data->parent();
+    }
+
+    assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
+    assert(cur_state->locks_size() == 0 || cur_state->locks_size() == 1, "unlocking must be done in a catchall exception handler");
+
+    prev_state = cur_state;
+    cur_state = cur_state->caller_state();
+    cur_scope_data = cur_scope_data->parent();
+    scope_count++;
   } while (cur_scope_data != NULL);

   return exception_handlers;
@@ -2243,14 +2258,10 @@ void PhiSimplifier::block_do(BlockBegin* b) {
   );

   ValueStack* state = b->state()->caller_state();
-  int index;
-  Value value;
-  for_each_state(state) {
-    for_each_local_value(state, index, value) {
-      Phi* phi = value->as_Phi();
-      assert(phi == NULL || phi->block() != b, "must not have phi function to simplify in caller state");
-    }
-  }
+  for_each_state_value(state, value,
+    Phi* phi = value->as_Phi();
+    assert(phi == NULL || phi->block() != b, "must not have phi function to simplify in caller state");
+  );
 #endif
 }
@@ -2265,7 +2276,7 @@ void GraphBuilder::connect_to_end(BlockBegin* beg) {
   // setup iteration
   kill_all();
   _block = beg;
-  _state = beg->state()->copy();
+  _state = beg->state()->copy_for_parsing();
   _last = beg;
   iterate_bytecodes_for_block(beg->bci());
 }
@@ -2301,14 +2312,7 @@ BlockEnd* GraphBuilder::iterate_bytecodes_for_block(int bci) {
   while (!bailed_out() && last()->as_BlockEnd() == NULL &&
          (code = stream()->next()) != ciBytecodeStream::EOBC() &&
          (block_at(s.cur_bci()) == NULL || block_at(s.cur_bci()) == block())) {
-    if (has_handler() && can_trap(method(), code)) {
-      // copy the state because it is modified before handle_exception is called
-      set_exception_state(state()->copy());
-    } else {
-      // handle_exception is not called for this bytecode
-      set_exception_state(NULL);
-    }
+    assert(state()->kind() == ValueStack::Parsing, "invalid state kind");

     // Check for active jsr during OSR compilation
     if (compilation()->is_osr_compile()
@@ -2433,12 +2437,12 @@ BlockEnd* GraphBuilder::iterate_bytecodes_for_block(int bci) {
       case Bytecodes::_lmul : arithmetic_op(longType , code); break;
       case Bytecodes::_fmul : arithmetic_op(floatType , code); break;
       case Bytecodes::_dmul : arithmetic_op(doubleType, code); break;
-      case Bytecodes::_idiv : arithmetic_op(intType , code, lock_stack()); break;
-      case Bytecodes::_ldiv : arithmetic_op(longType , code, lock_stack()); break;
+      case Bytecodes::_idiv : arithmetic_op(intType , code, copy_state_for_exception()); break;
+      case Bytecodes::_ldiv : arithmetic_op(longType , code, copy_state_for_exception()); break;
       case Bytecodes::_fdiv : arithmetic_op(floatType , code); break;
       case Bytecodes::_ddiv : arithmetic_op(doubleType, code); break;
-      case Bytecodes::_irem : arithmetic_op(intType , code, lock_stack()); break;
-      case Bytecodes::_lrem : arithmetic_op(longType , code, lock_stack()); break;
+      case Bytecodes::_irem : arithmetic_op(intType , code, copy_state_for_exception()); break;
+      case Bytecodes::_lrem : arithmetic_op(longType , code, copy_state_for_exception()); break;
       case Bytecodes::_frem : arithmetic_op(floatType , code); break;
       case Bytecodes::_drem : arithmetic_op(doubleType, code); break;
       case Bytecodes::_ineg : negate_op(intType ); break;
@@ -2515,11 +2519,10 @@ BlockEnd* GraphBuilder::iterate_bytecodes_for_block(int bci) {
       case Bytecodes::_new : new_instance(s.get_index_u2()); break;
       case Bytecodes::_newarray : new_type_array(); break;
       case Bytecodes::_anewarray : new_object_array(); break;
-      case Bytecodes::_arraylength : ipush(append(new ArrayLength(apop(), lock_stack()))); break;
+      case Bytecodes::_arraylength : { ValueStack* state_before = copy_state_for_exception(); ipush(append(new ArrayLength(apop(), state_before))); break; }
       case Bytecodes::_athrow : throw_op(s.cur_bci()); break;
       case Bytecodes::_checkcast : check_cast(s.get_index_u2()); break;
       case Bytecodes::_instanceof : instance_of(s.get_index_u2()); break;
-      // Note: we do not have special handling for the monitorenter bytecode if DeoptC1 && DeoptOnAsyncException
       case Bytecodes::_monitorenter : monitorenter(apop(), s.cur_bci()); break;
       case Bytecodes::_monitorexit : monitorexit (apop(), s.cur_bci()); break;
       case Bytecodes::_wide : ShouldNotReachHere(); break;
@@ -2546,28 +2549,22 @@ BlockEnd* GraphBuilder::iterate_bytecodes_for_block(int bci) {
   if (end == NULL) {
     // all blocks must end with a BlockEnd instruction => add a Goto
     end = new Goto(block_at(s.cur_bci()), false);
-    _last = _last->set_next(end, prev_bci);
+    append(end);
   }
   assert(end == last()->as_BlockEnd(), "inconsistency");

-  // if the method terminates, we don't need the stack anymore
-  if (end->as_Return() != NULL) {
-    state()->clear_stack();
-  } else if (end->as_Throw() != NULL) {
-    // May have exception handler in caller scopes
-    state()->truncate_stack(scope()->lock_stack_size());
-  }
+  assert(end->state() != NULL, "state must already be present");
+  assert(end->as_Return() == NULL || end->as_Throw() == NULL || end->state()->stack_size() == 0, "stack not needed for return and throw");

   // connect to begin & set state
   // NOTE that inlining may have changed the block we are parsing
   block()->set_end(end);
-  end->set_state(state());

   // propagate state
   for (int i = end->number_of_sux() - 1; i >= 0; i--) {
     BlockBegin* sux = end->sux_at(i);
     assert(sux->is_predecessor(block()), "predecessor missing");
     // be careful, bailout if bytecodes are strange
-    if (!sux->try_merge(state())) BAILOUT_("block join failed", NULL);
+    if (!sux->try_merge(end->state())) BAILOUT_("block join failed", NULL);
     scope_data()->add_to_work_list(end->sux_at(i));
   }
...@@ -2605,7 +2602,6 @@ void GraphBuilder::iterate_all_blocks(bool start_in_current_block_for_inlining) ...@@ -2605,7 +2602,6 @@ void GraphBuilder::iterate_all_blocks(bool start_in_current_block_for_inlining)
bool GraphBuilder::_can_trap [Bytecodes::number_of_java_codes]; bool GraphBuilder::_can_trap [Bytecodes::number_of_java_codes];
bool GraphBuilder::_is_async[Bytecodes::number_of_java_codes];
void GraphBuilder::initialize() { void GraphBuilder::initialize() {
// the following bytecodes are assumed to potentially // the following bytecodes are assumed to potentially
...@@ -2657,67 +2653,14 @@ void GraphBuilder::initialize() { ...@@ -2657,67 +2653,14 @@ void GraphBuilder::initialize() {
, Bytecodes::_multianewarray , Bytecodes::_multianewarray
}; };
// the following bytecodes are assumed to potentially
// throw asynchronous exceptions in compiled code due
// to safepoints (note: these entries could be merged
// with the can_trap_list - however, we need to know
// which ones are asynchronous for now - see also the
// comment in GraphBuilder::handle_exception)
Bytecodes::Code is_async_list[] =
{ Bytecodes::_ifeq
, Bytecodes::_ifne
, Bytecodes::_iflt
, Bytecodes::_ifge
, Bytecodes::_ifgt
, Bytecodes::_ifle
, Bytecodes::_if_icmpeq
, Bytecodes::_if_icmpne
, Bytecodes::_if_icmplt
, Bytecodes::_if_icmpge
, Bytecodes::_if_icmpgt
, Bytecodes::_if_icmple
, Bytecodes::_if_acmpeq
, Bytecodes::_if_acmpne
, Bytecodes::_goto
, Bytecodes::_jsr
, Bytecodes::_ret
, Bytecodes::_tableswitch
, Bytecodes::_lookupswitch
, Bytecodes::_ireturn
, Bytecodes::_lreturn
, Bytecodes::_freturn
, Bytecodes::_dreturn
, Bytecodes::_areturn
, Bytecodes::_return
, Bytecodes::_ifnull
, Bytecodes::_ifnonnull
, Bytecodes::_goto_w
, Bytecodes::_jsr_w
};
// initialize trap tables // initialize trap tables
for (int i = 0; i < Bytecodes::number_of_java_codes; i++) { for (int i = 0; i < Bytecodes::number_of_java_codes; i++) {
_can_trap[i] = false; _can_trap[i] = false;
_is_async[i] = false;
} }
// set standard trap info // set standard trap info
for (uint j = 0; j < ARRAY_SIZE(can_trap_list); j++) { for (uint j = 0; j < ARRAY_SIZE(can_trap_list); j++) {
_can_trap[can_trap_list[j]] = true; _can_trap[can_trap_list[j]] = true;
} }
// We now deoptimize if an asynchronous exception is thrown. This
// considerably cleans up corner case issues related to javac's
// incorrect exception handler ranges for async exceptions and
// allows us to precisely analyze the types of exceptions from
// certain bytecodes.
if (!(DeoptC1 && DeoptOnAsyncException)) {
// set asynchronous trap info
for (uint k = 0; k < ARRAY_SIZE(is_async_list); k++) {
assert(!_can_trap[is_async_list[k]], "can_trap_list and is_async_list should be disjoint");
_can_trap[is_async_list[k]] = true;
_is_async[is_async_list[k]] = true;
}
}
} }
...@@ -2733,7 +2676,7 @@ BlockBegin* GraphBuilder::header_block(BlockBegin* entry, BlockBegin::Flag f, Va ...@@ -2733,7 +2676,7 @@ BlockBegin* GraphBuilder::header_block(BlockBegin* entry, BlockBegin::Flag f, Va
h->set_end(g); h->set_end(g);
h->set(f); h->set(f);
// setup header block end state // setup header block end state
ValueStack* s = state->copy(); // can use copy since stack is empty (=> no phis) ValueStack* s = state->copy(ValueStack::StateAfter, entry->bci()); // can use copy since stack is empty (=> no phis)
assert(s->stack_is_empty(), "must have empty stack at entry point"); assert(s->stack_is_empty(), "must have empty stack at entry point");
g->set_state(s); g->set_state(s);
return h; return h;
...@@ -2768,8 +2711,8 @@ BlockBegin* GraphBuilder::setup_start_block(int osr_bci, BlockBegin* std_entry, ...@@ -2768,8 +2711,8 @@ BlockBegin* GraphBuilder::setup_start_block(int osr_bci, BlockBegin* std_entry,
start->set_next(base, 0); start->set_next(base, 0);
start->set_end(base); start->set_end(base);
// create & setup state for start block // create & setup state for start block
start->set_state(state->copy()); start->set_state(state->copy(ValueStack::StateAfter, std_entry->bci()));
base->set_state(state->copy()); base->set_state(state->copy(ValueStack::StateAfter, std_entry->bci()));
if (base->std_entry()->state() == NULL) { if (base->std_entry()->state() == NULL) {
// setup states for header blocks // setup states for header blocks
...@@ -2803,6 +2746,7 @@ void GraphBuilder::setup_osr_entry_block() { ...@@ -2803,6 +2746,7 @@ void GraphBuilder::setup_osr_entry_block() {
kill_all(); kill_all();
_block = _osr_entry; _block = _osr_entry;
_state = _osr_entry->state()->copy(); _state = _osr_entry->state()->copy();
assert(_state->bci() == osr_bci, "mismatch");
_last = _osr_entry; _last = _osr_entry;
Value e = append(new OsrEntry()); Value e = append(new OsrEntry());
e->set_needs_null_check(false); e->set_needs_null_check(false);
...@@ -2852,7 +2796,6 @@ void GraphBuilder::setup_osr_entry_block() { ...@@ -2852,7 +2796,6 @@ void GraphBuilder::setup_osr_entry_block() {
assert(state->caller_state() == NULL, "should be top scope"); assert(state->caller_state() == NULL, "should be top scope");
state->clear_locals(); state->clear_locals();
Goto* g = new Goto(target, false); Goto* g = new Goto(target, false);
g->set_state(_state->copy());
append(g); append(g);
_osr_entry->set_end(g); _osr_entry->set_end(g);
target->merge(_osr_entry->end()->state()); target->merge(_osr_entry->end()->state());
...@@ -2862,7 +2805,7 @@ void GraphBuilder::setup_osr_entry_block() { ...@@ -2862,7 +2805,7 @@ void GraphBuilder::setup_osr_entry_block() {
ValueStack* GraphBuilder::state_at_entry() { ValueStack* GraphBuilder::state_at_entry() {
ValueStack* state = new ValueStack(scope(), method()->max_locals(), method()->max_stack()); ValueStack* state = new ValueStack(scope(), NULL);
// Set up locals for receiver // Set up locals for receiver
int idx = 0; int idx = 0;
...@@ -2886,7 +2829,7 @@ ValueStack* GraphBuilder::state_at_entry() { ...@@ -2886,7 +2829,7 @@ ValueStack* GraphBuilder::state_at_entry() {
// lock synchronized method // lock synchronized method
if (method()->is_synchronized()) { if (method()->is_synchronized()) {
state->lock(scope(), NULL); state->lock(NULL);
} }
return state; return state;
...@@ -2895,7 +2838,6 @@ ValueStack* GraphBuilder::state_at_entry() { ...@@ -2895,7 +2838,6 @@ ValueStack* GraphBuilder::state_at_entry() {
GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope) GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
: _scope_data(NULL) : _scope_data(NULL)
, _exception_state(NULL)
, _instruction_count(0) , _instruction_count(0)
, _osr_entry(NULL) , _osr_entry(NULL)
, _memory(new MemoryBuffer()) , _memory(new MemoryBuffer())
...@@ -2919,7 +2861,6 @@ GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope) ...@@ -2919,7 +2861,6 @@ GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
// complete graph // complete graph
_vmap = new ValueMap(); _vmap = new ValueMap();
scope->compute_lock_stack_size();
switch (scope->method()->intrinsic_id()) { switch (scope->method()->intrinsic_id()) {
case vmIntrinsics::_dabs : // fall through case vmIntrinsics::_dabs : // fall through
case vmIntrinsics::_dsqrt : // fall through case vmIntrinsics::_dsqrt : // fall through
...@@ -2945,7 +2886,7 @@ GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope) ...@@ -2945,7 +2886,7 @@ GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
// setup the initial block state // setup the initial block state
_block = start_block; _block = start_block;
_state = start_block->state()->copy(); _state = start_block->state()->copy_for_parsing();
_last = start_block; _last = start_block;
load_local(doubleType, 0); load_local(doubleType, 0);
...@@ -2957,7 +2898,6 @@ GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope) ...@@ -2957,7 +2898,6 @@ GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
// connect the begin and end blocks and we're all done. // connect the begin and end blocks and we're all done.
BlockEnd* end = last()->as_BlockEnd(); BlockEnd* end = last()->as_BlockEnd();
block()->set_end(end); block()->set_end(end);
end->set_state(state());
break; break;
} }
default: default:
...@@ -2988,13 +2928,38 @@ GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope) ...@@ -2988,13 +2928,38 @@ GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
} }
ValueStack* GraphBuilder::lock_stack() { ValueStack* GraphBuilder::copy_state_before() {
// return a new ValueStack representing just the current lock stack return copy_state_before_with_bci(bci());
// (for debug info at safepoints in exception throwing or handling) }
ValueStack* new_stack = state()->copy_locks();
return new_stack; ValueStack* GraphBuilder::copy_state_exhandling() {
return copy_state_exhandling_with_bci(bci());
}
ValueStack* GraphBuilder::copy_state_for_exception() {
return copy_state_for_exception_with_bci(bci());
}
ValueStack* GraphBuilder::copy_state_before_with_bci(int bci) {
return state()->copy(ValueStack::StateBefore, bci);
} }
ValueStack* GraphBuilder::copy_state_exhandling_with_bci(int bci) {
if (!has_handler()) return NULL;
return state()->copy(ValueStack::StateBefore, bci);
}
ValueStack* GraphBuilder::copy_state_for_exception_with_bci(int bci) {
ValueStack* s = copy_state_exhandling_with_bci(bci);
if (s == NULL) {
if (_compilation->env()->jvmti_can_access_local_variables()) {
s = state()->copy(ValueStack::ExceptionState, bci);
} else {
s = state()->copy(ValueStack::EmptyExceptionState, bci);
}
}
return s;
}
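A minimal usage sketch, assuming a GraphBuilder member-function context where the current scope has no exception handlers and JVMTI cannot access local variables; the local variable names are placeholders, not part of the patch:

  // sketch only: contrasts the three helpers defined above
  ValueStack* before      = copy_state_before();        // ValueStack::StateBefore copy, never NULL
  ValueStack* exhandling  = copy_state_exhandling();    // NULL here, because has_handler() is false
  ValueStack* for_except  = copy_state_for_exception(); // ValueStack::EmptyExceptionState copy, never NULL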
int GraphBuilder::recursive_inline_level(ciMethod* cur_callee) const { int GraphBuilder::recursive_inline_level(ciMethod* cur_callee) const {
int recur_level = 0; int recur_level = 0;
...@@ -3177,9 +3142,9 @@ bool GraphBuilder::try_inline_intrinsics(ciMethod* callee) { ...@@ -3177,9 +3142,9 @@ bool GraphBuilder::try_inline_intrinsics(ciMethod* callee) {
// create intrinsic node // create intrinsic node
const bool has_receiver = !callee->is_static(); const bool has_receiver = !callee->is_static();
ValueType* result_type = as_ValueType(callee->return_type()); ValueType* result_type = as_ValueType(callee->return_type());
ValueStack* state_before = copy_state_for_exception();
Values* args = state()->pop_arguments(callee->arg_size()); Values* args = state()->pop_arguments(callee->arg_size());
ValueStack* locks = lock_stack();
if (is_profiling()) { if (is_profiling()) {
// Don't profile in the special case where the root method // Don't profile in the special case where the root method
...@@ -3198,7 +3163,7 @@ bool GraphBuilder::try_inline_intrinsics(ciMethod* callee) { ...@@ -3198,7 +3163,7 @@ bool GraphBuilder::try_inline_intrinsics(ciMethod* callee) {
} }
} }
Intrinsic* result = new Intrinsic(result_type, id, args, has_receiver, lock_stack(), Intrinsic* result = new Intrinsic(result_type, id, args, has_receiver, state_before,
preserves_state, cantrap); preserves_state, cantrap);
// append instruction & push result // append instruction & push result
Value value = append_split(result); Value value = append_split(result);
...@@ -3236,10 +3201,9 @@ bool GraphBuilder::try_inline_jsr(int jsr_dest_bci) { ...@@ -3236,10 +3201,9 @@ bool GraphBuilder::try_inline_jsr(int jsr_dest_bci) {
assert(jsr_start_block != NULL, "jsr start block must exist"); assert(jsr_start_block != NULL, "jsr start block must exist");
assert(!jsr_start_block->is_set(BlockBegin::was_visited_flag), "should not have visited jsr yet"); assert(!jsr_start_block->is_set(BlockBegin::was_visited_flag), "should not have visited jsr yet");
Goto* goto_sub = new Goto(jsr_start_block, false); Goto* goto_sub = new Goto(jsr_start_block, false);
goto_sub->set_state(state());
// Must copy state to avoid wrong sharing when parsing bytecodes // Must copy state to avoid wrong sharing when parsing bytecodes
assert(jsr_start_block->state() == NULL, "should have fresh jsr starting block"); assert(jsr_start_block->state() == NULL, "should have fresh jsr starting block");
jsr_start_block->set_state(state()->copy()); jsr_start_block->set_state(copy_state_before_with_bci(jsr_dest_bci));
append(goto_sub); append(goto_sub);
_block->set_end(goto_sub); _block->set_end(goto_sub);
_last = _block = jsr_start_block; _last = _block = jsr_start_block;
...@@ -3290,7 +3254,6 @@ bool GraphBuilder::try_inline_jsr(int jsr_dest_bci) { ...@@ -3290,7 +3254,6 @@ bool GraphBuilder::try_inline_jsr(int jsr_dest_bci) {
void GraphBuilder::inline_sync_entry(Value lock, BlockBegin* sync_handler) { void GraphBuilder::inline_sync_entry(Value lock, BlockBegin* sync_handler) {
assert(lock != NULL && sync_handler != NULL, "lock or handler missing"); assert(lock != NULL && sync_handler != NULL, "lock or handler missing");
set_exception_state(state()->copy());
monitorenter(lock, SynchronizationEntryBCI); monitorenter(lock, SynchronizationEntryBCI);
assert(_last->as_MonitorEnter() != NULL, "monitor enter expected"); assert(_last->as_MonitorEnter() != NULL, "monitor enter expected");
_last->set_needs_null_check(false); _last->set_needs_null_check(false);
...@@ -3332,7 +3295,7 @@ void GraphBuilder::fill_sync_handler(Value lock, BlockBegin* sync_handler, bool ...@@ -3332,7 +3295,7 @@ void GraphBuilder::fill_sync_handler(Value lock, BlockBegin* sync_handler, bool
int bci = SynchronizationEntryBCI; int bci = SynchronizationEntryBCI;
if (lock) { if (lock) {
assert(state()->locks_size() > 0 && state()->lock_at(state()->locks_size() - 1) == lock, "lock is missing"); assert(state()->locks_size() > 0 && state()->lock_at(state()->locks_size() - 1) == lock, "lock is missing");
if (lock->bci() == -99) { if (!lock->is_linked()) {
lock = append_with_bci(lock, -1); lock = append_with_bci(lock, -1);
} }
...@@ -3342,21 +3305,17 @@ void GraphBuilder::fill_sync_handler(Value lock, BlockBegin* sync_handler, bool ...@@ -3342,21 +3305,17 @@ void GraphBuilder::fill_sync_handler(Value lock, BlockBegin* sync_handler, bool
// exit the context of the synchronized method // exit the context of the synchronized method
if (!default_handler) { if (!default_handler) {
pop_scope(); pop_scope();
_state = _state->copy(); bci = _state->caller_state()->bci();
bci = _state->scope()->caller_bci(); _state = _state->caller_state()->copy_for_parsing();
_state = _state->pop_scope()->copy();
} }
} }
// perform the throw as if at the call site // perform the throw as if at the call site
apush(exception); apush(exception);
set_exception_state(state()->copy());
throw_op(bci); throw_op(bci);
BlockEnd* end = last()->as_BlockEnd(); BlockEnd* end = last()->as_BlockEnd();
block()->set_end(end); block()->set_end(end);
end->set_state(state());
_block = orig_block; _block = orig_block;
_state = orig_state; _state = orig_state;
...@@ -3487,7 +3446,7 @@ bool GraphBuilder::try_inline_full(ciMethod* callee, bool holder_known) { ...@@ -3487,7 +3446,7 @@ bool GraphBuilder::try_inline_full(ciMethod* callee, bool holder_known) {
// Pass parameters into callee state: add assignments // Pass parameters into callee state: add assignments
// note: this will also ensure that all arguments are computed before being passed // note: this will also ensure that all arguments are computed before being passed
ValueStack* callee_state = state(); ValueStack* callee_state = state();
ValueStack* caller_state = scope()->caller_state(); ValueStack* caller_state = state()->caller_state();
{ int i = args_base; { int i = args_base;
while (i < caller_state->stack_size()) { while (i < caller_state->stack_size()) {
const int par_no = i - args_base; const int par_no = i - args_base;
...@@ -3502,16 +3461,7 @@ bool GraphBuilder::try_inline_full(ciMethod* callee, bool holder_known) { ...@@ -3502,16 +3461,7 @@ bool GraphBuilder::try_inline_full(ciMethod* callee, bool holder_known) {
// Note that we preserve locals state in case we can use it later // Note that we preserve locals state in case we can use it later
// (see use of pop_scope() below) // (see use of pop_scope() below)
caller_state->truncate_stack(args_base); caller_state->truncate_stack(args_base);
callee_state->truncate_stack(args_base); assert(callee_state->stack_size() == 0, "callee stack must be empty");
// Setup state that is used at returns from the inlined method.
// This is essentially the state of the continuation block,
// but without the return value on stack, if any, this will
// be pushed at the return instruction (see method_return).
scope_data()->set_continuation_state(caller_state->copy());
// Compute lock stack size for callee scope now that args have been passed
scope()->compute_lock_stack_size();
Value lock; Value lock;
BlockBegin* sync_handler; BlockBegin* sync_handler;
...@@ -3520,11 +3470,8 @@ bool GraphBuilder::try_inline_full(ciMethod* callee, bool holder_known) { ...@@ -3520,11 +3470,8 @@ bool GraphBuilder::try_inline_full(ciMethod* callee, bool holder_known) {
if (callee->is_synchronized()) { if (callee->is_synchronized()) {
lock = callee->is_static() ? append(new Constant(new InstanceConstant(callee->holder()->java_mirror()))) lock = callee->is_static() ? append(new Constant(new InstanceConstant(callee->holder()->java_mirror())))
: state()->local_at(0); : state()->local_at(0);
sync_handler = new BlockBegin(-1); sync_handler = new BlockBegin(SynchronizationEntryBCI);
inline_sync_entry(lock, sync_handler); inline_sync_entry(lock, sync_handler);
// recompute the lock stack size
scope()->compute_lock_stack_size();
} }
...@@ -3532,7 +3479,6 @@ bool GraphBuilder::try_inline_full(ciMethod* callee, bool holder_known) { ...@@ -3532,7 +3479,6 @@ bool GraphBuilder::try_inline_full(ciMethod* callee, bool holder_known) {
if (callee_start_block != NULL) { if (callee_start_block != NULL) {
assert(callee_start_block->is_set(BlockBegin::parser_loop_header_flag), "must be loop header"); assert(callee_start_block->is_set(BlockBegin::parser_loop_header_flag), "must be loop header");
Goto* goto_callee = new Goto(callee_start_block, false); Goto* goto_callee = new Goto(callee_start_block, false);
goto_callee->set_state(state());
// The state for this goto is in the scope of the callee, so use // The state for this goto is in the scope of the callee, so use
// the entry bci for the callee instead of the call site bci. // the entry bci for the callee instead of the call site bci.
append_with_bci(goto_callee, 0); append_with_bci(goto_callee, 0);
...@@ -3579,7 +3525,7 @@ bool GraphBuilder::try_inline_full(ciMethod* callee, bool holder_known) { ...@@ -3579,7 +3525,7 @@ bool GraphBuilder::try_inline_full(ciMethod* callee, bool holder_known) {
&& block() == orig_block && block() == orig_block
&& block() == inline_cleanup_block()) { && block() == inline_cleanup_block()) {
_last = inline_cleanup_return_prev(); _last = inline_cleanup_return_prev();
_state = inline_cleanup_state()->pop_scope(); _state = inline_cleanup_state();
} else if (continuation_preds == cont->number_of_preds()) { } else if (continuation_preds == cont->number_of_preds()) {
// Inlining caused the instructions after the invoke in the // Inlining caused the instructions after the invoke in the
// caller to become unreachable. So skip filling this block // caller to become unreachable. So skip filling this block
...@@ -3645,8 +3591,7 @@ void GraphBuilder::push_scope(ciMethod* callee, BlockBegin* continuation) { ...@@ -3645,8 +3591,7 @@ void GraphBuilder::push_scope(ciMethod* callee, BlockBegin* continuation) {
blb.bci2block()->at_put(0, NULL); blb.bci2block()->at_put(0, NULL);
} }
callee_scope->set_caller_state(state()); set_state(new ValueStack(callee_scope, state()->copy(ValueStack::CallerState, bci())));
set_state(state()->push_scope(callee_scope));
ScopeData* data = new ScopeData(scope_data()); ScopeData* data = new ScopeData(scope_data());
data->set_scope(callee_scope); data->set_scope(callee_scope);
...@@ -3670,10 +3615,6 @@ void GraphBuilder::push_scope_for_jsr(BlockBegin* jsr_continuation, int jsr_dest ...@@ -3670,10 +3615,6 @@ void GraphBuilder::push_scope_for_jsr(BlockBegin* jsr_continuation, int jsr_dest
data->set_scope(scope()); data->set_scope(scope());
data->setup_jsr_xhandlers(); data->setup_jsr_xhandlers();
data->set_continuation(continuation()); data->set_continuation(continuation());
if (continuation() != NULL) {
assert(continuation_state() != NULL, "");
data->set_continuation_state(continuation_state()->copy());
}
data->set_jsr_continuation(jsr_continuation); data->set_jsr_continuation(jsr_continuation);
_scope_data = data; _scope_data = data;
} }
...@@ -3768,6 +3709,7 @@ bool GraphBuilder::append_unsafe_prefetch(ciMethod* callee, bool is_static, bool ...@@ -3768,6 +3709,7 @@ bool GraphBuilder::append_unsafe_prefetch(ciMethod* callee, bool is_static, bool
void GraphBuilder::append_unsafe_CAS(ciMethod* callee) { void GraphBuilder::append_unsafe_CAS(ciMethod* callee) {
ValueStack* state_before = copy_state_for_exception();
ValueType* result_type = as_ValueType(callee->return_type()); ValueType* result_type = as_ValueType(callee->return_type());
assert(result_type->is_int(), "int result"); assert(result_type->is_int(), "int result");
Values* args = state()->pop_arguments(callee->arg_size()); Values* args = state()->pop_arguments(callee->arg_size());
...@@ -3796,7 +3738,7 @@ void GraphBuilder::append_unsafe_CAS(ciMethod* callee) { ...@@ -3796,7 +3738,7 @@ void GraphBuilder::append_unsafe_CAS(ciMethod* callee) {
// know which ones so mark the state as not preserved. This will // know which ones so mark the state as not preserved. This will
// cause CSE to invalidate memory across it. // cause CSE to invalidate memory across it.
bool preserves_state = false; bool preserves_state = false;
Intrinsic* result = new Intrinsic(result_type, callee->intrinsic_id(), args, false, lock_stack(), preserves_state); Intrinsic* result = new Intrinsic(result_type, callee->intrinsic_id(), args, false, state_before, preserves_state);
append_split(result); append_split(result);
push(result_type, result); push(result_type, result);
compilation()->set_has_unsafe_access(true); compilation()->set_has_unsafe_access(true);
......
...@@ -58,9 +58,6 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC { ...@@ -58,9 +58,6 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
// BlockEnds. // BlockEnds.
BlockBegin* _continuation; BlockBegin* _continuation;
// Without return value of inlined method on stack
ValueStack* _continuation_state;
// Was this ScopeData created only for the parsing and inlining of // Was this ScopeData created only for the parsing and inlining of
// a jsr? // a jsr?
bool _parsing_jsr; bool _parsing_jsr;
...@@ -125,14 +122,10 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC { ...@@ -125,14 +122,10 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
void set_stream(ciBytecodeStream* stream) { _stream = stream; } void set_stream(ciBytecodeStream* stream) { _stream = stream; }
intx max_inline_size() const { return _max_inline_size; } intx max_inline_size() const { return _max_inline_size; }
int caller_stack_size() const;
BlockBegin* continuation() const { return _continuation; } BlockBegin* continuation() const { return _continuation; }
void set_continuation(BlockBegin* cont) { _continuation = cont; } void set_continuation(BlockBegin* cont) { _continuation = cont; }
ValueStack* continuation_state() const { return _continuation_state; }
void set_continuation_state(ValueStack* s) { _continuation_state = s; }
// Indicates whether this ScopeData was pushed only for the // Indicates whether this ScopeData was pushed only for the
// parsing and inlining of a jsr // parsing and inlining of a jsr
bool parsing_jsr() const { return _parsing_jsr; } bool parsing_jsr() const { return _parsing_jsr; }
...@@ -163,7 +156,6 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC { ...@@ -163,7 +156,6 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
// for all GraphBuilders // for all GraphBuilders
static bool _can_trap[Bytecodes::number_of_java_codes]; static bool _can_trap[Bytecodes::number_of_java_codes];
static bool _is_async[Bytecodes::number_of_java_codes];
// for each instance of GraphBuilder // for each instance of GraphBuilder
ScopeData* _scope_data; // Per-scope data; used for inlining ScopeData* _scope_data; // Per-scope data; used for inlining
...@@ -179,7 +171,6 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC { ...@@ -179,7 +171,6 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
// for each call to connect_to_end; can also be set by inliner // for each call to connect_to_end; can also be set by inliner
BlockBegin* _block; // the current block BlockBegin* _block; // the current block
ValueStack* _state; // the current execution state ValueStack* _state; // the current execution state
ValueStack* _exception_state; // state that will be used by handle_exception
Instruction* _last; // the last instruction added Instruction* _last; // the last instruction added
bool _skip_block; // skip processing of the rest of this block bool _skip_block; // skip processing of the rest of this block
...@@ -194,8 +185,6 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC { ...@@ -194,8 +185,6 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
ValueStack* state() const { return _state; } ValueStack* state() const { return _state; }
void set_state(ValueStack* state) { _state = state; } void set_state(ValueStack* state) { _state = state; }
IRScope* scope() const { return scope_data()->scope(); } IRScope* scope() const { return scope_data()->scope(); }
ValueStack* exception_state() const { return _exception_state; }
void set_exception_state(ValueStack* s) { _exception_state = s; }
ciMethod* method() const { return scope()->method(); } ciMethod* method() const { return scope()->method(); }
ciBytecodeStream* stream() const { return scope_data()->stream(); } ciBytecodeStream* stream() const { return scope_data()->stream(); }
Instruction* last() const { return _last; } Instruction* last() const { return _last; }
...@@ -230,7 +219,7 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC { ...@@ -230,7 +219,7 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
void load_indexed (BasicType type); void load_indexed (BasicType type);
void store_indexed(BasicType type); void store_indexed(BasicType type);
void stack_op(Bytecodes::Code code); void stack_op(Bytecodes::Code code);
void arithmetic_op(ValueType* type, Bytecodes::Code code, ValueStack* lock_stack = NULL); void arithmetic_op(ValueType* type, Bytecodes::Code code, ValueStack* state_before = NULL);
void negate_op(ValueType* type); void negate_op(ValueType* type);
void shift_op(ValueType* type, Bytecodes::Code code); void shift_op(ValueType* type, Bytecodes::Code code);
void logic_op(ValueType* type, Bytecodes::Code code); void logic_op(ValueType* type, Bytecodes::Code code);
...@@ -267,12 +256,8 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC { ...@@ -267,12 +256,8 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
Instruction* append_split(StateSplit* instr); Instruction* append_split(StateSplit* instr);
// other helpers // other helpers
static bool is_async(Bytecodes::Code code) {
assert(0 <= code && code < Bytecodes::number_of_java_codes, "illegal bytecode");
return _is_async[code];
}
BlockBegin* block_at(int bci) { return scope_data()->block_at(bci); } BlockBegin* block_at(int bci) { return scope_data()->block_at(bci); }
XHandlers* handle_exception(int bci); XHandlers* handle_exception(Instruction* instruction);
void connect_to_end(BlockBegin* beg); void connect_to_end(BlockBegin* beg);
void null_check(Value value); void null_check(Value value);
void eliminate_redundant_phis(BlockBegin* start); void eliminate_redundant_phis(BlockBegin* start);
...@@ -283,7 +268,28 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC { ...@@ -283,7 +268,28 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
void kill_all(); void kill_all();
ValueStack* lock_stack(); // use of state copy routines (try to minimize unnecessary state
// object allocations):
// - if the instruction unconditionally needs a full copy of the
// state (for patching for example), then use copy_state_before*
// - if the instruction needs a full copy of the state only for
// handler generation (Instruction::needs_exception_state() returns
// false) then use copy_state_exhandling*
// - if the instruction needs both a full copy of the state for
// handler generation and at least a minimal copy of the state (as
// returned by Instruction::exception_state()) for debug info
// generation (that is when Instruction::needs_exception_state()
// returns true) then use copy_state_for_exception*
ValueStack* copy_state_before_with_bci(int bci);
ValueStack* copy_state_before();
ValueStack* copy_state_exhandling_with_bci(int bci);
ValueStack* copy_state_exhandling();
ValueStack* copy_state_for_exception_with_bci(int bci);
ValueStack* copy_state_for_exception();
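A minimal sketch of the rule above, reusing the Bytecodes::_arraylength handling from this change; the wrapper name is hypothetical and only illustrates which helper an implicitly trapping bytecode picks:

  // hypothetical wrapper mirroring the _arraylength case in
  // iterate_bytecodes_for_block(): arraylength can raise an implicit
  // NullPointerException, so it takes copy_state_for_exception()
  void GraphBuilder::example_array_length() {
    ValueStack* state_before = copy_state_for_exception();
    ipush(append(new ArrayLength(apop(), state_before)));
  }
  // instructions that unconditionally need the full state (patching) use
  // copy_state_before() instead; instructions whose state is only needed
  // for handler generation use copy_state_exhandling(), which may be NULL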
// //
// Inlining support // Inlining support
...@@ -292,9 +298,7 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC { ...@@ -292,9 +298,7 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
// accessors // accessors
bool parsing_jsr() const { return scope_data()->parsing_jsr(); } bool parsing_jsr() const { return scope_data()->parsing_jsr(); }
BlockBegin* continuation() const { return scope_data()->continuation(); } BlockBegin* continuation() const { return scope_data()->continuation(); }
ValueStack* continuation_state() const { return scope_data()->continuation_state(); }
BlockBegin* jsr_continuation() const { return scope_data()->jsr_continuation(); } BlockBegin* jsr_continuation() const { return scope_data()->jsr_continuation(); }
int caller_stack_size() const { return scope_data()->caller_stack_size(); }
void set_continuation(BlockBegin* continuation) { scope_data()->set_continuation(continuation); } void set_continuation(BlockBegin* continuation) { scope_data()->set_continuation(continuation); }
void set_inline_cleanup_info(BlockBegin* block, void set_inline_cleanup_info(BlockBegin* block,
Instruction* return_prev, Instruction* return_prev,
......
...@@ -116,24 +116,6 @@ bool XHandler::equals(XHandler* other) const { ...@@ -116,24 +116,6 @@ bool XHandler::equals(XHandler* other) const {
// Implementation of IRScope // Implementation of IRScope
BlockBegin* IRScope::header_block(BlockBegin* entry, BlockBegin::Flag f, ValueStack* state) {
if (entry == NULL) return NULL;
assert(entry->is_set(f), "entry/flag mismatch");
// create header block
BlockBegin* h = new BlockBegin(entry->bci());
BlockEnd* g = new Goto(entry, false);
h->set_next(g, entry->bci());
h->set_end(g);
h->set(f);
// setup header block end state
ValueStack* s = state->copy(); // can use copy since stack is empty (=> no phis)
assert(s->stack_is_empty(), "must have empty stack at entry point");
g->set_state(s);
return h;
}
BlockBegin* IRScope::build_graph(Compilation* compilation, int osr_bci) { BlockBegin* IRScope::build_graph(Compilation* compilation, int osr_bci) {
GraphBuilder gm(compilation, this); GraphBuilder gm(compilation, this);
NOT_PRODUCT(if (PrintValueNumbering && Verbose) gm.print_stats()); NOT_PRODUCT(if (PrintValueNumbering && Verbose) gm.print_stats());
...@@ -145,12 +127,9 @@ BlockBegin* IRScope::build_graph(Compilation* compilation, int osr_bci) { ...@@ -145,12 +127,9 @@ BlockBegin* IRScope::build_graph(Compilation* compilation, int osr_bci) {
IRScope::IRScope(Compilation* compilation, IRScope* caller, int caller_bci, ciMethod* method, int osr_bci, bool create_graph) IRScope::IRScope(Compilation* compilation, IRScope* caller, int caller_bci, ciMethod* method, int osr_bci, bool create_graph)
: _callees(2) : _callees(2)
, _compilation(compilation) , _compilation(compilation)
, _lock_stack_size(-1)
, _requires_phi_function(method->max_locals()) , _requires_phi_function(method->max_locals())
{ {
_caller = caller; _caller = caller;
_caller_bci = caller == NULL ? -1 : caller_bci;
_caller_state = NULL; // Must be set later if needed
_level = caller == NULL ? 0 : caller->level() + 1; _level = caller == NULL ? 0 : caller->level() + 1;
_method = method; _method = method;
_xhandlers = new XHandlers(method); _xhandlers = new XHandlers(method);
...@@ -182,32 +161,6 @@ int IRScope::max_stack() const { ...@@ -182,32 +161,6 @@ int IRScope::max_stack() const {
} }
void IRScope::compute_lock_stack_size() {
if (!InlineMethodsWithExceptionHandlers) {
_lock_stack_size = 0;
return;
}
// Figure out whether we have to preserve expression stack elements
// for parent scopes, and if so, how many
IRScope* cur_scope = this;
while (cur_scope != NULL && !cur_scope->xhandlers()->has_handlers()) {
cur_scope = cur_scope->caller();
}
_lock_stack_size = (cur_scope == NULL ? 0 :
(cur_scope->caller_state() == NULL ? 0 :
cur_scope->caller_state()->stack_size()));
}
int IRScope::top_scope_bci() const {
assert(!is_top_scope(), "no correct answer for top scope possible");
const IRScope* scope = this;
while (!scope->caller()->is_top_scope()) {
scope = scope->caller();
}
return scope->caller_bci();
}
bool IRScopeDebugInfo::should_reexecute() { bool IRScopeDebugInfo::should_reexecute() {
ciMethod* cur_method = scope()->method(); ciMethod* cur_method = scope()->method();
int cur_bci = bci(); int cur_bci = bci();
...@@ -222,37 +175,24 @@ bool IRScopeDebugInfo::should_reexecute() { ...@@ -222,37 +175,24 @@ bool IRScopeDebugInfo::should_reexecute() {
// Implementation of CodeEmitInfo // Implementation of CodeEmitInfo
// Stack must be NON-null // Stack must be NON-null
CodeEmitInfo::CodeEmitInfo(int bci, ValueStack* stack, XHandlers* exception_handlers) CodeEmitInfo::CodeEmitInfo(ValueStack* stack, XHandlers* exception_handlers)
: _scope(stack->scope()) : _scope(stack->scope())
, _bci(bci)
, _scope_debug_info(NULL) , _scope_debug_info(NULL)
, _oop_map(NULL) , _oop_map(NULL)
, _stack(stack) , _stack(stack)
, _exception_handlers(exception_handlers) , _exception_handlers(exception_handlers)
, _next(NULL)
, _id(-1)
, _is_method_handle_invoke(false) { , _is_method_handle_invoke(false) {
assert(_stack != NULL, "must be non null"); assert(_stack != NULL, "must be non null");
assert(_bci == SynchronizationEntryBCI || Bytecodes::is_defined(scope()->method()->java_code_at_bci(_bci)), "make sure bci points at a real bytecode");
} }
CodeEmitInfo::CodeEmitInfo(CodeEmitInfo* info, bool lock_stack_only) CodeEmitInfo::CodeEmitInfo(CodeEmitInfo* info, ValueStack* stack)
: _scope(info->_scope) : _scope(info->_scope)
, _exception_handlers(NULL) , _exception_handlers(NULL)
, _bci(info->_bci)
, _scope_debug_info(NULL) , _scope_debug_info(NULL)
, _oop_map(NULL) , _oop_map(NULL)
, _stack(stack == NULL ? info->_stack : stack)
, _is_method_handle_invoke(info->_is_method_handle_invoke) { , _is_method_handle_invoke(info->_is_method_handle_invoke) {
if (lock_stack_only) {
if (info->_stack != NULL) {
_stack = info->_stack->copy_locks();
} else {
_stack = NULL;
}
} else {
_stack = info->_stack;
}
// deep copy of exception handlers // deep copy of exception handlers
if (info->_exception_handlers != NULL) { if (info->_exception_handlers != NULL) {
...@@ -273,8 +213,6 @@ void CodeEmitInfo::add_register_oop(LIR_Opr opr) { ...@@ -273,8 +213,6 @@ void CodeEmitInfo::add_register_oop(LIR_Opr opr) {
assert(_oop_map != NULL, "oop map must already exist"); assert(_oop_map != NULL, "oop map must already exist");
assert(opr->is_single_cpu(), "should not call otherwise"); assert(opr->is_single_cpu(), "should not call otherwise");
int frame_size = frame_map()->framesize();
int arg_count = frame_map()->oop_map_arg_count();
VMReg name = frame_map()->regname(opr); VMReg name = frame_map()->regname(opr);
_oop_map->set_oop(name); _oop_map->set_oop(name);
} }
...@@ -383,8 +321,7 @@ class UseCountComputer: public ValueVisitor, BlockClosure { ...@@ -383,8 +321,7 @@ class UseCountComputer: public ValueVisitor, BlockClosure {
void visit(Value* n) { void visit(Value* n) {
// Local instructions and Phis for expression stack values at the // Local instructions and Phis for expression stack values at the
// start of basic blocks are not added to the instruction list // start of basic blocks are not added to the instruction list
if ((*n)->bci() == -99 && (*n)->as_Local() == NULL && if (!(*n)->is_linked() && (*n)->can_be_linked()) {
(*n)->as_Phi() == NULL) {
assert(false, "a node was not appended to the graph"); assert(false, "a node was not appended to the graph");
Compilation::current()->bailout("a node was not appended to the graph"); Compilation::current()->bailout("a node was not appended to the graph");
} }
...@@ -1338,7 +1275,7 @@ void SubstitutionResolver::block_do(BlockBegin* block) { ...@@ -1338,7 +1275,7 @@ void SubstitutionResolver::block_do(BlockBegin* block) {
// need to remove this instruction from the instruction stream // need to remove this instruction from the instruction stream
if (n->subst() != n) { if (n->subst() != n) {
assert(last != NULL, "must have last"); assert(last != NULL, "must have last");
last->set_next(n->next(), n->next()->bci()); last->set_next(n->next());
} else { } else {
last = n; last = n;
} }
......
...@@ -132,8 +132,6 @@ class IRScope: public CompilationResourceObj { ...@@ -132,8 +132,6 @@ class IRScope: public CompilationResourceObj {
// hierarchy // hierarchy
Compilation* _compilation; // the current compilation Compilation* _compilation; // the current compilation
IRScope* _caller; // the caller scope, or NULL IRScope* _caller; // the caller scope, or NULL
int _caller_bci; // the caller bci of the corresponding (inlined) invoke, or < 0
ValueStack* _caller_state; // the caller state, or NULL
int _level; // the inlining level int _level; // the inlining level
ciMethod* _method; // the corresponding method ciMethod* _method; // the corresponding method
IRScopeList _callees; // the inlined method scopes IRScopeList _callees; // the inlined method scopes
...@@ -144,15 +142,9 @@ class IRScope: public CompilationResourceObj { ...@@ -144,15 +142,9 @@ class IRScope: public CompilationResourceObj {
bool _monitor_pairing_ok; // the monitor pairing info bool _monitor_pairing_ok; // the monitor pairing info
BlockBegin* _start; // the start block, successors are method entries BlockBegin* _start; // the start block, successors are method entries
// lock stack management
int _lock_stack_size; // number of expression stack elements which, if present,
// must be spilled to the stack because of exception
// handling inside inlined methods
BitMap _requires_phi_function; // bit is set if phi functions at loop headers are necessary for a local variable BitMap _requires_phi_function; // bit is set if phi functions at loop headers are necessary for a local variable
// helper functions // helper functions
BlockBegin* header_block(BlockBegin* entry, BlockBegin::Flag f, ValueStack* state);
BlockBegin* build_graph(Compilation* compilation, int osr_bci); BlockBegin* build_graph(Compilation* compilation, int osr_bci);
public: public:
...@@ -162,33 +154,16 @@ class IRScope: public CompilationResourceObj { ...@@ -162,33 +154,16 @@ class IRScope: public CompilationResourceObj {
// accessors // accessors
Compilation* compilation() const { return _compilation; } Compilation* compilation() const { return _compilation; }
IRScope* caller() const { return _caller; } IRScope* caller() const { return _caller; }
int caller_bci() const { return _caller_bci; }
ValueStack* caller_state() const { return _caller_state; }
int level() const { return _level; } int level() const { return _level; }
ciMethod* method() const { return _method; } ciMethod* method() const { return _method; }
int max_stack() const; // NOTE: expensive int max_stack() const; // NOTE: expensive
int lock_stack_size() const {
assert(_lock_stack_size != -1, "uninitialized");
return _lock_stack_size;
}
BitMap& requires_phi_function() { return _requires_phi_function; } BitMap& requires_phi_function() { return _requires_phi_function; }
// mutators
// Needed because caller state is not ready at time of IRScope construction
void set_caller_state(ValueStack* state) { _caller_state = state; }
// Needed because caller state changes after IRScope construction.
// Computes number of expression stack elements whose state must be
// preserved in the case of an exception; these may be seen by
// caller scopes. Zero when inlining of methods containing exception
// handlers is disabled, otherwise a conservative approximation.
void compute_lock_stack_size();
// hierarchy // hierarchy
bool is_top_scope() const { return _caller == NULL; } bool is_top_scope() const { return _caller == NULL; }
void add_callee(IRScope* callee) { _callees.append(callee); } void add_callee(IRScope* callee) { _callees.append(callee); }
int number_of_callees() const { return _callees.length(); } int number_of_callees() const { return _callees.length(); }
IRScope* callee_no(int i) const { return _callees.at(i); } IRScope* callee_no(int i) const { return _callees.at(i); }
int top_scope_bci() const;
// accessors, graph // accessors, graph
bool is_valid() const { return start() != NULL; } bool is_valid() const { return start() != NULL; }
...@@ -266,9 +241,6 @@ class CodeEmitInfo: public CompilationResourceObj { ...@@ -266,9 +241,6 @@ class CodeEmitInfo: public CompilationResourceObj {
XHandlers* _exception_handlers; XHandlers* _exception_handlers;
OopMap* _oop_map; OopMap* _oop_map;
ValueStack* _stack; // used by deoptimization (contains also monitors ValueStack* _stack; // used by deoptimization (contains also monitors
int _bci;
CodeEmitInfo* _next;
int _id;
bool _is_method_handle_invoke; // true if the associated call site is a MethodHandle call site. bool _is_method_handle_invoke; // true if the associated call site is a MethodHandle call site.
FrameMap* frame_map() const { return scope()->compilation()->frame_map(); } FrameMap* frame_map() const { return scope()->compilation()->frame_map(); }
...@@ -277,23 +249,10 @@ class CodeEmitInfo: public CompilationResourceObj { ...@@ -277,23 +249,10 @@ class CodeEmitInfo: public CompilationResourceObj {
public: public:
// use scope from ValueStack // use scope from ValueStack
CodeEmitInfo(int bci, ValueStack* stack, XHandlers* exception_handlers); CodeEmitInfo(ValueStack* stack, XHandlers* exception_handlers);
// used by natives
CodeEmitInfo(IRScope* scope, int bci)
: _scope(scope)
, _bci(bci)
, _oop_map(NULL)
, _scope_debug_info(NULL)
, _stack(NULL)
, _exception_handlers(NULL)
, _next(NULL)
, _id(-1)
, _is_method_handle_invoke(false) {
}
// make a copy // make a copy
CodeEmitInfo(CodeEmitInfo* info, bool lock_stack_only = false); CodeEmitInfo(CodeEmitInfo* info, ValueStack* stack = NULL);
// accessors // accessors
OopMap* oop_map() { return _oop_map; } OopMap* oop_map() { return _oop_map; }
...@@ -301,17 +260,10 @@ class CodeEmitInfo: public CompilationResourceObj { ...@@ -301,17 +260,10 @@ class CodeEmitInfo: public CompilationResourceObj {
IRScope* scope() const { return _scope; } IRScope* scope() const { return _scope; }
XHandlers* exception_handlers() const { return _exception_handlers; } XHandlers* exception_handlers() const { return _exception_handlers; }
ValueStack* stack() const { return _stack; } ValueStack* stack() const { return _stack; }
int bci() const { return _bci; }
void add_register_oop(LIR_Opr opr); void add_register_oop(LIR_Opr opr);
void record_debug_info(DebugInformationRecorder* recorder, int pc_offset); void record_debug_info(DebugInformationRecorder* recorder, int pc_offset);
CodeEmitInfo* next() const { return _next; }
void set_next(CodeEmitInfo* next) { _next = next; }
int id() const { return _id; }
void set_id(int id) { _id = id; }
bool is_method_handle_invoke() const { return _is_method_handle_invoke; } bool is_method_handle_invoke() const { return _is_method_handle_invoke; }
void set_is_method_handle_invoke(bool x) { _is_method_handle_invoke = x; } void set_is_method_handle_invoke(bool x) { _is_method_handle_invoke = x; }
}; };
......
...@@ -29,13 +29,6 @@ ...@@ -29,13 +29,6 @@
// Implementation of Instruction // Implementation of Instruction
#ifdef ASSERT
void Instruction::create_hi_word() {
assert(type()->is_double_word() && _hi_word == NULL, "only double word has high word");
_hi_word = new HiWord(this);
}
#endif
Instruction::Condition Instruction::mirror(Condition cond) { Instruction::Condition Instruction::mirror(Condition cond) {
switch (cond) { switch (cond) {
case eql: return eql; case eql: return eql;
...@@ -63,6 +56,15 @@ Instruction::Condition Instruction::negate(Condition cond) { ...@@ -63,6 +56,15 @@ Instruction::Condition Instruction::negate(Condition cond) {
return eql; return eql;
} }
void Instruction::update_exception_state(ValueStack* state) {
if (state != NULL && (state->kind() == ValueStack::EmptyExceptionState || state->kind() == ValueStack::ExceptionState)) {
assert(state->kind() == ValueStack::EmptyExceptionState || Compilation::current()->env()->jvmti_can_access_local_variables(), "unexpected state kind");
_exception_state = state;
} else {
_exception_state = NULL;
}
}
Instruction* Instruction::prev(BlockBegin* block) { Instruction* Instruction::prev(BlockBegin* block) {
Instruction* p = NULL; Instruction* p = NULL;
...@@ -75,7 +77,24 @@ Instruction* Instruction::prev(BlockBegin* block) { ...@@ -75,7 +77,24 @@ Instruction* Instruction::prev(BlockBegin* block) {
} }
void Instruction::state_values_do(ValueVisitor* f) {
if (state_before() != NULL) {
state_before()->values_do(f);
}
if (exception_state() != NULL){
exception_state()->values_do(f);
}
}
#ifndef PRODUCT #ifndef PRODUCT
void Instruction::check_state(ValueStack* state) {
if (state != NULL) {
state->verify();
}
}
void Instruction::print() { void Instruction::print() {
InstructionPrinter ip; InstructionPrinter ip;
print(ip); print(ip);
...@@ -190,35 +209,6 @@ ciType* CheckCast::exact_type() const { ...@@ -190,35 +209,6 @@ ciType* CheckCast::exact_type() const {
return NULL; return NULL;
} }
void ArithmeticOp::other_values_do(ValueVisitor* f) {
if (lock_stack() != NULL) lock_stack()->values_do(f);
}
void NullCheck::other_values_do(ValueVisitor* f) {
lock_stack()->values_do(f);
}
void AccessArray::other_values_do(ValueVisitor* f) {
if (lock_stack() != NULL) lock_stack()->values_do(f);
}
// Implementation of AccessField
void AccessField::other_values_do(ValueVisitor* f) {
if (state_before() != NULL) state_before()->values_do(f);
if (lock_stack() != NULL) lock_stack()->values_do(f);
}
// Implementation of StoreIndexed
IRScope* StoreIndexed::scope() const {
return lock_stack()->scope();
}
// Implementation of ArithmeticOp // Implementation of ArithmeticOp
bool ArithmeticOp::is_commutative() const { bool ArithmeticOp::is_commutative() const {
...@@ -266,13 +256,6 @@ bool LogicOp::is_commutative() const { ...@@ -266,13 +256,6 @@ bool LogicOp::is_commutative() const {
} }
// Implementation of CompareOp
void CompareOp::other_values_do(ValueVisitor* f) {
if (state_before() != NULL) state_before()->values_do(f);
}
// Implementation of IfOp // Implementation of IfOp
bool IfOp::is_commutative() const { bool IfOp::is_commutative() const {
...@@ -301,6 +284,7 @@ IRScope* StateSplit::scope() const { ...@@ -301,6 +284,7 @@ IRScope* StateSplit::scope() const {
void StateSplit::state_values_do(ValueVisitor* f) { void StateSplit::state_values_do(ValueVisitor* f) {
Instruction::state_values_do(f);
if (state() != NULL) state()->values_do(f); if (state() != NULL) state()->values_do(f);
} }
...@@ -316,30 +300,17 @@ void BlockBegin::state_values_do(ValueVisitor* f) { ...@@ -316,30 +300,17 @@ void BlockBegin::state_values_do(ValueVisitor* f) {
} }
void MonitorEnter::state_values_do(ValueVisitor* f) {
StateSplit::state_values_do(f);
_lock_stack_before->values_do(f);
}
void Intrinsic::state_values_do(ValueVisitor* f) {
StateSplit::state_values_do(f);
if (lock_stack() != NULL) lock_stack()->values_do(f);
}
// Implementation of Invoke // Implementation of Invoke
Invoke::Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args, Invoke::Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args,
int vtable_index, ciMethod* target, ValueStack* state_before) int vtable_index, ciMethod* target, ValueStack* state_before)
: StateSplit(result_type) : StateSplit(result_type, state_before)
, _code(code) , _code(code)
, _recv(recv) , _recv(recv)
, _args(args) , _args(args)
, _vtable_index(vtable_index) , _vtable_index(vtable_index)
, _target(target) , _target(target)
, _state_before(state_before)
{ {
set_flag(TargetIsLoadedFlag, target->is_loaded()); set_flag(TargetIsLoadedFlag, target->is_loaded());
set_flag(TargetIsFinalFlag, target_is_loaded() && target->is_final_method()); set_flag(TargetIsFinalFlag, target_is_loaded() && target->is_final_method());
...@@ -376,7 +347,7 @@ void Invoke::state_values_do(ValueVisitor* f) { ...@@ -376,7 +347,7 @@ void Invoke::state_values_do(ValueVisitor* f) {
// Implementation of Constant // Implementation of Constant
intx Constant::hash() const { intx Constant::hash() const {
if (_state == NULL) { if (state_before() == NULL) {
switch (type()->tag()) { switch (type()->tag()) {
case intTag: case intTag:
return HASH2(name(), type()->as_IntConstant()->value()); return HASH2(name(), type()->as_IntConstant()->value());
...@@ -499,25 +470,6 @@ BlockBegin* Constant::compare(Instruction::Condition cond, Value right, ...@@ -499,25 +470,6 @@ BlockBegin* Constant::compare(Instruction::Condition cond, Value right,
} }
void Constant::other_values_do(ValueVisitor* f) {
if (state() != NULL) state()->values_do(f);
}
// Implementation of NewArray
void NewArray::other_values_do(ValueVisitor* f) {
if (state_before() != NULL) state_before()->values_do(f);
}
// Implementation of TypeCheck
void TypeCheck::other_values_do(ValueVisitor* f) {
if (state_before() != NULL) state_before()->values_do(f);
}
// Implementation of BlockBegin // Implementation of BlockBegin
void BlockBegin::set_end(BlockEnd* end) { void BlockBegin::set_end(BlockEnd* end) {
...@@ -604,23 +556,14 @@ void BlockBegin::substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux) { ...@@ -604,23 +556,14 @@ void BlockBegin::substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux) {
// of the inserted block, without recomputing the values of the other blocks // of the inserted block, without recomputing the values of the other blocks
// in the CFG. Therefore the value of "depth_first_number" in BlockBegin becomes meaningless. // in the CFG. Therefore the value of "depth_first_number" in BlockBegin becomes meaningless.
BlockBegin* BlockBegin::insert_block_between(BlockBegin* sux) { BlockBegin* BlockBegin::insert_block_between(BlockBegin* sux) {
// Try to make the bci close to a block with a single pred or sux, BlockBegin* new_sux = new BlockBegin(-99);
// since this makes the block layout algorithm work better.
int bci = -1;
if (sux->number_of_preds() == 1) {
bci = sux->bci();
} else {
bci = end()->bci();
}
BlockBegin* new_sux = new BlockBegin(bci);
// mark this block (special treatment when block order is computed) // mark this block (special treatment when block order is computed)
new_sux->set(critical_edge_split_flag); new_sux->set(critical_edge_split_flag);
// This goto is not a safepoint. // This goto is not a safepoint.
Goto* e = new Goto(sux, false); Goto* e = new Goto(sux, false);
new_sux->set_next(e, bci); new_sux->set_next(e, end()->state()->bci());
new_sux->set_end(e); new_sux->set_end(e);
// setup states // setup states
ValueStack* s = end()->state(); ValueStack* s = end()->state();
...@@ -763,7 +706,7 @@ bool BlockBegin::try_merge(ValueStack* new_state) { ...@@ -763,7 +706,7 @@ bool BlockBegin::try_merge(ValueStack* new_state) {
} }
// copy state because it is altered // copy state because it is altered
new_state = new_state->copy(); new_state = new_state->copy(ValueStack::BlockBeginState, bci());
// Use method liveness to invalidate dead locals // Use method liveness to invalidate dead locals
MethodLivenessResult liveness = new_state->scope()->method()->liveness_at_bci(bci()); MethodLivenessResult liveness = new_state->scope()->method()->liveness_at_bci(bci());
...@@ -800,19 +743,9 @@ bool BlockBegin::try_merge(ValueStack* new_state) { ...@@ -800,19 +743,9 @@ bool BlockBegin::try_merge(ValueStack* new_state) {
// initialize state of block // initialize state of block
set_state(new_state); set_state(new_state);
} else if (existing_state->is_same_across_scopes(new_state)) { } else if (existing_state->is_same(new_state)) {
TRACE_PHI(tty->print_cr("existing state found")); TRACE_PHI(tty->print_cr("existing state found"));
// Inlining may cause the local state not to match up, so walk up
// the new state until we get to the same scope as the
// existing and then start processing from there.
while (existing_state->scope() != new_state->scope()) {
new_state = new_state->caller_state();
assert(new_state != NULL, "could not match up scopes");
assert(false, "check if this is necessary");
}
assert(existing_state->scope() == new_state->scope(), "not matching"); assert(existing_state->scope() == new_state->scope(), "not matching");
assert(existing_state->locals_size() == new_state->locals_size(), "not matching"); assert(existing_state->locals_size() == new_state->locals_size(), "not matching");
assert(existing_state->stack_size() == new_state->stack_size(), "not matching"); assert(existing_state->stack_size() == new_state->stack_size(), "not matching");
...@@ -969,11 +902,6 @@ void BlockEnd::substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux) { ...@@ -969,11 +902,6 @@ void BlockEnd::substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux) {
} }
void BlockEnd::other_values_do(ValueVisitor* f) {
if (state_before() != NULL) state_before()->values_do(f);
}
// Implementation of Phi // Implementation of Phi
// Normal phi functions take their operands from the last instruction of the // Normal phi functions take their operands from the last instruction of the
...@@ -1006,11 +934,6 @@ int Phi::operand_count() const { ...@@ -1006,11 +934,6 @@ int Phi::operand_count() const {
} }
// Implementation of Throw
void Throw::state_values_do(ValueVisitor* f) {
BlockEnd::state_values_do(f);
}
void ProfileInvoke::state_values_do(ValueVisitor* f) { void ProfileInvoke::state_values_do(ValueVisitor* f) {
if (state() != NULL) state()->values_do(f); if (state() != NULL) state()->values_do(f);
......
...@@ -38,7 +38,6 @@ typedef LIR_OprDesc* LIR_Opr; ...@@ -38,7 +38,6 @@ typedef LIR_OprDesc* LIR_Opr;
// serve factoring. // serve factoring.
class Instruction; class Instruction;
class HiWord;
class Phi; class Phi;
class Local; class Local;
class Constant; class Constant;
...@@ -149,7 +148,6 @@ class BlockList: public _BlockList { ...@@ -149,7 +148,6 @@ class BlockList: public _BlockList {
class InstructionVisitor: public StackObj { class InstructionVisitor: public StackObj {
public: public:
void do_HiWord (HiWord* x) { ShouldNotReachHere(); }
virtual void do_Phi (Phi* x) = 0; virtual void do_Phi (Phi* x) = 0;
virtual void do_Local (Local* x) = 0; virtual void do_Local (Local* x) = 0;
virtual void do_Constant (Constant* x) = 0; virtual void do_Constant (Constant* x) = 0;
...@@ -272,7 +270,9 @@ class InstructionVisitor: public StackObj { ...@@ -272,7 +270,9 @@ class InstructionVisitor: public StackObj {
class Instruction: public CompilationResourceObj { class Instruction: public CompilationResourceObj {
private: private:
int _id; // the unique instruction id int _id; // the unique instruction id
int _bci; // the instruction bci #ifndef PRODUCT
int _printable_bci; // the bci of the instruction for printing
#endif
int _use_count; // the number of instructions referring to this value (w/o prev/next); only roots can have use count = 0 or > 1 int _use_count; // the number of instructions referring to this value (w/o prev/next); only roots can have use count = 0 or > 1
int _pin_state; // set of PinReason describing the reason for pinning int _pin_state; // set of PinReason describing the reason for pinning
ValueType* _type; // the instruction value type ValueType* _type; // the instruction value type
...@@ -281,17 +281,18 @@ class Instruction: public CompilationResourceObj { ...@@ -281,17 +281,18 @@ class Instruction: public CompilationResourceObj {
LIR_Opr _operand; // LIR specific information LIR_Opr _operand; // LIR specific information
unsigned int _flags; // Flag bits unsigned int _flags; // Flag bits
ValueStack* _state_before; // Copy of state with input operands still on stack (or NULL)
ValueStack* _exception_state; // Copy of state for exception handling
XHandlers* _exception_handlers; // Flat list of exception handlers covering this instruction XHandlers* _exception_handlers; // Flat list of exception handlers covering this instruction
#ifdef ASSERT
HiWord* _hi_word;
#endif
friend class UseCountComputer; friend class UseCountComputer;
friend class BlockBegin; friend class BlockBegin;
void update_exception_state(ValueStack* state);
bool has_printable_bci() const { return NOT_PRODUCT(_printable_bci != -99) PRODUCT_ONLY(false); }
protected: protected:
void set_bci(int bci) { assert(bci == SynchronizationEntryBCI || bci >= 0, "illegal bci"); _bci = bci; }
void set_type(ValueType* type) { void set_type(ValueType* type) {
assert(type != NULL, "type must exist"); assert(type != NULL, "type must exist");
_type = type; _type = type;
...@@ -325,6 +326,7 @@ class Instruction: public CompilationResourceObj { ...@@ -325,6 +326,7 @@ class Instruction: public CompilationResourceObj {
NeedsPatchingFlag, NeedsPatchingFlag,
ThrowIncompatibleClassChangeErrorFlag, ThrowIncompatibleClassChangeErrorFlag,
ProfileMDOFlag, ProfileMDOFlag,
IsLinkedInBlockFlag,
InstructionLastFlag InstructionLastFlag
}; };
...@@ -356,31 +358,31 @@ class Instruction: public CompilationResourceObj { ...@@ -356,31 +358,31 @@ class Instruction: public CompilationResourceObj {
} }
// creation // creation
Instruction(ValueType* type, bool type_is_constant = false, bool create_hi = true) Instruction(ValueType* type, ValueStack* state_before = NULL, bool type_is_constant = false, bool create_hi = true)
: _bci(-99) : _use_count(0)
, _use_count(0) #ifndef PRODUCT
, _printable_bci(-99)
#endif
, _pin_state(0) , _pin_state(0)
, _type(type) , _type(type)
, _next(NULL) , _next(NULL)
, _subst(NULL) , _subst(NULL)
, _flags(0) , _flags(0)
, _operand(LIR_OprFact::illegalOpr) , _operand(LIR_OprFact::illegalOpr)
, _state_before(state_before)
, _exception_handlers(NULL) , _exception_handlers(NULL)
#ifdef ASSERT
, _hi_word(NULL)
#endif
{ {
check_state(state_before);
assert(type != NULL && (!type->is_constant() || type_is_constant), "type must exist"); assert(type != NULL && (!type->is_constant() || type_is_constant), "type must exist");
#ifdef ASSERT update_exception_state(_state_before);
if (create_hi && type->is_double_word()) {
create_hi_word();
}
#endif
} }
// accessors // accessors
int id() const { return _id; } int id() const { return _id; }
int bci() const { return _bci; } #ifndef PRODUCT
int printable_bci() const { assert(has_printable_bci(), "_printable_bci should have been set"); return _printable_bci; }
void set_printable_bci(int bci) { NOT_PRODUCT(_printable_bci = bci;) }
#endif
int use_count() const { return _use_count; } int use_count() const { return _use_count; }
int pin_state() const { return _pin_state; } int pin_state() const { return _pin_state; }
bool is_pinned() const { return _pin_state != 0 || PinAllInstructions; } bool is_pinned() const { return _pin_state != 0 || PinAllInstructions; }
...@@ -393,9 +395,13 @@ class Instruction: public CompilationResourceObj { ...@@ -393,9 +395,13 @@ class Instruction: public CompilationResourceObj {
void set_needs_null_check(bool f) { set_flag(NeedsNullCheckFlag, f); } void set_needs_null_check(bool f) { set_flag(NeedsNullCheckFlag, f); }
bool needs_null_check() const { return check_flag(NeedsNullCheckFlag); } bool needs_null_check() const { return check_flag(NeedsNullCheckFlag); }
bool is_linked() const { return check_flag(IsLinkedInBlockFlag); }
bool can_be_linked() { return as_Local() == NULL && as_Phi() == NULL; }
bool has_uses() const { return use_count() > 0; } bool has_uses() const { return use_count() > 0; }
bool is_root() const { return is_pinned() || use_count() > 1; } ValueStack* state_before() const { return _state_before; }
ValueStack* exception_state() const { return _exception_state; }
virtual bool needs_exception_state() const { return true; }
XHandlers* exception_handlers() const { return _exception_handlers; } XHandlers* exception_handlers() const { return _exception_handlers; }
// manipulation // manipulation
...@@ -403,19 +409,25 @@ class Instruction: public CompilationResourceObj { ...@@ -403,19 +409,25 @@ class Instruction: public CompilationResourceObj {
void pin() { _pin_state |= PinUnknown; } void pin() { _pin_state |= PinUnknown; }
// DANGEROUS: only used by EliminateStores // DANGEROUS: only used by EliminateStores
void unpin(PinReason reason) { assert((reason & PinUnknown) == 0, "can't unpin unknown state"); _pin_state &= ~reason; } void unpin(PinReason reason) { assert((reason & PinUnknown) == 0, "can't unpin unknown state"); _pin_state &= ~reason; }
virtual void set_lock_stack(ValueStack* l) { /* do nothing*/ }
virtual ValueStack* lock_stack() const { return NULL; }
Instruction* set_next(Instruction* next, int bci) { Instruction* set_next(Instruction* next) {
if (next != NULL) { assert(next->has_printable_bci(), "_printable_bci should have been set");
assert(as_BlockEnd() == NULL, "BlockEnd instructions must have no next"); assert(next != NULL, "must not be NULL");
assert(next->as_Phi() == NULL && next->as_Local() == NULL, "shouldn't link these instructions into list"); assert(as_BlockEnd() == NULL, "BlockEnd instructions must have no next");
next->set_bci(bci); assert(next->can_be_linked(), "shouldn't link these instructions into list");
}
next->set_flag(Instruction::IsLinkedInBlockFlag, true);
_next = next; _next = next;
return next; return next;
} }
Instruction* set_next(Instruction* next, int bci) {
#ifndef PRODUCT
next->set_printable_bci(bci);
#endif
return set_next(next);
}
void set_subst(Instruction* subst) { void set_subst(Instruction* subst) {
assert(subst == NULL || assert(subst == NULL ||
type()->base() == subst->type()->base() || type()->base() == subst->type()->base() ||
...@@ -423,14 +435,7 @@ class Instruction: public CompilationResourceObj { ...@@ -423,14 +435,7 @@ class Instruction: public CompilationResourceObj {
_subst = subst; _subst = subst;
} }
void set_exception_handlers(XHandlers *xhandlers) { _exception_handlers = xhandlers; } void set_exception_handlers(XHandlers *xhandlers) { _exception_handlers = xhandlers; }
void set_exception_state(ValueStack* s) { check_state(s); _exception_state = s; }
#ifdef ASSERT
// HiWord is used for debugging and is allocated early to avoid
// allocation at inconvenient points
HiWord* hi_word() { return _hi_word; }
void create_hi_word();
#endif
// machine-specifics // machine-specifics
void set_operand(LIR_Opr operand) { assert(operand != LIR_OprFact::illegalOpr, "operand must exist"); _operand = operand; } void set_operand(LIR_Opr operand) { assert(operand != LIR_OprFact::illegalOpr, "operand must exist"); _operand = operand; }
...@@ -438,7 +443,6 @@ class Instruction: public CompilationResourceObj { ...@@ -438,7 +443,6 @@ class Instruction: public CompilationResourceObj {
// generic // generic
virtual Instruction* as_Instruction() { return this; } // to satisfy HASHING1 macro virtual Instruction* as_Instruction() { return this; } // to satisfy HASHING1 macro
virtual HiWord* as_HiWord() { return NULL; }
virtual Phi* as_Phi() { return NULL; } virtual Phi* as_Phi() { return NULL; }
virtual Local* as_Local() { return NULL; } virtual Local* as_Local() { return NULL; }
virtual Constant* as_Constant() { return NULL; } virtual Constant* as_Constant() { return NULL; }
...@@ -493,7 +497,7 @@ class Instruction: public CompilationResourceObj { ...@@ -493,7 +497,7 @@ class Instruction: public CompilationResourceObj {
virtual bool can_trap() const { return false; } virtual bool can_trap() const { return false; }
virtual void input_values_do(ValueVisitor* f) = 0; virtual void input_values_do(ValueVisitor* f) = 0;
virtual void state_values_do(ValueVisitor* f) { /* usually no state - override on demand */ } virtual void state_values_do(ValueVisitor* f);
virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ } virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ }
void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); } void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); }
...@@ -505,6 +509,7 @@ class Instruction: public CompilationResourceObj { ...@@ -505,6 +509,7 @@ class Instruction: public CompilationResourceObj {
HASHING1(Instruction, false, id()) // hashing disabled by default HASHING1(Instruction, false, id()) // hashing disabled by default
// debugging // debugging
static void check_state(ValueStack* state) PRODUCT_RETURN;
void print() PRODUCT_RETURN; void print() PRODUCT_RETURN;
void print_line() PRODUCT_RETURN; void print_line() PRODUCT_RETURN;
void print(InstructionPrinter& ip) PRODUCT_RETURN; void print(InstructionPrinter& ip) PRODUCT_RETURN;
...@@ -541,40 +546,6 @@ class AssertValues: public ValueVisitor { ...@@ -541,40 +546,6 @@ class AssertValues: public ValueVisitor {
#endif // ASSERT #endif // ASSERT
// A HiWord occupies the 'high word' of a 2-word
// expression stack entry. Hi & lo words must be
// paired on the expression stack (otherwise the
// bytecode sequence is illegal). Note that 'hi'
// refers to the IR expression stack format and
// does *not* imply a machine word ordering. No
// HiWords are used in optimized mode for speed,
// but NULL pointers are used instead.
LEAF(HiWord, Instruction)
private:
Value _lo_word;
public:
// creation
HiWord(Value lo_word)
: Instruction(illegalType, false, false),
_lo_word(lo_word) {
// hi-words are also allowed for illegal lo-words
assert(lo_word->type()->is_double_word() || lo_word->type()->is_illegal(),
"HiWord must be used for 2-word values only");
}
// accessors
Value lo_word() const { return _lo_word->subst(); }
// for invalidating of HiWords
void make_illegal() { set_type(illegalType); }
// generic
virtual void input_values_do(ValueVisitor* f) { ShouldNotReachHere(); }
};
// A Phi is a phi function in the sense of SSA form. It stands for // A Phi is a phi function in the sense of SSA form. It stands for
// the value of a local variable at the beginning of a join block. // the value of a local variable at the beginning of a join block.
// A Phi consists of n operands, one for every incoming branch. // A Phi consists of n operands, one for every incoming branch.
...@@ -656,31 +627,25 @@ LEAF(Local, Instruction) ...@@ -656,31 +627,25 @@ LEAF(Local, Instruction)
LEAF(Constant, Instruction) LEAF(Constant, Instruction)
ValueStack* _state;
public: public:
// creation // creation
Constant(ValueType* type): Constant(ValueType* type):
Instruction(type, true) Instruction(type, NULL, true)
, _state(NULL) { {
assert(type->is_constant(), "must be a constant"); assert(type->is_constant(), "must be a constant");
} }
Constant(ValueType* type, ValueStack* state): Constant(ValueType* type, ValueStack* state_before):
Instruction(type, true) Instruction(type, state_before, true)
, _state(state) { {
assert(state != NULL, "only used for constants which need patching"); assert(state_before != NULL, "only used for constants which need patching");
assert(type->is_constant(), "must be a constant"); assert(type->is_constant(), "must be a constant");
// since it's patching it needs to be pinned // since it's patching it needs to be pinned
pin(); pin();
} }
ValueStack* state() const { return _state; } virtual bool can_trap() const { return state_before() != NULL; }
// generic
virtual bool can_trap() const { return state() != NULL; }
virtual void input_values_do(ValueVisitor* f) { /* no values */ } virtual void input_values_do(ValueVisitor* f) { /* no values */ }
virtual void other_values_do(ValueVisitor* f);
virtual intx hash() const; virtual intx hash() const;
virtual bool is_equal(Value v) const; virtual bool is_equal(Value v) const;
...@@ -695,20 +660,16 @@ BASE(AccessField, Instruction) ...@@ -695,20 +660,16 @@ BASE(AccessField, Instruction)
Value _obj; Value _obj;
int _offset; int _offset;
ciField* _field; ciField* _field;
ValueStack* _state_before; // state is set only for unloaded or uninitialized fields
ValueStack* _lock_stack; // contains lock and scope information
NullCheck* _explicit_null_check; // For explicit null check elimination NullCheck* _explicit_null_check; // For explicit null check elimination
public: public:
// creation // creation
AccessField(Value obj, int offset, ciField* field, bool is_static, ValueStack* lock_stack, AccessField(Value obj, int offset, ciField* field, bool is_static,
ValueStack* state_before, bool is_loaded, bool is_initialized) ValueStack* state_before, bool is_loaded, bool is_initialized)
: Instruction(as_ValueType(field->type()->basic_type())) : Instruction(as_ValueType(field->type()->basic_type()), state_before)
, _obj(obj) , _obj(obj)
, _offset(offset) , _offset(offset)
, _field(field) , _field(field)
, _lock_stack(lock_stack)
, _state_before(state_before)
, _explicit_null_check(NULL) , _explicit_null_check(NULL)
{ {
set_needs_null_check(!is_static); set_needs_null_check(!is_static);
...@@ -734,13 +695,11 @@ BASE(AccessField, Instruction) ...@@ -734,13 +695,11 @@ BASE(AccessField, Instruction)
bool is_static() const { return check_flag(IsStaticFlag); } bool is_static() const { return check_flag(IsStaticFlag); }
bool is_loaded() const { return check_flag(IsLoadedFlag); } bool is_loaded() const { return check_flag(IsLoadedFlag); }
bool is_initialized() const { return check_flag(IsInitializedFlag); } bool is_initialized() const { return check_flag(IsInitializedFlag); }
ValueStack* state_before() const { return _state_before; }
ValueStack* lock_stack() const { return _lock_stack; }
NullCheck* explicit_null_check() const { return _explicit_null_check; } NullCheck* explicit_null_check() const { return _explicit_null_check; }
bool needs_patching() const { return check_flag(NeedsPatchingFlag); } bool needs_patching() const { return check_flag(NeedsPatchingFlag); }
// manipulation // manipulation
void set_lock_stack(ValueStack* l) { _lock_stack = l; }
// Under certain circumstances, if a previous NullCheck instruction // Under certain circumstances, if a previous NullCheck instruction
// proved the target object non-null, we can eliminate the explicit // proved the target object non-null, we can eliminate the explicit
// null check and do an implicit one, simply specifying the debug // null check and do an implicit one, simply specifying the debug
...@@ -751,16 +710,15 @@ BASE(AccessField, Instruction) ...@@ -751,16 +710,15 @@ BASE(AccessField, Instruction)
// generic // generic
virtual bool can_trap() const { return needs_null_check() || needs_patching(); } virtual bool can_trap() const { return needs_null_check() || needs_patching(); }
virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); } virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); }
virtual void other_values_do(ValueVisitor* f);
}; };
LEAF(LoadField, AccessField) LEAF(LoadField, AccessField)
public: public:
// creation // creation
LoadField(Value obj, int offset, ciField* field, bool is_static, ValueStack* lock_stack, LoadField(Value obj, int offset, ciField* field, bool is_static,
ValueStack* state_before, bool is_loaded, bool is_initialized) ValueStack* state_before, bool is_loaded, bool is_initialized)
: AccessField(obj, offset, field, is_static, lock_stack, state_before, is_loaded, is_initialized) : AccessField(obj, offset, field, is_static, state_before, is_loaded, is_initialized)
{} {}
ciType* declared_type() const; ciType* declared_type() const;
...@@ -777,9 +735,9 @@ LEAF(StoreField, AccessField) ...@@ -777,9 +735,9 @@ LEAF(StoreField, AccessField)
public: public:
// creation // creation
StoreField(Value obj, int offset, ciField* field, Value value, bool is_static, ValueStack* lock_stack, StoreField(Value obj, int offset, ciField* field, Value value, bool is_static,
ValueStack* state_before, bool is_loaded, bool is_initialized) ValueStack* state_before, bool is_loaded, bool is_initialized)
: AccessField(obj, offset, field, is_static, lock_stack, state_before, is_loaded, is_initialized) : AccessField(obj, offset, field, is_static, state_before, is_loaded, is_initialized)
, _value(value) , _value(value)
{ {
set_flag(NeedsWriteBarrierFlag, as_ValueType(field_type())->is_object()); set_flag(NeedsWriteBarrierFlag, as_ValueType(field_type())->is_object());
...@@ -799,29 +757,23 @@ LEAF(StoreField, AccessField) ...@@ -799,29 +757,23 @@ LEAF(StoreField, AccessField)
BASE(AccessArray, Instruction) BASE(AccessArray, Instruction)
private: private:
Value _array; Value _array;
ValueStack* _lock_stack;
public: public:
// creation // creation
AccessArray(ValueType* type, Value array, ValueStack* lock_stack) AccessArray(ValueType* type, Value array, ValueStack* state_before)
: Instruction(type) : Instruction(type, state_before)
, _array(array) , _array(array)
, _lock_stack(lock_stack) { {
set_needs_null_check(true); set_needs_null_check(true);
ASSERT_VALUES ASSERT_VALUES
pin(); // instruction with side effect (null exception or range check throwing) pin(); // instruction with side effect (null exception or range check throwing)
} }
Value array() const { return _array; } Value array() const { return _array; }
ValueStack* lock_stack() const { return _lock_stack; }
// setters
void set_lock_stack(ValueStack* l) { _lock_stack = l; }
// generic // generic
virtual bool can_trap() const { return needs_null_check(); } virtual bool can_trap() const { return needs_null_check(); }
virtual void input_values_do(ValueVisitor* f) { f->visit(&_array); } virtual void input_values_do(ValueVisitor* f) { f->visit(&_array); }
virtual void other_values_do(ValueVisitor* f);
}; };
...@@ -831,8 +783,8 @@ LEAF(ArrayLength, AccessArray) ...@@ -831,8 +783,8 @@ LEAF(ArrayLength, AccessArray)
public: public:
// creation // creation
ArrayLength(Value array, ValueStack* lock_stack) ArrayLength(Value array, ValueStack* state_before)
: AccessArray(intType, array, lock_stack) : AccessArray(intType, array, state_before)
, _explicit_null_check(NULL) {} , _explicit_null_check(NULL) {}
// accessors // accessors
...@@ -855,8 +807,8 @@ BASE(AccessIndexed, AccessArray) ...@@ -855,8 +807,8 @@ BASE(AccessIndexed, AccessArray)
public: public:
// creation // creation
AccessIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* lock_stack) AccessIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before)
: AccessArray(as_ValueType(elt_type), array, lock_stack) : AccessArray(as_ValueType(elt_type), array, state_before)
, _index(index) , _index(index)
, _length(length) , _length(length)
, _elt_type(elt_type) , _elt_type(elt_type)
...@@ -883,8 +835,8 @@ LEAF(LoadIndexed, AccessIndexed) ...@@ -883,8 +835,8 @@ LEAF(LoadIndexed, AccessIndexed)
public: public:
// creation // creation
LoadIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* lock_stack) LoadIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before)
: AccessIndexed(array, index, length, elt_type, lock_stack) : AccessIndexed(array, index, length, elt_type, state_before)
, _explicit_null_check(NULL) {} , _explicit_null_check(NULL) {}
// accessors // accessors
...@@ -910,8 +862,8 @@ LEAF(StoreIndexed, AccessIndexed) ...@@ -910,8 +862,8 @@ LEAF(StoreIndexed, AccessIndexed)
int _profiled_bci; int _profiled_bci;
public: public:
// creation // creation
StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value, ValueStack* lock_stack) StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value, ValueStack* state_before)
: AccessIndexed(array, index, length, elt_type, lock_stack) : AccessIndexed(array, index, length, elt_type, state_before)
, _value(value), _profiled_method(NULL), _profiled_bci(0) , _value(value), _profiled_method(NULL), _profiled_bci(0)
{ {
set_flag(NeedsWriteBarrierFlag, (as_ValueType(elt_type)->is_object())); set_flag(NeedsWriteBarrierFlag, (as_ValueType(elt_type)->is_object()));
...@@ -922,7 +874,6 @@ LEAF(StoreIndexed, AccessIndexed) ...@@ -922,7 +874,6 @@ LEAF(StoreIndexed, AccessIndexed)
// accessors // accessors
Value value() const { return _value; } Value value() const { return _value; }
IRScope* scope() const; // the state's scope
bool needs_write_barrier() const { return check_flag(NeedsWriteBarrierFlag); } bool needs_write_barrier() const { return check_flag(NeedsWriteBarrierFlag); }
bool needs_store_check() const { return check_flag(NeedsStoreCheckFlag); } bool needs_store_check() const { return check_flag(NeedsStoreCheckFlag); }
// Helpers for methodDataOop profiling // Helpers for methodDataOop profiling
...@@ -963,7 +914,12 @@ BASE(Op2, Instruction) ...@@ -963,7 +914,12 @@ BASE(Op2, Instruction)
public: public:
// creation // creation
Op2(ValueType* type, Bytecodes::Code op, Value x, Value y) : Instruction(type), _op(op), _x(x), _y(y) { Op2(ValueType* type, Bytecodes::Code op, Value x, Value y, ValueStack* state_before = NULL)
: Instruction(type, state_before)
, _op(op)
, _x(x)
, _y(y)
{
ASSERT_VALUES ASSERT_VALUES
} }
...@@ -985,28 +941,21 @@ BASE(Op2, Instruction) ...@@ -985,28 +941,21 @@ BASE(Op2, Instruction)
LEAF(ArithmeticOp, Op2) LEAF(ArithmeticOp, Op2)
private:
ValueStack* _lock_stack; // used only for division operations
public: public:
// creation // creation
ArithmeticOp(Bytecodes::Code op, Value x, Value y, bool is_strictfp, ValueStack* lock_stack) ArithmeticOp(Bytecodes::Code op, Value x, Value y, bool is_strictfp, ValueStack* state_before)
: Op2(x->type()->meet(y->type()), op, x, y) : Op2(x->type()->meet(y->type()), op, x, y, state_before)
, _lock_stack(lock_stack) { {
set_flag(IsStrictfpFlag, is_strictfp); set_flag(IsStrictfpFlag, is_strictfp);
if (can_trap()) pin(); if (can_trap()) pin();
} }
// accessors // accessors
ValueStack* lock_stack() const { return _lock_stack; }
bool is_strictfp() const { return check_flag(IsStrictfpFlag); } bool is_strictfp() const { return check_flag(IsStrictfpFlag); }
// setters
void set_lock_stack(ValueStack* l) { _lock_stack = l; }
// generic // generic
virtual bool is_commutative() const; virtual bool is_commutative() const;
virtual bool can_trap() const; virtual bool can_trap() const;
virtual void other_values_do(ValueVisitor* f);
HASHING3(Op2, true, op(), x()->subst(), y()->subst()) HASHING3(Op2, true, op(), x()->subst(), y()->subst())
}; };
...@@ -1033,21 +982,14 @@ LEAF(LogicOp, Op2) ...@@ -1033,21 +982,14 @@ LEAF(LogicOp, Op2)
LEAF(CompareOp, Op2) LEAF(CompareOp, Op2)
private:
ValueStack* _state_before; // for deoptimization, when canonicalizing
public: public:
// creation // creation
CompareOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before) CompareOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before)
: Op2(intType, op, x, y) : Op2(intType, op, x, y, state_before)
, _state_before(state_before)
{} {}
// accessors
ValueStack* state_before() const { return _state_before; }
// generic // generic
HASHING3(Op2, true, op(), x()->subst(), y()->subst()) HASHING3(Op2, true, op(), x()->subst(), y()->subst())
virtual void other_values_do(ValueVisitor* f);
}; };
...@@ -1103,11 +1045,13 @@ LEAF(Convert, Instruction) ...@@ -1103,11 +1045,13 @@ LEAF(Convert, Instruction)
LEAF(NullCheck, Instruction) LEAF(NullCheck, Instruction)
private: private:
Value _obj; Value _obj;
ValueStack* _lock_stack;
public: public:
// creation // creation
NullCheck(Value obj, ValueStack* lock_stack) : Instruction(obj->type()->base()), _obj(obj), _lock_stack(lock_stack) { NullCheck(Value obj, ValueStack* state_before)
: Instruction(obj->type()->base(), state_before)
, _obj(obj)
{
ASSERT_VALUES ASSERT_VALUES
set_can_trap(true); set_can_trap(true);
assert(_obj->type()->is_object(), "null check must be applied to objects only"); assert(_obj->type()->is_object(), "null check must be applied to objects only");
...@@ -1116,16 +1060,13 @@ LEAF(NullCheck, Instruction) ...@@ -1116,16 +1060,13 @@ LEAF(NullCheck, Instruction)
// accessors // accessors
Value obj() const { return _obj; } Value obj() const { return _obj; }
ValueStack* lock_stack() const { return _lock_stack; }
// setters // setters
void set_lock_stack(ValueStack* l) { _lock_stack = l; }
void set_can_trap(bool can_trap) { set_flag(CanTrapFlag, can_trap); } void set_can_trap(bool can_trap) { set_flag(CanTrapFlag, can_trap); }
// generic // generic
virtual bool can_trap() const { return check_flag(CanTrapFlag); /* null-check elimination sets to false */ } virtual bool can_trap() const { return check_flag(CanTrapFlag); /* null-check elimination sets to false */ }
virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); } virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); }
virtual void other_values_do(ValueVisitor* f);
HASHING1(NullCheck, true, obj()->subst()) HASHING1(NullCheck, true, obj()->subst())
}; };
...@@ -1139,7 +1080,10 @@ BASE(StateSplit, Instruction) ...@@ -1139,7 +1080,10 @@ BASE(StateSplit, Instruction)
public: public:
// creation // creation
StateSplit(ValueType* type) : Instruction(type), _state(NULL) { StateSplit(ValueType* type, ValueStack* state_before = NULL)
: Instruction(type, state_before)
, _state(NULL)
{
pin(PinStateSplitConstructor); pin(PinStateSplitConstructor);
} }
...@@ -1148,7 +1092,7 @@ BASE(StateSplit, Instruction) ...@@ -1148,7 +1092,7 @@ BASE(StateSplit, Instruction)
IRScope* scope() const; // the state's scope IRScope* scope() const; // the state's scope
// manipulation // manipulation
void set_state(ValueStack* state) { _state = state; } void set_state(ValueStack* state) { assert(_state == NULL, "overwriting existing state"); check_state(state); _state = state; }
// generic // generic
virtual void input_values_do(ValueVisitor* f) { /* no values */ } virtual void input_values_do(ValueVisitor* f) { /* no values */ }
...@@ -1164,7 +1108,6 @@ LEAF(Invoke, StateSplit) ...@@ -1164,7 +1108,6 @@ LEAF(Invoke, StateSplit)
BasicTypeList* _signature; BasicTypeList* _signature;
int _vtable_index; int _vtable_index;
ciMethod* _target; ciMethod* _target;
ValueStack* _state_before; // Required for deoptimization.
public: public:
// creation // creation
...@@ -1180,7 +1123,6 @@ LEAF(Invoke, StateSplit) ...@@ -1180,7 +1123,6 @@ LEAF(Invoke, StateSplit)
int vtable_index() const { return _vtable_index; } int vtable_index() const { return _vtable_index; }
BasicTypeList* signature() const { return _signature; } BasicTypeList* signature() const { return _signature; }
ciMethod* target() const { return _target; } ciMethod* target() const { return _target; }
ValueStack* state_before() const { return _state_before; }
// Returns false if target is not loaded // Returns false if target is not loaded
bool target_is_final() const { return check_flag(TargetIsFinalFlag); } bool target_is_final() const { return check_flag(TargetIsFinalFlag); }
...@@ -1191,6 +1133,8 @@ LEAF(Invoke, StateSplit) ...@@ -1191,6 +1133,8 @@ LEAF(Invoke, StateSplit)
// JSR 292 support // JSR 292 support
bool is_invokedynamic() const { return code() == Bytecodes::_invokedynamic; } bool is_invokedynamic() const { return code() == Bytecodes::_invokedynamic; }
virtual bool needs_exception_state() const { return false; }
// generic // generic
virtual bool can_trap() const { return true; } virtual bool can_trap() const { return true; }
virtual void input_values_do(ValueVisitor* f) { virtual void input_values_do(ValueVisitor* f) {
...@@ -1208,11 +1152,16 @@ LEAF(NewInstance, StateSplit) ...@@ -1208,11 +1152,16 @@ LEAF(NewInstance, StateSplit)
public: public:
// creation // creation
NewInstance(ciInstanceKlass* klass) : StateSplit(instanceType), _klass(klass) {} NewInstance(ciInstanceKlass* klass, ValueStack* state_before)
: StateSplit(instanceType, state_before)
, _klass(klass)
{}
// accessors // accessors
ciInstanceKlass* klass() const { return _klass; } ciInstanceKlass* klass() const { return _klass; }
virtual bool needs_exception_state() const { return false; }
// generic // generic
virtual bool can_trap() const { return true; } virtual bool can_trap() const { return true; }
ciType* exact_type() const; ciType* exact_type() const;
...@@ -1222,22 +1171,24 @@ LEAF(NewInstance, StateSplit) ...@@ -1222,22 +1171,24 @@ LEAF(NewInstance, StateSplit)
BASE(NewArray, StateSplit) BASE(NewArray, StateSplit)
private: private:
Value _length; Value _length;
ValueStack* _state_before;
public: public:
// creation // creation
NewArray(Value length, ValueStack* state_before) : StateSplit(objectType), _length(length), _state_before(state_before) { NewArray(Value length, ValueStack* state_before)
: StateSplit(objectType, state_before)
, _length(length)
{
// Do not ASSERT_VALUES since length is NULL for NewMultiArray // Do not ASSERT_VALUES since length is NULL for NewMultiArray
} }
// accessors // accessors
ValueStack* state_before() const { return _state_before; }
Value length() const { return _length; } Value length() const { return _length; }
virtual bool needs_exception_state() const { return false; }
// generic // generic
virtual bool can_trap() const { return true; } virtual bool can_trap() const { return true; }
virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_length); } virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_length); }
virtual void other_values_do(ValueVisitor* f);
}; };
...@@ -1247,7 +1198,10 @@ LEAF(NewTypeArray, NewArray) ...@@ -1247,7 +1198,10 @@ LEAF(NewTypeArray, NewArray)
public: public:
// creation // creation
NewTypeArray(Value length, BasicType elt_type) : NewArray(length, NULL), _elt_type(elt_type) {} NewTypeArray(Value length, BasicType elt_type, ValueStack* state_before)
: NewArray(length, state_before)
, _elt_type(elt_type)
{}
// accessors // accessors
BasicType elt_type() const { return _elt_type; } BasicType elt_type() const { return _elt_type; }
...@@ -1303,7 +1257,6 @@ BASE(TypeCheck, StateSplit) ...@@ -1303,7 +1257,6 @@ BASE(TypeCheck, StateSplit)
private: private:
ciKlass* _klass; ciKlass* _klass;
Value _obj; Value _obj;
ValueStack* _state_before;
ciMethod* _profiled_method; ciMethod* _profiled_method;
int _profiled_bci; int _profiled_bci;
...@@ -1311,14 +1264,13 @@ BASE(TypeCheck, StateSplit) ...@@ -1311,14 +1264,13 @@ BASE(TypeCheck, StateSplit)
public: public:
// creation // creation
TypeCheck(ciKlass* klass, Value obj, ValueType* type, ValueStack* state_before) TypeCheck(ciKlass* klass, Value obj, ValueType* type, ValueStack* state_before)
: StateSplit(type), _klass(klass), _obj(obj), _state_before(state_before), : StateSplit(type, state_before), _klass(klass), _obj(obj),
_profiled_method(NULL), _profiled_bci(0) { _profiled_method(NULL), _profiled_bci(0) {
ASSERT_VALUES ASSERT_VALUES
set_direct_compare(false); set_direct_compare(false);
} }
// accessors // accessors
ValueStack* state_before() const { return _state_before; }
ciKlass* klass() const { return _klass; } ciKlass* klass() const { return _klass; }
Value obj() const { return _obj; } Value obj() const { return _obj; }
bool is_loaded() const { return klass() != NULL; } bool is_loaded() const { return klass() != NULL; }
...@@ -1330,7 +1282,6 @@ BASE(TypeCheck, StateSplit) ...@@ -1330,7 +1282,6 @@ BASE(TypeCheck, StateSplit)
// generic // generic
virtual bool can_trap() const { return true; } virtual bool can_trap() const { return true; }
virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_obj); } virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_obj); }
virtual void other_values_do(ValueVisitor* f);
// Helpers for methodDataOop profiling // Helpers for methodDataOop profiling
void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); }
...@@ -1364,6 +1315,8 @@ LEAF(InstanceOf, TypeCheck) ...@@ -1364,6 +1315,8 @@ LEAF(InstanceOf, TypeCheck)
public: public:
// creation // creation
InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {} InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {}
virtual bool needs_exception_state() const { return false; }
}; };
...@@ -1374,8 +1327,8 @@ BASE(AccessMonitor, StateSplit) ...@@ -1374,8 +1327,8 @@ BASE(AccessMonitor, StateSplit)
public: public:
// creation // creation
AccessMonitor(Value obj, int monitor_no) AccessMonitor(Value obj, int monitor_no, ValueStack* state_before = NULL)
: StateSplit(illegalType) : StateSplit(illegalType, state_before)
, _obj(obj) , _obj(obj)
, _monitor_no(monitor_no) , _monitor_no(monitor_no)
{ {
...@@ -1393,22 +1346,14 @@ BASE(AccessMonitor, StateSplit) ...@@ -1393,22 +1346,14 @@ BASE(AccessMonitor, StateSplit)
LEAF(MonitorEnter, AccessMonitor) LEAF(MonitorEnter, AccessMonitor)
private:
ValueStack* _lock_stack_before;
public: public:
// creation // creation
MonitorEnter(Value obj, int monitor_no, ValueStack* lock_stack_before) MonitorEnter(Value obj, int monitor_no, ValueStack* state_before)
: AccessMonitor(obj, monitor_no) : AccessMonitor(obj, monitor_no, state_before)
, _lock_stack_before(lock_stack_before)
{ {
ASSERT_VALUES ASSERT_VALUES
} }
// accessors
ValueStack* lock_stack_before() const { return _lock_stack_before; }
virtual void state_values_do(ValueVisitor* f);
// generic // generic
virtual bool can_trap() const { return true; } virtual bool can_trap() const { return true; }
}; };
...@@ -1417,7 +1362,11 @@ LEAF(MonitorEnter, AccessMonitor) ...@@ -1417,7 +1362,11 @@ LEAF(MonitorEnter, AccessMonitor)
LEAF(MonitorExit, AccessMonitor) LEAF(MonitorExit, AccessMonitor)
public: public:
// creation // creation
MonitorExit(Value obj, int monitor_no) : AccessMonitor(obj, monitor_no) {} MonitorExit(Value obj, int monitor_no)
: AccessMonitor(obj, monitor_no, NULL)
{
ASSERT_VALUES
}
}; };
...@@ -1425,7 +1374,6 @@ LEAF(Intrinsic, StateSplit) ...@@ -1425,7 +1374,6 @@ LEAF(Intrinsic, StateSplit)
private: private:
vmIntrinsics::ID _id; vmIntrinsics::ID _id;
Values* _args; Values* _args;
ValueStack* _lock_stack;
Value _recv; Value _recv;
public: public:
...@@ -1440,13 +1388,12 @@ LEAF(Intrinsic, StateSplit) ...@@ -1440,13 +1388,12 @@ LEAF(Intrinsic, StateSplit)
vmIntrinsics::ID id, vmIntrinsics::ID id,
Values* args, Values* args,
bool has_receiver, bool has_receiver,
ValueStack* lock_stack, ValueStack* state_before,
bool preserves_state, bool preserves_state,
bool cantrap = true) bool cantrap = true)
: StateSplit(type) : StateSplit(type, state_before)
, _id(id) , _id(id)
, _args(args) , _args(args)
, _lock_stack(lock_stack)
, _recv(NULL) , _recv(NULL)
{ {
assert(args != NULL, "args must exist"); assert(args != NULL, "args must exist");
...@@ -1468,7 +1415,6 @@ LEAF(Intrinsic, StateSplit) ...@@ -1468,7 +1415,6 @@ LEAF(Intrinsic, StateSplit)
vmIntrinsics::ID id() const { return _id; } vmIntrinsics::ID id() const { return _id; }
int number_of_arguments() const { return _args->length(); } int number_of_arguments() const { return _args->length(); }
Value argument_at(int i) const { return _args->at(i); } Value argument_at(int i) const { return _args->at(i); }
ValueStack* lock_stack() const { return _lock_stack; }
bool has_receiver() const { return (_recv != NULL); } bool has_receiver() const { return (_recv != NULL); }
Value receiver() const { assert(has_receiver(), "must have receiver"); return _recv; } Value receiver() const { assert(has_receiver(), "must have receiver"); return _recv; }
...@@ -1480,8 +1426,6 @@ LEAF(Intrinsic, StateSplit) ...@@ -1480,8 +1426,6 @@ LEAF(Intrinsic, StateSplit)
StateSplit::input_values_do(f); StateSplit::input_values_do(f);
for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i)); for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
} }
virtual void state_values_do(ValueVisitor* f);
}; };
...@@ -1490,6 +1434,7 @@ class LIR_List; ...@@ -1490,6 +1434,7 @@ class LIR_List;
LEAF(BlockBegin, StateSplit) LEAF(BlockBegin, StateSplit)
private: private:
int _block_id; // the unique block id int _block_id; // the unique block id
int _bci; // start-bci of block
int _depth_first_number; // number of this block in a depth-first ordering int _depth_first_number; // number of this block in a depth-first ordering
int _linear_scan_number; // number of this block in linear-scan ordering int _linear_scan_number; // number of this block in linear-scan ordering
int _loop_depth; // the loop nesting level of this block int _loop_depth; // the loop nesting level of this block
...@@ -1546,6 +1491,7 @@ LEAF(BlockBegin, StateSplit) ...@@ -1546,6 +1491,7 @@ LEAF(BlockBegin, StateSplit)
// creation // creation
BlockBegin(int bci) BlockBegin(int bci)
: StateSplit(illegalType) : StateSplit(illegalType)
, _bci(bci)
, _depth_first_number(-1) , _depth_first_number(-1)
, _linear_scan_number(-1) , _linear_scan_number(-1)
, _loop_depth(0) , _loop_depth(0)
...@@ -1570,11 +1516,14 @@ LEAF(BlockBegin, StateSplit) ...@@ -1570,11 +1516,14 @@ LEAF(BlockBegin, StateSplit)
, _total_preds(0) , _total_preds(0)
, _stores_to_locals() , _stores_to_locals()
{ {
set_bci(bci); #ifndef PRODUCT
set_printable_bci(bci);
#endif
} }
// accessors // accessors
int block_id() const { return _block_id; } int block_id() const { return _block_id; }
int bci() const { return _bci; }
BlockList* successors() { return &_successors; } BlockList* successors() { return &_successors; }
BlockBegin* dominator() const { return _dominator; } BlockBegin* dominator() const { return _dominator; }
int loop_depth() const { return _loop_depth; } int loop_depth() const { return _loop_depth; }
...@@ -1596,7 +1545,6 @@ LEAF(BlockBegin, StateSplit) ...@@ -1596,7 +1545,6 @@ LEAF(BlockBegin, StateSplit)
BitMap& stores_to_locals() { return _stores_to_locals; } BitMap& stores_to_locals() { return _stores_to_locals; }
// manipulation // manipulation
void set_bci(int bci) { Instruction::set_bci(bci); }
void set_dominator(BlockBegin* dom) { _dominator = dom; } void set_dominator(BlockBegin* dom) { _dominator = dom; }
void set_loop_depth(int d) { _loop_depth = d; } void set_loop_depth(int d) { _loop_depth = d; }
void set_depth_first_number(int dfn) { _depth_first_number = dfn; } void set_depth_first_number(int dfn) { _depth_first_number = dfn; }
...@@ -1694,7 +1642,6 @@ BASE(BlockEnd, StateSplit) ...@@ -1694,7 +1642,6 @@ BASE(BlockEnd, StateSplit)
private: private:
BlockBegin* _begin; BlockBegin* _begin;
BlockList* _sux; BlockList* _sux;
ValueStack* _state_before;
protected: protected:
BlockList* sux() const { return _sux; } BlockList* sux() const { return _sux; }
...@@ -1710,24 +1657,20 @@ BASE(BlockEnd, StateSplit) ...@@ -1710,24 +1657,20 @@ BASE(BlockEnd, StateSplit)
public: public:
// creation // creation
BlockEnd(ValueType* type, ValueStack* state_before, bool is_safepoint) BlockEnd(ValueType* type, ValueStack* state_before, bool is_safepoint)
: StateSplit(type) : StateSplit(type, state_before)
, _begin(NULL) , _begin(NULL)
, _sux(NULL) , _sux(NULL)
, _state_before(state_before) { {
set_flag(IsSafepointFlag, is_safepoint); set_flag(IsSafepointFlag, is_safepoint);
} }
// accessors // accessors
ValueStack* state_before() const { return _state_before; }
bool is_safepoint() const { return check_flag(IsSafepointFlag); } bool is_safepoint() const { return check_flag(IsSafepointFlag); }
BlockBegin* begin() const { return _begin; } BlockBegin* begin() const { return _begin; }
// manipulation // manipulation
void set_begin(BlockBegin* begin); void set_begin(BlockBegin* begin);
// generic
virtual void other_values_do(ValueVisitor* f);
// successors // successors
int number_of_sux() const { return _sux != NULL ? _sux->length() : 0; } int number_of_sux() const { return _sux != NULL ? _sux->length() : 0; }
BlockBegin* sux_at(int i) const { return _sux->at(i); } BlockBegin* sux_at(int i) const { return _sux->at(i); }
...@@ -1919,6 +1862,8 @@ BASE(Switch, BlockEnd) ...@@ -1919,6 +1862,8 @@ BASE(Switch, BlockEnd)
Value tag() const { return _tag; } Value tag() const { return _tag; }
int length() const { return number_of_sux() - 1; } int length() const { return number_of_sux() - 1; }
virtual bool needs_exception_state() const { return false; }
// generic // generic
virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_tag); } virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_tag); }
}; };
...@@ -1996,7 +1941,6 @@ LEAF(Throw, BlockEnd) ...@@ -1996,7 +1941,6 @@ LEAF(Throw, BlockEnd)
// generic // generic
virtual bool can_trap() const { return true; } virtual bool can_trap() const { return true; }
virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_exception); } virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_exception); }
virtual void state_values_do(ValueVisitor* f);
}; };
...@@ -2091,7 +2035,6 @@ BASE(UnsafeOp, Instruction) ...@@ -2091,7 +2035,6 @@ BASE(UnsafeOp, Instruction)
// generic // generic
virtual void input_values_do(ValueVisitor* f) { } virtual void input_values_do(ValueVisitor* f) { }
virtual void other_values_do(ValueVisitor* f) { }
}; };
......
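To see how the reworked Instruction header above is meant to be used, here is a hedged usage sketch in the style of an appender; the helper name is hypothetical and not part of the patch, but it relies only on members shown above (can_be_linked and the two set_next overloads).

// Hypothetical append helper, illustration only. 'last' is the current tail of
// a block's instruction list, 'cur' a freshly created instruction whose
// state_before was already handed to the Instruction constructor.
Instruction* append_at(Instruction* last, Instruction* cur, int cur_bci) {
  assert(cur->can_be_linked(), "Phi/Local are never linked into a block");
  // set_next(next, bci) records the printable bci in non-product builds, then
  // delegates to set_next(next), which marks cur with IsLinkedInBlockFlag.
  return last->set_next(cur, cur_bci);
}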
...@@ -316,7 +316,7 @@ void InstructionPrinter::print_head() { ...@@ -316,7 +316,7 @@ void InstructionPrinter::print_head() {
void InstructionPrinter::print_line(Instruction* instr) { void InstructionPrinter::print_line(Instruction* instr) {
// print instruction data on one line // print instruction data on one line
if (instr->is_pinned()) output()->put('.'); if (instr->is_pinned()) output()->put('.');
fill_to(bci_pos ); output()->print("%d", instr->bci()); fill_to(bci_pos ); output()->print("%d", instr->printable_bci());
fill_to(use_pos ); output()->print("%d", instr->use_count()); fill_to(use_pos ); output()->print("%d", instr->use_count());
fill_to(temp_pos ); print_temp(instr); fill_to(temp_pos ); print_temp(instr);
fill_to(instr_pos); print_instr(instr); fill_to(instr_pos); print_instr(instr);
...@@ -569,7 +569,7 @@ void InstructionPrinter::do_BlockBegin(BlockBegin* x) { ...@@ -569,7 +569,7 @@ void InstructionPrinter::do_BlockBegin(BlockBegin* x) {
if (printed_flag) output()->print(") "); if (printed_flag) output()->print(") ");
// print block bci range // print block bci range
output()->print("[%d, %d]", x->bci(), (end == NULL ? -1 : end->bci())); output()->print("[%d, %d]", x->bci(), (end == NULL ? -1 : end->printable_bci()));
// print block successors // print block successors
if (end != NULL && end->number_of_sux() > 0) { if (end != NULL && end->number_of_sux() > 0) {
......
...@@ -1520,7 +1520,7 @@ static void print_block(BlockBegin* x) { ...@@ -1520,7 +1520,7 @@ static void print_block(BlockBegin* x) {
if (x->is_set(BlockBegin::linear_scan_loop_end_flag)) tty->print("le "); if (x->is_set(BlockBegin::linear_scan_loop_end_flag)) tty->print("le ");
// print block bci range // print block bci range
tty->print("[%d, %d] ", x->bci(), (end == NULL ? -1 : end->bci())); tty->print("[%d, %d] ", x->bci(), (end == NULL ? -1 : end->printable_bci()));
// print predecessors and successors // print predecessors and successors
if (x->number_of_preds() > 0) { if (x->number_of_preds() > 0) {
...@@ -1576,7 +1576,7 @@ void LIR_Op::print_on(outputStream* out) const { ...@@ -1576,7 +1576,7 @@ void LIR_Op::print_on(outputStream* out) const {
} }
out->print(name()); out->print(" "); out->print(name()); out->print(" ");
print_instr(out); print_instr(out);
if (info() != NULL) out->print(" [bci:%d]", info()->bci()); if (info() != NULL) out->print(" [bci:%d]", info()->stack()->bci());
#ifdef ASSERT #ifdef ASSERT
if (Verbose && _file != NULL) { if (Verbose && _file != NULL) {
out->print(" (%s:%d)", _file, _line); out->print(" (%s:%d)", _file, _line);
...@@ -1781,7 +1781,7 @@ void LIR_OpBranch::print_instr(outputStream* out) const { ...@@ -1781,7 +1781,7 @@ void LIR_OpBranch::print_instr(outputStream* out) const {
out->print("["); out->print("[");
stub()->print_name(out); stub()->print_name(out);
out->print(": 0x%x]", stub()); out->print(": 0x%x]", stub());
if (stub()->info() != NULL) out->print(" [bci:%d]", stub()->info()->bci()); if (stub()->info() != NULL) out->print(" [bci:%d]", stub()->info()->stack()->bci());
} else { } else {
out->print("[label:0x%x] ", label()); out->print("[label:0x%x] ", label());
} }
...@@ -1896,7 +1896,7 @@ void LIR_OpTypeCheck::print_instr(outputStream* out) const { ...@@ -1896,7 +1896,7 @@ void LIR_OpTypeCheck::print_instr(outputStream* out) const {
tmp2()->print(out); out->print(" "); tmp2()->print(out); out->print(" ");
tmp3()->print(out); out->print(" "); tmp3()->print(out); out->print(" ");
result_opr()->print(out); out->print(" "); result_opr()->print(out); out->print(" ");
if (info_for_exception() != NULL) out->print(" [bci:%d]", info_for_exception()->bci()); if (info_for_exception() != NULL) out->print(" [bci:%d]", info_for_exception()->stack()->bci());
} }
......
...@@ -35,7 +35,7 @@ void LIR_Assembler::patching_epilog(PatchingStub* patch, LIR_PatchCode patch_cod ...@@ -35,7 +35,7 @@ void LIR_Assembler::patching_epilog(PatchingStub* patch, LIR_PatchCode patch_cod
append_patching_stub(patch); append_patching_stub(patch);
#ifdef ASSERT #ifdef ASSERT
Bytecodes::Code code = info->scope()->method()->java_code_at_bci(info->bci()); Bytecodes::Code code = info->scope()->method()->java_code_at_bci(info->stack()->bci());
if (patch->id() == PatchingStub::access_field_id) { if (patch->id() == PatchingStub::access_field_id) {
switch (code) { switch (code) {
case Bytecodes::_putstatic: case Bytecodes::_putstatic:
...@@ -221,7 +221,7 @@ void LIR_Assembler::emit_block(BlockBegin* block) { ...@@ -221,7 +221,7 @@ void LIR_Assembler::emit_block(BlockBegin* block) {
#ifndef PRODUCT #ifndef PRODUCT
if (CommentedAssembly) { if (CommentedAssembly) {
stringStream st; stringStream st;
st.print_cr(" block B%d [%d, %d]", block->block_id(), block->bci(), block->end()->bci()); st.print_cr(" block B%d [%d, %d]", block->block_id(), block->bci(), block->end()->printable_bci());
_masm->block_comment(st.as_string()); _masm->block_comment(st.as_string());
} }
#endif #endif
...@@ -312,7 +312,7 @@ void LIR_Assembler::add_call_info(int pc_offset, CodeEmitInfo* cinfo) { ...@@ -312,7 +312,7 @@ void LIR_Assembler::add_call_info(int pc_offset, CodeEmitInfo* cinfo) {
static ValueStack* debug_info(Instruction* ins) { static ValueStack* debug_info(Instruction* ins) {
StateSplit* ss = ins->as_StateSplit(); StateSplit* ss = ins->as_StateSplit();
if (ss != NULL) return ss->state(); if (ss != NULL) return ss->state();
return ins->lock_stack(); return ins->state_before();
} }
void LIR_Assembler::process_debug_info(LIR_Op* op) { void LIR_Assembler::process_debug_info(LIR_Op* op) {
...@@ -327,8 +327,7 @@ void LIR_Assembler::process_debug_info(LIR_Op* op) { ...@@ -327,8 +327,7 @@ void LIR_Assembler::process_debug_info(LIR_Op* op) {
if (vstack == NULL) return; if (vstack == NULL) return;
if (_pending_non_safepoint != NULL) { if (_pending_non_safepoint != NULL) {
// Got some old debug info. Get rid of it. // Got some old debug info. Get rid of it.
if (_pending_non_safepoint->bci() == src->bci() && if (debug_info(_pending_non_safepoint) == vstack) {
debug_info(_pending_non_safepoint) == vstack) {
_pending_non_safepoint_offset = pc_offset; _pending_non_safepoint_offset = pc_offset;
return; return;
} }
...@@ -358,7 +357,7 @@ static ValueStack* nth_oldest(ValueStack* s, int n, int& bci_result) { ...@@ -358,7 +357,7 @@ static ValueStack* nth_oldest(ValueStack* s, int n, int& bci_result) {
ValueStack* tc = t->caller_state(); ValueStack* tc = t->caller_state();
if (tc == NULL) return s; if (tc == NULL) return s;
t = tc; t = tc;
bci_result = s->scope()->caller_bci(); bci_result = tc->bci();
s = s->caller_state(); s = s->caller_state();
} }
} }
...@@ -366,7 +365,7 @@ static ValueStack* nth_oldest(ValueStack* s, int n, int& bci_result) { ...@@ -366,7 +365,7 @@ static ValueStack* nth_oldest(ValueStack* s, int n, int& bci_result) {
void LIR_Assembler::record_non_safepoint_debug_info() { void LIR_Assembler::record_non_safepoint_debug_info() {
int pc_offset = _pending_non_safepoint_offset; int pc_offset = _pending_non_safepoint_offset;
ValueStack* vstack = debug_info(_pending_non_safepoint); ValueStack* vstack = debug_info(_pending_non_safepoint);
int bci = _pending_non_safepoint->bci(); int bci = vstack->bci();
DebugInformationRecorder* debug_info = compilation()->debug_info_recorder(); DebugInformationRecorder* debug_info = compilation()->debug_info_recorder();
assert(debug_info->recording_non_safepoints(), "sanity"); assert(debug_info->recording_non_safepoints(), "sanity");
...@@ -380,7 +379,7 @@ void LIR_Assembler::record_non_safepoint_debug_info() { ...@@ -380,7 +379,7 @@ void LIR_Assembler::record_non_safepoint_debug_info() {
if (s == NULL) break; if (s == NULL) break;
IRScope* scope = s->scope(); IRScope* scope = s->scope();
//Always pass false for reexecute since these ScopeDescs are never used for deopt //Always pass false for reexecute since these ScopeDescs are never used for deopt
debug_info->describe_scope(pc_offset, scope->method(), s_bci, false/*reexecute*/); debug_info->describe_scope(pc_offset, scope->method(), s->bci(), false/*reexecute*/);
} }
debug_info->end_non_safepoint(pc_offset); debug_info->end_non_safepoint(pc_offset);
......
...@@ -386,18 +386,26 @@ void LIRGenerator::walk(Value instr) { ...@@ -386,18 +386,26 @@ void LIRGenerator::walk(Value instr) {
CodeEmitInfo* LIRGenerator::state_for(Instruction* x, ValueStack* state, bool ignore_xhandler) { CodeEmitInfo* LIRGenerator::state_for(Instruction* x, ValueStack* state, bool ignore_xhandler) {
int index; assert(state != NULL, "state must be defined");
Value value;
for_each_stack_value(state, index, value) {
assert(value->subst() == value, "missed substition");
if (!value->is_pinned() && value->as_Constant() == NULL && value->as_Local() == NULL) {
walk(value);
assert(value->operand()->is_valid(), "must be evaluated now");
}
}
ValueStack* s = state; ValueStack* s = state;
int bci = x->bci();
for_each_state(s) { for_each_state(s) {
if (s->kind() == ValueStack::EmptyExceptionState) {
assert(s->stack_size() == 0 && s->locals_size() == 0 && (s->locks_size() == 0 || s->locks_size() == 1), "state must be empty");
continue;
}
int index;
Value value;
for_each_stack_value(s, index, value) {
assert(value->subst() == value, "missed substitution");
if (!value->is_pinned() && value->as_Constant() == NULL && value->as_Local() == NULL) {
walk(value);
assert(value->operand()->is_valid(), "must be evaluated now");
}
}
int bci = s->bci();
IRScope* scope = s->scope(); IRScope* scope = s->scope();
ciMethod* method = scope->method(); ciMethod* method = scope->method();
...@@ -428,15 +436,14 @@ CodeEmitInfo* LIRGenerator::state_for(Instruction* x, ValueStack* state, bool ig ...@@ -428,15 +436,14 @@ CodeEmitInfo* LIRGenerator::state_for(Instruction* x, ValueStack* state, bool ig
} }
} }
} }
bci = scope->caller_bci();
} }
return new CodeEmitInfo(x->bci(), state, ignore_xhandler ? NULL : x->exception_handlers()); return new CodeEmitInfo(state, ignore_xhandler ? NULL : x->exception_handlers());
} }
CodeEmitInfo* LIRGenerator::state_for(Instruction* x) { CodeEmitInfo* LIRGenerator::state_for(Instruction* x) {
return state_for(x, x->lock_stack()); return state_for(x, x->exception_state());
} }
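The rewritten state_for above takes the bci from each ValueStack while walking outward, instead of reading x->bci() once and tracking scope->caller_bci() by hand. A minimal sketch of that traversal pattern, with a hypothetical callback standing in for the real per-scope debug-info recording:

// Sketch only: mirrors the for_each_state loop in the patch. Every ValueStack
// now carries its own bci and expression stack, so the walk needs no separate
// caller_bci bookkeeping.
template <typename RecordScope>
void walk_states(ValueStack* state, RecordScope record_scope) {
  for (ValueStack* s = state; s != NULL; s = s->caller_state()) {
    record_scope(s->scope()->method(), s->bci(), s);  // one caller level per step
  }
}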
...@@ -900,18 +907,14 @@ void LIRGenerator::move_to_phi(ValueStack* cur_state) { ...@@ -900,18 +907,14 @@ void LIRGenerator::move_to_phi(ValueStack* cur_state) {
Value sux_value; Value sux_value;
int index; int index;
assert(cur_state->scope() == sux_state->scope(), "not matching");
assert(cur_state->locals_size() == sux_state->locals_size(), "not matching");
assert(cur_state->stack_size() == sux_state->stack_size(), "not matching");
for_each_stack_value(sux_state, index, sux_value) { for_each_stack_value(sux_state, index, sux_value) {
move_to_phi(&resolver, cur_state->stack_at(index), sux_value); move_to_phi(&resolver, cur_state->stack_at(index), sux_value);
} }
// Inlining may cause the local state not to match up, so walk up
// the caller state until we get to the same scope as the
// successor and then start processing from there.
while (cur_state->scope() != sux_state->scope()) {
cur_state = cur_state->caller_state();
assert(cur_state != NULL, "scopes don't match up");
}
for_each_local_value(sux_state, index, sux_value) { for_each_local_value(sux_state, index, sux_value) {
move_to_phi(&resolver, cur_state->local_at(index), sux_value); move_to_phi(&resolver, cur_state->local_at(index), sux_value);
} }
...@@ -1023,10 +1026,10 @@ void LIRGenerator::do_Phi(Phi* x) { ...@@ -1023,10 +1026,10 @@ void LIRGenerator::do_Phi(Phi* x) {
// Code for a constant is generated lazily unless the constant is frequently used and can't be inlined. // Code for a constant is generated lazily unless the constant is frequently used and can't be inlined.
void LIRGenerator::do_Constant(Constant* x) { void LIRGenerator::do_Constant(Constant* x) {
if (x->state() != NULL) { if (x->state_before() != NULL) {
// Any constant with a ValueStack requires patching so emit the patch here // Any constant with a ValueStack requires patching so emit the patch here
LIR_Opr reg = rlock_result(x); LIR_Opr reg = rlock_result(x);
CodeEmitInfo* info = state_for(x, x->state()); CodeEmitInfo* info = state_for(x, x->state_before());
__ oop2reg_patch(NULL, reg, info); __ oop2reg_patch(NULL, reg, info);
} else if (x->use_count() > 1 && !can_inline_as_constant(x)) { } else if (x->use_count() > 1 && !can_inline_as_constant(x)) {
if (!x->is_pinned()) { if (!x->is_pinned()) {
...@@ -1102,7 +1105,7 @@ void LIRGenerator::do_getClass(Intrinsic* x) { ...@@ -1102,7 +1105,7 @@ void LIRGenerator::do_getClass(Intrinsic* x) {
// need to perform the null check on the rcvr // need to perform the null check on the rcvr
CodeEmitInfo* info = NULL; CodeEmitInfo* info = NULL;
if (x->needs_null_check()) { if (x->needs_null_check()) {
info = state_for(x, x->state()->copy_locks()); info = state_for(x);
} }
__ move(new LIR_Address(rcvr.result(), oopDesc::klass_offset_in_bytes(), T_OBJECT), result, info); __ move(new LIR_Address(rcvr.result(), oopDesc::klass_offset_in_bytes(), T_OBJECT), result, info);
__ move(new LIR_Address(result, Klass::java_mirror_offset_in_bytes() + __ move(new LIR_Address(result, Klass::java_mirror_offset_in_bytes() +
...@@ -1481,7 +1484,7 @@ void LIRGenerator::do_StoreField(StoreField* x) { ...@@ -1481,7 +1484,7 @@ void LIRGenerator::do_StoreField(StoreField* x) {
} else if (x->needs_null_check()) { } else if (x->needs_null_check()) {
NullCheck* nc = x->explicit_null_check(); NullCheck* nc = x->explicit_null_check();
if (nc == NULL) { if (nc == NULL) {
info = state_for(x, x->lock_stack()); info = state_for(x);
} else { } else {
info = state_for(nc); info = state_for(nc);
} }
...@@ -1509,10 +1512,12 @@ void LIRGenerator::do_StoreField(StoreField* x) { ...@@ -1509,10 +1512,12 @@ void LIRGenerator::do_StoreField(StoreField* x) {
set_no_result(x); set_no_result(x);
#ifndef PRODUCT
if (PrintNotLoaded && needs_patching) { if (PrintNotLoaded && needs_patching) {
tty->print_cr(" ###class not loaded at store_%s bci %d", tty->print_cr(" ###class not loaded at store_%s bci %d",
x->is_static() ? "static" : "field", x->bci()); x->is_static() ? "static" : "field", x->printable_bci());
} }
#endif
if (x->needs_null_check() && if (x->needs_null_check() &&
(needs_patching || (needs_patching ||
...@@ -1575,7 +1580,7 @@ void LIRGenerator::do_LoadField(LoadField* x) { ...@@ -1575,7 +1580,7 @@ void LIRGenerator::do_LoadField(LoadField* x) {
} else if (x->needs_null_check()) { } else if (x->needs_null_check()) {
NullCheck* nc = x->explicit_null_check(); NullCheck* nc = x->explicit_null_check();
if (nc == NULL) { if (nc == NULL) {
info = state_for(x, x->lock_stack()); info = state_for(x);
} else { } else {
info = state_for(nc); info = state_for(nc);
} }
...@@ -1585,10 +1590,12 @@ void LIRGenerator::do_LoadField(LoadField* x) { ...@@ -1585,10 +1590,12 @@ void LIRGenerator::do_LoadField(LoadField* x) {
object.load_item(); object.load_item();
#ifndef PRODUCT
if (PrintNotLoaded && needs_patching) { if (PrintNotLoaded && needs_patching) {
tty->print_cr(" ###class not loaded at load_%s bci %d", tty->print_cr(" ###class not loaded at load_%s bci %d",
x->is_static() ? "static" : "field", x->bci()); x->is_static() ? "static" : "field", x->printable_bci());
} }
#endif
if (x->needs_null_check() && if (x->needs_null_check() &&
(needs_patching || (needs_patching ||
...@@ -1781,7 +1788,7 @@ void LIRGenerator::do_Throw(Throw* x) { ...@@ -1781,7 +1788,7 @@ void LIRGenerator::do_Throw(Throw* x) {
if (GenerateCompilerNullChecks && if (GenerateCompilerNullChecks &&
(x->exception()->as_NewInstance() == NULL && x->exception()->as_ExceptionObject() == NULL)) { (x->exception()->as_NewInstance() == NULL && x->exception()->as_ExceptionObject() == NULL)) {
// if the exception object wasn't created using new then it might be null. // if the exception object wasn't created using new then it might be null.
__ null_check(exception_opr, new CodeEmitInfo(info, true)); __ null_check(exception_opr, new CodeEmitInfo(info, x->state()->copy(ValueStack::ExceptionState, x->state()->bci())));
} }
if (compilation()->env()->jvmti_can_post_on_exceptions()) { if (compilation()->env()->jvmti_can_post_on_exceptions()) {
...@@ -2127,7 +2134,6 @@ void LIRGenerator::do_TableSwitch(TableSwitch* x) { ...@@ -2127,7 +2134,6 @@ void LIRGenerator::do_TableSwitch(TableSwitch* x) {
int lo_key = x->lo_key(); int lo_key = x->lo_key();
int hi_key = x->hi_key(); int hi_key = x->hi_key();
int len = x->length(); int len = x->length();
CodeEmitInfo* info = state_for(x, x->state());
LIR_Opr value = tag.result(); LIR_Opr value = tag.result();
if (UseTableRanges) { if (UseTableRanges) {
do_SwitchRanges(create_lookup_ranges(x), value, x->default_sux()); do_SwitchRanges(create_lookup_ranges(x), value, x->default_sux());
...@@ -2186,7 +2192,7 @@ void LIRGenerator::do_Goto(Goto* x) { ...@@ -2186,7 +2192,7 @@ void LIRGenerator::do_Goto(Goto* x) {
// increment backedge counter if needed // increment backedge counter if needed
CodeEmitInfo* info = state_for(x, state); CodeEmitInfo* info = state_for(x, state);
increment_backedge_counter(info, info->bci()); increment_backedge_counter(info, info->stack()->bci());
CodeEmitInfo* safepoint_info = state_for(x, state); CodeEmitInfo* safepoint_info = state_for(x, state);
__ safepoint(safepoint_poll_register(), safepoint_info); __ safepoint(safepoint_poll_register(), safepoint_info);
} }
...@@ -2293,7 +2299,7 @@ void LIRGenerator::do_Base(Base* x) { ...@@ -2293,7 +2299,7 @@ void LIRGenerator::do_Base(Base* x) {
LIR_Opr lock = new_register(T_INT); LIR_Opr lock = new_register(T_INT);
__ load_stack_address_monitor(0, lock); __ load_stack_address_monitor(0, lock);
CodeEmitInfo* info = new CodeEmitInfo(SynchronizationEntryBCI, scope()->start()->state(), NULL); CodeEmitInfo* info = new CodeEmitInfo(scope()->start()->state()->copy(ValueStack::StateBefore, SynchronizationEntryBCI), NULL);
CodeStub* slow_path = new MonitorEnterStub(obj, lock, info); CodeStub* slow_path = new MonitorEnterStub(obj, lock, info);
// receiver is guaranteed non-NULL so don't need CodeEmitInfo // receiver is guaranteed non-NULL so don't need CodeEmitInfo
...@@ -2303,7 +2309,7 @@ void LIRGenerator::do_Base(Base* x) { ...@@ -2303,7 +2309,7 @@ void LIRGenerator::do_Base(Base* x) {
// increment invocation counters if needed // increment invocation counters if needed
if (!method()->is_accessor()) { // Accessors do not have MDOs, so no counting. if (!method()->is_accessor()) { // Accessors do not have MDOs, so no counting.
CodeEmitInfo* info = new CodeEmitInfo(InvocationEntryBci, scope()->start()->state(), NULL); CodeEmitInfo* info = new CodeEmitInfo(scope()->start()->state(), NULL);
increment_invocation_counter(info); increment_invocation_counter(info);
} }
...@@ -2463,7 +2469,7 @@ void LIRGenerator::do_Invoke(Invoke* x) { ...@@ -2463,7 +2469,7 @@ void LIRGenerator::do_Invoke(Invoke* x) {
break; break;
case Bytecodes::_invokedynamic: { case Bytecodes::_invokedynamic: {
ciBytecodeStream bcs(x->scope()->method()); ciBytecodeStream bcs(x->scope()->method());
bcs.force_bci(x->bci()); bcs.force_bci(x->state()->bci());
assert(bcs.cur_bc() == Bytecodes::_invokedynamic, "wrong stream"); assert(bcs.cur_bc() == Bytecodes::_invokedynamic, "wrong stream");
ciCPCache* cpcache = bcs.get_cpcache(); ciCPCache* cpcache = bcs.get_cpcache();
......
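Editor's note: a recurring pattern in the LIRGenerator changes above is that CodeEmitInfo no longer carries its own bci; the bci now lives in the ValueStack attached to the info, so call sites ask info->stack()->bci(). The following is a minimal, self-contained sketch of that relationship using hypothetical stand-in types (ValueStackModel, CodeEmitInfoModel), not the real HotSpot classes.

#include <cassert>

struct ValueStackModel {
  int _bci;                          // bytecode index is stored in the state itself
  explicit ValueStackModel(int bci) : _bci(bci) {}
  int bci() const { return _bci; }
};

struct CodeEmitInfoModel {
  ValueStackModel* _stack;           // no separate _bci field any more
  explicit CodeEmitInfoModel(ValueStackModel* s) : _stack(s) {}
  ValueStackModel* stack() const { return _stack; }
};

int main() {
  ValueStackModel state(42);         // state created at bci 42
  CodeEmitInfoModel info(&state);
  // code that used to ask info->bci() now goes through the stack
  assert(info.stack()->bci() == 42);
  return 0;
}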
...@@ -2274,8 +2274,8 @@ void assert_equal(IRScopeDebugInfo* d1, IRScopeDebugInfo* d2) { ...@@ -2274,8 +2274,8 @@ void assert_equal(IRScopeDebugInfo* d1, IRScopeDebugInfo* d2) {
} }
void check_stack_depth(CodeEmitInfo* info, int stack_end) { void check_stack_depth(CodeEmitInfo* info, int stack_end) {
if (info->bci() != SynchronizationEntryBCI && !info->scope()->method()->is_native()) { if (info->stack()->bci() != SynchronizationEntryBCI && !info->scope()->method()->is_native()) {
Bytecodes::Code code = info->scope()->method()->java_code_at_bci(info->bci()); Bytecodes::Code code = info->scope()->method()->java_code_at_bci(info->stack()->bci());
switch (code) { switch (code) {
case Bytecodes::_ifnull : // fall through case Bytecodes::_ifnull : // fall through
case Bytecodes::_ifnonnull : // fall through case Bytecodes::_ifnonnull : // fall through
...@@ -2379,7 +2379,7 @@ OopMap* LinearScan::compute_oop_map(IntervalWalker* iw, LIR_Op* op, CodeEmitInfo ...@@ -2379,7 +2379,7 @@ OopMap* LinearScan::compute_oop_map(IntervalWalker* iw, LIR_Op* op, CodeEmitInfo
// add oops from lock stack // add oops from lock stack
assert(info->stack() != NULL, "CodeEmitInfo must always have a stack"); assert(info->stack() != NULL, "CodeEmitInfo must always have a stack");
int locks_count = info->stack()->locks_size(); int locks_count = info->stack()->total_locks_size();
for (int i = 0; i < locks_count; i++) { for (int i = 0; i < locks_count; i++) {
map->set_oop(frame_map()->monitor_object_regname(i)); map->set_oop(frame_map()->monitor_object_regname(i));
} }
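Editor's note: the oop-map hunk above switches from locks_size() to total_locks_size(), i.e. the monitor count summed over the whole caller-state chain. A rough standalone sketch of that accumulation, with a simplified StateModel in place of ValueStack, is shown below; the lock_offset computation mirrors the one used later in compute_debug_info_for_scope.

#include <cassert>
#include <vector>

struct StateModel {
  std::vector<int> locks;            // one entry per monitor locked in this scope
  StateModel* caller;                // state of the inlining caller, or nullptr

  int locks_size() const { return (int)locks.size(); }

  // walk this state and all caller states, mirroring ValueStack::total_locks_size
  int total_locks_size() const {
    int n = 0;
    for (const StateModel* s = this; s != nullptr; s = s->caller) {
      n += s->locks_size();
    }
    return n;
  }
};

int main() {
  StateModel outer{{1}, nullptr};    // caller holds one monitor
  StateModel inner{{2, 3}, &outer};  // inlined callee holds two more
  assert(inner.total_locks_size() == 3);
  // a monitor local to the callee gets a global index offset by the caller's total
  int lock_offset = inner.caller->total_locks_size();
  assert(lock_offset == 1);
  return 0;
}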
...@@ -2762,19 +2762,13 @@ int LinearScan::append_scope_value(int op_id, Value value, GrowableArray<ScopeVa ...@@ -2762,19 +2762,13 @@ int LinearScan::append_scope_value(int op_id, Value value, GrowableArray<ScopeVa
} }
IRScopeDebugInfo* LinearScan::compute_debug_info_for_scope(int op_id, IRScope* cur_scope, ValueStack* cur_state, ValueStack* innermost_state, int cur_bci, int stack_end, int locks_end) { IRScopeDebugInfo* LinearScan::compute_debug_info_for_scope(int op_id, IRScope* cur_scope, ValueStack* cur_state, ValueStack* innermost_state) {
IRScopeDebugInfo* caller_debug_info = NULL; IRScopeDebugInfo* caller_debug_info = NULL;
int stack_begin, locks_begin;
ValueStack* caller_state = cur_scope->caller_state(); ValueStack* caller_state = cur_state->caller_state();
if (caller_state != NULL) { if (caller_state != NULL) {
// process recursively to compute outermost scope first // process recursively to compute outermost scope first
stack_begin = caller_state->stack_size(); caller_debug_info = compute_debug_info_for_scope(op_id, cur_scope->caller(), caller_state, innermost_state);
locks_begin = caller_state->locks_size();
caller_debug_info = compute_debug_info_for_scope(op_id, cur_scope->caller(), caller_state, innermost_state, cur_scope->caller_bci(), stack_begin, locks_begin);
} else {
stack_begin = 0;
locks_begin = 0;
} }
// initialize these to null. // initialize these to null.
...@@ -2785,7 +2779,7 @@ IRScopeDebugInfo* LinearScan::compute_debug_info_for_scope(int op_id, IRScope* c ...@@ -2785,7 +2779,7 @@ IRScopeDebugInfo* LinearScan::compute_debug_info_for_scope(int op_id, IRScope* c
GrowableArray<MonitorValue*>* monitors = NULL; GrowableArray<MonitorValue*>* monitors = NULL;
// describe local variable values // describe local variable values
int nof_locals = cur_scope->method()->max_locals(); int nof_locals = cur_state->locals_size();
if (nof_locals > 0) { if (nof_locals > 0) {
locals = new GrowableArray<ScopeValue*>(nof_locals); locals = new GrowableArray<ScopeValue*>(nof_locals);
...@@ -2800,45 +2794,41 @@ IRScopeDebugInfo* LinearScan::compute_debug_info_for_scope(int op_id, IRScope* c ...@@ -2800,45 +2794,41 @@ IRScopeDebugInfo* LinearScan::compute_debug_info_for_scope(int op_id, IRScope* c
} }
assert(locals->length() == cur_scope->method()->max_locals(), "wrong number of locals"); assert(locals->length() == cur_scope->method()->max_locals(), "wrong number of locals");
assert(locals->length() == cur_state->locals_size(), "wrong number of locals"); assert(locals->length() == cur_state->locals_size(), "wrong number of locals");
} else if (cur_scope->method()->max_locals() > 0) {
assert(cur_state->kind() == ValueStack::EmptyExceptionState, "should be");
nof_locals = cur_scope->method()->max_locals();
locals = new GrowableArray<ScopeValue*>(nof_locals);
for(int i = 0; i < nof_locals; i++) {
locals->append(&_illegal_value);
}
} }
// describe expression stack // describe expression stack
// int nof_stack = cur_state->stack_size();
// When we inline methods containing exception handlers, the
// "lock_stacks" are changed to preserve expression stack values
// in caller scopes when exception handlers are present. This
// can cause callee stacks to be smaller than caller stacks.
if (stack_end > innermost_state->stack_size()) {
stack_end = innermost_state->stack_size();
}
int nof_stack = stack_end - stack_begin;
if (nof_stack > 0) { if (nof_stack > 0) {
expressions = new GrowableArray<ScopeValue*>(nof_stack); expressions = new GrowableArray<ScopeValue*>(nof_stack);
int pos = stack_begin; int pos = 0;
while (pos < stack_end) { while (pos < nof_stack) {
Value expression = innermost_state->stack_at_inc(pos); Value expression = cur_state->stack_at_inc(pos);
append_scope_value(op_id, expression, expressions); append_scope_value(op_id, expression, expressions);
assert(expressions->length() + stack_begin == pos, "must match"); assert(expressions->length() == pos, "must match");
} }
assert(expressions->length() == cur_state->stack_size(), "wrong number of stack entries");
} }
// describe monitors // describe monitors
assert(locks_begin <= locks_end, "error in scope iteration"); int nof_locks = cur_state->locks_size();
int nof_locks = locks_end - locks_begin;
if (nof_locks > 0) { if (nof_locks > 0) {
int lock_offset = cur_state->caller_state() != NULL ? cur_state->caller_state()->total_locks_size() : 0;
monitors = new GrowableArray<MonitorValue*>(nof_locks); monitors = new GrowableArray<MonitorValue*>(nof_locks);
for (int i = locks_begin; i < locks_end; i++) { for (int i = 0; i < nof_locks; i++) {
monitors->append(location_for_monitor_index(i)); monitors->append(location_for_monitor_index(lock_offset + i));
} }
} }
return new IRScopeDebugInfo(cur_scope, cur_bci, locals, expressions, monitors, caller_debug_info); return new IRScopeDebugInfo(cur_scope, cur_state->bci(), locals, expressions, monitors, caller_debug_info);
} }
...@@ -2850,17 +2840,14 @@ void LinearScan::compute_debug_info(CodeEmitInfo* info, int op_id) { ...@@ -2850,17 +2840,14 @@ void LinearScan::compute_debug_info(CodeEmitInfo* info, int op_id) {
assert(innermost_scope != NULL && innermost_state != NULL, "why is it missing?"); assert(innermost_scope != NULL && innermost_state != NULL, "why is it missing?");
int stack_end = innermost_state->stack_size(); DEBUG_ONLY(check_stack_depth(info, innermost_state->stack_size()));
int locks_end = innermost_state->locks_size();
DEBUG_ONLY(check_stack_depth(info, stack_end));
if (info->_scope_debug_info == NULL) { if (info->_scope_debug_info == NULL) {
// compute debug information // compute debug information
info->_scope_debug_info = compute_debug_info_for_scope(op_id, innermost_scope, innermost_state, innermost_state, info->bci(), stack_end, locks_end); info->_scope_debug_info = compute_debug_info_for_scope(op_id, innermost_scope, innermost_state, innermost_state);
} else { } else {
// debug information already set. Check that it is correct from the current point of view // debug information already set. Check that it is correct from the current point of view
DEBUG_ONLY(assert_equal(info->_scope_debug_info, compute_debug_info_for_scope(op_id, innermost_scope, innermost_state, innermost_state, info->bci(), stack_end, locks_end))); DEBUG_ONLY(assert_equal(info->_scope_debug_info, compute_debug_info_for_scope(op_id, innermost_scope, innermost_state, innermost_state)));
} }
} }
......
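Editor's note: after this change each scope's debug info is computed from that scope's own ValueStack, and the stack_begin/stack_end/locks_begin bookkeeping disappears because expression stacks are no longer folded into the innermost state. The sketch below only illustrates the recursive shape of compute_debug_info_for_scope; the types are simplified placeholders, not the LinearScan or IRScope API.

#include <memory>
#include <vector>

struct ScopeState {
  std::vector<int> locals, stack, locks;   // placeholders for the Value arrays
  const ScopeState* caller;                // caller_state(), nullptr for the outermost scope
};

struct ScopeDebugInfo {
  std::vector<int> locals, expressions, monitors;
  std::unique_ptr<ScopeDebugInfo> caller;
};

std::unique_ptr<ScopeDebugInfo> compute_debug_info(const ScopeState* s) {
  auto info = std::make_unique<ScopeDebugInfo>();
  if (s->caller != nullptr) {
    // outermost scope first, exactly as the real code recurses on caller_state
    info->caller = compute_debug_info(s->caller);
  }
  info->locals      = s->locals;   // no more stack_begin/stack_end arithmetic:
  info->expressions = s->stack;    // this scope's own stack is already complete
  info->monitors    = s->locks;
  return info;
}

int main() {
  ScopeState outer{{1, 2}, {}, {}, nullptr};
  ScopeState inner{{3}, {7}, {}, &outer};
  auto info = compute_debug_info(&inner);
  return (info->caller && info->expressions.size() == 1) ? 0 : 1;
}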
...@@ -346,7 +346,7 @@ class LinearScan : public CompilationResourceObj { ...@@ -346,7 +346,7 @@ class LinearScan : public CompilationResourceObj {
int append_scope_value_for_operand(LIR_Opr opr, GrowableArray<ScopeValue*>* scope_values); int append_scope_value_for_operand(LIR_Opr opr, GrowableArray<ScopeValue*>* scope_values);
int append_scope_value(int op_id, Value value, GrowableArray<ScopeValue*>* scope_values); int append_scope_value(int op_id, Value value, GrowableArray<ScopeValue*>* scope_values);
IRScopeDebugInfo* compute_debug_info_for_scope(int op_id, IRScope* cur_scope, ValueStack* cur_state, ValueStack* innermost_state, int cur_bci, int stack_end, int locks_end); IRScopeDebugInfo* compute_debug_info_for_scope(int op_id, IRScope* cur_scope, ValueStack* cur_state, ValueStack* innermost_state);
void compute_debug_info(CodeEmitInfo* info, int op_id); void compute_debug_info(CodeEmitInfo* info, int op_id);
void assign_reg_num(LIR_OpList* instructions, IntervalWalker* iw); void assign_reg_num(LIR_OpList* instructions, IntervalWalker* iw);
......
...@@ -140,25 +140,27 @@ class CE_Eliminator: public BlockClosure { ...@@ -140,25 +140,27 @@ class CE_Eliminator: public BlockClosure {
// with an IfOp followed by a Goto // with an IfOp followed by a Goto
// cut if_ away and get node before // cut if_ away and get node before
Instruction* cur_end = if_->prev(block); Instruction* cur_end = if_->prev(block);
int bci = if_->bci();
// append constants of true- and false-block if necessary // append constants of true- and false-block if necessary
// clone constants because original block must not be destroyed // clone constants because original block must not be destroyed
assert((t_value != f_const && f_value != t_const) || t_const == f_const, "mismatch"); assert((t_value != f_const && f_value != t_const) || t_const == f_const, "mismatch");
if (t_value == t_const) { if (t_value == t_const) {
t_value = new Constant(t_const->type()); t_value = new Constant(t_const->type());
cur_end = cur_end->set_next(t_value, bci); NOT_PRODUCT(t_value->set_printable_bci(if_->printable_bci()));
cur_end = cur_end->set_next(t_value);
} }
if (f_value == f_const) { if (f_value == f_const) {
f_value = new Constant(f_const->type()); f_value = new Constant(f_const->type());
cur_end = cur_end->set_next(f_value, bci); NOT_PRODUCT(f_value->set_printable_bci(if_->printable_bci()));
cur_end = cur_end->set_next(f_value);
} }
// it is very unlikely that the condition can be statically decided // it is very unlikely that the condition can be statically decided
// (this was checked previously by the Canonicalizer), so always // (this was checked previously by the Canonicalizer), so always
// append IfOp // append IfOp
Value result = new IfOp(if_->x(), if_->cond(), if_->y(), t_value, f_value); Value result = new IfOp(if_->x(), if_->cond(), if_->y(), t_value, f_value);
cur_end = cur_end->set_next(result, bci); NOT_PRODUCT(result->set_printable_bci(if_->printable_bci()));
cur_end = cur_end->set_next(result);
// append Goto to successor // append Goto to successor
ValueStack* state_before = if_->is_safepoint() ? if_->state_before() : NULL; ValueStack* state_before = if_->is_safepoint() ? if_->state_before() : NULL;
...@@ -167,16 +169,15 @@ class CE_Eliminator: public BlockClosure { ...@@ -167,16 +169,15 @@ class CE_Eliminator: public BlockClosure {
// prepare state for Goto // prepare state for Goto
ValueStack* goto_state = if_->state(); ValueStack* goto_state = if_->state();
while (sux_state->scope() != goto_state->scope()) { while (sux_state->scope() != goto_state->scope()) {
goto_state = goto_state->pop_scope(); goto_state = goto_state->caller_state();
assert(goto_state != NULL, "states do not match up"); assert(goto_state != NULL, "states do not match up");
} }
goto_state = goto_state->copy(); goto_state = goto_state->copy(ValueStack::StateAfter, goto_state->bci());
goto_state->push(result->type(), result); goto_state->push(result->type(), result);
assert(goto_state->is_same_across_scopes(sux_state), "states must match now"); assert(goto_state->is_same(sux_state), "states must match now");
goto_->set_state(goto_state); goto_->set_state(goto_state);
// Steal the bci for the goto from the sux cur_end = cur_end->set_next(goto_, goto_state->bci());
cur_end = cur_end->set_next(goto_, sux->bci());
// Adjust control flow graph // Adjust control flow graph
BlockBegin::disconnect_edge(block, t_block); BlockBegin::disconnect_edge(block, t_block);
...@@ -251,10 +252,8 @@ class BlockMerger: public BlockClosure { ...@@ -251,10 +252,8 @@ class BlockMerger: public BlockClosure {
// no phi functions must be present at beginning of sux // no phi functions must be present at beginning of sux
ValueStack* sux_state = sux->state(); ValueStack* sux_state = sux->state();
ValueStack* end_state = end->state(); ValueStack* end_state = end->state();
while (end_state->scope() != sux_state->scope()) {
// match up inlining level assert(end_state->scope() == sux_state->scope(), "scopes must match");
end_state = end_state->pop_scope();
}
assert(end_state->stack_size() == sux_state->stack_size(), "stack not equal"); assert(end_state->stack_size() == sux_state->stack_size(), "stack not equal");
assert(end_state->locals_size() == sux_state->locals_size(), "locals not equal"); assert(end_state->locals_size() == sux_state->locals_size(), "locals not equal");
...@@ -273,7 +272,7 @@ class BlockMerger: public BlockClosure { ...@@ -273,7 +272,7 @@ class BlockMerger: public BlockClosure {
Instruction* prev = end->prev(block); Instruction* prev = end->prev(block);
Instruction* next = sux->next(); Instruction* next = sux->next();
assert(prev->as_BlockEnd() == NULL, "must not be a BlockEnd"); assert(prev->as_BlockEnd() == NULL, "must not be a BlockEnd");
prev->set_next(next, next->bci()); prev->set_next(next);
sux->disconnect_from_graph(); sux->disconnect_from_graph();
block->set_end(sux->end()); block->set_end(sux->end());
// add exception handlers of deleted block, if any // add exception handlers of deleted block, if any
...@@ -337,7 +336,8 @@ class BlockMerger: public BlockClosure { ...@@ -337,7 +336,8 @@ class BlockMerger: public BlockClosure {
newif->set_state(if_->state()->copy()); newif->set_state(if_->state()->copy());
assert(prev->next() == if_, "must be guaranteed by above search"); assert(prev->next() == if_, "must be guaranteed by above search");
prev->set_next(newif, if_->bci()); NOT_PRODUCT(newif->set_printable_bci(if_->printable_bci()));
prev->set_next(newif);
block->set_end(newif); block->set_end(newif);
_merge_count++; _merge_count++;
...@@ -705,7 +705,7 @@ void NullCheckEliminator::iterate_one(BlockBegin* block) { ...@@ -705,7 +705,7 @@ void NullCheckEliminator::iterate_one(BlockBegin* block) {
// visiting instructions which are references in other blocks or // visiting instructions which are references in other blocks or
// visiting instructions more than once. // visiting instructions more than once.
mark_visitable(instr); mark_visitable(instr);
if (instr->is_root() || instr->can_trap() || (instr->as_NullCheck() != NULL)) { if (instr->is_pinned() || instr->can_trap() || (instr->as_NullCheck() != NULL)) {
mark_visited(instr); mark_visited(instr);
instr->input_values_do(this); instr->input_values_do(this);
instr->visit(&_visitor); instr->visit(&_visitor);
......
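Editor's note: in the Optimizer hunks above, set_next no longer takes a bci, and the bci kept for diagnostics is recorded through NOT_PRODUCT(set_printable_bci(...)), i.e. only in non-product builds. The small sketch below models that pattern with a local PRODUCT/NOT_PRODUCT macro and a simplified Instr type; it is an illustration, not the HotSpot Instruction class.

#include <cstdio>

#ifdef PRODUCT
#define NOT_PRODUCT(code)
#else
#define NOT_PRODUCT(code) code
#endif

struct Instr {
  Instr* _next = nullptr;
#ifndef PRODUCT
  int _printable_bci = -1;                 // debugging aid only, no semantic meaning
#endif
  Instr* set_next(Instr* n) { _next = n; return n; }
  void set_printable_bci(int bci) { NOT_PRODUCT(_printable_bci = bci;) }
};

int main() {
  Instr a, b;
  NOT_PRODUCT(b.set_printable_bci(17));    // mirrors NOT_PRODUCT(t_value->set_printable_bci(...))
  a.set_next(&b);                          // note: no bci argument any more
  return 0;
}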
...@@ -28,55 +28,60 @@ ...@@ -28,55 +28,60 @@
// Implementation of ValueStack // Implementation of ValueStack
ValueStack::ValueStack(IRScope* scope, int locals_size, int max_stack_size) ValueStack::ValueStack(IRScope* scope, ValueStack* caller_state)
: _scope(scope) : _scope(scope)
, _locals(locals_size, NULL) , _caller_state(caller_state)
, _stack(max_stack_size) , _bci(-99)
, _lock_stack(false) , _kind(Parsing)
, _locks(1) , _locals(scope->method()->max_locals(), NULL)
, _stack(scope->method()->max_stack())
, _locks()
{ {
assert(scope != NULL, "scope must exist"); verify();
}
ValueStack* ValueStack::copy() {
ValueStack* s = new ValueStack(scope(), locals_size(), max_stack_size());
s->_stack.appendAll(&_stack);
s->_locks.appendAll(&_locks);
s->replace_locals(this);
return s;
} }
ValueStack* ValueStack::copy_locks() { ValueStack::ValueStack(ValueStack* copy_from, Kind kind, int bci)
int sz = scope()->lock_stack_size(); : _scope(copy_from->scope())
if (stack_size() == 0) { , _caller_state(copy_from->caller_state())
sz = 0; , _bci(bci)
, _kind(kind)
, _locals()
, _stack()
, _locks(copy_from->locks_size())
{
assert(kind != EmptyExceptionState || !Compilation::current()->env()->jvmti_can_access_local_variables(), "need locals");
if (kind != EmptyExceptionState) {
// only allocate space if we need to copy the locals-array
_locals = Values(copy_from->locals_size());
_locals.appendAll(&copy_from->_locals);
} }
ValueStack* s = new ValueStack(scope(), locals_size(), sz);
s->_lock_stack = true; if (kind != ExceptionState && kind != EmptyExceptionState) {
s->_locks.appendAll(&_locks); if (kind == Parsing) {
s->replace_locals(this); // stack will be modified, so reserve enough space to avoid resizing
if (sz > 0) { _stack = Values(scope()->method()->max_stack());
assert(sz <= stack_size(), "lock stack underflow"); } else {
for (int i = 0; i < sz; i++) { // stack will not be modified, so do not waste space
s->_stack.append(_stack[i]); _stack = Values(copy_from->stack_size());
} }
_stack.appendAll(&copy_from->_stack);
} }
return s;
_locks.appendAll(&copy_from->_locks);
verify();
} }
bool ValueStack::is_same(ValueStack* s) { bool ValueStack::is_same(ValueStack* s) {
assert(s != NULL, "state must exist"); if (scope() != s->scope()) return false;
assert(scope () == s->scope (), "scopes must correspond"); if (caller_state() != s->caller_state()) return false;
assert(locals_size() == s->locals_size(), "locals sizes must correspond");
return is_same_across_scopes(s);
}
if (locals_size() != s->locals_size()) return false;
if (stack_size() != s->stack_size()) return false;
if (locks_size() != s->locks_size()) return false;
bool ValueStack::is_same_across_scopes(ValueStack* s) {
assert(s != NULL, "state must exist");
assert(stack_size () == s->stack_size (), "stack sizes must correspond");
assert(locks_size () == s->locks_size (), "locks sizes must correspond");
// compare each stack element with the corresponding stack element of s // compare each stack element with the corresponding stack element of s
int index; int index;
Value value; Value value;
...@@ -89,12 +94,6 @@ bool ValueStack::is_same_across_scopes(ValueStack* s) { ...@@ -89,12 +94,6 @@ bool ValueStack::is_same_across_scopes(ValueStack* s) {
return true; return true;
} }
ValueStack* ValueStack::caller_state() const {
return scope()->caller_state();
}
void ValueStack::clear_locals() { void ValueStack::clear_locals() {
for (int i = _locals.length() - 1; i >= 0; i--) { for (int i = _locals.length() - 1; i >= 0; i--) {
_locals.at_put(i, NULL); _locals.at_put(i, NULL);
...@@ -102,13 +101,6 @@ void ValueStack::clear_locals() { ...@@ -102,13 +101,6 @@ void ValueStack::clear_locals() {
} }
void ValueStack::replace_locals(ValueStack* with) {
assert(locals_size() == with->locals_size(), "number of locals must match");
for (int i = locals_size() - 1; i >= 0; i--) {
_locals.at_put(i, with->_locals.at(i));
}
}
void ValueStack::pin_stack_for_linear_scan() { void ValueStack::pin_stack_for_linear_scan() {
for_each_state_value(this, v, for_each_state_value(this, v,
if (v->as_Constant() == NULL && v->as_Local() == NULL) { if (v->as_Constant() == NULL && v->as_Local() == NULL) {
...@@ -123,33 +115,25 @@ void ValueStack::apply(Values list, ValueVisitor* f) { ...@@ -123,33 +115,25 @@ void ValueStack::apply(Values list, ValueVisitor* f) {
for (int i = 0; i < list.length(); i++) { for (int i = 0; i < list.length(); i++) {
Value* va = list.adr_at(i); Value* va = list.adr_at(i);
Value v0 = *va; Value v0 = *va;
if (v0 != NULL) { if (v0 != NULL && !v0->type()->is_illegal()) {
if (!v0->type()->is_illegal()) { f->visit(va);
assert(v0->as_HiWord() == NULL, "should never see HiWord during traversal");
f->visit(va);
#ifdef ASSERT #ifdef ASSERT
Value v1 = *va; Value v1 = *va;
if (v0 != v1) { assert(v1->type()->is_illegal() || v0->type()->tag() == v1->type()->tag(), "types must match");
assert(v1->type()->is_illegal() || v0->type()->tag() == v1->type()->tag(), "types must match"); assert(!v1->type()->is_double_word() || list.at(i + 1) == NULL, "hi-word of doubleword value must be NULL");
if (v0->type()->is_double_word()) {
list.at_put(i + 1, v0->hi_word());
}
}
#endif #endif
if (v0->type()->is_double_word()) i++; if (v0->type()->is_double_word()) i++;
}
} }
} }
} }
void ValueStack::values_do(ValueVisitor* f) { void ValueStack::values_do(ValueVisitor* f) {
apply(_stack, f);
apply(_locks, f);
ValueStack* state = this; ValueStack* state = this;
for_each_state(state) { for_each_state(state) {
apply(state->_locals, f); apply(state->_locals, f);
apply(state->_stack, f);
apply(state->_locks, f);
} }
} }
...@@ -164,52 +148,26 @@ Values* ValueStack::pop_arguments(int argument_size) { ...@@ -164,52 +148,26 @@ Values* ValueStack::pop_arguments(int argument_size) {
} }
int ValueStack::lock(IRScope* scope, Value obj) { int ValueStack::total_locks_size() const {
int num_locks = 0;
const ValueStack* state = this;
for_each_state(state) {
num_locks += state->locks_size();
}
return num_locks;
}
int ValueStack::lock(Value obj) {
_locks.push(obj); _locks.push(obj);
scope->set_min_number_of_locks(locks_size()); int num_locks = total_locks_size();
return locks_size() - 1; scope()->set_min_number_of_locks(num_locks);
return num_locks - 1;
} }
int ValueStack::unlock() { int ValueStack::unlock() {
_locks.pop(); _locks.pop();
return locks_size(); return total_locks_size();
}
ValueStack* ValueStack::push_scope(IRScope* scope) {
assert(scope->caller() == _scope, "scopes must have caller/callee relationship");
ValueStack* res = new ValueStack(scope,
scope->method()->max_locals(),
max_stack_size() + scope->method()->max_stack());
// Preserves stack and monitors.
res->_stack.appendAll(&_stack);
res->_locks.appendAll(&_locks);
assert(res->_stack.size() <= res->max_stack_size(), "stack overflow");
return res;
}
ValueStack* ValueStack::pop_scope() {
assert(_scope->caller() != NULL, "scope must have caller");
IRScope* scope = _scope->caller();
int max_stack = max_stack_size() - _scope->method()->max_stack();
assert(max_stack >= 0, "stack underflow");
ValueStack* res = new ValueStack(scope,
scope->method()->max_locals(),
max_stack);
// Preserves stack and monitors. Restores local and store state from caller scope.
res->_stack.appendAll(&_stack);
res->_locks.appendAll(&_locks);
ValueStack* caller = caller_state();
if (caller != NULL) {
for (int i = 0; i < caller->_locals.length(); i++) {
res->_locals.at_put(i, caller->_locals.at(i));
}
assert(res->_locals.length() == res->scope()->method()->max_locals(), "just checking");
}
assert(res->_stack.size() <= res->max_stack_size(), "stack overflow");
return res;
} }
...@@ -220,11 +178,7 @@ void ValueStack::setup_phi_for_stack(BlockBegin* b, int index) { ...@@ -220,11 +178,7 @@ void ValueStack::setup_phi_for_stack(BlockBegin* b, int index) {
Value phi = new Phi(t, b, -index - 1); Value phi = new Phi(t, b, -index - 1);
_stack[index] = phi; _stack[index] = phi;
#ifdef ASSERT assert(!t->is_double_word() || _stack.at(index + 1) == NULL, "hi-word of doubleword value must be NULL");
if (t->is_double_word()) {
_stack[index + 1] = phi->hi_word();
}
#endif
} }
void ValueStack::setup_phi_for_local(BlockBegin* b, int index) { void ValueStack::setup_phi_for_local(BlockBegin* b, int index) {
...@@ -236,7 +190,9 @@ void ValueStack::setup_phi_for_local(BlockBegin* b, int index) { ...@@ -236,7 +190,9 @@ void ValueStack::setup_phi_for_local(BlockBegin* b, int index) {
} }
#ifndef PRODUCT #ifndef PRODUCT
void ValueStack::print() { void ValueStack::print() {
scope()->method()->print_name();
if (stack_is_empty()) { if (stack_is_empty()) {
tty->print_cr("empty stack"); tty->print_cr("empty stack");
} else { } else {
...@@ -244,18 +200,20 @@ void ValueStack::print() { ...@@ -244,18 +200,20 @@ void ValueStack::print() {
for (int i = 0; i < stack_size();) { for (int i = 0; i < stack_size();) {
Value t = stack_at_inc(i); Value t = stack_at_inc(i);
tty->print("%2d ", i); tty->print("%2d ", i);
tty->print("%c%d ", t->type()->tchar(), t->id());
ip.print_instr(t); ip.print_instr(t);
tty->cr(); tty->cr();
} }
} }
if (!no_active_locks()) { if (!no_active_locks()) {
InstructionPrinter ip; InstructionPrinter ip;
for (int i = 0; i < locks_size(); i--) { for (int i = 0; i < locks_size(); i++) {
Value t = lock_at(i); Value t = lock_at(i);
tty->print("lock %2d ", i); tty->print("lock %2d ", i);
if (t == NULL) { if (t == NULL) {
tty->print("this"); tty->print("this");
} else { } else {
tty->print("%c%d ", t->type()->tchar(), t->id());
ip.print_instr(t); ip.print_instr(t);
} }
tty->cr(); tty->cr();
...@@ -270,16 +228,55 @@ void ValueStack::print() { ...@@ -270,16 +228,55 @@ void ValueStack::print() {
tty->print("null"); tty->print("null");
i ++; i ++;
} else { } else {
tty->print("%c%d ", l->type()->tchar(), l->id());
ip.print_instr(l); ip.print_instr(l);
if (l->type()->is_illegal() || l->type()->is_single_word()) i ++; else i += 2; if (l->type()->is_illegal() || l->type()->is_single_word()) i ++; else i += 2;
} }
tty->cr(); tty->cr();
} }
} }
if (caller_state() != NULL) {
caller_state()->print();
}
} }
void ValueStack::verify() { void ValueStack::verify() {
Unimplemented(); assert(scope() != NULL, "scope must exist");
if (caller_state() != NULL) {
assert(caller_state()->scope() == scope()->caller(), "invalid caller scope");
caller_state()->verify();
}
if (kind() == Parsing) {
assert(bci() == -99, "bci not defined during parsing");
} else {
assert(bci() >= -1, "bci out of range");
assert(bci() < scope()->method()->code_size(), "bci out of range");
assert(bci() == SynchronizationEntryBCI || Bytecodes::is_defined(scope()->method()->java_code_at_bci(bci())), "make sure bci points at a real bytecode");
assert(scope()->method()->liveness_at_bci(bci()).is_valid(), "liveness at bci must be valid");
}
int i;
for (i = 0; i < stack_size(); i++) {
Value v = _stack.at(i);
if (v == NULL) {
assert(_stack.at(i - 1)->type()->is_double_word(), "only hi-words are NULL on stack");
} else if (v->type()->is_double_word()) {
assert(_stack.at(i + 1) == NULL, "hi-word must be NULL");
}
}
for (i = 0; i < locals_size(); i++) {
Value v = _locals.at(i);
if (v != NULL && v->type()->is_double_word()) {
assert(_locals.at(i + 1) == NULL, "hi-word must be NULL");
}
}
for_each_state_value(this, v,
assert(v != NULL, "just test if state-iteration succeeds");
);
} }
#endif // PRODUCT #endif // PRODUCT
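Editor's note: the new ValueStack copy constructor decides what to copy from the kind requested: states of kind EmptyExceptionState drop the locals, exception states start with an empty expression stack, and monitors are always preserved. Below is a hedged, self-contained model of that decision logic with simplified types (StateModel and plain int placeholders for Values); it is not the real constructor.

#include <vector>

enum Kind { Parsing, CallerState, StateBefore, StateAfter,
            ExceptionState, EmptyExceptionState, BlockBeginState };

struct StateModel {
  Kind kind;
  int  bci;
  std::vector<int> locals, stack, locks;   // placeholders for the Value arrays

  StateModel(Kind k, int b) : kind(k), bci(b) {}

  // copy 'from' according to the new kind, echoing ValueStack(ValueStack*, Kind, int)
  StateModel(const StateModel& from, Kind k, int b) : kind(k), bci(b) {
    if (k != EmptyExceptionState) {
      locals = from.locals;                // states without a handler do not need locals
    }
    if (k != ExceptionState && k != EmptyExceptionState) {
      stack = from.stack;                  // exception states start with an empty stack
    }
    locks = from.locks;                    // monitors are always preserved
  }
};

int main() {
  StateModel parsing(Parsing, -99);
  parsing.locals = {1, 2};
  parsing.stack  = {3};
  StateModel before(parsing, StateBefore, 10);   // full copy at bci 10
  StateModel ex(parsing, ExceptionState, 10);    // locals kept, stack dropped
  return (before.stack.size() == 1 && ex.stack.empty()) ? 0 : 1;
}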
...@@ -23,9 +23,23 @@ ...@@ -23,9 +23,23 @@
*/ */
class ValueStack: public CompilationResourceObj { class ValueStack: public CompilationResourceObj {
public:
enum Kind {
Parsing, // During abstract interpretation in GraphBuilder
CallerState, // Caller state when inlining
StateBefore, // Before before execution of instruction
StateAfter, // After execution of instruction
ExceptionState, // Exception handling of instruction
EmptyExceptionState, // Exception handling of instructions not covered by an xhandler
BlockBeginState // State of BlockBegin instruction with phi functions of this block
};
private: private:
IRScope* _scope; // the enclosing scope IRScope* _scope; // the enclosing scope
bool _lock_stack; // indicates that this ValueStack is for an exception site ValueStack* _caller_state;
int _bci;
Kind _kind;
Values _locals; // the locals Values _locals; // the locals
Values _stack; // the expression stack Values _stack; // the expression stack
Values _locks; // the monitor stack (holding the locked values) Values _locks; // the monitor stack (holding the locked values)
...@@ -36,100 +50,79 @@ class ValueStack: public CompilationResourceObj { ...@@ -36,100 +50,79 @@ class ValueStack: public CompilationResourceObj {
} }
Value check(ValueTag tag, Value t, Value h) { Value check(ValueTag tag, Value t, Value h) {
assert(h->as_HiWord()->lo_word() == t, "incorrect stack pair"); assert(h == NULL, "hi-word of doubleword value must be NULL");
return check(tag, t); return check(tag, t);
} }
// helper routine // helper routine
static void apply(Values list, ValueVisitor* f); static void apply(Values list, ValueVisitor* f);
// for simplified copying
ValueStack(ValueStack* copy_from, Kind kind, int bci);
public: public:
// creation // creation
ValueStack(IRScope* scope, int locals_size, int max_stack_size); ValueStack(IRScope* scope, ValueStack* caller_state);
// merging ValueStack* copy() { return new ValueStack(this, _kind, _bci); }
ValueStack* copy(); // returns a copy of this w/ cleared locals ValueStack* copy(Kind new_kind, int new_bci) { return new ValueStack(this, new_kind, new_bci); }
ValueStack* copy_locks(); // returns a copy of this w/ cleared locals and stack ValueStack* copy_for_parsing() { return new ValueStack(this, Parsing, -99); }
// Note that when inlining of methods with exception
// handlers is enabled, this stack may have a void set_caller_state(ValueStack* s) { assert(kind() == EmptyExceptionState, "only EmptyExceptionStates can be modified"); _caller_state = s; }
// non-empty expression stack (size defined by
// scope()->lock_stack_size())
bool is_same(ValueStack* s); // returns true if this & s's types match (w/o checking locals) bool is_same(ValueStack* s); // returns true if this & s's types match (w/o checking locals)
bool is_same_across_scopes(ValueStack* s); // same as is_same but returns true even if stacks are in different scopes (used for block merging w/inlining)
// accessors // accessors
IRScope* scope() const { return _scope; } IRScope* scope() const { return _scope; }
bool is_lock_stack() const { return _lock_stack; } ValueStack* caller_state() const { return _caller_state; }
int bci() const { return _bci; }
Kind kind() const { return _kind; }
int locals_size() const { return _locals.length(); } int locals_size() const { return _locals.length(); }
int stack_size() const { return _stack.length(); } int stack_size() const { return _stack.length(); }
int locks_size() const { return _locks.length(); } int locks_size() const { return _locks.length(); }
int max_stack_size() const { return _stack.capacity(); }
bool stack_is_empty() const { return _stack.is_empty(); } bool stack_is_empty() const { return _stack.is_empty(); }
bool no_active_locks() const { return _locks.is_empty(); } bool no_active_locks() const { return _locks.is_empty(); }
ValueStack* caller_state() const; int total_locks_size() const;
// locals access // locals access
void clear_locals(); // sets all locals to NULL; void clear_locals(); // sets all locals to NULL;
// Kill local i. Also kill local i+1 if i was a long or double.
void invalidate_local(int i) { void invalidate_local(int i) {
Value x = _locals.at(i); assert(_locals.at(i)->type()->is_single_word() ||
if (x != NULL && x->type()->is_double_word()) { _locals.at(i + 1) == NULL, "hi-word of doubleword value must be NULL");
assert(_locals.at(i + 1)->as_HiWord()->lo_word() == x, "locals inconsistent");
_locals.at_put(i + 1, NULL);
}
_locals.at_put(i, NULL); _locals.at_put(i, NULL);
} }
Value local_at(int i) const {
Value load_local(int i) const {
Value x = _locals.at(i); Value x = _locals.at(i);
if (x != NULL && x->type()->is_illegal()) return NULL; assert(x == NULL || x->type()->is_single_word() ||
assert(x == NULL || x->as_HiWord() == NULL, "index points to hi word"); _locals.at(i + 1) == NULL, "hi-word of doubleword value must be NULL");
assert(x == NULL || x->type()->is_illegal() || x->type()->is_single_word() || x == _locals.at(i+1)->as_HiWord()->lo_word(), "locals inconsistent");
return x; return x;
} }
Value local_at(int i) const { return _locals.at(i); }
// Store x into local i.
void store_local(int i, Value x) { void store_local(int i, Value x) {
// Kill the old value // When overwriting local i, check if i - 1 was the start of a
invalidate_local(i); // double word local and kill it.
_locals.at_put(i, x);
// Writing a double word can kill other locals
if (x != NULL && x->type()->is_double_word()) {
// If x + i was the start of a double word local then kill i + 2.
Value x2 = _locals.at(i + 1);
if (x2 != NULL && x2->type()->is_double_word()) {
_locals.at_put(i + 2, NULL);
}
// If x is a double word local, also update i + 1.
#ifdef ASSERT
_locals.at_put(i + 1, x->hi_word());
#else
_locals.at_put(i + 1, NULL);
#endif
}
// If x - 1 was the start of a double word local then kill i - 1.
if (i > 0) { if (i > 0) {
Value prev = _locals.at(i - 1); Value prev = _locals.at(i - 1);
if (prev != NULL && prev->type()->is_double_word()) { if (prev != NULL && prev->type()->is_double_word()) {
_locals.at_put(i - 1, NULL); _locals.at_put(i - 1, NULL);
} }
} }
}
void replace_locals(ValueStack* with); _locals.at_put(i, x);
if (x->type()->is_double_word()) {
// hi-word of doubleword value is always NULL
_locals.at_put(i + 1, NULL);
}
}
// stack access // stack access
Value stack_at(int i) const { Value stack_at(int i) const {
Value x = _stack.at(i); Value x = _stack.at(i);
assert(x->as_HiWord() == NULL, "index points to hi word");
assert(x->type()->is_single_word() || assert(x->type()->is_single_word() ||
x->subst() == _stack.at(i+1)->as_HiWord()->lo_word(), "stack inconsistent"); _stack.at(i + 1) == NULL, "hi-word of doubleword value must be NULL");
return x; return x;
} }
...@@ -146,7 +139,6 @@ class ValueStack: public CompilationResourceObj { ...@@ -146,7 +139,6 @@ class ValueStack: public CompilationResourceObj {
void values_do(ValueVisitor* f); void values_do(ValueVisitor* f);
// untyped manipulation (for dup_x1, etc.) // untyped manipulation (for dup_x1, etc.)
void clear_stack() { _stack.clear(); }
void truncate_stack(int size) { _stack.trunc_to(size); } void truncate_stack(int size) { _stack.trunc_to(size); }
void raw_push(Value t) { _stack.push(t); } void raw_push(Value t) { _stack.push(t); }
Value raw_pop() { return _stack.pop(); } Value raw_pop() { return _stack.pop(); }
...@@ -156,15 +148,8 @@ class ValueStack: public CompilationResourceObj { ...@@ -156,15 +148,8 @@ class ValueStack: public CompilationResourceObj {
void fpush(Value t) { _stack.push(check(floatTag , t)); } void fpush(Value t) { _stack.push(check(floatTag , t)); }
void apush(Value t) { _stack.push(check(objectTag , t)); } void apush(Value t) { _stack.push(check(objectTag , t)); }
void rpush(Value t) { _stack.push(check(addressTag, t)); } void rpush(Value t) { _stack.push(check(addressTag, t)); }
#ifdef ASSERT
// in debug mode, use HiWord for 2-word values
void lpush(Value t) { _stack.push(check(longTag , t)); _stack.push(new HiWord(t)); }
void dpush(Value t) { _stack.push(check(doubleTag , t)); _stack.push(new HiWord(t)); }
#else
// in optimized mode, use NULL for 2-word values
void lpush(Value t) { _stack.push(check(longTag , t)); _stack.push(NULL); } void lpush(Value t) { _stack.push(check(longTag , t)); _stack.push(NULL); }
void dpush(Value t) { _stack.push(check(doubleTag , t)); _stack.push(NULL); } void dpush(Value t) { _stack.push(check(doubleTag , t)); _stack.push(NULL); }
#endif // ASSERT
void push(ValueType* type, Value t) { void push(ValueType* type, Value t) {
switch (type->tag()) { switch (type->tag()) {
...@@ -182,15 +167,8 @@ class ValueStack: public CompilationResourceObj { ...@@ -182,15 +167,8 @@ class ValueStack: public CompilationResourceObj {
Value fpop() { return check(floatTag , _stack.pop()); } Value fpop() { return check(floatTag , _stack.pop()); }
Value apop() { return check(objectTag , _stack.pop()); } Value apop() { return check(objectTag , _stack.pop()); }
Value rpop() { return check(addressTag, _stack.pop()); } Value rpop() { return check(addressTag, _stack.pop()); }
#ifdef ASSERT
// in debug mode, check for HiWord consistency
Value lpop() { Value h = _stack.pop(); return check(longTag , _stack.pop(), h); } Value lpop() { Value h = _stack.pop(); return check(longTag , _stack.pop(), h); }
Value dpop() { Value h = _stack.pop(); return check(doubleTag, _stack.pop(), h); } Value dpop() { Value h = _stack.pop(); return check(doubleTag, _stack.pop(), h); }
#else
// in optimized mode, ignore HiWord since it is NULL
Value lpop() { _stack.pop(); return check(longTag , _stack.pop()); }
Value dpop() { _stack.pop(); return check(doubleTag, _stack.pop()); }
#endif // ASSERT
Value pop(ValueType* type) { Value pop(ValueType* type) {
switch (type->tag()) { switch (type->tag()) {
...@@ -208,16 +186,10 @@ class ValueStack: public CompilationResourceObj { ...@@ -208,16 +186,10 @@ class ValueStack: public CompilationResourceObj {
Values* pop_arguments(int argument_size); Values* pop_arguments(int argument_size);
// locks access // locks access
int lock (IRScope* scope, Value obj); int lock (Value obj);
int unlock(); int unlock();
Value lock_at(int i) const { return _locks.at(i); } Value lock_at(int i) const { return _locks.at(i); }
// Inlining support
ValueStack* push_scope(IRScope* scope); // "Push" new scope, returning new resulting stack
// Preserves stack and locks, destroys locals
ValueStack* pop_scope(); // "Pop" topmost scope, returning new resulting stack
// Preserves stack and locks, destroys locals
// SSA form IR support // SSA form IR support
void setup_phi_for_stack(BlockBegin* b, int index); void setup_phi_for_stack(BlockBegin* b, int index);
void setup_phi_for_local(BlockBegin* b, int index); void setup_phi_for_local(BlockBegin* b, int index);
...@@ -298,16 +270,18 @@ class ValueStack: public CompilationResourceObj { ...@@ -298,16 +270,18 @@ class ValueStack: public CompilationResourceObj {
{ \ { \
int cur_index; \ int cur_index; \
ValueStack* cur_state = v_state; \ ValueStack* cur_state = v_state; \
Value v_value; \ Value v_value; \
{ \
for_each_stack_value(cur_state, cur_index, v_value) { \
v_code; \
} \
} \
for_each_state(cur_state) { \ for_each_state(cur_state) { \
for_each_local_value(cur_state, cur_index, v_value) { \ { \
v_code; \ for_each_local_value(cur_state, cur_index, v_value) { \
v_code; \
} \
} \ } \
{ \
for_each_stack_value(cur_state, cur_index, v_value) { \
v_code; \
} \
} \
} \ } \
} }
......
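Editor's note: the reworked for_each_state_value macro above visits, for every state in the caller chain, first the locals and then that state's own expression stack, since stacks are no longer concatenated into the innermost ValueStack. A rough standalone illustration of that traversal order follows; the names are simplified placeholders.

#include <vector>

struct StateModel {
  std::vector<int> locals, stack;
  const StateModel* caller;
};

template <typename F>
void for_each_state_value(const StateModel* state, F f) {
  for (const StateModel* s = state; s != nullptr; s = s->caller) {
    for (int v : s->locals) f(v);   // locals of this scope
    for (int v : s->stack)  f(v);   // expression stack of this scope
  }
}

int main() {
  StateModel outer{{1}, {2}, nullptr};
  StateModel inner{{3}, {4}, &outer};
  int sum = 0;
  for_each_state_value(&inner, [&](int v) { sum += v; });
  return sum == 10 ? 0 : 1;       // all four values are visited exactly once
}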
...@@ -216,9 +216,6 @@ ...@@ -216,9 +216,6 @@
develop(bool, DeoptC1, true, \ develop(bool, DeoptC1, true, \
"Use deoptimization in C1") \ "Use deoptimization in C1") \
\ \
develop(bool, DeoptOnAsyncException, true, \
"Deoptimize upon Thread.stop(); improves precision of IR") \
\
develop(bool, PrintBailouts, false, \ develop(bool, PrintBailouts, false, \
"Print bailout and its reason") \ "Print bailout and its reason") \
\ \
......
...@@ -448,3 +448,7 @@ thread.cpp c1_Compiler.hpp ...@@ -448,3 +448,7 @@ thread.cpp c1_Compiler.hpp
top.hpp c1_globals.hpp top.hpp c1_globals.hpp
vmStructs.hpp c1_Runtime1.hpp vmStructs.hpp c1_Runtime1.hpp
c1_Canonicalizer.cpp c1_ValueStack.hpp
c1_LIR.cpp c1_ValueStack.hpp