Commit 96329ee4 authored by goetz

8030863: PPC64: (part 220): ConstantTableBase for calls between args and jvms

Summary: Add ConstantTableBase node edge after parameters and before jvms. Adapt jvms offsets.
Reviewed-by: kvn
Parent 632a957f
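In other words, the input layout of a matched non-leaf Java call becomes (a sketch; cnt abbreviates tf()->domain()->cnt(), the usual C2 TypeFunc slot names are assumed):

  // in(0 .. TypeFunc::Parms-1)    Control / I_O / Memory / FramePtr / ReturnAdr
  // in(TypeFunc::Parms .. cnt-1)  outgoing arguments
  // in(cnt)                       MachConstantBaseNode, inserted by the matcher
  // in(cnt+1 ..)                  JVMState debug info; all JVMState offsets are
  //                               shifted by +1 via JVMState::adapt_position(+1)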
......@@ -3563,9 +3563,6 @@ encode %{
// postalloc expand emitter for virtual calls.
enc_class postalloc_expand_java_dynamic_call_sched(method meth, iRegLdst toc) %{
// Toc is in return address field, though not accessible via postalloc_expand
// functionality.
Node *toc = in(TypeFunc::ReturnAdr);
// Create the nodes for loading the IC from the TOC.
loadConLNodesTuple loadConLNodes_IC =
......@@ -3592,23 +3589,21 @@ encode %{
// New call needs all inputs of old call.
// Req...
for (uint i = 0; i < req(); ++i) {
if (i != TypeFunc::ReturnAdr) {
call->add_req(in(i));
// The expanded node does not need toc any more.
// Add the inline cache constant here instead. This expresses that the
// register of the inline cache must be live at the call.
// Else we would have to adapt JVMState by -1.
if (i == mach_constant_base_node_input()) {
call->add_req(loadConLNodes_IC._last);
} else {
// The expanded node does not need toc any more.
call->add_req(C->top());
call->add_req(in(i));
}
}
// ...as well as prec
for (uint i = req(); i < len() ; ++i) {
for (uint i = req(); i < len(); ++i) {
call->add_prec(in(i));
}
// The cache must come before the call, but it's not a req edge.
// GL: actually it should be a req edge to express that the
// register must be live in the Call. But as R19 is declared to be
// the inline_cache_reg, that's fine.
call->add_prec(loadConLNodes_IC._last);
// Remember nodes loading the inline cache into r19.
call->_load_ic_hi_node = loadConLNodes_IC._large_hi;
call->_load_ic_node = loadConLNodes_IC._small;
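Restating the edge bookkeeping above in one place (a sketch, assuming R19 is the declared inline_cache_reg):

  // The matcher placed MachConstantBaseNode at mach_constant_base_node_input(),
  // i.e. between the arguments and the JVMState inputs. The expanded call reuses
  // exactly that slot for loadConLNodes_IC._last: the edge count is unchanged
  // (no JVMState adjustment by -1), and the req edge keeps the inline cache
  // register R19 live across the call.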
......@@ -3638,13 +3633,13 @@ encode %{
// Must be invalid_vtable_index, not nonvirtual_vtable_index.
assert(_vtable_index == Method::invalid_vtable_index, "correct sentinel value");
Register ic_reg = as_Register(Matcher::inline_cache_reg_encode());
AddressLiteral oop = __ allocate_metadata_address((Metadata *)Universe::non_oop_word());
AddressLiteral meta = __ allocate_metadata_address((Metadata *)Universe::non_oop_word());
address virtual_call_oop_addr = __ pc();
__ load_const_from_method_toc(ic_reg, oop, Rtoc);
address virtual_call_meta_addr = __ pc();
__ load_const_from_method_toc(ic_reg, meta, Rtoc);
// CALL to fixup routine. Fixup routine uses ScopeDesc info
// to determine who we intended to call.
__ relocate(virtual_call_Relocation::spec(virtual_call_oop_addr));
__ relocate(virtual_call_Relocation::spec(virtual_call_meta_addr));
emit_call_with_trampoline_stub(_masm, (address)$meth$$method, relocInfo::none);
assert(((MachCallDynamicJavaNode*)this)->ret_addr_offset() == __ offset() - start_offset,
"Fix constant in ret_addr_offset()");
......@@ -3674,6 +3669,7 @@ encode %{
"Fix constant in ret_addr_offset()");
}
#endif
guarantee(0, "Fix handling of toc edge: messes up derived/base pairs.");
Unimplemented(); // ret_addr_offset not yet fixed. Depends on compressed oops (load klass!).
%}
......@@ -3775,16 +3771,14 @@ encode %{
// New call needs all inputs of old call.
// Req...
for (uint i = 0; i < req(); ++i) {
if (i != TypeFunc::ReturnAdr) {
if (i != mach_constant_base_node_input()) {
call->add_req(in(i));
} else {
// put the mtctr where ReturnAdr would be
call->add_req(mtctr);
}
}
// These must be required edges, as the registers are live up to
// the call. Else the constants are handled as kills.
call->add_req(mtctr);
call->add_req(loadConLNodes_Env._last);
call->add_req(loadConLNodes_Toc._last);
......@@ -3818,7 +3812,7 @@ frame %{
// These two registers define part of the calling convention between
// compiled code and the interpreter.
// Inline Cache Register or methodOop for I2C.
// Inline Cache Register or method for I2C.
inline_cache_reg(R19); // R19_method
// Method Oop Register when calling interpreter.
......@@ -6149,8 +6143,8 @@ instruct loadConP_lo(iRegPdst dst, immP_NM src, iRegLdst base) %{
size(4);
ins_encode %{
// TODO: PPC port $archOpcode(ppc64Opcode_ld);
int offset = ra_->C->in_scratch_emit_size() ? 0 : MacroAssembler::largeoffset_si16_si16_lo(_const_toc_offset_hi_node->_const_toc_offset);
__ ld($dst$$Register, offset, $base$$Register);
int offset = ra_->C->in_scratch_emit_size() ? 0 : _const_toc_offset_hi_node->_const_toc_offset;
__ ld($dst$$Register, MacroAssembler::largeoffset_si16_si16_lo(offset), $base$$Register);
%}
ins_pipe(pipe_class_memory);
%}
......@@ -6784,7 +6778,7 @@ instruct cond_add_base(iRegPdst dst, flagsReg crx, iRegPsrc src1) %{
Label done;
__ beq($crx$$CondRegister, done);
__ add($dst$$Register, $src1$$Register, R30);
// TODO PPC port __ endgroup_if_needed(_size == 12);
// TODO PPC port __ endgroup_if_needed(_size == 12);
__ bind(done);
%}
ins_pipe(pipe_class_default);
......
......@@ -172,7 +172,8 @@ ArchDesc::ArchDesc()
_internalOps(cmpstr,hashstr, Form::arena),
_internalMatch(cmpstr,hashstr, Form::arena),
_chainRules(cmpstr,hashstr, Form::arena),
_cisc_spill_operand(NULL) {
_cisc_spill_operand(NULL),
_needs_clone_jvms(false) {
// Initialize the opcode to MatchList table with NULLs
for( int i=0; i<_last_opcode; ++i ) {
......
......@@ -121,6 +121,12 @@ private:
// to access [stack_pointer + offset]
OperandForm *_cisc_spill_operand;
// If a Call node uses $constanttablebase, the matcher adds the
// MachConstantBaseNode and modifies the jvms. If so, JVM states always
// have to be cloned when a node is cloned. Adlc generates
// Compile::needs_clone_jvms() accordingly.
bool _needs_clone_jvms;
// Methods for outputting the DFA
void gen_match(FILE *fp, MatchList &mlist, ProductionState &status, Dict &operands_chained_from);
void chain_rule(FILE *fp, const char *indent, const char *ideal,
......@@ -289,6 +295,7 @@ public:
void addPreHeaderBlocks(FILE *fp_hpp);
void addHeaderBlocks(FILE *fp_hpp);
void addSourceBlocks(FILE *fp_cpp);
void generate_needs_clone_jvms(FILE *fp_cpp);
void generate_adlc_verification(FILE *fp_cpp);
// output declaration of class State
......
......@@ -306,6 +306,7 @@ int main(int argc, char *argv[])
AD.buildInstructMatchCheck(AD._CPP_file._fp); // .cpp
// define methods for machine dependent frame management
AD.buildFrameMethods(AD._CPP_file._fp); // .cpp
AD.generate_needs_clone_jvms(AD._CPP_file._fp);
// do this last:
AD.addPreprocessorChecks(AD._CPP_file._fp); // .cpp
......
......@@ -1842,17 +1842,23 @@ void ArchDesc::defineExpand(FILE *fp, InstructForm *node) {
// There are nodes that don't use $constanttablebase, but still require that it
// is an input to the node. Example: divF_reg_immN, Repl32B_imm on x86_64.
if (node->is_mach_constant() || node->needs_constant_base()) {
fprintf(fp," add_req(C->mach_constant_base_node());\n");
if (node->is_ideal_call() != Form::invalid_type &&
node->is_ideal_call() != Form::JAVA_LEAF) {
fprintf(fp, " // MachConstantBaseNode added in matcher.\n");
_needs_clone_jvms = true;
} else {
fprintf(fp, " add_req(C->mach_constant_base_node());\n");
}
}
fprintf(fp,"\n");
if( node->expands() ) {
fprintf(fp," return result;\n");
fprintf(fp, "\n");
if (node->expands()) {
fprintf(fp, " return result;\n");
} else {
fprintf(fp," return this;\n");
fprintf(fp, " return this;\n");
}
fprintf(fp,"}\n");
fprintf(fp,"\n");
fprintf(fp, "}\n");
fprintf(fp, "\n");
}
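Reconstructed from the fprintf calls above (a sketch of the generated Expand() tail, not copied from an actual ad_ppc file), the difference in output is:

  // Generated for an instruct matched to a non-leaf ideal call:
  // MachConstantBaseNode added in matcher.

  // Generated for every other instruct with is_mach_constant() or needs_constant_base():
  add_req(C->mach_constant_base_node());

followed in both cases by "return result;" for expanding nodes or "return this;" otherwise.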
......@@ -3642,6 +3648,11 @@ char reg_save_policy(const char *calling_convention) {
return callconv;
}
void ArchDesc::generate_needs_clone_jvms(FILE *fp_cpp) {
fprintf(fp_cpp, "bool Compile::needs_clone_jvms() { return %s; }\n\n",
_needs_clone_jvms ? "true" : "false");
}
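For example (reconstructed from the format string, not from a generated file), a port whose .ad file contains a non-leaf call using $constanttablebase gets:

  bool Compile::needs_clone_jvms() { return true; }

and every other port gets the same one-liner returning false.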
//---------------------------generate_assertion_checks-------------------
void ArchDesc::generate_adlc_verification(FILE *fp_cpp) {
fprintf(fp_cpp, "\n");
......
......@@ -1665,7 +1665,15 @@ void ArchDesc::declareClasses(FILE *fp) {
if (instr->needs_constant_base() &&
!instr->is_mach_constant()) { // These inherit the function from MachConstantNode.
fprintf(fp," virtual uint mach_constant_base_node_input() const { return req()-1; }\n");
fprintf(fp," virtual uint mach_constant_base_node_input() const { ");
if (instr->is_ideal_call() != Form::invalid_type &&
instr->is_ideal_call() != Form::JAVA_LEAF) {
// MachConstantBase goes after the arguments, but before the jvms.
fprintf(fp,"assert(tf() && tf()->domain(), \"\"); return tf()->domain()->cnt();");
} else {
fprintf(fp,"return req()-1;");
}
fprintf(fp," }\n");
}
// Allow machine-independent optimization, invert the sense of the IF test
......
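Reconstructed from the fprintf calls (a sketch, not verbatim generated code), the declaration emitted into the generated header is now:

  // instruct matched to a non-leaf ideal call:
  virtual uint mach_constant_base_node_input() const { assert(tf() && tf()->domain(), ""); return tf()->domain()->cnt(); }

  // any other instruct with needs_constant_base() and !is_mach_constant():
  virtual uint mach_constant_base_node_input() const { return req()-1; }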
......@@ -595,6 +595,18 @@ void JVMState::set_map_deep(SafePointNode* map) {
}
}
// Adapt the offsets into the node's input array after adding or removing an edge.
// Prerequisite is that the JVMState is used by only one node.
void JVMState::adapt_position(int delta) {
for (JVMState* jvms = this; jvms != NULL; jvms = jvms->caller()) {
jvms->set_locoff(jvms->locoff() + delta);
jvms->set_stkoff(jvms->stkoff() + delta);
jvms->set_monoff(jvms->monoff() + delta);
jvms->set_scloff(jvms->scloff() + delta);
jvms->set_endoff(jvms->endoff() + delta);
}
}
//=============================================================================
uint CallNode::cmp( const Node &n ) const
{ return _tf == ((CallNode&)n)._tf && _jvms == ((CallNode&)n)._jvms; }
......
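A minimal standalone sketch (plain C++ outside HotSpot; the struct and the numbers are illustrative) of what adapt_position does to the debug-info offsets along the caller chain:

  #include <cstdio>

  struct ToyJVMState {                       // stand-in for JVMState: just the offsets
    int locoff, stkoff, monoff, scloff, endoff;
    ToyJVMState* caller;
    void adapt_position(int delta) {         // mirrors JVMState::adapt_position above
      for (ToyJVMState* j = this; j != nullptr; j = j->caller) {
        j->locoff += delta; j->stkoff += delta; j->monoff += delta;
        j->scloff += delta; j->endoff += delta;
      }
    }
  };

  int main() {
    ToyJVMState caller = {  9, 12, 14, 14, 14, nullptr };
    ToyJVMState callee = { 14, 17, 19, 19, 19, &caller };
    callee.adapt_position(+1);               // one edge was inserted before the debug info
    std::printf("callee.locoff=%d caller.endoff=%d\n", callee.locoff, caller.endoff); // 15 15
    return 0;
  }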
......@@ -299,6 +299,7 @@ public:
JVMState* clone_deep(Compile* C) const; // recursively clones caller chain
JVMState* clone_shallow(Compile* C) const; // retains uncloned caller
void set_map_deep(SafePointNode *map);// reset map for all callers
void adapt_position(int delta); // Adapt offsets into the input array after adding an edge.
#ifndef PRODUCT
void format(PhaseRegAlloc *regalloc, const Node *n, outputStream* st) const;
......@@ -559,9 +560,15 @@ public:
// Are we guaranteed that this node is a safepoint? Not true for leaf calls and
// for some macro nodes whose expansion does not have a safepoint on the fast path.
virtual bool guaranteed_safepoint() { return true; }
// For macro nodes, the JVMState gets modified during expansion, so when cloning
// the node the JVMState must be cloned.
virtual void clone_jvms(Compile* C) { } // default is not to clone
// For macro nodes, the JVMState gets modified during expansion. If calls
// use MachConstantBase, it gets modified during matching. So when cloning
// the node the JVMState must be cloned. Default is not to clone.
virtual void clone_jvms(Compile* C) {
if (C->needs_clone_jvms() && jvms() != NULL) {
set_jvms(jvms()->clone_deep(C));
jvms()->set_map_deep(this);
}
}
// Returns true if the call may modify n
virtual bool may_modify(const TypeOopPtr *t_oop, PhaseTransform *phase);
......
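A hypothetical usage sketch (the call site and variable names are illustrative, not from this patch) of why the deep clone matters once the matcher edits the JVMState in place:

  // Node::clone() copies the node bitwise, so both copies initially share one
  // JVMState; without a deep clone, adapt_position(+1) on one copy would also
  // shift the offsets seen by the other.
  CallNode* copy = call->clone()->as_Call();
  copy->clone_jvms(C);   // deep-clones the jvms chain iff C->needs_clone_jvms()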
......@@ -758,6 +758,8 @@ class Compile : public Phase {
MachConstantBaseNode* mach_constant_base_node();
bool has_mach_constant_base_node() const { return _mach_constant_base_node != NULL; }
// Generated by adlc, true if CallNode requires MachConstantBase.
bool needs_clone_jvms();
// Handy undefined Node
Node* top() const { return _top; }
......
......@@ -1338,12 +1338,24 @@ MachNode *Matcher::match_sfpt( SafePointNode *sfpt ) {
}
// Debug inputs begin just after the last incoming parameter
assert( (mcall == NULL) || (mcall->jvms() == NULL) ||
(mcall->jvms()->debug_start() + mcall->_jvmadj == mcall->tf()->domain()->cnt()), "" );
assert((mcall == NULL) || (mcall->jvms() == NULL) ||
(mcall->jvms()->debug_start() + mcall->_jvmadj == mcall->tf()->domain()->cnt()), "");
// Move the OopMap
msfpt->_oop_map = sfpt->_oop_map;
// Add additional edges.
if (msfpt->mach_constant_base_node_input() != (uint)-1 && !msfpt->is_MachCallLeaf()) {
// For these calls we cannot add MachConstantBase in expand(), as the
// ins are not complete then.
msfpt->ins_req(msfpt->mach_constant_base_node_input(), C->mach_constant_base_node());
if (msfpt->jvms() &&
msfpt->mach_constant_base_node_input() <= msfpt->jvms()->debug_start() + msfpt->_jvmadj) {
// We added an edge before jvms, so we must adapt the position of the ins.
msfpt->jvms()->adapt_position(+1);
}
}
// Registers killed by the call are set in the local scheduling pass
// of Global Code Motion.
return msfpt;
......
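A worked instance of the index arithmetic checked by the assertion above, assuming a call with two outgoing arguments (TypeFunc::Parms == 5, so tf()->domain()->cnt() == 7) and _jvmadj == 0:

  // before: in(5), in(6) are the arguments, jvms->debug_start() == 7,
  //         debug info occupies in(7 ..)
  // mach_constant_base_node_input() == tf()->domain()->cnt() == 7
  // ins_req(7, C->mach_constant_base_node()) pushes the debug info to in(8 ..)
  // 7 <= debug_start() + _jvmadj, hence jvms->adapt_position(+1) moves
  // locoff/stkoff/monoff/scloff/endoff up by one to match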