diff --git a/src/share/vm/opto/callnode.cpp b/src/share/vm/opto/callnode.cpp
index 60a183def71379712164c9d4fafe4c2c081ae660..ce9d353b41b00d7729d8cc8d7a47d2b6ce5ac4df 100644
--- a/src/share/vm/opto/callnode.cpp
+++ b/src/share/vm/opto/callnode.cpp
@@ -1364,7 +1364,7 @@ void AbstractLockNode::set_eliminated() {
 //=============================================================================
 Node *LockNode::Ideal(PhaseGVN *phase, bool can_reshape) {
 
-  // perform any generic optimizations first
+  // perform any generic optimizations first (returns 'this' or NULL)
   Node *result = SafePointNode::Ideal(phase, can_reshape);
 
   // Now see if we can optimize away this lock.  We don't actually
@@ -1372,7 +1372,20 @@ Node *LockNode::Ideal(PhaseGVN *phase, bool can_reshape) {
   // prevents macro expansion from expanding the lock.  Since we don't
   // modify the graph, the value returned from this function is the
   // one computed above.
-  if (EliminateLocks && !is_eliminated()) {
+  if (result == NULL && can_reshape && EliminateLocks && !is_eliminated()) {
+    //
+    // If we are locking an unescaped object, the lock/unlock is unnecessary
+    //
+    ConnectionGraph *cgr = Compile::current()->congraph();
+    PointsToNode::EscapeState es = PointsToNode::GlobalEscape;
+    if (cgr != NULL)
+      es = cgr->escape_state(obj_node(), phase);
+    if (es != PointsToNode::UnknownEscape && es != PointsToNode::GlobalEscape) {
+      // Mark it eliminated to update any counters
+      this->set_eliminated();
+      return result;
+    }
+
     //
     // Try lock coarsening
     //
@@ -1412,8 +1425,10 @@
           int unlocks = 0;
           for (int i = 0; i < lock_ops.length(); i++) {
             AbstractLockNode* lock = lock_ops.at(i);
-            if (lock->Opcode() == Op_Lock) locks++;
-            else unlocks++;
+            if (lock->Opcode() == Op_Lock)
+              locks++;
+            else
+              unlocks++;
             if (Verbose) {
               lock->dump(1);
             }
@@ -1450,7 +1465,7 @@ uint UnlockNode::size_of() const { return sizeof(*this); }
 //=============================================================================
 Node *UnlockNode::Ideal(PhaseGVN *phase, bool can_reshape) {
 
-  // perform any generic optimizations first
+  // perform any generic optimizations first (returns 'this' or NULL)
   Node * result = SafePointNode::Ideal(phase, can_reshape);
 
   // Now see if we can optimize away this unlock.  We don't actually
@@ -1458,66 +1473,18 @@
   // prevents macro expansion from expanding the unlock.  Since we don't
   // modify the graph, the value returned from this function is the
   // one computed above.
-  if (EliminateLocks && !is_eliminated()) {
+  // Escape state is defined after Parse phase.
+  if (result == NULL && can_reshape && EliminateLocks && !is_eliminated()) {
     //
-    // If we are unlocking an unescaped object, the lock/unlock is unnecessary
-    // We can eliminate them if there are no safepoints in the locked region.
+    // If we are unlocking an unescaped object, the lock/unlock is unnecessary.
     //
     ConnectionGraph *cgr = Compile::current()->congraph();
-    if (cgr != NULL && cgr->escape_state(obj_node(), phase) == PointsToNode::NoEscape) {
-      GrowableArray<AbstractLockNode*> lock_ops;
-      LockNode *lock = find_matching_lock(this);
-      if (lock != NULL) {
-        lock_ops.append(this);
-        lock_ops.append(lock);
-        // find other unlocks which pair with the lock we found and add them
-        // to the list
-        Node * box = box_node();
-
-        for (DUIterator_Fast imax, i = box->fast_outs(imax); i < imax; i++) {
-          Node *use = box->fast_out(i);
-          if (use->is_Unlock() && use != this) {
-            UnlockNode *unlock1 = use->as_Unlock();
-            if (!unlock1->is_eliminated()) {
-              LockNode *lock1 = find_matching_lock(unlock1);
-              if (lock == lock1)
-                lock_ops.append(unlock1);
-              else if (lock1 == NULL) {
-                // we can't find a matching lock, we must assume the worst
-                lock_ops.trunc_to(0);
-                break;
-              }
-            }
-          }
-        }
-        if (lock_ops.length() > 0) {
-
-  #ifndef PRODUCT
-          if (PrintEliminateLocks) {
-            int locks = 0;
-            int unlocks = 0;
-            for (int i = 0; i < lock_ops.length(); i++) {
-              AbstractLockNode* lock = lock_ops.at(i);
-              if (lock->Opcode() == Op_Lock) locks++;
-              else unlocks++;
-              if (Verbose) {
-                lock->dump(1);
-              }
-            }
-            tty->print_cr("***Eliminated %d unescaped unlocks and %d unescaped locks", unlocks, locks);
-          }
-  #endif
-
-          // for each of the identified locks, mark them
-          // as eliminatable
-          for (int i = 0; i < lock_ops.length(); i++) {
-            AbstractLockNode* lock = lock_ops.at(i);
-
-            // Mark it eliminated to update any counters
-            lock->set_eliminated();
-          }
-        }
-      }
-    }
+    PointsToNode::EscapeState es = PointsToNode::GlobalEscape;
+    if (cgr != NULL)
+      es = cgr->escape_state(obj_node(), phase);
+    if (es != PointsToNode::UnknownEscape && es != PointsToNode::GlobalEscape) {
+      // Mark it eliminated to update any counters
+      this->set_eliminated();
     }
   }
   return result;
diff --git a/src/share/vm/opto/locknode.cpp b/src/share/vm/opto/locknode.cpp
index 90da8efaa36f7dde72d5ab774a9acfa991f726dc..48bf9ebfefcceb7a789d1c20d5531d8c135f154e 100644
--- a/src/share/vm/opto/locknode.cpp
+++ b/src/share/vm/opto/locknode.cpp
@@ -36,7 +36,8 @@ const RegMask &BoxLockNode::out_RegMask() const {
 
 uint BoxLockNode::size_of() const { return sizeof(*this); }
 
-BoxLockNode::BoxLockNode( int slot ) : Node( Compile::current()->root() ), _slot(slot) {
+BoxLockNode::BoxLockNode( int slot ) : Node( Compile::current()->root() ),
+                                       _slot(slot), _is_eliminated(false) {
   init_class_id(Class_BoxLock);
   init_flags(Flag_rematerialize);
   OptoReg::Name reg = OptoReg::stack2reg(_slot);
diff --git a/src/share/vm/opto/locknode.hpp b/src/share/vm/opto/locknode.hpp
index 6b1a8883ca4cf3b7b3b55c468804ea38a4343dd0..e765a6c5d692c021d6073f584f698eb959724f05 100644
--- a/src/share/vm/opto/locknode.hpp
+++ b/src/share/vm/opto/locknode.hpp
@@ -27,6 +27,7 @@ class BoxLockNode : public Node {
 public:
   const int _slot;
   RegMask _inmask;
+  bool _is_eliminated;    // indicates this lock was safely eliminated
 
   BoxLockNode( int lock );
   virtual int Opcode() const;
@@ -42,6 +43,10 @@ public:
 
   static OptoReg::Name stack_slot(Node* box_node);
 
+  bool is_eliminated() { return _is_eliminated; }
+  // mark lock as eliminated.
+  void set_eliminated() { _is_eliminated = true; }
+
 #ifndef PRODUCT
   virtual void format( PhaseRegAlloc *, outputStream *st ) const;
   virtual void dump_spec(outputStream *st) const { st->print(" Lock %d",_slot); }
diff --git a/src/share/vm/opto/macro.cpp b/src/share/vm/opto/macro.cpp
index 4a8ffd1076f2cf29360e72c4b151b2f0c1e44848..014280aaa0a993f6905ad3fbd852c01cef015306 100644
--- a/src/share/vm/opto/macro.cpp
+++ b/src/share/vm/opto/macro.cpp
@@ -828,43 +828,102 @@ void PhaseMacroExpand::expand_allocate_array(AllocateArrayNode *alloc) {
 // Note: The membar's associated with the lock/unlock are currently not
 //       eliminated.  This should be investigated as a future enhancement.
 //
-void PhaseMacroExpand::eliminate_locking_node(AbstractLockNode *alock) {
-  Node* mem = alock->in(TypeFunc::Memory);
+bool PhaseMacroExpand::eliminate_locking_node(AbstractLockNode *alock) {
+
+  if (!alock->is_eliminated()) {
+    return false;
+  }
+  // Mark the box lock as eliminated if all corresponding locks are eliminated
+  // to construct correct debug info.
+  BoxLockNode* box = alock->box_node()->as_BoxLock();
+  if (!box->is_eliminated()) {
+    bool eliminate = true;
+    for (DUIterator_Fast imax, i = box->fast_outs(imax); i < imax; i++) {
+      Node *lck = box->fast_out(i);
+      if (lck->is_Lock() && !lck->as_AbstractLock()->is_eliminated()) {
+        eliminate = false;
+        break;
+      }
+    }
+    if (eliminate)
+      box->set_eliminated();
+  }
+
+  #ifndef PRODUCT
+  if (PrintEliminateLocks) {
+    if (alock->is_Lock()) {
+      tty->print_cr("++++ Eliminating: %d Lock", alock->_idx);
+    } else {
+      tty->print_cr("++++ Eliminating: %d Unlock", alock->_idx);
+    }
+  }
+  #endif
+
+  Node* mem = alock->in(TypeFunc::Memory);
+  Node* ctrl = alock->in(TypeFunc::Control);
+
+  extract_call_projections(alock);
+  // There are 2 projections from the lock.  The lock node will
+  // be deleted when its last use is subsumed below.
+  assert(alock->outcnt() == 2 &&
+         _fallthroughproj != NULL &&
+         _memproj_fallthrough != NULL,
+         "Unexpected projections from Lock/Unlock");
+
+  Node* fallthroughproj = _fallthroughproj;
+  Node* memproj_fallthrough = _memproj_fallthrough;
 
   // The memory projection from a lock/unlock is RawMem
   // The input to a Lock is merged memory, so extract its RawMem input
   // (unless the MergeMem has been optimized away.)
   if (alock->is_Lock()) {
-    if (mem->is_MergeMem())
-      mem = mem->as_MergeMem()->in(Compile::AliasIdxRaw);
+    // Search for MemBarAcquire node and delete it also.
+    MemBarNode* membar = fallthroughproj->unique_ctrl_out()->as_MemBar();
+    assert(membar != NULL && membar->Opcode() == Op_MemBarAcquire, "");
+    Node* ctrlproj = membar->proj_out(TypeFunc::Control);
+    Node* memproj = membar->proj_out(TypeFunc::Memory);
+    _igvn.hash_delete(ctrlproj);
+    _igvn.subsume_node(ctrlproj, fallthroughproj);
+    _igvn.hash_delete(memproj);
+    _igvn.subsume_node(memproj, memproj_fallthrough);
   }
 
-  extract_call_projections(alock);
-  // There are 2 projections from the lock.  The lock node will
-  // be deleted when its last use is subsumed below.
-  assert(alock->outcnt() == 2 && _fallthroughproj != NULL &&
-         _memproj_fallthrough != NULL, "Unexpected projections from Lock/Unlock");
-  _igvn.hash_delete(_fallthroughproj);
-  _igvn.subsume_node(_fallthroughproj, alock->in(TypeFunc::Control));
-  _igvn.hash_delete(_memproj_fallthrough);
-  _igvn.subsume_node(_memproj_fallthrough, mem);
-  return;
+  // Search for MemBarRelease node and delete it also.
+  if (alock->is_Unlock() && ctrl != NULL && ctrl->is_Proj() &&
+      ctrl->in(0)->is_MemBar()) {
+    MemBarNode* membar = ctrl->in(0)->as_MemBar();
+    assert(membar->Opcode() == Op_MemBarRelease &&
+           mem->is_Proj() && membar == mem->in(0), "");
+    _igvn.hash_delete(fallthroughproj);
+    _igvn.subsume_node(fallthroughproj, ctrl);
+    _igvn.hash_delete(memproj_fallthrough);
+    _igvn.subsume_node(memproj_fallthrough, mem);
+    fallthroughproj = ctrl;
+    memproj_fallthrough = mem;
+    ctrl = membar->in(TypeFunc::Control);
+    mem = membar->in(TypeFunc::Memory);
+  }
+
+  _igvn.hash_delete(fallthroughproj);
+  _igvn.subsume_node(fallthroughproj, ctrl);
+  _igvn.hash_delete(memproj_fallthrough);
+  _igvn.subsume_node(memproj_fallthrough, mem);
+  return true;
 }
 
 
 //------------------------------expand_lock_node----------------------
 void PhaseMacroExpand::expand_lock_node(LockNode *lock) {
 
+  if (eliminate_locking_node(lock)) {
+    return;
+  }
+
   Node* ctrl = lock->in(TypeFunc::Control);
   Node* mem = lock->in(TypeFunc::Memory);
   Node* obj = lock->obj_node();
   Node* box = lock->box_node();
-  Node *flock = lock->fastlock_node();
-
-  if (lock->is_eliminated()) {
-    eliminate_locking_node(lock);
-    return;
-  }
+  Node* flock = lock->fastlock_node();
 
   // Make the merge point
   Node *region = new (C, 3) RegionNode(3);
@@ -913,17 +972,15 @@
 
 //------------------------------expand_unlock_node----------------------
 void PhaseMacroExpand::expand_unlock_node(UnlockNode *unlock) {
-  Node *ctrl = unlock->in(TypeFunc::Control);
+  if (eliminate_locking_node(unlock)) {
+    return;
+  }
+
+  Node* ctrl = unlock->in(TypeFunc::Control);
   Node* mem = unlock->in(TypeFunc::Memory);
   Node* obj = unlock->obj_node();
   Node* box = unlock->box_node();
 
-
-  if (unlock->is_eliminated()) {
-    eliminate_locking_node(unlock);
-    return;
-  }
-
   // No need for a null check on unlock
 
   // Make the merge point
diff --git a/src/share/vm/opto/macro.hpp b/src/share/vm/opto/macro.hpp
index 20dd65c40b7ea25630b55f153bca8d704830997c..a34094dbf00eccf52153321e56f821dce50dc5c4 100644
--- a/src/share/vm/opto/macro.hpp
+++ b/src/share/vm/opto/macro.hpp
@@ -78,7 +78,7 @@ private:
                               Node* length,
                               const TypeFunc* slow_call_type,
                               address slow_call_address);
-  void eliminate_locking_node(AbstractLockNode *alock);
+  bool eliminate_locking_node(AbstractLockNode *alock);
   void expand_lock_node(LockNode *lock);
   void expand_unlock_node(UnlockNode *unlock);
 
diff --git a/src/share/vm/opto/output.cpp b/src/share/vm/opto/output.cpp
index 3c06b135f9a229097ff8accc2188341b5b7805e7..b1f2ca12155e683b874c80a109a0f4b9242c0247 100644
--- a/src/share/vm/opto/output.cpp
+++ b/src/share/vm/opto/output.cpp
@@ -882,7 +882,8 @@ void Compile::Process_OopMap_Node(MachNode *mach, int current_offset) {
       }
 
       OptoReg::Name box_reg = BoxLockNode::stack_slot(box_node);
-      monarray->append(new MonitorValue(scval, Location::new_stk_loc(Location::normal,_regalloc->reg2offset(box_reg))));
+      Location basic_lock = Location::new_stk_loc(Location::normal,_regalloc->reg2offset(box_reg));
+      monarray->append(new MonitorValue(scval, basic_lock, box_node->as_BoxLock()->is_eliminated()));
     }
 
     // We dump the object pool first, since deoptimization reads it in first.
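Note (illustration, not part of the patch): with this change a Lock/Unlock pair is marked eliminated whenever the ConnectionGraph built by escape analysis reports that the locked object does not globally escape, and macro expansion then removes the lock, its projections, and the associated MemBarAcquire/MemBarRelease while recording the eliminated BoxLock in the monitor debug info. A minimal Java sketch of the kind of synchronization this targets is below; the class and method names are made up, and it assumes C2 is run with escape analysis and lock elimination enabled (e.g. -XX:+DoEscapeAnalysis -XX:+EliminateLocks).

    // Illustrative only: 'buf' is allocated locally and never leaves
    // sumLengths(), so escape analysis can report it as non-escaping and
    // the Lock/Unlock nodes for the synchronized StringBuffer operations
    // become candidates for elimination once the callees are inlined.
    class LockElisionExample {
        static int sumLengths(String[] args) {
            StringBuffer buf = new StringBuffer();  // non-escaping allocation
            for (String s : args) {
                buf.append(s);                       // locks on 'buf'
            }
            return buf.length();                     // locks on 'buf'
        }
    }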