Unverified commit d936a661 authored by Andy Ayers and committed by GitHub

JIT: contained memory safety analysis fixes and improvements (#64843)

Fixes a couple of issues exposed by forward sub, where containment analysis
was allowing unsafe reordering of operands.

Closes #64828.

Generalize the safety check so that a store to a local not live into a handler
can be reordered with respect to a node that may cause exceptions. Happily this leads
to almost uniformly better code despite the more stringent checking added above.

Add a workaround for the late callbacks into the containment checker made on
unlinked nodes. Assume these are always safe.

Also add extra checks, a fast-path early out, and an assertion in MakeSrcContained.
Parent 5b1b341f
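
For reference, the failure mode in the first Fuzzlyn test added below can be distilled to the following sketch (a simplified illustration, not one of the checked-in tests): if lowering contains s_4 as a memory operand of the AND, the load of s_4 is deferred until the AND executes, which is after the call to M7 that overwrites s_4, so release builds returned -1 where debug builds returned 0.

// Simplified illustration of the unsafe containment (names mirror the first
// Fuzzlyn test below; the ref-local indirection is elided).
public class UnsafeContainmentExample
{
    static long s_4;

    public static int Main()
    {
        // s_4 must be read before M7() runs; containing it as a memory operand
        // of the AND would defer that read until after the call.
        int vr3 = (int)(s_4 & ~M7());
        return vr3 == 0 ? 100 : -1;
    }

    static short M7()
    {
        s_4 = long.MaxValue; // overwrites the value the AND should have observed
        return 0;
    }
}
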
......@@ -44,9 +44,19 @@ static void assertIsContainableHWIntrinsicOp(Lowering* lowering,
// spillage and for isUsedFromMemory contained nodes, in the case where the register allocator decided to not
// allocate a register in the first place).
GenTree* node = containedNode;
bool supportsRegOptional = false;
bool isContainable = lowering->TryGetContainableHWIntrinsicOp(containingNode, &node, &supportsRegOptional);
GenTree* node = containedNode;
// Now that we are doing full memory containment safety checks, we can't properly check nodes that are not
// linked into an evaluation tree, like the special nodes we create in genHWIntrinsic.
// So, just say those are ok.
//
if (node->gtNext == nullptr)
{
return;
}
bool supportsRegOptional = false;
bool isContainable = lowering->TryGetContainableHWIntrinsicOp(containingNode, &node, &supportsRegOptional);
assert(isContainable || supportsRegOptional);
assert(node == containedNode);
......
......@@ -42,6 +42,22 @@ void Lowering::MakeSrcContained(GenTree* parentNode, GenTree* childNode) const
assert(childNode->canBeContained());
childNode->SetContained();
assert(childNode->isContained());
#ifdef DEBUG
if (IsContainableMemoryOp(childNode))
{
// Verify caller of this method checked safety.
//
const bool isSafeToContainMem = IsSafeToContainMem(parentNode, childNode);
if (!isSafeToContainMem)
{
JITDUMP("** Unsafe mem containment of [%06u] in [%06u}, comp->dspTreeID(childNode), "
"comp->dspTreeID(parentNode)\n");
assert(isSafeToContainMem);
}
}
#endif
}
//------------------------------------------------------------------------
......@@ -79,8 +95,15 @@ bool Lowering::CheckImmedAndMakeContained(GenTree* parentNode, GenTree* childNod
// Return value:
// true if it is safe to make childNode a contained memory operand.
//
bool Lowering::IsSafeToContainMem(GenTree* parentNode, GenTree* childNode)
bool Lowering::IsSafeToContainMem(GenTree* parentNode, GenTree* childNode) const
{
// Quick early-out for unary cases
//
if (childNode->gtNext == parentNode)
{
return true;
}
m_scratchSideEffects.Clear();
m_scratchSideEffects.AddNode(comp, childNode);
......@@ -96,6 +119,40 @@ bool Lowering::IsSafeToContainMem(GenTree* parentNode, GenTree* childNode)
return true;
}
//------------------------------------------------------------------------
// IsSafeToContainMem: Checks for conflicts between childNode and grandparentNode
// and returns 'true' iff memory operand childNode can be contained in grandparentNode
//
// Arguments:
// grandparentNode - any non-leaf node
// parentNode - parent of `childNode` and an input to `grandParentNode`
// childNode - some node that is an input to `parentNode`
//
// Return value:
// true if it is safe to make childNode a contained memory operand.
//
bool Lowering::IsSafeToContainMem(GenTree* grandparentNode, GenTree* parentNode, GenTree* childNode) const
{
m_scratchSideEffects.Clear();
m_scratchSideEffects.AddNode(comp, childNode);
for (GenTree* node = childNode->gtNext; node != grandparentNode; node = node->gtNext)
{
if (node == parentNode)
{
continue;
}
const bool strict = true;
if (m_scratchSideEffects.InterferesWith(comp, node, strict))
{
return false;
}
}
return true;
}
//------------------------------------------------------------------------
// LowerNode: this is the main entry point for Lowering.
//
......
......@@ -560,14 +560,17 @@ public:
bool IsContainableImmed(GenTree* parentNode, GenTree* childNode) const;
// Return true if 'node' is a containable memory op.
bool IsContainableMemoryOp(GenTree* node)
bool IsContainableMemoryOp(GenTree* node) const
{
return m_lsra->isContainableMemoryOp(node);
}
#ifdef FEATURE_HW_INTRINSICS
// Tries to get a containable node for a given HWIntrinsic
bool TryGetContainableHWIntrinsicOp(GenTreeHWIntrinsic* containingNode, GenTree** pNode, bool* supportsRegOptional);
bool TryGetContainableHWIntrinsicOp(GenTreeHWIntrinsic* containingNode,
GenTree** pNode,
bool* supportsRegOptional,
GenTreeHWIntrinsic* transparentParentNode = nullptr);
#endif // FEATURE_HW_INTRINSICS
static void TransformUnusedIndirection(GenTreeIndir* ind, Compiler* comp, BasicBlock* block);
......@@ -585,7 +588,10 @@ private:
// Checks for memory conflicts in the instructions between childNode and parentNode, and returns true if childNode
// can be contained.
bool IsSafeToContainMem(GenTree* parentNode, GenTree* childNode);
bool IsSafeToContainMem(GenTree* parentNode, GenTree* childNode) const;
// Similar to above, but allows bypassing a "transparent" parent.
bool IsSafeToContainMem(GenTree* grandparentNode, GenTree* parentNode, GenTree* childNode) const;
inline LIR::Range& BlockRange() const
{
......@@ -609,10 +615,10 @@ private:
}
}
LinearScan* m_lsra;
unsigned vtableCallTemp; // local variable we use as a temp for vtable calls
SideEffectSet m_scratchSideEffects; // SideEffectSet used for IsSafeToContainMem and isRMWIndirCandidate
BasicBlock* m_block;
LinearScan* m_lsra;
unsigned vtableCallTemp; // local variable we use as a temp for vtable calls
mutable SideEffectSet m_scratchSideEffects; // SideEffectSet used for IsSafeToContainMem and isRMWIndirCandidate
BasicBlock* m_block;
};
#endif // _LOWER_H_
......@@ -4723,7 +4723,7 @@ void Lowering::ContainCheckDivOrMod(GenTreeOp* node)
#endif
// divisor can be an r/m, but the memory indirection must be of the same size as the divide
if (IsContainableMemoryOp(divisor) && (divisor->TypeGet() == node->TypeGet()))
if (IsContainableMemoryOp(divisor) && (divisor->TypeGet() == node->TypeGet()) && IsSafeToContainMem(node, divisor))
{
MakeSrcContained(node, divisor);
}
......@@ -4853,7 +4853,7 @@ void Lowering::ContainCheckCast(GenTreeCast* node)
// U8 -> R8 conversion requires that the operand be in a register.
if (srcType != TYP_ULONG)
{
if (IsContainableMemoryOp(castOp) || castOp->IsCnsNonZeroFltOrDbl())
if ((IsContainableMemoryOp(castOp) && IsSafeToContainMem(node, castOp)) || castOp->IsCnsNonZeroFltOrDbl())
{
MakeSrcContained(node, castOp);
}
......@@ -4949,7 +4949,7 @@ void Lowering::ContainCheckCompare(GenTreeOp* cmp)
// we can treat the MemoryOp as contained.
if (op1Type == op2Type)
{
if (IsContainableMemoryOp(op1))
if (IsContainableMemoryOp(op1) && IsSafeToContainMem(cmp, op1))
{
MakeSrcContained(cmp, op1);
}
......@@ -5250,7 +5250,7 @@ void Lowering::ContainCheckBoundsChk(GenTreeBoundsChk* node)
if (node->GetIndex()->TypeGet() == node->GetArrayLength()->TypeGet())
{
if (IsContainableMemoryOp(other))
if (IsContainableMemoryOp(other) && IsSafeToContainMem(node, other))
{
MakeSrcContained(node, other);
}
......@@ -5279,7 +5279,7 @@ void Lowering::ContainCheckIntrinsic(GenTreeOp* node)
{
GenTree* op1 = node->gtGetOp1();
if (IsContainableMemoryOp(op1) || op1->IsCnsNonZeroFltOrDbl())
if ((IsContainableMemoryOp(op1) && IsSafeToContainMem(node, op1)) || op1->IsCnsNonZeroFltOrDbl())
{
MakeSrcContained(node, op1);
}
......@@ -5365,6 +5365,7 @@ void Lowering::ContainCheckSIMD(GenTreeSIMD* simdNode)
// [In/Out] pNode - The node to check and potentially replace with the containable node
// [Out] supportsRegOptional - On return, this will be true if 'containingNode' supports regOptional operands
// otherwise, false.
// [In] transparentParentNode - optional "transparent" intrinsic parent like CreateScalarUnsafe
//
// Return Value:
// true if 'node' is containable by containingNode; otherwise, false.
......@@ -5377,7 +5378,8 @@ void Lowering::ContainCheckSIMD(GenTreeSIMD* simdNode)
//
bool Lowering::TryGetContainableHWIntrinsicOp(GenTreeHWIntrinsic* containingNode,
GenTree** pNode,
bool* supportsRegOptional)
bool* supportsRegOptional,
GenTreeHWIntrinsic* transparentParentNode)
{
assert(containingNode != nullptr);
assert((pNode != nullptr) && (*pNode != nullptr));
......@@ -5800,7 +5802,32 @@ bool Lowering::TryGetContainableHWIntrinsicOp(GenTreeHWIntrinsic* containingNode
if (!node->OperIsHWIntrinsic())
{
return supportsGeneralLoads && (IsContainableMemoryOp(node) || node->IsCnsNonZeroFltOrDbl());
bool canBeContained = false;
if (supportsGeneralLoads)
{
if (IsContainableMemoryOp(node))
{
// Code motion safety checks
//
if (transparentParentNode != nullptr)
{
canBeContained = IsSafeToContainMem(containingNode, transparentParentNode, node);
}
else
{
canBeContained = IsSafeToContainMem(containingNode, node);
}
}
else if (node->IsCnsNonZeroFltOrDbl())
{
// Always safe.
//
canBeContained = true;
}
}
return canBeContained;
}
// TODO-XArch: Update this to be table driven, if possible.
......@@ -5821,7 +5848,7 @@ bool Lowering::TryGetContainableHWIntrinsicOp(GenTreeHWIntrinsic* containingNode
GenTree* op1 = hwintrinsic->Op(1);
bool op1SupportsRegOptional = false;
if (!TryGetContainableHWIntrinsicOp(containingNode, &op1, &op1SupportsRegOptional))
if (!TryGetContainableHWIntrinsicOp(containingNode, &op1, &op1SupportsRegOptional, hwintrinsic))
{
return false;
}
......@@ -6287,7 +6314,7 @@ void Lowering::ContainCheckHWIntrinsic(GenTreeHWIntrinsic* node)
MakeSrcContained(node, op2);
}
if (IsContainableMemoryOp(op1))
if (IsContainableMemoryOp(op1) && IsSafeToContainMem(node, op1))
{
MakeSrcContained(node, op1);
......
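
The new transparentParentNode parameter used in the hunk above covers patterns like the sketch below (illustrative only, not part of this change's tests): the load of s_f feeds a CreateScalarUnsafe wrapper, but the scalar add is the node that would actually consume the memory operand if it is contained, so the interference walk has to run from the load all the way up to the add while skipping the transparent wrapper itself.

// Illustrative sketch of a "transparent" intrinsic parent (names assumed):
// CreateScalarUnsafe sits between the float load and the AddScalar that could
// fold that load as a memory operand.
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;

public class TransparentParentExample
{
    static float s_f = 1.0f;

    public static int Main()
    {
        if (!Sse.IsSupported)
        {
            return 100;
        }

        Vector128<float> a = Vector128.Create(2.0f);
        // The load of s_f may be folded into the scalar add even though it
        // feeds CreateScalarUnsafe first; the safety walk must check every
        // node between the load and the AddScalar, skipping the wrapper.
        Vector128<float> r = Sse.AddScalar(a, Vector128.CreateScalarUnsafe(s_f));
        return r.GetElement(0) == 3.0f ? 100 : -1;
    }
}
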
......@@ -121,7 +121,18 @@ public:
inline bool WritesAnyLocation() const
{
return (m_flags & (ALIAS_WRITES_ADDRESSABLE_LOCATION | ALIAS_WRITES_LCL_VAR)) != 0;
if ((m_flags & ALIAS_WRITES_ADDRESSABLE_LOCATION) != 0)
{
return true;
}
if ((m_flags & ALIAS_WRITES_LCL_VAR) != 0)
{
LclVarDsc* const varDsc = m_compiler->lvaGetDesc(LclNum());
return varDsc->IsAlwaysAliveInMemory();
}
return false;
}
};
......
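
The WritesAnyLocation change above implements the commit message's point about handler liveness: a write flagged ALIAS_WRITES_LCL_VAR now only counts as writing an observable location when the local is always alive in memory. The sketch below (illustrative only, not one of this change's tests) shows why a local that is live into a handler is the case that must stay conservative: the handler can observe whether the store happened before the throwing access.

public class HandlerLivenessExample
{
    public static int Main()
    {
        return ReadWithFallback(null, 3) == 6 ? 100 : -1;
    }

    static int ReadWithFallback(int[] a, int i)
    {
        int tmp = 0;
        try
        {
            tmp = i * 2;  // store to a local that is live into the handler below
            return a[i];  // may throw NullReferenceException
        }
        catch
        {
            // The handler reads tmp, so the store above must stay ordered before
            // the throwing access. If tmp were not live into any handler (and not
            // otherwise forced to live in memory), the two could be reordered.
            return tmp;
        }
    }
}
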
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// Generated by Fuzzlyn v1.5 on 2022-02-04 12:51:20
// Run on X86 Windows
// Seed: 5821979800164656837
// Reduced from 60.0 KiB to 0.3 KiB in 00:01:44
// Debug: Outputs 0
// Release: Outputs -1
public class Program
{
public static IRT s_rt;
public static long s_4;
public static int Main()
{
s_rt = new C();
int vr3 = (int)(s_4 & ~M7());
s_rt.WriteLine(vr3);
return vr3 + 100;
}
public static short M7()
{
ref long var1 = ref s_4;
var1 = 9223372036854775807L;
return 0;
}
}
public interface IRT
{
void WriteLine<T>(T value);
}
public class C : IRT
{
public void WriteLine<T>(T value)
{
System.Console.WriteLine(value);
}
}
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<DebugType />
<Optimize>True</Optimize>
</PropertyGroup>
<ItemGroup>
<Compile Include="$(MSBuildProjectName).cs" />
</ItemGroup>
</Project>
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// Generated by Fuzzlyn v1.5 on 2022-02-04 13:20:27
// Run on X64 Windows
// Seed: 7242107031351932946
// Reduced from 319.7 KiB to 0.7 KiB in 00:09:00
// Debug:
// Release: Outputs 0
public class C0
{
public byte F1;
}
public struct S0
{
public C0 F0;
public S0(C0 f0): this()
{
F0 = f0;
}
}
public class C1
{
public long F0;
public C1(long f0)
{
F0 = f0;
}
}
public class Program
{
public static S0 s_25 = new S0(new C0());
public static C1[] s_28;
public static int Main()
{
int result = 100;
s_28 = new C1[]{new C1(0)};
var vr3 = new C1(-1);
if (vr3.F0 >= (M35(vr3) & 0))
{
System.Console.WriteLine(vr3.F0);
result = -1;
}
return result;
}
public static byte M35(C1 argThis)
{
argThis.F0 = s_28[0].F0;
return s_25.F0.F1;
}
}
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<DebugType />
<Optimize>True</Optimize>
</PropertyGroup>
<ItemGroup>
<Compile Include="$(MSBuildProjectName).cs" />
</ItemGroup>
</Project>