Unverified · Commit 61abe4b7 · Authored by: J Jakob Botsch Nielsen · Committed by: GitHub

JIT: Disallow 0-sized block ops (#83224)

Avoid creating 0-sized block ops in the importer (but keep the side effects of the addresses). Add asserts that we don't create these nodes and get rid of some downstream checks for the case.

Fix #12807

Also merge the CEE_INITBLK and CEE_CPBLK importer cases. As part of this, fix a bug where we weren't setting GTF_EXCEPT on the destination in the CEE_INITBLK case.
Parent commit: 00858e42
......@@ -7340,6 +7340,7 @@ public:
void Initialize(ClassLayout* layout)
{
assert(OperIsBlk(OperGet()) && ((layout != nullptr) || OperIs(GT_STORE_DYN_BLK)));
assert((layout == nullptr) || (layout->GetSize() != 0));
m_layout = layout;
gtBlkOpKind = BlkOpKindInvalid;
......
......@@ -10641,56 +10641,47 @@ void Compiler::impImportBlockCode(BasicBlock* block)
}
case CEE_INITBLK:
case CEE_CPBLK:
op3 = impPopStack().val; // Size
op2 = impPopStack().val; // Value
op2 = impPopStack().val; // Value / Src addr
op1 = impPopStack().val; // Dst addr
if (op3->IsCnsIntOrI())
{
size = (unsigned)op3->AsIntConCommon()->IconValue();
op1 = new (this, GT_BLK) GenTreeBlk(GT_BLK, TYP_STRUCT, op1, typGetBlkLayout(size));
op1 = gtNewBlkOpNode(op1, op2, (prefixFlags & PREFIX_VOLATILE) != 0);
}
else
{
if (!op2->IsIntegralConst(0))
if (op3->IsIntegralConst(0))
{
op2 = gtNewOperNode(GT_INIT_VAL, TYP_INT, op2);
}
#ifdef TARGET_64BIT
// STORE_DYN_BLK takes a native uint size as it turns into call to memset.
op3 = gtNewCastNode(TYP_I_IMPL, op3, /* fromUnsigned */ true, TYP_U_IMPL);
#endif
if ((op1->gtFlags & GTF_SIDE_EFFECT) != 0)
{
impAppendTree(gtUnusedValNode(op1), CHECK_SPILL_ALL, impCurStmtDI);
}
op1 = new (this, GT_STORE_DYN_BLK) GenTreeStoreDynBlk(op1, op2, op3);
size = 0;
if ((op2->gtFlags & GTF_SIDE_EFFECT) != 0)
{
impAppendTree(gtUnusedValNode(op2), CHECK_SPILL_ALL, impCurStmtDI);
}
if ((prefixFlags & PREFIX_VOLATILE) != 0)
{
op1->gtFlags |= GTF_BLK_VOLATILE;
break;
}
}
goto SPILL_APPEND;
case CEE_CPBLK:
op3 = impPopStack().val; // Size
op2 = impPopStack().val; // Src addr
op1 = impPopStack().val; // Dst addr
if (op3->IsCnsIntOrI())
{
size = static_cast<unsigned>(op3->AsIntConCommon()->IconValue());
op1 = gtNewBlockVal(op1, size);
op2 = gtNewBlockVal(op2, size);
op1 = gtNewBlkOpNode(op1, op2, (prefixFlags & PREFIX_VOLATILE) != 0);
op1 = gtNewBlockVal(op1, size);
op2 = opcode == CEE_INITBLK ? op2 : gtNewBlockVal(op2, size);
op1 = gtNewBlkOpNode(op1, op2, (prefixFlags & PREFIX_VOLATILE) != 0);
}
else
{
op2 = gtNewOperNode(GT_IND, TYP_STRUCT, op2);
if (opcode == CEE_INITBLK)
{
if (!op2->IsIntegralConst(0))
{
op2 = gtNewOperNode(GT_INIT_VAL, TYP_INT, op2);
}
}
else
{
op2 = gtNewOperNode(GT_IND, TYP_STRUCT, op2);
}
#ifdef TARGET_64BIT
// STORE_DYN_BLK takes a native uint size as it turns into call to memcpy.
......
......@@ -211,6 +211,8 @@ void MorphInitBlockHelper::PrepareDst()
m_blockSize = genTypeSize(m_dst);
}
assert(m_blockSize != 0);
#if defined(DEBUG)
if (m_comp->verbose)
{
......@@ -481,12 +483,6 @@ void MorphInitBlockHelper::TryInitFieldByField()
LclVarDsc* destLclVar = m_dstVarDsc;
unsigned blockSize = m_blockSize;
if (blockSize == 0)
{
JITDUMP(" size is zero.\n");
return;
}
if (destLclVar->IsAddressExposed() && destLclVar->lvContainsHoles)
{
JITDUMP(" dest is address exposed and contains holes.\n");
......@@ -647,11 +643,6 @@ void MorphInitBlockHelper::TryInitFieldByField()
//
void MorphInitBlockHelper::TryPrimitiveInit()
{
if (m_blockSize == 0)
{
return;
}
if (m_src->IsIntegralConst(0) && (m_dstVarDsc != nullptr) && (genTypeSize(m_dstVarDsc) == m_blockSize))
{
var_types lclVarType = m_dstVarDsc->TypeGet();
......@@ -1101,7 +1092,7 @@ void MorphCopyBlockHelper::MorphStructCases()
//
void MorphCopyBlockHelper::TryPrimitiveCopy()
{
if (!m_dst->TypeIs(TYP_STRUCT) || (m_blockSize == 0))
if (!m_dst->TypeIs(TYP_STRUCT))
{
return;
}
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
To comment, please register.