// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.

/*XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX                                                                           XX
XX                               Lower                                       XX
XX                                                                           XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
*/

#ifndef _LOWER_H_
#define _LOWER_H_

#include "compiler.h"
#include "phase.h"
#include "lsra.h"
#include "sideeffects.h"

class Lowering final : public Phase
{
public:
    inline Lowering(Compiler* compiler, LinearScanInterface* lsra)
        : Phase(compiler, PHASE_LOWERING), vtableCallTemp(BAD_VAR_NUM)
    {
        m_lsra = (LinearScan*)lsra;
        assert(m_lsra);
    }
    virtual PhaseStatus DoPhase() override;

    // This variant of LowerRange is called from outside of the main Lowering pass,
    // so it creates its own instance of Lowering to do so.
    void LowerRange(BasicBlock* block, LIR::ReadOnlyRange& range)
    {
        Lowering lowerer(comp, m_lsra);
        lowerer.m_block = block;

        lowerer.LowerRange(range);
    }

private:
    // LowerRange handles new code that is introduced by or after Lowering.
    void LowerRange(LIR::ReadOnlyRange& range)
    {
        for (GenTree* newNode : range)
        {
            LowerNode(newNode);
        }
    }
    void LowerRange(GenTree* firstNode, GenTree* lastNode)
    {
        LIR::ReadOnlyRange range(firstNode, lastNode);
        LowerRange(range);
    }

    // ContainCheckRange handles new code that is introduced by or after Lowering,
    // and that is known to be already in Lowered form.
    void ContainCheckRange(LIR::ReadOnlyRange& range)
    {
        for (GenTree* newNode : range)
        {
            ContainCheckNode(newNode);
        }
    }
    void ContainCheckRange(GenTree* firstNode, GenTree* lastNode)
    {
        LIR::ReadOnlyRange range(firstNode, lastNode);
        ContainCheckRange(range);
    }

    void InsertTreeBeforeAndContainCheck(GenTree* insertionPoint, GenTree* tree)
    {
        LIR::Range range = LIR::SeqTree(comp, tree);
        ContainCheckRange(range);
        BlockRange().InsertBefore(insertionPoint, std::move(range));
    }

    void ContainCheckNode(GenTree* node);

    void ContainCheckDivOrMod(GenTreeOp* node);
    void ContainCheckReturnTrap(GenTreeOp* node);
    void ContainCheckArrOffset(GenTreeArrOffs* node);
    void ContainCheckLclHeap(GenTreeOp* node);
    void ContainCheckRet(GenTreeUnOp* ret);
    void ContainCheckJTrue(GenTreeOp* node);

    void ContainCheckBitCast(GenTree* node);
    void ContainCheckCallOperands(GenTreeCall* call);
    void ContainCheckIndir(GenTreeIndir* indirNode);
    void ContainCheckStoreIndir(GenTreeStoreInd* indirNode);
    void ContainCheckMul(GenTreeOp* node);
    void ContainCheckShiftRotate(GenTreeOp* node);
    void ContainCheckStoreLoc(GenTreeLclVarCommon* storeLoc) const;
    void ContainCheckCast(GenTreeCast* node);
    void ContainCheckCompare(GenTreeOp* node);
    void ContainCheckBinary(GenTreeOp* node);
    void ContainCheckBoundsChk(GenTreeBoundsChk* node);
#ifdef TARGET_XARCH
    void ContainCheckFloatBinary(GenTreeOp* node);
    void ContainCheckIntrinsic(GenTreeOp* node);
#endif // TARGET_XARCH
#ifdef FEATURE_SIMD
    void ContainCheckSIMD(GenTreeSIMD* simdNode);
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
    void ContainCheckHWIntrinsicAddr(GenTreeHWIntrinsic* node, GenTree* addr);
    void ContainCheckHWIntrinsic(GenTreeHWIntrinsic* node);
#endif // FEATURE_HW_INTRINSICS

#ifdef DEBUG
    static void CheckCallArg(GenTree* arg);
    static void CheckCall(GenTreeCall* call);
    static void CheckNode(Compiler* compiler, GenTree* node);
    static bool CheckBlock(Compiler* compiler, BasicBlock* block);
#endif // DEBUG

    void LowerBlock(BasicBlock* block);
    GenTree* LowerNode(GenTree* node);

    bool IsInvariantInRange(GenTree* node, GenTree* endExclusive);

    // ------------------------------
    // Call Lowering
    // ------------------------------
    void LowerCall(GenTree* call);
    void LowerCFGCall(GenTreeCall* call);
    void MoveCFGCallArg(GenTreeCall* call, GenTree* node);
#ifndef TARGET_64BIT
    GenTree* DecomposeLongCompare(GenTree* cmp);
#endif
    GenTree* OptimizeConstCompare(GenTree* cmp);
    GenTree* LowerCompare(GenTree* cmp);
    GenTree* LowerJTrue(GenTreeOp* jtrue);
    GenTreeCC* LowerNodeCC(GenTree* node, GenCondition condition);
    void LowerJmpMethod(GenTree* jmp);
    void LowerRet(GenTreeUnOp* ret);
    void LowerStoreLocCommon(GenTreeLclVarCommon* lclVar);
    void LowerRetStruct(GenTreeUnOp* ret);
    void LowerRetSingleRegStructLclVar(GenTreeUnOp* ret);
    void LowerCallStruct(GenTreeCall* call);
    void LowerStoreSingleRegCallStruct(GenTreeBlk* store);
#if !defined(WINDOWS_AMD64_ABI)
    GenTreeLclVar* SpillStructCallResult(GenTreeCall* call) const;
#endif // WINDOWS_AMD64_ABI
    GenTree* LowerDelegateInvoke(GenTreeCall* call);
    GenTree* LowerIndirectNonvirtCall(GenTreeCall* call);
    GenTree* LowerDirectCall(GenTreeCall* call);
    GenTree* LowerNonvirtPinvokeCall(GenTreeCall* call);
    GenTree* LowerTailCallViaJitHelper(GenTreeCall* callNode, GenTree* callTarget);
    void LowerFastTailCall(GenTreeCall* callNode);
    void RehomeArgForFastTailCall(unsigned int lclNum,
                                  GenTree*     insertTempBefore,
                                  GenTree*     lookForUsesStart,
                                  GenTreeCall* callNode);
    void InsertProfTailCallHook(GenTreeCall* callNode, GenTree* insertionPoint);
    GenTree* LowerVirtualVtableCall(GenTreeCall* call);
    GenTree* LowerVirtualStubCall(GenTreeCall* call);
    void LowerArgsForCall(GenTreeCall* call);
    void ReplaceArgWithPutArgOrBitcast(GenTree** ppChild, GenTree* newNode);
    GenTree* NewPutArg(GenTreeCall* call, GenTree* arg, CallArg* callArg, var_types type);
    void LowerArg(GenTreeCall* call, CallArg* callArg, bool late);
#if defined(TARGET_ARMARCH) || defined(TARGET_LOONGARCH64)
    GenTree* LowerFloatArg(GenTree** pArg, CallArg* callArg);
    GenTree* LowerFloatArgReg(GenTree* arg, regNumber regNum);
#endif

    void InsertPInvokeCallProlog(GenTreeCall* call);
    void InsertPInvokeCallEpilog(GenTreeCall* call);
    void InsertPInvokeMethodProlog();
    void InsertPInvokeMethodEpilog(BasicBlock* returnBB DEBUGARG(GenTree* lastExpr));
    GenTree* SetGCState(int cns);
    GenTree* CreateReturnTrapSeq();
    enum FrameLinkAction
    {
        PushFrame,
        PopFrame
    };
    GenTree* CreateFrameLinkUpdate(FrameLinkAction);
    GenTree* AddrGen(ssize_t addr);
    GenTree* AddrGen(void* addr);

    GenTree* Ind(GenTree* tree, var_types type = TYP_I_IMPL)
    {
        return comp->gtNewOperNode(GT_IND, type, tree);
    }

    GenTree* PhysReg(regNumber reg, var_types type = TYP_I_IMPL)
    {
        return comp->gtNewPhysRegNode(reg, type);
    }

    GenTree* ThisReg(GenTreeCall* call)
    {
        return PhysReg(comp->codeGen->genGetThisArgReg(call), TYP_REF);
    }

    GenTree* Offset(GenTree* base, unsigned offset)
    {
        var_types resultType = (base->TypeGet() == TYP_REF) ? TYP_BYREF : base->TypeGet();
        return new (comp, GT_LEA) GenTreeAddrMode(resultType, base, nullptr, 0, offset);
    }

    GenTree* OffsetByIndex(GenTree* base, GenTree* index)
    {
        var_types resultType = (base->TypeGet() == TYP_REF) ? TYP_BYREF : base->TypeGet();
        return new (comp, GT_LEA) GenTreeAddrMode(resultType, base, index, 0, 0);
    }

    GenTree* OffsetByIndexWithScale(GenTree* base, GenTree* index, unsigned scale)
    {
        var_types resultType = (base->TypeGet() == TYP_REF) ? TYP_BYREF : base->TypeGet();
        return new (comp, GT_LEA) GenTreeAddrMode(resultType, base, index, scale, 0);
    }
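
    // Illustrative sketch (an assumed usage, not called anywhere in this header): these helpers
    // compose to build simple address modes and loads in LIR. Loading a pointer-sized slot at
    // byte offset 'slotOffs' from 'base' could look roughly like:
    //
    //     GenTree* slotAddr = Offset(base, slotOffs); // GT_LEA  [base + slotOffs]
    //     GenTree* slotLoad = Ind(slotAddr);          // GT_IND  TYP_I_IMPL
    //     BlockRange().InsertAfter(base, slotAddr, slotLoad);
    //
    // Call lowering (e.g. LowerVirtualVtableCall) builds its indirections from helpers like these.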

    // Replace the definition of the given use with a lclVar, allocating a new temp
    // if 'tempNum' is BAD_VAR_NUM. Returns the LclVar node.
    GenTreeLclVar* ReplaceWithLclVar(LIR::Use& use, unsigned tempNum = BAD_VAR_NUM)
    {
        GenTree* oldUseNode = use.Def();
        if ((oldUseNode->gtOper != GT_LCL_VAR) || (tempNum != BAD_VAR_NUM))
        {
            GenTree* assign;
            use.ReplaceWithLclVar(comp, tempNum, &assign);

            GenTree* newUseNode = use.Def();
            ContainCheckRange(oldUseNode->gtNext, newUseNode);

            // We need to lower the LclVar and assignment since there may be certain
            // types or scenarios, such as TYP_SIMD12, that need special handling

            LowerNode(assign);
            LowerNode(newUseNode);

            return newUseNode->AsLclVar();
        }
        return oldUseNode->AsLclVar();
    }
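
    // A rough sketch of the transformation ReplaceWithLclVar performs (simplified LIR):
    //
    //     t1 = <def>               t1 = <def>
    //     ...              ==>     STORE_LCL_VAR<tmp>(t1)
    //     use(t1)                  t2 = LCL_VAR<tmp>
    //                              use(t2)
    //
    // The new store and LCL_VAR are then contain-checked and lowered by the code above.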

    // Return true if this call target is within range of a pc-rel call on the machine.
    bool IsCallTargetInRange(void* addr);

#if defined(TARGET_XARCH)
    GenTree* PreferredRegOptionalOperand(GenTree* tree);

    // ------------------------------------------------------------------
    // SetRegOptionalForBinOp - Indicates which operands of a binary op
    // have an optional register requirement. The xarch instruction set
    // allows either op1 or op2 of a binary operation (e.g. add, mul) to
    // be a memory operand. This routine tells the register allocator
    // which of the operands optionally require a register. LSRA might
    // not allocate a register to the RefTypeUse positions of such
    // operands if it is beneficial; in that case codegen will treat them
    // as memory operands.
    //
    // Arguments:
    //     tree             - GenTree of a binary operation.
    //     isSafeToMarkOp1  - True if it's safe to mark op1 as register optional.
    //     isSafeToMarkOp2  - True if it's safe to mark op2 as register optional.
    //
    // Notes:
    //     The caller is expected to obtain isSafeToMarkOp1 and isSafeToMarkOp2
    //     by calling IsSafeToContainMem.
    //
    //     On xarch, at most one of the operands will be marked as reg optional,
    //     even when both operands could be considered register optional.
    void SetRegOptionalForBinOp(GenTree* tree, bool isSafeToMarkOp1, bool isSafeToMarkOp2)
    {
        assert(GenTree::OperIsBinary(tree->OperGet()));

        GenTree* const op1 = tree->gtGetOp1();
        GenTree* const op2 = tree->gtGetOp2();

        const unsigned operatorSize = genTypeSize(tree->TypeGet());

        const bool op1Legal =
            isSafeToMarkOp1 && tree->OperIsCommutative() && (operatorSize == genTypeSize(op1->TypeGet()));
        const bool op2Legal = isSafeToMarkOp2 && (operatorSize == genTypeSize(op2->TypeGet()));

        GenTree* regOptionalOperand = nullptr;
        if (op1Legal)
        {
            regOptionalOperand = op2Legal ? PreferredRegOptionalOperand(tree) : op1;
        }
        else if (op2Legal)
        {
            regOptionalOperand = op2;
        }
        if (regOptionalOperand != nullptr)
        {
            regOptionalOperand->SetRegOptional();
        }
    }
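
    // For example (illustrative only): for a TYP_INT ADD(V01, V02) whose op2 is marked reg
    // optional, LSRA may decide not to allocate a register for V02's use, and codegen can then
    // emit something like
    //
    //     add  eax, dword ptr [rsp+0x18]   ; op2 consumed directly from its stack home
    //
    // instead of loading V02 into a register first.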
#endif // defined(TARGET_XARCH)

    // Per tree node member functions
    void LowerStoreIndirCommon(GenTreeStoreInd* ind);
    void LowerIndir(GenTreeIndir* ind);
    void LowerStoreIndir(GenTreeStoreInd* node);
    GenTree* LowerAdd(GenTreeOp* node);
    GenTree* LowerMul(GenTreeOp* mul);
    GenTree* LowerBinaryArithmetic(GenTreeOp* binOp);
    bool LowerUnsignedDivOrMod(GenTreeOp* divMod);
    GenTree* LowerConstIntDivOrMod(GenTree* node);
    GenTree* LowerSignedDivOrMod(GenTree* node);
    void LowerBlockStore(GenTreeBlk* blkNode);
    void LowerBlockStoreCommon(GenTreeBlk* blkNode);
    void ContainBlockStoreAddress(GenTreeBlk* blkNode, unsigned size, GenTree* addr);
    void LowerPutArgStk(GenTreePutArgStk* tree);

    bool TryCreateAddrMode(GenTree* addr, bool isContainable, GenTree* parent);

    bool TryTransformStoreObjAsStoreInd(GenTreeBlk* blkNode);

    GenTree* LowerSwitch(GenTree* node);
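    // A rough sketch of what TryLowerSwitchToBitTest is for: when the switch covers a small,
    // dense range of case values and every case jumps to one of only two blocks, the jump table
    // can be replaced by testing the corresponding bit of a constant mask and branching on the
    // result.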
    bool TryLowerSwitchToBitTest(
        BasicBlock* jumpTable[], unsigned jumpCount, unsigned targetCount, BasicBlock* bbSwitch, GenTree* switchValue);

    void LowerCast(GenTree* node);

#if !CPU_LOAD_STORE_ARCH
    bool IsRMWIndirCandidate(GenTree* operand, GenTree* storeInd);
    bool IsBinOpInRMWStoreInd(GenTree* tree);
    bool IsRMWMemOpRootedAtStoreInd(GenTree* storeIndTree, GenTree** indirCandidate, GenTree** indirOpSource);
    bool LowerRMWMemOp(GenTreeIndir* storeInd);
#endif

    void WidenSIMD12IfNecessary(GenTreeLclVarCommon* node);
    bool CheckMultiRegLclVar(GenTreeLclVar* lclNode, const ReturnTypeDesc* retTypeDesc);
    void LowerStoreLoc(GenTreeLclVarCommon* tree);
    GenTree* LowerArrElem(GenTree* node);
    void LowerRotate(GenTree* tree);
    void LowerShift(GenTreeOp* shift);
#ifdef FEATURE_SIMD
    void LowerSIMD(GenTreeSIMD* simdNode);
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
    GenTree* LowerHWIntrinsic(GenTreeHWIntrinsic* node);
    void LowerHWIntrinsicCC(GenTreeHWIntrinsic* node, NamedIntrinsic newIntrinsicId, GenCondition condition);
    GenTree* LowerHWIntrinsicCmpOp(GenTreeHWIntrinsic* node, genTreeOps cmpOp);
    void LowerHWIntrinsicCndSel(GenTreeHWIntrinsic* node);
    GenTree* LowerHWIntrinsicCreate(GenTreeHWIntrinsic* node);
    GenTree* LowerHWIntrinsicDot(GenTreeHWIntrinsic* node);
#if defined(TARGET_XARCH)
    void LowerFusedMultiplyAdd(GenTreeHWIntrinsic* node);
    void LowerHWIntrinsicToScalar(GenTreeHWIntrinsic* node);
    void LowerHWIntrinsicGetElement(GenTreeHWIntrinsic* node);
    GenTree* LowerHWIntrinsicWithElement(GenTreeHWIntrinsic* node);
    GenTree* TryLowerAndOpToResetLowestSetBit(GenTreeOp* andNode);
    GenTree* TryLowerAndOpToExtractLowestSetBit(GenTreeOp* andNode);
    GenTree* TryLowerAndOpToAndNot(GenTreeOp* andNode);
    void LowerBswapOp(GenTreeOp* node);
#elif defined(TARGET_ARM64)
    bool IsValidConstForMovImm(GenTreeHWIntrinsic* node);
    void LowerHWIntrinsicFusedMultiplyAddScalar(GenTreeHWIntrinsic* node);
    void LowerModPow2(GenTree* node);
    GenTree* LowerAddForPossibleContainment(GenTreeOp* node);
#endif // !TARGET_XARCH && !TARGET_ARM64
#endif // FEATURE_HW_INTRINSICS

    //----------------------------------------------------------------------------------------------
    // TryRemoveCastIfPresent: Removes op if it is a cast operation and the size of its input is at
    //                         least the size of expectedType
    //
    //  Arguments:
    //     expectedType - The expected type of the cast operation input if it is to be removed
    //     op           - The tree to remove if it is a cast op whose input is at least the size of expectedType
    //
    //  Returns:
    //     op if it was not a cast node or if its input is not at least the size of expectedType;
    //     otherwise, the underlying operand that was being cast
    GenTree* TryRemoveCastIfPresent(var_types expectedType, GenTree* op)
    {
        if (!op->OperIs(GT_CAST))
        {
            return op;
        }

        GenTree* castOp = op->AsCast()->CastOp();

        if (genTypeSize(castOp->gtType) >= genTypeSize(expectedType))
        {
            BlockRange().Remove(op);
            return castOp;
        }

        return op;
    }
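
    // For example, with expectedType == TYP_INT and op == CAST(int <- long) of a TYP_LONG value,
    // the 8-byte source already covers the expected 4 bytes, so the CAST node is removed from the
    // block range and the TYP_LONG value is returned in its place.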

    // Utility functions
public:
    static bool IndirsAreEquivalent(GenTree* pTreeA, GenTree* pTreeB);

    // Return true if 'childNode' is an immediate that can be contained by the 'parentNode'
    // (i.e. folded into an instruction), for example, one that is small enough and non-relocatable.
    bool IsContainableImmed(GenTree* parentNode, GenTree* childNode) const;

    // Return true if 'node' is a containable memory op.
    bool IsContainableMemoryOp(GenTree* node) const
    {
        return m_lsra->isContainableMemoryOp(node);
    }
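
    // Containment means the operand is not evaluated into its own register; the parent consumes
    // it directly as part of its own instruction. For example (illustrative), an indirection
    // contained by an add on xarch can be emitted as
    //
    //     add  rax, qword ptr [rcx]
    //
    // rather than as a separate load followed by a register-register add.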

#ifdef TARGET_ARM64
    bool IsContainableBinaryOp(GenTree* parentNode, GenTree* childNode) const;
#endif // TARGET_ARM64

#ifdef FEATURE_HW_INTRINSICS
    // Tries to get a containable node for a given HWIntrinsic
    bool TryGetContainableHWIntrinsicOp(GenTreeHWIntrinsic* containingNode,
                                        GenTree**           pNode,
                                        bool*               supportsRegOptional,
                                        GenTreeHWIntrinsic* transparentParentNode = nullptr);
#endif // FEATURE_HW_INTRINSICS

    static void TransformUnusedIndirection(GenTreeIndir* ind, Compiler* comp, BasicBlock* block);

private:
    static bool NodesAreEquivalentLeaves(GenTree* candidate, GenTree* storeInd);

    bool AreSourcesPossiblyModifiedLocals(GenTree* addr, GenTree* base, GenTree* index);

    // Makes 'childNode' contained in the 'parentNode'
    void MakeSrcContained(GenTree* parentNode, GenTree* childNode) const;

    // Checks whether 'childNode' is a containable immediate and, if so, makes it contained in 'parentNode'
    bool CheckImmedAndMakeContained(GenTree* parentNode, GenTree* childNode);

    // Checks for memory conflicts in the instructions between childNode and parentNode, and returns true if childNode
    // can be contained.
    bool IsSafeToContainMem(GenTree* parentNode, GenTree* childNode) const;

    // Similar to above, but allows bypassing a "transparent" parent.
    bool IsSafeToContainMem(GenTree* grandparentNode, GenTree* parentNode, GenTree* childNode) const;

    inline LIR::Range& BlockRange() const
    {
        return LIR::AsRange(m_block);
    }

    // Any tracked lclVar accessed by a LCL_FLD or STORE_LCL_FLD should be marked doNotEnregister.
    // This method checks that it is so marked, asserting in the DEBUG case if it is not; in the
    // non-DEBUG case (asserts disabled) it sets the flag so that we don't generate bad code.
    // This ensures that the local's value is valid on-stack as expected for a *LCL_FLD.
    void verifyLclFldDoNotEnregister(unsigned lclNum)
    {
        LclVarDsc* varDsc = comp->lvaGetDesc(lclNum);
        // Do a couple of simple checks before setting lvDoNotEnregister.
        // This may not cover all cases in 'isRegCandidate()' but we don't want to
        // do an expensive check here. For non-candidates it is not harmful to set lvDoNotEnregister.
        if (varDsc->lvTracked && !varDsc->lvDoNotEnregister)
        {
            assert(!m_lsra->isRegCandidate(varDsc));
            comp->lvaSetVarDoNotEnregister(lclNum DEBUG_ARG(DoNotEnregisterReason::LocalField));
        }
    }

    LinearScan*           m_lsra;
    unsigned              vtableCallTemp;       // local variable we use as a temp for vtable calls
    mutable SideEffectSet m_scratchSideEffects; // SideEffectSet used for IsSafeToContainMem and isRMWIndirCandidate
    BasicBlock*           m_block;
};

#endif // _LOWER_H_