author     Carol Eidt <carol.eidt@microsoft.com>   2017-07-31 17:41:13 -0700
committer  Carol Eidt <carol.eidt@microsoft.com>   2017-08-16 11:37:16 -0700
commit     0ad49b141f1d06c5e83ec58fa7f21c7d2f3714d3
tree       717327f4a7f0fabc9d1a7c83681504818d6d01f5
parent     89d8be12677a5d395d1ec7e62874a4476ea5d4d6
Move Containment Analysis to Lowering
- Ensure that the only changes to child nodes are the specification of srcCandidates (i.e. the register requirements of the parent/consumer).
- Since this now occurs before redoing dataflow, we don't have complete information on what lclVars are containable. To mitigate this:
  - In `TreeNodeInfoInit()`, if a lclVar is marked `isRegOptional` but is now untracked, change it to be contained.
  - Mark decomposed/promoted fields of long params as `lvDoNotEnregister`.
- Ensure that any new code added after `Lowering` is analyzed as necessary.
- When a value becomes unused, it is no longer contained. (This is needed now because liveness runs after containment analysis.)
- Add dumping of nodes deleted as unused.
- Need to set byteable regs, as needed on contained RMW memory ops.
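To make the commit message concrete, here is a minimal, self-contained C++ sketch of the pattern it describes. The Node, ContainCheckNode, and ContainCheckRange definitions below are simplified stand-ins, not the real GenTree/LIR machinery from src/jit: containment is decided while walking a lowered range, a consumer marks containable memory operands as contained, a value that has become unused must not stay contained (liveness now runs after this analysis), and any range of nodes introduced by or after Lowering gets the same per-node check.

// Hedged sketch with simplified stand-in types; NOT the real coreclr JIT API.
#include <cstdio>
#include <vector>

struct Node
{
    const char*        name;
    std::vector<Node*> operands;
    bool               isMemoryOp;  // e.g. an indirection or a containable local
    bool               isUnused;    // the value has no consumer
    bool               isContained; // folded into the consumer's encoding
};

// Per-node containment check (plays the role of ContainCheckNode): decide
// which operands this consumer can fold into its own encoding.
void ContainCheckNode(Node* node)
{
    for (Node* op : node->operands)
    {
        // Memory operands are containable, but a value that has become
        // unused must not remain contained.
        op->isContained = op->isMemoryOp && !op->isUnused;
    }
}

// Range form (plays the role of ContainCheckRange): code introduced by or
// after Lowering, already in lowered form, gets the same analysis.
void ContainCheckRange(std::vector<Node*>& range)
{
    for (Node* n : range)
    {
        ContainCheckNode(n);
    }
}

int main()
{
    Node load{"IND", {}, true, false, false};
    Node add{"ADD", {&load}, false, false, false};
    std::vector<Node*> range{&load, &add};
    ContainCheckRange(range);
    std::printf("IND contained under ADD: %s\n", load.isContained ? "yes" : "no");
    return 0;
}

Running the sketch reports that the indirection is folded into its consumer, which is the effect the real ContainCheckNode/ContainCheckRange helpers declared in this header are responsible for.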
Diffstat (limited to 'src/jit/lower.h')
-rw-r--r--  src/jit/lower.h | 108
1 file changed, 82 insertions(+), 26 deletions(-)
diff --git a/src/jit/lower.h b/src/jit/lower.h
index f09df7a836..ebee64117b 100644
--- a/src/jit/lower.h
+++ b/src/jit/lower.h
@@ -47,35 +47,77 @@ public:
static void getCastDescription(GenTreePtr treeNode, CastInfo* castInfo);
+ // This variant of LowerRange is called from outside of the main Lowering pass,
+ // so it creates its own instance of Lowering to do so.
+ void LowerRange(BasicBlock* block, LIR::ReadOnlyRange& range)
+ {
+ Lowering lowerer(comp, m_lsra);
+ lowerer.m_block = block;
+
+ lowerer.LowerRange(range);
+ }
+
+private:
// LowerRange handles new code that is introduced by or after Lowering.
- void LowerRange(LIR::Range&& range)
+ void LowerRange(LIR::ReadOnlyRange& range)
{
for (GenTree* newNode : range)
{
LowerNode(newNode);
}
}
+ void LowerRange(GenTree* firstNode, GenTree* lastNode)
+ {
+ LIR::ReadOnlyRange range(firstNode, lastNode);
+ LowerRange(range);
+ }
+
+ // ContainCheckRange handles new code that is introduced by or after Lowering,
+ // and that is known to be already in Lowered form.
+ void ContainCheckRange(LIR::ReadOnlyRange& range)
+ {
+ for (GenTree* newNode : range)
+ {
+ ContainCheckNode(newNode);
+ }
+ }
+ void ContainCheckRange(GenTree* firstNode, GenTree* lastNode)
+ {
+ LIR::ReadOnlyRange range(firstNode, lastNode);
+ ContainCheckRange(range);
+ }
+
+ void InsertTreeBeforeAndContainCheck(GenTree* insertionPoint, GenTree* tree)
+ {
+ LIR::Range range = LIR::SeqTree(comp, tree);
+ ContainCheckRange(range);
+ BlockRange().InsertBefore(insertionPoint, std::move(range));
+ }
+
+ void ContainCheckNode(GenTree* node);
-private:
void ContainCheckDivOrMod(GenTreeOp* node);
void ContainCheckReturnTrap(GenTreeOp* node);
void ContainCheckArrOffset(GenTreeArrOffs* node);
void ContainCheckLclHeap(GenTreeOp* node);
void ContainCheckRet(GenTreeOp* node);
- void ContainCheckBinary(GenTreeOp* node);
+ void ContainCheckJTrue(GenTreeOp* node);
+
+ void ContainCheckCallOperands(GenTreeCall* call);
+ void ContainCheckIndir(GenTreeIndir* indirNode);
+ void ContainCheckStoreIndir(GenTreeIndir* indirNode);
void ContainCheckMul(GenTreeOp* node);
void ContainCheckShiftRotate(GenTreeOp* node);
void ContainCheckStoreLoc(GenTreeLclVarCommon* storeLoc);
- void ContainCheckIndir(GenTreeIndir* indirNode);
void ContainCheckCast(GenTreeCast* node);
void ContainCheckCompare(GenTreeOp* node);
+ void ContainCheckBinary(GenTreeOp* node);
void ContainCheckBoundsChk(GenTreeBoundsChk* node);
#ifdef _TARGET_XARCH_
void ContainCheckFloatBinary(GenTreeOp* node);
void ContainCheckIntrinsic(GenTreeOp* node);
#endif // _TARGET_XARCH_
#ifdef FEATURE_SIMD
- void ContainCheckJTrue(GenTreeOp* node);
void ContainCheckSIMD(GenTreeSIMD* simdNode);
#endif // FEATURE_SIMD
@@ -153,6 +195,21 @@ private:
return new (comp, GT_LEA) GenTreeAddrMode(resultType, base, index, 0, 0);
}
+ // Replace the definition of the given use with a lclVar, allocating a new temp
+ // if 'tempNum' is BAD_VAR_NUM.
+ unsigned ReplaceWithLclVar(LIR::Use& use, unsigned tempNum = BAD_VAR_NUM)
+ {
+ GenTree* oldUseNode = use.Def();
+ if ((oldUseNode->gtOper != GT_LCL_VAR) || (tempNum != BAD_VAR_NUM))
+ {
+ unsigned newLclNum = use.ReplaceWithLclVar(comp, m_block->getBBWeight(comp), tempNum);
+ GenTree* newUseNode = use.Def();
+ ContainCheckRange(oldUseNode->gtNext, newUseNode);
+ return newLclNum;
+ }
+ return oldUseNode->AsLclVarCommon()->gtLclNum;
+ }
+
// returns true if the tree can use the read-modify-write memory instruction form
bool isRMWRegOper(GenTreePtr tree);
@@ -236,12 +293,12 @@ private:
int GetOperandSourceCount(GenTree* node);
int GetIndirSourceCount(GenTreeIndir* indirTree);
+ void HandleFloatVarArgs(GenTreeCall* call, GenTree* argNode, bool* callHasFloatRegArgs);
void TreeNodeInfoInitStoreLoc(GenTree* tree);
void TreeNodeInfoInitReturn(GenTree* tree);
void TreeNodeInfoInitShiftRotate(GenTree* tree);
- void TreeNodeInfoInitPutArgReg(
- GenTreeUnOp* node, regNumber argReg, TreeNodeInfo& info, bool isVarArgs, bool* callHasFloatRegArgs);
+ void TreeNodeInfoInitPutArgReg(GenTreeUnOp* node);
void TreeNodeInfoInitCall(GenTreeCall* call);
void TreeNodeInfoInitCmp(GenTreePtr tree);
void TreeNodeInfoInitStructArg(GenTreePtr structArg);
@@ -251,36 +308,37 @@ private:
void TreeNodeInfoInitStoreLoc(GenTreeLclVarCommon* tree);
void TreeNodeInfoInitIndir(GenTreeIndir* indirTree);
void TreeNodeInfoInitGCWriteBarrier(GenTree* tree);
-#if !CPU_LOAD_STORE_ARCH
- bool TreeNodeInfoInitIfRMWMemOp(GenTreePtr storeInd);
-#endif
+ void TreeNodeInfoInitCast(GenTree* tree);
+
+#if defined(_TARGET_XARCH_)
+ void TreeNodeInfoInitMul(GenTreePtr tree);
+ void SetContainsAVXFlags(bool isFloatingPointType = true, unsigned sizeOfSIMDVector = 0);
+#endif // defined(_TARGET_XARCH_)
+
#ifdef FEATURE_SIMD
- void TreeNodeInfoInitSIMD(GenTree* tree);
+ void TreeNodeInfoInitSIMD(GenTreeSIMD* tree);
#endif // FEATURE_SIMD
- void TreeNodeInfoInitCast(GenTree* tree);
+
+ void TreeNodeInfoInitPutArgStk(GenTreePutArgStk* argNode);
#ifdef _TARGET_ARM64_
void LowerPutArgStk(GenTreePutArgStk* argNode, fgArgTabEntryPtr info);
- void TreeNodeInfoInitPutArgStk(GenTreePutArgStk* argNode, fgArgTabEntryPtr info);
#endif // _TARGET_ARM64_
#ifdef _TARGET_ARM_
void LowerPutArgStk(GenTreePutArgStk* argNode, fgArgTabEntryPtr info);
- void TreeNodeInfoInitPutArgStk(GenTreePutArgStk* argNode, fgArgTabEntryPtr info);
#endif // _TARGET_ARM64_
-#ifdef FEATURE_PUT_STRUCT_ARG_STK
void LowerPutArgStk(GenTreePutArgStk* tree);
- void TreeNodeInfoInitPutArgStk(GenTreePutArgStk* tree);
#ifdef _TARGET_ARM_
- void TreeNodeInfoInitPutArgSplit(GenTreePutArgSplit* tree, TreeNodeInfo& info, fgArgTabEntryPtr argInfo);
+ void TreeNodeInfoInitPutArgSplit(GenTreePutArgSplit* tree);
#endif
-#endif // FEATURE_PUT_STRUCT_ARG_STK
void TreeNodeInfoInitLclHeap(GenTree* tree);
void DumpNodeInfoMap();
// Per tree node member functions
- void LowerStoreInd(GenTree* node);
+ void LowerStoreIndir(GenTreeIndir* node);
GenTree* LowerAdd(GenTree* node);
- GenTree* LowerUnsignedDivOrMod(GenTreeOp* divMod);
+ bool LowerUnsignedDivOrMod(GenTreeOp* divMod);
+ GenTree* LowerConstIntDivOrMod(GenTree* node);
GenTree* LowerSignedDivOrMod(GenTree* node);
void LowerBlockStore(GenTreeBlk* blkNode);
@@ -290,11 +348,6 @@ private:
GenTree* LowerSwitch(GenTree* node);
void LowerCast(GenTree* node);
-#if defined(_TARGET_XARCH_)
- void TreeNodeInfoInitMul(GenTreePtr tree);
- void SetContainsAVXFlags(bool isFloatingPointType = true, unsigned sizeOfSIMDVector = 0);
-#endif // defined(_TARGET_XARCH_)
-
#if !CPU_LOAD_STORE_ARCH
bool IsRMWIndirCandidate(GenTree* operand, GenTree* storeInd);
bool IsBinOpInRMWStoreInd(GenTreePtr tree);
@@ -307,6 +360,9 @@ private:
GenTree* LowerArrElem(GenTree* node);
void LowerRotate(GenTree* tree);
void LowerShift(GenTreeOp* shift);
+#ifdef FEATURE_SIMD
+ void LowerSIMD(GenTreeSIMD* simdNode);
+#endif // FEATURE_SIMD
// Utility functions
void MorphBlkIntoHelperCall(GenTreePtr pTree, GenTreePtr treeStmt);
@@ -325,7 +381,7 @@ private:
bool IsContainableImmed(GenTree* parentNode, GenTree* childNode);
// Return true if 'node' is a containable memory op.
- bool IsContainableMemoryOp(GenTree* node, bool useTracked);
+ bool IsContainableMemoryOp(GenTree* node);
// Makes 'childNode' contained in the 'parentNode'
void MakeSrcContained(GenTreePtr parentNode, GenTreePtr childNode);
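
As a usage illustration of the insertion helpers added above (InsertTreeBeforeAndContainCheck and the first/last-node form of ContainCheckRange), here is a hedged, self-contained sketch that uses a std::list as a stand-in for the block's LIR range; the types and the "everything inserted is containable" policy are hypothetical, not the JIT's actual behavior. It demonstrates the flow the diff establishes: sequence the new nodes, run containment analysis over exactly that sub-range, then splice the nodes into the block, so that code created after the main Lowering pass is never left unanalyzed.

// Hedged sketch with hypothetical stand-in types; NOT the real LIR::Range API.
#include <cstdio>
#include <iterator>
#include <list>
#include <string>

struct Node
{
    std::string name;
    bool        isContained;
};

using BlockRange = std::list<Node>;

// Analogue of ContainCheckRange(firstNode, lastNode): analyze only the
// freshly created nodes, leaving the rest of the block untouched.
void ContainCheckRange(BlockRange::iterator first, BlockRange::iterator last)
{
    for (auto it = first; it != last; ++it)
    {
        // Placeholder policy for the sketch: treat every new node as containable.
        it->isContained = true;
    }
}

// Analogue of InsertTreeBeforeAndContainCheck: sequence the new nodes,
// contain-check them, then splice them in before the insertion point.
void InsertBeforeAndContainCheck(BlockRange& block, BlockRange::iterator insertionPoint,
                                 BlockRange newNodes)
{
    ContainCheckRange(newNodes.begin(), newNodes.end());
    block.splice(insertionPoint, newNodes);
}

int main()
{
    BlockRange block{{"LCL_VAR", false}, {"RETURN", false}};
    auto retIt = std::next(block.begin());
    InsertBeforeAndContainCheck(block, retIt, BlockRange{{"CNS_INT", false}, {"ADD", false}});
    for (const Node& n : block)
    {
        std::printf("%s%s\n", n.name.c_str(), n.isContained ? " (contained)" : "");
    }
    return 0;
}

The real ReplaceWithLclVar helper in this header follows the same discipline: after replacing a use's definition with a lclVar, it contain-checks only the nodes between the old definition's gtNext and the new definition.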