summaryrefslogtreecommitdiff
path: root/src/jit/gentree.cpp
diff options
context:
space:
mode:
Diffstat (limited to 'src/jit/gentree.cpp')
-rw-r--r--src/jit/gentree.cpp1153
1 files changed, 881 insertions, 272 deletions
diff --git a/src/jit/gentree.cpp b/src/jit/gentree.cpp
index 9503206874..8aa255f520 100644
--- a/src/jit/gentree.cpp
+++ b/src/jit/gentree.cpp
@@ -299,6 +299,7 @@ void GenTree::InitNodeSize()
GenTree::s_gtNodeSizes[GT_FTN_ADDR] = TREE_NODE_SZ_LARGE;
GenTree::s_gtNodeSizes[GT_BOX] = TREE_NODE_SZ_LARGE;
GenTree::s_gtNodeSizes[GT_INDEX] = TREE_NODE_SZ_LARGE;
+ GenTree::s_gtNodeSizes[GT_INDEX_ADDR] = TREE_NODE_SZ_LARGE;
GenTree::s_gtNodeSizes[GT_ARR_BOUNDS_CHECK] = TREE_NODE_SZ_LARGE;
#ifdef FEATURE_SIMD
GenTree::s_gtNodeSizes[GT_SIMD_CHK] = TREE_NODE_SZ_LARGE;
@@ -610,42 +611,6 @@ void Compiler::fgWalkAllTreesPre(fgWalkPreFn* visitor, void* pCallBackData)
}
}
-// ------------------------------------------------------------------------------------------
-// gtClearReg: Sets the register to the "no register assignment" value, depending upon
-// the type of the node, and whether it fits any of the special cases for register pairs
-// or multi-reg call nodes.
-//
-// Arguments:
-// compiler - compiler instance
-//
-// Return Value:
-// None
-void GenTree::gtClearReg(Compiler* compiler)
-{
-#if CPU_LONG_USES_REGPAIR
- if (isRegPairType(TypeGet()) ||
- // (IsLocal() && isRegPairType(compiler->lvaTable[gtLclVarCommon.gtLclNum].TypeGet())) ||
- (OperGet() == GT_MUL && (gtFlags & GTF_MUL_64RSLT)))
- {
- gtRegPair = REG_PAIR_NONE;
- }
- else
-#endif // CPU_LONG_USES_REGPAIR
- {
- gtRegNum = REG_NA;
- }
-
- // Also clear multi-reg state if this is a call node
- if (IsCall())
- {
- this->AsCall()->ClearOtherRegs();
- }
- else if (IsCopyOrReload())
- {
- this->AsCopyOrReload()->ClearOtherRegs();
- }
-}
-
//-----------------------------------------------------------
// CopyReg: Copy the _gtRegNum/_gtRegPair/gtRegTag fields.
//
@@ -704,56 +669,52 @@ bool GenTree::gtHasReg() const
{
assert(_gtRegNum != REG_NA);
INDEBUG(assert(gtRegTag == GT_REGTAG_REGPAIR));
- hasReg = (gtRegPair != REG_PAIR_NONE);
+ return (gtRegPair != REG_PAIR_NONE);
}
- else
+ assert(_gtRegNum != REG_PAIR_NONE);
+ INDEBUG(assert(gtRegTag == GT_REGTAG_REG));
#endif
+ if (IsMultiRegCall())
{
- assert(_gtRegNum != REG_PAIR_NONE);
- INDEBUG(assert(gtRegTag == GT_REGTAG_REG));
+ // Have to cast away const-ness because GetReturnTypeDesc() is a non-const method
+ GenTree* tree = const_cast<GenTree*>(this);
+ GenTreeCall* call = tree->AsCall();
+ unsigned regCount = call->GetReturnTypeDesc()->GetReturnRegCount();
+ hasReg = false;
- if (IsMultiRegCall())
+ // A Multi-reg call node is said to have regs, if it has
+ // reg assigned to each of its result registers.
+ for (unsigned i = 0; i < regCount; ++i)
{
- // Has to cast away const-ness because GetReturnTypeDesc() is a non-const method
- GenTree* tree = const_cast<GenTree*>(this);
- GenTreeCall* call = tree->AsCall();
- unsigned regCount = call->GetReturnTypeDesc()->GetReturnRegCount();
- hasReg = false;
-
- // A Multi-reg call node is said to have regs, if it has
- // reg assigned to each of its result registers.
- for (unsigned i = 0; i < regCount; ++i)
+ hasReg = (call->GetRegNumByIdx(i) != REG_NA);
+ if (!hasReg)
{
- hasReg = (call->GetRegNumByIdx(i) != REG_NA);
- if (!hasReg)
- {
- break;
- }
+ break;
}
}
- else if (IsCopyOrReloadOfMultiRegCall())
- {
- GenTree* tree = const_cast<GenTree*>(this);
- GenTreeCopyOrReload* copyOrReload = tree->AsCopyOrReload();
- GenTreeCall* call = copyOrReload->gtGetOp1()->AsCall();
- unsigned regCount = call->GetReturnTypeDesc()->GetReturnRegCount();
- hasReg = false;
+ }
+ else if (IsCopyOrReloadOfMultiRegCall())
+ {
+ GenTree* tree = const_cast<GenTree*>(this);
+ GenTreeCopyOrReload* copyOrReload = tree->AsCopyOrReload();
+ GenTreeCall* call = copyOrReload->gtGetOp1()->AsCall();
+ unsigned regCount = call->GetReturnTypeDesc()->GetReturnRegCount();
+ hasReg = false;
- // A Multi-reg copy or reload node is said to have regs,
- // if it has valid regs in any of the positions.
- for (unsigned i = 0; i < regCount; ++i)
+ // A Multi-reg copy or reload node is said to have regs,
+ // if it has valid regs in any of the positions.
+ for (unsigned i = 0; i < regCount; ++i)
+ {
+ hasReg = (copyOrReload->GetRegNumByIdx(i) != REG_NA);
+ if (hasReg)
{
- hasReg = (copyOrReload->GetRegNumByIdx(i) != REG_NA);
- if (hasReg)
- {
- break;
- }
+ break;
}
}
- else
- {
- hasReg = (gtRegNum != REG_NA);
- }
+ }
+ else
+ {
+ hasReg = (gtRegNum != REG_NA);
}
return hasReg;
@@ -885,6 +846,55 @@ bool GenTreeCall::IsPure(Compiler* compiler) const
compiler->s_helperCallProperties.IsPure(compiler->eeGetHelperNum(gtCallMethHnd));
}
+//-------------------------------------------------------------------------
+// HasSideEffects:
+// Returns true if this call has any side effects. All non-helpers are considered to have side-effects. Only helpers
+// that do not mutate the heap, do not run constructors, may not throw, and are either a) pure or b) non-finalizing
+// allocation functions are considered side-effect-free.
+//
+// Arguments:
+// compiler - the compiler instance
+// ignoreExceptions - when `true`, ignores exception side effects
+// ignoreCctors - when `true`, ignores class constructor side effects
+//
+// Return Value:
+// true if this call has any side-effects; false otherwise.
+bool GenTreeCall::HasSideEffects(Compiler* compiler, bool ignoreExceptions, bool ignoreCctors) const
+{
+ // Generally all GT_CALL nodes are considered to have side-effects, but we may have extra information about helper
+ // calls that can prove them side-effect-free.
+ if (gtCallType != CT_HELPER)
+ {
+ return true;
+ }
+
+ CorInfoHelpFunc helper = compiler->eeGetHelperNum(gtCallMethHnd);
+ HelperCallProperties& helperProperties = compiler->s_helperCallProperties;
+
+ // We definitely care about the side effects if MutatesHeap is true
+ if (helperProperties.MutatesHeap(helper))
+ {
+ return true;
+ }
+
+ // Unless we have been instructed to ignore cctors (CSE, for example, ignores cctors), consider them side effects.
+ if (!ignoreCctors && helperProperties.MayRunCctor(helper))
+ {
+ return true;
+ }
+
+ // If we also care about exceptions then check if the helper can throw
+ if (!ignoreExceptions && !helperProperties.NoThrow(helper))
+ {
+ return true;
+ }
+
+ // If this is not a Pure helper call or an allocator (that will not need to run a finalizer)
+ // then this call has side effects.
+ return !helperProperties.IsPure(helper) &&
+ (!helperProperties.IsAllocator(helper) || helperProperties.MayFinalize(helper));
+}
+
#ifndef LEGACY_BACKEND
//-------------------------------------------------------------------------
@@ -1281,7 +1291,7 @@ AGAIN:
{
return false;
}
- if (op1->gtAddrMode.gtOffset != op2->gtAddrMode.gtOffset)
+ if (op1->gtAddrMode.Offset() != op2->gtAddrMode.Offset())
{
return false;
}
@@ -1292,6 +1302,12 @@ AGAIN:
return false;
}
break;
+ case GT_INDEX_ADDR:
+ if (op1->AsIndexAddr()->gtElemSize != op2->AsIndexAddr()->gtElemSize)
+ {
+ return false;
+ }
+ break;
// For the ones below no extra argument matters for comparison.
case GT_QMARK:
@@ -1867,6 +1883,9 @@ AGAIN:
case GT_INDEX:
hash += tree->gtIndex.gtIndElemSize;
break;
+ case GT_INDEX_ADDR:
+ hash += tree->AsIndexAddr()->gtElemSize;
+ break;
case GT_ALLOCOBJ:
hash = genTreeHashAdd(hash, static_cast<unsigned>(
reinterpret_cast<uintptr_t>(tree->gtAllocObj.gtAllocObjClsHnd)));
@@ -1907,7 +1926,7 @@ AGAIN:
hash += tree->gtIntrinsic.gtIntrinsicId;
break;
case GT_LEA:
- hash += (tree->gtAddrMode.gtOffset << 3) + tree->gtAddrMode.gtScale;
+ hash += static_cast<unsigned>(tree->gtAddrMode.Offset() << 3) + tree->gtAddrMode.gtScale;
break;
case GT_BLK:
@@ -1929,6 +1948,7 @@ AGAIN:
case GT_ARR_INDEX:
case GT_QMARK:
case GT_INDEX:
+ case GT_INDEX_ADDR:
break;
#ifdef FEATURE_SIMD
@@ -3375,7 +3395,7 @@ unsigned Compiler::gtSetEvalOrder(GenTree* tree)
case CORINFO_INTRINSIC_Ceiling:
case CORINFO_INTRINSIC_Floor:
case CORINFO_INTRINSIC_Object_GetType:
- // Giving intrinsics a large fixed exectuion cost is because we'd like to CSE
+ // Giving intrinsics a large fixed execution cost is because we'd like to CSE
// them, even if they are implemented by calls. This is different from modeling
// user calls since we never CSE user calls.
costEx = 36;
@@ -4767,6 +4787,23 @@ unsigned Compiler::gtSetEvalOrder(GenTree* tree)
}
break;
+ case GT_INDEX_ADDR:
+ costEx = 6; // cmp reg,reg; jae throw; mov reg, [addrmode] (not taken)
+ costSz = 9; // jump to cold section
+
+ level = gtSetEvalOrder(tree->AsIndexAddr()->Index());
+ costEx += tree->AsIndexAddr()->Index()->gtCostEx;
+ costSz += tree->AsIndexAddr()->Index()->gtCostSz;
+
+ lvl2 = gtSetEvalOrder(tree->AsIndexAddr()->Arr());
+ if (level < lvl2)
+ {
+ level = lvl2;
+ }
+ costEx += tree->AsIndexAddr()->Arr()->gtCostEx;
+ costSz += tree->AsIndexAddr()->Arr()->gtCostSz;
+ break;
+
default:
#ifdef DEBUG
if (verbose)
@@ -5410,6 +5447,19 @@ bool GenTree::TryGetUse(GenTree* def, GenTree*** use)
case GT_FIELD_LIST:
return TryGetUseList(def, use);
+#if !defined(LEGACY_BACKEND) && defined(_TARGET_ARM_)
+ case GT_PUTARG_SPLIT:
+ if (this->AsUnOp()->gtOp1->gtOper == GT_FIELD_LIST)
+ {
+ return TryGetUseList(def, use);
+ }
+ if (def == this->AsUnOp()->gtOp1)
+ {
+ *use = &this->AsUnOp()->gtOp1;
+ return true;
+ }
+ return false;
+#endif // !LEGACY_BACKEND && _TARGET_ARM_
#ifdef FEATURE_SIMD
case GT_SIMD:
@@ -5698,12 +5748,28 @@ GenTreePtr GenTree::gtGetParent(GenTreePtr** parentChildPtrPtr) const
return parent;
}
-/*****************************************************************************
- *
- * Returns true if the given operator may cause an exception.
- */
+//------------------------------------------------------------------------------
+// OperRequiresAsgFlag : Check whether the operation requires GTF_ASG flag regardless
+// of the children's flags.
+//
+
+bool GenTree::OperRequiresAsgFlag()
+{
+ return ((OperKind() & GTK_ASGOP) || (gtOper == GT_XADD) || (gtOper == GT_XCHG) || (gtOper == GT_LOCKADD) ||
+ (gtOper == GT_CMPXCHG) || (gtOper == GT_MEMORYBARRIER));
+}
+
+//------------------------------------------------------------------------------
+// OperMayThrow : Check whether the operation may throw.
+//
+//
+// Arguments:
+// comp - Compiler instance
+//
+// Return Value:
+// True if the given operator may cause an exception
-bool GenTree::OperMayThrow()
+bool GenTree::OperMayThrow(Compiler* comp)
{
GenTreePtr op;
@@ -5730,30 +5796,6 @@ bool GenTree::OperMayThrow()
}
return true;
- case GT_IND:
- op = gtOp.gtOp1;
-
- /* Indirections of handles are known to be safe */
- if (op->gtOper == GT_CNS_INT)
- {
- if (op->IsIconHandle())
- {
- /* No exception is thrown on this indirection */
- return false;
- }
- }
- if (this->gtFlags & GTF_IND_NONFAULTING)
- {
- return false;
- }
- // Non-Null AssertionProp will remove the GTF_EXCEPT flag and mark the GT_IND with GTF_ORDER_SIDEEFF flag
- if ((this->gtFlags & GTF_ALL_EFFECT) == GTF_ORDER_SIDEEFF)
- {
- return false;
- }
-
- return true;
-
case GT_INTRINSIC:
// If this is an intrinsic that represents the object.GetType(), it can throw an NullReferenceException.
// Report it as may throw.
@@ -5769,23 +5811,34 @@ bool GenTree::OperMayThrow()
break;
+ case GT_CALL:
+
+ CorInfoHelpFunc helper;
+ helper = comp->eeGetHelperNum(this->AsCall()->gtCallMethHnd);
+ return ((helper == CORINFO_HELP_UNDEF) || !comp->s_helperCallProperties.NoThrow(helper));
+
+ case GT_IND:
case GT_BLK:
case GT_OBJ:
case GT_DYN_BLK:
case GT_STORE_BLK:
- return !Compiler::fgIsIndirOfAddrOfLocal(this);
+ case GT_NULLCHECK:
+ return (((this->gtFlags & GTF_IND_NONFAULTING) == 0) && comp->fgAddrCouldBeNull(this->AsIndir()->Addr()));
+
+ case GT_ARR_LENGTH:
+ return (((this->gtFlags & GTF_IND_NONFAULTING) == 0) && comp->fgAddrCouldBeNull(gtOp.gtOp1));
case GT_ARR_BOUNDS_CHECK:
case GT_ARR_ELEM:
case GT_ARR_INDEX:
+ case GT_ARR_OFFSET:
case GT_CATCH_ARG:
- case GT_ARR_LENGTH:
case GT_LCLHEAP:
case GT_CKFINITE:
- case GT_NULLCHECK:
#ifdef FEATURE_SIMD
case GT_SIMD_CHK:
#endif // FEATURE_SIMD
+ case GT_INDEX_ADDR:
return true;
default:
break;
@@ -6600,9 +6653,13 @@ GenTree* Compiler::gtNewObjNode(CORINFO_CLASS_HANDLE structHnd, GenTree* addr)
if ((addr->gtFlags & GTF_GLOB_REF) == 0)
{
GenTreeLclVarCommon* lclNode = addr->IsLocalAddrExpr();
- if ((lclNode != nullptr) && !lvaIsImplicitByRefLocal(lclNode->gtLclNum))
+ if (lclNode != nullptr)
{
- newBlkOrObjNode->gtFlags &= ~GTF_GLOB_REF;
+ newBlkOrObjNode->gtFlags |= GTF_IND_NONFAULTING;
+ if (!lvaIsImplicitByRefLocal(lclNode->gtLclNum))
+ {
+ newBlkOrObjNode->gtFlags &= ~GTF_GLOB_REF;
+ }
}
}
return newBlkOrObjNode;
@@ -6993,8 +7050,9 @@ GenTree* Compiler::gtNewBlkOpNode(
// gtNewPutArgReg: Creates a new PutArgReg node.
//
// Arguments:
-// type - The actual type of the argument
-// arg - The argument node
+// type - The actual type of the argument
+// arg - The argument node
+// argReg - The register that the argument will be passed in
//
// Return Value:
// Returns the newly created PutArgReg node.
@@ -7002,22 +7060,18 @@ GenTree* Compiler::gtNewBlkOpNode(
// Notes:
// The node is generated as GenTreeMultiRegOp on armel, as GenTreeOp on all the other archs
//
-GenTreePtr Compiler::gtNewPutArgReg(var_types type, GenTreePtr arg)
+GenTreePtr Compiler::gtNewPutArgReg(var_types type, GenTreePtr arg, regNumber argReg)
{
assert(arg != nullptr);
GenTreePtr node = nullptr;
-#if !defined(LEGACY_BACKEND) && defined(_TARGET_ARM_)
+#if !defined(LEGACY_BACKEND) && defined(ARM_SOFTFP)
// A PUTARG_REG could be a MultiRegOp on armel since we could move a double register to two int registers.
- if (opts.compUseSoftFP)
- {
- node = new (this, GT_PUTARG_REG) GenTreeMultiRegOp(GT_PUTARG_REG, type, arg, nullptr);
- }
- else
+ node = new (this, GT_PUTARG_REG) GenTreeMultiRegOp(GT_PUTARG_REG, type, arg, nullptr);
+#else
+ node = gtNewOperNode(GT_PUTARG_REG, type, arg);
#endif
- {
- node = gtNewOperNode(GT_PUTARG_REG, type, arg);
- }
+ node->gtRegNum = argReg;
return node;
}
@@ -7392,6 +7446,19 @@ GenTreePtr Compiler::gtCloneExpr(
}
break;
+ case GT_INDEX_ADDR:
+ {
+ GenTreeIndexAddr* asIndAddr = tree->AsIndexAddr();
+
+ copy = new (this, GT_INDEX_ADDR)
+ GenTreeIndexAddr(asIndAddr->Arr(), asIndAddr->Index(), asIndAddr->gtElemType,
+ asIndAddr->gtStructElemClass, asIndAddr->gtElemSize, asIndAddr->gtLenOffset,
+ asIndAddr->gtElemOffset);
+ copy->AsIndexAddr()->gtIndRngFailBB = asIndAddr->gtIndRngFailBB;
+ copy->AsIndexAddr()->gtStkDepth = asIndAddr->gtStkDepth;
+ }
+ break;
+
case GT_ALLOCOBJ:
{
GenTreeAllocObj* asAllocObj = tree->AsAllocObj();
@@ -7401,8 +7468,7 @@ GenTreePtr Compiler::gtCloneExpr(
break;
case GT_ARR_LENGTH:
- copy = new (this, GT_ARR_LENGTH)
- GenTreeArrLen(tree->TypeGet(), tree->gtOp.gtOp1, tree->gtArrLen.ArrLenOffset());
+ copy = gtNewArrLen(tree->TypeGet(), tree->gtOp.gtOp1, tree->gtArrLen.ArrLenOffset());
break;
case GT_ARR_INDEX:
@@ -7441,7 +7507,8 @@ GenTreePtr Compiler::gtCloneExpr(
case GT_BOX:
copy = new (this, GT_BOX)
- GenTreeBox(tree->TypeGet(), tree->gtOp.gtOp1, tree->gtBox.gtAsgStmtWhenInlinedBoxValue);
+ GenTreeBox(tree->TypeGet(), tree->gtOp.gtOp1, tree->gtBox.gtAsgStmtWhenInlinedBoxValue,
+ tree->gtBox.gtCopyStmtWhenInlinedBoxValue);
break;
case GT_INTRINSIC:
@@ -7456,9 +7523,9 @@ GenTreePtr Compiler::gtCloneExpr(
case GT_LEA:
{
GenTreeAddrMode* addrModeOp = tree->AsAddrMode();
- copy =
- new (this, GT_LEA) GenTreeAddrMode(addrModeOp->TypeGet(), addrModeOp->Base(), addrModeOp->Index(),
- addrModeOp->gtScale, addrModeOp->gtOffset);
+ copy = new (this, GT_LEA)
+ GenTreeAddrMode(addrModeOp->TypeGet(), addrModeOp->Base(), addrModeOp->Index(), addrModeOp->gtScale,
+ static_cast<unsigned>(addrModeOp->Offset()));
}
break;
@@ -7549,14 +7616,14 @@ GenTreePtr Compiler::gtCloneExpr(
case GT_IND:
case GT_OBJ:
case GT_STORE_OBJ:
- if (tree->gtFlags & GTF_IND_ARR_INDEX)
+ {
+ ArrayInfo arrInfo;
+ if (!tree->AsIndir()->gtOp1->OperIs(GT_INDEX_ADDR) && TryGetArrayInfo(tree->AsIndir(), &arrInfo))
{
- ArrayInfo arrInfo;
- bool b = GetArrayInfoMap()->Lookup(tree, &arrInfo);
- assert(b);
GetArrayInfoMap()->Set(copy, arrInfo);
}
- break;
+ }
+ break;
default:
break;
@@ -7737,6 +7804,8 @@ GenTreePtr Compiler::gtCloneExpr(
gtCloneExpr(tree->gtBoundsChk.gtIndex, addFlags, deepVarNum, deepVarVal),
gtCloneExpr(tree->gtBoundsChk.gtArrLen, addFlags, deepVarNum, deepVarVal),
tree->gtBoundsChk.gtThrowKind);
+ copy->gtBoundsChk.gtIndRngFailBB = tree->gtBoundsChk.gtIndRngFailBB;
+ copy->gtBoundsChk.gtStkDepth = tree->gtBoundsChk.gtStkDepth;
break;
case GT_STORE_DYN_BLK:
@@ -7794,6 +7863,9 @@ DONE:
copy->gtOp.gtOp1->gtFlags |= GTF_RELOP_QMARK;
}
+ // Clear the copy's GTF_ASG and GTF_EXCEPT bits; these will instead be taken from the source.
+ copy->gtFlags &= ~(GTF_ASG | GTF_EXCEPT);
+
copy->gtFlags |= addFlags;
}
@@ -7830,6 +7902,7 @@ DONE:
//
// Notes:
// The caller must ensure that the original statement has been sequenced,
+// and the side effect flags are updated on the statement nodes,
// but this method will sequence 'replacementTree', and insert it into the
// proper place in the statement sequence.
@@ -7907,51 +7980,145 @@ GenTreePtr Compiler::gtReplaceTree(GenTreePtr stmt, GenTreePtr tree, GenTreePtr
treeLastNode->gtNext = treeNextNode;
treeNextNode->gtPrev = treeLastNode;
}
-
- // Propagate side-effect flags of "replacementTree" to its parents if needed.
- gtUpdateSideEffects(treeParent, tree->gtFlags, replacementTree->gtFlags);
}
return replacementTree;
}
//------------------------------------------------------------------------
-// gtUpdateSideEffects: Update the side effects for ancestors.
+// gtUpdateSideEffects: Update the side effects of a tree and its ancestors
//
// Arguments:
-// treeParent - The immediate parent node.
-// oldGtFlags - The stale gtFlags.
-// newGtFlags - The new gtFlags.
-//
+// stmt - The tree's statement
+// tree - Tree to update the side effects for
//
-// Assumptions:
-// Linear order of the stmt has been established.
+// Note: If tree's order hasn't been established, the method updates side effect
+// flags on all statement's nodes.
+
+void Compiler::gtUpdateSideEffects(GenTree* stmt, GenTree* tree)
+{
+ if (fgStmtListThreaded)
+ {
+ gtUpdateTreeAncestorsSideEffects(tree);
+ }
+ else
+ {
+ gtUpdateStmtSideEffects(stmt);
+ }
+}
+
+//------------------------------------------------------------------------
+// gtUpdateTreeAncestorsSideEffects: Update the side effects of a tree and its ancestors
+// when statement order has been established.
//
-// Notes:
-// The routine is used for updating the stale side effect flags for ancestor
-// nodes starting from treeParent up to the top-level stmt expr.
+// Arguments:
+// tree - Tree to update the side effects for
-void Compiler::gtUpdateSideEffects(GenTreePtr treeParent, unsigned oldGtFlags, unsigned newGtFlags)
+void Compiler::gtUpdateTreeAncestorsSideEffects(GenTree* tree)
{
assert(fgStmtListThreaded);
+ while (tree != nullptr)
+ {
+ gtResetNodeSideEffects(tree);
+ unsigned nChildren = tree->NumChildren();
+ for (unsigned childNum = 0; childNum < nChildren; childNum++)
+ {
+ tree->gtFlags |= (tree->GetChild(childNum)->gtFlags & GTF_ALL_EFFECT);
+ }
+ tree = tree->gtGetParent(nullptr);
+ }
+}
- oldGtFlags = oldGtFlags & GTF_ALL_EFFECT;
- newGtFlags = newGtFlags & GTF_ALL_EFFECT;
+//------------------------------------------------------------------------
+// gtUpdateStmtSideEffects: Update the side effects for statement tree nodes.
+//
+// Arguments:
+// stmt - The statement to update side effects on
+
+void Compiler::gtUpdateStmtSideEffects(GenTree* stmt)
+{
+ fgWalkTree(&stmt->gtStmt.gtStmtExpr, fgUpdateSideEffectsPre, fgUpdateSideEffectsPost);
+}
+
+//------------------------------------------------------------------------
+// gtResetNodeSideEffects: Update the side effects based on the node operation.
+//
+// Arguments:
+// tree - Tree to update the side effects on
+//
+// Notes:
+// This method currently only updates GTF_EXCEPT and GTF_ASG flags. The other side effect
+// flags may remain unnecessarily (conservatively) set.
+// The caller of this method is expected to update the flags based on the children's flags.
- if (oldGtFlags != newGtFlags)
+void Compiler::gtResetNodeSideEffects(GenTree* tree)
+{
+ if (tree->OperMayThrow(this))
{
- while (treeParent)
+ tree->gtFlags |= GTF_EXCEPT;
+ }
+ else
+ {
+ tree->gtFlags &= ~GTF_EXCEPT;
+ if (tree->OperIsIndirOrArrLength())
{
- treeParent->gtFlags &= ~oldGtFlags;
- treeParent->gtFlags |= newGtFlags;
- treeParent = treeParent->gtGetParent(nullptr);
+ tree->gtFlags |= GTF_IND_NONFAULTING;
}
}
+
+ if (tree->OperRequiresAsgFlag())
+ {
+ tree->gtFlags |= GTF_ASG;
+ }
+ else
+ {
+ tree->gtFlags &= ~GTF_ASG;
+ }
+}
+
+//------------------------------------------------------------------------
+// fgUpdateSideEffectsPre: Update the side effects based on the tree operation.
+//
+// Arguments:
+// pTree - Pointer to the tree to update the side effects
+// fgWalkPre - Walk data
+//
+// Notes:
+// This method currently only updates GTF_EXCEPT and GTF_ASG flags. The other side effect
+// flags may remain unnecessarily (conservatively) set.
+
+Compiler::fgWalkResult Compiler::fgUpdateSideEffectsPre(GenTree** pTree, fgWalkData* fgWalkPre)
+{
+ fgWalkPre->compiler->gtResetNodeSideEffects(*pTree);
+
+ return WALK_CONTINUE;
+}
+
+//------------------------------------------------------------------------
+// fgUpdateSideEffectsPost: Update the side effects of the parent based on the tree's flags.
+//
+// Arguments:
+// pTree - Pointer to the tree
+// fgWalkPost - Walk data
+//
+// Notes:
+// The routine is used for updating the stale side effect flags: it propagates
+// the visited node's effect flags up to its immediate parent during the walk.
+
+Compiler::fgWalkResult Compiler::fgUpdateSideEffectsPost(GenTree** pTree, fgWalkData* fgWalkPost)
+{
+ GenTree* tree = *pTree;
+ GenTree* parent = fgWalkPost->parent;
+ if (parent != nullptr)
+ {
+ parent->gtFlags |= (tree->gtFlags & GTF_ALL_EFFECT);
+ }
+ return WALK_CONTINUE;
}
/*****************************************************************************
*
- * Comapres two trees and returns true when both trees are the same.
+ * Compares two trees and returns true when both trees are the same.
* Instead of fully comparing the two trees this method can just return false.
* Thus callers should not assume that the trees are different when false is returned.
* Only when true is returned can the caller perform code optimizations.
@@ -9348,7 +9515,7 @@ void Compiler::gtDispNodeName(GenTree* tree)
{
bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), "(i*%d)+", lea->gtScale);
}
- bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), "%d)", lea->gtOffset);
+ bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), "%d)", lea->Offset());
}
else if (tree->gtOper == GT_ARR_BOUNDS_CHECK)
{
@@ -9532,8 +9699,8 @@ void Compiler::gtDispNode(GenTreePtr tree, IndentStack* indentStack, __in __in_z
case GT_STORE_DYN_BLK:
case GT_IND:
- // We prefer printing R, V or U
- if ((tree->gtFlags & (GTF_IND_REFARR_LAYOUT | GTF_IND_VOLATILE | GTF_IND_UNALIGNED)) == 0)
+ // We prefer printing V or U
+ if ((tree->gtFlags & (GTF_IND_VOLATILE | GTF_IND_UNALIGNED)) == 0)
{
if (tree->gtFlags & GTF_IND_TGTANYWHERE)
{
@@ -9557,10 +9724,11 @@ void Compiler::gtDispNode(GenTreePtr tree, IndentStack* indentStack, __in __in_z
__fallthrough;
case GT_INDEX:
+ case GT_INDEX_ADDR:
if ((tree->gtFlags & (GTF_IND_VOLATILE | GTF_IND_UNALIGNED)) == 0) // We prefer printing V or U over R
{
- if (tree->gtFlags & GTF_IND_REFARR_LAYOUT)
+ if (tree->gtFlags & GTF_INX_REFARR_LAYOUT)
{
printf("R");
--msgLength;
@@ -9948,9 +10116,9 @@ void Compiler::gtDispRegVal(GenTree* tree)
#endif
#if !defined(LEGACY_BACKEND) && defined(_TARGET_ARM_)
- if (tree->IsMultiReg())
+ if (tree->OperIsMultiRegOp() && tree->AsMultiRegOp()->gtOtherReg != REG_NA)
{
- printf(",%s", compRegVarName((regNumber)tree->AsMultiRegOp()->gtOtherReg));
+ printf(",%s", compRegVarName(tree->AsMultiRegOp()->gtOtherReg));
}
#endif
@@ -11944,10 +12112,7 @@ GenTreePtr Compiler::gtFoldExprCompare(GenTreePtr tree)
if (fgGlobalMorph)
{
- if (!fgIsInlining())
- {
- fgMorphTreeDone(cons);
- }
+ fgMorphTreeDone(cons);
}
else
{
@@ -12019,47 +12184,79 @@ GenTreePtr Compiler::gtFoldExprSpecial(GenTreePtr tree)
switch (oper)
{
-
case GT_EQ:
case GT_NE:
+ case GT_GT:
// Optimize boxed value classes; these are always false. This IL is
// generated when a generic value is tested against null:
// <T> ... foo(T x) { ... if ((object)x == null) ...
if (val == 0 && op->IsBoxedValue())
{
- // Change the assignment node so we don't generate any code for it.
+ JITDUMP("\nAttempting to optimize BOX(valueType) %s null [%06u]\n", GenTree::OpName(oper),
+ dspTreeID(tree));
- GenTreePtr asgStmt = op->gtBox.gtAsgStmtWhenInlinedBoxValue;
- assert(asgStmt->gtOper == GT_STMT);
- GenTreePtr asg = asgStmt->gtStmt.gtStmtExpr;
- assert(asg->gtOper == GT_ASG);
-#ifdef DEBUG
- if (verbose)
+ // We don't expect GT_GT with signed compares, and we
+ // can't predict the result if we do see it, since the
+ // boxed object addr could have its high bit set.
+ if ((oper == GT_GT) && !tree->IsUnsigned())
{
- printf("Bashing ");
- printTreeID(asg);
- printf(" to NOP as part of dead box operation\n");
- gtDispTree(tree);
+ JITDUMP(" bailing; unexpected signed compare via GT_GT\n");
}
-#endif
- asg->gtBashToNOP();
-
- op = gtNewIconNode(oper == GT_NE);
- if (fgGlobalMorph)
+ else
{
- if (!fgIsInlining())
+ // The tree under the box must be side effect free
+ // since we will drop it if we optimize.
+ assert(!gtTreeHasSideEffects(op->gtBox.gtOp.gtOp1, GTF_SIDE_EFFECT));
+
+ // See if we can optimize away the box and related statements.
+ GenTree* boxSourceTree = gtTryRemoveBoxUpstreamEffects(op);
+ bool didOptimize = (boxSourceTree != nullptr);
+
+ // If optimization succeeded, remove the box.
+ if (didOptimize)
{
- fgMorphTreeDone(op);
+ // Set up the result of the compare.
+ int compareResult = 0;
+ if (oper == GT_GT)
+ {
+ // GT_GT(null, box) == false
+ // GT_GT(box, null) == true
+ compareResult = (op1 == op);
+ }
+ else if (oper == GT_EQ)
+ {
+ // GT_EQ(box, null) == false
+ // GT_EQ(null, box) == false
+ compareResult = 0;
+ }
+ else
+ {
+ assert(oper == GT_NE);
+ // GT_NE(box, null) == true
+ // GT_NE(null, box) == true
+ compareResult = 1;
+ }
+
+ JITDUMP("\nSuccess: replacing BOX(valueType) %s null with %d\n", GenTree::OpName(oper),
+ compareResult);
+
+ op = gtNewIconNode(compareResult);
+
+ if (fgGlobalMorph)
+ {
+ fgMorphTreeDone(op);
+ }
+ else
+ {
+ op->gtNext = tree->gtNext;
+ op->gtPrev = tree->gtPrev;
+ }
+
+ return op;
}
}
- else
- {
- op->gtNext = tree->gtNext;
- op->gtPrev = tree->gtPrev;
- }
- fgSetStmtSeq(asgStmt);
- return op;
}
+
break;
case GT_ADD:
@@ -12250,6 +12447,369 @@ DONE_FOLD:
return op;
}
+//------------------------------------------------------------------------
+// gtTryRemoveBoxUpstreamEffects: given an unused value type box,
+// try and remove the upstream allocation and unnecessary parts of
+// the copy.
+//
+// Arguments:
+// op - the box node to optimize
+// options - controls whether and how trees are modified
+// (see notes)
+//
+// Return Value:
+// A tree representing the original value to box, if removal
+// is successful/possible (but see note). nullptr if removal fails.
+//
+// Notes:
+// Value typed box gets special treatment because it has associated
+// side effects that can be removed if the box result is not used.
+//
+// By default (options == BR_REMOVE_AND_NARROW) this method will
+// try and remove unnecessary trees and will try and reduce remaining
+// operations to the minimal set, possibly narrowing the width of
+// loads from the box source if it is a struct.
+//
+// To perform a trial removal, pass BR_DONT_REMOVE. This can be
+// useful to determine if this optimization should only be
+// performed if some other conditions hold true.
+//
+// To remove but not alter the access to the box source, pass
+// BR_REMOVE_BUT_NOT_NARROW.
+//
+// To remove and return the tree for the type handle used for
+// the boxed newobj, pass BR_REMOVE_AND_NARROW_WANT_TYPE_HANDLE.
+// This can be useful when the only part of the box that is "live"
+// is its type.
+//
+// If removal fails, it is possible that a subsequent pass may be
+// able to optimize. Blocking side effects may now be minimized
+// (null or bounds checks might have been removed) or might be
+// better known (inline return placeholder updated with the actual
+// return expression). So the box is perhaps best left as is to
+// help trigger this re-examination.
+
+GenTree* Compiler::gtTryRemoveBoxUpstreamEffects(GenTree* op, BoxRemovalOptions options)
+{
+ JITDUMP("gtTryRemoveBoxUpstreamEffects called for [%06u]\n", dspTreeID(op));
+ assert(op->IsBoxedValue());
+
+ // grab related parts for the optimization
+ GenTree* asgStmt = op->gtBox.gtAsgStmtWhenInlinedBoxValue;
+ assert(asgStmt->gtOper == GT_STMT);
+ GenTree* copyStmt = op->gtBox.gtCopyStmtWhenInlinedBoxValue;
+ assert(copyStmt->gtOper == GT_STMT);
+
+#ifdef DEBUG
+ if (verbose)
+ {
+ printf("\n%s to remove side effects of BOX (valuetype)\n",
+ options == BR_DONT_REMOVE ? "Checking if it is possible" : "Attempting");
+ gtDispTree(op);
+ printf("\nWith assign\n");
+ gtDispTree(asgStmt);
+ printf("\nAnd copy\n");
+ gtDispTree(copyStmt);
+ }
+#endif
+
+ // If we don't recognize the form of the assign, bail.
+ GenTree* asg = asgStmt->gtStmt.gtStmtExpr;
+ if (asg->gtOper != GT_ASG)
+ {
+ JITDUMP(" bailing; unexpected assignment op %s\n", GenTree::OpName(asg->gtOper));
+ return nullptr;
+ }
+
+ // If we're eventually going to return the type handle, remember it now.
+ GenTree* boxTypeHandle = nullptr;
+ if (options == BR_REMOVE_AND_NARROW_WANT_TYPE_HANDLE)
+ {
+ // Note we might see GenTreeAllocObj here, if impImportAndPushBox
+ // starts using it instead of a bare helper call.
+ // The type handle is assumed to be the first argument of the
+ // boxing newobj helper call.
+ GenTree* asgSrc = asg->gtOp.gtOp2;
+ assert(asgSrc->IsCall());
+ GenTreeCall* newobjCall = asgSrc->AsCall();
+ GenTreeArgList* newobjArgs = newobjCall->gtCallArgs->AsArgList();
+ boxTypeHandle = newobjArgs->Current();
+ assert(boxTypeHandle != nullptr);
+ }
+
+ // If we don't recognize the form of the copy, bail.
+ GenTree* copy = copyStmt->gtStmt.gtStmtExpr;
+ if (copy->gtOper != GT_ASG)
+ {
+ // GT_RET_EXPR is a tolerable temporary failure.
+ // The jit will revisit this optimization after
+ // inlining is done.
+ if (copy->gtOper == GT_RET_EXPR)
+ {
+ JITDUMP(" bailing; must wait for replacement of copy %s\n", GenTree::OpName(copy->gtOper));
+ }
+ else
+ {
+ // Anything else is a missed case we should
+ // figure out how to handle. One known case
+ // is GT_COMMAs enclosing the GT_ASG we are
+ // looking for.
+ JITDUMP(" bailing; unexpected copy op %s\n", GenTree::OpName(copy->gtOper));
+ }
+ return nullptr;
+ }
+
+ // If the copy is a struct copy, make sure we know how to isolate
+ // any source side effects.
+ GenTree* copySrc = copy->gtOp.gtOp2;
+
+ // If the copy source is from a pending inline, wait for it to resolve.
+ if (copySrc->gtOper == GT_RET_EXPR)
+ {
+ JITDUMP(" bailing; must wait for replacement of copy source %s\n", GenTree::OpName(copySrc->gtOper));
+ return nullptr;
+ }
+
+ // Classify the copy source: whether it carries side effects, and
+ // whether it is a struct-typed value. This determines how much of
+ // the copy statement must be preserved below.
+ bool hasSrcSideEffect = false;
+ bool isStructCopy = false;
+
+ if (gtTreeHasSideEffects(copySrc, GTF_SIDE_EFFECT))
+ {
+ hasSrcSideEffect = true;
+
+ if (copySrc->gtType == TYP_STRUCT)
+ {
+ isStructCopy = true;
+
+ if ((copySrc->gtOper != GT_OBJ) && (copySrc->gtOper != GT_IND) && (copySrc->gtOper != GT_FIELD))
+ {
+ // We don't know how to handle other cases, yet.
+ JITDUMP(" bailing; unexpected copy source struct op with side effect %s\n",
+ GenTree::OpName(copySrc->gtOper));
+ return nullptr;
+ }
+ }
+ }
+
+ // If this was a trial removal, we're done.
+ // No trees are modified in this case.
+ if (options == BR_DONT_REMOVE)
+ {
+ return copySrc;
+ }
+
+ // Otherwise, proceed with the optimization.
+ //
+ // Change the assignment expression to a NOP.
+ JITDUMP("\nBashing NEWOBJ [%06u] to NOP\n", dspTreeID(asg));
+ asg->gtBashToNOP();
+
+ // Change the copy expression so it preserves key
+ // source side effects.
+ JITDUMP("\nBashing COPY [%06u]", dspTreeID(copy));
+
+ if (!hasSrcSideEffect)
+ {
+ // If there were no copy source side effects just bash
+ // the copy to a NOP.
+ copy->gtBashToNOP();
+ JITDUMP(" to NOP; no source side effects.\n");
+ }
+ else if (!isStructCopy)
+ {
+ // For scalar types, go ahead and produce the
+ // value as the copy is fairly cheap and likely
+ // the optimizer can trim things down to just the
+ // minimal side effect parts.
+ copyStmt->gtStmt.gtStmtExpr = copySrc;
+ JITDUMP(" to scalar read via [%06u]\n", dspTreeID(copySrc));
+ }
+ else
+ {
+ // For struct types read the first byte of the
+ // source struct; there's no need to read the
+ // entire thing, and no place to put it.
+ assert(copySrc->gtOper == GT_OBJ || copySrc->gtOper == GT_IND || copySrc->gtOper == GT_FIELD);
+ copyStmt->gtStmt.gtStmtExpr = copySrc;
+
+ if (options == BR_REMOVE_AND_NARROW || options == BR_REMOVE_AND_NARROW_WANT_TYPE_HANDLE)
+ {
+ // Narrow the struct read down to a single byte load.
+ JITDUMP(" to read first byte of struct via modified [%06u]\n", dspTreeID(copySrc));
+ copySrc->ChangeOper(GT_IND);
+ copySrc->gtType = TYP_BYTE;
+ }
+ else
+ {
+ JITDUMP(" to read entire struct via modified [%06u]\n", dspTreeID(copySrc));
+ }
+ }
+
+ // If the statements are already threaded into execution order,
+ // re-sequence them since their contents changed above.
+ if (fgStmtListThreaded)
+ {
+ fgSetStmtSeq(asgStmt);
+ fgSetStmtSeq(copyStmt);
+ }
+
+ // Box effects were successfully optimized.
+
+ if (options == BR_REMOVE_AND_NARROW_WANT_TYPE_HANDLE)
+ {
+ return boxTypeHandle;
+ }
+ else
+ {
+ return copySrc;
+ }
+}
+
+//------------------------------------------------------------------------
+// gtOptimizeEnumHasFlag: given the operands for a call to Enum.HasFlag,
+// try and optimize the call to a simple and/compare tree.
+//
+// Arguments:
+// thisOp - first argument to the call
+// flagOp - second argument to the call
+//
+// Return Value:
+// A new cmp/and tree if successful. nullptr on failure.
+//
+// Notes:
+// If successful, may allocate new temps and modify connected
+// statements.
+
+GenTree* Compiler::gtOptimizeEnumHasFlag(GenTree* thisOp, GenTree* flagOp)
+{
+ JITDUMP("Considering optimizing call to Enum.HasFlag....\n");
+
+ // Operands must be boxes
+ if (!thisOp->IsBoxedValue() || !flagOp->IsBoxedValue())
+ {
+ JITDUMP("bailing, need both inputs to be BOXes\n");
+ return nullptr;
+ }
+
+ // Operands must have same type
+ bool isExactThis = false;
+ bool isNonNullThis = false;
+ CORINFO_CLASS_HANDLE thisHnd = gtGetClassHandle(thisOp, &isExactThis, &isNonNullThis);
+
+ if (thisHnd == nullptr)
+ {
+ JITDUMP("bailing, can't find type for 'this' operand\n");
+ return nullptr;
+ }
+
+ // A boxed thisOp should have exact type and non-null instance
+ assert(isExactThis);
+ assert(isNonNullThis);
+
+ bool isExactFlag = false;
+ bool isNonNullFlag = false;
+ CORINFO_CLASS_HANDLE flagHnd = gtGetClassHandle(flagOp, &isExactFlag, &isNonNullFlag);
+
+ if (flagHnd == nullptr)
+ {
+ JITDUMP("bailing, can't find type for 'flag' operand\n");
+ return nullptr;
+ }
+
+ // A boxed flagOp should have exact type and non-null instance
+ assert(isExactFlag);
+ assert(isNonNullFlag);
+
+ // If the enum types differ, HasFlag may throw at runtime, so the
+ // simple and/compare rewrite would not be equivalent.
+ if (flagHnd != thisHnd)
+ {
+ JITDUMP("bailing, operand types differ\n");
+ return nullptr;
+ }
+
+ // If we have a shared type instance we can't safely check type
+ // equality, so bail.
+ DWORD classAttribs = info.compCompHnd->getClassAttribs(thisHnd);
+ if (classAttribs & CORINFO_FLG_SHAREDINST)
+ {
+ JITDUMP("bailing, have shared instance type\n");
+ return nullptr;
+ }
+
+ // Simulate removing the box for thisOP. We need to know that it can
+ // be safely removed before we can optimize.
+ GenTree* thisVal = gtTryRemoveBoxUpstreamEffects(thisOp, BR_DONT_REMOVE);
+ if (thisVal == nullptr)
+ {
+ // Note we may fail here if the this operand comes from
+ // a call. We should be able to retry this post-inlining.
+ JITDUMP("bailing, can't undo box of 'this' operand\n");
+ return nullptr;
+ }
+
+ // Unlike the trial above, this removal actually modifies the
+ // flag box's upstream statements.
+ GenTree* flagVal = gtTryRemoveBoxUpstreamEffects(flagOp, BR_REMOVE_BUT_NOT_NARROW);
+ if (flagVal == nullptr)
+ {
+ // Note we may fail here if the flag operand comes from
+ // a call. We should be able to retry this post-inlining.
+ JITDUMP("bailing, can't undo box of 'flag' operand\n");
+ return nullptr;
+ }
+
+ // Yes, both boxes can be cleaned up. Optimize.
+ JITDUMP("Optimizing call to Enum.HasFlag\n");
+
+ // Undo the boxing of thisOp and prepare to operate directly
+ // on the original enum values.
+ thisVal = gtTryRemoveBoxUpstreamEffects(thisOp, BR_REMOVE_BUT_NOT_NARROW);
+
+ // Our trial removal above should guarantee successful removal here.
+ assert(thisVal != nullptr);
+
+ // We should have a consistent view of the type
+ var_types type = thisVal->TypeGet();
+ assert(type == flagVal->TypeGet());
+
+ // The thisVal and flagVal trees come from earlier statements.
+ //
+ // Unless they are invariant values, we need to evaluate them both
+ // to temps at those points to safely transmit the values here.
+ //
+ // Also we need to use the flag twice, so we need two trees for it.
+ GenTree* thisValOpt = nullptr;
+ GenTree* flagValOpt = nullptr;
+ GenTree* flagValOptCopy = nullptr;
+
+ // Invariant constants can simply be cloned; otherwise spill the
+ // value to a fresh temp at the original copy statement's position.
+ if (thisVal->IsIntegralConst())
+ {
+ thisValOpt = gtClone(thisVal);
+ }
+ else
+ {
+ const unsigned thisTmp = lvaGrabTemp(true DEBUGARG("Enum:HasFlag this temp"));
+ GenTree* thisAsg = gtNewTempAssign(thisTmp, thisVal);
+ GenTree* thisAsgStmt = thisOp->AsBox()->gtCopyStmtWhenInlinedBoxValue;
+ thisAsgStmt->gtStmt.gtStmtExpr = thisAsg;
+ thisValOpt = gtNewLclvNode(thisTmp, type);
+ }
+
+ if (flagVal->IsIntegralConst())
+ {
+ flagValOpt = gtClone(flagVal);
+ flagValOptCopy = gtClone(flagVal);
+ }
+ else
+ {
+ const unsigned flagTmp = lvaGrabTemp(true DEBUGARG("Enum:HasFlag flag temp"));
+ GenTree* flagAsg = gtNewTempAssign(flagTmp, flagVal);
+ GenTree* flagAsgStmt = flagOp->AsBox()->gtCopyStmtWhenInlinedBoxValue;
+ flagAsgStmt->gtStmt.gtStmtExpr = flagAsg;
+ flagValOpt = gtNewLclvNode(flagTmp, type);
+ flagValOptCopy = gtNewLclvNode(flagTmp, type);
+ }
+
+ // Turn the call into (thisValTmp & flagTmp) == flagTmp.
+ GenTree* andTree = gtNewOperNode(GT_AND, type, thisValOpt, flagValOpt);
+ GenTree* cmpTree = gtNewOperNode(GT_EQ, TYP_INT, andTree, flagValOptCopy);
+
+ JITDUMP("Optimized call to Enum.HasFlag\n");
+
+ return cmpTree;
+}
+
/*****************************************************************************
*
* Fold the given constant tree.
@@ -13121,7 +13681,7 @@ GenTreePtr Compiler::gtFoldExprConst(GenTreePtr tree)
assert(op1);
op2 = op1;
- op1 = gtNewHelperCallNode(CORINFO_HELP_OVERFLOW, TYP_VOID, GTF_EXCEPT,
+ op1 = gtNewHelperCallNode(CORINFO_HELP_OVERFLOW, TYP_VOID,
gtNewArgList(gtNewIconNode(compCurBB->bbTryIndex)));
if (vnStore != nullptr)
@@ -13409,6 +13969,11 @@ GenTreePtr Compiler::gtFoldExprConst(GenTreePtr tree)
CNS_LONG:
+ if (fieldSeq != FieldSeqStore::NotAField())
+ {
+ return tree;
+ }
+
#ifdef DEBUG
if (verbose)
{
@@ -13642,13 +14207,33 @@ DONE:
#pragma warning(pop)
#endif
-/*****************************************************************************
- *
- * Create an assignment of the given value to a temp.
- */
+//------------------------------------------------------------------------
+// gtNewTempAssign: Create an assignment of the given value to a temp.
+//
+// Arguments:
+// tmp - local number for a compiler temp
+// val - value to assign to the temp
+//
+// Return Value:
+// Normally a new assignment node.
+// However may return a nop node if val is simply a reference to the temp.
+//
+// Notes:
+// Self-assignments may be represented via NOPs.
+//
+// May update the type of the temp, if it was previously unknown.
+//
+// May set compFloatingPointUsed.
+//
GenTreePtr Compiler::gtNewTempAssign(unsigned tmp, GenTreePtr val)
{
+ // Self-assignment is a nop.
+ if (val->OperGet() == GT_LCL_VAR && val->gtLclVarCommon.gtLclNum == tmp)
+ {
+ return gtNewNothingNode();
+ }
+
LclVarDsc* varDsc = lvaTable + tmp;
if (varDsc->TypeGet() == TYP_I_IMPL && val->TypeGet() == TYP_BYREF)
@@ -13835,7 +14420,7 @@ GenTreePtr Compiler::gtNewRefCOMfield(GenTreePtr objPtr,
args = gtNewListNode(objPtr, args);
}
- GenTreePtr tree = gtNewHelperCallNode(pFieldInfo->helper, genActualType(helperType), 0, args);
+ GenTreePtr tree = gtNewHelperCallNode(pFieldInfo->helper, genActualType(helperType), args);
if (pFieldInfo->fieldAccessor == CORINFO_FIELD_INSTANCE_HELPER)
{
@@ -13915,62 +14500,31 @@ bool Compiler::gtNodeHasSideEffects(GenTreePtr tree, unsigned flags)
{
if (tree->OperGet() == GT_CALL)
{
- // Generally all GT_CALL nodes are considered to have side-effects.
- // But we may have a helper call that doesn't have any important side effects.
- //
- if (tree->gtCall.gtCallType == CT_HELPER)
+ GenTreeCall* const call = tree->AsCall();
+ const bool ignoreExceptions = (flags & GTF_EXCEPT) == 0;
+ const bool ignoreCctors = (flags & GTF_IS_IN_CSE) != 0; // We can CSE helpers that run cctors.
+ if (!call->HasSideEffects(this, ignoreExceptions, ignoreCctors))
{
- // But if this tree is a helper call we may not care about the side-effects
- //
- CorInfoHelpFunc helper = eeGetHelperNum(tree->AsCall()->gtCallMethHnd);
-
- // We definitely care about the side effects if MutatesHeap is true
- //
- if (s_helperCallProperties.MutatesHeap(helper))
- {
- return true;
- }
-
- // with GTF_IS_IN_CSE we will CSE helper calls that can run cctors.
- //
- if (((flags & GTF_IS_IN_CSE) == 0) && (s_helperCallProperties.MayRunCctor(helper)))
- {
- return true;
- }
-
- // If we also care about exceptions then check if the helper can throw
- //
- if (((flags & GTF_EXCEPT) != 0) && !s_helperCallProperties.NoThrow(helper))
+ // If this call is otherwise side effect free, check its arguments.
+ for (GenTreeArgList* args = call->gtCallArgs; args != nullptr; args = args->Rest())
{
- return true;
- }
-
- // If this is a Pure helper call or an allocator (that will not need to run a finalizer)
- // then we don't need to preserve the side effects (of this call -- we may care about those of the
- // arguments).
- if (s_helperCallProperties.IsPure(helper) ||
- (s_helperCallProperties.IsAllocator(helper) && !s_helperCallProperties.MayFinalize(helper)))
- {
- GenTreeCall* call = tree->AsCall();
- for (GenTreeArgList* args = call->gtCallArgs; args != nullptr; args = args->Rest())
+ if (gtTreeHasSideEffects(args->Current(), flags))
{
- if (gtTreeHasSideEffects(args->Current(), flags))
- {
- return true;
- }
+ return true;
}
- // I'm a little worried that args that assign to temps that are late args will look like
- // side effects...but better to be conservative for now.
- for (GenTreeArgList* args = call->gtCallLateArgs; args != nullptr; args = args->Rest())
+ }
+ // I'm a little worried that args that assign to temps that are late args will look like
+ // side effects...but better to be conservative for now.
+ for (GenTreeArgList* args = call->gtCallLateArgs; args != nullptr; args = args->Rest())
+ {
+ if (gtTreeHasSideEffects(args->Current(), flags))
{
- if (gtTreeHasSideEffects(args->Current(), flags))
- {
- return true;
- }
+ return true;
}
- // Otherwise:
- return false;
}
+
+ // Otherwise:
+ return false;
}
// Otherwise the GT_CALL is considered to have side-effects.
@@ -13980,7 +14534,7 @@ bool Compiler::gtNodeHasSideEffects(GenTreePtr tree, unsigned flags)
if (flags & GTF_EXCEPT)
{
- if (tree->OperMayThrow())
+ if (tree->OperMayThrow(this))
{
return true;
}
@@ -14503,11 +15057,35 @@ void Compiler::gtCheckQuirkAddrExposedLclVar(GenTreePtr tree, GenTreeStack* pare
#endif
}
-// Checks to see if we're allowed to optimize Type::op_Equality or Type::op_Inequality on this operand.
-// We're allowed to convert to GT_EQ/GT_NE if one of the operands is:
-// 1) The result of Object::GetType
-// 2) The result of typeof(...)
-// 3) a local variable of type RuntimeType.
+//------------------------------------------------------------------------
+// gtCanOptimizeTypeEquality: check if tree is a suitable input for a type
+// equality optimization
+//
+// Arguments:
+// tree - tree to examine
+//
+// Return Value:
+// True, if value of tree can be used to optimize type equality tests
+//
+// Notes:
+// Checks to see if the jit is able to optimize Type::op_Equality or
+// Type::op_Inequality calls that consume this tree.
+//
+// The jit can safely convert these methods to GT_EQ/GT_NE if one of
+// the operands is:
+// 1) The result of Object::GetType
+// 2) The result of typeof(...)
+// 3) Is a null reference
+// 4) Is otherwise known to have type RuntimeType
+//
+// The null reference case is surprisingly common because operator
+// overloading turns the otherwise innocuous
+//
+// Type t = ....;
+// if (t == null)
+//
+// into a method call.
+
bool Compiler::gtCanOptimizeTypeEquality(GenTreePtr tree)
{
if (tree->gtOper == GT_CALL)
@@ -14531,15 +15109,19 @@ bool Compiler::gtCanOptimizeTypeEquality(GenTreePtr tree)
{
return true;
}
- else if (tree->gtOper == GT_LCL_VAR)
+ else if ((tree->gtOper == GT_CNS_INT) && (tree->gtIntCon.gtIconVal == 0))
{
- LclVarDsc* lcl = &(lvaTable[tree->gtLclVarCommon.gtLclNum]);
- if (lcl->TypeGet() == TYP_REF)
+ return true;
+ }
+ else
+ {
+ bool isExact = false;
+ bool isNonNull = false;
+ CORINFO_CLASS_HANDLE clsHnd = gtGetClassHandle(tree, &isExact, &isNonNull);
+
+ if (clsHnd == info.compCompHnd->getBuiltinClass(CLASSID_RUNTIME_TYPE))
{
- if (lcl->lvVerTypeInfo.GetClassHandle() == info.compCompHnd->getBuiltinClass(CLASSID_RUNTIME_TYPE))
- {
- return true;
- }
+ return true;
}
}
return false;
@@ -15031,10 +15613,13 @@ bool GenTree::isContained() const
}
// these actually produce a register (the flags reg, we just don't model it)
- // and are a separate instruction from the branch that consumes the result
+ // and are a separate instruction from the branch that consumes the result.
+ // They can only produce a result if the child is a SIMD equality comparison.
else if (OperKind() & GTK_RELOP)
{
- assert(!isMarkedContained);
+ // We have to cast away const-ness since AsOp() method is non-const.
+ GenTree* childNode = const_cast<GenTree*>(this)->AsOp()->gtOp1;
+ assert((isMarkedContained == false) || childNode->IsSIMDEqualityOrInequality());
}
// these either produce a result in register or set flags reg.
@@ -15137,15 +15722,15 @@ unsigned GenTreeIndir::Scale()
}
}
-size_t GenTreeIndir::Offset()
+ssize_t GenTreeIndir::Offset()
{
if (isIndirAddrMode())
{
- return Addr()->AsAddrMode()->gtOffset;
+ return Addr()->AsAddrMode()->Offset();
}
else if (Addr()->gtOper == GT_CLS_VAR_ADDR)
{
- return (size_t)Addr()->gtClsVar.gtClsVarHnd;
+ return static_cast<ssize_t>(reinterpret_cast<intptr_t>(Addr()->gtClsVar.gtClsVarHnd));
}
else if (Addr()->IsCnsIntOrI() && Addr()->isContained())
{
@@ -15398,10 +15983,16 @@ bool GenTree::IsFieldAddr(Compiler* comp, GenTreePtr* pObj, GenTreePtr* pStatic,
}
else if (OperGet() == GT_ADD)
{
- // op1 should never be a field sequence (or any other kind of handle)
- assert((gtOp.gtOp1->gtOper != GT_CNS_INT) || !gtOp.gtOp1->IsIconHandle());
- if (gtOp.gtOp2->OperGet() == GT_CNS_INT)
+ // If one operator is a field sequence/handle, the other operator must not also be a field sequence/handle.
+ if ((gtOp.gtOp1->OperGet() == GT_CNS_INT) && gtOp.gtOp1->IsIconHandle())
+ {
+ assert((gtOp.gtOp2->gtOper != GT_CNS_INT) || !gtOp.gtOp2->IsIconHandle());
+ newFldSeq = gtOp.gtOp1->AsIntCon()->gtFieldSeq;
+ baseAddr = gtOp.gtOp2;
+ }
+ else if (gtOp.gtOp2->OperGet() == GT_CNS_INT)
{
+ assert((gtOp.gtOp1->gtOper != GT_CNS_INT) || !gtOp.gtOp1->IsIconHandle());
newFldSeq = gtOp.gtOp2->AsIntCon()->gtFieldSeq;
baseAddr = gtOp.gtOp1;
}
@@ -15491,6 +16082,9 @@ CORINFO_CLASS_HANDLE Compiler::gtGetStructHandleIfPresent(GenTree* tree)
case GT_INDEX:
structHnd = tree->gtIndex.gtStructElemClass;
break;
+ case GT_INDEX_ADDR:
+ structHnd = tree->AsIndexAddr()->gtStructElemClass;
+ break;
case GT_FIELD:
info.compCompHnd->getFieldType(tree->gtField.gtFldHnd, &structHnd);
break;
@@ -15519,12 +16113,12 @@ CORINFO_CLASS_HANDLE Compiler::gtGetStructHandleIfPresent(GenTree* tree)
}
else
#endif
- if (tree->gtFlags & GTF_IND_ARR_INDEX)
{
ArrayInfo arrInfo;
- bool b = GetArrayInfoMap()->Lookup(tree, &arrInfo);
- assert(b);
- structHnd = EncodeElemType(arrInfo.m_elemType, arrInfo.m_elemStructType);
+ if (TryGetArrayInfo(tree->AsIndir(), &arrInfo))
+ {
+ structHnd = EncodeElemType(arrInfo.m_elemType, arrInfo.m_elemStructType);
+ }
}
break;
#ifdef FEATURE_SIMD
@@ -15744,6 +16338,21 @@ CORINFO_CLASS_HANDLE Compiler::gtGetClassHandle(GenTreePtr tree, bool* isExact,
break;
}
+ case GT_BOX:
+ {
+ // Box should just wrap a local var reference which has
+ // the type we're looking for. Also box only represents a
+ // non-nullable value type so result cannot be null.
+ GenTreeBox* box = obj->AsBox();
+ GenTree* boxTemp = box->BoxOp();
+ assert(boxTemp->IsLocal());
+ const unsigned boxTempLcl = boxTemp->AsLclVar()->GetLclNum();
+ objClass = lvaTable[boxTempLcl].lvClassHnd;
+ *isExact = lvaTable[boxTempLcl].lvClassIsExact;
+ *isNonNull = true;
+ break;
+ }
+
default:
{
break;