summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--src/jit/assertionprop.cpp82
-rw-r--r--src/jit/codegenarm64.cpp36
-rw-r--r--src/jit/codegenlegacy.cpp3
-rw-r--r--src/jit/codegenlinear.cpp18
-rw-r--r--src/jit/codegenxarch.cpp14
-rw-r--r--src/jit/compiler.h4
-rw-r--r--src/jit/earlyprop.cpp12
-rw-r--r--src/jit/flowgraph.cpp3
-rw-r--r--src/jit/gentree.cpp48
-rw-r--r--src/jit/gentree.h64
-rw-r--r--src/jit/gtlist.h2
-rw-r--r--src/jit/importer.cpp71
-rw-r--r--src/jit/lclvars.cpp4
-rw-r--r--src/jit/lower.cpp2
-rw-r--r--src/jit/lowerarm64.cpp110
-rw-r--r--src/jit/lowerxarch.cpp18
-rw-r--r--src/jit/morph.cpp254
-rw-r--r--src/jit/rationalize.cpp84
-rw-r--r--src/jit/regalloc.cpp7
-rw-r--r--src/jit/valuenum.cpp22
20 files changed, 315 insertions, 543 deletions
diff --git a/src/jit/assertionprop.cpp b/src/jit/assertionprop.cpp
index 747b032bee..159aa293f4 100644
--- a/src/jit/assertionprop.cpp
+++ b/src/jit/assertionprop.cpp
@@ -1100,6 +1100,11 @@ Compiler::AssertionIndex Compiler::optCreateAssertion(GenTreePtr op1,
CNS_COMMON:
{
+ // TODO-1stClassStructs: handle constant propagation to struct types.
+ if (varTypeIsStruct(lclVar))
+ {
+ goto DONE_ASSERTION;
+ }
//
// Must either be an OAK_EQUAL or an OAK_NOT_EQUAL assertion
//
@@ -2029,7 +2034,12 @@ void Compiler::optAssertionGen(GenTreePtr tree)
{
case GT_ASG:
// VN takes care of non local assertions for assignments and data flow.
- if (optLocalAssertionProp)
+ // TODO-1stClassStructs: Enable assertion prop for struct types.
+ if (varTypeIsStruct(tree))
+ {
+ // Do nothing.
+ }
+ else if (optLocalAssertionProp)
{
assertionIndex = optCreateAssertion(tree->gtOp.gtOp1, tree->gtOp.gtOp2, OAK_EQUAL);
}
@@ -2042,15 +2052,26 @@ void Compiler::optAssertionGen(GenTreePtr tree)
case GT_OBJ:
case GT_BLK:
case GT_DYN_BLK:
+ // TODO-1stClassStructs: These should always be considered to create a non-null
+ // assertion, but previously, when these indirections were implicit due to a block
+ // copy or init, they were not being considered to do so.
+ break;
case GT_IND:
+ // TODO-1stClassStructs: All indirections should be considered to create a non-null
+ // assertion, but previously, when these indirections were implicit due to a block
+ // copy or init, they were not being considered to do so.
+ if (tree->gtType == TYP_STRUCT)
+ {
+ GenTree* parent = tree->gtGetParent(nullptr);
+ if ((parent != nullptr) && (parent->gtOper == GT_ASG))
+ {
+ break;
+ }
+ }
case GT_NULLCHECK:
- // All indirections create non-null assertions
- assertionIndex = optCreateAssertion(tree->AsIndir()->Addr(), nullptr, OAK_NOT_EQUAL);
- break;
-
case GT_ARR_LENGTH:
- // An array length is an indirection (but doesn't derive from GenTreeIndir).
- assertionIndex = optCreateAssertion(tree->AsArrLen()->ArrRef(), nullptr, OAK_NOT_EQUAL);
+ // An array length can create a non-null assertion
+ assertionIndex = optCreateAssertion(tree->gtOp.gtOp1, nullptr, OAK_NOT_EQUAL);
break;
case GT_ARR_BOUNDS_CHECK:
@@ -2608,29 +2629,9 @@ GenTreePtr Compiler::optConstantAssertionProp(AssertionDsc* curAssertion,
else
{
bool isArrIndex = ((tree->gtFlags & GTF_VAR_ARR_INDEX) != 0);
- // If we have done constant propagation of a struct type, it is only valid for zero-init,
- // and we have to ensure that we have the right zero for the type.
- if (varTypeIsStruct(tree))
- {
- assert(curAssertion->op2.u1.iconVal == 0);
- }
-#ifdef FEATURE_SIMD
- if (varTypeIsSIMD(tree))
- {
- var_types simdType = tree->TypeGet();
- tree->ChangeOperConst(GT_CNS_DBL);
- GenTree* initVal = tree;
- initVal->gtType = TYP_FLOAT;
- newTree =
- gtNewSIMDNode(simdType, initVal, nullptr, SIMDIntrinsicInit, TYP_FLOAT, genTypeSize(simdType));
- }
- else
-#endif // FEATURE_SIMD
- {
- newTree->ChangeOperConst(GT_CNS_INT);
- newTree->gtIntCon.gtIconVal = curAssertion->op2.u1.iconVal;
- newTree->ClearIconHandleMask();
- }
+ newTree->ChangeOperConst(GT_CNS_INT);
+ newTree->gtIntCon.gtIconVal = curAssertion->op2.u1.iconVal;
+ newTree->ClearIconHandleMask();
// If we're doing an array index address, assume any constant propagated contributes to the index.
if (isArrIndex)
{
@@ -3420,13 +3421,32 @@ GenTreePtr Compiler::optAssertionProp_Ind(ASSERT_VALARG_TP assertions, const Gen
{
assert(tree->OperIsIndir());
+ // TODO-1stClassStructs: All indirections should be handled here, but
+ // previously, when these indirections were GT_OBJ, or implicit due to a block
+ // copy or init, they were not being handled.
+ if (tree->TypeGet() == TYP_STRUCT)
+ {
+ if (tree->OperIsBlk())
+ {
+ return nullptr;
+ }
+ else
+ {
+ GenTree* parent = tree->gtGetParent(nullptr);
+ if ((parent != nullptr) && parent->OperIsBlkOp())
+ {
+ return nullptr;
+ }
+ }
+ }
+
if (!(tree->gtFlags & GTF_EXCEPT))
{
return nullptr;
}
// Check for add of a constant.
- GenTreePtr op1 = tree->AsIndir()->Addr();
+ GenTreePtr op1 = tree->gtOp.gtOp1;
if ((op1->gtOper == GT_ADD) && (op1->gtOp.gtOp2->gtOper == GT_CNS_INT))
{
op1 = op1->gtOp.gtOp1;
diff --git a/src/jit/codegenarm64.cpp b/src/jit/codegenarm64.cpp
index 06637f9faa..6fe7eb99cb 100644
--- a/src/jit/codegenarm64.cpp
+++ b/src/jit/codegenarm64.cpp
@@ -3346,10 +3346,6 @@ void CodeGen::genCodeForInitBlk(GenTreeBlk* initBlkNode)
unsigned size = initBlkNode->Size();
GenTreePtr dstAddr = initBlkNode->Addr();
GenTreePtr initVal = initBlkNode->Data();
- if (initVal->OperIsInitVal())
- {
- initVal = initVal->gtGetOp1();
- }
assert(!dstAddr->isContained());
assert(!initVal->isContained());
@@ -3516,38 +3512,27 @@ void CodeGen::genCodeForCpBlkUnroll(GenTreeBlk* cpBlkNode)
// str tempReg, [R14, #8]
void CodeGen::genCodeForCpObj(GenTreeObj* cpObjNode)
{
- GenTreePtr dstAddr = cpObjNode->Addr();
- GenTreePtr source = cpObjNode->Data();
- var_types srcAddrType = TYP_BYREF;
- bool sourceIsLocal = false;
-
- assert(source->isContained());
- if (source->gtOper == GT_IND)
- {
- GenTree* srcAddr = source->gtGetOp1();
- assert(!srcAddr->isContained());
- srcAddrType = srcAddr->TypeGet();
- }
- else
- {
- noway_assert(source->IsLocal());
- sourceIsLocal = true;
- }
+ // Make sure we got the arguments of the cpobj operation in the right registers
+ GenTreePtr dstAddr = cpObjNode->Addr();
+ GenTreePtr source = cpObjNode->Data();
+ noway_assert(source->gtOper == GT_IND);
+ GenTreePtr srcAddr = source->gtGetOp1();
bool dstOnStack = dstAddr->OperIsLocalAddr();
#ifdef DEBUG
assert(!dstAddr->isContained());
+ assert(!srcAddr->isContained());
// This GenTree node has data about GC pointers, this means we're dealing
// with CpObj.
assert(cpObjNode->gtGcPtrCount > 0);
#endif // DEBUG
- // Consume the operands and get them into the right registers.
+ // Consume these registers.
// They may now contain gc pointers (depending on their type; gcMarkRegPtrVal will "do the right thing").
genConsumeBlockOp(cpObjNode, REG_WRITE_BARRIER_DST_BYREF, REG_WRITE_BARRIER_SRC_BYREF, REG_NA);
- gcInfo.gcMarkRegPtrVal(REG_WRITE_BARRIER_SRC_BYREF, srcAddrType);
+ gcInfo.gcMarkRegPtrVal(REG_WRITE_BARRIER_SRC_BYREF, srcAddr->TypeGet());
gcInfo.gcMarkRegPtrVal(REG_WRITE_BARRIER_DST_BYREF, dstAddr->TypeGet());
// Temp register used to perform the sequence of loads and stores.
@@ -3619,7 +3604,12 @@ void CodeGen::genCodeForCpBlk(GenTreeBlk* cpBlkNode)
// Make sure we got the arguments of the cpblk operation in the right registers
unsigned blockSize = cpBlkNode->Size();
GenTreePtr dstAddr = cpBlkNode->Addr();
+ GenTreePtr source = cpBlkNode->Data();
+ noway_assert(source->gtOper == GT_IND);
+ GenTreePtr srcAddr = source->gtGetOp1();
+
assert(!dstAddr->isContained());
+ assert(!srcAddr->isContained());
genConsumeBlockOp(cpBlkNode, REG_ARG_0, REG_ARG_1, REG_ARG_2);
diff --git a/src/jit/codegenlegacy.cpp b/src/jit/codegenlegacy.cpp
index ae39ece065..a8d925c2a2 100644
--- a/src/jit/codegenlegacy.cpp
+++ b/src/jit/codegenlegacy.cpp
@@ -10168,9 +10168,6 @@ void CodeGen::genCodeForTreeSmpOp(GenTreePtr tree, regMaskTP destReg, regMaskTP
if (op1 == NULL)
return;
#endif
- __fallthrough;
-
- case GT_INIT_VAL:
/* Generate the operand into some register */
diff --git a/src/jit/codegenlinear.cpp b/src/jit/codegenlinear.cpp
index 980187d5f0..c633e5619e 100644
--- a/src/jit/codegenlinear.cpp
+++ b/src/jit/codegenlinear.cpp
@@ -1197,10 +1197,6 @@ void CodeGen::genConsumeRegs(GenTree* tree)
genUpdateLife(tree);
}
#endif // _TARGET_XARCH_
- else if (tree->OperIsInitVal())
- {
- genConsumeReg(tree->gtGetOp1());
- }
else
{
#ifdef FEATURE_SIMD
@@ -1409,13 +1405,6 @@ void CodeGen::genConsumeBlockSrc(GenTreeBlk* blkNode)
return;
}
}
- else
- {
- if (src->OperIsInitVal())
- {
- src = src->gtGetOp1();
- }
- }
genConsumeReg(src);
}
@@ -1444,13 +1433,6 @@ void CodeGen::genSetBlockSrc(GenTreeBlk* blkNode, regNumber srcReg)
return;
}
}
- else
- {
- if (src->OperIsInitVal())
- {
- src = src->gtGetOp1();
- }
- }
genCopyRegIfNeeded(src, srcReg);
}
diff --git a/src/jit/codegenxarch.cpp b/src/jit/codegenxarch.cpp
index 1752324415..36a6cf5c7f 100644
--- a/src/jit/codegenxarch.cpp
+++ b/src/jit/codegenxarch.cpp
@@ -2766,10 +2766,6 @@ void CodeGen::genCodeForInitBlkRepStos(GenTreeBlk* initBlkNode)
unsigned size = initBlkNode->Size();
GenTreePtr dstAddr = initBlkNode->Addr();
GenTreePtr initVal = initBlkNode->Data();
- if (initVal->OperIsInitVal())
- {
- initVal = initVal->gtGetOp1();
- }
#ifdef DEBUG
assert(!dstAddr->isContained());
@@ -2804,10 +2800,6 @@ void CodeGen::genCodeForInitBlkUnroll(GenTreeBlk* initBlkNode)
unsigned size = initBlkNode->Size();
GenTreePtr dstAddr = initBlkNode->Addr();
GenTreePtr initVal = initBlkNode->Data();
- if (initVal->OperIsInitVal())
- {
- initVal = initVal->gtGetOp1();
- }
assert(!dstAddr->isContained());
assert(!initVal->isContained() || (initVal->IsIntegralConst(0) && ((size & 0xf) == 0)));
@@ -2905,10 +2897,6 @@ void CodeGen::genCodeForInitBlk(GenTreeBlk* initBlkNode)
unsigned blockSize = initBlkNode->Size();
GenTreePtr dstAddr = initBlkNode->Addr();
GenTreePtr initVal = initBlkNode->Data();
- if (initVal->OperIsInitVal())
- {
- initVal = initVal->gtGetOp1();
- }
assert(!dstAddr->isContained());
assert(!initVal->isContained());
@@ -3521,7 +3509,7 @@ void CodeGen::genCodeForCpObj(GenTreeObj* cpObjNode)
}
#endif // DEBUG
- // Consume the operands and get them into the right registers.
+ // Consume these registers.
// They may now contain gc pointers (depending on their type; gcMarkRegPtrVal will "do the right thing").
genConsumeBlockOp(cpObjNode, REG_RDI, REG_RSI, REG_NA);
gcInfo.gcMarkRegPtrVal(REG_RSI, srcAddrType);
diff --git a/src/jit/compiler.h b/src/jit/compiler.h
index 38e59441d4..867a8545ef 100644
--- a/src/jit/compiler.h
+++ b/src/jit/compiler.h
@@ -2323,8 +2323,7 @@ public:
DNER_VMNeedsStackAddr,
DNER_LiveInOutOfHandler,
DNER_LiveAcrossUnmanagedCall,
- DNER_BlockOp, // Is read or written via a block operation that explicitly takes the address.
- DNER_IsStructArg, // Is a struct passed as an argument in a way that requires a stack location.
+ DNER_BlockOp, // Is read or written via a block operation that explicitly takes the address.
#ifdef JIT32_GCENCODER
DNER_PinningRef,
#endif
@@ -4562,7 +4561,6 @@ private:
GenTreePtr fgMorphGetStructAddr(GenTreePtr* pTree, CORINFO_CLASS_HANDLE clsHnd, bool isRValue = false);
GenTreePtr fgMorphBlkNode(GenTreePtr tree, bool isDest);
GenTreePtr fgMorphBlockOperand(GenTreePtr tree, var_types asgType, unsigned blockWidth, bool isDest);
- void fgMorphUnsafeBlk(GenTreeObj* obj);
GenTreePtr fgMorphCopyBlock(GenTreePtr tree);
GenTreePtr fgMorphForRegisterFP(GenTreePtr tree);
GenTreePtr fgMorphSmpOp(GenTreePtr tree, MorphAddrContext* mac = nullptr);
diff --git a/src/jit/earlyprop.cpp b/src/jit/earlyprop.cpp
index 80ec6a01d2..966063ce32 100644
--- a/src/jit/earlyprop.cpp
+++ b/src/jit/earlyprop.cpp
@@ -237,8 +237,12 @@ bool Compiler::optEarlyPropRewriteTree(GenTreePtr tree)
objectRefPtr = tree->gtOp.gtOp1;
propKind = optPropKind::OPK_ARRAYLEN;
}
- else if (tree->OperIsIndir())
+ else if ((tree->OperGet() == GT_IND) && !varTypeIsStruct(tree))
{
+ // TODO-1stClassStructs: The above condition should apply equally to all indirections,
+ // but previously the implicit indirections due to a struct assignment were not
+ // considered, so we are currently limiting it to non-structs to preserve existing
+ // behavior.
// optFoldNullCheck takes care of updating statement info if a null check is removed.
optFoldNullCheck(tree);
@@ -254,7 +258,7 @@ bool Compiler::optEarlyPropRewriteTree(GenTreePtr tree)
return false;
}
- objectRefPtr = tree->AsIndir()->Addr();
+ objectRefPtr = tree->gtOp.gtOp1;
propKind = optPropKind::OPK_OBJ_GETTYPE;
}
else
@@ -511,8 +515,8 @@ void Compiler::optFoldNullCheck(GenTreePtr tree)
return;
}
- assert(tree->OperIsIndir());
- if (tree->AsIndir()->Addr()->OperGet() == GT_LCL_VAR)
+ assert(tree->OperGet() == GT_IND);
+ if (tree->gtGetOp1()->OperGet() == GT_LCL_VAR)
{
// Check if we have the pattern above and find the nullcheck node if we do.
diff --git a/src/jit/flowgraph.cpp b/src/jit/flowgraph.cpp
index d2442ccf15..d9c1f21833 100644
--- a/src/jit/flowgraph.cpp
+++ b/src/jit/flowgraph.cpp
@@ -21273,8 +21273,7 @@ void Compiler::fgAttachStructInlineeToAsg(GenTreePtr tree, GenTreePtr child, COR
assert(tree->gtOper == GT_ASG);
// We have an assignment, we codegen only V05 = call().
- // However, if it is a multireg return on x64/ux we want to assign it to a temp.
- if (child->gtOper == GT_CALL && tree->gtOp.gtOp1->gtOper == GT_LCL_VAR && !child->AsCall()->HasMultiRegRetVal())
+ if (child->gtOper == GT_CALL && tree->gtOp.gtOp1->gtOper == GT_LCL_VAR)
{
return;
}
diff --git a/src/jit/gentree.cpp b/src/jit/gentree.cpp
index 25c22ee943..236edf03fe 100644
--- a/src/jit/gentree.cpp
+++ b/src/jit/gentree.cpp
@@ -7275,32 +7275,17 @@ GenTree* Compiler::gtNewBlockVal(GenTreePtr addr, unsigned size)
{
// By default we treat this as an opaque struct type with known size.
var_types blkType = TYP_STRUCT;
+#if FEATURE_SIMD
if ((addr->gtOper == GT_ADDR) && (addr->gtGetOp1()->OperGet() == GT_LCL_VAR))
{
GenTree* val = addr->gtGetOp1();
-#if FEATURE_SIMD
- if (varTypeIsSIMD(val))
+ if (varTypeIsSIMD(val) && (genTypeSize(val->TypeGet()) == size))
{
- if (genTypeSize(val->TypeGet()) == size)
- {
- blkType = val->TypeGet();
- return addr->gtGetOp1();
- }
- }
- else
-#endif // FEATURE_SIMD
-#ifndef LEGACY_BACKEND
- if (val->TypeGet() == TYP_STRUCT)
- {
- GenTreeLclVarCommon* lcl = addr->gtGetOp1()->AsLclVarCommon();
- LclVarDsc* varDsc = &(lvaTable[lcl->gtLclNum]);
- if ((varDsc->TypeGet() == TYP_STRUCT) && (varDsc->lvExactSize == size))
- {
- return addr->gtGetOp1();
- }
+ blkType = val->TypeGet();
+ return addr->gtGetOp1();
}
-#endif // !LEGACY_BACKEND
}
+#endif // FEATURE_SIMD
return new (this, GT_BLK) GenTreeBlk(GT_BLK, blkType, addr, size);
}
@@ -7383,10 +7368,10 @@ void GenTreeIntCon::FixupInitBlkValue(var_types asgType)
}
#endif // _TARGET_64BIT_
- // Make the type match for evaluation types.
+ // Make the type used in the GT_IND node match for evaluation types.
gtType = asgType;
- // if we are initializing a GC type the value being assigned must be zero (null).
+ // if we are using an GT_INITBLK on a GC type the value being assigned has to be zero (null).
assert(!varTypeIsGC(asgType) || (cns == 0));
}
@@ -7493,6 +7478,9 @@ void Compiler::gtBlockOpInit(GenTreePtr result, GenTreePtr dst, GenTreePtr srcOr
result->gtFlags |= dst->gtFlags & GTF_ALL_EFFECT;
result->gtFlags |= result->gtOp.gtOp2->gtFlags & GTF_ALL_EFFECT;
+ // TODO-1stClassStructs: This should be done only if the destination is non-local.
+ result->gtFlags |= (GTF_GLOB_REF | GTF_ASG);
+
// REVERSE_OPS is necessary because the use must occur before the def
result->gtFlags |= GTF_REVERSE_OPS;
@@ -7563,20 +7551,12 @@ GenTree* Compiler::gtNewBlkOpNode(
srcOrFillVal = srcOrFillVal->gtGetOp1()->gtGetOp1();
}
}
- else
- {
- // InitBlk
- assert(varTypeIsIntegral(srcOrFillVal));
- if (varTypeIsStruct(dst))
- {
- if (!srcOrFillVal->IsIntegralConst(0))
- {
- srcOrFillVal = gtNewOperNode(GT_INIT_VAL, TYP_INT, srcOrFillVal);
- }
- }
- }
GenTree* result = gtNewAssignNode(dst, srcOrFillVal);
+ if (!isCopyBlock)
+ {
+ result->gtFlags |= GTF_BLK_INIT;
+ }
gtBlockOpInit(result, dst, srcOrFillVal, isVolatile);
return result;
}
diff --git a/src/jit/gentree.h b/src/jit/gentree.h
index fc5360b6bc..dd7f7d502a 100644
--- a/src/jit/gentree.h
+++ b/src/jit/gentree.h
@@ -939,6 +939,7 @@ public:
// -- is a volatile block operation
#define GTF_BLK_UNALIGNED 0x02000000 // GT_ASG, GT_STORE_BLK, GT_STORE_OBJ, GT_STORE_DYNBLK
// -- is an unaligned block operation
+#define GTF_BLK_INIT 0x01000000 // GT_ASG, GT_STORE_BLK, GT_STORE_OBJ, GT_STORE_DYNBLK -- is an init block operation
#define GTF_OVERFLOW 0x10000000 // GT_ADD, GT_SUB, GT_MUL, - Need overflow check
// GT_ASG_ADD, GT_ASG_SUB,
@@ -1139,21 +1140,6 @@ public:
return (gtOper == GT_LEA);
}
- static bool OperIsInitVal(genTreeOps gtOper)
- {
- return (gtOper == GT_INIT_VAL);
- }
-
- bool OperIsInitVal() const
- {
- return OperIsInitVal(OperGet());
- }
-
- bool IsConstInitVal()
- {
- return (gtOper == GT_CNS_INT) || (OperIsInitVal() && (gtGetOp1()->gtOper == GT_CNS_INT));
- }
-
bool OperIsBlkOp();
bool OperIsCopyBlkOp();
bool OperIsInitBlkOp();
@@ -4180,19 +4166,6 @@ struct GenTreeObj : public GenTreeBlk
// Let's assert it just to be safe.
noway_assert(roundUp(gtBlkSize, REGSIZE_BYTES) == gtBlkSize);
}
- else
- {
- genTreeOps newOper = GT_BLK;
- if (gtOper == GT_STORE_OBJ)
- {
- newOper = GT_STORE_BLK;
- }
- else
- {
- assert(gtOper == GT_OBJ);
- }
- SetOper(newOper);
- }
}
void CopyGCInfo(GenTreeObj* srcObj)
@@ -4855,31 +4828,34 @@ inline bool GenTree::OperIsDynBlkOp()
return false;
}
-inline bool GenTree::OperIsInitBlkOp()
+inline bool GenTree::OperIsCopyBlkOp()
{
- if (!OperIsBlkOp())
- {
- return false;
- }
-#ifndef LEGACY_BACKEND
- GenTree* src;
if (gtOper == GT_ASG)
{
- src = gtGetOp2();
+ return (varTypeIsStruct(gtGetOp1()) && ((gtFlags & GTF_BLK_INIT) == 0));
}
- else
+#ifndef LEGACY_BACKEND
+ else if (OperIsStoreBlk())
{
- src = AsBlk()->Data()->gtSkipReloadOrCopy();
+ return ((gtFlags & GTF_BLK_INIT) == 0);
}
-#else // LEGACY_BACKEND
- GenTree* src = gtGetOp2();
-#endif // LEGACY_BACKEND
- return src->OperIsInitVal() || src->OperIsConst();
+#endif
+ return false;
}
-inline bool GenTree::OperIsCopyBlkOp()
+inline bool GenTree::OperIsInitBlkOp()
{
- return OperIsBlkOp() && !OperIsInitBlkOp();
+ if (gtOper == GT_ASG)
+ {
+ return (varTypeIsStruct(gtGetOp1()) && ((gtFlags & GTF_BLK_INIT) != 0));
+ }
+#ifndef LEGACY_BACKEND
+ else if (OperIsStoreBlk())
+ {
+ return ((gtFlags & GTF_BLK_INIT) != 0);
+ }
+#endif
+ return false;
}
//------------------------------------------------------------------------
diff --git a/src/jit/gtlist.h b/src/jit/gtlist.h
index 92265a7359..a330a6bb37 100644
--- a/src/jit/gtlist.h
+++ b/src/jit/gtlist.h
@@ -93,8 +93,6 @@ GTNODE(SIMD_CHK , "simdChk" ,GenTreeBoundsChk ,0,GTK_SPECIAL|GTK_
GTNODE(ALLOCOBJ , "allocObj" ,GenTreeAllocObj ,0,GTK_UNOP|GTK_EXOP) // object allocator
-GTNODE(INIT_VAL , "initVal" ,GenTreeOp ,0,GTK_UNOP) // Initialization value for an initBlk
-
//-----------------------------------------------------------------------------
// Binary operators (2 operands):
//-----------------------------------------------------------------------------
diff --git a/src/jit/importer.cpp b/src/jit/importer.cpp
index 11c8967ed1..80336346f3 100644
--- a/src/jit/importer.cpp
+++ b/src/jit/importer.cpp
@@ -600,9 +600,13 @@ inline void Compiler::impAppendStmt(GenTreePtr stmt, unsigned chkLevel)
// Assignment to (unaliased) locals don't count as a side-effect as
// we handle them specially using impSpillLclRefs(). Temp locals should
// be fine too.
+ // TODO-1stClassStructs: The check below should apply equally to struct assignments,
+ // but previously the block ops were always being marked GTF_GLOB_REF, even if
+ // the operands could not be global refs.
if ((expr->gtOper == GT_ASG) && (expr->gtOp.gtOp1->gtOper == GT_LCL_VAR) &&
- !(expr->gtOp.gtOp1->gtFlags & GTF_GLOB_REF) && !gtHasLocalsWithAddrOp(expr->gtOp.gtOp2))
+ !(expr->gtOp.gtOp1->gtFlags & GTF_GLOB_REF) && !gtHasLocalsWithAddrOp(expr->gtOp.gtOp2) &&
+ !varTypeIsStruct(expr->gtOp.gtOp1))
{
unsigned op2Flags = expr->gtOp.gtOp2->gtFlags & GTF_GLOB_EFFECT;
assert(flags == (op2Flags | GTF_ASG));
@@ -1135,13 +1139,9 @@ GenTreePtr Compiler::impAssignStructPtr(GenTreePtr destAddr,
if (destAddr->OperGet() == GT_ADDR)
{
GenTree* destNode = destAddr->gtGetOp1();
- // If the actual destination is a local (for non-LEGACY_BACKEND), or already a block node, or is a node that
+ // If the actual destination is already a block node, or is a node that
// will be morphed, don't insert an OBJ(ADDR).
- if (destNode->gtOper == GT_INDEX || destNode->OperIsBlk()
-#ifndef LEGACY_BACKEND
- || ((destNode->OperGet() == GT_LCL_VAR) && (destNode->TypeGet() == src->TypeGet()))
-#endif // !LEGACY_BACKEND
- )
+ if (destNode->gtOper == GT_INDEX || destNode->OperIsBlk())
{
dest = destNode;
}
@@ -1190,9 +1190,6 @@ GenTreePtr Compiler::impAssignStructPtr(GenTreePtr destAddr,
{
// Mark the struct LclVar as used in a MultiReg return context
// which currently makes it non promotable.
- // TODO-1stClassStructs: Eliminate this pessimization when we can more generally
- // handle multireg returns.
- lcl->gtFlags |= GTF_DONT_CSE;
lvaTable[lcl->gtLclVarCommon.gtLclNum].lvIsMultiRegRet = true;
}
else // The call result is not a multireg return
@@ -1207,20 +1204,12 @@ GenTreePtr Compiler::impAssignStructPtr(GenTreePtr destAddr,
dest = lcl;
#if defined(_TARGET_ARM_)
- // TODO-Cleanup: This should have been taken care of in the above HasMultiRegRetVal() case,
- // but that method has not been updadted to include ARM.
impMarkLclDstNotPromotable(lcl->gtLclVarCommon.gtLclNum, src, structHnd);
- lcl->gtFlags |= GTF_DONT_CSE;
#elif defined(FEATURE_UNIX_AMD64_STRUCT_PASSING)
// Not allowed for FEATURE_CORCLR which is the only SKU available for System V OSs.
assert(!src->gtCall.IsVarargs() && "varargs not allowed for System V OSs.");
// Make the struct non promotable. The eightbytes could contain multiple fields.
- // TODO-1stClassStructs: Eliminate this pessimization when we can more generally
- // handle multireg returns.
- // TODO-Cleanup: Why is this needed here? This seems that it will set this even for
- // non-multireg returns.
- lcl->gtFlags |= GTF_DONT_CSE;
lvaTable[lcl->gtLclVarCommon.gtLclNum].lvIsMultiRegRet = true;
#endif
}
@@ -1262,11 +1251,10 @@ GenTreePtr Compiler::impAssignStructPtr(GenTreePtr destAddr,
src->gtType = genActualType(returnType);
call->gtType = src->gtType;
- // If we've changed the type, and it no longer matches a local destination,
- // we must use an indirection.
- if ((dest != nullptr) && (dest->OperGet() == GT_LCL_VAR) && (dest->TypeGet() != asgType))
+ // 1stClassStructToDo: We shouldn't necessarily need this.
+ if (dest != nullptr)
{
- dest = nullptr;
+ dest = gtNewOperNode(GT_IND, returnType, gtNewOperNode(GT_ADDR, TYP_BYREF, dest));
}
// !!! The destination could be on stack. !!!
@@ -1337,19 +1325,21 @@ GenTreePtr Compiler::impAssignStructPtr(GenTreePtr destAddr,
}
else if (src->IsLocal())
{
+ // TODO-1stClassStructs: Eliminate this; it is only here to minimize diffs in the
+ // initial implementation. Previously the source would have been under a GT_ADDR, which
+ // would cause it to be marked GTF_DONT_CSE.
asgType = src->TypeGet();
- }
- else if (asgType == TYP_STRUCT)
- {
- asgType = impNormStructType(structHnd);
- src->gtType = asgType;
-#ifdef LEGACY_BACKEND
+ src->gtFlags |= GTF_DONT_CSE;
if (asgType == TYP_STRUCT)
{
GenTree* srcAddr = gtNewOperNode(GT_ADDR, TYP_BYREF, src);
src = gtNewOperNode(GT_IND, TYP_STRUCT, srcAddr);
}
-#endif
+ }
+ else if (asgType == TYP_STRUCT)
+ {
+ asgType = impNormStructType(structHnd);
+ src->gtType = asgType;
}
if (dest == nullptr)
{
@@ -7838,9 +7828,6 @@ GenTreePtr Compiler::impFixupStructReturnType(GenTreePtr op, CORINFO_CLASS_HANDL
unsigned lclNum = op->gtLclVarCommon.gtLclNum;
lvaTable[lclNum].lvIsMultiRegRet = true;
- // TODO-1stClassStructs: Handle constant propagation and CSE-ing of multireg returns.
- op->gtFlags |= GTF_DONT_CSE;
-
return op;
}
@@ -7865,10 +7852,6 @@ GenTreePtr Compiler::impFixupStructReturnType(GenTreePtr op, CORINFO_CLASS_HANDL
unsigned lclNum = op->gtLclVarCommon.gtLclNum;
// Make sure this struct type stays as struct so that we can return it as an HFA
lvaTable[lclNum].lvIsMultiRegRet = true;
-
- // TODO-1stClassStructs: Handle constant propagation and CSE-ing of multireg returns.
- op->gtFlags |= GTF_DONT_CSE;
-
return op;
}
@@ -7901,10 +7884,6 @@ GenTreePtr Compiler::impFixupStructReturnType(GenTreePtr op, CORINFO_CLASS_HANDL
// Make sure this struct type is not struct promoted
lvaTable[lclNum].lvIsMultiRegRet = true;
-
- // TODO-1stClassStructs: Handle constant propagation and CSE-ing of multireg returns.
- op->gtFlags |= GTF_DONT_CSE;
-
return op;
}
@@ -9556,14 +9535,6 @@ void Compiler::impImportBlockCode(BasicBlock* block)
SPILL_APPEND:
- // We need to call impSpillLclRefs() for a struct type lclVar.
- // This is done for non-block assignments in the handling of stloc.
- if ((op1->OperGet() == GT_ASG) && varTypeIsStruct(op1->gtOp.gtOp1) &&
- (op1->gtOp.gtOp1->gtOper == GT_LCL_VAR))
- {
- impSpillLclRefs(op1->gtOp.gtOp1->AsLclVarCommon()->gtLclNum);
- }
-
/* Append 'op1' to the list of statements */
impAppendTree(op1, (unsigned)CHECK_SPILL_ALL, impCurStmtOffs);
goto DONE_APPEND;
@@ -14815,10 +14786,6 @@ GenTreePtr Compiler::impAssignMultiRegTypeToVar(GenTreePtr op, CORINFO_CLASS_HAN
unsigned tmpNum = lvaGrabTemp(true DEBUGARG("Return value temp for multireg return."));
impAssignTempGen(tmpNum, op, hClass, (unsigned)CHECK_SPILL_NONE);
GenTreePtr ret = gtNewLclvNode(tmpNum, op->gtType);
-
- // TODO-1stClassStructs: Handle constant propagation and CSE-ing of multireg returns.
- ret->gtFlags |= GTF_DONT_CSE;
-
assert(IsMultiRegReturnedType(hClass));
// Mark the var so that fields are not promoted and stay together.
diff --git a/src/jit/lclvars.cpp b/src/jit/lclvars.cpp
index 299040da81..6895ba5857 100644
--- a/src/jit/lclvars.cpp
+++ b/src/jit/lclvars.cpp
@@ -1897,10 +1897,6 @@ void Compiler::lvaSetVarDoNotEnregister(unsigned varNum DEBUGARG(DoNotEnregister
JITDUMP("it is a struct\n");
assert(varTypeIsStruct(varDsc));
break;
- case DNER_IsStructArg:
- JITDUMP("it is a struct arg\n");
- assert(varTypeIsStruct(varDsc));
- break;
case DNER_BlockOp:
JITDUMP("written in a block op\n");
varDsc->lvLclBlockOpAddr = 1;
diff --git a/src/jit/lower.cpp b/src/jit/lower.cpp
index b96196facb..0e3477f531 100644
--- a/src/jit/lower.cpp
+++ b/src/jit/lower.cpp
@@ -3783,6 +3783,8 @@ void Lowering::LowerStoreInd(GenTree* node)
void Lowering::LowerBlockStore(GenTreeBlk* blkNode)
{
GenTree* src = blkNode->Data();
+ // TODO-1stClassStructs: Don't require this.
+ assert(blkNode->OperIsInitBlkOp() || !src->OperIsLocal());
TryCreateAddrMode(LIR::Use(BlockRange(), &blkNode->Addr(), blkNode), false);
}
diff --git a/src/jit/lowerarm64.cpp b/src/jit/lowerarm64.cpp
index cc9e2266d2..8b0665adfa 100644
--- a/src/jit/lowerarm64.cpp
+++ b/src/jit/lowerarm64.cpp
@@ -126,10 +126,6 @@ void Lowering::TreeNodeInfoInit(GenTree* tree)
TreeNodeInfo* info = &(tree->gtLsraInfo);
RegisterType registerType = TypeGet(tree);
- JITDUMP("TreeNodeInfoInit for: ");
- DISPNODE(tree);
- JITDUMP("\n");
-
switch (tree->OperGet())
{
GenTree* op1;
@@ -542,12 +538,6 @@ void Lowering::TreeNodeInfoInit(GenTree* tree)
TreeNodeInfoInitBlockStore(tree->AsBlk());
break;
- case GT_INIT_VAL:
- // Always a passthrough of its child's value.
- info->srcCount = 0;
- info->dstCount = 0;
- break;
-
case GT_LCLHEAP:
{
info->srcCount = 1;
@@ -1230,9 +1220,8 @@ void Lowering::TreeNodeInfoInitPutArgStk(GenTreePutArgStk* argNode, fgArgTabEntr
void Lowering::TreeNodeInfoInitBlockStore(GenTreeBlk* blkNode)
{
- GenTree* dstAddr = blkNode->Addr();
- unsigned size = blkNode->gtBlkSize;
- GenTree* source = blkNode->Data();
+ GenTree* dstAddr = blkNode->Addr();
+ unsigned size;
LinearScan* l = m_lsra;
Compiler* compiler = comp;
@@ -1240,44 +1229,16 @@ void Lowering::TreeNodeInfoInitBlockStore(GenTreeBlk* blkNode)
// We may require an additional source or temp register for the size.
blkNode->gtLsraInfo.srcCount = 2;
blkNode->gtLsraInfo.dstCount = 0;
- GenTreePtr srcAddrOrFill = nullptr;
- bool isInitBlk = blkNode->OperIsInitBlkOp();
- if (!isInitBlk)
+ if ((blkNode->OperGet() == GT_STORE_OBJ) && (blkNode->AsObj()->gtGcPtrCount == 0))
{
- // CopyObj or CopyBlk
- if ((blkNode->OperGet() == GT_STORE_OBJ) && ((blkNode->AsObj()->gtGcPtrCount == 0) || blkNode->gtBlkOpGcUnsafe))
- {
- blkNode->SetOper(GT_STORE_BLK);
- }
- if (source->gtOper == GT_IND)
- {
- srcAddrOrFill = blkNode->Data()->gtGetOp1();
- // We're effectively setting source as contained, but can't call MakeSrcContained, because the
- // "inheritance" of the srcCount is to a child not a parent - it would "just work" but could be misleading.
- // If srcAddr is already non-contained, we don't need to change it.
- if (srcAddrOrFill->gtLsraInfo.getDstCount() == 0)
- {
- srcAddrOrFill->gtLsraInfo.setDstCount(1);
- srcAddrOrFill->gtLsraInfo.setSrcCount(source->gtLsraInfo.srcCount);
- }
- m_lsra->clearOperandCounts(source);
- }
- else if (!source->IsMultiRegCall() && !source->OperIsSIMD())
- {
- assert(source->IsLocal());
- MakeSrcContained(blkNode, source);
- }
+ blkNode->SetOper(GT_STORE_BLK);
}
- if (isInitBlk)
+ if (blkNode->OperIsInitBlkOp())
{
- GenTreePtr initVal = source;
- if (initVal->OperIsInitVal())
- {
- initVal = initVal->gtGetOp1();
- }
- srcAddrOrFill = initVal;
+ unsigned size = blkNode->gtBlkSize;
+ GenTreePtr initVal = blkNode->Data();
#if 0
// TODO-ARM64-CQ: Currently we generate a helper call for every
@@ -1304,6 +1265,8 @@ void Lowering::TreeNodeInfoInitBlockStore(GenTreeBlk* blkNode)
initVal->gtType = TYP_LONG;
}
+ MakeSrcContained(tree, blockSize);
+
// In case we have a buffer >= 16 bytes
// we can use SSE2 to do a 128-bit store in a single
// instruction.
@@ -1344,12 +1307,34 @@ void Lowering::TreeNodeInfoInitBlockStore(GenTreeBlk* blkNode)
{
// CopyObj or CopyBlk
// Sources are src and dest and size if not constant.
+ unsigned size = blkNode->gtBlkSize;
+ GenTreePtr source = blkNode->Data();
+ GenTree* srcAddr = nullptr;
+ if (source->gtOper == GT_IND)
+ {
+ srcAddr = blkNode->Data()->gtGetOp1();
+ // We're effectively setting source as contained, but can't call MakeSrcContained, because the
+ // "inheritance" of the srcCount is to a child not a parent - it would "just work" but could be misleading.
+ // If srcAddr is already non-contained, we don't need to change it.
+ if (srcAddr->gtLsraInfo.getDstCount() == 0)
+ {
+ srcAddr->gtLsraInfo.setDstCount(1);
+ srcAddr->gtLsraInfo.setSrcCount(source->gtLsraInfo.srcCount);
+ }
+ m_lsra->clearOperandCounts(source);
+ }
+ else
+ {
+ assert(source->IsLocal());
+ MakeSrcContained(blkNode, source);
+ }
if (blkNode->OperGet() == GT_STORE_OBJ)
{
// CopyObj
GenTreeObj* objNode = blkNode->AsObj();
+ GenTreePtr source = objNode->Data();
unsigned slots = objNode->gtSlots;
@@ -1378,19 +1363,15 @@ void Lowering::TreeNodeInfoInitBlockStore(GenTreeBlk* blkNode)
blkNode->gtLsraInfo.internalIntCount = 1;
dstAddr->gtLsraInfo.setSrcCandidates(l, RBM_WRITE_BARRIER_DST_BYREF);
- // If we have a source address we want it in REG_WRITE_BARRIER_SRC_BYREF.
- // Otherwise, if it is a local, codegen will put its address in REG_WRITE_BARRIER_SRC_BYREF,
- // which is killed by a StoreObj (and thus needn't be reserved).
- if (srcAddrOrFill != nullptr)
- {
- srcAddrOrFill->gtLsraInfo.setSrcCandidates(l, RBM_WRITE_BARRIER_SRC_BYREF);
- }
+ srcAddr->gtLsraInfo.setSrcCandidates(l, RBM_WRITE_BARRIER_SRC_BYREF);
}
else
{
// CopyBlk
- short internalIntCount = 0;
- regMaskTP internalIntCandidates = RBM_NONE;
+ unsigned size = blkNode->gtBlkSize;
+ GenTreePtr dstAddr = blkNode->Addr();
+ short internalIntCount = 0;
+ regMaskTP internalIntCandidates = RBM_NONE;
#if 0
// In case of a CpBlk with a constant size and less than CPBLK_UNROLL_LIMIT size
@@ -1398,8 +1379,11 @@ void Lowering::TreeNodeInfoInitBlockStore(GenTreeBlk* blkNode)
// TODO-ARM64-CQ: cpblk loop unrolling is currently not implemented.
- if ((size != 0) && (size <= INITBLK_UNROLL_LIMIT))
+ if (blockSize->IsCnsIntOrI() && blockSize->gtIntCon.gtIconVal <= CPBLK_UNROLL_LIMIT)
{
+ assert(!blockSize->IsIconHandle());
+ ssize_t size = blockSize->gtIntCon.gtIconVal;
+
// If we have a buffer between XMM_REGSIZE_BYTES and CPBLK_UNROLL_LIMIT bytes, we'll use SSE2.
// Structs and buffer with sizes <= CPBLK_UNROLL_LIMIT bytes are occurring in more than 95% of
// our framework assemblies, so this is the main code generation scheme we'll use.
@@ -1420,9 +1404,9 @@ void Lowering::TreeNodeInfoInitBlockStore(GenTreeBlk* blkNode)
// If src or dst are on stack, we don't have to generate the address into a register
// because it's just some constant+SP
- if (srcAddr != nullptr && srcAddrOrFill->OperIsLocalAddr())
+ if (srcAddr->OperIsLocalAddr())
{
- MakeSrcContained(blkNode, srcAddrOrFill);
+ MakeSrcContained(blkNode, srcAddr);
}
if (dstAddr->OperIsLocalAddr())
@@ -1441,9 +1425,15 @@ void Lowering::TreeNodeInfoInitBlockStore(GenTreeBlk* blkNode)
dstAddr->gtLsraInfo.setSrcCandidates(l, RBM_ARG_0);
// The srcAddr goes in arg1.
- if (srcAddrOrFill != nullptr)
+ if (srcAddr != nullptr)
{
- srcAddrOrFill->gtLsraInfo.setSrcCandidates(l, RBM_ARG_1);
+ srcAddr->gtLsraInfo.setSrcCandidates(l, RBM_ARG_1);
+ }
+ else
+ {
+ // This is a local; we'll use a temp register for its address.
+ internalIntCandidates |= RBM_ARG_1;
+ internalIntCount++;
}
if (size != 0)
{
diff --git a/src/jit/lowerxarch.cpp b/src/jit/lowerxarch.cpp
index e7af9f0223..da51f3a8fc 100644
--- a/src/jit/lowerxarch.cpp
+++ b/src/jit/lowerxarch.cpp
@@ -665,12 +665,6 @@ void Lowering::TreeNodeInfoInit(GenTree* tree)
TreeNodeInfoInitBlockStore(tree->AsBlk());
break;
- case GT_INIT_VAL:
- // Always a passthrough of its child's value.
- info->srcCount = 0;
- info->dstCount = 0;
- break;
-
case GT_LCLHEAP:
TreeNodeInfoInitLclHeap(tree);
break;
@@ -1673,7 +1667,7 @@ void Lowering::TreeNodeInfoInitBlockStore(GenTreeBlk* blkNode)
}
m_lsra->clearOperandCounts(source);
}
- else if (!source->IsMultiRegCall() && !source->OperIsSIMD())
+ else if (!source->OperIsSIMD())
{
assert(source->IsLocal());
MakeSrcContained(blkNode, source);
@@ -1683,11 +1677,7 @@ void Lowering::TreeNodeInfoInitBlockStore(GenTreeBlk* blkNode)
if (isInitBlk)
{
GenTree* initVal = source;
- if (initVal->OperIsInitVal())
- {
- initVal = initVal->gtGetOp1();
- }
- srcAddrOrFill = initVal;
+ srcAddrOrFill = source;
// If we have an InitBlk with constant block size we can optimize several ways:
// a) If the size is smaller than a small memory page but larger than INITBLK_UNROLL_LIMIT bytes
// we use rep stosb since this reduces the register pressure in LSRA and we have
@@ -1739,10 +1729,6 @@ void Lowering::TreeNodeInfoInitBlockStore(GenTreeBlk* blkNode)
// a pack of 16 init value constants.
blkNode->gtLsraInfo.internalFloatCount = 1;
blkNode->gtLsraInfo.setInternalCandidates(l, l->internalFloatRegCandidates());
- if ((fill == 0) && ((size & 0xf) == 0))
- {
- MakeSrcContained(blkNode, source);
- }
}
blkNode->gtBlkOpKind = GenTreeBlk::BlkOpKindUnroll;
diff --git a/src/jit/morph.cpp b/src/jit/morph.cpp
index 77e2c82fbd..c0f2697d1e 100644
--- a/src/jit/morph.cpp
+++ b/src/jit/morph.cpp
@@ -8487,7 +8487,7 @@ GenTreePtr Compiler::fgMorphOneAsgBlockOp(GenTreePtr tree)
// with the bits to create a single assigment.
noway_assert(size <= REGSIZE_BYTES);
- if (isInitBlock && !src->IsConstInitVal())
+ if (isInitBlock && (src->gtOper != GT_CNS_INT))
{
return nullptr;
}
@@ -8660,12 +8660,8 @@ GenTreePtr Compiler::fgMorphOneAsgBlockOp(GenTreePtr tree)
}
else
#endif
+ if (src->IsCnsIntOrI())
{
- if (src->OperIsInitVal())
- {
- src = src->gtGetOp1();
- }
- assert(src->IsCnsIntOrI());
// This will mutate the integer constant, in place, to be the correct
// value for the type we are using in the assignment.
src->AsIntCon()->FixupInitBlkValue(asgType);
@@ -8733,8 +8729,7 @@ GenTreePtr Compiler::fgMorphOneAsgBlockOp(GenTreePtr tree)
GenTreePtr Compiler::fgMorphInitBlock(GenTreePtr tree)
{
- // We must have the GT_ASG form of InitBlkOp.
- noway_assert((tree->OperGet() == GT_ASG) && tree->OperIsInitBlkOp());
+ noway_assert(tree->gtOper == GT_ASG && varTypeIsStruct(tree));
#ifdef DEBUG
bool morphed = false;
#endif // DEBUG
@@ -8749,12 +8744,6 @@ GenTreePtr Compiler::fgMorphInitBlock(GenTreePtr tree)
tree->gtOp.gtOp1 = dest;
}
tree->gtType = dest->TypeGet();
- // (Constant propagation may cause a TYP_STRUCT lclVar to be changed to GT_CNS_INT, and its
- // type will be the type of the original lclVar, in which case we will change it to TYP_INT).
- if ((src->OperGet() == GT_CNS_INT) && varTypeIsStruct(src))
- {
- src->gtType = TYP_INT;
- }
JITDUMP("\nfgMorphInitBlock:");
GenTreePtr oneAsgTree = fgMorphOneAsgBlockOp(tree);
@@ -8766,7 +8755,7 @@ GenTreePtr Compiler::fgMorphInitBlock(GenTreePtr tree)
else
{
GenTree* destAddr = nullptr;
- GenTree* initVal = src->OperIsInitVal() ? src->gtGetOp1() : src;
+ GenTree* initVal = src;
GenTree* blockSize = nullptr;
unsigned blockWidth = 0;
FieldSeqNode* destFldSeq = nullptr;
@@ -8835,7 +8824,6 @@ GenTreePtr Compiler::fgMorphInitBlock(GenTreePtr tree)
if (destLclVar->lvPromoted && blockWidthIsConst)
{
- assert(initVal->OperGet() == GT_CNS_INT);
noway_assert(varTypeIsStruct(destLclVar));
noway_assert(!opts.MinOpts());
if (destLclVar->lvAddrExposed & destLclVar->lvContainsHoles)
@@ -8895,9 +8883,25 @@ GenTreePtr Compiler::fgMorphInitBlock(GenTreePtr tree)
#if CPU_USES_BLOCK_MOVE
compBlkOpUsed = true;
#endif
- dest = fgMorphBlockOperand(dest, dest->TypeGet(), blockWidth, true);
- tree->gtOp.gtOp1 = dest;
- tree->gtFlags |= (dest->gtFlags & GTF_ALL_EFFECT);
+ if (!dest->OperIsBlk())
+ {
+ GenTree* destAddr = gtNewOperNode(GT_ADDR, TYP_BYREF, dest);
+ CORINFO_CLASS_HANDLE clsHnd = gtGetStructHandleIfPresent(dest);
+ if (clsHnd == NO_CLASS_HANDLE)
+ {
+ dest = new (this, GT_BLK) GenTreeBlk(GT_BLK, dest->TypeGet(), destAddr, blockWidth);
+ }
+ else
+ {
+ GenTree* newDest = gtNewObjNode(clsHnd, destAddr);
+ if (newDest->OperGet() == GT_OBJ)
+ {
+ gtSetObjGcInfo(newDest->AsObj());
+ }
+ dest = newDest;
+ }
+ tree->gtOp.gtOp1 = dest;
+ }
}
else
{
@@ -9197,7 +9201,7 @@ GenTree* Compiler::fgMorphBlkNode(GenTreePtr tree, bool isDest)
//
// Notes:
// This does the following:
-// - Ensures that a struct operand is a block node or (for non-LEGACY_BACKEND) lclVar.
+// - Ensures that a struct operand is a block node.
// - Ensures that any COMMAs are above ADDR nodes.
// Although 'tree' WAS an operand of a block assignment, the assignment
// may have been retyped to be a scalar assignment.
@@ -9206,6 +9210,10 @@ GenTree* Compiler::fgMorphBlockOperand(GenTree* tree, var_types asgType, unsigne
{
GenTree* effectiveVal = tree->gtEffectiveVal();
+    // TODO-1stClassStructs: We would like to transform non-TYP_STRUCT nodes to
+ // either plain lclVars or GT_INDs. However, for now we want to preserve most
+ // of the block nodes until the Rationalizer.
+
if (!varTypeIsStruct(asgType))
{
if (effectiveVal->OperIsIndir())
@@ -9232,138 +9240,66 @@ GenTree* Compiler::fgMorphBlockOperand(GenTree* tree, var_types asgType, unsigne
}
else
{
- GenTreeIndir* indirTree = nullptr;
- GenTreeLclVarCommon* lclNode = nullptr;
- bool needsIndirection = true;
-
- if (effectiveVal->OperIsIndir())
- {
- indirTree = effectiveVal->AsIndir();
- GenTree* addr = effectiveVal->AsIndir()->Addr();
- if ((addr->OperGet() == GT_ADDR) && (addr->gtGetOp1()->OperGet() == GT_LCL_VAR))
- {
- lclNode = addr->gtGetOp1()->AsLclVarCommon();
- }
- }
- else if (effectiveVal->OperGet() == GT_LCL_VAR)
- {
- lclNode = effectiveVal->AsLclVarCommon();
- }
#ifdef FEATURE_SIMD
if (varTypeIsSIMD(asgType))
{
- if ((indirTree != nullptr) && (lclNode == nullptr) && (indirTree->Addr()->OperGet() == GT_ADDR) &&
- (indirTree->Addr()->gtGetOp1()->gtOper == GT_SIMD))
+ if (effectiveVal->OperIsIndir())
{
- assert(!isDest);
- needsIndirection = false;
- effectiveVal = indirTree->Addr()->gtGetOp1();
+ GenTree* addr = effectiveVal->AsIndir()->Addr();
+ if (!isDest && (addr->OperGet() == GT_ADDR))
+ {
+ if ((addr->gtGetOp1()->gtOper == GT_SIMD) || (addr->gtGetOp1()->OperGet() == GT_LCL_VAR))
+ {
+ effectiveVal = addr->gtGetOp1();
+ }
+ }
+ else if (isDest && !effectiveVal->OperIsBlk())
+ {
+ effectiveVal = new (this, GT_BLK) GenTreeBlk(GT_BLK, asgType, addr, blockWidth);
+ }
}
- if (effectiveVal->OperIsSIMD())
+ else if (!effectiveVal->OperIsSIMD() && (!effectiveVal->IsLocal() || isDest) && !effectiveVal->OperIsBlk())
{
- needsIndirection = false;
+ GenTree* addr = gtNewOperNode(GT_ADDR, TYP_BYREF, effectiveVal);
+ effectiveVal = new (this, GT_BLK) GenTreeBlk(GT_BLK, asgType, addr, blockWidth);
}
}
+ else
#endif // FEATURE_SIMD
- if (lclNode != nullptr)
+ if (!effectiveVal->OperIsBlk())
{
- LclVarDsc* varDsc = &(lvaTable[lclNode->gtLclNum]);
- if (varTypeIsStruct(varDsc) && (varDsc->lvExactSize == blockWidth))
+ GenTree* addr = gtNewOperNode(GT_ADDR, TYP_BYREF, effectiveVal);
+ CORINFO_CLASS_HANDLE clsHnd = gtGetStructHandleIfPresent(effectiveVal);
+ GenTree* newTree;
+ if (clsHnd == NO_CLASS_HANDLE)
{
-#ifndef LEGACY_BACKEND
- effectiveVal = lclNode;
- needsIndirection = false;
-#endif // !LEGACY_BACKEND
+ newTree = new (this, GT_BLK) GenTreeBlk(GT_BLK, TYP_STRUCT, addr, blockWidth);
}
else
{
- // This may be a lclVar that was determined to be address-exposed.
- effectiveVal->gtFlags |= (lclNode->gtFlags & GTF_ALL_EFFECT);
- }
- }
- if (needsIndirection)
- {
- if (indirTree != nullptr)
- {
- // We should never find a struct indirection on the lhs of an assignment.
- assert(!isDest || indirTree->OperIsBlk());
- if (!isDest && indirTree->OperIsBlk())
+ newTree = gtNewObjNode(clsHnd, addr);
+ if (isDest && (newTree->OperGet() == GT_OBJ))
{
- (void)fgMorphBlkToInd(effectiveVal->AsBlk(), asgType);
+ gtSetObjGcInfo(newTree->AsObj());
}
- }
- else
- {
- GenTree* newTree;
- GenTree* addr = gtNewOperNode(GT_ADDR, TYP_BYREF, effectiveVal);
- if (isDest)
+ if (effectiveVal->IsLocal() && ((effectiveVal->gtFlags & GTF_GLOB_EFFECT) == 0))
{
- CORINFO_CLASS_HANDLE clsHnd = gtGetStructHandleIfPresent(effectiveVal);
- if (clsHnd == NO_CLASS_HANDLE)
- {
- newTree = new (this, GT_BLK) GenTreeBlk(GT_BLK, TYP_STRUCT, addr, blockWidth);
- }
- else
- {
- newTree = gtNewObjNode(clsHnd, addr);
- if (isDest && (newTree->OperGet() == GT_OBJ))
- {
- gtSetObjGcInfo(newTree->AsObj());
- }
- if (effectiveVal->IsLocal() && ((effectiveVal->gtFlags & GTF_GLOB_EFFECT) == 0))
- {
- // This is not necessarily a global reference, though gtNewObjNode always assumes it is.
- // TODO-1stClassStructs: This check should be done in the GenTreeObj constructor,
- // where it currently sets GTF_GLOB_EFFECT unconditionally, but it is handled
- // separately now to avoid excess diffs.
- newTree->gtFlags &= ~(GTF_GLOB_EFFECT);
- }
- }
- }
- else
- {
- newTree = new (this, GT_IND) GenTreeIndir(GT_IND, asgType, addr, nullptr);
+ // This is not necessarily a global reference, though gtNewObjNode always assumes it is.
+ // TODO-1stClassStructs: This check should be done in the GenTreeObj constructor,
+ // where it currently sets GTF_GLOB_EFFECT unconditionally, but it is handled
+ // separately now to avoid excess diffs.
+ newTree->gtFlags &= ~(GTF_GLOB_EFFECT);
}
- effectiveVal = newTree;
}
+ effectiveVal = newTree;
}
}
- tree = effectiveVal;
- return tree;
-}
-
-//------------------------------------------------------------------------
-// fgMorphUnsafeBlk: Convert a CopyObj with a dest on the stack to a GC Unsafe CopyBlk
-//
-// Arguments:
-// dest - the GT_OBJ or GT_STORE_OBJ
-//
-// Assumptions:
-// The destination must be known (by the caller) to be on the stack.
-//
-// Notes:
-// If we have a CopyObj with a dest on the stack, and its size is small enouch
-// to be completely unrolled (i.e. between [16..64] bytes), we will convert it into a
-// GC Unsafe CopyBlk that is non-interruptible.
-// This is not supported for the JIT32_GCENCODER, in which case this method is a no-op.
-//
-void Compiler::fgMorphUnsafeBlk(GenTreeObj* dest)
-{
-#if defined(CPBLK_UNROLL_LIMIT) && !defined(JIT32_GCENCODER)
- assert(dest->gtGcPtrCount != 0);
- unsigned blockWidth = dest->AsBlk()->gtBlkSize;
-#ifdef DEBUG
- bool destOnStack = false;
- GenTree* destAddr = dest->Addr();
- assert(destAddr->IsLocalAddrExpr() != nullptr);
-#endif
- if ((blockWidth >= (2 * TARGET_POINTER_SIZE)) && (blockWidth <= CPBLK_UNROLL_LIMIT))
+ if (!isDest && effectiveVal->OperIsBlk())
{
- genTreeOps newOper = (dest->gtOper == GT_OBJ) ? GT_BLK : GT_STORE_BLK;
- dest->SetOper(newOper);
- dest->AsBlk()->gtBlkOpGcUnsafe = true; // Mark as a GC unsafe copy block
+ (void)fgMorphBlkToInd(effectiveVal->AsBlk(), asgType);
}
-#endif // defined(CPBLK_UNROLL_LIMIT) && !defined(JIT32_GCENCODER)
+ tree = effectiveVal;
+ return tree;
}
//------------------------------------------------------------------------
@@ -9646,19 +9582,12 @@ GenTreePtr Compiler::fgMorphCopyBlock(GenTreePtr tree)
// Are both dest and src promoted structs?
if (destDoFldAsg && srcDoFldAsg)
{
- // Both structs should be of the same type, or each have a single field of the same type.
- // If not we will use a copy block.
+ // Both structs should be of the same type, if not we will use a copy block
if (lvaTable[destLclNum].lvVerTypeInfo.GetClassHandle() !=
lvaTable[srcLclNum].lvVerTypeInfo.GetClassHandle())
{
- unsigned destFieldNum = lvaTable[destLclNum].lvFieldLclStart;
- unsigned srcFieldNum = lvaTable[srcLclNum].lvFieldLclStart;
- if ((lvaTable[destLclNum].lvFieldCnt != 1) || (lvaTable[srcLclNum].lvFieldCnt != 1) ||
- (lvaTable[destFieldNum].lvType != lvaTable[srcFieldNum].lvType))
- {
- requiresCopyBlock = true; // Mismatched types, leave as a CopyBlock
- JITDUMP(" with mismatched types");
- }
+ requiresCopyBlock = true; // Mismatched types, leave as a CopyBlock
+ JITDUMP(" with mismatched types");
}
}
// Are neither dest or src promoted structs?
@@ -9752,8 +9681,9 @@ GenTreePtr Compiler::fgMorphCopyBlock(GenTreePtr tree)
var_types asgType = dest->TypeGet();
dest = fgMorphBlockOperand(dest, asgType, blockWidth, true /*isDest*/);
asg->gtOp.gtOp1 = dest;
- asg->gtFlags |= (dest->gtFlags & GTF_ALL_EFFECT);
+ hasGCPtrs = ((dest->OperGet() == GT_OBJ) && (dest->AsObj()->gtGcPtrCount != 0));
+#if defined(CPBLK_UNROLL_LIMIT) && !defined(JIT32_GCENCODER)
// Note that the unrolling of CopyBlk is only implemented on some platforms.
// Currently that includes x64 and ARM but not x86: the code generation for this
// construct requires the ability to mark certain regions of the generated code
@@ -9762,13 +9692,26 @@ GenTreePtr Compiler::fgMorphCopyBlock(GenTreePtr tree)
// If we have a CopyObj with a dest on the stack
// we will convert it into an GC Unsafe CopyBlk that is non-interruptible
- // when its size is small enouch to be completely unrolled (i.e. between [16..64] bytes).
- // (This is not supported for the JIT32_GCENCODER, for which fgMorphUnsafeBlk is a no-op.)
+    // when its size is small enough to be completely unrolled (i.e. between [16..64] bytes)
//
- if (destOnStack && (dest->OperGet() == GT_OBJ))
+ if (hasGCPtrs && destOnStack && blockWidthIsConst && (blockWidth >= (2 * TARGET_POINTER_SIZE)) &&
+ (blockWidth <= CPBLK_UNROLL_LIMIT))
{
- fgMorphUnsafeBlk(dest->AsObj());
+ if (dest->OperGet() == GT_OBJ)
+ {
+ dest->SetOper(GT_BLK);
+ dest->AsBlk()->gtBlkOpGcUnsafe = true; // Mark as a GC unsafe copy block
+ }
+ else
+ {
+ assert(dest->OperIsLocal());
+ GenTree* destAddr = gtNewOperNode(GT_ADDR, TYP_BYREF, dest);
+ dest = new (this, GT_BLK) GenTreeBlk(GT_BLK, dest->TypeGet(), destAddr, blockWidth);
+ dest->AsBlk()->gtBlkOpGcUnsafe = true; // Mark as a GC unsafe copy block
+ tree->gtOp.gtOp1 = dest;
+ }
}
+#endif // defined(CPBLK_UNROLL_LIMIT) && !defined(JIT32_GCENCODER)
// Eliminate the "OBJ or BLK" node on the rhs.
rhs = fgMorphBlockOperand(rhs, asgType, blockWidth, false /*!isDest*/);
@@ -9817,6 +9760,8 @@ GenTreePtr Compiler::fgMorphCopyBlock(GenTreePtr tree)
// To do fieldwise assignments for both sides, they'd better be the same struct type!
// All of these conditions were checked above...
assert(destLclNum != BAD_VAR_NUM && srcLclNum != BAD_VAR_NUM);
+ assert(lvaTable[destLclNum].lvVerTypeInfo.GetClassHandle() ==
+ lvaTable[srcLclNum].lvVerTypeInfo.GetClassHandle());
assert(destLclVar != nullptr && srcLclVar != nullptr && destLclVar->lvFieldCnt == srcLclVar->lvFieldCnt);
fieldCnt = destLclVar->lvFieldCnt;
@@ -10510,6 +10455,17 @@ GenTreePtr Compiler::fgMorphSmpOp(GenTreePtr tree, MorphAddrContext* mac)
/* fgDoNormalizeOnStore can change op2 */
noway_assert(op1 == tree->gtOp.gtOp1);
op2 = tree->gtOp.gtOp2;
+ // TODO-1stClassStructs: this is here to match previous behavior, but results in some
+ // unnecessary pessimization in the handling of addresses in fgMorphCopyBlock().
+ if (tree->OperIsBlkOp())
+ {
+ op1->gtFlags |= GTF_DONT_CSE;
+ if (tree->OperIsCopyBlkOp() &&
+ (op2->IsLocal() || (op2->OperIsIndir() && (op2->AsIndir()->Addr()->OperGet() == GT_ADDR))))
+ {
+ op2->gtFlags |= GTF_DONT_CSE;
+ }
+ }
#ifdef FEATURE_SIMD
{
@@ -13876,16 +13832,6 @@ GenTree* Compiler::fgMorphSmpOpOptional(GenTreeOp* tree)
break;
- case GT_INIT_VAL:
- // Initialization values for initBlk have special semantics - their lower
- // byte is used to fill the struct. However, we allow 0 as a "bare" value,
- // which enables them to get a VNForZero, and be propagated.
- if (op1->IsIntegralConst(0))
- {
- return op1;
- }
- break;
-
default:
break;
}
diff --git a/src/jit/rationalize.cpp b/src/jit/rationalize.cpp
index 873c608ce4..c552474752 100644
--- a/src/jit/rationalize.cpp
+++ b/src/jit/rationalize.cpp
@@ -441,77 +441,33 @@ void Rationalizer::RewriteAssignment(LIR::Use& use)
genTreeOps locationOp = location->OperGet();
- if (assignment->OperIsBlkOp())
- {
#ifdef FEATURE_SIMD
- if (varTypeIsSIMD(location) && assignment->OperIsInitBlkOp())
+ if (varTypeIsSIMD(location) && assignment->OperIsInitBlkOp())
+ {
+ if (location->OperGet() == GT_LCL_VAR)
{
- if (location->OperGet() == GT_LCL_VAR)
+ var_types simdType = location->TypeGet();
+ GenTree* initVal = assignment->gtOp.gtOp2;
+ var_types baseType = comp->getBaseTypeOfSIMDLocal(location);
+ if (baseType != TYP_UNKNOWN)
{
- var_types simdType = location->TypeGet();
- GenTree* initVal = assignment->gtOp.gtOp2;
- var_types baseType = comp->getBaseTypeOfSIMDLocal(location);
- if (baseType != TYP_UNKNOWN)
- {
- GenTreeSIMD* simdTree = new (comp, GT_SIMD)
- GenTreeSIMD(simdType, initVal, SIMDIntrinsicInit, baseType, genTypeSize(simdType));
- assignment->gtOp.gtOp2 = simdTree;
- value = simdTree;
- initVal->gtNext = simdTree;
- simdTree->gtPrev = initVal;
-
- simdTree->gtNext = location;
- location->gtPrev = simdTree;
- }
+ GenTreeSIMD* simdTree = new (comp, GT_SIMD)
+ GenTreeSIMD(simdType, initVal, SIMDIntrinsicInit, baseType, genTypeSize(simdType));
+ assignment->gtOp.gtOp2 = simdTree;
+ value = simdTree;
+ initVal->gtNext = simdTree;
+ simdTree->gtPrev = initVal;
+
+ simdTree->gtNext = location;
+ location->gtPrev = simdTree;
}
}
-#endif // FEATURE_SIMD
- if ((location->TypeGet() == TYP_STRUCT) && !assignment->IsPhiDefn() && !value->IsMultiRegCall())
+ else
{
- if ((location->OperGet() == GT_LCL_VAR))
- {
- // We need to construct a block node for the location.
- // Modify lcl to be the address form.
- location->SetOper(addrForm(locationOp));
- LclVarDsc* varDsc = &(comp->lvaTable[location->AsLclVarCommon()->gtLclNum]);
- location->gtType = TYP_BYREF;
- GenTreeBlk* storeBlk = nullptr;
- unsigned int size = varDsc->lvExactSize;
-
- if (varDsc->lvStructGcCount != 0)
- {
- CORINFO_CLASS_HANDLE structHnd = varDsc->lvVerTypeInfo.GetClassHandle();
- GenTreeObj* objNode = comp->gtNewObjNode(structHnd, location)->AsObj();
- unsigned int slots = (unsigned)(roundUp(size, TARGET_POINTER_SIZE) / TARGET_POINTER_SIZE);
-
- objNode->SetGCInfo(varDsc->lvGcLayout, varDsc->lvStructGcCount, slots);
- objNode->ChangeOper(GT_STORE_OBJ);
- objNode->SetData(value);
- comp->fgMorphUnsafeBlk(objNode);
- storeBlk = objNode;
- }
- else
- {
- storeBlk = new (comp, GT_STORE_BLK) GenTreeBlk(GT_STORE_BLK, TYP_STRUCT, location, value, size);
- }
- storeBlk->gtFlags |= (GTF_REVERSE_OPS | GTF_ASG);
- storeBlk->gtFlags |= ((location->gtFlags | value->gtFlags) & GTF_ALL_EFFECT);
-
- GenTree* insertionPoint = location->gtNext;
- BlockRange().InsertBefore(insertionPoint, storeBlk);
- use.ReplaceWith(comp, storeBlk);
- BlockRange().Remove(assignment);
- JITDUMP("After transforming local struct assignment into a block op:\n");
- DISPTREERANGE(BlockRange(), use.Def());
- JITDUMP("\n");
- return;
- }
- else
- {
- assert(location->OperIsBlk());
- }
+ assert(location->OperIsBlk());
}
}
+#endif // FEATURE_SIMD
switch (locationOp)
{
@@ -583,7 +539,7 @@ void Rationalizer::RewriteAssignment(LIR::Use& use)
storeBlk->SetOperRaw(storeOper);
storeBlk->gtFlags &= ~GTF_DONT_CSE;
storeBlk->gtFlags |= (assignment->gtFlags & (GTF_ALL_EFFECT | GTF_REVERSE_OPS | GTF_BLK_VOLATILE |
- GTF_BLK_UNALIGNED | GTF_DONT_CSE));
+ GTF_BLK_UNALIGNED | GTF_BLK_INIT | GTF_DONT_CSE));
storeBlk->gtBlk.Data() = value;
// Replace the assignment node with the store
diff --git a/src/jit/regalloc.cpp b/src/jit/regalloc.cpp
index 8a7ad5a163..d6302834c1 100644
--- a/src/jit/regalloc.cpp
+++ b/src/jit/regalloc.cpp
@@ -4458,13 +4458,6 @@ regMaskTP Compiler::rpPredictTreeRegUse(GenTreePtr tree,
case GT_ARR_LENGTH:
goto GENERIC_UNARY;
- case GT_INIT_VAL:
- // This unary operator simply passes through the value from its child (much like GT_NOP)
- // and thus won't need a scratch register.
- regMask = rpPredictTreeRegUse(op1, predictReg, lockedRegs, rsvdRegs);
- tree->gtUsedRegs = op1->gtUsedRegs;
- goto RETURN_CHECK;
-
default:
#ifdef DEBUG
gtDispTree(tree);
diff --git a/src/jit/valuenum.cpp b/src/jit/valuenum.cpp
index 0a38a5f313..c36ae1c1f5 100644
--- a/src/jit/valuenum.cpp
+++ b/src/jit/valuenum.cpp
@@ -3767,9 +3767,8 @@ static genTreeOps genTreeOpsIllegalAsVNFunc[] = {GT_IND, // When we do heap memo
// These need special semantics:
GT_COMMA, // == second argument (but with exception(s) from first).
GT_ADDR, GT_ARR_BOUNDS_CHECK,
- GT_OBJ, // May reference heap memory.
- GT_BLK, // May reference heap memory.
- GT_INIT_VAL, // Not strictly a pass-through.
+ GT_OBJ, // May reference heap memory.
+ GT_BLK, // May reference heap memory.
// These control-flow operations need no values.
GT_JTRUE, GT_RETURN, GT_SWITCH, GT_RETFILT, GT_CKFINITE};
@@ -4928,6 +4927,9 @@ void Compiler::fgValueNumberBlockAssignment(GenTreePtr tree, bool evalAsgLhsInd)
}
#endif // DEBUG
}
+ // Initblock's are of type void. Give them the void "value" -- they may occur in argument lists, which we
+ // want to be able to give VN's to.
+ tree->gtVNPair.SetBoth(ValueNumStore::VNForVoid());
}
else
{
@@ -4935,9 +4937,6 @@ void Compiler::fgValueNumberBlockAssignment(GenTreePtr tree, bool evalAsgLhsInd)
// TODO-CQ: Why not be complete, and get this case right?
fgMutateHeap(tree DEBUGARG("INITBLK - non local"));
}
- // Initblock's are of type void. Give them the void "value" -- they may occur in argument lists, which we
- // want to be able to give VN's to.
- tree->gtVNPair.SetBoth(ValueNumStore::VNForVoid());
}
else
{
@@ -6265,12 +6264,17 @@ void Compiler::fgValueNumberTree(GenTreePtr tree, bool evalAsgLhsInd)
}
tree->gtVNPair = vnStore->VNPWithExc(tree->gtVNPair, addrXvnp);
}
- else if (vnStore->GetVNFunc(addrNvnp.GetLiberal(), &funcApp) && (funcApp.m_func == VNF_PtrToArrElem))
+ else if (!varTypeIsStruct(tree) && vnStore->GetVNFunc(addrNvnp.GetLiberal(), &funcApp) &&
+ (funcApp.m_func == VNF_PtrToArrElem))
{
+ // TODO-1stClassStructs: The above condition need not exclude struct types, but it is
+ // excluded for now to minimize diffs.
fgValueNumberArrIndexVal(tree, &funcApp, addrXvnp.GetLiberal());
}
- else if (addr->IsFieldAddr(this, &obj, &staticOffset, &fldSeq2))
+ else if (!varTypeIsStruct(tree) && addr->IsFieldAddr(this, &obj, &staticOffset, &fldSeq2))
{
+ // TODO-1stClassStructs: The above condition need not exclude struct types, but it is
+ // excluded for now to minimize diffs.
if (fldSeq2 == FieldSeqStore::NotAField())
{
tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
@@ -6690,7 +6694,7 @@ void Compiler::fgValueNumberCastTree(GenTreePtr tree)
bool srcIsUnsigned = ((tree->gtFlags & GTF_UNSIGNED) != 0);
bool hasOverflowCheck = tree->gtOverflowEx();
- assert(genActualType(castToType) == genActualType(tree->TypeGet())); // Insure that the resultType is correct
+ assert(genActualType(castToType) == tree->TypeGet()); // Insure that the resultType is correct
tree->gtVNPair = vnStore->VNPairForCast(srcVNPair, castToType, castFromType, srcIsUnsigned, hasOverflowCheck);
}