-rw-r--r--  src/jit/assertionprop.cpp |  82
-rw-r--r--  src/jit/codegenarm64.cpp  |  36
-rw-r--r--  src/jit/codegenlegacy.cpp |   3
-rw-r--r--  src/jit/codegenlinear.cpp |  18
-rw-r--r--  src/jit/codegenxarch.cpp  |  14
-rw-r--r--  src/jit/compiler.h        |   4
-rw-r--r--  src/jit/earlyprop.cpp     |  12
-rw-r--r--  src/jit/flowgraph.cpp     |   3
-rw-r--r--  src/jit/gentree.cpp       |  48
-rw-r--r--  src/jit/gentree.h         |  64
-rw-r--r--  src/jit/gtlist.h          |   2
-rw-r--r--  src/jit/importer.cpp      |  71
-rw-r--r--  src/jit/lclvars.cpp       |   4
-rw-r--r--  src/jit/lower.cpp         |   2
-rw-r--r--  src/jit/lowerarm64.cpp    | 110
-rw-r--r--  src/jit/lowerxarch.cpp    |  18
-rw-r--r--  src/jit/morph.cpp         | 254
-rw-r--r--  src/jit/rationalize.cpp   |  84
-rw-r--r--  src/jit/regalloc.cpp      |   7
-rw-r--r--  src/jit/valuenum.cpp      |  22
20 files changed, 543 insertions(+), 315 deletions(-)
diff --git a/src/jit/assertionprop.cpp b/src/jit/assertionprop.cpp
index 159aa293f4..747b032bee 100644
--- a/src/jit/assertionprop.cpp
+++ b/src/jit/assertionprop.cpp
@@ -1100,11 +1100,6 @@ Compiler::AssertionIndex Compiler::optCreateAssertion(GenTreePtr op1,
CNS_COMMON:
{
- // TODO-1stClassStructs: handle constant propagation to struct types.
- if (varTypeIsStruct(lclVar))
- {
- goto DONE_ASSERTION;
- }
//
// Must either be an OAK_EQUAL or an OAK_NOT_EQUAL assertion
//
@@ -2034,12 +2029,7 @@ void Compiler::optAssertionGen(GenTreePtr tree)
{
case GT_ASG:
// VN takes care of non local assertions for assignments and data flow.
- // TODO-1stClassStructs: Enable assertion prop for struct types.
- if (varTypeIsStruct(tree))
- {
- // Do nothing.
- }
- else if (optLocalAssertionProp)
+ if (optLocalAssertionProp)
{
assertionIndex = optCreateAssertion(tree->gtOp.gtOp1, tree->gtOp.gtOp2, OAK_EQUAL);
}
@@ -2052,26 +2042,15 @@ void Compiler::optAssertionGen(GenTreePtr tree)
case GT_OBJ:
case GT_BLK:
case GT_DYN_BLK:
- // TODO-1stClassStructs: These should always be considered to create a non-null
- // assertion, but previously, when these indirections were implicit due to a block
- // copy or init, they were not being considered to do so.
- break;
case GT_IND:
- // TODO-1stClassStructs: All indirections should be considered to create a non-null
- // assertion, but previously, when these indirections were implicit due to a block
- // copy or init, they were not being considered to do so.
- if (tree->gtType == TYP_STRUCT)
- {
- GenTree* parent = tree->gtGetParent(nullptr);
- if ((parent != nullptr) && (parent->gtOper == GT_ASG))
- {
- break;
- }
- }
case GT_NULLCHECK:
+ // All indirections create non-null assertions
+ assertionIndex = optCreateAssertion(tree->AsIndir()->Addr(), nullptr, OAK_NOT_EQUAL);
+ break;
+
case GT_ARR_LENGTH:
- // An array length can create a non-null assertion
- assertionIndex = optCreateAssertion(tree->gtOp.gtOp1, nullptr, OAK_NOT_EQUAL);
+ // An array length is an indirection (but doesn't derive from GenTreeIndir).
+ assertionIndex = optCreateAssertion(tree->AsArrLen()->ArrRef(), nullptr, OAK_NOT_EQUAL);
break;
case GT_ARR_BOUNDS_CHECK:
@@ -2629,9 +2608,29 @@ GenTreePtr Compiler::optConstantAssertionProp(AssertionDsc* curAssertion,
else
{
bool isArrIndex = ((tree->gtFlags & GTF_VAR_ARR_INDEX) != 0);
- newTree->ChangeOperConst(GT_CNS_INT);
- newTree->gtIntCon.gtIconVal = curAssertion->op2.u1.iconVal;
- newTree->ClearIconHandleMask();
+ // If we have done constant propagation of a struct type, it is only valid for zero-init,
+ // and we have to ensure that we have the right zero for the type.
+ if (varTypeIsStruct(tree))
+ {
+ assert(curAssertion->op2.u1.iconVal == 0);
+ }
+#ifdef FEATURE_SIMD
+ if (varTypeIsSIMD(tree))
+ {
+ var_types simdType = tree->TypeGet();
+ tree->ChangeOperConst(GT_CNS_DBL);
+ GenTree* initVal = tree;
+ initVal->gtType = TYP_FLOAT;
+ newTree =
+ gtNewSIMDNode(simdType, initVal, nullptr, SIMDIntrinsicInit, TYP_FLOAT, genTypeSize(simdType));
+ }
+ else
+#endif // FEATURE_SIMD
+ {
+ newTree->ChangeOperConst(GT_CNS_INT);
+ newTree->gtIntCon.gtIconVal = curAssertion->op2.u1.iconVal;
+ newTree->ClearIconHandleMask();
+ }
// If we're doing an array index address, assume any constant propagated contributes to the index.
if (isArrIndex)
{
@@ -3421,32 +3420,13 @@ GenTreePtr Compiler::optAssertionProp_Ind(ASSERT_VALARG_TP assertions, const Gen
{
assert(tree->OperIsIndir());
- // TODO-1stClassStructs: All indirections should be handled here, but
- // previously, when these indirections were GT_OBJ, or implicit due to a block
- // copy or init, they were not being handled.
- if (tree->TypeGet() == TYP_STRUCT)
- {
- if (tree->OperIsBlk())
- {
- return nullptr;
- }
- else
- {
- GenTree* parent = tree->gtGetParent(nullptr);
- if ((parent != nullptr) && parent->OperIsBlkOp())
- {
- return nullptr;
- }
- }
- }
-
if (!(tree->gtFlags & GTF_EXCEPT))
{
return nullptr;
}
// Check for add of a constant.
- GenTreePtr op1 = tree->gtOp.gtOp1;
+ GenTreePtr op1 = tree->AsIndir()->Addr();
if ((op1->gtOper == GT_ADD) && (op1->gtOp.gtOp2->gtOper == GT_CNS_INT))
{
op1 = op1->gtOp.gtOp1;
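
The assertion-prop hunks above make every indirection create a non-null assertion and allow constant propagation into struct/SIMD uses, where only zero-init is valid; for a SIMD local the propagated zero is rebuilt as a SIMDIntrinsicInit broadcasting a float zero. A minimal standalone sketch of what that broadcast amounts to, modeled on a 4-lane vector (the SimdInit helper and the lane count are illustrative assumptions, not JIT code):

    #include <array>
    #include <cassert>

    // Illustrative model: a zero-init constant propagated into a SIMD use
    // becomes "broadcast 0.0f into every lane" (SIMDIntrinsicInit with a
    // TYP_FLOAT zero), not a raw integer constant.
    static std::array<float, 4> SimdInit(float v)
    {
        return {v, v, v, v};
    }

    int main()
    {
        std::array<float, 4> zero = SimdInit(0.0f);
        for (float lane : zero)
        {
            assert(lane == 0.0f); // every lane carries the propagated zero
        }
        return 0;
    }
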
diff --git a/src/jit/codegenarm64.cpp b/src/jit/codegenarm64.cpp
index 6fe7eb99cb..06637f9faa 100644
--- a/src/jit/codegenarm64.cpp
+++ b/src/jit/codegenarm64.cpp
@@ -3346,6 +3346,10 @@ void CodeGen::genCodeForInitBlk(GenTreeBlk* initBlkNode)
unsigned size = initBlkNode->Size();
GenTreePtr dstAddr = initBlkNode->Addr();
GenTreePtr initVal = initBlkNode->Data();
+ if (initVal->OperIsInitVal())
+ {
+ initVal = initVal->gtGetOp1();
+ }
assert(!dstAddr->isContained());
assert(!initVal->isContained());
@@ -3512,27 +3516,38 @@ void CodeGen::genCodeForCpBlkUnroll(GenTreeBlk* cpBlkNode)
// str tempReg, [R14, #8]
void CodeGen::genCodeForCpObj(GenTreeObj* cpObjNode)
{
- // Make sure we got the arguments of the cpobj operation in the right registers
- GenTreePtr dstAddr = cpObjNode->Addr();
- GenTreePtr source = cpObjNode->Data();
- noway_assert(source->gtOper == GT_IND);
- GenTreePtr srcAddr = source->gtGetOp1();
+ GenTreePtr dstAddr = cpObjNode->Addr();
+ GenTreePtr source = cpObjNode->Data();
+ var_types srcAddrType = TYP_BYREF;
+ bool sourceIsLocal = false;
+
+ assert(source->isContained());
+ if (source->gtOper == GT_IND)
+ {
+ GenTree* srcAddr = source->gtGetOp1();
+ assert(!srcAddr->isContained());
+ srcAddrType = srcAddr->TypeGet();
+ }
+ else
+ {
+ noway_assert(source->IsLocal());
+ sourceIsLocal = true;
+ }
bool dstOnStack = dstAddr->OperIsLocalAddr();
#ifdef DEBUG
assert(!dstAddr->isContained());
- assert(!srcAddr->isContained());
// This GenTree node has data about GC pointers, this means we're dealing
// with CpObj.
assert(cpObjNode->gtGcPtrCount > 0);
#endif // DEBUG
- // Consume these registers.
+ // Consume the operands and get them into the right registers.
// They may now contain gc pointers (depending on their type; gcMarkRegPtrVal will "do the right thing").
genConsumeBlockOp(cpObjNode, REG_WRITE_BARRIER_DST_BYREF, REG_WRITE_BARRIER_SRC_BYREF, REG_NA);
- gcInfo.gcMarkRegPtrVal(REG_WRITE_BARRIER_SRC_BYREF, srcAddr->TypeGet());
+ gcInfo.gcMarkRegPtrVal(REG_WRITE_BARRIER_SRC_BYREF, srcAddrType);
gcInfo.gcMarkRegPtrVal(REG_WRITE_BARRIER_DST_BYREF, dstAddr->TypeGet());
// Temp register used to perform the sequence of loads and stores.
@@ -3604,12 +3619,7 @@ void CodeGen::genCodeForCpBlk(GenTreeBlk* cpBlkNode)
// Make sure we got the arguments of the cpblk operation in the right registers
unsigned blockSize = cpBlkNode->Size();
GenTreePtr dstAddr = cpBlkNode->Addr();
- GenTreePtr source = cpBlkNode->Data();
- noway_assert(source->gtOper == GT_IND);
- GenTreePtr srcAddr = source->gtGetOp1();
-
assert(!dstAddr->isContained());
- assert(!srcAddr->isContained());
genConsumeBlockOp(cpBlkNode, REG_ARG_0, REG_ARG_1, REG_ARG_2);
diff --git a/src/jit/codegenlegacy.cpp b/src/jit/codegenlegacy.cpp
index 5c1bb0990e..8762804b5d 100644
--- a/src/jit/codegenlegacy.cpp
+++ b/src/jit/codegenlegacy.cpp
@@ -10168,6 +10168,9 @@ void CodeGen::genCodeForTreeSmpOp(GenTreePtr tree, regMaskTP destReg, regMaskTP
if (op1 == NULL)
return;
#endif
+ __fallthrough;
+
+ case GT_INIT_VAL:
/* Generate the operand into some register */
diff --git a/src/jit/codegenlinear.cpp b/src/jit/codegenlinear.cpp
index c633e5619e..980187d5f0 100644
--- a/src/jit/codegenlinear.cpp
+++ b/src/jit/codegenlinear.cpp
@@ -1197,6 +1197,10 @@ void CodeGen::genConsumeRegs(GenTree* tree)
genUpdateLife(tree);
}
#endif // _TARGET_XARCH_
+ else if (tree->OperIsInitVal())
+ {
+ genConsumeReg(tree->gtGetOp1());
+ }
else
{
#ifdef FEATURE_SIMD
@@ -1405,6 +1409,13 @@ void CodeGen::genConsumeBlockSrc(GenTreeBlk* blkNode)
return;
}
}
+ else
+ {
+ if (src->OperIsInitVal())
+ {
+ src = src->gtGetOp1();
+ }
+ }
genConsumeReg(src);
}
@@ -1433,6 +1444,13 @@ void CodeGen::genSetBlockSrc(GenTreeBlk* blkNode, regNumber srcReg)
return;
}
}
+ else
+ {
+ if (src->OperIsInitVal())
+ {
+ src = src->gtGetOp1();
+ }
+ }
genCopyRegIfNeeded(src, srcReg);
}
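
The same unwrap recurs in genConsumeRegs, genConsumeBlockSrc, and genSetBlockSrc above: GT_INIT_VAL is transparent to codegen, which peels one level to reach the actual fill value. A tiny compilable model of the idiom (the Node struct is a hypothetical stand-in for GenTree):

    #include <cassert>

    // Hypothetical stand-in for GenTree, just enough to show the peel.
    struct Node
    {
        bool  isInitVal; // models OperIsInitVal()
        Node* op1;       // models gtGetOp1()
    };

    // Models the repeated pattern: if the source is an INIT_VAL wrapper,
    // operate on its child instead.
    static Node* SkipInitVal(Node* src)
    {
        return src->isInitVal ? src->op1 : src;
    }

    int main()
    {
        Node cns{false, nullptr};
        Node wrapped{true, &cns};
        assert(SkipInitVal(&wrapped) == &cns); // the wrapper is peeled
        assert(SkipInitVal(&cns) == &cns);     // bare values pass through
        return 0;
    }
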
diff --git a/src/jit/codegenxarch.cpp b/src/jit/codegenxarch.cpp
index f63a2687fd..81f425a20a 100644
--- a/src/jit/codegenxarch.cpp
+++ b/src/jit/codegenxarch.cpp
@@ -2766,6 +2766,10 @@ void CodeGen::genCodeForInitBlkRepStos(GenTreeBlk* initBlkNode)
unsigned size = initBlkNode->Size();
GenTreePtr dstAddr = initBlkNode->Addr();
GenTreePtr initVal = initBlkNode->Data();
+ if (initVal->OperIsInitVal())
+ {
+ initVal = initVal->gtGetOp1();
+ }
#ifdef DEBUG
assert(!dstAddr->isContained());
@@ -2800,6 +2804,10 @@ void CodeGen::genCodeForInitBlkUnroll(GenTreeBlk* initBlkNode)
unsigned size = initBlkNode->Size();
GenTreePtr dstAddr = initBlkNode->Addr();
GenTreePtr initVal = initBlkNode->Data();
+ if (initVal->OperIsInitVal())
+ {
+ initVal = initVal->gtGetOp1();
+ }
assert(!dstAddr->isContained());
assert(!initVal->isContained() || (initVal->IsIntegralConst(0) && ((size & 0xf) == 0)));
@@ -2897,6 +2905,10 @@ void CodeGen::genCodeForInitBlk(GenTreeBlk* initBlkNode)
unsigned blockSize = initBlkNode->Size();
GenTreePtr dstAddr = initBlkNode->Addr();
GenTreePtr initVal = initBlkNode->Data();
+ if (initVal->OperIsInitVal())
+ {
+ initVal = initVal->gtGetOp1();
+ }
assert(!dstAddr->isContained());
assert(!initVal->isContained());
@@ -3509,7 +3521,7 @@ void CodeGen::genCodeForCpObj(GenTreeObj* cpObjNode)
}
#endif // DEBUG
- // Consume these registers.
+ // Consume the operands and get them into the right registers.
// They may now contain gc pointers (depending on their type; gcMarkRegPtrVal will "do the right thing").
genConsumeBlockOp(cpObjNode, REG_RDI, REG_RSI, REG_NA);
gcInfo.gcMarkRegPtrVal(REG_RSI, srcAddrType);
diff --git a/src/jit/compiler.h b/src/jit/compiler.h
index a12d87b086..411f762a00 100644
--- a/src/jit/compiler.h
+++ b/src/jit/compiler.h
@@ -2323,7 +2323,8 @@ public:
DNER_VMNeedsStackAddr,
DNER_LiveInOutOfHandler,
DNER_LiveAcrossUnmanagedCall,
- DNER_BlockOp, // Is read or written via a block operation that explicitly takes the address.
+ DNER_BlockOp, // Is read or written via a block operation that explicitly takes the address.
+ DNER_IsStructArg, // Is a struct passed as an argument in a way that requires a stack location.
#ifdef JIT32_GCENCODER
DNER_PinningRef,
#endif
@@ -4561,6 +4562,7 @@ private:
GenTreePtr fgMorphGetStructAddr(GenTreePtr* pTree, CORINFO_CLASS_HANDLE clsHnd, bool isRValue = false);
GenTreePtr fgMorphBlkNode(GenTreePtr tree, bool isDest);
GenTreePtr fgMorphBlockOperand(GenTreePtr tree, var_types asgType, unsigned blockWidth, bool isDest);
+ void fgMorphUnsafeBlk(GenTreeObj* obj);
GenTreePtr fgMorphCopyBlock(GenTreePtr tree);
GenTreePtr fgMorphForRegisterFP(GenTreePtr tree);
GenTreePtr fgMorphSmpOp(GenTreePtr tree, MorphAddrContext* mac = nullptr);
diff --git a/src/jit/earlyprop.cpp b/src/jit/earlyprop.cpp
index 966063ce32..80ec6a01d2 100644
--- a/src/jit/earlyprop.cpp
+++ b/src/jit/earlyprop.cpp
@@ -237,12 +237,8 @@ bool Compiler::optEarlyPropRewriteTree(GenTreePtr tree)
objectRefPtr = tree->gtOp.gtOp1;
propKind = optPropKind::OPK_ARRAYLEN;
}
- else if ((tree->OperGet() == GT_IND) && !varTypeIsStruct(tree))
+ else if (tree->OperIsIndir())
{
- // TODO-1stClassStructs: The above condition should apply equally to all indirections,
- // but previously the implicit indirections due to a struct assignment were not
- // considered, so we are currently limiting it to non-structs to preserve existing
- // behavior.
// optFoldNullCheck takes care of updating statement info if a null check is removed.
optFoldNullCheck(tree);
@@ -258,7 +254,7 @@ bool Compiler::optEarlyPropRewriteTree(GenTreePtr tree)
return false;
}
- objectRefPtr = tree->gtOp.gtOp1;
+ objectRefPtr = tree->AsIndir()->Addr();
propKind = optPropKind::OPK_OBJ_GETTYPE;
}
else
@@ -515,8 +511,8 @@ void Compiler::optFoldNullCheck(GenTreePtr tree)
return;
}
- assert(tree->OperGet() == GT_IND);
- if (tree->gtGetOp1()->OperGet() == GT_LCL_VAR)
+ assert(tree->OperIsIndir());
+ if (tree->AsIndir()->Addr()->OperGet() == GT_LCL_VAR)
{
// Check if we have the pattern above and find the nullcheck node if we do.
diff --git a/src/jit/flowgraph.cpp b/src/jit/flowgraph.cpp
index b8a46a7ca8..aa3679d58d 100644
--- a/src/jit/flowgraph.cpp
+++ b/src/jit/flowgraph.cpp
@@ -21264,7 +21264,8 @@ void Compiler::fgAttachStructInlineeToAsg(GenTreePtr tree, GenTreePtr child, COR
assert(tree->gtOper == GT_ASG);
// We have an assignment, we codegen only V05 = call().
- if (child->gtOper == GT_CALL && tree->gtOp.gtOp1->gtOper == GT_LCL_VAR)
+ // However, if it is a multireg return on x64/ux we want to assign it to a temp.
+ if (child->gtOper == GT_CALL && tree->gtOp.gtOp1->gtOper == GT_LCL_VAR && !child->AsCall()->HasMultiRegRetVal())
{
return;
}
diff --git a/src/jit/gentree.cpp b/src/jit/gentree.cpp
index f927978cb1..1475e1888b 100644
--- a/src/jit/gentree.cpp
+++ b/src/jit/gentree.cpp
@@ -7259,17 +7259,32 @@ GenTree* Compiler::gtNewBlockVal(GenTreePtr addr, unsigned size)
{
// By default we treat this as an opaque struct type with known size.
var_types blkType = TYP_STRUCT;
-#if FEATURE_SIMD
if ((addr->gtOper == GT_ADDR) && (addr->gtGetOp1()->OperGet() == GT_LCL_VAR))
{
GenTree* val = addr->gtGetOp1();
- if (varTypeIsSIMD(val) && (genTypeSize(val->TypeGet()) == size))
+#if FEATURE_SIMD
+ if (varTypeIsSIMD(val))
{
- blkType = val->TypeGet();
- return addr->gtGetOp1();
+ if (genTypeSize(val->TypeGet()) == size)
+ {
+ blkType = val->TypeGet();
+ return addr->gtGetOp1();
+ }
}
- }
+ else
#endif // FEATURE_SIMD
+#ifndef LEGACY_BACKEND
+ if (val->TypeGet() == TYP_STRUCT)
+ {
+ GenTreeLclVarCommon* lcl = addr->gtGetOp1()->AsLclVarCommon();
+ LclVarDsc* varDsc = &(lvaTable[lcl->gtLclNum]);
+ if ((varDsc->TypeGet() == TYP_STRUCT) && (varDsc->lvExactSize == size))
+ {
+ return addr->gtGetOp1();
+ }
+ }
+#endif // !LEGACY_BACKEND
+ }
return new (this, GT_BLK) GenTreeBlk(GT_BLK, blkType, addr, size);
}
@@ -7352,10 +7367,10 @@ void GenTreeIntCon::FixupInitBlkValue(var_types asgType)
}
#endif // _TARGET_64BIT_
- // Make the type used in the GT_IND node match for evaluation types.
+ // Make the type match for evaluation types.
gtType = asgType;
- // if we are using an GT_INITBLK on a GC type the value being assigned has to be zero (null).
+ // if we are initializing a GC type the value being assigned must be zero (null).
assert(!varTypeIsGC(asgType) || (cns == 0));
}
@@ -7462,9 +7477,6 @@ void Compiler::gtBlockOpInit(GenTreePtr result, GenTreePtr dst, GenTreePtr srcOr
result->gtFlags |= dst->gtFlags & GTF_ALL_EFFECT;
result->gtFlags |= result->gtOp.gtOp2->gtFlags & GTF_ALL_EFFECT;
- // TODO-1stClassStructs: This should be done only if the destination is non-local.
- result->gtFlags |= (GTF_GLOB_REF | GTF_ASG);
-
// REVERSE_OPS is necessary because the use must occur before the def
result->gtFlags |= GTF_REVERSE_OPS;
@@ -7535,12 +7547,20 @@ GenTree* Compiler::gtNewBlkOpNode(
srcOrFillVal = srcOrFillVal->gtGetOp1()->gtGetOp1();
}
}
-
- GenTree* result = gtNewAssignNode(dst, srcOrFillVal);
- if (!isCopyBlock)
+ else
{
- result->gtFlags |= GTF_BLK_INIT;
+ // InitBlk
+ assert(varTypeIsIntegral(srcOrFillVal));
+ if (varTypeIsStruct(dst))
+ {
+ if (!srcOrFillVal->IsIntegralConst(0))
+ {
+ srcOrFillVal = gtNewOperNode(GT_INIT_VAL, TYP_INT, srcOrFillVal);
+ }
+ }
}
+
+ GenTree* result = gtNewAssignNode(dst, srcOrFillVal);
gtBlockOpInit(result, dst, srcOrFillVal, isVolatile);
return result;
}
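
In gtNewBlkOpNode above, a non-zero fill value gets wrapped in GT_INIT_VAL, and FixupInitBlkValue later broadcasts the low byte of the fill across the assignment type. A standalone check of that broadcast arithmetic (ReplicateFillByte is an illustrative helper, not the JIT's implementation):

    #include <cassert>
    #include <cstdint>

    // Broadcast the low byte of the fill value into every byte of a value
    // of the given size, mirroring the intent of FixupInitBlkValue.
    static uint64_t ReplicateFillByte(uint8_t fill, unsigned sizeInBytes)
    {
        uint64_t val = 0;
        for (unsigned i = 0; i < sizeInBytes; i++)
        {
            val = (val << 8) | fill;
        }
        return val;
    }

    int main()
    {
        assert(ReplicateFillByte(0xAB, 4) == 0xABABABABull);
        // A zero fill is zero at any width, which is why a GC type may only
        // be init-blocked with zero (null), per the assert above.
        assert(ReplicateFillByte(0x00, 8) == 0);
        return 0;
    }
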
diff --git a/src/jit/gentree.h b/src/jit/gentree.h
index bb34050b7b..d03a446293 100644
--- a/src/jit/gentree.h
+++ b/src/jit/gentree.h
@@ -937,7 +937,6 @@ public:
// -- is a volatile block operation
#define GTF_BLK_UNALIGNED 0x02000000 // GT_ASG, GT_STORE_BLK, GT_STORE_OBJ, GT_STORE_DYNBLK
// -- is an unaligned block operation
-#define GTF_BLK_INIT 0x01000000 // GT_ASG, GT_STORE_BLK, GT_STORE_OBJ, GT_STORE_DYNBLK -- is an init block operation
#define GTF_OVERFLOW 0x10000000 // GT_ADD, GT_SUB, GT_MUL, - Need overflow check
// GT_ASG_ADD, GT_ASG_SUB,
@@ -1138,6 +1137,21 @@ public:
return (gtOper == GT_LEA);
}
+ static bool OperIsInitVal(genTreeOps gtOper)
+ {
+ return (gtOper == GT_INIT_VAL);
+ }
+
+ bool OperIsInitVal() const
+ {
+ return OperIsInitVal(OperGet());
+ }
+
+ bool IsConstInitVal()
+ {
+ return (gtOper == GT_CNS_INT) || (OperIsInitVal() && (gtGetOp1()->gtOper == GT_CNS_INT));
+ }
+
bool OperIsBlkOp();
bool OperIsCopyBlkOp();
bool OperIsInitBlkOp();
@@ -4136,6 +4150,19 @@ struct GenTreeObj : public GenTreeBlk
// Let's assert it just to be safe.
noway_assert(roundUp(gtBlkSize, REGSIZE_BYTES) == gtBlkSize);
}
+ else
+ {
+ genTreeOps newOper = GT_BLK;
+ if (gtOper == GT_STORE_OBJ)
+ {
+ newOper = GT_STORE_BLK;
+ }
+ else
+ {
+ assert(gtOper == GT_OBJ);
+ }
+ SetOper(newOper);
+ }
}
void CopyGCInfo(GenTreeObj* srcObj)
@@ -4798,34 +4825,31 @@ inline bool GenTree::OperIsDynBlkOp()
return false;
}
-inline bool GenTree::OperIsCopyBlkOp()
+inline bool GenTree::OperIsInitBlkOp()
{
- if (gtOper == GT_ASG)
+ if (!OperIsBlkOp())
{
- return (varTypeIsStruct(gtGetOp1()) && ((gtFlags & GTF_BLK_INIT) == 0));
+ return false;
}
#ifndef LEGACY_BACKEND
- else if (OperIsStoreBlk())
- {
- return ((gtFlags & GTF_BLK_INIT) == 0);
- }
-#endif
- return false;
-}
-
-inline bool GenTree::OperIsInitBlkOp()
-{
+ GenTree* src;
if (gtOper == GT_ASG)
{
- return (varTypeIsStruct(gtGetOp1()) && ((gtFlags & GTF_BLK_INIT) != 0));
+ src = gtGetOp2();
}
-#ifndef LEGACY_BACKEND
- else if (OperIsStoreBlk())
+ else
{
- return ((gtFlags & GTF_BLK_INIT) != 0);
+ src = AsBlk()->Data()->gtSkipReloadOrCopy();
}
-#endif
- return false;
+#else // LEGACY_BACKEND
+ GenTree* src = gtGetOp2();
+#endif // LEGACY_BACKEND
+ return src->OperIsInitVal() || src->OperIsConst();
+}
+
+inline bool GenTree::OperIsCopyBlkOp()
+{
+ return OperIsBlkOp() && !OperIsInitBlkOp();
}
//------------------------------------------------------------------------
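
With GTF_BLK_INIT removed, init-vs-copy classification now keys off the shape of the source operand, as the rewritten OperIsInitBlkOp shows. A compilable miniature of that rule (the Oper enum and Node struct are stand-ins, and the real check also accepts other constants via OperIsConst):

    #include <cassert>

    enum Oper { CNS_INT, INIT_VAL, LCL_VAR };

    struct Node
    {
        Oper  oper;
        Node* op1; // child of a unary INIT_VAL
    };

    // A block assignment is an init block iff its source is an INIT_VAL
    // wrapper or a constant; anything else makes it a copy block.
    static bool IsInitBlkSource(const Node* src)
    {
        return (src->oper == INIT_VAL) || (src->oper == CNS_INT);
    }

    int main()
    {
        Node cns{CNS_INT, nullptr};
        Node wrapped{INIT_VAL, &cns};
        Node local{LCL_VAR, nullptr};
        assert(IsInitBlkSource(&cns));     // bare zero fill
        assert(IsInitBlkSource(&wrapped)); // non-zero fill under INIT_VAL
        assert(!IsInitBlkSource(&local));  // a local source means copy block
        return 0;
    }
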
diff --git a/src/jit/gtlist.h b/src/jit/gtlist.h
index a330a6bb37..92265a7359 100644
--- a/src/jit/gtlist.h
+++ b/src/jit/gtlist.h
@@ -93,6 +93,8 @@ GTNODE(SIMD_CHK , "simdChk" ,GenTreeBoundsChk ,0,GTK_SPECIAL|GTK_
GTNODE(ALLOCOBJ , "allocObj" ,GenTreeAllocObj ,0,GTK_UNOP|GTK_EXOP) // object allocator
+GTNODE(INIT_VAL , "initVal" ,GenTreeOp ,0,GTK_UNOP) // Initialization value for an initBlk
+
//-----------------------------------------------------------------------------
// Binary operators (2 operands):
//-----------------------------------------------------------------------------
diff --git a/src/jit/importer.cpp b/src/jit/importer.cpp
index 53e856ce87..42361b95ee 100644
--- a/src/jit/importer.cpp
+++ b/src/jit/importer.cpp
@@ -600,13 +600,9 @@ inline void Compiler::impAppendStmt(GenTreePtr stmt, unsigned chkLevel)
// Assignment to (unaliased) locals don't count as a side-effect as
// we handle them specially using impSpillLclRefs(). Temp locals should
// be fine too.
- // TODO-1stClassStructs: The check below should apply equally to struct assignments,
- // but previously the block ops were always being marked GTF_GLOB_REF, even if
- // the operands could not be global refs.
if ((expr->gtOper == GT_ASG) && (expr->gtOp.gtOp1->gtOper == GT_LCL_VAR) &&
- !(expr->gtOp.gtOp1->gtFlags & GTF_GLOB_REF) && !gtHasLocalsWithAddrOp(expr->gtOp.gtOp2) &&
- !varTypeIsStruct(expr->gtOp.gtOp1))
+ !(expr->gtOp.gtOp1->gtFlags & GTF_GLOB_REF) && !gtHasLocalsWithAddrOp(expr->gtOp.gtOp2))
{
unsigned op2Flags = expr->gtOp.gtOp2->gtFlags & GTF_GLOB_EFFECT;
assert(flags == (op2Flags | GTF_ASG));
@@ -1139,9 +1135,13 @@ GenTreePtr Compiler::impAssignStructPtr(GenTreePtr destAddr,
if (destAddr->OperGet() == GT_ADDR)
{
GenTree* destNode = destAddr->gtGetOp1();
- // If the actual destination is already a block node, or is a node that
+ // If the actual destination is a local (for non-LEGACY_BACKEND), or already a block node, or is a node that
// will be morphed, don't insert an OBJ(ADDR).
- if (destNode->gtOper == GT_INDEX || destNode->OperIsBlk())
+ if (destNode->gtOper == GT_INDEX || destNode->OperIsBlk()
+#ifndef LEGACY_BACKEND
+ || ((destNode->OperGet() == GT_LCL_VAR) && (destNode->TypeGet() == src->TypeGet()))
+#endif // !LEGACY_BACKEND
+ )
{
dest = destNode;
}
@@ -1190,6 +1190,9 @@ GenTreePtr Compiler::impAssignStructPtr(GenTreePtr destAddr,
{
// Mark the struct LclVar as used in a MultiReg return context
// which currently makes it non promotable.
+ // TODO-1stClassStructs: Eliminate this pessimization when we can more generally
+ // handle multireg returns.
+ lcl->gtFlags |= GTF_DONT_CSE;
lvaTable[lcl->gtLclVarCommon.gtLclNum].lvIsMultiRegRet = true;
}
else // The call result is not a multireg return
@@ -1204,12 +1207,20 @@ GenTreePtr Compiler::impAssignStructPtr(GenTreePtr destAddr,
dest = lcl;
#if defined(_TARGET_ARM_)
+ // TODO-Cleanup: This should have been taken care of in the above HasMultiRegRetVal() case,
+ // but that method has not been updated to include ARM.
impMarkLclDstNotPromotable(lcl->gtLclVarCommon.gtLclNum, src, structHnd);
+ lcl->gtFlags |= GTF_DONT_CSE;
#elif defined(FEATURE_UNIX_AMD64_STRUCT_PASSING)
// Not allowed for FEATURE_CORCLR which is the only SKU available for System V OSs.
assert(!src->gtCall.IsVarargs() && "varargs not allowed for System V OSs.");
// Make the struct non promotable. The eightbytes could contain multiple fields.
+ // TODO-1stClassStructs: Eliminate this pessimization when we can more generally
+ // handle multireg returns.
+ // TODO-Cleanup: Why is this needed here? It seems that this will be set even for
+ // non-multireg returns.
+ lcl->gtFlags |= GTF_DONT_CSE;
lvaTable[lcl->gtLclVarCommon.gtLclNum].lvIsMultiRegRet = true;
#endif
}
@@ -1251,10 +1262,11 @@ GenTreePtr Compiler::impAssignStructPtr(GenTreePtr destAddr,
src->gtType = genActualType(returnType);
call->gtType = src->gtType;
- // 1stClassStructToDo: We shouldn't necessarily need this.
- if (dest != nullptr)
+ // If we've changed the type, and it no longer matches a local destination,
+ // we must use an indirection.
+ if ((dest != nullptr) && (dest->OperGet() == GT_LCL_VAR) && (dest->TypeGet() != asgType))
{
- dest = gtNewOperNode(GT_IND, returnType, gtNewOperNode(GT_ADDR, TYP_BYREF, dest));
+ dest = nullptr;
}
// !!! The destination could be on stack. !!!
@@ -1325,21 +1337,19 @@ GenTreePtr Compiler::impAssignStructPtr(GenTreePtr destAddr,
}
else if (src->IsLocal())
{
- // TODO-1stClassStructs: Eliminate this; it is only here to minimize diffs in the
- // initial implementation. Previously the source would have been under a GT_ADDR, which
- // would cause it to be marked GTF_DONT_CSE.
asgType = src->TypeGet();
- src->gtFlags |= GTF_DONT_CSE;
- if (asgType == TYP_STRUCT)
- {
- GenTree* srcAddr = gtNewOperNode(GT_ADDR, TYP_BYREF, src);
- src = gtNewOperNode(GT_IND, TYP_STRUCT, srcAddr);
- }
}
else if (asgType == TYP_STRUCT)
{
asgType = impNormStructType(structHnd);
src->gtType = asgType;
+#ifdef LEGACY_BACKEND
+ if (asgType == TYP_STRUCT)
+ {
+ GenTree* srcAddr = gtNewOperNode(GT_ADDR, TYP_BYREF, src);
+ src = gtNewOperNode(GT_IND, TYP_STRUCT, srcAddr);
+ }
+#endif
}
if (dest == nullptr)
{
@@ -7799,6 +7809,9 @@ GenTreePtr Compiler::impFixupStructReturnType(GenTreePtr op, CORINFO_CLASS_HANDL
unsigned lclNum = op->gtLclVarCommon.gtLclNum;
lvaTable[lclNum].lvIsMultiRegRet = true;
+ // TODO-1stClassStructs: Handle constant propagation and CSE-ing of multireg returns.
+ op->gtFlags |= GTF_DONT_CSE;
+
return op;
}
@@ -7823,6 +7836,10 @@ GenTreePtr Compiler::impFixupStructReturnType(GenTreePtr op, CORINFO_CLASS_HANDL
unsigned lclNum = op->gtLclVarCommon.gtLclNum;
// Make sure this struct type stays as struct so that we can return it as an HFA
lvaTable[lclNum].lvIsMultiRegRet = true;
+
+ // TODO-1stClassStructs: Handle constant propagation and CSE-ing of multireg returns.
+ op->gtFlags |= GTF_DONT_CSE;
+
return op;
}
@@ -7855,6 +7872,10 @@ GenTreePtr Compiler::impFixupStructReturnType(GenTreePtr op, CORINFO_CLASS_HANDL
// Make sure this struct type is not struct promoted
lvaTable[lclNum].lvIsMultiRegRet = true;
+
+ // TODO-1stClassStructs: Handle constant propagation and CSE-ing of multireg returns.
+ op->gtFlags |= GTF_DONT_CSE;
+
return op;
}
@@ -9506,6 +9527,14 @@ void Compiler::impImportBlockCode(BasicBlock* block)
SPILL_APPEND:
+ // We need to call impSpillLclRefs() for a struct type lclVar.
+ // This is done for non-block assignments in the handling of stloc.
+ if ((op1->OperGet() == GT_ASG) && varTypeIsStruct(op1->gtOp.gtOp1) &&
+ (op1->gtOp.gtOp1->gtOper == GT_LCL_VAR))
+ {
+ impSpillLclRefs(op1->gtOp.gtOp1->AsLclVarCommon()->gtLclNum);
+ }
+
/* Append 'op1' to the list of statements */
impAppendTree(op1, (unsigned)CHECK_SPILL_ALL, impCurStmtOffs);
goto DONE_APPEND;
@@ -14719,6 +14748,10 @@ GenTreePtr Compiler::impAssignMultiRegTypeToVar(GenTreePtr op, CORINFO_CLASS_HAN
unsigned tmpNum = lvaGrabTemp(true DEBUGARG("Return value temp for multireg return."));
impAssignTempGen(tmpNum, op, hClass, (unsigned)CHECK_SPILL_NONE);
GenTreePtr ret = gtNewLclvNode(tmpNum, op->gtType);
+
+ // TODO-1stClassStructs: Handle constant propagation and CSE-ing of multireg returns.
+ ret->gtFlags |= GTF_DONT_CSE;
+
assert(IsMultiRegReturnedType(hClass));
// Mark the var so that fields are not promoted and stay together.
diff --git a/src/jit/lclvars.cpp b/src/jit/lclvars.cpp
index c9ab053a4e..febbf6e19e 100644
--- a/src/jit/lclvars.cpp
+++ b/src/jit/lclvars.cpp
@@ -1896,6 +1896,10 @@ void Compiler::lvaSetVarDoNotEnregister(unsigned varNum DEBUGARG(DoNotEnregister
JITDUMP("it is a struct\n");
assert(varTypeIsStruct(varDsc));
break;
+ case DNER_IsStructArg:
+ JITDUMP("it is a struct arg\n");
+ assert(varTypeIsStruct(varDsc));
+ break;
case DNER_BlockOp:
JITDUMP("written in a block op\n");
varDsc->lvLclBlockOpAddr = 1;
diff --git a/src/jit/lower.cpp b/src/jit/lower.cpp
index dbe4b5de80..3bb0b4cff0 100644
--- a/src/jit/lower.cpp
+++ b/src/jit/lower.cpp
@@ -3774,8 +3774,6 @@ void Lowering::LowerStoreInd(GenTree* node)
void Lowering::LowerBlockStore(GenTreeBlk* blkNode)
{
GenTree* src = blkNode->Data();
- // TODO-1stClassStructs: Don't require this.
- assert(blkNode->OperIsInitBlkOp() || !src->OperIsLocal());
TryCreateAddrMode(LIR::Use(BlockRange(), &blkNode->Addr(), blkNode), false);
}
diff --git a/src/jit/lowerarm64.cpp b/src/jit/lowerarm64.cpp
index 8b0665adfa..cc9e2266d2 100644
--- a/src/jit/lowerarm64.cpp
+++ b/src/jit/lowerarm64.cpp
@@ -126,6 +126,10 @@ void Lowering::TreeNodeInfoInit(GenTree* tree)
TreeNodeInfo* info = &(tree->gtLsraInfo);
RegisterType registerType = TypeGet(tree);
+ JITDUMP("TreeNodeInfoInit for: ");
+ DISPNODE(tree);
+ JITDUMP("\n");
+
switch (tree->OperGet())
{
GenTree* op1;
@@ -538,6 +542,12 @@ void Lowering::TreeNodeInfoInit(GenTree* tree)
TreeNodeInfoInitBlockStore(tree->AsBlk());
break;
+ case GT_INIT_VAL:
+ // Always a passthrough of its child's value.
+ info->srcCount = 0;
+ info->dstCount = 0;
+ break;
+
case GT_LCLHEAP:
{
info->srcCount = 1;
@@ -1220,8 +1230,9 @@ void Lowering::TreeNodeInfoInitPutArgStk(GenTreePutArgStk* argNode, fgArgTabEntr
void Lowering::TreeNodeInfoInitBlockStore(GenTreeBlk* blkNode)
{
- GenTree* dstAddr = blkNode->Addr();
- unsigned size;
+ GenTree* dstAddr = blkNode->Addr();
+ unsigned size = blkNode->gtBlkSize;
+ GenTree* source = blkNode->Data();
LinearScan* l = m_lsra;
Compiler* compiler = comp;
@@ -1229,16 +1240,44 @@ void Lowering::TreeNodeInfoInitBlockStore(GenTreeBlk* blkNode)
// We may require an additional source or temp register for the size.
blkNode->gtLsraInfo.srcCount = 2;
blkNode->gtLsraInfo.dstCount = 0;
+ GenTreePtr srcAddrOrFill = nullptr;
+ bool isInitBlk = blkNode->OperIsInitBlkOp();
- if ((blkNode->OperGet() == GT_STORE_OBJ) && (blkNode->AsObj()->gtGcPtrCount == 0))
+ if (!isInitBlk)
{
- blkNode->SetOper(GT_STORE_BLK);
+ // CopyObj or CopyBlk
+ if ((blkNode->OperGet() == GT_STORE_OBJ) && ((blkNode->AsObj()->gtGcPtrCount == 0) || blkNode->gtBlkOpGcUnsafe))
+ {
+ blkNode->SetOper(GT_STORE_BLK);
+ }
+ if (source->gtOper == GT_IND)
+ {
+ srcAddrOrFill = blkNode->Data()->gtGetOp1();
+ // We're effectively setting source as contained, but can't call MakeSrcContained, because the
+ // "inheritance" of the srcCount is to a child not a parent - it would "just work" but could be misleading.
+ // If srcAddr is already non-contained, we don't need to change it.
+ if (srcAddrOrFill->gtLsraInfo.getDstCount() == 0)
+ {
+ srcAddrOrFill->gtLsraInfo.setDstCount(1);
+ srcAddrOrFill->gtLsraInfo.setSrcCount(source->gtLsraInfo.srcCount);
+ }
+ m_lsra->clearOperandCounts(source);
+ }
+ else if (!source->IsMultiRegCall() && !source->OperIsSIMD())
+ {
+ assert(source->IsLocal());
+ MakeSrcContained(blkNode, source);
+ }
}
- if (blkNode->OperIsInitBlkOp())
+ if (isInitBlk)
{
- unsigned size = blkNode->gtBlkSize;
- GenTreePtr initVal = blkNode->Data();
+ GenTreePtr initVal = source;
+ if (initVal->OperIsInitVal())
+ {
+ initVal = initVal->gtGetOp1();
+ }
+ srcAddrOrFill = initVal;
#if 0
// TODO-ARM64-CQ: Currently we generate a helper call for every
@@ -1265,8 +1304,6 @@ void Lowering::TreeNodeInfoInitBlockStore(GenTreeBlk* blkNode)
initVal->gtType = TYP_LONG;
}
- MakeSrcContained(tree, blockSize);
-
// In case we have a buffer >= 16 bytes
// we can use SSE2 to do a 128-bit store in a single
// instruction.
@@ -1307,34 +1344,12 @@ void Lowering::TreeNodeInfoInitBlockStore(GenTreeBlk* blkNode)
{
// CopyObj or CopyBlk
// Sources are src and dest and size if not constant.
- unsigned size = blkNode->gtBlkSize;
- GenTreePtr source = blkNode->Data();
- GenTree* srcAddr = nullptr;
- if (source->gtOper == GT_IND)
- {
- srcAddr = blkNode->Data()->gtGetOp1();
- // We're effectively setting source as contained, but can't call MakeSrcContained, because the
- // "inheritance" of the srcCount is to a child not a parent - it would "just work" but could be misleading.
- // If srcAddr is already non-contained, we don't need to change it.
- if (srcAddr->gtLsraInfo.getDstCount() == 0)
- {
- srcAddr->gtLsraInfo.setDstCount(1);
- srcAddr->gtLsraInfo.setSrcCount(source->gtLsraInfo.srcCount);
- }
- m_lsra->clearOperandCounts(source);
- }
- else
- {
- assert(source->IsLocal());
- MakeSrcContained(blkNode, source);
- }
if (blkNode->OperGet() == GT_STORE_OBJ)
{
// CopyObj
GenTreeObj* objNode = blkNode->AsObj();
- GenTreePtr source = objNode->Data();
unsigned slots = objNode->gtSlots;
@@ -1363,15 +1378,19 @@ void Lowering::TreeNodeInfoInitBlockStore(GenTreeBlk* blkNode)
blkNode->gtLsraInfo.internalIntCount = 1;
dstAddr->gtLsraInfo.setSrcCandidates(l, RBM_WRITE_BARRIER_DST_BYREF);
- srcAddr->gtLsraInfo.setSrcCandidates(l, RBM_WRITE_BARRIER_SRC_BYREF);
+ // If we have a source address we want it in REG_WRITE_BARRIER_SRC_BYREF.
+ // Otherwise, if it is a local, codegen will put its address in REG_WRITE_BARRIER_SRC_BYREF,
+ // which is killed by a StoreObj (and thus needn't be reserved).
+ if (srcAddrOrFill != nullptr)
+ {
+ srcAddrOrFill->gtLsraInfo.setSrcCandidates(l, RBM_WRITE_BARRIER_SRC_BYREF);
+ }
}
else
{
// CopyBlk
- unsigned size = blkNode->gtBlkSize;
- GenTreePtr dstAddr = blkNode->Addr();
- short internalIntCount = 0;
- regMaskTP internalIntCandidates = RBM_NONE;
+ short internalIntCount = 0;
+ regMaskTP internalIntCandidates = RBM_NONE;
#if 0
// In case of a CpBlk with a constant size and less than CPBLK_UNROLL_LIMIT size
@@ -1379,11 +1398,8 @@ void Lowering::TreeNodeInfoInitBlockStore(GenTreeBlk* blkNode)
// TODO-ARM64-CQ: cpblk loop unrolling is currently not implemented.
- if (blockSize->IsCnsIntOrI() && blockSize->gtIntCon.gtIconVal <= CPBLK_UNROLL_LIMIT)
+ if ((size != 0) && (size <= INITBLK_UNROLL_LIMIT))
{
- assert(!blockSize->IsIconHandle());
- ssize_t size = blockSize->gtIntCon.gtIconVal;
-
// If we have a buffer between XMM_REGSIZE_BYTES and CPBLK_UNROLL_LIMIT bytes, we'll use SSE2.
// Structs and buffer with sizes <= CPBLK_UNROLL_LIMIT bytes are occurring in more than 95% of
// our framework assemblies, so this is the main code generation scheme we'll use.
@@ -1404,9 +1420,9 @@ void Lowering::TreeNodeInfoInitBlockStore(GenTreeBlk* blkNode)
// If src or dst are on stack, we don't have to generate the address into a register
// because it's just some constant+SP
- if (srcAddr->OperIsLocalAddr())
+ if (srcAddrOrFill != nullptr && srcAddrOrFill->OperIsLocalAddr())
{
- MakeSrcContained(blkNode, srcAddr);
+ MakeSrcContained(blkNode, srcAddrOrFill);
}
if (dstAddr->OperIsLocalAddr())
@@ -1425,15 +1441,9 @@ void Lowering::TreeNodeInfoInitBlockStore(GenTreeBlk* blkNode)
dstAddr->gtLsraInfo.setSrcCandidates(l, RBM_ARG_0);
// The srcAddr goes in arg1.
- if (srcAddr != nullptr)
+ if (srcAddrOrFill != nullptr)
{
- srcAddr->gtLsraInfo.setSrcCandidates(l, RBM_ARG_1);
- }
- else
- {
- // This is a local; we'll use a temp register for its address.
- internalIntCandidates |= RBM_ARG_1;
- internalIntCount++;
+ srcAddrOrFill->gtLsraInfo.setSrcCandidates(l, RBM_ARG_1);
}
if (size != 0)
{
diff --git a/src/jit/lowerxarch.cpp b/src/jit/lowerxarch.cpp
index f0258a2581..445d9a5e70 100644
--- a/src/jit/lowerxarch.cpp
+++ b/src/jit/lowerxarch.cpp
@@ -660,6 +660,12 @@ void Lowering::TreeNodeInfoInit(GenTree* tree)
TreeNodeInfoInitBlockStore(tree->AsBlk());
break;
+ case GT_INIT_VAL:
+ // Always a passthrough of its child's value.
+ info->srcCount = 0;
+ info->dstCount = 0;
+ break;
+
case GT_LCLHEAP:
TreeNodeInfoInitLclHeap(tree);
break;
@@ -1655,7 +1661,7 @@ void Lowering::TreeNodeInfoInitBlockStore(GenTreeBlk* blkNode)
}
m_lsra->clearOperandCounts(source);
}
- else if (!source->OperIsSIMD())
+ else if (!source->IsMultiRegCall() && !source->OperIsSIMD())
{
assert(source->IsLocal());
MakeSrcContained(blkNode, source);
@@ -1665,7 +1671,11 @@ void Lowering::TreeNodeInfoInitBlockStore(GenTreeBlk* blkNode)
if (isInitBlk)
{
GenTree* initVal = source;
- srcAddrOrFill = source;
+ if (initVal->OperIsInitVal())
+ {
+ initVal = initVal->gtGetOp1();
+ }
+ srcAddrOrFill = initVal;
// If we have an InitBlk with constant block size we can optimize several ways:
// a) If the size is smaller than a small memory page but larger than INITBLK_UNROLL_LIMIT bytes
// we use rep stosb since this reduces the register pressure in LSRA and we have
@@ -1717,6 +1727,10 @@ void Lowering::TreeNodeInfoInitBlockStore(GenTreeBlk* blkNode)
// a pack of 16 init value constants.
blkNode->gtLsraInfo.internalFloatCount = 1;
blkNode->gtLsraInfo.setInternalCandidates(l, l->internalFloatRegCandidates());
+ if ((fill == 0) && ((size & 0xf) == 0))
+ {
+ MakeSrcContained(blkNode, source);
+ }
}
blkNode->gtBlkOpKind = GenTreeBlk::BlkOpKindUnroll;
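
The new containment condition in the unroll path above reads in isolation: a zero fill whose total size is a multiple of 16 bytes can be stored entirely from a zeroed XMM register, so the source node needs no register of its own. A standalone restatement (CanContainZeroFill is an illustrative name):

    #include <cassert>

    // Zero fill + size divisible by 16 => the init source can be contained,
    // matching the (fill == 0) && ((size & 0xf) == 0) check above.
    static bool CanContainZeroFill(long long fill, unsigned size)
    {
        return (fill == 0) && ((size & 0xf) == 0);
    }

    int main()
    {
        assert(CanContainZeroFill(0, 32));     // 16-byte multiple, zero fill
        assert(!CanContainZeroFill(0, 24));    // not a 16-byte multiple
        assert(!CanContainZeroFill(0xAB, 32)); // non-zero fill needs a register
        return 0;
    }
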
diff --git a/src/jit/morph.cpp b/src/jit/morph.cpp
index b55c24a55b..8d0ff0c67d 100644
--- a/src/jit/morph.cpp
+++ b/src/jit/morph.cpp
@@ -8449,7 +8449,7 @@ GenTreePtr Compiler::fgMorphOneAsgBlockOp(GenTreePtr tree)
// with the bits to create a single assignment.
noway_assert(size <= REGSIZE_BYTES);
- if (isInitBlock && (src->gtOper != GT_CNS_INT))
+ if (isInitBlock && !src->IsConstInitVal())
{
return nullptr;
}
@@ -8622,8 +8622,12 @@ GenTreePtr Compiler::fgMorphOneAsgBlockOp(GenTreePtr tree)
}
else
#endif
- if (src->IsCnsIntOrI())
{
+ if (src->OperIsInitVal())
+ {
+ src = src->gtGetOp1();
+ }
+ assert(src->IsCnsIntOrI());
// This will mutate the integer constant, in place, to be the correct
// value for the type we are using in the assignment.
src->AsIntCon()->FixupInitBlkValue(asgType);
@@ -8691,7 +8695,8 @@ GenTreePtr Compiler::fgMorphOneAsgBlockOp(GenTreePtr tree)
GenTreePtr Compiler::fgMorphInitBlock(GenTreePtr tree)
{
- noway_assert(tree->gtOper == GT_ASG && varTypeIsStruct(tree));
+ // We must have the GT_ASG form of InitBlkOp.
+ noway_assert((tree->OperGet() == GT_ASG) && tree->OperIsInitBlkOp());
#ifdef DEBUG
bool morphed = false;
#endif // DEBUG
@@ -8706,6 +8711,12 @@ GenTreePtr Compiler::fgMorphInitBlock(GenTreePtr tree)
tree->gtOp.gtOp1 = dest;
}
tree->gtType = dest->TypeGet();
+ // (Constant propagation may cause a TYP_STRUCT lclVar to be changed to GT_CNS_INT, and its
+ // type will be the type of the original lclVar, in which case we will change it to TYP_INT).
+ if ((src->OperGet() == GT_CNS_INT) && varTypeIsStruct(src))
+ {
+ src->gtType = TYP_INT;
+ }
JITDUMP("\nfgMorphInitBlock:");
GenTreePtr oneAsgTree = fgMorphOneAsgBlockOp(tree);
@@ -8717,7 +8728,7 @@ GenTreePtr Compiler::fgMorphInitBlock(GenTreePtr tree)
else
{
GenTree* destAddr = nullptr;
- GenTree* initVal = src;
+ GenTree* initVal = src->OperIsInitVal() ? src->gtGetOp1() : src;
GenTree* blockSize = nullptr;
unsigned blockWidth = 0;
FieldSeqNode* destFldSeq = nullptr;
@@ -8786,6 +8797,7 @@ GenTreePtr Compiler::fgMorphInitBlock(GenTreePtr tree)
if (destLclVar->lvPromoted && blockWidthIsConst)
{
+ assert(initVal->OperGet() == GT_CNS_INT);
noway_assert(varTypeIsStruct(destLclVar));
noway_assert(!opts.MinOpts());
if (destLclVar->lvAddrExposed & destLclVar->lvContainsHoles)
@@ -8845,25 +8857,9 @@ GenTreePtr Compiler::fgMorphInitBlock(GenTreePtr tree)
#if CPU_USES_BLOCK_MOVE
compBlkOpUsed = true;
#endif
- if (!dest->OperIsBlk())
- {
- GenTree* destAddr = gtNewOperNode(GT_ADDR, TYP_BYREF, dest);
- CORINFO_CLASS_HANDLE clsHnd = gtGetStructHandleIfPresent(dest);
- if (clsHnd == NO_CLASS_HANDLE)
- {
- dest = new (this, GT_BLK) GenTreeBlk(GT_BLK, dest->TypeGet(), destAddr, blockWidth);
- }
- else
- {
- GenTree* newDest = gtNewObjNode(clsHnd, destAddr);
- if (newDest->OperGet() == GT_OBJ)
- {
- gtSetObjGcInfo(newDest->AsObj());
- }
- dest = newDest;
- }
- tree->gtOp.gtOp1 = dest;
- }
+ dest = fgMorphBlockOperand(dest, dest->TypeGet(), blockWidth, true);
+ tree->gtOp.gtOp1 = dest;
+ tree->gtFlags |= (dest->gtFlags & GTF_ALL_EFFECT);
}
else
{
@@ -9163,7 +9159,7 @@ GenTree* Compiler::fgMorphBlkNode(GenTreePtr tree, bool isDest)
//
// Notes:
// This does the following:
-// - Ensures that a struct operand is a block node.
+// - Ensures that a struct operand is a block node or (for non-LEGACY_BACKEND) lclVar.
// - Ensures that any COMMAs are above ADDR nodes.
// Although 'tree' WAS an operand of a block assignment, the assignment
// may have been retyped to be a scalar assignment.
@@ -9172,10 +9168,6 @@ GenTree* Compiler::fgMorphBlockOperand(GenTree* tree, var_types asgType, unsigne
{
GenTree* effectiveVal = tree->gtEffectiveVal();
- // TODO-1stClassStucts: We would like to transform non-TYP_STRUCT nodes to
- // either plain lclVars or GT_INDs. However, for now we want to preserve most
- // of the block nodes until the Rationalizer.
-
if (!varTypeIsStruct(asgType))
{
if (effectiveVal->OperIsIndir())
@@ -9202,69 +9194,141 @@ GenTree* Compiler::fgMorphBlockOperand(GenTree* tree, var_types asgType, unsigne
}
else
{
+ GenTreeIndir* indirTree = nullptr;
+ GenTreeLclVarCommon* lclNode = nullptr;
+ bool needsIndirection = true;
+
+ if (effectiveVal->OperIsIndir())
+ {
+ indirTree = effectiveVal->AsIndir();
+ GenTree* addr = effectiveVal->AsIndir()->Addr();
+ if ((addr->OperGet() == GT_ADDR) && (addr->gtGetOp1()->OperGet() == GT_LCL_VAR))
+ {
+ lclNode = addr->gtGetOp1()->AsLclVarCommon();
+ }
+ }
+ else if (effectiveVal->OperGet() == GT_LCL_VAR)
+ {
+ lclNode = effectiveVal->AsLclVarCommon();
+ }
#ifdef FEATURE_SIMD
if (varTypeIsSIMD(asgType))
{
- if (effectiveVal->OperIsIndir())
+ if ((indirTree != nullptr) && (lclNode == nullptr) && (indirTree->Addr()->OperGet() == GT_ADDR) &&
+ (indirTree->Addr()->gtGetOp1()->gtOper == GT_SIMD))
{
- GenTree* addr = effectiveVal->AsIndir()->Addr();
- if (!isDest && (addr->OperGet() == GT_ADDR))
- {
- if ((addr->gtGetOp1()->gtOper == GT_SIMD) || (addr->gtGetOp1()->OperGet() == GT_LCL_VAR))
- {
- effectiveVal = addr->gtGetOp1();
- }
- }
- else if (isDest && !effectiveVal->OperIsBlk())
- {
- effectiveVal = new (this, GT_BLK) GenTreeBlk(GT_BLK, asgType, addr, blockWidth);
- }
+ assert(!isDest);
+ needsIndirection = false;
+ effectiveVal = indirTree->Addr()->gtGetOp1();
}
- else if (!effectiveVal->OperIsSIMD() && (!effectiveVal->IsLocal() || isDest) && !effectiveVal->OperIsBlk())
+ if (effectiveVal->OperIsSIMD())
{
- GenTree* addr = gtNewOperNode(GT_ADDR, TYP_BYREF, effectiveVal);
- effectiveVal = new (this, GT_BLK) GenTreeBlk(GT_BLK, asgType, addr, blockWidth);
+ needsIndirection = false;
}
}
- else
#endif // FEATURE_SIMD
- if (!effectiveVal->OperIsBlk())
+ if (lclNode != nullptr)
{
- GenTree* addr = gtNewOperNode(GT_ADDR, TYP_BYREF, effectiveVal);
- CORINFO_CLASS_HANDLE clsHnd = gtGetStructHandleIfPresent(effectiveVal);
- GenTree* newTree;
- if (clsHnd == NO_CLASS_HANDLE)
+ LclVarDsc* varDsc = &(lvaTable[lclNode->gtLclNum]);
+ if (varTypeIsStruct(varDsc) && (varDsc->lvExactSize == blockWidth))
{
- newTree = new (this, GT_BLK) GenTreeBlk(GT_BLK, TYP_STRUCT, addr, blockWidth);
+#ifndef LEGACY_BACKEND
+ effectiveVal = lclNode;
+ needsIndirection = false;
+#endif // !LEGACY_BACKEND
}
else
{
- newTree = gtNewObjNode(clsHnd, addr);
- if (isDest && (newTree->OperGet() == GT_OBJ))
+ // This may be a lclVar that was determined to be address-exposed.
+ effectiveVal->gtFlags |= (lclNode->gtFlags & GTF_ALL_EFFECT);
+ }
+ }
+ if (needsIndirection)
+ {
+ if (indirTree != nullptr)
+ {
+ // We should never find a struct indirection on the lhs of an assignment.
+ assert(!isDest || indirTree->OperIsBlk());
+ if (!isDest && indirTree->OperIsBlk())
{
- gtSetObjGcInfo(newTree->AsObj());
+ (void)fgMorphBlkToInd(effectiveVal->AsBlk(), asgType);
}
- if (effectiveVal->IsLocal() && ((effectiveVal->gtFlags & GTF_GLOB_EFFECT) == 0))
+ }
+ else
+ {
+ GenTree* newTree;
+ GenTree* addr = gtNewOperNode(GT_ADDR, TYP_BYREF, effectiveVal);
+ if (isDest)
{
- // This is not necessarily a global reference, though gtNewObjNode always assumes it is.
- // TODO-1stClassStructs: This check should be done in the GenTreeObj constructor,
- // where it currently sets GTF_GLOB_EFFECT unconditionally, but it is handled
- // separately now to avoid excess diffs.
- newTree->gtFlags &= ~(GTF_GLOB_EFFECT);
+ CORINFO_CLASS_HANDLE clsHnd = gtGetStructHandleIfPresent(effectiveVal);
+ if (clsHnd == NO_CLASS_HANDLE)
+ {
+ newTree = new (this, GT_BLK) GenTreeBlk(GT_BLK, TYP_STRUCT, addr, blockWidth);
+ }
+ else
+ {
+ newTree = gtNewObjNode(clsHnd, addr);
+ if (isDest && (newTree->OperGet() == GT_OBJ))
+ {
+ gtSetObjGcInfo(newTree->AsObj());
+ }
+ if (effectiveVal->IsLocal() && ((effectiveVal->gtFlags & GTF_GLOB_EFFECT) == 0))
+ {
+ // This is not necessarily a global reference, though gtNewObjNode always assumes it is.
+ // TODO-1stClassStructs: This check should be done in the GenTreeObj constructor,
+ // where it currently sets GTF_GLOB_EFFECT unconditionally, but it is handled
+ // separately now to avoid excess diffs.
+ newTree->gtFlags &= ~(GTF_GLOB_EFFECT);
+ }
+ }
+ }
+ else
+ {
+ newTree = new (this, GT_IND) GenTreeIndir(GT_IND, asgType, addr, nullptr);
}
+ effectiveVal = newTree;
}
- effectiveVal = newTree;
}
}
- if (!isDest && effectiveVal->OperIsBlk())
- {
- (void)fgMorphBlkToInd(effectiveVal->AsBlk(), asgType);
- }
tree = effectiveVal;
return tree;
}
//------------------------------------------------------------------------
+// fgMorphUnsafeBlk: Convert a CopyObj with a dest on the stack to a GC Unsafe CopyBlk
+//
+// Arguments:
+// dest - the GT_OBJ or GT_STORE_OBJ
+//
+// Assumptions:
+// The destination must be known (by the caller) to be on the stack.
+//
+// Notes:
+// If we have a CopyObj with a dest on the stack, and its size is small enough
+// to be completely unrolled (i.e. between [16..64] bytes), we will convert it into a
+// GC Unsafe CopyBlk that is non-interruptible.
+// This is not supported for the JIT32_GCENCODER, in which case this method is a no-op.
+//
+void Compiler::fgMorphUnsafeBlk(GenTreeObj* dest)
+{
+#if defined(CPBLK_UNROLL_LIMIT) && !defined(JIT32_GCENCODER)
+ assert(dest->gtGcPtrCount != 0);
+ unsigned blockWidth = dest->AsBlk()->gtBlkSize;
+#ifdef DEBUG
+ bool destOnStack = false;
+ GenTree* destAddr = dest->Addr();
+ assert(destAddr->IsLocalAddrExpr() != nullptr);
+#endif
+ if ((blockWidth >= (2 * TARGET_POINTER_SIZE)) && (blockWidth <= CPBLK_UNROLL_LIMIT))
+ {
+ genTreeOps newOper = (dest->gtOper == GT_OBJ) ? GT_BLK : GT_STORE_BLK;
+ dest->SetOper(newOper);
+ dest->AsBlk()->gtBlkOpGcUnsafe = true; // Mark as a GC unsafe copy block
+ }
+#endif // defined(CPBLK_UNROLL_LIMIT) && !defined(JIT32_GCENCODER)
+}
+
+//------------------------------------------------------------------------
// fgMorphCopyBlock: Perform the Morphing of block copy
//
// Arguments:
@@ -9544,12 +9608,19 @@ GenTreePtr Compiler::fgMorphCopyBlock(GenTreePtr tree)
// Are both dest and src promoted structs?
if (destDoFldAsg && srcDoFldAsg)
{
- // Both structs should be of the same type, if not we will use a copy block
+ // Both structs should be of the same type, or each have a single field of the same type.
+ // If not we will use a copy block.
if (lvaTable[destLclNum].lvVerTypeInfo.GetClassHandle() !=
lvaTable[srcLclNum].lvVerTypeInfo.GetClassHandle())
{
- requiresCopyBlock = true; // Mismatched types, leave as a CopyBlock
- JITDUMP(" with mismatched types");
+ unsigned destFieldNum = lvaTable[destLclNum].lvFieldLclStart;
+ unsigned srcFieldNum = lvaTable[srcLclNum].lvFieldLclStart;
+ if ((lvaTable[destLclNum].lvFieldCnt != 1) || (lvaTable[srcLclNum].lvFieldCnt != 1) ||
+ (lvaTable[destFieldNum].lvType != lvaTable[srcFieldNum].lvType))
+ {
+ requiresCopyBlock = true; // Mismatched types, leave as a CopyBlock
+ JITDUMP(" with mismatched types");
+ }
}
}
// Are neither dest or src promoted structs?
@@ -9643,9 +9714,8 @@ GenTreePtr Compiler::fgMorphCopyBlock(GenTreePtr tree)
var_types asgType = dest->TypeGet();
dest = fgMorphBlockOperand(dest, asgType, blockWidth, true /*isDest*/);
asg->gtOp.gtOp1 = dest;
- hasGCPtrs = ((dest->OperGet() == GT_OBJ) && (dest->AsObj()->gtGcPtrCount != 0));
+ asg->gtFlags |= (dest->gtFlags & GTF_ALL_EFFECT);
-#if defined(CPBLK_UNROLL_LIMIT) && !defined(JIT32_GCENCODER)
// Note that the unrolling of CopyBlk is only implemented on some platforms.
// Currently that includes x64 and ARM but not x86: the code generation for this
// construct requires the ability to mark certain regions of the generated code
@@ -9654,26 +9724,13 @@ GenTreePtr Compiler::fgMorphCopyBlock(GenTreePtr tree)
// If we have a CopyObj with a dest on the stack
// we will convert it into an GC Unsafe CopyBlk that is non-interruptible
- // when its size is small enouch to be completely unrolled (i.e. between [16..64] bytes)
+ // when its size is small enough to be completely unrolled (i.e. between [16..64] bytes).
+ // (This is not supported for the JIT32_GCENCODER, for which fgMorphUnsafeBlk is a no-op.)
//
- if (hasGCPtrs && destOnStack && blockWidthIsConst && (blockWidth >= (2 * TARGET_POINTER_SIZE)) &&
- (blockWidth <= CPBLK_UNROLL_LIMIT))
+ if (destOnStack && (dest->OperGet() == GT_OBJ))
{
- if (dest->OperGet() == GT_OBJ)
- {
- dest->SetOper(GT_BLK);
- dest->AsBlk()->gtBlkOpGcUnsafe = true; // Mark as a GC unsafe copy block
- }
- else
- {
- assert(dest->OperIsLocal());
- GenTree* destAddr = gtNewOperNode(GT_ADDR, TYP_BYREF, dest);
- dest = new (this, GT_BLK) GenTreeBlk(GT_BLK, dest->TypeGet(), destAddr, blockWidth);
- dest->AsBlk()->gtBlkOpGcUnsafe = true; // Mark as a GC unsafe copy block
- tree->gtOp.gtOp1 = dest;
- }
+ fgMorphUnsafeBlk(dest->AsObj());
}
-#endif // defined(CPBLK_UNROLL_LIMIT) && !defined(JIT32_GCENCODER)
// Eliminate the "OBJ or BLK" node on the rhs.
rhs = fgMorphBlockOperand(rhs, asgType, blockWidth, false /*!isDest*/);
@@ -9722,8 +9779,6 @@ GenTreePtr Compiler::fgMorphCopyBlock(GenTreePtr tree)
// To do fieldwise assignments for both sides, they'd better be the same struct type!
// All of these conditions were checked above...
assert(destLclNum != BAD_VAR_NUM && srcLclNum != BAD_VAR_NUM);
- assert(lvaTable[destLclNum].lvVerTypeInfo.GetClassHandle() ==
- lvaTable[srcLclNum].lvVerTypeInfo.GetClassHandle());
assert(destLclVar != nullptr && srcLclVar != nullptr && destLclVar->lvFieldCnt == srcLclVar->lvFieldCnt);
fieldCnt = destLclVar->lvFieldCnt;
@@ -10417,17 +10472,6 @@ GenTreePtr Compiler::fgMorphSmpOp(GenTreePtr tree, MorphAddrContext* mac)
/* fgDoNormalizeOnStore can change op2 */
noway_assert(op1 == tree->gtOp.gtOp1);
op2 = tree->gtOp.gtOp2;
- // TODO-1stClassStructs: this is here to match previous behavior, but results in some
- // unnecessary pessimization in the handling of addresses in fgMorphCopyBlock().
- if (tree->OperIsBlkOp())
- {
- op1->gtFlags |= GTF_DONT_CSE;
- if (tree->OperIsCopyBlkOp() &&
- (op2->IsLocal() || (op2->OperIsIndir() && (op2->AsIndir()->Addr()->OperGet() == GT_ADDR))))
- {
- op2->gtFlags |= GTF_DONT_CSE;
- }
- }
#ifdef FEATURE_SIMD
{
@@ -13794,6 +13838,16 @@ GenTree* Compiler::fgMorphSmpOpOptional(GenTreeOp* tree)
break;
+ case GT_INIT_VAL:
+ // Initialization values for initBlk have special semantics - their lower
+ // byte is used to fill the struct. However, we allow 0 as a "bare" value,
+ // which enables them to get a VNForZero, and be propagated.
+ if (op1->IsIntegralConst(0))
+ {
+ return op1;
+ }
+ break;
+
default:
break;
}
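
fgMorphUnsafeBlk above converts a stack-destination CopyObj only when its size falls in the unroll window. A worked check of that window under assumed x64 constants (TARGET_POINTER_SIZE of 8 and CPBLK_UNROLL_LIMIT of 64, matching the "[16..64] bytes" comment):

    #include <cassert>

    constexpr unsigned kPointerSize      = 8;  // assumed TARGET_POINTER_SIZE
    constexpr unsigned kCpBlkUnrollLimit = 64; // assumed CPBLK_UNROLL_LIMIT

    // Mirrors the size test in fgMorphUnsafeBlk: at least two pointer-sized
    // slots, at most the unroll limit.
    static bool InGcUnsafeUnrollWindow(unsigned blockWidth)
    {
        return (blockWidth >= 2 * kPointerSize) && (blockWidth <= kCpBlkUnrollLimit);
    }

    int main()
    {
        assert(!InGcUnsafeUnrollWindow(8));  // below two pointer slots
        assert(InGcUnsafeUnrollWindow(16));  // lower edge of the window
        assert(InGcUnsafeUnrollWindow(64));  // upper edge of the window
        assert(!InGcUnsafeUnrollWindow(72)); // too big: stays interruptible
        return 0;
    }
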
diff --git a/src/jit/rationalize.cpp b/src/jit/rationalize.cpp
index 42c2cf67f4..228a89ac27 100644
--- a/src/jit/rationalize.cpp
+++ b/src/jit/rationalize.cpp
@@ -434,33 +434,77 @@ void Rationalizer::RewriteAssignment(LIR::Use& use)
genTreeOps locationOp = location->OperGet();
-#ifdef FEATURE_SIMD
- if (varTypeIsSIMD(location) && assignment->OperIsInitBlkOp())
+ if (assignment->OperIsBlkOp())
{
- if (location->OperGet() == GT_LCL_VAR)
+#ifdef FEATURE_SIMD
+ if (varTypeIsSIMD(location) && assignment->OperIsInitBlkOp())
{
- var_types simdType = location->TypeGet();
- GenTree* initVal = assignment->gtOp.gtOp2;
- var_types baseType = comp->getBaseTypeOfSIMDLocal(location);
- if (baseType != TYP_UNKNOWN)
+ if (location->OperGet() == GT_LCL_VAR)
{
- GenTreeSIMD* simdTree = new (comp, GT_SIMD)
- GenTreeSIMD(simdType, initVal, SIMDIntrinsicInit, baseType, genTypeSize(simdType));
- assignment->gtOp.gtOp2 = simdTree;
- value = simdTree;
- initVal->gtNext = simdTree;
- simdTree->gtPrev = initVal;
-
- simdTree->gtNext = location;
- location->gtPrev = simdTree;
+ var_types simdType = location->TypeGet();
+ GenTree* initVal = assignment->gtOp.gtOp2;
+ var_types baseType = comp->getBaseTypeOfSIMDLocal(location);
+ if (baseType != TYP_UNKNOWN)
+ {
+ GenTreeSIMD* simdTree = new (comp, GT_SIMD)
+ GenTreeSIMD(simdType, initVal, SIMDIntrinsicInit, baseType, genTypeSize(simdType));
+ assignment->gtOp.gtOp2 = simdTree;
+ value = simdTree;
+ initVal->gtNext = simdTree;
+ simdTree->gtPrev = initVal;
+
+ simdTree->gtNext = location;
+ location->gtPrev = simdTree;
+ }
}
}
- else
+#endif // FEATURE_SIMD
+ if ((location->TypeGet() == TYP_STRUCT) && !assignment->IsPhiDefn() && !value->IsMultiRegCall())
{
- assert(location->OperIsBlk());
+ if ((location->OperGet() == GT_LCL_VAR))
+ {
+ // We need to construct a block node for the location.
+ // Modify lcl to be the address form.
+ location->SetOper(addrForm(locationOp));
+ LclVarDsc* varDsc = &(comp->lvaTable[location->AsLclVarCommon()->gtLclNum]);
+ location->gtType = TYP_BYREF;
+ GenTreeBlk* storeBlk = nullptr;
+ unsigned int size = varDsc->lvExactSize;
+
+ if (varDsc->lvStructGcCount != 0)
+ {
+ CORINFO_CLASS_HANDLE structHnd = varDsc->lvVerTypeInfo.GetClassHandle();
+ GenTreeObj* objNode = comp->gtNewObjNode(structHnd, location)->AsObj();
+ unsigned int slots = (unsigned)(roundUp(size, TARGET_POINTER_SIZE) / TARGET_POINTER_SIZE);
+
+ objNode->SetGCInfo(varDsc->lvGcLayout, varDsc->lvStructGcCount, slots);
+ objNode->ChangeOper(GT_STORE_OBJ);
+ objNode->SetData(value);
+ comp->fgMorphUnsafeBlk(objNode);
+ storeBlk = objNode;
+ }
+ else
+ {
+ storeBlk = new (comp, GT_STORE_BLK) GenTreeBlk(GT_STORE_BLK, TYP_STRUCT, location, value, size);
+ }
+ storeBlk->gtFlags |= (GTF_REVERSE_OPS | GTF_ASG);
+ storeBlk->gtFlags |= ((location->gtFlags | value->gtFlags) & GTF_ALL_EFFECT);
+
+ GenTree* insertionPoint = location->gtNext;
+ BlockRange().InsertBefore(insertionPoint, storeBlk);
+ use.ReplaceWith(comp, storeBlk);
+ BlockRange().Remove(assignment);
+ JITDUMP("After transforming local struct assignment into a block op:\n");
+ DISPTREERANGE(BlockRange(), use.Def());
+ JITDUMP("\n");
+ return;
+ }
+ else
+ {
+ assert(location->OperIsBlk());
+ }
}
}
-#endif // FEATURE_SIMD
switch (locationOp)
{
@@ -532,7 +576,7 @@ void Rationalizer::RewriteAssignment(LIR::Use& use)
storeBlk->SetOperRaw(storeOper);
storeBlk->gtFlags &= ~GTF_DONT_CSE;
storeBlk->gtFlags |= (assignment->gtFlags & (GTF_ALL_EFFECT | GTF_REVERSE_OPS | GTF_BLK_VOLATILE |
- GTF_BLK_UNALIGNED | GTF_BLK_INIT | GTF_DONT_CSE));
+ GTF_BLK_UNALIGNED | GTF_DONT_CSE));
storeBlk->gtBlk.Data() = value;
// Replace the assignment node with the store
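
The GT_STORE_OBJ construction above computes the GC slot count as roundUp(size, TARGET_POINTER_SIZE) / TARGET_POINTER_SIZE. A quick standalone check of that arithmetic under an assumed 8-byte pointer size:

    #include <cassert>

    constexpr unsigned kPointerSize = 8; // assumed TARGET_POINTER_SIZE (x64)

    // roundUp(size, kPointerSize) / kPointerSize, written directly.
    static unsigned SlotCount(unsigned size)
    {
        return (size + kPointerSize - 1) / kPointerSize;
    }

    int main()
    {
        assert(SlotCount(8)  == 1);
        assert(SlotCount(12) == 2); // a 12-byte struct occupies two GC slots
        assert(SlotCount(24) == 3);
        return 0;
    }
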
diff --git a/src/jit/regalloc.cpp b/src/jit/regalloc.cpp
index d6302834c1..8a7ad5a163 100644
--- a/src/jit/regalloc.cpp
+++ b/src/jit/regalloc.cpp
@@ -4458,6 +4458,13 @@ regMaskTP Compiler::rpPredictTreeRegUse(GenTreePtr tree,
case GT_ARR_LENGTH:
goto GENERIC_UNARY;
+ case GT_INIT_VAL:
+ // This unary operator simply passes through the value from its child (much like GT_NOP)
+ // and thus won't need a scratch register.
+ regMask = rpPredictTreeRegUse(op1, predictReg, lockedRegs, rsvdRegs);
+ tree->gtUsedRegs = op1->gtUsedRegs;
+ goto RETURN_CHECK;
+
default:
#ifdef DEBUG
gtDispTree(tree);
diff --git a/src/jit/valuenum.cpp b/src/jit/valuenum.cpp
index 82b9278d4a..e6a539cbcc 100644
--- a/src/jit/valuenum.cpp
+++ b/src/jit/valuenum.cpp
@@ -3752,8 +3752,9 @@ static genTreeOps genTreeOpsIllegalAsVNFunc[] = {GT_IND, // When we do heap memo
// These need special semantics:
GT_COMMA, // == second argument (but with exception(s) from first).
GT_ADDR, GT_ARR_BOUNDS_CHECK,
- GT_OBJ, // May reference heap memory.
- GT_BLK, // May reference heap memory.
+ GT_OBJ, // May reference heap memory.
+ GT_BLK, // May reference heap memory.
+ GT_INIT_VAL, // Not strictly a pass-through.
// These control-flow operations need no values.
GT_JTRUE, GT_RETURN, GT_SWITCH, GT_RETFILT, GT_CKFINITE};
@@ -4904,9 +4905,6 @@ void Compiler::fgValueNumberBlockAssignment(GenTreePtr tree, bool evalAsgLhsInd)
}
#endif // DEBUG
}
- // Initblock's are of type void. Give them the void "value" -- they may occur in argument lists, which we
- // want to be able to give VN's to.
- tree->gtVNPair.SetBoth(ValueNumStore::VNForVoid());
}
else
{
@@ -4914,6 +4912,9 @@ void Compiler::fgValueNumberBlockAssignment(GenTreePtr tree, bool evalAsgLhsInd)
// TODO-CQ: Why not be complete, and get this case right?
fgMutateHeap(tree DEBUGARG("INITBLK - non local"));
}
+ // Initblocks are of type void. Give them the void "value" -- they may occur in argument lists, which we
+ // want to be able to give VNs to.
+ tree->gtVNPair.SetBoth(ValueNumStore::VNForVoid());
}
else
{
@@ -6241,17 +6242,12 @@ void Compiler::fgValueNumberTree(GenTreePtr tree, bool evalAsgLhsInd)
}
tree->gtVNPair = vnStore->VNPWithExc(tree->gtVNPair, addrXvnp);
}
- else if (!varTypeIsStruct(tree) && vnStore->GetVNFunc(addrNvnp.GetLiberal(), &funcApp) &&
- (funcApp.m_func == VNF_PtrToArrElem))
+ else if (vnStore->GetVNFunc(addrNvnp.GetLiberal(), &funcApp) && (funcApp.m_func == VNF_PtrToArrElem))
{
- // TODO-1stClassStructs: The above condition need not exclude struct types, but it is
- // excluded for now to minimize diffs.
fgValueNumberArrIndexVal(tree, &funcApp, addrXvnp.GetLiberal());
}
- else if (!varTypeIsStruct(tree) && addr->IsFieldAddr(this, &obj, &staticOffset, &fldSeq2))
+ else if (addr->IsFieldAddr(this, &obj, &staticOffset, &fldSeq2))
{
- // TODO-1stClassStructs: The above condition need not exclude struct types, but it is
- // excluded for now to minimize diffs.
if (fldSeq2 == FieldSeqStore::NotAField())
{
tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
@@ -6671,7 +6667,7 @@ void Compiler::fgValueNumberCastTree(GenTreePtr tree)
bool srcIsUnsigned = ((tree->gtFlags & GTF_UNSIGNED) != 0);
bool hasOverflowCheck = tree->gtOverflowEx();
- assert(genActualType(castToType) == tree->TypeGet()); // Insure that the resultType is correct
+ assert(genActualType(castToType) == genActualType(tree->TypeGet())); // Ensure that the resultType is correct
tree->gtVNPair = vnStore->VNPairForCast(srcVNPair, castToType, castFromType, srcIsUnsigned, hasOverflowCheck);
}
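
The relaxed cast assert at the end applies genActualType to both sides because small integer types normalize to TYP_INT on evaluation, so the cast node's own type may legitimately be a small type. A sketch of why the one-sided form could fire (the genActualType model below is simplified to the integer cases):

    #include <cassert>

    enum var_types { TYP_BYTE, TYP_SHORT, TYP_INT, TYP_LONG };

    // Simplified model of genActualType: small int types widen to TYP_INT.
    static var_types genActualType(var_types t)
    {
        return (t == TYP_BYTE || t == TYP_SHORT) ? TYP_INT : t;
    }

    int main()
    {
        // A cast to TYP_BYTE whose node kept the small type: the old assert
        // compared genActualType(TYP_BYTE) == TYP_BYTE (TYP_INT == TYP_BYTE)
        // and fired; normalizing both sides compares TYP_INT == TYP_INT.
        var_types castToType = TYP_BYTE;
        var_types treeType   = TYP_BYTE;
        assert(genActualType(castToType) == genActualType(treeType));
        return 0;
    }
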