author    Carol Eidt <carol.eidt@microsoft.com>  2018-04-11 13:41:22 -0700
committer GitHub <noreply@github.com>  2018-04-11 13:41:22 -0700
commit    2c51fd0a660a09469d3ee365fb9e081abaf04a27 (patch)
tree      5223d20febdeef0a092c3578ea8f7c628c6a2b3a /src
parent    300f41c2e6fc78e052e6ff1aa502ec9aa8dcad3c (diff)
parent    d62569872457dff731fa3ae4c2888e8182c03190 (diff)
Merge pull request #17431 from CarolEidt/Fix17389
LSRA: remove last uses only at use point
Diffstat (limited to 'src')
-rw-r--r--  src/jit/lsra.h         22
-rw-r--r--  src/jit/lsrabuild.cpp  56
-rw-r--r--  src/jit/lsraxarch.cpp  16
3 files changed, 70 insertions(+), 24 deletions(-)
diff --git a/src/jit/lsra.h b/src/jit/lsra.h
index d3d3f99b00..22e3ead7ec 100644
--- a/src/jit/lsra.h
+++ b/src/jit/lsra.h
@@ -1683,6 +1683,28 @@ private:
bool isRMWRegOper(GenTree* tree);
void BuildMul(GenTree* tree);
void SetContainsAVXFlags(bool isFloatingPointType = true, unsigned sizeOfSIMDVector = 0);
+ // Move the last use bit, if any, from 'fromTree' to 'toTree'; 'fromTree' must be contained.
+ void CheckAndMoveRMWLastUse(GenTree* fromTree, GenTree* toTree)
+ {
+ // If 'fromTree' is not a last-use lclVar, there's nothing to do.
+ if ((fromTree == nullptr) || !fromTree->OperIs(GT_LCL_VAR) || ((fromTree->gtFlags & GTF_VAR_DEATH) == 0))
+ {
+ return;
+ }
+ // If 'fromTree' was a lclVar, it must be contained and 'toTree' must match.
+ if (!fromTree->isContained() || (toTree == nullptr) || !toTree->OperIs(GT_LCL_VAR) ||
+ (fromTree->AsLclVarCommon()->gtLclNum != toTree->AsLclVarCommon()->gtLclNum))
+ {
+ assert(!"Unmatched RMW indirections");
+ return;
+ }
+ // This is probably not necessary, but keeps things consistent.
+ fromTree->gtFlags &= ~GTF_VAR_DEATH;
+ if (toTree != nullptr) // Just to be conservative
+ {
+ toTree->gtFlags |= GTF_VAR_DEATH;
+ }
+ }
#endif // defined(_TARGET_XARCH_)
#ifdef FEATURE_SIMD
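
As a self-contained model of the new helper (a sketch only: the Node struct and moveLastUse below are hypothetical stand-ins for GenTree and CheckAndMoveRMWLastUse, not the JIT's actual API), the transfer amounts to clearing the death bit at the contained position and re-marking it on the node where the use actually occurs:

    #include <cassert>

    // Hypothetical stand-in for a lclVar GenTree node; illustration only.
    struct Node
    {
        bool     isLclVar;
        bool     isContained;
        unsigned lclNum;
        bool     lastUse; // models GTF_VAR_DEATH
    };

    // Sketch of the CheckAndMoveRMWLastUse logic over the simplified type.
    void moveLastUse(Node* fromTree, Node* toTree)
    {
        // Nothing to do unless 'fromTree' is a last-use lclVar.
        if ((fromTree == nullptr) || !fromTree->isLclVar || !fromTree->lastUse)
        {
            return;
        }
        // 'fromTree' must be contained and 'toTree' must be the same local.
        if (!fromTree->isContained || (toTree == nullptr) || !toTree->isLclVar ||
            (fromTree->lclNum != toTree->lclNum))
        {
            assert(!"Unmatched RMW indirections");
            return;
        }
        fromTree->lastUse = false; // clear the flag at the contained position...
        toTree->lastUse   = true;  // ...and re-mark it at the actual use point
    }

    int main()
    {
        Node load  = {/*isLclVar*/ true, /*isContained*/ true, /*lclNum*/ 1, /*lastUse*/ true};
        Node store = {true, false, 1, false};
        moveLastUse(&load, &store);
        assert(!load.lastUse && store.lastUse);
        return 0;
    }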
diff --git a/src/jit/lsrabuild.cpp b/src/jit/lsrabuild.cpp
index 62692d587a..195d1e8b11 100644
--- a/src/jit/lsrabuild.cpp
+++ b/src/jit/lsrabuild.cpp
@@ -1347,21 +1347,6 @@ void LinearScan::buildRefPositionsForNode(GenTree* tree, BasicBlock* block, Lsra
assert(varDsc->lvTracked);
unsigned varIndex = varDsc->lvVarIndex;
- // We have only approximate last-use information at this point. This is because the
- // execution order doesn't actually reflect the true order in which the localVars
- // are referenced - but the order of the RefPositions will, so we recompute it after
- // RefPositions are built.
- // Use the old value for setting currentLiveVars - note that we do this with the
- // not-quite-correct setting of lastUse. However, this is OK because
- // 1) this is only for preferencing, which doesn't require strict correctness, and
- // 2) the cases where these out-of-order uses occur should not overlap a kill.
- // TODO-Throughput: clean this up once we have the execution order correct. At that point
- // we can update currentLiveVars at the same place that we create the RefPosition.
- if ((tree->gtFlags & GTF_VAR_DEATH) != 0)
- {
- VarSetOps::RemoveElemD(compiler, currentLiveVars, varIndex);
- }
-
if (!tree->IsUnusedValue() && !tree->isContained())
{
assert(produce != 0);
@@ -1436,11 +1421,6 @@ void LinearScan::buildRefPositionsForNode(GenTree* tree, BasicBlock* block, Lsra
srcInterval->assignRelatedInterval(varDefInterval);
}
}
-
- if ((tree->gtFlags & GTF_VAR_DEATH) == 0)
- {
- VarSetOps::AddElemD(compiler, currentLiveVars, varIndex);
- }
}
else if (store->gtOp1->OperIs(GT_BITCAST))
{
@@ -1596,9 +1576,34 @@ void LinearScan::buildRefPositionsForNode(GenTree* tree, BasicBlock* block, Lsra
assert((candidates & allRegs(srcInterval->registerType)) != 0);
- // For non-localVar uses we record nothing, as nothing needs to be written back to the tree.
- GenTree* const refPosNode = srcInterval->isLocalVar ? useNode : nullptr;
- RefPosition* pos = newRefPosition(srcInterval, currentLoc, RefTypeUse, refPosNode, candidates, 0);
+ GenTree* refPosNode;
+ if (srcInterval->isLocalVar)
+ {
+ // We have only approximate last-use information at this point. This is because the
+ // execution order doesn't actually reflect the true order in which the localVars
+ // are referenced - but the order of the RefPositions will, so we recompute it after
+ // RefPositions are built.
+ // Use the old value for setting currentLiveVars - note that we do this with the
+ // not-quite-correct setting of lastUse. However, this is OK because
+ // 1) this is only for preferencing, which doesn't require strict correctness, and
+ // for determining which largeVectors require having their upper-half saved & restored.
+ // (Issue #17481 tracks the issue that this system results in excessive spills and
+ // should be changed.)
+ // 2) the cases where these out-of-order uses occur should not overlap a kill (they are
+ // only known to occur within a single expression).
+ if ((useNode->gtFlags & GTF_VAR_DEATH) != 0)
+ {
+ VarSetOps::RemoveElemD(compiler, currentLiveVars, srcInterval->getVarIndex(compiler));
+ }
+ refPosNode = useNode;
+ }
+ else
+ {
+ // For non-localVar uses we record nothing, as nothing needs to be written back to the tree.
+ refPosNode = nullptr;
+ }
+
+ RefPosition* pos = newRefPosition(srcInterval, currentLoc, RefTypeUse, refPosNode, candidates, 0);
if (delayRegFree)
{
pos->delayRegFree = true;
@@ -1736,6 +1741,11 @@ void LinearScan::buildRefPositionsForNode(GenTree* tree, BasicBlock* block, Lsra
else
{
assert(registerTypesEquivalent(interval->registerType, registerType));
+ assert(interval->isLocalVar);
+ if ((tree->gtFlags & GTF_VAR_DEATH) == 0)
+ {
+ VarSetOps::AddElemD(compiler, currentLiveVars, interval->getVarIndex(compiler));
+ }
}
if (prefSrcInterval != nullptr)
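
The net effect of the two lsrabuild.cpp hunks is that currentLiveVars is now maintained in RefPosition order rather than node order: a tracked variable leaves the set when its use RefPosition is built with the last-use flag, and re-enters it when a def RefPosition for a non-dying store is built. A minimal sketch of that discipline, assuming a plain std::bitset in place of VarSetOps (the helper names here are invented for illustration):

    #include <bitset>

    constexpr size_t kMaxTracked = 64; // assumed bound; VarSetOps is more general
    using VarSet = std::bitset<kMaxTracked>;

    // Called when a use RefPosition is created for a tracked lclVar.
    void onUseRefPosition(VarSet& currentLiveVars, unsigned varIndex, bool lastUse)
    {
        if (lastUse)
        {
            currentLiveVars.reset(varIndex); // the variable dies at this use
        }
    }

    // Called when a def RefPosition is created for a tracked lclVar store.
    void onDefRefPosition(VarSet& currentLiveVars, unsigned varIndex, bool isDeadStore)
    {
        if (!isDeadStore)
        {
            currentLiveVars.set(varIndex); // the definition makes it live again
        }
    }

    int main()
    {
        VarSet live;
        onDefRefPosition(live, 3, /*isDeadStore*/ false); // V03 becomes live
        onUseRefPosition(live, 3, /*lastUse*/ true);      // V03 dies at its last use
        return live.test(3) ? 1 : 0;
    }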
diff --git a/src/jit/lsraxarch.cpp b/src/jit/lsraxarch.cpp
index 09cc9ca96e..f72403d0e3 100644
--- a/src/jit/lsraxarch.cpp
+++ b/src/jit/lsraxarch.cpp
@@ -2515,7 +2515,8 @@ void LinearScan::BuildIndir(GenTreeIndir* indirTree)
// Because 'source' is contained, we haven't yet determined its special register requirements, if any.
// As it happens, the Shift or Rotate cases are the only ones with special requirements.
assert(source->isContained() && source->OperIsRMWMemOp());
- GenTree* nonMemSource = nullptr;
+ GenTree* nonMemSource = nullptr;
+ GenTreeIndir* otherIndir = nullptr;
if (source->OperIsShiftOrRotate())
{
@@ -2527,6 +2528,7 @@ void LinearScan::BuildIndir(GenTreeIndir* indirTree)
}
if (indirTree->AsStoreInd()->IsRMWDstOp1())
{
+ otherIndir = source->gtGetOp1()->AsIndir();
if (source->OperIsBinary())
{
nonMemSource = source->gtOp.gtOp2;
@@ -2534,8 +2536,20 @@ void LinearScan::BuildIndir(GenTreeIndir* indirTree)
}
else if (indirTree->AsStoreInd()->IsRMWDstOp2())
{
+ otherIndir = source->gtGetOp2()->AsIndir();
nonMemSource = source->gtOp.gtOp1;
}
+ if (otherIndir != nullptr)
+ {
+ // Any lclVars in the addressing mode of this indirection are contained.
+ // If they are marked as lastUse, transfer the last use flag to the store indir.
+ GenTree* base = otherIndir->Base();
+ GenTree* dstBase = indirTree->Base();
+ CheckAndMoveRMWLastUse(base, dstBase);
+ GenTree* index = otherIndir->Index();
+ GenTree* dstIndex = indirTree->Index();
+ CheckAndMoveRMWLastUse(index, dstIndex);
+ }
if (nonMemSource != nullptr)
{
assert(!nonMemSource->isContained() || (!nonMemSource->isMemoryOp() && !nonMemSource->IsLocal()));
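
For context on the shape being matched here: an RMW store has the form StoreInd(addr) = op(Ind(addr), value), with the load and its address operands contained under the store. A C++ source pattern that can lower to such an RMW memory op on xarch (illustrative only; actual code generation depends on the target and optimization settings):

    // '*p += v' may become a single read-modify-write instruction such as
    // 'add [rax], rcx'. If 'p' is a last-use lclVar, the death flag initially
    // sits on the address use under the contained load; the hunk above moves
    // it to the store's address, which is where the use actually happens in
    // RefPosition order.
    void rmwExample(long* p, long v)
    {
        *p += v;
    }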