author     Pat Gavlin <pgavlin@gmail.com>    2017-05-19 12:45:45 -0700
committer  GitHub <noreply@github.com>       2017-05-19 12:45:45 -0700
commit     f993896e20cb303deb7f5c4021901446f07dd7df (patch)
tree       20db5a9a22beeec20c627b673b1e78fc2e38c15b /src/jit/rationalize.cpp
parent     dde23243ab8ca1626d918a6143e7717c58246459 (diff)
Remove `GTF_REVERSE_OPS` from LIR. (#10698)
In HIR, this flag indicates that the second operand to a binary node will execute before the node's first operand. LIR, however, no longer determines ordering via use edges, so this flag only affects the order in which operands to a node are considered. The sole constraint on this use ordering is that for a given node, the same ordering must be used in liveness, LSRA, and the code generator; this is due to the correspondence between use ordering and spill/reload/last-use ordering. As a result, the reverse ops flag is unnecessary and rather unhelpful in LIR, causing little more than a bit of extra complexity throughout the backend.

This change removes `GTF_REVERSE_OPS` from LIR by clearing this flag during rationalize and verifying that it remains clear in `LIR::CheckLIR`. We could reuse this bit for an additional backend-specific purpose in the future with a bit more work in the checker.
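To make the HIR/LIR distinction concrete, here is a minimal sketch of what the flag means in HIR; `IsReverseOp`, `gtGetOp1`, and `gtGetOp2` are the real `GenTree` accessors, but the helper itself is hypothetical:

    // Hypothetical helper: in HIR, GTF_REVERSE_OPS on a binary node means
    // its second operand executes before its first.
    GenTree* firstOperandToExecute(GenTree* binOp)
    {
        assert(binOp->OperIsBinary());
        return binOp->IsReverseOp() ? binOp->gtGetOp2() : binOp->gtGetOp1();
    }

    // In LIR, by contrast, execution order is the explicit gtPrev/gtNext
    // chain, so the flag carries no ordering information there.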
Diffstat (limited to 'src/jit/rationalize.cpp')
-rw-r--r--  src/jit/rationalize.cpp | 18
1 file changed, 9 insertions(+), 9 deletions(-)
diff --git a/src/jit/rationalize.cpp b/src/jit/rationalize.cpp
index 1bc3a614a5..ff1d210134 100644
--- a/src/jit/rationalize.cpp
+++ b/src/jit/rationalize.cpp
@@ -503,7 +503,7 @@ void Rationalizer::RewriteAssignment(LIR::Use& use)
{
storeBlk = new (comp, GT_STORE_BLK) GenTreeBlk(GT_STORE_BLK, TYP_STRUCT, location, value, size);
}
- storeBlk->gtFlags |= (GTF_REVERSE_OPS | GTF_ASG);
+ storeBlk->gtFlags |= GTF_ASG;
storeBlk->gtFlags |= ((location->gtFlags | value->gtFlags) & GTF_ALL_EFFECT);
GenTree* insertionPoint = location->gtNext;
@@ -540,11 +540,6 @@ void Rationalizer::RewriteAssignment(LIR::Use& use)
copyFlags(store, assignment, GTF_ALL_EFFECT);
copyFlags(store, location, GTF_IND_FLAGS);
- if (assignment->IsReverseOp())
- {
- store->gtFlags |= GTF_REVERSE_OPS;
- }
-
// TODO: JIT dump
// Remove the GT_IND node and replace the assignment node with the store
@@ -582,7 +577,8 @@ void Rationalizer::RewriteAssignment(LIR::Use& use)
storeOper = GT_STORE_OBJ;
break;
case GT_DYN_BLK:
- storeOper = GT_STORE_DYN_BLK;
+ storeOper = GT_STORE_DYN_BLK;
+ storeBlk->AsDynBlk()->gtEvalSizeFirst = false;
break;
default:
unreached();
@@ -591,8 +587,8 @@ void Rationalizer::RewriteAssignment(LIR::Use& use)
GenTree::NodeName(storeOper));
storeBlk->SetOperRaw(storeOper);
storeBlk->gtFlags &= ~GTF_DONT_CSE;
- storeBlk->gtFlags |= (assignment->gtFlags & (GTF_ALL_EFFECT | GTF_REVERSE_OPS | GTF_BLK_VOLATILE |
- GTF_BLK_UNALIGNED | GTF_DONT_CSE));
+ storeBlk->gtFlags |=
+ (assignment->gtFlags & (GTF_ALL_EFFECT | GTF_BLK_VOLATILE | GTF_BLK_UNALIGNED | GTF_DONT_CSE));
storeBlk->gtBlk.Data() = value;
// Replace the assignment node with the store
@@ -683,9 +679,13 @@ Compiler::fgWalkResult Rationalizer::RewriteNode(GenTree** useEdge, ArrayStack<G
for (GenTree* prev = node->gtPrev; prev != nullptr && prev->OperIsAnyList() && !(prev->OperIsFieldListHead());
prev = node->gtPrev)
{
+ prev->gtFlags &= ~GTF_REVERSE_OPS;
BlockRange().Remove(prev);
}
+ // Now clear the REVERSE_OPS flag on the current node.
+ node->gtFlags &= ~GTF_REVERSE_OPS;
+
// In addition, remove the current node if it is a GT_LIST node that is not an aggregate.
if (node->OperIsAnyList())
{
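The verification half of this change lives in `LIR::CheckLIR` (in src/jit/lir.cpp, not part of this file's diff). A minimal sketch of the invariant it now enforces, assuming a walk over a block's LIR range:

    // Sketch only; the actual checker validates many other LIR invariants.
    for (GenTree* node : blockRange)
    {
        // Rationalization must have cleared GTF_REVERSE_OPS on every node.
        assert((node->gtFlags & GTF_REVERSE_OPS) == 0);
    }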