author    Steve MacLean <sdmaclea.qdt@qualcommdatacenter.com>  2017-09-12 15:45:58 -0400
committer Steve MacLean <sdmaclea.qdt@qualcommdatacenter.com>  2017-09-12 16:02:29 -0400
commit    6c5756a2d4d408f2fab2404bed3b472816beddd1 (patch)
tree      b19169b2ea5e043e296caa92594029521ac397b5 /src
parent    18ab3567e2ea6e2a385527bb247b0eec32a995cb (diff)
[Arm64] Enable LowerShift
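Editorial note (not part of the original commit message): this change moves LowerShift out of lowerxarch.cpp into the target-independent lower.cpp and enables it for ARM64, whose variable shift instructions (LSLV, LSRV, ASRV) use only the low 5 or 6 bits of the count register, just as the xarch shift instructions do. The pass strips a redundant AND from the shift-count operand. A hand-written equivalent of the source pattern it targets is sketched below; ShiftLeft64 is a hypothetical helper, not CoreCLR code.

// Hypothetical example of the pattern LowerShift recognizes. The explicit
// "& 63" keeps the C++ shift well defined, and it is exactly the AND node
// the lowering removes, since 64-bit hardware shifts already mask the count.
#include <cstdint>
#include <cstdio>

static uint64_t ShiftLeft64(uint64_t value, unsigned count)
{
    return value << (count & 63u);
}

int main()
{
    // 70 & 63 == 6, so this prints 64.
    std::printf("%llu\n", static_cast<unsigned long long>(ShiftLeft64(1, 70)));
    return 0;
}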
Diffstat (limited to 'src')
-rw-r--r--  src/jit/lower.cpp       49
-rw-r--r--  src/jit/lowerxarch.cpp  47
2 files changed, 48 insertions(+), 48 deletions(-)
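Editorial note: the moved copy also switches the 64-bit guard from _TARGET_AMD64_ to _TARGET_64BIT_ so that ARM64 (which defines _TARGET_64BIT_ but not _TARGET_AMD64_) selects the 0x3f mask for long shifts. The sketch below is a toy re-implementation of the peeling loop that the diff adds to lower.cpp, using a hypothetical Node type in place of GenTree and omitting the LIR bookkeeping (BlockRange().Remove, ClearContained). Under those assumptions it illustrates the idea: AND nodes whose constant mask already covers every count bit the hardware reads (0x1f for 32-bit shifts, 0x3f for 64-bit shifts) are dropped from the shift-count operand.

// Editorial sketch, not CoreCLR code; Node, Kind and LowerShiftSketch are
// hypothetical stand-ins for GenTree and Lowering::LowerShift.
#include <cassert>
#include <cstdint>
#include <cstdio>

struct Node
{
    enum Kind
    {
        Cns, // integer constant; 'value' holds it
        Var, // opaque leaf
        And, // op1 & op2
        Lsh  // op1 << op2
    };
    Kind    kind;
    int64_t value;
    Node*   op1;
    Node*   op2;
};

// Peel redundant AND masks off the shift-count operand (op2 of the shift).
void LowerShiftSketch(Node* shift, bool is64Bit)
{
    assert(shift->kind == Node::Lsh);
    const int64_t hwMask = is64Bit ? 0x3f : 0x1f;

    Node* count = shift->op2;
    while (count->kind == Node::And && count->op2->kind == Node::Cns &&
           (count->op2->value & hwMask) == hwMask)
    {
        count = count->op1; // the AND is redundant; the instruction masks the count anyway
    }
    shift->op2 = count;
}

int main()
{
    // Models "x << (n & 63)" for a 64-bit shift.
    Node n     = {Node::Var, 0, nullptr, nullptr};
    Node c63   = {Node::Cns, 63, nullptr, nullptr};
    Node maskN = {Node::And, 0, &n, &c63};
    Node x     = {Node::Var, 0, nullptr, nullptr};
    Node shl   = {Node::Lsh, 0, &x, &maskN};

    LowerShiftSketch(&shl, /* is64Bit */ true);
    std::printf("count operand is %s\n", shl.op2 == &n ? "the unmasked variable" : "still masked");
    return 0;
}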
diff --git a/src/jit/lower.cpp b/src/jit/lower.cpp
index 7c604053ec..5f93f79ac1 100644
--- a/src/jit/lower.cpp
+++ b/src/jit/lower.cpp
@@ -240,7 +240,7 @@ GenTree* Lowering::LowerNode(GenTree* node)
        case GT_LSH:
        case GT_RSH:
        case GT_RSZ:
-#ifdef _TARGET_XARCH_
+#if defined(_TARGET_XARCH_) || defined(_TARGET_ARM64_)
            LowerShift(node->AsOp());
#else
            ContainCheckShiftRotate(node->AsOp());
@@ -4521,6 +4521,53 @@ GenTree* Lowering::LowerSignedDivOrMod(GenTreePtr node)
    return next;
}
+//------------------------------------------------------------------------
+// LowerShift: Lower shift nodes
+//
+// Arguments:
+//    shift - the shift node (GT_LSH, GT_RSH or GT_RSZ)
+//
+// Notes:
+//    Remove unnecessary shift count masking; xarch and ARM64 shift instructions
+//    mask the shift count to 5 bits (or 6 bits for 64 bit operations).
+
+void Lowering::LowerShift(GenTreeOp* shift)
+{
+    assert(shift->OperIs(GT_LSH, GT_RSH, GT_RSZ));
+
+    size_t mask = 0x1f;
+#ifdef _TARGET_64BIT_
+    if (varTypeIsLong(shift->TypeGet()))
+    {
+        mask = 0x3f;
+    }
+#else
+    assert(!varTypeIsLong(shift->TypeGet()));
+#endif
+
+    for (GenTree* andOp = shift->gtGetOp2(); andOp->OperIs(GT_AND); andOp = andOp->gtGetOp1())
+    {
+        GenTree* maskOp = andOp->gtGetOp2();
+
+        if (!maskOp->IsCnsIntOrI())
+        {
+            break;
+        }
+
+        if ((static_cast<size_t>(maskOp->AsIntCon()->IconValue()) & mask) != mask)
+        {
+            break;
+        }
+
+        shift->gtOp2 = andOp->gtGetOp1();
+        BlockRange().Remove(andOp);
+        BlockRange().Remove(maskOp);
+        // The parent was replaced, clear contain and regOpt flag.
+        shift->gtOp2->ClearContained();
+    }
+    ContainCheckShiftRotate(shift);
+}
+
void Lowering::WidenSIMD12IfNecessary(GenTreeLclVarCommon* node)
{
#ifdef FEATURE_SIMD
diff --git a/src/jit/lowerxarch.cpp b/src/jit/lowerxarch.cpp
index 20a08750a6..3240711875 100644
--- a/src/jit/lowerxarch.cpp
+++ b/src/jit/lowerxarch.cpp
@@ -36,53 +36,6 @@ void Lowering::LowerRotate(GenTree* tree)
}
//------------------------------------------------------------------------
-// LowerShift: Lower shift nodes
-//
-// Arguments:
-//    shift - the shift node (GT_LSH, GT_RSH or GT_RSZ)
-//
-// Notes:
-//    Remove unnecessary shift count masking, xarch shift instructions
-//    mask the shift count to 5 bits (or 6 bits for 64 bit operations).
-
-void Lowering::LowerShift(GenTreeOp* shift)
-{
-    assert(shift->OperIs(GT_LSH, GT_RSH, GT_RSZ));
-
-    size_t mask = 0x1f;
-#ifdef _TARGET_AMD64_
-    if (varTypeIsLong(shift->TypeGet()))
-    {
-        mask = 0x3f;
-    }
-#else
-    assert(!varTypeIsLong(shift->TypeGet()));
-#endif
-
-    for (GenTree* andOp = shift->gtGetOp2(); andOp->OperIs(GT_AND); andOp = andOp->gtGetOp1())
-    {
-        GenTree* maskOp = andOp->gtGetOp2();
-
-        if (!maskOp->IsCnsIntOrI())
-        {
-            break;
-        }
-
-        if ((static_cast<size_t>(maskOp->AsIntCon()->IconValue()) & mask) != mask)
-        {
-            break;
-        }
-
-        shift->gtOp2 = andOp->gtGetOp1();
-        BlockRange().Remove(andOp);
-        BlockRange().Remove(maskOp);
-        // The parent was replaced, clear contain and regOpt flag.
-        shift->gtOp2->ClearContained();
-    }
-    ContainCheckShiftRotate(shift);
-}
-
-//------------------------------------------------------------------------
// LowerStoreLoc: Lower a store of a lclVar
//
// Arguments: