summary | refs | log | tree | commit | diff
path: root/src/jit/lsraxarch.cpp
diff options
context:
space:
mode:
author: Tanner Gooding <tagoo@outlook.com> 2018-12-06 17:15:03 -0800
committer: GitHub <noreply@github.com> 2018-12-06 17:15:03 -0800
commit1c18b3290b825e66e973e147eda8c7cca3e539c6 (patch)
tree82008995558179695040816cfdfca9727d7aa4e0 /src/jit/lsraxarch.cpp
parent61da68e56ab0b3dd4865d34aee0ca243cc702c09 (diff)
downloadcoreclr-1c18b3290b825e66e973e147eda8c7cca3e539c6.tar.gz
coreclr-1c18b3290b825e66e973e147eda8c7cca3e539c6.tar.bz2
coreclr-1c18b3290b825e66e973e147eda8c7cca3e539c6.zip
Moving CreateScalarUnsafe, ToScalar, Vector128.ToVector256Unsafe, and Vector256.GetLower to be intrinsics (#21351)
* Moving CreateScalarUnsafe, ToScalar, Vector128.ToVector256Unsafe, and Vector256.GetLower to be intrinsics * Adding containment support to the helper intrinsics
Diffstat (limited to 'src/jit/lsraxarch.cpp')
-rw-r--r--  src/jit/lsraxarch.cpp  54
1 files changed, 54 insertions, 0 deletions
diff --git a/src/jit/lsraxarch.cpp b/src/jit/lsraxarch.cpp
index c5cc71e222..774334c032 100644
--- a/src/jit/lsraxarch.cpp
+++ b/src/jit/lsraxarch.cpp
@@ -2372,6 +2372,60 @@ int LinearScan::BuildHWIntrinsic(GenTreeHWIntrinsic* intrinsicTree)
// must be handled within the case.
switch (intrinsicId)
{
+ case NI_Base_Vector128_CreateScalarUnsafe:
+ case NI_Base_Vector128_ToScalar:
+ case NI_Base_Vector256_CreateScalarUnsafe:
+ case NI_Base_Vector256_ToScalar:
+ {
+ assert(numArgs == 1);
+
+ if (varTypeIsFloating(baseType))
+ {
+ if (op1->isContained())
+ {
+ srcCount += BuildOperandUses(op1);
+ }
+ else
+ {
+ // We will either be in memory and need to be moved
+ // into a register of the appropriate size or we
+ // are already in an XMM/YMM register and can stay
+ // where we are.
+
+ tgtPrefUse = BuildUse(op1);
+ srcCount += 1;
+ }
+
+ buildUses = false;
+ }
+ break;
+ }
+
+ case NI_Base_Vector128_ToVector256:
+ case NI_Base_Vector128_ToVector256Unsafe:
+ case NI_Base_Vector256_GetLower:
+ {
+ assert(numArgs == 1);
+
+ if (op1->isContained())
+ {
+ srcCount += BuildOperandUses(op1);
+ }
+ else
+ {
+ // We will either be in memory and need to be moved
+ // into a register of the appropriate size or we
+ // are already in an XMM/YMM register and can stay
+ // where we are.
+
+ tgtPrefUse = BuildUse(op1);
+ srcCount += 1;
+ }
+
+ buildUses = false;
+ break;
+ }
+
case NI_SSE_CompareEqualOrderedScalar:
case NI_SSE_CompareEqualUnorderedScalar:
case NI_SSE_CompareNotEqualOrderedScalar: