summaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
authorMike Danes <onemihaid@hotmail.com>2018-04-23 18:17:52 +0300
committerMike Danes <onemihaid@hotmail.com>2018-11-10 19:00:12 +0200
commit3b24ec21079c2c45c0186586ef17632e85875e39 (patch)
tree327189c5a46465b73097217133fd304900524098 /src
parent54fba14273fe9b6f53d68c98920f0ee1ecb2dd04 (diff)
downloadcoreclr-3b24ec21079c2c45c0186586ef17632e85875e39.tar.gz
coreclr-3b24ec21079c2c45c0186586ef17632e85875e39.tar.bz2
coreclr-3b24ec21079c2c45c0186586ef17632e85875e39.zip
Introduce GenCondition
Diffstat (limited to 'src')
-rw-r--r--src/jit/codegenarmarch.cpp59
-rw-r--r--src/jit/codegenlinear.h395
-rw-r--r--src/jit/codegenxarch.cpp57
-rw-r--r--src/jit/gentree.cpp4
-rw-r--r--src/jit/gentree.h212
-rw-r--r--src/jit/lower.cpp28
-rw-r--r--src/jit/lowerxarch.cpp7
7 files changed, 719 insertions, 43 deletions
diff --git a/src/jit/codegenarmarch.cpp b/src/jit/codegenarmarch.cpp
index bba8c6a1de..0180e729d9 100644
--- a/src/jit/codegenarmarch.cpp
+++ b/src/jit/codegenarmarch.cpp
@@ -3313,6 +3313,47 @@ void CodeGen::genCodeForJumpTrue(GenTree* tree)
}
}
+// clang-format off
+const CodeGen::GenConditionDesc CodeGen::GenConditionDesc::map[32]
+{
+ { }, // NONE
+ { }, // 1
+ { EJ_lt }, // SLT
+ { EJ_le }, // SLE
+ { EJ_ge }, // SGE
+ { EJ_gt }, // SGT
+ { EJ_mi }, // S
+ { EJ_pl }, // NS
+
+ { EJ_eq }, // EQ
+ { EJ_ne }, // NE
+ { EJ_lo }, // ULT
+ { EJ_ls }, // ULE
+ { EJ_hs }, // UGE
+ { EJ_hi }, // UGT
+ { EJ_hs }, // C
+ { EJ_lo }, // NC
+
+ { }, // FEQ
+ { }, // FNE
+ { }, // FLT
+ { }, // FLE
+ { }, // FGE
+ { }, // FGT
+ { EJ_vs }, // O
+ { EJ_vc }, // NO
+
+ { }, // FEQU
+ { }, // FNEU
+ { }, // FLTU
+ { }, // FLEU
+ { }, // FGEU
+ { }, // FGTU
+ { }, // P
+ { }, // NP
+};
+// clang-format on
+
//------------------------------------------------------------------------
// genCodeForJcc: Produce code for a GT_JCC node.
//
@@ -3323,10 +3364,9 @@ void CodeGen::genCodeForJcc(GenTreeCC* tree)
{
assert(compiler->compCurBB->bbJumpKind == BBJ_COND);
- CompareKind compareKind = ((tree->gtFlags & GTF_UNSIGNED) != 0) ? CK_UNSIGNED : CK_SIGNED;
- emitJumpKind jumpKind = genJumpKindForOper(tree->gtCondition, compareKind);
+ const GenConditionDesc& desc = GenConditionDesc::Get(tree->gtCondition);
- inst_JMP(jumpKind, compiler->compCurBB->bbJumpDest);
+ inst_JMP(desc.jumpKind, compiler->compCurBB->bbJumpDest);
}
//------------------------------------------------------------------------
@@ -3344,16 +3384,13 @@ void CodeGen::genCodeForJcc(GenTreeCC* tree)
void CodeGen::genCodeForSetcc(GenTreeCC* setcc)
{
- regNumber dstReg = setcc->gtRegNum;
- CompareKind compareKind = setcc->IsUnsigned() ? CK_UNSIGNED : CK_SIGNED;
- emitJumpKind jumpKind = genJumpKindForOper(setcc->gtCondition, compareKind);
-
+ regNumber dstReg = setcc->gtRegNum;
assert(genIsValidIntReg(dstReg));
- // Make sure nobody is setting GTF_RELOP_NAN_UN on this node as it is ignored.
- assert((setcc->gtFlags & GTF_RELOP_NAN_UN) == 0);
+
+ const GenConditionDesc& desc = GenConditionDesc::Get(setcc->gtCondition);
#ifdef _TARGET_ARM64_
- inst_SET(jumpKind, dstReg);
+ inst_SET(desc.jumpKind, dstReg);
#else
// Emit code like that:
// ...
@@ -3366,7 +3403,7 @@ void CodeGen::genCodeForSetcc(GenTreeCC* setcc)
// ...
BasicBlock* labelTrue = genCreateTempLabel();
- getEmitter()->emitIns_J(emitter::emitJumpKindToIns(jumpKind), labelTrue);
+ getEmitter()->emitIns_J(emitter::emitJumpKindToIns(desc.jumpKind), labelTrue);
getEmitter()->emitIns_R_I(INS_mov, emitActualTypeSize(setcc->TypeGet()), dstReg, 0);
diff --git a/src/jit/codegenlinear.h b/src/jit/codegenlinear.h
new file mode 100644
index 0000000000..d9e24f7fcd
--- /dev/null
+++ b/src/jit/codegenlinear.h
@@ -0,0 +1,395 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+//
+// This file contains the members of CodeGen that are defined and used
+// only by the RyuJIT backend. It is included by CodeGen.h in the
+// definition of the CodeGen class.
+//
+
+void genSetRegToConst(regNumber targetReg, var_types targetType, GenTree* tree);
+void genCodeForTreeNode(GenTree* treeNode);
+void genCodeForBinary(GenTree* treeNode);
+
+#if defined(_TARGET_X86_)
+void genCodeForLongUMod(GenTreeOp* node);
+#endif // _TARGET_X86_
+
+void genCodeForDivMod(GenTreeOp* treeNode);
+void genCodeForMul(GenTreeOp* treeNode);
+void genCodeForMulHi(GenTreeOp* treeNode);
+void genLeaInstruction(GenTreeAddrMode* lea);
+void genSetRegToCond(regNumber dstReg, GenTree* tree);
+
+#if defined(_TARGET_ARMARCH_)
+void genScaledAdd(emitAttr attr, regNumber targetReg, regNumber baseReg, regNumber indexReg, int scale);
+#endif // _TARGET_ARMARCH_
+
+#if defined(_TARGET_ARM_)
+void genCodeForMulLong(GenTreeMultiRegOp* treeNode);
+#endif // _TARGET_ARM_
+
+#if !defined(_TARGET_64BIT_)
+void genLongToIntCast(GenTree* treeNode);
+#endif
+
+void genIntToIntCast(GenTree* treeNode);
+void genFloatToFloatCast(GenTree* treeNode);
+void genFloatToIntCast(GenTree* treeNode);
+void genIntToFloatCast(GenTree* treeNode);
+void genCkfinite(GenTree* treeNode);
+void genCodeForCompare(GenTreeOp* tree);
+void genIntrinsic(GenTree* treeNode);
+void genPutArgStk(GenTreePutArgStk* treeNode);
+void genPutArgReg(GenTreeOp* tree);
+#if FEATURE_ARG_SPLIT
+void genPutArgSplit(GenTreePutArgSplit* treeNode);
+#endif // FEATURE_ARG_SPLIT
+
+#if defined(_TARGET_XARCH_)
+unsigned getBaseVarForPutArgStk(GenTree* treeNode);
+#endif // _TARGET_XARCH_
+
+unsigned getFirstArgWithStackSlot();
+
+void genCompareFloat(GenTree* treeNode);
+void genCompareInt(GenTree* treeNode);
+
+#ifdef FEATURE_SIMD
+enum SIMDScalarMoveType
+{
+ SMT_ZeroInitUpper, // zero initialize target upper bits
+ SMT_ZeroInitUpper_SrcHasUpperZeros, // zero initialize target upper bits; source upper bits are known to be zero
+ SMT_PreserveUpper // preserve target upper bits
+};
+
+#ifdef _TARGET_ARM64_
+insOpts genGetSimdInsOpt(emitAttr size, var_types elementType);
+#endif
+instruction getOpForSIMDIntrinsic(SIMDIntrinsicID intrinsicId, var_types baseType, unsigned* ival = nullptr);
+void genSIMDScalarMove(
+ var_types targetType, var_types type, regNumber target, regNumber src, SIMDScalarMoveType moveType);
+void genSIMDZero(var_types targetType, var_types baseType, regNumber targetReg);
+void genSIMDIntrinsicInit(GenTreeSIMD* simdNode);
+void genSIMDIntrinsicInitN(GenTreeSIMD* simdNode);
+void genSIMDIntrinsicInitArray(GenTreeSIMD* simdNode);
+void genSIMDIntrinsicUnOp(GenTreeSIMD* simdNode);
+void genSIMDIntrinsicBinOp(GenTreeSIMD* simdNode);
+void genSIMDIntrinsicRelOp(GenTreeSIMD* simdNode);
+void genSIMDIntrinsicDotProduct(GenTreeSIMD* simdNode);
+void genSIMDIntrinsicSetItem(GenTreeSIMD* simdNode);
+void genSIMDIntrinsicGetItem(GenTreeSIMD* simdNode);
+void genSIMDIntrinsicShuffleSSE2(GenTreeSIMD* simdNode);
+void genSIMDIntrinsicUpperSave(GenTreeSIMD* simdNode);
+void genSIMDIntrinsicUpperRestore(GenTreeSIMD* simdNode);
+void genSIMDLo64BitConvert(SIMDIntrinsicID intrinsicID,
+ var_types simdType,
+ var_types baseType,
+ regNumber tmpReg,
+ regNumber tmpIntReg,
+ regNumber targetReg);
+void genSIMDIntrinsic32BitConvert(GenTreeSIMD* simdNode);
+void genSIMDIntrinsic64BitConvert(GenTreeSIMD* simdNode);
+void genSIMDIntrinsicNarrow(GenTreeSIMD* simdNode);
+void genSIMDExtractUpperHalf(GenTreeSIMD* simdNode, regNumber srcReg, regNumber tgtReg);
+void genSIMDIntrinsicWiden(GenTreeSIMD* simdNode);
+void genSIMDIntrinsic(GenTreeSIMD* simdNode);
+void genSIMDCheck(GenTree* treeNode);
+
+// TYP_SIMD12 (i.e Vector3 of size 12 bytes) is not a hardware supported size and requires
+// two reads/writes on 64-bit targets. These routines abstract reading/writing of Vector3
+// values through an indirection. Note that Vector3 locals allocated on stack would have
+// their size rounded to TARGET_POINTER_SIZE (which is 8 bytes on 64-bit targets) and hence
+// Vector3 locals could be treated as TYP_SIMD16 while reading/writing.
+void genStoreIndTypeSIMD12(GenTree* treeNode);
+void genLoadIndTypeSIMD12(GenTree* treeNode);
+void genStoreLclTypeSIMD12(GenTree* treeNode);
+void genLoadLclTypeSIMD12(GenTree* treeNode);
+#ifdef _TARGET_X86_
+void genStoreSIMD12ToStack(regNumber operandReg, regNumber tmpReg);
+void genPutArgStkSIMD12(GenTree* treeNode);
+#endif // _TARGET_X86_
+#endif // FEATURE_SIMD
+
+#ifdef FEATURE_HW_INTRINSICS
+void genHWIntrinsic(GenTreeHWIntrinsic* node);
+#if defined(_TARGET_XARCH_)
+void genHWIntrinsic_R_RM(GenTreeHWIntrinsic* node, instruction ins, emitAttr attr);
+void genHWIntrinsic_R_RM_I(GenTreeHWIntrinsic* node, instruction ins, int8_t ival);
+void genHWIntrinsic_R_R_RM(GenTreeHWIntrinsic* node, instruction ins);
+void genHWIntrinsic_R_R_RM_I(GenTreeHWIntrinsic* node, instruction ins, int8_t ival);
+void genHWIntrinsic_R_R_RM_R(GenTreeHWIntrinsic* node, instruction ins);
+void genHWIntrinsic_R_R_R_RM(
+ instruction ins, emitAttr attr, regNumber targetReg, regNumber op1Reg, regNumber op2Reg, GenTree* op3);
+void genSSEIntrinsic(GenTreeHWIntrinsic* node);
+void genSSE2Intrinsic(GenTreeHWIntrinsic* node);
+void genSSE41Intrinsic(GenTreeHWIntrinsic* node);
+void genSSE42Intrinsic(GenTreeHWIntrinsic* node);
+void genAvxOrAvx2Intrinsic(GenTreeHWIntrinsic* node);
+void genAESIntrinsic(GenTreeHWIntrinsic* node);
+void genBMI1Intrinsic(GenTreeHWIntrinsic* node);
+void genBMI2Intrinsic(GenTreeHWIntrinsic* node);
+void genFMAIntrinsic(GenTreeHWIntrinsic* node);
+void genLZCNTIntrinsic(GenTreeHWIntrinsic* node);
+void genPCLMULQDQIntrinsic(GenTreeHWIntrinsic* node);
+void genPOPCNTIntrinsic(GenTreeHWIntrinsic* node);
+template <typename HWIntrinsicSwitchCaseBody>
+void genHWIntrinsicJumpTableFallback(NamedIntrinsic intrinsic,
+ regNumber nonConstImmReg,
+ regNumber baseReg,
+ regNumber offsReg,
+ HWIntrinsicSwitchCaseBody emitSwCase);
+#endif // defined(_TARGET_XARCH_)
+#if defined(_TARGET_ARM64_)
+instruction getOpForHWIntrinsic(GenTreeHWIntrinsic* node, var_types instrType);
+void genHWIntrinsicUnaryOp(GenTreeHWIntrinsic* node);
+void genHWIntrinsicCrcOp(GenTreeHWIntrinsic* node);
+void genHWIntrinsicSimdBinaryOp(GenTreeHWIntrinsic* node);
+void genHWIntrinsicSimdExtractOp(GenTreeHWIntrinsic* node);
+void genHWIntrinsicSimdInsertOp(GenTreeHWIntrinsic* node);
+void genHWIntrinsicSimdSelectOp(GenTreeHWIntrinsic* node);
+void genHWIntrinsicSimdSetAllOp(GenTreeHWIntrinsic* node);
+void genHWIntrinsicSimdUnaryOp(GenTreeHWIntrinsic* node);
+void genHWIntrinsicSimdBinaryRMWOp(GenTreeHWIntrinsic* node);
+void genHWIntrinsicSimdTernaryRMWOp(GenTreeHWIntrinsic* node);
+void genHWIntrinsicShaHashOp(GenTreeHWIntrinsic* node);
+void genHWIntrinsicShaRotateOp(GenTreeHWIntrinsic* node);
+template <typename HWIntrinsicSwitchCaseBody>
+void genHWIntrinsicSwitchTable(regNumber swReg, regNumber tmpReg, int swMax, HWIntrinsicSwitchCaseBody emitSwCase);
+#endif // defined(_TARGET_ARM64_)
+#endif // FEATURE_HW_INTRINSICS
+
+#if !defined(_TARGET_64BIT_)
+
+// CodeGen for Long Ints
+
+void genStoreLongLclVar(GenTree* treeNode);
+
+#endif // !defined(_TARGET_64BIT_)
+
+void genProduceReg(GenTree* tree);
+void genUnspillRegIfNeeded(GenTree* tree);
+regNumber genConsumeReg(GenTree* tree);
+void genCopyRegIfNeeded(GenTree* tree, regNumber needReg);
+void genConsumeRegAndCopy(GenTree* tree, regNumber needReg);
+
+void genConsumeIfReg(GenTree* tree)
+{
+ if (!tree->isContained())
+ {
+ (void)genConsumeReg(tree);
+ }
+}
+
+void genRegCopy(GenTree* tree);
+void genTransferRegGCState(regNumber dst, regNumber src);
+void genConsumeAddress(GenTree* addr);
+void genConsumeAddrMode(GenTreeAddrMode* mode);
+void genSetBlockSize(GenTreeBlk* blkNode, regNumber sizeReg);
+void genConsumeBlockSrc(GenTreeBlk* blkNode);
+void genSetBlockSrc(GenTreeBlk* blkNode, regNumber srcReg);
+void genConsumeBlockOp(GenTreeBlk* blkNode, regNumber dstReg, regNumber srcReg, regNumber sizeReg);
+
+#ifdef FEATURE_PUT_STRUCT_ARG_STK
+void genConsumePutStructArgStk(GenTreePutArgStk* putArgStkNode, regNumber dstReg, regNumber srcReg, regNumber sizeReg);
+#endif // FEATURE_PUT_STRUCT_ARG_STK
+#if FEATURE_ARG_SPLIT
+void genConsumeArgSplitStruct(GenTreePutArgSplit* putArgNode);
+#endif // FEATURE_ARG_SPLIT
+
+void genConsumeRegs(GenTree* tree);
+void genConsumeOperands(GenTreeOp* tree);
+void genEmitGSCookieCheck(bool pushReg);
+void genSetRegToIcon(regNumber reg, ssize_t val, var_types type = TYP_INT, insFlags flags = INS_FLAGS_DONT_CARE);
+void genCodeForShift(GenTree* tree);
+
+#if defined(_TARGET_X86_) || defined(_TARGET_ARM_)
+void genCodeForShiftLong(GenTree* tree);
+#endif
+
+#ifdef _TARGET_XARCH_
+void genCodeForShiftRMW(GenTreeStoreInd* storeInd);
+void genCodeForBT(GenTreeOp* bt);
+#endif // _TARGET_XARCH_
+
+void genCodeForCast(GenTreeOp* tree);
+void genCodeForLclAddr(GenTree* tree);
+void genCodeForIndexAddr(GenTreeIndexAddr* tree);
+void genCodeForIndir(GenTreeIndir* tree);
+void genCodeForNegNot(GenTree* tree);
+void genCodeForLclVar(GenTreeLclVar* tree);
+void genCodeForLclFld(GenTreeLclFld* tree);
+void genCodeForStoreLclFld(GenTreeLclFld* tree);
+void genCodeForStoreLclVar(GenTreeLclVar* tree);
+void genCodeForReturnTrap(GenTreeOp* tree);
+void genCodeForJcc(GenTreeCC* tree);
+void genCodeForSetcc(GenTreeCC* setcc);
+void genCodeForStoreInd(GenTreeStoreInd* tree);
+void genCodeForSwap(GenTreeOp* tree);
+void genCodeForCpObj(GenTreeObj* cpObjNode);
+void genCodeForCpBlk(GenTreeBlk* cpBlkNode);
+void genCodeForCpBlkRepMovs(GenTreeBlk* cpBlkNode);
+void genCodeForCpBlkUnroll(GenTreeBlk* cpBlkNode);
+void genCodeForPhysReg(GenTreePhysReg* tree);
+void genCodeForNullCheck(GenTreeOp* tree);
+void genCodeForCmpXchg(GenTreeCmpXchg* tree);
+
+void genAlignStackBeforeCall(GenTreePutArgStk* putArgStk);
+void genAlignStackBeforeCall(GenTreeCall* call);
+void genRemoveAlignmentAfterCall(GenTreeCall* call, unsigned bias = 0);
+
+#if defined(UNIX_X86_ABI)
+
+unsigned curNestedAlignment; // Keep track of alignment adjustment required during codegen.
+unsigned maxNestedAlignment; // The maximum amount of alignment adjustment required.
+
+void SubtractNestedAlignment(unsigned adjustment)
+{
+ assert(curNestedAlignment >= adjustment);
+ unsigned newNestedAlignment = curNestedAlignment - adjustment;
+ if (curNestedAlignment != newNestedAlignment)
+ {
+ JITDUMP("Adjusting stack nested alignment from %d to %d\n", curNestedAlignment, newNestedAlignment);
+ }
+ curNestedAlignment = newNestedAlignment;
+}
+
+void AddNestedAlignment(unsigned adjustment)
+{
+ unsigned newNestedAlignment = curNestedAlignment + adjustment;
+ if (curNestedAlignment != newNestedAlignment)
+ {
+ JITDUMP("Adjusting stack nested alignment from %d to %d\n", curNestedAlignment, newNestedAlignment);
+ }
+ curNestedAlignment = newNestedAlignment;
+
+ if (curNestedAlignment > maxNestedAlignment)
+ {
+ JITDUMP("Max stack nested alignment changed from %d to %d\n", maxNestedAlignment, curNestedAlignment);
+ maxNestedAlignment = curNestedAlignment;
+ }
+}
+
+#endif
+
+#ifndef _TARGET_X86_
+void genPutArgStkFieldList(GenTreePutArgStk* putArgStk, unsigned outArgVarNum);
+#endif // !_TARGET_X86_
+
+#ifdef FEATURE_PUT_STRUCT_ARG_STK
+#ifdef _TARGET_X86_
+bool genAdjustStackForPutArgStk(GenTreePutArgStk* putArgStk);
+void genPushReg(var_types type, regNumber srcReg);
+void genPutArgStkFieldList(GenTreePutArgStk* putArgStk);
+#endif // _TARGET_X86_
+
+void genPutStructArgStk(GenTreePutArgStk* treeNode);
+
+unsigned genMove8IfNeeded(unsigned size, regNumber tmpReg, GenTree* srcAddr, unsigned offset);
+unsigned genMove4IfNeeded(unsigned size, regNumber tmpReg, GenTree* srcAddr, unsigned offset);
+unsigned genMove2IfNeeded(unsigned size, regNumber tmpReg, GenTree* srcAddr, unsigned offset);
+unsigned genMove1IfNeeded(unsigned size, regNumber tmpReg, GenTree* srcAddr, unsigned offset);
+void genStructPutArgRepMovs(GenTreePutArgStk* putArgStkNode);
+void genStructPutArgUnroll(GenTreePutArgStk* putArgStkNode);
+void genStoreRegToStackArg(var_types type, regNumber reg, int offset);
+#endif // FEATURE_PUT_STRUCT_ARG_STK
+
+void genCodeForLoadOffset(instruction ins, emitAttr size, regNumber dst, GenTree* base, unsigned offset);
+void genCodeForStoreOffset(instruction ins, emitAttr size, regNumber src, GenTree* base, unsigned offset);
+
+#ifdef _TARGET_ARM64_
+void genCodeForLoadPairOffset(regNumber dst, regNumber dst2, GenTree* base, unsigned offset);
+void genCodeForStorePairOffset(regNumber src, regNumber src2, GenTree* base, unsigned offset);
+#endif // _TARGET_ARM64_
+
+void genCodeForStoreBlk(GenTreeBlk* storeBlkNode);
+void genCodeForInitBlk(GenTreeBlk* initBlkNode);
+void genCodeForInitBlkRepStos(GenTreeBlk* initBlkNode);
+void genCodeForInitBlkUnroll(GenTreeBlk* initBlkNode);
+void genJumpTable(GenTree* tree);
+void genTableBasedSwitch(GenTree* tree);
+void genCodeForArrIndex(GenTreeArrIndex* treeNode);
+void genCodeForArrOffset(GenTreeArrOffs* treeNode);
+instruction genGetInsForOper(genTreeOps oper, var_types type);
+bool genEmitOptimizedGCWriteBarrier(GCInfo::WriteBarrierForm writeBarrierForm, GenTree* addr, GenTree* data);
+void genCallInstruction(GenTreeCall* call);
+void genJmpMethod(GenTree* jmp);
+BasicBlock* genCallFinally(BasicBlock* block);
+void genCodeForJumpTrue(GenTree* tree);
+#ifdef _TARGET_ARM64_
+void genCodeForJumpCompare(GenTreeOp* tree);
+#endif // _TARGET_ARM64_
+
+#if FEATURE_EH_FUNCLETS
+void genEHCatchRet(BasicBlock* block);
+#else // !FEATURE_EH_FUNCLETS
+void genEHFinallyOrFilterRet(BasicBlock* block);
+#endif // !FEATURE_EH_FUNCLETS
+
+void genMultiRegCallStoreToLocal(GenTree* treeNode);
+
+// Deals with codegen for multi-register struct returns.
+bool isStructReturn(GenTree* treeNode);
+void genStructReturn(GenTree* treeNode);
+
+#if defined(_TARGET_X86_) || defined(_TARGET_ARM_)
+void genLongReturn(GenTree* treeNode);
+#endif // _TARGET_X86_ || _TARGET_ARM_
+
+#if defined(_TARGET_X86_)
+void genFloatReturn(GenTree* treeNode);
+#endif // _TARGET_X86_
+
+#if defined(_TARGET_ARM64_)
+void genSimpleReturn(GenTree* treeNode);
+#endif // _TARGET_ARM64_
+
+void genReturn(GenTree* treeNode);
+
+void genLclHeap(GenTree* tree);
+
+bool genIsRegCandidateLocal(GenTree* tree)
+{
+ if (!tree->IsLocal())
+ {
+ return false;
+ }
+ const LclVarDsc* varDsc = &compiler->lvaTable[tree->gtLclVarCommon.gtLclNum];
+ return (varDsc->lvIsRegCandidate());
+}
+
+#ifdef FEATURE_PUT_STRUCT_ARG_STK
+#ifdef _TARGET_X86_
+bool m_pushStkArg;
+#else // !_TARGET_X86_
+unsigned m_stkArgVarNum;
+unsigned m_stkArgOffset;
+#endif // !_TARGET_X86_
+#endif // FEATURE_PUT_STRUCT_ARG_STK
+
+#ifdef DEBUG
+GenTree* lastConsumedNode;
+void genNumberOperandUse(GenTree* const operand, int& useNum) const;
+void genCheckConsumeNode(GenTree* const node);
+#else // !DEBUG
+inline void genCheckConsumeNode(GenTree* treeNode)
+{
+}
+#endif // DEBUG
+
+struct GenConditionDesc
+{
+ emitJumpKind jumpKind;
+
+ static const GenConditionDesc& Get(GenCondition condition)
+ {
+ assert(condition.GetCode() < _countof(map));
+ const GenConditionDesc& desc = map[condition.GetCode()];
+ assert(desc.jumpKind != EJ_NONE);
+ return desc;
+ }
+
+private:
+ static const GenConditionDesc map[32];
+};
diff --git a/src/jit/codegenxarch.cpp b/src/jit/codegenxarch.cpp
index e81f5bab03..0ff3608258 100644
--- a/src/jit/codegenxarch.cpp
+++ b/src/jit/codegenxarch.cpp
@@ -1432,6 +1432,47 @@ void CodeGen::genCodeForJumpTrue(GenTree* tree)
}
}
+// clang-format off
+const CodeGen::GenConditionDesc CodeGen::GenConditionDesc::map[32]
+{
+ { }, // NONE
+ { }, // 1
+ { EJ_jl }, // SLT
+ { EJ_jle }, // SLE
+ { EJ_jge }, // SGE
+ { EJ_jg }, // SGT
+ { EJ_js }, // S
+ { EJ_jns }, // NS
+
+ { EJ_je }, // EQ
+ { EJ_jne }, // NE
+ { EJ_jb }, // ULT
+ { EJ_jbe }, // ULE
+ { EJ_jae }, // UGE
+ { EJ_ja }, // UGT
+ { EJ_jb }, // C
+ { EJ_jae }, // NC
+
+ { }, // FEQ
+ { }, // FNE
+ { }, // FLT
+ { }, // FLE
+ { }, // FGE
+ { }, // FGT
+ { EJ_jo }, // O
+ { EJ_jno }, // NO
+
+ { }, // FEQU
+ { }, // FNEU
+ { }, // FLTU
+ { }, // FLEU
+ { }, // FGEU
+ { }, // FGTU
+ { EJ_jpe }, // P
+ { EJ_jpo }, // NP
+};
+// clang-format on
+
//------------------------------------------------------------------------
// genCodeForJcc: Produce code for a GT_JCC node.
//
@@ -1442,10 +1483,9 @@ void CodeGen::genCodeForJcc(GenTreeCC* tree)
{
assert(compiler->compCurBB->bbJumpKind == BBJ_COND);
- CompareKind compareKind = ((tree->gtFlags & GTF_UNSIGNED) != 0) ? CK_UNSIGNED : CK_SIGNED;
- emitJumpKind jumpKind = genJumpKindForOper(tree->gtCondition, compareKind);
+ const GenConditionDesc& desc = GenConditionDesc::Get(tree->gtCondition);
- inst_JMP(jumpKind, compiler->compCurBB->bbJumpDest);
+ inst_JMP(desc.jumpKind, compiler->compCurBB->bbJumpDest);
}
//------------------------------------------------------------------------
@@ -1463,15 +1503,12 @@ void CodeGen::genCodeForJcc(GenTreeCC* tree)
void CodeGen::genCodeForSetcc(GenTreeCC* setcc)
{
- regNumber dstReg = setcc->gtRegNum;
- CompareKind compareKind = setcc->IsUnsigned() ? CK_UNSIGNED : CK_SIGNED;
- emitJumpKind jumpKind = genJumpKindForOper(setcc->gtCondition, compareKind);
-
+ regNumber dstReg = setcc->gtRegNum;
assert(genIsValidIntReg(dstReg) && isByteReg(dstReg));
- // Make sure nobody is setting GTF_RELOP_NAN_UN on this node as it is ignored.
- assert((setcc->gtFlags & GTF_RELOP_NAN_UN) == 0);
- inst_SET(jumpKind, dstReg);
+ const GenConditionDesc& desc = GenConditionDesc::Get(setcc->gtCondition);
+
+ inst_SET(desc.jumpKind, dstReg);
inst_RV_RV(ins_Move_Extend(TYP_UBYTE, true), dstReg, dstReg, TYP_UBYTE, emitTypeSize(TYP_UBYTE));
genProduceReg(setcc);
}
diff --git a/src/jit/gentree.cpp b/src/jit/gentree.cpp
index 99109db1e1..780671fd97 100644
--- a/src/jit/gentree.cpp
+++ b/src/jit/gentree.cpp
@@ -2303,7 +2303,7 @@ GenTree* Compiler::gtReverseCond(GenTree* tree)
else if (tree->OperIs(GT_JCC, GT_SETCC))
{
GenTreeCC* cc = tree->AsCC();
- cc->gtCondition = GenTree::ReverseRelop(cc->gtCondition);
+ cc->gtCondition = GenCondition::Reverse(cc->gtCondition);
}
else if (tree->OperIs(GT_JCMP))
{
@@ -10349,7 +10349,7 @@ void Compiler::gtDispLeaf(GenTree* tree, IndentStack* indentStack)
case GT_JCC:
case GT_SETCC:
- printf(" cond=%s", GenTree::OpName(tree->AsCC()->gtCondition));
+ printf(" cond=%s", tree->AsCC()->gtCondition.Name());
break;
case GT_JCMP:
printf(" cond=%s%s", (tree->gtFlags & GTF_JCMP_TST) ? "TEST_" : "",
diff --git a/src/jit/gentree.h b/src/jit/gentree.h
index c6c5841e98..099e399c10 100644
--- a/src/jit/gentree.h
+++ b/src/jit/gentree.h
@@ -5663,17 +5663,223 @@ struct GenTreeRuntimeLookup final : public GenTreeUnOp
}
};
+// Represents the condition of a GT_JCC or GT_SETCC node.
+
+struct GenCondition
+{
+ // clang-format off
+ enum Code : unsigned char
+ {
+ OperMask = 7,
+ Unsigned = 8,
+ Unordered = Unsigned,
+ Float = 16,
+
+ // 0 would be the encoding of "signed EQ" but since equality is sign insensitive
+ // we'll use 0 as invalid/uninitialized condition code. This will also leave 1
+ // as a spare code.
+ NONE = 0,
+
+ SLT = 2,
+ SLE = 3,
+ SGE = 4,
+ SGT = 5,
+ S = 6,
+ NS = 7,
+
+ EQ = Unsigned | 0, // = 8
+ NE = Unsigned | 1, // = 9
+ ULT = Unsigned | SLT, // = 10
+ ULE = Unsigned | SLE, // = 11
+ UGE = Unsigned | SGE, // = 12
+ UGT = Unsigned | SGT, // = 13
+ C = Unsigned | S, // = 14
+ NC = Unsigned | NS, // = 15
+
+ FEQ = Float | EQ, // = 16
+ FNE = Float | NE, // = 17
+ FLT = Float | SLT, // = 18
+ FLE = Float | SLE, // = 19
+ FGE = Float | SGE, // = 20
+ FGT = Float | SGT, // = 21
+ O = Float | S, // = 22
+ NO = Float | NS, // = 23
+
+ FEQU = Unordered | FEQ, // = 24
+ FNEU = Unordered | FNE, // = 25
+ FLTU = Unordered | FLT, // = 26
+ FLEU = Unordered | FLE, // = 27
+ FGEU = Unordered | FGE, // = 28
+ FGTU = Unordered | FGT, // = 29
+ P = Unordered | O, // = 30
+ NP = Unordered | NO, // = 31
+ };
+ // clang-format on
+
+private:
+ Code m_code;
+
+public:
+ Code GetCode() const
+ {
+ return m_code;
+ }
+
+ bool IsFlag() const
+ {
+ return (m_code & OperMask) >= S;
+ }
+
+ bool IsUnsigned() const
+ {
+ return (ULT <= m_code) && (m_code <= UGT);
+ }
+
+ bool IsFloat() const
+ {
+ return !IsFlag() && (m_code & Float) != 0;
+ }
+
+ bool IsUnordered() const
+ {
+ return !IsFlag() && (m_code & (Float | Unordered)) == (Float | Unordered);
+ }
+
+ bool Is(Code cond) const
+ {
+ return m_code == cond;
+ }
+
+ template <typename... TRest>
+ bool Is(Code c, TRest... rest) const
+ {
+ return Is(c) || Is(rest...);
+ }
+
+ const char* Name() const
+ {
+ // clang-format off
+ static const char* names[]
+ {
+ "NONE", "???", "SLT", "SLE", "SGE", "SGT", "S", "NS",
+ "UEQ", "UNE", "ULT", "ULE", "UGE", "UGT", "C", "NC",
+ "FEQ", "FNE", "FLT", "FLE", "FGE", "FGT", "O", "NO",
+ "FEQU", "FNEU", "FLTU", "FLEU", "FGEU", "FGTU", "P", "NP"
+ };
+ // clang-format on
+
+ assert(m_code < _countof(names));
+ return names[m_code];
+ }
+
+ GenCondition() : m_code()
+ {
+ }
+
+ GenCondition(Code cond) : m_code(cond)
+ {
+ }
+
+ static_assert((GT_NE - GT_EQ) == (NE & ~Unsigned), "bad relop");
+ static_assert((GT_LT - GT_EQ) == SLT, "bad relop");
+ static_assert((GT_LE - GT_EQ) == SLE, "bad relop");
+ static_assert((GT_GE - GT_EQ) == SGE, "bad relop");
+ static_assert((GT_GT - GT_EQ) == SGT, "bad relop");
+#ifndef LEGACY_BACKEND
+ static_assert((GT_TEST_NE - GT_TEST_EQ) == (NE & ~Unsigned), "bad relop");
+#endif
+
+ static GenCondition FromRelop(GenTree* relop)
+ {
+ assert(relop->OperIsCompare());
+
+ if (varTypeIsFloating(relop->gtGetOp1()))
+ {
+ return FromFloatRelop(relop);
+ }
+ else
+ {
+ return FromIntegralRelop(relop);
+ }
+ }
+
+ static GenCondition FromFloatRelop(GenTree* relop)
+ {
+ assert(varTypeIsFloating(relop->gtGetOp1()) && varTypeIsFloating(relop->gtGetOp2()));
+
+ return FromFloatRelop(relop->OperGet(), (relop->gtFlags & GTF_RELOP_NAN_UN) != 0);
+ }
+
+ static GenCondition FromFloatRelop(genTreeOps oper, bool isUnordered)
+ {
+ assert(GenTree::OperIsCompare(oper));
+
+ unsigned code = oper - GT_EQ;
+ assert(code <= SGT);
+ code |= Float;
+
+ if (isUnordered)
+ {
+ code |= Unordered;
+ }
+
+ return GenCondition(static_cast<Code>(code));
+ }
+
+ static GenCondition FromIntegralRelop(GenTree* relop)
+ {
+ assert(!varTypeIsFloating(relop->gtGetOp1()) && !varTypeIsFloating(relop->gtGetOp2()));
+
+ return FromIntegralRelop(relop->OperGet(), relop->IsUnsigned());
+ }
+
+ static GenCondition FromIntegralRelop(genTreeOps oper, bool isUnsigned)
+ {
+ assert(GenTree::OperIsCompare(oper));
+
+#ifndef LEGACY_BACKEND
+ // GT_TEST_EQ/NE are special, they need to be mapped as GT_EQ/NE
+ unsigned code = oper - ((oper >= GT_TEST_EQ) ? GT_TEST_EQ : GT_EQ);
+#else
+ unsigned code = oper - GT_EQ;
+#endif
+
+ if (isUnsigned || (code <= 1)) // EQ/NE are treated as unsigned
+ {
+ code |= Unsigned;
+ }
+
+ return GenCondition(static_cast<Code>(code));
+ }
+
+ static GenCondition Reverse(GenCondition condition)
+ {
+ // clang-format off
+ static const Code reverse[]
+ {
+ // EQ NE LT LE GE GT F NF
+ NONE, NONE, SGE, SGT, SLT, SLE, NS, S,
+ NE, EQ, UGE, UGT, ULT, ULE, NC, C,
+ FNEU, FEQU, FGEU, FGTU, FLTU, FLEU, NO, O,
+ FNE, FEQ, FGE, FGT, FLT, FLE, NP, P
+ };
+ // clang-format on
+
+ assert(condition.m_code < _countof(reverse));
+ return GenCondition(reverse[condition.m_code]);
+ }
+};
+
// Represents a GT_JCC or GT_SETCC node.
struct GenTreeCC final : public GenTree
{
- genTreeOps gtCondition; // any relop
+ GenCondition gtCondition;
- GenTreeCC(genTreeOps oper, genTreeOps condition, var_types type = TYP_VOID)
+ GenTreeCC(genTreeOps oper, GenCondition condition, var_types type = TYP_VOID)
: GenTree(oper, type DEBUGARG(/*largeNode*/ FALSE)), gtCondition(condition)
{
assert(OperIs(GT_JCC, GT_SETCC));
- assert(OperIsCompare(condition));
}
#if DEBUGGABLE_GENTREE
diff --git a/src/jit/lower.cpp b/src/jit/lower.cpp
index 5e29c0b717..fb475a9580 100644
--- a/src/jit/lower.cpp
+++ b/src/jit/lower.cpp
@@ -915,16 +915,16 @@ bool Lowering::TryLowerSwitchToBitTest(
// Rewire the blocks as needed and figure out the condition to use for JCC.
//
- genTreeOps bbSwitchCondition = GT_NONE;
- bbSwitch->bbJumpKind = BBJ_COND;
+ GenCondition bbSwitchCondition;
+ bbSwitch->bbJumpKind = BBJ_COND;
comp->fgRemoveAllRefPreds(bbCase1, bbSwitch);
comp->fgRemoveAllRefPreds(bbCase0, bbSwitch);
if (bbSwitch->bbNext == bbCase0)
{
- // GT_LT + GTF_UNSIGNED generates JC so we jump to bbCase1 when the bit is set
- bbSwitchCondition = GT_LT;
+ // GenCondition::C generates JC so we jump to bbCase1 when the bit is set
+ bbSwitchCondition = GenCondition::C;
bbSwitch->bbJumpDest = bbCase1;
comp->fgAddRefPred(bbCase0, bbSwitch);
@@ -934,8 +934,8 @@ bool Lowering::TryLowerSwitchToBitTest(
{
assert(bbSwitch->bbNext == bbCase1);
- // GT_GE + GTF_UNSIGNED generates JNC so we jump to bbCase0 when the bit is not set
- bbSwitchCondition = GT_GE;
+ // GenCondition::NC generates JNC so we jump to bbCase0 when the bit is not set
+ bbSwitchCondition = GenCondition::NC;
bbSwitch->bbJumpDest = bbCase0;
comp->fgAddRefPred(bbCase0, bbSwitch);
@@ -951,7 +951,7 @@ bool Lowering::TryLowerSwitchToBitTest(
GenTree* bitTest = comp->gtNewOperNode(GT_BT, TYP_VOID, bitTableIcon, switchValue);
bitTest->gtFlags |= GTF_SET_FLAGS;
GenTreeCC* jcc = new (comp, GT_JCC) GenTreeCC(GT_JCC, bbSwitchCondition);
- jcc->gtFlags |= GTF_UNSIGNED | GTF_USE_FLAGS;
+ jcc->gtFlags |= GTF_USE_FLAGS;
LIR::AsRange(bbSwitch).InsertAfter(switchValue, bitTableIcon, bitTest, jcc);
@@ -2518,8 +2518,8 @@ GenTree* Lowering::DecomposeLongCompare(GenTree* cmp)
GenTree* jcc = cmpUse.User();
jcc->gtOp.gtOp1 = nullptr;
jcc->ChangeOper(GT_JCC);
- jcc->gtFlags |= (cmp->gtFlags & GTF_UNSIGNED) | GTF_USE_FLAGS;
- jcc->AsCC()->gtCondition = condition;
+ jcc->gtFlags |= GTF_USE_FLAGS;
+ jcc->AsCC()->gtCondition = GenCondition::FromIntegralRelop(condition, cmp->IsUnsigned());
}
else
{
@@ -2527,7 +2527,7 @@ GenTree* Lowering::DecomposeLongCompare(GenTree* cmp)
cmp->gtOp.gtOp2 = nullptr;
cmp->ChangeOper(GT_SETCC);
cmp->gtFlags |= GTF_USE_FLAGS;
- cmp->AsCC()->gtCondition = condition;
+ cmp->AsCC()->gtCondition = GenCondition::FromIntegralRelop(condition, cmp->IsUnsigned());
}
return cmp->gtNext;
@@ -2729,7 +2729,7 @@ GenTree* Lowering::OptimizeConstCompare(GenTree* cmp)
if (lsh->OperIs(GT_LSH) && varTypeIsIntOrI(lsh->TypeGet()) && lsh->gtGetOp1()->IsIntegralConst(1) &&
BlockRange().TryGetUse(cmp, &cmpUse))
{
- genTreeOps condition = cmp->OperIs(GT_TEST_NE) ? GT_LT : GT_GE;
+ GenCondition condition = cmp->OperIs(GT_TEST_NE) ? GenCondition::C : GenCondition::NC;
cmp->SetOper(GT_BT);
cmp->gtType = TYP_VOID;
@@ -2755,7 +2755,7 @@ GenTree* Lowering::OptimizeConstCompare(GenTree* cmp)
cmpUse.ReplaceWith(comp, cc);
}
- cc->gtFlags |= GTF_USE_FLAGS | GTF_UNSIGNED;
+ cc->gtFlags |= GTF_USE_FLAGS;
return cmp->gtNext;
}
@@ -2812,10 +2812,10 @@ GenTree* Lowering::OptimizeConstCompare(GenTree* cmp)
ccOp = GT_SETCC;
}
- genTreeOps condition = cmp->OperGet();
+ GenCondition condition = GenCondition::FromIntegralRelop(cmp);
cc->ChangeOper(ccOp);
cc->AsCC()->gtCondition = condition;
- cc->gtFlags |= GTF_USE_FLAGS | (cmp->gtFlags & GTF_UNSIGNED);
+ cc->gtFlags |= GTF_USE_FLAGS;
return next;
}
diff --git a/src/jit/lowerxarch.cpp b/src/jit/lowerxarch.cpp
index 292483cd9a..4b7ab06067 100644
--- a/src/jit/lowerxarch.cpp
+++ b/src/jit/lowerxarch.cpp
@@ -836,7 +836,7 @@ void Lowering::LowerSIMD(GenTreeSIMD* simdNode)
jtrue->ChangeOper(GT_JCC);
GenTreeCC* jcc = jtrue->AsCC();
jcc->gtFlags |= GTF_USE_FLAGS;
- jcc->gtCondition = (relopOp2Value == 0) ? GT_NE : GT_EQ;
+ jcc->gtCondition = (relopOp2Value == 0) ? GenCondition::NE : GenCondition::EQ;
BlockRange().Remove(simdUser->gtGetOp2());
BlockRange().Remove(simdUser);
@@ -854,8 +854,9 @@ void Lowering::LowerSIMD(GenTreeSIMD* simdNode)
// to have to handle 2 cases (set flags/set destination register).
//
- genTreeOps condition = (simdNode->gtSIMDIntrinsicID == SIMDIntrinsicOpEquality) ? GT_EQ : GT_NE;
- GenTreeCC* setcc = new (comp, GT_SETCC) GenTreeCC(GT_SETCC, condition, simdNode->TypeGet());
+ GenCondition condition =
+ (simdNode->gtSIMDIntrinsicID == SIMDIntrinsicOpEquality) ? GenCondition::EQ : GenCondition::NE;
+ GenTreeCC* setcc = new (comp, GT_SETCC) GenTreeCC(GT_SETCC, condition, simdNode->TypeGet());
setcc->gtFlags |= GTF_USE_FLAGS;
BlockRange().InsertAfter(simdNode, setcc);
simdUse.ReplaceWith(comp, setcc);