Diffstat (limited to 'src/jit')
-rw-r--r--  src/jit/codegencommon.cpp   72
-rw-r--r--  src/jit/importer.cpp         7
-rw-r--r--  src/jit/lower.cpp           51
-rw-r--r--  src/jit/lower.h              6
-rw-r--r--  src/jit/morph.cpp           16
5 files changed, 127 insertions, 25 deletions
diff --git a/src/jit/codegencommon.cpp b/src/jit/codegencommon.cpp
index 99902dc7bc..7c2869199e 100644
--- a/src/jit/codegencommon.cpp
+++ b/src/jit/codegencommon.cpp
@@ -9679,6 +9679,19 @@ void CodeGen::genFnEpilog(BasicBlock* block)
bool jmpEpilog = ((block->bbFlags & BBF_HAS_JMP) != 0);
+ GenTree* lastNode = block->lastNode();
+
+ // Method handle and address info used in case of jump epilog
+ CORINFO_METHOD_HANDLE methHnd = nullptr;
+ CORINFO_CONST_LOOKUP addrInfo;
+ addrInfo.addr = nullptr;
+
+ if (jmpEpilog && lastNode->gtOper == GT_JMP)
+ {
+ methHnd = (CORINFO_METHOD_HANDLE)lastNode->gtVal.gtVal1;
+ compiler->info.compCompHnd->getFunctionEntryPoint(methHnd, &addrInfo);
+ }
+
#ifdef _TARGET_ARM_
// We delay starting the unwind codes until we have an instruction which we know
// needs an unwind code. In particular, for large stack frames in methods without
@@ -9723,6 +9736,30 @@ void CodeGen::genFnEpilog(BasicBlock* block)
unwindStarted = true;
}
+ if (jmpEpilog && lastNode->gtOper == GT_JMP && addrInfo.accessType == IAT_RELPVALUE)
+ {
+ // An IAT_RELPVALUE jump at the end is done via relative indirection, so
+ // an additional helper register is required.
+ // We use LR just before it is restored from the stack, i.e.
+ //
+ // movw r12, laddr
+ // movt r12, haddr
+ // mov lr, r12
+ // ldr r12, [r12]
+ // add r12, r12, lr
+ // pop {lr}
+ // ...
+ // bx r12
+
+ regNumber indCallReg = REG_R12;
+ regNumber vptrReg1 = REG_LR;
+
+ instGen_Set_Reg_To_Imm(EA_HANDLE_CNS_RELOC, indCallReg, (ssize_t)addrInfo.addr);
+ getEmitter()->emitIns_R_R(INS_mov, EA_PTRSIZE, vptrReg1, indCallReg);
+ getEmitter()->emitIns_R_R_I(INS_ldr, EA_PTRSIZE, indCallReg, indCallReg, 0);
+ getEmitter()->emitIns_R_R(INS_add, EA_PTRSIZE, indCallReg, vptrReg1);
+ }
+
genPopCalleeSavedRegisters(jmpEpilog);
if (regSet.rsMaskPreSpillRegs(true) != RBM_NONE)
@@ -9735,6 +9772,12 @@ void CodeGen::genFnEpilog(BasicBlock* block)
compiler->unwindAllocStack(preSpillRegArgSize);
}
+ if (jmpEpilog)
+ {
+ // We'd better not have used a pop PC to return; otherwise this will be unreachable code
+ noway_assert(!genUsedPopToReturn);
+ }
+
#else // _TARGET_ARM64_
compiler->unwindBegEpilog();
@@ -9743,20 +9786,13 @@ void CodeGen::genFnEpilog(BasicBlock* block)
if (jmpEpilog)
{
-#ifdef _TARGET_ARMARCH_
hasTailCalls = true;
-#endif // _TARGET_ARMARCH_
noway_assert(block->bbJumpKind == BBJ_RETURN);
noway_assert(block->bbTreeList != nullptr);
-#ifdef _TARGET_ARM_
- // We better not have used a pop PC to return otherwise this will be unreachable code
- noway_assert(!genUsedPopToReturn);
-#endif // _TARGET_ARM_
-
/* figure out what jump we have */
- GenTree* jmpNode = block->lastNode();
+ GenTree* jmpNode = lastNode;
#if !FEATURE_FASTTAILCALL
noway_assert(jmpNode->gtOper == GT_JMP);
#else // FEATURE_FASTTAILCALL
@@ -9775,10 +9811,8 @@ void CodeGen::genFnEpilog(BasicBlock* block)
{
// Simply emit a jump to the methodHnd. This is similar to a call so we can use
// the same descriptor with some minor adjustments.
- CORINFO_METHOD_HANDLE methHnd = (CORINFO_METHOD_HANDLE)jmpNode->gtVal.gtVal1;
-
- CORINFO_CONST_LOOKUP addrInfo;
- compiler->info.compCompHnd->getFunctionEntryPoint(methHnd, &addrInfo);
+ assert(methHnd != nullptr);
+ assert(addrInfo.addr != nullptr);
#ifdef _TARGET_ARM_
emitter::EmitCallType callType;
@@ -9814,6 +9848,20 @@ void CodeGen::genFnEpilog(BasicBlock* block)
}
break;
+ case IAT_RELPVALUE:
+ {
+ // Load the cell address into a register, do a relative-indirect load, and
+ // call through a register. We have to use R12 since we assume the argument
+ // registers are in use. LR is used as a helper register just before it is
+ // restored from the stack, so all relative-address calculations are
+ // performed before LR is restored.
+ callType = emitter::EC_INDIR_R;
+ indCallReg = REG_R12;
+ addr = nullptr;
+
+ regTracker.rsTrackRegTrash(indCallReg);
+ break;
+ }
+
case IAT_PPVALUE:
default:
NO_WAY("Unsupported JMP indirection");
diff --git a/src/jit/importer.cpp b/src/jit/importer.cpp
index 182a049820..80b1e875dd 100644
--- a/src/jit/importer.cpp
+++ b/src/jit/importer.cpp
@@ -1794,7 +1794,7 @@ GenTree* Compiler::impLookupToTree(CORINFO_RESOLVED_TOKEN* pResolvedToken,
CORINFO_GENERIC_HANDLE handle = nullptr;
void* pIndirection = nullptr;
- assert(pLookup->constLookup.accessType != IAT_PPVALUE);
+ assert(pLookup->constLookup.accessType != IAT_PPVALUE && pLookup->constLookup.accessType != IAT_RELPVALUE);
if (pLookup->constLookup.accessType == IAT_VALUE)
{
@@ -1829,7 +1829,7 @@ GenTree* Compiler::impReadyToRunLookupToTree(CORINFO_CONST_LOOKUP* pLookup,
{
CORINFO_GENERIC_HANDLE handle = nullptr;
void* pIndirection = nullptr;
- assert(pLookup->accessType != IAT_PPVALUE);
+ assert(pLookup->accessType != IAT_PPVALUE && pLookup->accessType != IAT_RELPVALUE);
if (pLookup->accessType == IAT_VALUE)
{
@@ -7299,7 +7299,8 @@ var_types Compiler::impImportCall(OPCODE opcode,
call = gtNewCallNode(CT_USER_FUNC, callInfo->hMethod, callRetTyp, nullptr, ilOffset);
call->gtCall.gtStubCallStubAddr = callInfo->stubLookup.constLookup.addr;
call->gtFlags |= GTF_CALL_VIRT_STUB;
- assert(callInfo->stubLookup.constLookup.accessType != IAT_PPVALUE);
+ assert(callInfo->stubLookup.constLookup.accessType != IAT_PPVALUE &&
+ callInfo->stubLookup.constLookup.accessType != IAT_RELPVALUE);
if (callInfo->stubLookup.constLookup.accessType == IAT_PVALUE)
{
call->gtCall.gtCallMoreFlags |= GTF_CALL_M_VIRTSTUB_REL_INDIRECT;
diff --git a/src/jit/lower.cpp b/src/jit/lower.cpp
index 3f609e70e4..1d3eb257d3 100644
--- a/src/jit/lower.cpp
+++ b/src/jit/lower.cpp
@@ -3189,6 +3189,16 @@ GenTree* Lowering::LowerDirectCall(GenTreeCall* call)
result = Ind(Ind(result));
break;
+ case IAT_RELPVALUE:
+ {
+ // Non-virtual direct calls to addresses accessed by
+ // a single relative indirection.
+ GenTree* cellAddr = AddrGen(addr);
+ GenTree* indir = Ind(cellAddr);
+ result = comp->gtNewOperNode(GT_ADD, TYP_I_IMPL, indir, AddrGen(addr));
+ break;
+ }
+
default:
noway_assert(!"Bad accessType");
break;
@@ -3977,6 +3987,9 @@ GenTree* Lowering::LowerNonvirtPinvokeCall(GenTreeCall* call)
case IAT_PPVALUE:
result = Ind(Ind(AddrGen(addr)));
break;
+
+ case IAT_RELPVALUE:
+ unreached();
}
}
@@ -4073,19 +4086,24 @@ GenTree* Lowering::LowerVirtualVtableCall(GenTreeCall* call)
//
// Save relative offset to tmp (vtab is virtual table pointer, vtabOffsOfIndirection is offset of
// vtable-1st-level-indirection):
- // tmp = [vtab + vtabOffsOfIndirection]
+ // tmp = vtab
//
// Save address of method to result (vtabOffsAfterIndirection is offset of vtable-2nd-level-indirection):
- // result = [vtab + vtabOffsOfIndirection + vtabOffsAfterIndirection + tmp]
+ // result = [tmp + vtabOffsOfIndirection + vtabOffsAfterIndirection + [tmp + vtabOffsOfIndirection]]
+ //
+ // If relative pointers are also used in the second-level indirection, an
+ // additional temporary is needed:
+ // tmp1 = vtab
+ // tmp2 = tmp1 + vtabOffsOfIndirection + vtabOffsAfterIndirection + [tmp1 + vtabOffsOfIndirection]
+ // result = tmp2 + [tmp2]
+ //
unsigned lclNumTmp = comp->lvaGrabTemp(true DEBUGARG("lclNumTmp"));
-
comp->lvaTable[lclNumTmp].incRefCnts(comp->compCurBB->getBBWeight(comp), comp);
- GenTree* lclvNodeStore = comp->gtNewTempAssign(lclNumTmp, result);
- LIR::Range range = LIR::SeqTree(comp, lclvNodeStore);
- JITDUMP("result of obtaining pointer to virtual table:\n");
- DISPRANGE(range);
- BlockRange().InsertBefore(call, std::move(range));
+ unsigned lclNumTmp2 = comp->lvaGrabTemp(true DEBUGARG("lclNumTmp2"));
+ comp->lvaTable[lclNumTmp2].incRefCnts(comp->compCurBB->getBBWeight(comp), comp);
+
+ GenTree* lclvNodeStore = comp->gtNewTempAssign(lclNumTmp, result);
GenTree* tmpTree = comp->gtNewLclvNode(lclNumTmp, result->TypeGet());
tmpTree = Offset(tmpTree, vtabOffsOfIndirection);
@@ -4094,7 +4112,22 @@ GenTree* Lowering::LowerVirtualVtableCall(GenTreeCall* call)
GenTree* offs = comp->gtNewIconNode(vtabOffsOfIndirection + vtabOffsAfterIndirection, TYP_INT);
result = comp->gtNewOperNode(GT_ADD, TYP_I_IMPL, comp->gtNewLclvNode(lclNumTmp, result->TypeGet()), offs);
- result = Ind(OffsetByIndex(result, tmpTree));
+ GenTree* base = OffsetByIndexWithScale(result, tmpTree, 1);
+ GenTree* lclvNodeStore2 = comp->gtNewTempAssign(lclNumTmp2, base);
+
+ LIR::Range range = LIR::SeqTree(comp, lclvNodeStore);
+ JITDUMP("result of obtaining pointer to virtual table:\n");
+ DISPRANGE(range);
+ BlockRange().InsertBefore(call, std::move(range));
+
+ LIR::Range range2 = LIR::SeqTree(comp, lclvNodeStore2);
+ JITDUMP("result of obtaining pointer to virtual table 2nd level indirection:\n");
+ DISPRANGE(range2);
+ BlockRange().InsertAfter(lclvNodeStore, std::move(range2));
+
+ result = Ind(comp->gtNewLclvNode(lclNumTmp2, result->TypeGet()));
+ result =
+ comp->gtNewOperNode(GT_ADD, TYP_I_IMPL, result, comp->gtNewLclvNode(lclNumTmp2, result->TypeGet()));
}
else
{
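The two-temporary scheme described in the updated comment can be written out in plain C++ as follows; the layout and helper name are assumptions for illustration, not CoreCLR's actual vtable structures.

    #include <cstddef>
    #include <cstdint>

    // Sketch only: both indirection levels hold self-relative offsets.
    static void* LookupRelativeVtableSlot(uint8_t* vtab,
                                          ptrdiff_t vtabOffsOfIndirection,
                                          ptrdiff_t vtabOffsAfterIndirection)
    {
        // tmp1 = vtab; first level: the chunk offset is relative to its slot.
        uint8_t*  tmp1 = vtab;
        ptrdiff_t rel1 = *reinterpret_cast<ptrdiff_t*>(tmp1 + vtabOffsOfIndirection);
        uint8_t*  tmp2 = tmp1 + vtabOffsOfIndirection + vtabOffsAfterIndirection + rel1;

        // result = tmp2 + [tmp2]; the second level is also self-relative.
        return tmp2 + *reinterpret_cast<ptrdiff_t*>(tmp2);
    }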
diff --git a/src/jit/lower.h b/src/jit/lower.h
index 0d298e0a52..63319961be 100644
--- a/src/jit/lower.h
+++ b/src/jit/lower.h
@@ -208,6 +208,12 @@ private:
return new (comp, GT_LEA) GenTreeAddrMode(resultType, base, index, 0, 0);
}
+ GenTree* OffsetByIndexWithScale(GenTree* base, GenTree* index, unsigned scale)
+ {
+ var_types resultType = (base->TypeGet() == TYP_REF) ? TYP_BYREF : base->TypeGet();
+ return new (comp, GT_LEA) GenTreeAddrMode(resultType, base, index, scale, 0);
+ }
+
// Replace the definition of the given use with a lclVar, allocating a new temp
// if 'tempNum' is BAD_VAR_NUM.
unsigned ReplaceWithLclVar(LIR::Use& use, unsigned tempNum = BAD_VAR_NUM)
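For reference, the address arithmetic represented by the GenTreeAddrMode built here is base + index * scale + offset, with offset fixed at 0; with scale == 1 the new helper degenerates to the existing OffsetByIndex. A minimal sketch, assuming that constructor-argument order:

    #include <cstdint>

    // Sketch only: the value a (base, index, scale, offset = 0) address
    // mode computes.
    static uintptr_t AddrModeValue(uintptr_t base, uintptr_t index, unsigned scale)
    {
        return base + index * scale;
    }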
diff --git a/src/jit/morph.cpp b/src/jit/morph.cpp
index 2c3c280a11..33bbc74001 100644
--- a/src/jit/morph.cpp
+++ b/src/jit/morph.cpp
@@ -7927,9 +7927,23 @@ void Compiler::fgMorphTailCall(GenTreeCall* call)
/* Now the appropriate vtable slot */
- add = gtNewOperNode(GT_ADD, TYP_I_IMPL, vtbl, gtNewIconNode(vtabOffsAfterIndirection, TYP_I_IMPL));
+ add = gtNewOperNode(GT_ADD, TYP_I_IMPL, vtbl, gtNewIconNode(vtabOffsAfterIndirection, TYP_I_IMPL));
+
+ GenTree* indOffTree = nullptr;
+
+ if (isRelative)
+ {
+ indOffTree = impCloneExpr(add, &add, NO_CLASS_HANDLE, (unsigned)CHECK_SPILL_ALL,
+ nullptr DEBUGARG("virtual table call 2"));
+ }
+
vtbl = gtNewOperNode(GT_IND, TYP_I_IMPL, add);
+ if (isRelative)
+ {
+ vtbl = gtNewOperNode(GT_ADD, TYP_I_IMPL, vtbl, indOffTree);
+ }
+
// Switch this to a plain indirect call
call->gtFlags &= ~GTF_CALL_VIRT_KIND_MASK;
assert(!call->IsVirtual());