author    Gleb Balykov <g.balykov@samsung.com>  2020-07-02 17:01:13 +0300
committer Alexander Soldatov/AI Compiler Lab /SRR/Staff Engineer/Samsung Electronics <soldatov.a@samsung.com>  2020-07-13 18:08:49 +0300
commit    270f0d63034b0c7ab923766112b6593899a89f62 (patch)
tree      4aa00d13f2d83a72ef5e7f7a20a0ead63e93f6ca
parent    f1f94f32d098babc4fe9ed877334f402ab57be3a (diff)
[Tizen] Support relative indirection for 1st and 2nd levels of vtable on arm64
-rw-r--r--  src/jit/codegencommon.cpp  |  21
-rw-r--r--  src/vm/arm64/cgencpu.h     |   2
-rw-r--r--  src/vm/arm64/stubs.cpp     | 120
3 files changed, 90 insertions(+), 53 deletions(-)
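
Note (editorial, not part of the commit): with relative indirection (IAT_RELPVALUE), a vtable indirection slot stores a self-relative offset instead of an absolute pointer, so the call target is "slot address + value stored in the slot". A minimal C++ sketch of that resolution, assuming a 64-bit self-relative value as the 64-bit ldr emitted below implies:

#include <cstdint>

// Sketch only (hypothetical helper, not CoreCLR code): resolve a
// relative-indirect (IAT_RELPVALUE) slot. The arm64 sequence added in this
// commit, "mov x17, x16; ldr x16, [x16]; add x16, x16, x17", computes the
// same value with x16 initially holding slotAddr.
static void* ResolveRelativeSlot(void* slotAddr)
{
    int64_t selfRelativeOffset = *static_cast<int64_t*>(slotAddr); // ldr x16, [x16]
    return static_cast<uint8_t*>(slotAddr) + selfRelativeOffset;   // add x16, x16, x17
}
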
diff --git a/src/jit/codegencommon.cpp b/src/jit/codegencommon.cpp
index 8725619ceb..3c0d0b600b 100644
--- a/src/jit/codegencommon.cpp
+++ b/src/jit/codegencommon.cpp
@@ -8479,6 +8479,7 @@ void CodeGen::genFnEpilog(BasicBlock* block)
// ...
// bx r12
+ assert(REG_R12 == REG_INDIRECT_CALL_TARGET_REG);
regNumber indCallReg = REG_R12;
regNumber vptrReg1 = REG_LR;
@@ -8510,6 +8511,24 @@ void CodeGen::genFnEpilog(BasicBlock* block)
compiler->unwindBegEpilog();
genPopCalleeSavedRegistersAndFreeLclFrame(jmpEpilog);
+
+ if (jmpEpilog && lastNode->gtOper == GT_JMP && addrInfo.accessType == IAT_RELPVALUE)
+ {
+ // TODO-ARM64-CQ: update this!
+ // IAT_RELPVALUE jump at the end is done using relative indirection, so,
+ // additional helper register is required.
+ // REG_IP1 (x17) is always reserved on arm64 (see Compiler::compRsvdRegCheck)
+ // and is used as a temporary register. Use it as temp register here too.
+
+ assert(REG_IP0 == REG_INDIRECT_CALL_TARGET_REG);
+ regNumber indCallReg = REG_IP0;
+ regNumber vptrReg1 = REG_IP1;
+
+ instGen_Set_Reg_To_Imm(EA_HANDLE_CNS_RELOC, indCallReg, (ssize_t)addrInfo.addr);
+ getEmitter()->emitIns_R_R(INS_mov, EA_PTRSIZE, vptrReg1, indCallReg);
+ getEmitter()->emitIns_R_R_I(INS_ldr, EA_PTRSIZE, indCallReg, indCallReg, 0);
+ getEmitter()->emitIns_R_R_R(INS_add, EA_PTRSIZE, indCallReg, indCallReg, vptrReg1);
+ }
#endif // _TARGET_ARM64_
if (jmpEpilog)
@@ -8583,7 +8602,7 @@ void CodeGen::genFnEpilog(BasicBlock* block)
// LR is used as helper register right before it is restored from stack, thus,
// all relative address calculations are performed before LR is restored.
callType = emitter::EC_INDIR_R;
- indCallReg = REG_R12;
+ indCallReg = REG_INDIRECT_CALL_TARGET_REG;
addr = NULL;
regSet.verifyRegUsed(indCallReg);
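
The block added above produces, roughly, the following tail-jump sequence for an IAT_RELPVALUE target (a sketch assembled from the emit calls, not a verbatim disassembly; the exact expansion of the address materialization depends on the relocation):

// Sketch of the arm64 epilog tail for IAT_RELPVALUE:
//
//   mov  x16, #<addrInfo.addr>   ; instGen_Set_Reg_To_Imm into REG_IP0
//   mov  x17, x16                ; REG_IP1 preserves the slot address
//   ldr  x16, [x16]              ; load the self-relative value from the slot
//   add  x16, x16, x17           ; x16 = slot address + value = call target
//   br   x16                     ; EC_INDIR_R jump via REG_INDIRECT_CALL_TARGET_REG
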
diff --git a/src/vm/arm64/cgencpu.h b/src/vm/arm64/cgencpu.h
index 9b81f72d9e..b30d5026f3 100644
--- a/src/vm/arm64/cgencpu.h
+++ b/src/vm/arm64/cgencpu.h
@@ -457,7 +457,7 @@ public:
static void Init();
void EmitCallManagedMethod(MethodDesc *pMD, BOOL fTailCall);
- void EmitCallLabel(CodeLabel *target, BOOL fTailCall, BOOL fIndirect);
+ void EmitCallLabel(CodeLabel *target, BOOL fTailCall, BOOL fIndirect, BOOL fRelativeIndirect);
void EmitShuffleThunk(struct ShuffleEntry *pShuffleEntryArray);
diff --git a/src/vm/arm64/stubs.cpp b/src/vm/arm64/stubs.cpp
index 35e0ee74cd..dd9e43a062 100644
--- a/src/vm/arm64/stubs.cpp
+++ b/src/vm/arm64/stubs.cpp
@@ -88,6 +88,7 @@ class BranchInstructionFormat : public InstructionFormat
// Encoding of the VariationCode:
// bit(0) indicates whether this is a direct or an indirect jump.
// bit(1) indicates whether this is a branch with link -a.k.a call- (BL(R)) or not (B(R))
+ // bit(2) indicates whether this is a relative indirect branch or not
public:
enum VariationCodes
@@ -96,7 +97,9 @@ class BranchInstructionFormat : public InstructionFormat
BIF_VAR_CALL = 0x00000002,
BIF_VAR_JUMP = 0x00000000,
- BIF_VAR_INDIRECT_CALL = 0x00000003
+ BIF_VAR_INDIRECT_CALL = 0x00000003,
+
+ BIF_VAR_RELATIVE_INDIRECT = 0x00000004
};
private:
BOOL IsIndirect(UINT variationCode)
@@ -107,6 +110,12 @@ class BranchInstructionFormat : public InstructionFormat
{
return (variationCode & BIF_VAR_CALL) != 0;
}
+ BOOL IsRelativeIndirect(UINT variationCode)
+ {
+ BOOL result = (variationCode & BIF_VAR_RELATIVE_INDIRECT) != 0;
+ _ASSERTE(result && IsIndirect(variationCode) || !result);
+ return result;
+ }
public:
@@ -120,7 +129,9 @@ class BranchInstructionFormat : public InstructionFormat
LIMITED_METHOD_CONTRACT;
_ASSERTE(refSize == InstructionFormat::k64);
- if (IsIndirect(variationCode))
+ if (IsRelativeIndirect(variationCode))
+ return 20;
+ else if (IsIndirect(variationCode))
return 12;
else
return 8;
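
The sizes above simply count 4-byte instructions per variant: ldr (literal) + mov + ldr + add + b(l)r for relative indirect, ldr (literal) + ldr + b(l)r for plain indirect, and ldr (literal) + b(l)r for direct. A sketch with an assumed helper name, mirroring GetSizeOfInstruction:

// Sketch only (assumed helper name, not CoreCLR code):
static unsigned StubBranchBytes(bool isIndirect, bool isRelativeIndirect)
{
    if (isRelativeIndirect) return 5 * 4; // ldr(lit), mov, ldr, add, b(l)r -> 20
    if (isIndirect)         return 3 * 4; // ldr(lit), ldr, b(l)r           -> 12
    return 2 * 4;                         // ldr(lit), b(l)r                ->  8
}
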
@@ -155,62 +166,59 @@ class BranchInstructionFormat : public InstructionFormat
{
LIMITED_METHOD_CONTRACT;
- if (IsIndirect(variationCode))
- {
- _ASSERTE(((UINT_PTR)pDataBuffer & 7) == 0);
- __int64 dataOffset = pDataBuffer - pOutBuffer;
-
- if (dataOffset < -1048576 || dataOffset > 1048572)
- COMPlusThrow(kNotSupportedException);
+ _ASSERTE(((UINT_PTR)pDataBuffer & 7) == 0);
+ __int64 dataOffset = pDataBuffer - pOutBuffer;
+
+ if (dataOffset < -1048576 || dataOffset > 1048572)
+ COMPlusThrow(kNotSupportedException);
- DWORD imm19 = (DWORD)(0x7FFFF & (dataOffset >> 2));
+ DWORD imm19 = (DWORD)(0x7FFFF & (dataOffset >> 2));
- // +0: ldr x16, [pc, #dataOffset]
+ // +0: ldr x16, [pc, #dataOffset]
+ *((DWORD*)pOutBuffer) = (0x58000010 | (imm19 << 5));
+ DWORD offsetbranch = 0;
+
+ if (IsRelativeIndirect(variationCode))
+ {
+ // TODO-ARM64-CQ: update this!
+ // REG_IP1 (x17) is always reserved on arm64 (see Compiler::compRsvdRegCheck)
+ // and is used as a temporary register. Use it as temp register here too.
+ //
+ // +4: mov x17, x16
+ // +8: ldr x16, [x16]
+ // +12: add x16, x16, x17
+ *((DWORD*)(pOutBuffer+4)) = 0xAA1003F1;
+ *((DWORD*)(pOutBuffer+8)) = 0xF9400210;
+ *((DWORD*)(pOutBuffer+12)) = 0x8B110210;
+ offsetbranch = 16;
+ }
+ else if (IsIndirect(variationCode))
+ {
// +4: ldr x16, [x16]
- // +8: b(l)r x16
- *((DWORD*)pOutBuffer) = (0x58000010 | (imm19 << 5));
*((DWORD*)(pOutBuffer+4)) = 0xF9400210;
- if (IsCall(variationCode))
- {
- *((DWORD*)(pOutBuffer+8)) = 0xD63F0200; // blr x16
- }
- else
- {
- *((DWORD*)(pOutBuffer+8)) = 0xD61F0200; // br x16
- }
+ offsetbranch = 8;
+ }
+ else
+ {
+ offsetbranch = 4;
+ }
+ _ASSERTE(offsetbranch != 0);
- *((__int64*)pDataBuffer) = fixedUpReference + (__int64)pOutBuffer;
+ // +offsetbranch: b(l)r x16
+ if (IsCall(variationCode))
+ {
+ *((DWORD*)(pOutBuffer+offsetbranch)) = 0xD63F0200; // blr x16
}
else
{
-
- _ASSERTE(((UINT_PTR)pDataBuffer & 7) == 0);
- __int64 dataOffset = pDataBuffer - pOutBuffer;
-
- if (dataOffset < -1048576 || dataOffset > 1048572)
- COMPlusThrow(kNotSupportedException);
-
- DWORD imm19 = (DWORD)(0x7FFFF & (dataOffset >> 2));
-
- // +0: ldr x16, [pc, #dataOffset]
- // +4: b(l)r x16
- *((DWORD*)pOutBuffer) = (0x58000010 | (imm19 << 5));
- if (IsCall(variationCode))
- {
- *((DWORD*)(pOutBuffer+4)) = 0xD63F0200; // blr x16
- }
- else
- {
- *((DWORD*)(pOutBuffer+4)) = 0xD61F0200; // br x16
- }
-
- if (!ClrSafeInt<__int64>::addition(fixedUpReference, (__int64)pOutBuffer, fixedUpReference))
- COMPlusThrowArithmetic();
- *((__int64*)pDataBuffer) = fixedUpReference;
+ *((DWORD*)(pOutBuffer+offsetbranch)) = 0xD61F0200; // br x16
}
- }
+ if (!ClrSafeInt<__int64>::addition(fixedUpReference, (__int64)pOutBuffer, fixedUpReference))
+ COMPlusThrowArithmetic();
+ *((__int64*)pDataBuffer) = fixedUpReference;
+ }
};
//-----------------------------------------------------------------------
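
Taken together, the refactored EmitInstruction lays down the following for the relative-indirect variation (a sketch of the decoded sequence; the encodings are the constants written above, and pDataBuffer receives fixedUpReference + pOutBuffer, i.e. the absolute address of the slot):

// Sketch of the 20-byte relative-indirect sequence plus its data slot:
//
//   +0:  ldr x16, [pc, #dataOffset]  ; 0x58000010 | (imm19 << 5), loads slot address
//   +4:  mov x17, x16                ; 0xAA1003F1, keep slot address
//   +8:  ldr x16, [x16]              ; 0xF9400210, load self-relative value
//   +12: add x16, x16, x17           ; 0x8B110210, absolute target
//   +16: blr x16 / br x16            ; 0xD63F0200 / 0xD61F0200
//
//   [pDataBuffer]: fixedUpReference + pOutBuffer (absolute slot address)
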
@@ -1856,16 +1864,19 @@ VOID StubLinkerCPU::EmitComputedInstantiatingMethodStub(MethodDesc* pSharedMD, s
EmitCallManagedMethod(pSharedMD, TRUE /* tail call */);
}
-void StubLinkerCPU::EmitCallLabel(CodeLabel *target, BOOL fTailCall, BOOL fIndirect)
+void StubLinkerCPU::EmitCallLabel(CodeLabel *target, BOOL fTailCall, BOOL fIndirect, BOOL fRelativeIndirect)
{
+ _ASSERTE(fRelativeIndirect && fIndirect || !fRelativeIndirect);
+
BranchInstructionFormat::VariationCodes variationCode = BranchInstructionFormat::VariationCodes::BIF_VAR_JUMP;
if (!fTailCall)
variationCode = static_cast<BranchInstructionFormat::VariationCodes>(variationCode | BranchInstructionFormat::VariationCodes::BIF_VAR_CALL);
if (fIndirect)
variationCode = static_cast<BranchInstructionFormat::VariationCodes>(variationCode | BranchInstructionFormat::VariationCodes::BIF_VAR_INDIRECT);
+ if (fRelativeIndirect)
+ variationCode = static_cast<BranchInstructionFormat::VariationCodes>(variationCode | BranchInstructionFormat::VariationCodes::BIF_VAR_RELATIVE_INDIRECT);
EmitLabelRef(target, reinterpret_cast<BranchInstructionFormat&>(gBranchIF), (UINT)variationCode);
-
}
void StubLinkerCPU::EmitCallManagedMethod(MethodDesc *pMD, BOOL fTailCall)
@@ -1873,11 +1884,18 @@ void StubLinkerCPU::EmitCallManagedMethod(MethodDesc *pMD, BOOL fTailCall)
// Use direct call if possible.
if (pMD->HasStableEntryPoint())
{
- EmitCallLabel(NewExternalCodeLabel((LPVOID)pMD->GetStableEntryPoint()), fTailCall, FALSE);
+ EmitCallLabel(NewExternalCodeLabel((LPVOID)pMD->GetStableEntryPoint()), fTailCall, FALSE, FALSE);
}
else
{
- EmitCallLabel(NewExternalCodeLabel((LPVOID)pMD->GetAddrOfSlot()), fTailCall, TRUE);
+ BOOL isRelative = MethodTable::VTableIndir2_t::isRelative
+ && pMD->IsVtableSlot();
+
+#ifndef FEATURE_NGEN_RELOCS_OPTIMIZATIONS
+ _ASSERTE(!isRelative);
+#endif
+
+ EmitCallLabel(NewExternalCodeLabel((LPVOID)pMD->GetAddrOfSlot()), fTailCall, TRUE, isRelative);
}
}
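
End-to-end (a sketch of the flow, using names from this diff): EmitCallManagedMethod requests a relative-indirect call only when second-level vtable indirections are relative and the method is dispatched through a vtable slot; EmitCallLabel then ORs BIF_VAR_RELATIVE_INDIRECT into the variation code, and BranchInstructionFormat emits the 20-byte sequence.

// Sketch of how the new flag propagates through this change:
//   EmitCallManagedMethod(pMD, fTailCall)
//     isRelative = MethodTable::VTableIndir2_t::isRelative && pMD->IsVtableSlot()
//     -> EmitCallLabel(NewExternalCodeLabel(pMD->GetAddrOfSlot()), fTailCall, TRUE, isRelative)
//          variationCode |= BIF_VAR_RELATIVE_INDIRECT        (when isRelative)
//          -> BranchInstructionFormat::EmitInstruction emits ldr/mov/ldr/add/b(l)r (20 bytes)
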