author    gbalykov <g.balykov@samsung.com>    2017-05-05 23:07:44 +0300
committer Jan Kotas <jkotas@microsoft.com>    2017-05-05 13:07:44 -0700
commit    980c1204d68f54be77eb840cc3f2e4fe2df42a26 (patch)
tree      1c9de18ed9660a833e156a8bf1be5ebb01d9c82e
parent    f32af15fd8c5ceae9ac10b5d2ef0eed9c66ca4e3 (diff)
Add compact entry points for ARM (#11109)
-rw-r--r--  src/inc/daccess.h           6
-rw-r--r--  src/vm/arm/asmhelpers.S    18
-rw-r--r--  src/vm/arm/asmhelpers.asm  21
-rw-r--r--  src/vm/arm/cgencpu.h       55
-rw-r--r--  src/vm/arm/stubs.cpp        7
-rw-r--r--  src/vm/class.h             11
-rw-r--r--  src/vm/method.cpp         221
-rw-r--r--  src/vm/method.hpp          17
-rw-r--r--  src/vm/precode.cpp         10
-rw-r--r--  src/vm/precode.h            5
-rw-r--r--  src/vm/prestub.cpp         22
11 files changed, 378 insertions, 15 deletions
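
On ARM, a compact (temporary) entry point added by this patch is a 4-byte Thumb stub — `mov r12, pc` followed by a short `b` to a shared CentralJumpCode that records the owning MethodDescChunk and jumps to ThePreStubCompactARM. The sketch below only replays the size arithmetic introduced in method.cpp; it assumes a 32-bit ARM target (4-byte pointers) and is illustrative, not part of the patch.

// Sketch (not part of the patch): size arithmetic for the ARM compact entry
// point block, assuming a 32-bit ARM target where MethodDescChunk* and PCODE
// are 4 bytes each.
#include <cassert>
#include <cstdio>

int main()
{
    const int TEP_ENTRY_SIZE         = 4;   // mov r12, pc (2 bytes) + b <central jump> (2 bytes)
    const int TEP_CENTRAL_JUMP_SIZE  = 14;  // ldr.w pc, [pc, #8] (4) + pad (2) + m_pChunk (4) + m_target (4)
    const int COMPACT_ENTRY_ARM_CODE = 0x2; // leading pad so every entry address has bit 1 set
    const int MAX_OFFSET_UNCONDITIONAL_BRANCH_THUMB = 2048;

    int count = 5;  // number of MethodDescs in the chunk

    // SizeOfCompactEntryPoints on ARM, as added in method.cpp
    int size = COMPACT_ENTRY_ARM_CODE + count * TEP_ENTRY_SIZE + TEP_CENTRAL_JUMP_SIZE;
    printf("compact entry point block for %d methods: %d bytes\n", count, size);

    // Every entry's Thumb branch must reach the shared CentralJumpCode, so a
    // chunk is capped at 2048 / 4 = 512 compact entry points (precode.cpp check).
    assert(count < MAX_OFFSET_UNCONDITIONAL_BRANCH_THUMB / TEP_ENTRY_SIZE);
    return 0;
}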
diff --git a/src/inc/daccess.h b/src/inc/daccess.h
index 7d82e86cb9..40aba86f21 100644
--- a/src/inc/daccess.h
+++ b/src/inc/daccess.h
@@ -617,6 +617,11 @@ typedef struct _DacGlobals
ULONG fn__ThreadpoolMgr__AsyncTimerCallbackCompletion;
ULONG fn__DACNotifyCompilationFinished;
ULONG fn__ThePreStub;
+
+#ifdef _TARGET_ARM_
+ ULONG fn__ThePreStubCompactARM;
+#endif // _TARGET_ARM_
+
ULONG fn__ThePreStubPatchLabel;
ULONG fn__PrecodeFixupThunk;
ULONG fn__StubDispatchFixupStub;
@@ -2345,6 +2350,7 @@ typedef ArrayDPTR(signed char) PTR_SBYTE;
typedef ArrayDPTR(const BYTE) PTR_CBYTE;
typedef DPTR(INT8) PTR_INT8;
typedef DPTR(INT16) PTR_INT16;
+typedef DPTR(UINT16) PTR_UINT16;
typedef DPTR(WORD) PTR_WORD;
typedef DPTR(USHORT) PTR_USHORT;
typedef DPTR(DWORD) PTR_DWORD;
diff --git a/src/vm/arm/asmhelpers.S b/src/vm/arm/asmhelpers.S
index 04d7527180..36933f5ea6 100644
--- a/src/vm/arm/asmhelpers.S
+++ b/src/vm/arm/asmhelpers.S
@@ -509,6 +509,24 @@ LOCAL_LABEL(UM2MThunk_WrapperHelper_ArgumentsSetup):
NESTED_END ThePreStub, _TEXT
// ------------------------------------------------------------------
+ NESTED_ENTRY ThePreStubCompactARM, _TEXT, NoHandler
+
+ // r12 - address of compact entry point + PC_REG_RELATIVE_OFFSET
+
+ PROLOG_WITH_TRANSITION_BLOCK
+
+ mov r0, r12
+
+ bl C_FUNC(PreStubGetMethodDescForCompactEntryPoint)
+
+ mov r12, r0 // pMethodDesc
+
+ EPILOG_WITH_TRANSITION_BLOCK_TAILCALL
+
+ b C_FUNC(ThePreStub)
+
+ NESTED_END ThePreStubCompactARM, _TEXT
+// ------------------------------------------------------------------
// This method does nothing. It's just a fixed function for the debugger to put a breakpoint on.
LEAF_ENTRY ThePreStubPatch, _TEXT
nop
diff --git a/src/vm/arm/asmhelpers.asm b/src/vm/arm/asmhelpers.asm
index 542bdc65cc..e5fd41a513 100644
--- a/src/vm/arm/asmhelpers.asm
+++ b/src/vm/arm/asmhelpers.asm
@@ -24,6 +24,7 @@
IMPORT UMThunkStubRareDisableWorker
IMPORT UM2MDoADCallBack
IMPORT PreStubWorker
+ IMPORT PreStubGetMethodDescForCompactEntryPoint
IMPORT NDirectImportWorker
IMPORT ObjIsInstanceOfNoGC
IMPORT ArrayStoreCheck
@@ -571,6 +572,26 @@ UM2MThunk_WrapperHelper_ArgumentsSetup
NESTED_END
; ------------------------------------------------------------------
+
+ NESTED_ENTRY ThePreStubCompactARM
+
+ ; r12 - address of compact entry point + PC_REG_RELATIVE_OFFSET
+
+ PROLOG_WITH_TRANSITION_BLOCK
+
+ mov r0, r12
+
+ bl PreStubGetMethodDescForCompactEntryPoint
+
+ mov r12, r0 ; pMethodDesc
+
+ EPILOG_WITH_TRANSITION_BLOCK_TAILCALL
+
+ b ThePreStub
+
+ NESTED_END
+
+; ------------------------------------------------------------------
; This method does nothing. It's just a fixed function for the debugger to put a breakpoint on.
LEAF_ENTRY ThePreStubPatch
nop
diff --git a/src/vm/arm/cgencpu.h b/src/vm/arm/cgencpu.h
index 34af8187b2..181d5f10eb 100644
--- a/src/vm/arm/cgencpu.h
+++ b/src/vm/arm/cgencpu.h
@@ -57,7 +57,7 @@ EXTERN_C void checkStack(void);
#define JUMP_ALLOCATE_SIZE 8 // # bytes to allocate for a jump instruction
#define BACK_TO_BACK_JUMP_ALLOCATE_SIZE 8 // # bytes to allocate for a back to back jump instruction
-//#define HAS_COMPACT_ENTRYPOINTS 1
+#define HAS_COMPACT_ENTRYPOINTS 1
#define HAS_NDIRECT_IMPORT_PRECODE 1
@@ -90,6 +90,12 @@ EXTERN_C void setFPReturn(int fpSize, INT64 retVal);
// this is the offset by which it should be decremented to arrive at the callsite.
#define STACKWALK_CONTROLPC_ADJUST_OFFSET 2
+// Max offset for unconditional thumb branch
+#define MAX_OFFSET_UNCONDITIONAL_BRANCH_THUMB 2048
+
+// Offset of pc register
+#define PC_REG_RELATIVE_OFFSET 4
+
//=======================================================================
// IMPORTANT: This value is used to figure out how much to allocate
// for a fixed array of FieldMarshaler's. That means it must be at least
@@ -236,6 +242,53 @@ void emitCOMStubCall (ComCallMethodDesc *pCOMMethod, PCODE target);
#endif // FEATURE_COMINTEROP
//------------------------------------------------------------------------
+inline void emitUnconditionalBranchThumb(LPBYTE pBuffer, int16_t offset)
+{
+ LIMITED_METHOD_CONTRACT;
+
+ uint16_t *pInstr = (uint16_t *) pBuffer;
+
+ // offset from -2KB to +2KB
+ _ASSERTE (offset >= - MAX_OFFSET_UNCONDITIONAL_BRANCH_THUMB && offset < MAX_OFFSET_UNCONDITIONAL_BRANCH_THUMB);
+
+ if (offset >= 0)
+ {
+ offset = offset >> 1;
+ }
+ else
+ {
+ offset = ((MAX_OFFSET_UNCONDITIONAL_BRANCH_THUMB + offset) >> 1) | 0x400;
+ }
+
+ *pInstr = 0xE000 | offset;
+}
+
+//------------------------------------------------------------------------
+inline int16_t decodeUnconditionalBranchThumb(LPBYTE pBuffer)
+{
+ LIMITED_METHOD_CONTRACT;
+
+ uint16_t *pInstr = (uint16_t *) pBuffer;
+
+ int16_t offset = (~0xE000) & (*pInstr);
+
+ if ((offset & 0x400) == 0)
+ {
+ offset = offset << 1;
+ }
+ else
+ {
+ offset = (~0x400) & offset;
+ offset = (offset << 1) - MAX_OFFSET_UNCONDITIONAL_BRANCH_THUMB;
+ }
+
+ // offset from -2KB to +2KB
+ _ASSERTE (offset >= - MAX_OFFSET_UNCONDITIONAL_BRANCH_THUMB && offset < MAX_OFFSET_UNCONDITIONAL_BRANCH_THUMB);
+
+ return offset;
+}
+
+//------------------------------------------------------------------------
inline void emitJump(LPBYTE pBuffer, LPVOID target)
{
LIMITED_METHOD_CONTRACT;
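
emitUnconditionalBranchThumb and decodeUnconditionalBranchThumb above implement the 16-bit Thumb unconditional branch (encoding T2: 0xE000 | imm11, a signed 11-bit halfword offset with the sign in bit 10). The following self-contained round-trip check mirrors their logic; it is a sketch, not runtime code.

// Sketch: round-trips the 16-bit Thumb unconditional branch encoding the same
// way emitUnconditionalBranchThumb/decodeUnconditionalBranchThumb do.
#include <cassert>
#include <cstdint>

static const int MAX_OFFSET_UNCONDITIONAL_BRANCH_THUMB = 2048;

static uint16_t encodeB(int16_t offset)
{
    assert(offset >= -MAX_OFFSET_UNCONDITIONAL_BRANCH_THUMB &&
           offset <  MAX_OFFSET_UNCONDITIONAL_BRANCH_THUMB);

    uint16_t imm11;
    if (offset >= 0)
        imm11 = offset >> 1;                                                      // positive: halfword count
    else
        imm11 = ((MAX_OFFSET_UNCONDITIONAL_BRANCH_THUMB + offset) >> 1) | 0x400;  // negative: set sign bit 10

    return 0xE000 | imm11;
}

static int16_t decodeB(uint16_t instr)
{
    int16_t offset = instr & ~0xE000;
    if (offset & 0x400)                                    // sign bit set: negative offset
        offset = ((offset & ~0x400) << 1) - MAX_OFFSET_UNCONDITIONAL_BRANCH_THUMB;
    else
        offset <<= 1;
    return offset;
}

int main()
{
    for (int16_t off = -2048; off < 2048; off += 2)        // branch offsets are halfword aligned
        assert(decodeB(encodeB(off)) == off);
    return 0;
}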
diff --git a/src/vm/arm/stubs.cpp b/src/vm/arm/stubs.cpp
index f1ba278ada..3088761f0b 100644
--- a/src/vm/arm/stubs.cpp
+++ b/src/vm/arm/stubs.cpp
@@ -1333,6 +1333,13 @@ BOOL DoesSlotCallPrestub(PCODE pCode)
{
PTR_WORD pInstr = dac_cast<PTR_WORD>(PCODEToPINSTR(pCode));
+#ifdef HAS_COMPACT_ENTRYPOINTS
+ if (MethodDescChunk::GetMethodDescFromCompactEntryPoint(pCode, TRUE) != NULL)
+ {
+ return TRUE;
+ }
+#endif // HAS_COMPACT_ENTRYPOINTS
+
// FixupPrecode
if (pInstr[0] == 0x46fc && // // mov r12, pc
pInstr[1] == 0xf8df &&
diff --git a/src/vm/class.h b/src/vm/class.h
index 6c74377012..e3ec0ba166 100644
--- a/src/vm/class.h
+++ b/src/vm/class.h
@@ -2502,6 +2502,17 @@ inline PCODE GetPreStubEntryPoint()
return GetEEFuncEntryPoint(ThePreStub);
}
+#if defined(HAS_COMPACT_ENTRYPOINTS) && defined(_TARGET_ARM_)
+
+EXTERN_C void STDCALL ThePreStubCompactARM();
+
+inline PCODE GetPreStubCompactARMEntryPoint()
+{
+ return GetEEFuncEntryPoint(ThePreStubCompactARM);
+}
+
+#endif // defined(HAS_COMPACT_ENTRYPOINTS) && defined(_TARGET_ARM_)
+
PCODE TheUMThunkPreStub();
PCODE TheVarargNDirectStub(BOOL hasRetBuffArg);
diff --git a/src/vm/method.cpp b/src/vm/method.cpp
index 77a6a0d37f..34ae6d9489 100644
--- a/src/vm/method.cpp
+++ b/src/vm/method.cpp
@@ -4571,6 +4571,35 @@ c_CentralJumpCode = {
};
#include <poppack.h>
+#elif defined(_TARGET_ARM_)
+
+#include <pshpack1.h>
+struct CentralJumpCode {
+ BYTE m_ldrPC[4];
+ BYTE m_short[2];
+ MethodDescChunk *m_pChunk;
+ PCODE m_target;
+
+ inline void Setup(PCODE target, MethodDescChunk *pChunk) {
+ WRAPPER_NO_CONTRACT;
+
+ m_target = target;
+ m_pChunk = pChunk;
+ }
+
+ inline BOOL CheckTarget(TADDR target) {
+ WRAPPER_NO_CONTRACT;
+ return ((TADDR)m_target == target);
+ }
+}
+c_CentralJumpCode = {
+ { 0xDF, 0xF8, 0x08, 0xF0 }, // ldr pc, =pTarget
+ { 0x00, 0x00 }, // short offset for alignment
+ 0, // pChunk
+ 0 // pTarget
+};
+#include <poppack.h>
+
#else
#error Unsupported platform
#endif
@@ -4580,10 +4609,92 @@ typedef DPTR(struct CentralJumpCode) PTR_CentralJumpCode;
static_assert_no_msg((TEP_CENTRAL_JUMP_SIZE & 1) == 0);
#define TEP_ENTRY_SIZE 4
+
+#ifdef _TARGET_ARM_
+
+#define TEP_HALF_ENTRY_SIZE (TEP_ENTRY_SIZE / 2)
+
+// Compact entry point on arm consists of two thumb instructions:
+// mov r12, pc
+// b CentralJumpCode
+
+// First instruction 0x46fc
+#define TEP_ENTRY_INSTR1_BYTE1 0xFC
+#define TEP_ENTRY_INSTR1_BYTE2 0x46
+
+// Mask for unconditional branch opcode
+#define TEP_ENTRY_INSTR2_MASK1 0xE0
+
+// Mask for opcode
+#define TEP_ENTRY_INSTR2_MASK2 0xF8
+
+// Bit used for ARM to identify compact entry points
+#define COMPACT_ENTRY_ARM_CODE 0x2
+
+/* static */ int MethodDescChunk::GetCompactEntryPointMaxCount ()
+{
+ LIMITED_METHOD_DAC_CONTRACT;
+
+ return MAX_OFFSET_UNCONDITIONAL_BRANCH_THUMB / TEP_ENTRY_SIZE;
+}
+
+// Get offset from the start of current compact entry point to the CentralJumpCode
+static uint16_t DecodeOffsetFromBranchToCentralJump (uint16_t instr)
+{
+ int16_t offset = decodeUnconditionalBranchThumb ((LPBYTE) &instr);
+
+ offset += PC_REG_RELATIVE_OFFSET + TEP_HALF_ENTRY_SIZE;
+
+ _ASSERTE (offset >= TEP_ENTRY_SIZE && (offset % TEP_ENTRY_SIZE == 0));
+
+ return (uint16_t) offset;
+}
+
+#ifndef DACCESS_COMPILE
+
+// Encode branch instruction to central jump for current compact entry point
+static uint16_t EncodeBranchToCentralJump (int16_t offset)
+{
+ _ASSERTE (offset >= 0 && (offset % TEP_ENTRY_SIZE == 0));
+
+ offset += TEP_HALF_ENTRY_SIZE - PC_REG_RELATIVE_OFFSET;
+
+ uint16_t instr;
+ emitUnconditionalBranchThumb ((LPBYTE) &instr, offset);
+
+ return instr;
+}
+
+#endif // DACCESS_COMPILE
+
+#else // _TARGET_ARM_
+
#define TEP_MAX_BEFORE_INDEX (1 + (127 / TEP_ENTRY_SIZE))
#define TEP_MAX_BLOCK_INDEX (TEP_MAX_BEFORE_INDEX + (128 - TEP_CENTRAL_JUMP_SIZE) / TEP_ENTRY_SIZE)
#define TEP_FULL_BLOCK_SIZE (TEP_MAX_BLOCK_INDEX * TEP_ENTRY_SIZE + TEP_CENTRAL_JUMP_SIZE)
+#endif // _TARGET_ARM_
+
+BOOL MethodDescChunk::IsCompactEntryPointAtAddress(PCODE addr)
+{
+ LIMITED_METHOD_DAC_CONTRACT;
+
+#if defined(_TARGET_X86_) || defined(_TARGET_AMD64_)
+
+ // Compact entrypoints start at odd addresses
+ return (addr & 1) != 0;
+
+#elif defined(_TARGET_ARM_)
+
+ // Compact entrypoints start at odd addresses (thumb) with second bit set to 1
+ uint8_t compactEntryPointMask = THUMB_CODE | COMPACT_ENTRY_ARM_CODE;
+ return (addr & compactEntryPointMask) == compactEntryPointMask;
+
+#else
+ #error Unsupported platform
+#endif
+}
+
//*******************************************************************************
/* static */ MethodDesc* MethodDescChunk::GetMethodDescFromCompactEntryPoint(PCODE addr, BOOL fSpeculative /*=FALSE*/)
{
@@ -4597,18 +4708,39 @@ static_assert_no_msg((TEP_CENTRAL_JUMP_SIZE & 1) == 0);
// Always do consistency check in debug
if (fSpeculative INDEBUG(|| TRUE))
{
+#ifdef _TARGET_ARM_
+ if (!IsCompactEntryPointAtAddress(addr))
+#else // _TARGET_ARM_
if ((addr & 3) != 1 ||
*PTR_BYTE(addr) != X86_INSTR_MOV_AL ||
*PTR_BYTE(addr+2) != X86_INSTR_JMP_REL8)
+#endif // _TARGET_ARM_
{
if (fSpeculative) return NULL;
_ASSERTE(!"Unexpected code in temporary entrypoint");
}
}
+#ifdef _TARGET_ARM_
+
+ // On ARM compact entry points are thumb
+ _ASSERTE ((addr & THUMB_CODE) != 0);
+ addr = addr - THUMB_CODE;
+
+ // Get offset for CentralJumpCode from current compact entry point
+ PTR_UINT16 pBranchInstr = (PTR_UINT16(addr)) + 1;
+ uint16_t offset = DecodeOffsetFromBranchToCentralJump (*pBranchInstr);
+
+ TADDR centralJump = addr + offset;
+ int index = (centralJump - addr - TEP_ENTRY_SIZE) / TEP_ENTRY_SIZE;
+
+#else // _TARGET_ARM_
+
int index = *PTR_BYTE(addr+1);
TADDR centralJump = addr + 4 + *PTR_SBYTE(addr+3);
+#endif // _TARGET_ARM_
+
CentralJumpCode* pCentralJumpCode = PTR_CentralJumpCode(centralJump);
// Always do consistency check in debug
@@ -4625,10 +4757,42 @@ static_assert_no_msg((TEP_CENTRAL_JUMP_SIZE & 1) == 0);
}
}
+#ifdef _TARGET_ARM_
+
+ _ASSERTE_IMPL(pCentralJumpCode->CheckTarget(GetPreStubCompactARMEntryPoint()));
+
+#else // _TARGET_ARM_
+
_ASSERTE_IMPL(pCentralJumpCode->CheckTarget(GetPreStubEntryPoint()));
+
+#endif // _TARGET_ARM_
}
+#ifdef _TARGET_ARM_
+ // Go through all MethodDesc in MethodDescChunk and find the one with the required index
+ PTR_MethodDescChunk pChunk = *((DPTR(PTR_MethodDescChunk))(centralJump + offsetof(CentralJumpCode, m_pChunk)));
+ TADDR pMD = PTR_HOST_TO_TADDR (pChunk->GetFirstMethodDesc ());
+
+ _ASSERTE (index >= 0 && index < ((int) pChunk->GetCount ()));
+
+ index = ((int) pChunk->GetCount ()) - 1 - index;
+
+ SIZE_T totalSize = 0;
+ int curIndex = 0;
+
+ while (index != curIndex)
+ {
+ SIZE_T sizeCur = (PTR_MethodDesc (pMD))->SizeOf ();
+ totalSize += sizeCur;
+
+ pMD += sizeCur;
+ ++curIndex;
+ }
+
+ return PTR_MethodDesc (pMD);
+#else // _TARGET_ARM_
return PTR_MethodDesc((TADDR)pCentralJumpCode->m_pBaseMD + index * MethodDesc::ALIGNMENT);
+#endif // _TARGET_ARM_
}
//*******************************************************************************
@@ -4636,11 +4800,19 @@ SIZE_T MethodDescChunk::SizeOfCompactEntryPoints(int count)
{
LIMITED_METHOD_DAC_CONTRACT;
+#ifdef _TARGET_ARM_
+
+ return COMPACT_ENTRY_ARM_CODE + count * TEP_ENTRY_SIZE + TEP_CENTRAL_JUMP_SIZE;
+
+#else // _TARGET_ARM_
+
int fullBlocks = count / TEP_MAX_BLOCK_INDEX;
int remainder = count % TEP_MAX_BLOCK_INDEX;
return 1 + (fullBlocks * TEP_FULL_BLOCK_SIZE) +
(remainder * TEP_ENTRY_SIZE) + ((remainder != 0) ? TEP_CENTRAL_JUMP_SIZE : 0);
+
+#endif // _TARGET_ARM_
}
#ifndef DACCESS_COMPILE
@@ -4657,16 +4829,37 @@ TADDR MethodDescChunk::AllocateCompactEntryPoints(LoaderAllocator *pLoaderAlloca
TADDR temporaryEntryPoints = (TADDR)pamTracker->Track(pLoaderAllocator->GetPrecodeHeap()->AllocAlignedMem(size, sizeof(TADDR)));
+#ifdef _TARGET_ARM_
+ BYTE* p = (BYTE*)temporaryEntryPoints + COMPACT_ENTRY_ARM_CODE;
+ int relOffset = count * TEP_ENTRY_SIZE - TEP_ENTRY_SIZE; // relative offset for the short jump
+
+ _ASSERTE (relOffset < MAX_OFFSET_UNCONDITIONAL_BRANCH_THUMB);
+#else // _TARGET_ARM_
// make the temporary entrypoints unaligned, so they are easy to identify
BYTE* p = (BYTE*)temporaryEntryPoints + 1;
+ int indexInBlock = TEP_MAX_BLOCK_INDEX; // recompute relOffset in first iteration
+ int relOffset = 0; // relative offset for the short jump
+#endif // _TARGET_ARM_
- int indexInBlock = TEP_MAX_BLOCK_INDEX; // recompute relOffset in first iteration
- int relOffset = 0; // relative offset for the short jump
MethodDesc * pBaseMD = 0; // index of the start of the block
MethodDesc * pMD = GetFirstMethodDesc();
for (int index = 0; index < count; index++)
{
+#ifdef _TARGET_ARM_
+
+ uint8_t *pMovInstrByte1 = (uint8_t *)p;
+ uint8_t *pMovInstrByte2 = (uint8_t *)p+1;
+ uint16_t *pBranchInstr = ((uint16_t *)p)+1;
+
+ *pMovInstrByte1 = TEP_ENTRY_INSTR1_BYTE1;
+ *pMovInstrByte2 = TEP_ENTRY_INSTR1_BYTE2;
+ *pBranchInstr = EncodeBranchToCentralJump ((int16_t) relOffset);
+
+ p += TEP_ENTRY_SIZE;
+
+#else // _TARGET_ARM_
+
if (indexInBlock == TEP_MAX_BLOCK_INDEX)
{
relOffset = (min(count - index, TEP_MAX_BEFORE_INDEX) - 1) * TEP_ENTRY_SIZE;
@@ -4698,14 +4891,28 @@ TADDR MethodDescChunk::AllocateCompactEntryPoints(LoaderAllocator *pLoaderAlloca
relOffset -= TEP_CENTRAL_JUMP_SIZE;
}
- relOffset -= TEP_ENTRY_SIZE;
indexInBlock++;
+#endif // _TARGET_ARM_
+
+ relOffset -= TEP_ENTRY_SIZE;
pMD = (MethodDesc *)((BYTE *)pMD + pMD->SizeOf());
}
+#ifdef _TARGET_ARM_
+
+ CentralJumpCode* pCode = (CentralJumpCode*)p;
+ memcpy(pCode, &c_CentralJumpCode, TEP_CENTRAL_JUMP_SIZE);
+ pCode->Setup (GetPreStubCompactARMEntryPoint(), this);
+
+ _ASSERTE(p + TEP_CENTRAL_JUMP_SIZE == (BYTE*)temporaryEntryPoints + size);
+
+#else // _TARGET_ARM_
+
_ASSERTE(p == (BYTE*)temporaryEntryPoints + size);
+#endif // _TARGET_ARM_
+
ClrFlushInstructionCache((LPVOID)temporaryEntryPoints, size);
SetHasCompactEntryPoints();
@@ -4725,11 +4932,19 @@ PCODE MethodDescChunk::GetTemporaryEntryPoint(int index)
#ifdef HAS_COMPACT_ENTRYPOINTS
if (HasCompactEntryPoints())
{
+#ifdef _TARGET_ARM_
+
+ return GetTemporaryEntryPoints() + COMPACT_ENTRY_ARM_CODE + THUMB_CODE + index * TEP_ENTRY_SIZE;
+
+#else // _TARGET_ARM_
+
int fullBlocks = index / TEP_MAX_BLOCK_INDEX;
int remainder = index % TEP_MAX_BLOCK_INDEX;
return GetTemporaryEntryPoints() + 1 + (fullBlocks * TEP_FULL_BLOCK_SIZE) +
(remainder * TEP_ENTRY_SIZE) + ((remainder >= TEP_MAX_BEFORE_INDEX) ? TEP_CENTRAL_JUMP_SIZE : 0);
+
+#endif // _TARGET_ARM_
}
#endif // HAS_COMPACT_ENTRYPOINTS
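
A worked example of the index recovery GetMethodDescFromCompactEntryPoint performs on ARM: entry i lives at blockStart + 2 + i*4, its branch (after undoing the pc-relative and halfword adjustments) gives the distance to the CentralJumpCode sitting right after the last entry, and from that distance the entry index — and hence the i-th MethodDesc in the chunk — is recovered. The snippet below just replays that arithmetic with plain integers; names mirror the patch but nothing here is runtime code.

// Sketch: replays the ARM compact-entry-point index arithmetic from
// AllocateCompactEntryPoints / GetMethodDescFromCompactEntryPoint with plain
// integers (no real MethodDescs involved).
#include <cassert>

int main()
{
    const int TEP_ENTRY_SIZE         = 4;
    const int TEP_HALF_ENTRY_SIZE    = TEP_ENTRY_SIZE / 2;
    const int PC_REG_RELATIVE_OFFSET = 4;   // "mov r12, pc" reads pc as instruction address + 4
    const int COMPACT_ENTRY_ARM_CODE = 2;

    int count = 7;                          // methods in the chunk
    for (int i = 0; i < count; i++)
    {
        // What AllocateCompactEntryPoints encodes into entry i's branch:
        int relOffset = (count - 1 - i) * TEP_ENTRY_SIZE;                          // end of entry i -> CentralJumpCode
        int encoded   = relOffset + TEP_HALF_ENTRY_SIZE - PC_REG_RELATIVE_OFFSET;  // EncodeBranchToCentralJump

        // What GetMethodDescFromCompactEntryPoint recovers:
        int offset      = encoded + PC_REG_RELATIVE_OFFSET + TEP_HALF_ENTRY_SIZE;  // DecodeOffsetFromBranchToCentralJump
        int entryAddr   = COMPACT_ENTRY_ARM_CODE + i * TEP_ENTRY_SIZE;             // relative to the allocation
        int centralJump = entryAddr + offset;
        int index       = (centralJump - entryAddr - TEP_ENTRY_SIZE) / TEP_ENTRY_SIZE;

        assert(centralJump == COMPACT_ENTRY_ARM_CODE + count * TEP_ENTRY_SIZE);    // right past the last entry
        assert(count - 1 - index == i);                                            // walking the chunk lands on MethodDesc i
    }
    return 0;
}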
diff --git a/src/vm/method.hpp b/src/vm/method.hpp
index 9545da2248..3354e5799a 100644
--- a/src/vm/method.hpp
+++ b/src/vm/method.hpp
@@ -2031,23 +2031,18 @@ public:
// direct call to direct jump.
//
// We use (1) for x86 and (2) for 64-bit to get the best performance on each platform.
- //
+ // For ARM (1) is used.
TADDR AllocateCompactEntryPoints(LoaderAllocator *pLoaderAllocator, AllocMemTracker *pamTracker);
static MethodDesc* GetMethodDescFromCompactEntryPoint(PCODE addr, BOOL fSpeculative = FALSE);
static SIZE_T SizeOfCompactEntryPoints(int count);
- static BOOL IsCompactEntryPointAtAddress(PCODE addr)
- {
-#if defined(_TARGET_X86_) || defined(_TARGET_AMD64_)
- // Compact entrypoints start at odd addresses
- LIMITED_METHOD_DAC_CONTRACT;
- return (addr & 1) != 0;
-#else
- #error Unsupported platform
-#endif
- }
+ static BOOL IsCompactEntryPointAtAddress(PCODE addr);
+
+#ifdef _TARGET_ARM_
+ static int GetCompactEntryPointMaxCount ();
+#endif // _TARGET_ARM_
#endif // HAS_COMPACT_ENTRYPOINTS
FORCEINLINE PTR_MethodTable GetMethodTable()
diff --git a/src/vm/precode.cpp b/src/vm/precode.cpp
index 9707b2756b..1daf6e32b8 100644
--- a/src/vm/precode.cpp
+++ b/src/vm/precode.cpp
@@ -525,6 +525,16 @@ TADDR Precode::AllocateTemporaryEntryPoints(MethodDescChunk * pChunk,
// Note that these are just best guesses to save memory. If we guessed wrong,
// we will allocate a new exact type of precode in GetOrCreatePrecode.
BOOL fForcedPrecode = pFirstMD->RequiresStableEntryPoint(count > 1);
+
+#ifdef _TARGET_ARM_
+ if (pFirstMD->RequiresMethodDescCallingConvention(count > 1)
+ || count >= MethodDescChunk::GetCompactEntryPointMaxCount ())
+ {
+ // We do not pass method desc on scratch register
+ fForcedPrecode = TRUE;
+ }
+#endif // _TARGET_ARM_
+
if (!fForcedPrecode && (totalSize > MethodDescChunk::SizeOfCompactEntryPoints(count)))
return NULL;
#endif
diff --git a/src/vm/precode.h b/src/vm/precode.h
index 7dd4cd22f0..8947192482 100644
--- a/src/vm/precode.h
+++ b/src/vm/precode.h
@@ -170,6 +170,11 @@ public:
align = 8;
#endif // _TARGET_X86_ && HAS_FIXUP_PRECODE
+#if defined(_TARGET_ARM_) && defined(HAS_COMPACT_ENTRYPOINTS)
+ // Precodes have to be aligned to allow fast compact entry points check
+ _ASSERTE (align >= sizeof(void*));
+#endif // _TARGET_ARM_ && HAS_COMPACT_ENTRYPOINTS
+
return align;
}
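
The new alignment assertion in precode.h pairs with the COMPACT_ENTRY_ARM_CODE pad in method.cpp: because precodes are at least pointer-aligned, the low two bits of an entry-point address are enough to tell compact entry points (addr & 3 == 3) from precodes (addr & 3 == 1 once the Thumb bit is set). A small illustration with made-up addresses:

// Sketch: the low two bits distinguish ARM compact entry points from precode
// entry points (addresses below are made up).
#include <cassert>
#include <cstdint>

static bool IsCompactEntryPointAtAddress(uintptr_t addr)
{
    const uintptr_t THUMB_CODE             = 0x1;
    const uintptr_t COMPACT_ENTRY_ARM_CODE = 0x2;
    uintptr_t mask = THUMB_CODE | COMPACT_ENTRY_ARM_CODE;
    return (addr & mask) == mask;
}

int main()
{
    uintptr_t block        = 0x10000;               // pointer-aligned allocation
    uintptr_t compactEntry = block + 2 + 0 * 4 + 1; // entry 0: +COMPACT_ENTRY_ARM_CODE, +THUMB_CODE
    uintptr_t precodeEntry = 0x20000 + 1;           // aligned precode, +THUMB_CODE only

    assert(IsCompactEntryPointAtAddress(compactEntry));
    assert(!IsCompactEntryPointAtAddress(precodeEntry));
    return 0;
}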
diff --git a/src/vm/prestub.cpp b/src/vm/prestub.cpp
index 67639e99b2..fccec51bb3 100644
--- a/src/vm/prestub.cpp
+++ b/src/vm/prestub.cpp
@@ -55,6 +55,13 @@
#ifndef DACCESS_COMPILE
EXTERN_C void STDCALL ThePreStub();
+
+#if defined(HAS_COMPACT_ENTRYPOINTS) && defined (_TARGET_ARM_)
+
+EXTERN_C void STDCALL ThePreStubCompactARM();
+
+#endif // defined(HAS_COMPACT_ENTRYPOINTS) && defined (_TARGET_ARM_)
+
EXTERN_C void STDCALL ThePreStubPatch();
//==========================================================================
@@ -1002,6 +1009,21 @@ Stub * MakeInstantiatingStubWorker(MethodDesc *pMD)
}
#endif // defined(FEATURE_SHARE_GENERIC_CODE)
+#if defined (HAS_COMPACT_ENTRYPOINTS) && defined (_TARGET_ARM_)
+
+extern "C" MethodDesc * STDCALL PreStubGetMethodDescForCompactEntryPoint (PCODE pCode)
+{
+ _ASSERTE (pCode >= PC_REG_RELATIVE_OFFSET);
+
+ pCode = (PCODE) (pCode - PC_REG_RELATIVE_OFFSET + THUMB_CODE);
+
+ _ASSERTE (MethodDescChunk::IsCompactEntryPointAtAddress (pCode));
+
+ return MethodDescChunk::GetMethodDescFromCompactEntryPoint(pCode, FALSE);
+}
+
+#endif // defined (HAS_COMPACT_ENTRYPOINTS) && defined (_TARGET_ARM_)
+
//=============================================================================
// This function generates the real code for a method and installs it into
// the methoddesc. Usually ***BUT NOT ALWAYS***, this function runs only once
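
For context on the PC_REG_RELATIVE_OFFSET adjustment above: in Thumb state, `mov r12, pc` reads pc as the address of that instruction plus 4, and ThePreStubCompactARM forwards r12 unchanged, so the helper subtracts 4 and re-adds the Thumb bit to recover the tagged compact entry point address. A sketch with made-up addresses:

// Sketch: how the prestub helper recovers the tagged compact entry point
// address from the value captured by "mov r12, pc" (addresses are made up).
#include <cassert>
#include <cstdint>

int main()
{
    const uintptr_t THUMB_CODE             = 0x1;
    const uintptr_t PC_REG_RELATIVE_OFFSET = 4;   // Thumb: pc reads as instruction address + 4

    uintptr_t entry = 0x10002;                    // start of a compact entry point (bit 1 set, Thumb bit stripped)
    uintptr_t r12   = entry + PC_REG_RELATIVE_OFFSET;             // what the entry's "mov r12, pc" yields

    uintptr_t pCode = r12 - PC_REG_RELATIVE_OFFSET + THUMB_CODE;  // PreStubGetMethodDescForCompactEntryPoint's adjustment
    assert(pCode == (entry | THUMB_CODE));        // addr & 3 == 3, so IsCompactEntryPointAtAddress(pCode) holds
    return 0;
}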