author    Gleb Balykov <g.balykov@samsung.com>    2020-09-14 13:30:50 +0300
committer Alexander Soldatov/Platform Lab /SRR/Staff Engineer/Samsung Electronics <soldatov.a@samsung.com>    2020-09-21 20:04:27 +0300
commit    676b3e46459a5138d05ac31bb697a54a8a8f685c (patch)
tree      9a5d32deb41da42d98bf125b6a1ec9dee8359ce8 /src
parent    7a0186fe2611e00cd8019adced7af3a269e2e7f4 (diff)
[Tizen] Add RelativeFixupPrecode for arm64, which replaces FixupPrecode in FNV images
(submit/tizen/20200921.230336, accepted/tizen/unified/20200922.034817)
Diffstat (limited to 'src')
-rw-r--r--src/debug/daccess/nidump.cpp62
-rw-r--r--src/inc/daccess.h1
-rw-r--r--src/inc/jithelpers.h8
-rw-r--r--src/vm/arm64/asmconstants.h29
-rw-r--r--src/vm/arm64/asmhelpers.S29
-rw-r--r--src/vm/arm64/asmhelpers.asm30
-rw-r--r--src/vm/arm64/cgencpu.h120
-rw-r--r--src/vm/arm64/stubs.cpp123
-rw-r--r--src/vm/method.cpp11
-rw-r--r--src/vm/precode.cpp126
-rw-r--r--src/vm/precode.h31
-rw-r--r--src/vm/prestub.cpp8
-rw-r--r--src/vm/stubmgr.cpp6
13 files changed, 537 insertions, 47 deletions
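
For orientation before the per-file hunks: the existing FixupPrecode stores an absolute m_pTarget, which the NGen fixup code patches as an absolute pointer, while the new RelativeFixupPrecode stores a self-relative target offset and a self-relative MethodDescChunk base slot (fixed up as IMAGE_REL_BASED_RELPTR), so those fields survive image relocation unchanged. The C++ sketch below models that address arithmetic with plain integers; the field names and the 32-byte/8-byte constants mirror the definitions added to src/vm/arm64/cgencpu.h in this patch, while the model type itself and the concrete addresses in main() are invented purely for illustration.

#include <cstdint>
#include <cstdio>

// Standalone model of RelativeFixupPrecode's address arithmetic (illustrative
// only; not the runtime type).
struct RelativeFixupPrecodeModel
{
    uint8_t m_MethodDescChunkIndex;
    uint8_t m_PrecodeChunkIndex;
    int64_t m_pTargetOffset;   // self-relative offset to the call target

    // GetTarget(): rebuild the target by adding the stored offset to the
    // precode's own address; no absolute pointer needs relocation.
    uint64_t GetTarget(uint64_t self) const
    {
        return self + (uint64_t)m_pTargetOffset;
    }

    // GetBase(): the shared base slot sits right after the last precode of the
    // chunk; sizeof(RelativeFixupPrecode) is 32 on arm64.
    uint64_t GetBase(uint64_t self) const
    {
        return self + (uint64_t)(m_PrecodeChunkIndex + 1) * 32;
    }

    // GetMethodDesc(): the base slot stores a self-relative offset to the
    // MethodDescChunk; MethodDesc::ALIGNMENT is 8 (1 << 3) on arm64.
    uint64_t GetMethodDesc(uint64_t self, int64_t baseSlotValue) const
    {
        return GetBase(self) + (uint64_t)baseSlotValue
                             + (uint64_t)m_MethodDescChunkIndex * 8;
    }
};

int main()
{
    // Invented addresses: the precode, the thunk it initially points at,
    // and the MethodDesc it belongs to.
    uint64_t precodeAddr = 0x7f0000001000;
    uint64_t thunkAddr   = 0x7f0000020000; // stands in for PrecodeRelativeFixupThunk
    uint64_t methodDesc  = 0x7f0000003010;

    RelativeFixupPrecodeModel p = {};
    p.m_PrecodeChunkIndex    = 2; // two more precodes between this one and the base slot
    p.m_MethodDescChunkIndex = 3;
    p.m_pTargetOffset        = (int64_t)(thunkAddr - precodeAddr);

    // What RelativeFixupPrecode::Init stores in the base slot:
    // pMD - (chunkIndex * ALIGNMENT) - GetBase()
    int64_t baseSlot = (int64_t)(methodDesc - p.m_MethodDescChunkIndex * 8
                                            - p.GetBase(precodeAddr));

    printf("target     = 0x%llx\n", (unsigned long long)p.GetTarget(precodeAddr));
    printf("methoddesc = 0x%llx\n", (unsigned long long)p.GetMethodDesc(precodeAddr, baseSlot));
    return 0;
}
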
diff --git a/src/debug/daccess/nidump.cpp b/src/debug/daccess/nidump.cpp
index c750969172..7812bd72f0 100644
--- a/src/debug/daccess/nidump.cpp
+++ b/src/debug/daccess/nidump.cpp
@@ -3486,8 +3486,16 @@ size_t NativeImageDumper::TranslateSymbol(IXCLRDisassemblySupport *dis,
#endif // HAS_NDIRECT_IMPORT_PRECODE
#ifdef HAS_FIXUP_PRECODE
case PRECODE_FIXUP:
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ _ASSERTE(!"unreachable"); break;
+#else // HAS_RELATIVE_FIXUP_PRECODE
precodeName = "FixupPrecode"; break;
+#endif // HAS_RELATIVE_FIXUP_PRECODE
#endif // HAS_FIXUP_PRECODE
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ case PRECODE_RELATIVE_FIXUP:
+ precodeName = "RelativeFixupPrecode"; break;
+#endif // HAS_RELATIVE_FIXUP_PRECODE
#ifdef HAS_THISPTR_RETBUF_PRECODE
case PRECODE_THISPTR_RETBUF:
precodeName = "ThisPtrRetBufPrecode"; break;
@@ -7507,6 +7515,9 @@ void NativeImageDumper::DumpPrecode( PTR_Precode precode, PTR_Module module )
#endif
#ifdef HAS_FIXUP_PRECODE
case PRECODE_FIXUP:
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ _ASSERTE(!"unreachable"); break;
+#else // HAS_RELATIVE_FIXUP_PRECODE
IF_OPT_AND(PRECODES, METHODDESCS)
{
PTR_FixupPrecode p( precode->AsFixupPrecode() );
@@ -7558,7 +7569,56 @@ void NativeImageDumper::DumpPrecode( PTR_Precode precode, PTR_Module module )
DisplayEndStructure( ALWAYS ); //FixupPrecode
}
break;
-#endif
+#endif // HAS_RELATIVE_FIXUP_PRECODE
+#endif // HAS_FIXUP_PRECODE
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ case PRECODE_RELATIVE_FIXUP:
+ IF_OPT_AND(PRECODES, METHODDESCS)
+ {
+ PTR_RelativeFixupPrecode p( precode->AsRelativeFixupPrecode() );
+ DisplayStartStructure( "RelativeFixupPrecode",
+ DPtrToPreferredAddr(p),
+ sizeof(*p),
+ ALWAYS );
+ PTR_MethodDesc precodeMD(p->GetMethodDesc());
+ {
+ DisplayWriteFieldInt( m_MethodDescChunkIndex,
+ p->m_MethodDescChunkIndex, RelativeFixupPrecode,
+ ALWAYS );
+ DisplayWriteFieldInt( m_PrecodeChunkIndex,
+ p->m_PrecodeChunkIndex, RelativeFixupPrecode,
+ ALWAYS );
+ if( p->m_PrecodeChunkIndex == 0 )
+ {
+ //dump the location of the Base
+ DisplayWriteElementAddress( "PrecodeChunkBase",
+ DataPtrToDisplay(p->GetBase()),
+ sizeof(void*), ALWAYS );
+ }
+ //Make sure I align up if there is no code slot to make
+ //sure that I get the padding
+ TADDR mdPtrStart = p->GetBase()
+ + (p->m_MethodDescChunkIndex * MethodDesc::ALIGNMENT);
+ TADDR mdPtrEnd = ALIGN_UP( mdPtrStart + sizeof(MethodDesc*),
+ 8 );
+ CoverageRead( mdPtrStart, (ULONG32)(mdPtrEnd - mdPtrStart) );
+ TADDR precodeMDSlot = p->GetBase()
+ + p->m_MethodDescChunkIndex * MethodDesc::ALIGNMENT;
+ DoWriteFieldMethodDesc( "MethodDesc",
+ (DWORD)(precodeMDSlot - PTR_TO_TADDR(p)),
+ sizeof(TADDR), precodeMD );
+ }
+ TADDR target = p->GetTarget();
+ DisplayWriteElementPointer("Target",
+ DataPtrToDisplay(target),
+ ALWAYS );
+ /* REVISIT_TODO Thu 01/05/2006
+ * dump slot with offset if it is here
+ */
+ DisplayEndStructure( ALWAYS ); //RelativeFixupPrecode
+ }
+ break;
+#endif // HAS_RELATIVE_FIXUP_PRECODE
#ifdef HAS_THISPTR_RETBUF_PRECODE
case PRECODE_THISPTR_RETBUF:
DISPLAY_PRECODE(ThisPtrRetBufPrecode); break;
diff --git a/src/inc/daccess.h b/src/inc/daccess.h
index 802df17d0b..eb0badffbd 100644
--- a/src/inc/daccess.h
+++ b/src/inc/daccess.h
@@ -620,6 +620,7 @@ typedef struct _DacGlobals
ULONG fn__ThePreStubPatchLabel;
ULONG fn__PrecodeFixupThunk;
+ ULONG fn__PrecodeRelativeFixupThunk;
#ifdef FEATURE_PREJIT
ULONG fn__StubDispatchFixupStub;
ULONG fn__StubDispatchFixupPatchLabel;
diff --git a/src/inc/jithelpers.h b/src/inc/jithelpers.h
index d7d501f382..59d938e0ba 100644
--- a/src/inc/jithelpers.h
+++ b/src/inc/jithelpers.h
@@ -291,10 +291,14 @@
JITHELPER(CORINFO_HELP_EE_PRESTUB, ThePreStub, CORINFO_HELP_SIG_NO_ALIGN_STUB)
#if defined(HAS_FIXUP_PRECODE)
+#if defined(HAS_RELATIVE_FIXUP_PRECODE)
+ JITHELPER(CORINFO_HELP_EE_PRECODE_FIXUP, PrecodeRelativeFixupThunk, CORINFO_HELP_SIG_NO_ALIGN_STUB)
+#else // HAS_RELATIVE_FIXUP_PRECODE
JITHELPER(CORINFO_HELP_EE_PRECODE_FIXUP, PrecodeFixupThunk, CORINFO_HELP_SIG_NO_ALIGN_STUB)
-#else
+#endif // HAS_RELATIVE_FIXUP_PRECODE
+#else // HAS_FIXUP_PRECODE
JITHELPER(CORINFO_HELP_EE_PRECODE_FIXUP, NULL, CORINFO_HELP_SIG_NO_ALIGN_STUB)
-#endif
+#endif // HAS_FIXUP_PRECODE
JITHELPER(CORINFO_HELP_EE_PINVOKE_FIXUP, NDirectImportThunk, CORINFO_HELP_SIG_NO_ALIGN_STUB)
diff --git a/src/vm/arm64/asmconstants.h b/src/vm/arm64/asmconstants.h
index 3bb4f6494f..84b92fa5a1 100644
--- a/src/vm/arm64/asmconstants.h
+++ b/src/vm/arm64/asmconstants.h
@@ -176,19 +176,30 @@ ASMCONSTANTS_C_ASSERT(CONTEXT_Pc == offsetof(T_CONTEXT,Pc))
ASMCONSTANTS_C_ASSERT(SIZEOF__FaultingExceptionFrame == sizeof(FaultingExceptionFrame));
ASMCONSTANTS_C_ASSERT(FaultingExceptionFrame__m_fFilterExecuted == offsetof(FaultingExceptionFrame, m_fFilterExecuted));
-#define SIZEOF__FixupPrecode 24
-#define Offset_PrecodeChunkIndex 15
-#define Offset_MethodDescChunkIndex 14
-#define MethodDesc_ALIGNMENT_SHIFT 3
-#define FixupPrecode_ALIGNMENT_SHIFT_1 3
-#define FixupPrecode_ALIGNMENT_SHIFT_2 4
+#define MethodDesc_ALIGNMENT_SHIFT 3
+ASMCONSTANTS_C_ASSERT(MethodDesc_ALIGNMENT_SHIFT == MethodDesc::ALIGNMENT_SHIFT);
+
+#define SIZEOF__FixupPrecode 24
+#define Offset_FixupPrecodeChunkIndex 15
+#define Offset_FixupPrecodeMethodDescChunkIndex 14
+#define FixupPrecode_ALIGNMENT_SHIFT_1 3
+#define FixupPrecode_ALIGNMENT_SHIFT_2 4
ASMCONSTANTS_C_ASSERT(SIZEOF__FixupPrecode == sizeof(FixupPrecode));
-ASMCONSTANTS_C_ASSERT(Offset_PrecodeChunkIndex == offsetof(FixupPrecode, m_PrecodeChunkIndex));
-ASMCONSTANTS_C_ASSERT(Offset_MethodDescChunkIndex == offsetof(FixupPrecode, m_MethodDescChunkIndex));
-ASMCONSTANTS_C_ASSERT(MethodDesc_ALIGNMENT_SHIFT == MethodDesc::ALIGNMENT_SHIFT);
+ASMCONSTANTS_C_ASSERT(Offset_FixupPrecodeChunkIndex == offsetof(FixupPrecode, m_PrecodeChunkIndex));
+ASMCONSTANTS_C_ASSERT(Offset_FixupPrecodeMethodDescChunkIndex == offsetof(FixupPrecode, m_MethodDescChunkIndex));
ASMCONSTANTS_C_ASSERT((1<<FixupPrecode_ALIGNMENT_SHIFT_1) + (1<<FixupPrecode_ALIGNMENT_SHIFT_2) == sizeof(FixupPrecode));
+#define SIZEOF__RelativeFixupPrecode 32
+#define Offset_RelativeFixupPrecodeChunkIndex 17
+#define Offset_RelativeFixupPrecodeMethodDescChunkIndex 16
+#define RelativeFixupPrecode_ALIGNMENT_SHIFT 4
+
+ASMCONSTANTS_C_ASSERT(SIZEOF__RelativeFixupPrecode == sizeof(RelativeFixupPrecode));
+ASMCONSTANTS_C_ASSERT(Offset_RelativeFixupPrecodeChunkIndex == offsetof(RelativeFixupPrecode, m_PrecodeChunkIndex));
+ASMCONSTANTS_C_ASSERT(Offset_RelativeFixupPrecodeMethodDescChunkIndex == offsetof(RelativeFixupPrecode, m_MethodDescChunkIndex));
+ASMCONSTANTS_C_ASSERT((1<<(RelativeFixupPrecode_ALIGNMENT_SHIFT+1)) == sizeof(RelativeFixupPrecode));
+
#ifndef CROSSGEN_COMPILE
#define ResolveCacheElem__target 0x10
#define ResolveCacheElem__pNext 0x18
diff --git a/src/vm/arm64/asmhelpers.S b/src/vm/arm64/asmhelpers.S
index 91aaa5b054..15ec19219b 100644
--- a/src/vm/arm64/asmhelpers.S
+++ b/src/vm/arm64/asmhelpers.S
@@ -142,15 +142,15 @@ NESTED_END NDirectImportThunk, _TEXT
// ------------------------------------------------------------------
// The call in fixup precode initally points to this function.
-// The pupose of this function is to load the MethodDesc and forward the call to prestub.
+// The purpose of this function is to load the MethodDesc and forward the call to prestub.
NESTED_ENTRY PrecodeFixupThunk, _TEXT, NoHandler
// x12 = FixupPrecode *
// On Exit
// x12 = MethodDesc*
// x13, x14 Trashed
// Inline computation done by FixupPrecode::GetMethodDesc()
- ldrb w13, [x12, #Offset_PrecodeChunkIndex] //m_PrecodeChunkIndex
- ldrb w14, [x12, #Offset_MethodDescChunkIndex] // m_MethodDescChunkIndex
+ ldrb w13, [x12, #Offset_FixupPrecodeChunkIndex] //m_PrecodeChunkIndex
+ ldrb w14, [x12, #Offset_FixupPrecodeMethodDescChunkIndex] // m_MethodDescChunkIndex
add x12, x12, w13, uxtw #FixupPrecode_ALIGNMENT_SHIFT_1
add x13, x12, w13, uxtw #FixupPrecode_ALIGNMENT_SHIFT_2
@@ -161,6 +161,29 @@ NESTED_ENTRY PrecodeFixupThunk, _TEXT, NoHandler
NESTED_END PrecodeFixupThunk, _TEXT
// ------------------------------------------------------------------
+// ------------------------------------------------------------------
+// The call in a relative fixup precode initially points to this function.
+// The purpose of this function is to load the MethodDesc and forward the call to prestub.
+NESTED_ENTRY PrecodeRelativeFixupThunk, _TEXT, NoHandler
+ // x11 = RelativeFixupPrecode *
+ // On Exit
+ // x12 = MethodDesc*
+ // x11, x13 Trashed
+ // Inline computation done by RelativeFixupPrecode::GetMethodDesc()
+ ldrb w12, [x11, #Offset_RelativeFixupPrecodeChunkIndex] //m_PrecodeChunkIndex
+ ldrb w13, [x11, #Offset_RelativeFixupPrecodeMethodDescChunkIndex] // m_MethodDescChunkIndex
+
+ add x11, x11, w12, uxtw #RelativeFixupPrecode_ALIGNMENT_SHIFT
+ add x11, x11, w12, uxtw #RelativeFixupPrecode_ALIGNMENT_SHIFT
+ add x12, x11, #SIZEOF__RelativeFixupPrecode // GetBase()
+ ldr x11, [x12] // base
+ add x12, x12, x11
+ add x12, x12, w13, uxtw #MethodDesc_ALIGNMENT_SHIFT
+
+ b ThePreStub
+NESTED_END PrecodeRelativeFixupThunk, _TEXT
+// ------------------------------------------------------------------
+
NESTED_ENTRY ThePreStub, _TEXT, NoHandler
PROLOG_WITH_TRANSITION_BLOCK
diff --git a/src/vm/arm64/asmhelpers.asm b/src/vm/arm64/asmhelpers.asm
index c1f8429489..27e284c0cd 100644
--- a/src/vm/arm64/asmhelpers.asm
+++ b/src/vm/arm64/asmhelpers.asm
@@ -210,7 +210,7 @@ Done
; ------------------------------------------------------------------
; The call in fixup precode initally points to this function.
-; The pupose of this function is to load the MethodDesc and forward the call to prestub.
+; The purpose of this function is to load the MethodDesc and forward the call to prestub.
NESTED_ENTRY PrecodeFixupThunk
; x12 = FixupPrecode *
@@ -218,8 +218,8 @@ Done
; x12 = MethodDesc*
; x13, x14 Trashed
; Inline computation done by FixupPrecode::GetMethodDesc()
- ldrb w13, [x12, #Offset_PrecodeChunkIndex] ; m_PrecodeChunkIndex
- ldrb w14, [x12, #Offset_MethodDescChunkIndex] ; m_MethodDescChunkIndex
+ ldrb w13, [x12, #Offset_FixupPrecodeChunkIndex] ; m_PrecodeChunkIndex
+ ldrb w14, [x12, #Offset_FixupPrecodeMethodDescChunkIndex] ; m_MethodDescChunkIndex
add x12,x12,w13,uxtw #FixupPrecode_ALIGNMENT_SHIFT_1
add x13,x12,w13,uxtw #FixupPrecode_ALIGNMENT_SHIFT_2
@@ -229,6 +229,30 @@ Done
b ThePreStub
NESTED_END
+
+; ------------------------------------------------------------------
+; The call in a relative fixup precode initially points to this function.
+; The purpose of this function is to load the MethodDesc and forward the call to prestub.
+ NESTED_ENTRY PrecodeRelativeFixupThunk
+
+ ; x11 = RelativeFixupPrecode *
+ ; On Exit
+ ; x12 = MethodDesc*
+ ; x11, x13 Trashed
+ ; Inline computation done by RelativeFixupPrecode::GetMethodDesc()
+ ldrb w12, [x11, #Offset_RelativeFixupPrecodeChunkIndex] ; m_PrecodeChunkIndex
+ ldrb w13, [x11, #Offset_RelativeFixupPrecodeMethodDescChunkIndex] ; m_MethodDescChunkIndex
+
+ add x11, x11, w12, uxtw #RelativeFixupPrecode_ALIGNMENT_SHIFT
+ add x11, x11, w12, uxtw #RelativeFixupPrecode_ALIGNMENT_SHIFT
+ add x12, x11, #SIZEOF__RelativeFixupPrecode ; GetBase()
+ ldr x11, [x12] ; base
+ add x12, x12, x11
+ add x12, x12, w13, uxtw #MethodDesc_ALIGNMENT_SHIFT
+
+ b ThePreStub
+
+ NESTED_END
; ------------------------------------------------------------------
NESTED_ENTRY ThePreStub
diff --git a/src/vm/arm64/cgencpu.h b/src/vm/arm64/cgencpu.h
index b30d5026f3..7d01f9d6ff 100644
--- a/src/vm/arm64/cgencpu.h
+++ b/src/vm/arm64/cgencpu.h
@@ -44,6 +44,10 @@ extern PCODE GetPreStubEntryPoint();
#define HAS_FIXUP_PRECODE 1
#define HAS_FIXUP_PRECODE_CHUNKS 1
+#if defined(HAS_FIXUP_PRECODE) && defined(HAS_FIXUP_PRECODE_CHUNKS)
+#define HAS_RELATIVE_FIXUP_PRECODE 1
+#endif
+
// ThisPtrRetBufPrecode one is necessary for closed delegates over static methods with return buffer
#define HAS_THISPTR_RETBUF_PRECODE 1
@@ -561,6 +565,7 @@ struct HijackArgs
};
EXTERN_C VOID STDCALL PrecodeFixupThunk();
+EXTERN_C VOID STDCALL PrecodeRelativeFixupThunk();
// Invalid precode type
struct InvalidPrecode {
@@ -677,9 +682,9 @@ struct FixupPrecode {
// adr x12, #0
// ldr x11, [pc, #12] ; =m_pTarget
// br x11
+ // 2 byte padding
// dcb m_MethodDescChunkIndex
// dcb m_PrecodeChunkIndex
- // 2 byte padding
// dcd m_pTarget
@@ -771,6 +776,119 @@ struct FixupPrecode {
typedef DPTR(FixupPrecode) PTR_FixupPrecode;
+struct RelativeFixupPrecode {
+
+ static const int Type = 0x0B;
+
+ // adr x11, #0 ; registers x11 and x12 are reversed to differentiate from FixupPrecode
+ // ldr x12, #20 ; =m_pTargetOffset, which is relative to "adr x11, #0"
+ // add x12, x11, x12
+ // br x12
+ // dcb m_MethodDescChunkIndex
+ // dcb m_PrecodeChunkIndex
+ // 6 byte padding
+ // dcd m_pTargetOffset
+
+
+ UINT32 m_rgCode[4];
+ BYTE m_MethodDescChunkIndex;
+ BYTE m_PrecodeChunkIndex;
+ BYTE padding[6];
+ TADDR m_pTargetOffset;
+
+ void Init(MethodDesc* pMD, LoaderAllocator *pLoaderAllocator, int iMethodDescChunkIndex = 0, int iPrecodeChunkIndex = 0);
+ void InitCommon()
+ {
+ WRAPPER_NO_CONTRACT;
+ int n = 0;
+
+ m_rgCode[n++] = 0x1000000B; // adr x11, #0
+ m_rgCode[n++] = 0x580000AC; // ldr x12, #20 ; =m_pTargetOffset, which is relative to "adr x11, #0"
+ m_rgCode[n++] = 0x8B0C016C; // add x12, x11, x12
+
+ _ASSERTE((UINT32*)&m_pTargetOffset == &m_rgCode[n + 3]);
+
+ m_rgCode[n++] = 0xD61F0180; // br x12
+
+ _ASSERTE(n == _countof(m_rgCode));
+ }
+
+ TADDR GetBase()
+ {
+ LIMITED_METHOD_CONTRACT;
+ SUPPORTS_DAC;
+
+ return dac_cast<TADDR>(this) + (m_PrecodeChunkIndex + 1) * sizeof(RelativeFixupPrecode);
+ }
+
+ TADDR GetMethodDesc();
+
+ static TADDR GetTargetOffset()
+ {
+ LIMITED_METHOD_DAC_CONTRACT;
+ return 0;
+ }
+
+ PCODE GetTarget()
+ {
+ LIMITED_METHOD_DAC_CONTRACT;
+ return dac_cast<TADDR>(this) + GetTargetOffset() + m_pTargetOffset;
+ }
+
+ void ResetTargetInterlocked()
+ {
+ CONTRACTL
+ {
+ THROWS;
+ GC_NOTRIGGER;
+ }
+ CONTRACTL_END;
+
+ EnsureWritableExecutablePages(&m_pTargetOffset);
+ TADDR addr = (TADDR)GetEEFuncEntryPoint(PrecodeRelativeFixupThunk) - (TADDR)(this) - GetTargetOffset();
+ InterlockedExchange64((LONGLONG*)&m_pTargetOffset, addr);
+ }
+
+ BOOL SetTargetInterlocked(TADDR target, TADDR expected)
+ {
+ CONTRACTL
+ {
+ THROWS;
+ GC_NOTRIGGER;
+ }
+ CONTRACTL_END;
+
+ EnsureWritableExecutablePages(&m_pTargetOffset);
+ TADDR addrExpected = expected - (TADDR)(this) - GetTargetOffset();
+ TADDR addrTarget = target - (TADDR)(this) - GetTargetOffset();
+ return (TADDR)InterlockedCompareExchange64(
+ (LONGLONG*)&m_pTargetOffset, addrTarget, addrExpected) == addrExpected;
+ }
+
+ static BOOL IsRelativeFixupPrecodeByASM(PCODE addr)
+ {
+ PTR_DWORD pInstr = dac_cast<PTR_DWORD>(PCODEToPINSTR(addr));
+ return
+ (pInstr[0] == 0x1000000B) &&
+ (pInstr[1] == 0x580000AC) &&
+ (pInstr[2] == 0x8B0C016C) &&
+ (pInstr[3] == 0xD61F0180);
+ }
+
+#ifdef FEATURE_PREJIT
+ // Partial initialization. Used to save regrouped chunks.
+ void InitForSave(int iPrecodeChunkIndex);
+
+ void Fixup(DataImage *image, MethodDesc * pMD);
+#endif
+
+#ifdef DACCESS_COMPILE
+ void EnumMemoryRegions(CLRDataEnumMemoryFlags flags);
+#endif
+};
+typedef DPTR(RelativeFixupPrecode) PTR_RelativeFixupPrecode;
+
+
// Precode to shuffle this and retbuf for closed delegates over static methods with return buffer
struct ThisPtrRetBufPrecode {
diff --git a/src/vm/arm64/stubs.cpp b/src/vm/arm64/stubs.cpp
index 680557984a..b25dc4d531 100644
--- a/src/vm/arm64/stubs.cpp
+++ b/src/vm/arm64/stubs.cpp
@@ -565,6 +565,18 @@ TADDR FixupPrecode::GetMethodDesc()
return base + (m_MethodDescChunkIndex * MethodDesc::ALIGNMENT);
}
+TADDR RelativeFixupPrecode::GetMethodDesc()
+{
+ LIMITED_METHOD_DAC_CONTRACT;
+
+ // This lookup is also manually inlined in PrecodeRelativeFixupThunk assembly code
+ TADDR baseAddr = GetBase();
+ TADDR base = *PTR_TADDR(baseAddr);
+ if (base == NULL)
+ return NULL;
+ return baseAddr + base + (m_MethodDescChunkIndex * MethodDesc::ALIGNMENT);
+}
+
#ifdef DACCESS_COMPILE
void FixupPrecode::EnumMemoryRegions(CLRDataEnumMemoryFlags flags)
{
@@ -573,6 +585,14 @@ void FixupPrecode::EnumMemoryRegions(CLRDataEnumMemoryFlags flags)
DacEnumMemoryRegion(GetBase(), sizeof(TADDR));
}
+
+void RelativeFixupPrecode::EnumMemoryRegions(CLRDataEnumMemoryFlags flags)
+{
+ SUPPORTS_DAC;
+ DacEnumMemoryRegion(dac_cast<TADDR>(this), sizeof(RelativeFixupPrecode));
+
+ DacEnumMemoryRegion(GetBase(), sizeof(TADDR));
+}
#endif // DACCESS_COMPILE
#ifndef DACCESS_COMPILE
@@ -675,6 +695,39 @@ void FixupPrecode::Init(MethodDesc* pMD, LoaderAllocator *pLoaderAllocator, int
}
}
+void RelativeFixupPrecode::Init(MethodDesc* pMD, LoaderAllocator *pLoaderAllocator, int iMethodDescChunkIndex /*=0*/, int iPrecodeChunkIndex /*=0*/)
+{
+ WRAPPER_NO_CONTRACT;
+
+ InitCommon();
+
+ // Initialize chunk indices only if they are not initialized yet. This is necessary to make MethodDesc::Reset work.
+ if (m_PrecodeChunkIndex == 0)
+ {
+ _ASSERTE(FitsInU1(iPrecodeChunkIndex));
+ m_PrecodeChunkIndex = static_cast<BYTE>(iPrecodeChunkIndex);
+ }
+
+ if (iMethodDescChunkIndex != -1)
+ {
+ if (m_MethodDescChunkIndex == 0)
+ {
+ _ASSERTE(FitsInU1(iMethodDescChunkIndex));
+ m_MethodDescChunkIndex = static_cast<BYTE>(iMethodDescChunkIndex);
+ }
+
+ if (*(void**)GetBase() == NULL)
+ *(void**)GetBase() = (BYTE*)pMD - (iMethodDescChunkIndex * MethodDesc::ALIGNMENT) - GetBase();
+ }
+
+ _ASSERTE(GetMethodDesc() == (TADDR)pMD);
+
+ if (pLoaderAllocator != NULL)
+ {
+ m_pTargetOffset = GetEEFuncEntryPoint(PrecodeRelativeFixupThunk) - (TADDR)this - RelativeFixupPrecode::GetTargetOffset();
+ }
+}
+
#ifdef FEATURE_NATIVE_IMAGE_GENERATION
// Partial initialization. Used to save regrouped chunks.
void FixupPrecode::InitForSave(int iPrecodeChunkIndex)
@@ -688,10 +741,27 @@ void FixupPrecode::InitForSave(int iPrecodeChunkIndex)
// The rest is initialized in code:FixupPrecode::Fixup
}
+// Partial initialization. Used to save regrouped chunks.
+void RelativeFixupPrecode::InitForSave(int iPrecodeChunkIndex)
+{
+ STANDARD_VM_CONTRACT;
+
+ InitCommon();
+
+ _ASSERTE(FitsInU1(iPrecodeChunkIndex));
+ m_PrecodeChunkIndex = static_cast<BYTE>(iPrecodeChunkIndex);
+ // The rest is initialized in code:RelativeFixupPrecode::Fixup
+}
+
void FixupPrecode::Fixup(DataImage *image, MethodDesc * pMD)
{
STANDARD_VM_CONTRACT;
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ // FixupPrecode is not saved to image in this case
+ _ASSERTE(!"FixupPrecode is not saved to NGENed image, RelativeFixupPrecode is instead");
+#else // HAS_RELATIVE_FIXUP_PRECODE
+
// Note that GetMethodDesc() does not return the correct value because of
// regrouping of MethodDescs into hot and cold blocks. That's why the caller
// has to supply the actual MethodDesc
@@ -716,6 +786,44 @@ void FixupPrecode::Fixup(DataImage *image, MethodDesc * pMD)
image->FixupFieldToNode(this, (BYTE *)GetBase() - (BYTE *)this,
pMDChunkNode, sizeof(MethodDescChunk));
}
+#endif // HAS_RELATIVE_FIXUP_PRECODE
+}
+
+void RelativeFixupPrecode::Fixup(DataImage *image, MethodDesc * pMD)
+{
+ STANDARD_VM_CONTRACT;
+
+ // Note that GetMethodDesc() does not return the correct value because of
+ // regrouping of MethodDescs into hot and cold blocks. That's why the caller
+ // has to supply the actual MethodDesc
+
+ SSIZE_T mdChunkOffset;
+ ZapNode * pMDChunkNode = image->GetNodeForStructure(pMD, &mdChunkOffset);
+ ZapNode * pHelperThunk = image->GetHelperThunk(CORINFO_HELP_EE_PRECODE_FIXUP);
+
+ image->FixupFieldToNode(this,
+ offsetof(RelativeFixupPrecode, m_pTargetOffset),
+ pHelperThunk,
+ offsetof(RelativeFixupPrecode, m_pTargetOffset) - RelativeFixupPrecode::GetTargetOffset(),
+ IMAGE_REL_BASED_RELPTR);
+
+ // Set the actual chunk index
+ RelativeFixupPrecode * pNewPrecode = (RelativeFixupPrecode *)image->GetImagePointer(this);
+
+ size_t mdOffset = mdChunkOffset - sizeof(MethodDescChunk);
+ size_t chunkIndex = mdOffset / MethodDesc::ALIGNMENT;
+ _ASSERTE(FitsInU1(chunkIndex));
+ pNewPrecode->m_MethodDescChunkIndex = (BYTE)chunkIndex;
+
+ // Fixup the base of MethodDescChunk
+ if (m_PrecodeChunkIndex == 0)
+ {
+ image->FixupFieldToNode(this,
+ (BYTE *)GetBase() - (BYTE *)this,
+ pMDChunkNode,
+ sizeof(MethodDescChunk),
+ IMAGE_REL_BASED_RELPTR);
+ }
}
#endif // FEATURE_NATIVE_IMAGE_GENERATION
@@ -762,6 +870,21 @@ BOOL DoesSlotCallPrestub(PCODE pCode)
}
#endif
+ //RelativeFixupPrecode
+#if defined(HAS_RELATIVE_FIXUP_PRECODE)
+ if (RelativeFixupPrecode::IsRelativeFixupPrecodeByASM(pCode))
+ {
+ PCODE pTarget = dac_cast<PTR_RelativeFixupPrecode>(pInstr)->GetTarget();
+
+ if (isJump(pTarget))
+ {
+ pTarget = decodeJump(pTarget);
+ }
+
+ return pTarget == (TADDR)PrecodeRelativeFixupThunk;
+ }
+#endif // HAS_RELATIVE_FIXUP_PRECODE
+
// StubPrecode
if (pInstr[0] == 0x10000089 && // adr x9, #16
pInstr[1] == 0xA940312A && // ldp x10,x12,[x9]
diff --git a/src/vm/method.cpp b/src/vm/method.cpp
index e4a75d6a4c..923cfc1a1c 100644
--- a/src/vm/method.cpp
+++ b/src/vm/method.cpp
@@ -5747,7 +5747,16 @@ PrecodeType MethodDesc::GetPrecodeType()
if (!RequiresMethodDescCallingConvention())
{
// Use the more efficient fixup precode if possible
- precodeType = PRECODE_FIXUP;
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ if (IsZapped())
+ {
+ precodeType = PRECODE_RELATIVE_FIXUP;
+ }
+ else
+#endif // HAS_RELATIVE_FIXUP_PRECODE
+ {
+ precodeType = PRECODE_FIXUP;
+ }
}
else
#endif // HAS_FIXUP_PRECODE
diff --git a/src/vm/precode.cpp b/src/vm/precode.cpp
index f0e005adb5..5ab4ca8637 100644
--- a/src/vm/precode.cpp
+++ b/src/vm/precode.cpp
@@ -35,6 +35,9 @@ BOOL Precode::IsValidType(PrecodeType t)
#ifdef HAS_FIXUP_PRECODE
case PRECODE_FIXUP:
#endif // HAS_FIXUP_PRECODE
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ case PRECODE_RELATIVE_FIXUP:
+#endif // HAS_RELATIVE_FIXUP_PRECODE
#ifdef HAS_THISPTR_RETBUF_PRECODE
case PRECODE_THISPTR_RETBUF:
#endif // HAS_THISPTR_RETBUF_PRECODE
@@ -61,6 +64,10 @@ SIZE_T Precode::SizeOf(PrecodeType t)
case PRECODE_FIXUP:
return sizeof(FixupPrecode);
#endif // HAS_FIXUP_PRECODE
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ case PRECODE_RELATIVE_FIXUP:
+ return sizeof(RelativeFixupPrecode);
+#endif // HAS_RELATIVE_FIXUP_PRECODE
#ifdef HAS_THISPTR_RETBUF_PRECODE
case PRECODE_THISPTR_RETBUF:
return sizeof(ThisPtrRetBufPrecode);
@@ -92,6 +99,11 @@ PCODE Precode::GetTarget()
target = AsFixupPrecode()->GetTarget();
break;
#endif // HAS_FIXUP_PRECODE
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ case PRECODE_RELATIVE_FIXUP:
+ target = AsRelativeFixupPrecode()->GetTarget();
+ break;
+#endif // HAS_RELATIVE_FIXUP_PRECODE
#ifdef HAS_THISPTR_RETBUF_PRECODE
case PRECODE_THISPTR_RETBUF:
target = AsThisPtrRetBufPrecode()->GetTarget();
@@ -131,6 +143,11 @@ MethodDesc* Precode::GetMethodDesc(BOOL fSpeculative /*= FALSE*/)
pMD = AsFixupPrecode()->GetMethodDesc();
break;
#endif // HAS_FIXUP_PRECODE
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ case PRECODE_RELATIVE_FIXUP:
+ pMD = AsRelativeFixupPrecode()->GetMethodDesc();
+ break;
+#endif // HAS_RELATIVE_FIXUP_PRECODE
#ifdef HAS_THISPTR_RETBUF_PRECODE
case PRECODE_THISPTR_RETBUF:
pMD = AsThisPtrRetBufPrecode()->GetMethodDesc();
@@ -149,6 +166,13 @@ MethodDesc* Precode::GetMethodDesc(BOOL fSpeculative /*= FALSE*/)
UnexpectedPrecodeType("Precode::GetMethodDesc", precodeType);
}
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ if (precodeType == PRECODE_RELATIVE_FIXUP)
+ {
+ _ASSERTE(dac_cast<PTR_MethodDesc>(pMD)->IsZapped());
+ }
+#endif // HAS_RELATIVE_FIXUP_PRECODE
+
// GetMethodDesc() on platform specific precode types returns TADDR. It should return
// PTR_MethodDesc instead. It is a workaround to resolve cyclic dependency between headers.
// Once we headers factoring of headers cleaned up, we should be able to get rid of it.
@@ -179,11 +203,16 @@ BOOL Precode::IsCorrectMethodDesc(MethodDesc * pMD)
{
PrecodeType precodeType = GetType();
-#ifdef HAS_FIXUP_PRECODE_CHUNKS
// We do not keep track of the MethodDesc in every kind of fixup precode
if (precodeType == PRECODE_FIXUP)
return TRUE;
-#endif
+
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ if (precodeType == PRECODE_RELATIVE_FIXUP)
+ {
+ return TRUE;
+ }
+#endif // HAS_RELATIVE_FIXUP_PRECODE
}
#endif // HAS_FIXUP_PRECODE_CHUNKS
@@ -208,6 +237,11 @@ BOOL Precode::IsPointingToPrestub(PCODE target)
return TRUE;
#endif
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ if (IsPointingTo(target, GetEEFuncEntryPoint(PrecodeRelativeFixupThunk)))
+ return TRUE;
+#endif
+
#ifdef FEATURE_PREJIT
Module *pZapModule = GetMethodDesc()->GetZapModule();
if (pZapModule != NULL)
@@ -237,7 +271,11 @@ PCODE Precode::TryToSkipFixupPrecode(PCODE addr)
#if defined(FEATURE_PREJIT) && defined(HAS_FIXUP_PRECODE)
// Early out for common cases
- if (!FixupPrecode::IsFixupPrecodeByASM(addr))
+ if (!FixupPrecode::IsFixupPrecodeByASM(addr)
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ && !RelativeFixupPrecode::IsRelativeFixupPrecodeByASM(addr)
+#endif // HAS_RELATIVE_FIXUP_PRECODE
+ )
return NULL;
// This optimization makes sense in NGened code only.
@@ -279,6 +317,11 @@ Precode* Precode::GetPrecodeForTemporaryEntryPoint(TADDR temporaryEntryPoints, i
return PTR_Precode(temporaryEntryPoints + index * sizeof(FixupPrecode));
}
#endif
+
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ _ASSERTE(t != PRECODE_RELATIVE_FIXUP);
+#endif
+
SIZE_T oneSize = SizeOfTemporaryEntryPoint(t);
return PTR_Precode(temporaryEntryPoints + index * oneSize);
}
@@ -313,6 +356,11 @@ SIZE_T Precode::SizeOfTemporaryEntryPoints(PrecodeType t, bool preallocateJumpSt
_ASSERTE(!preallocateJumpStubs);
}
#endif
+
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ _ASSERTE(t != PRECODE_RELATIVE_FIXUP);
+#endif
+
SIZE_T oneSize = SizeOfTemporaryEntryPoint(t);
return count * oneSize;
}
@@ -349,6 +397,10 @@ Precode* Precode::Allocate(PrecodeType t, MethodDesc* pMD,
SIZE_T size;
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ _ASSERTE(t != PRECODE_RELATIVE_FIXUP);
+#endif
+
#ifdef HAS_FIXUP_PRECODE_CHUNKS
if (t == PRECODE_FIXUP)
{
@@ -388,6 +440,11 @@ void Precode::Init(PrecodeType t, MethodDesc* pMD, LoaderAllocator *pLoaderAlloc
((FixupPrecode*)this)->Init(pMD, pLoaderAllocator);
break;
#endif // HAS_FIXUP_PRECODE
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ case PRECODE_RELATIVE_FIXUP:
+ ((RelativeFixupPrecode*)this)->Init(pMD, pLoaderAllocator);
+ break;
+#endif // HAS_RELATIVE_FIXUP_PRECODE
#ifdef HAS_THISPTR_RETBUF_PRECODE
case PRECODE_THISPTR_RETBUF:
((ThisPtrRetBufPrecode*)this)->Init(pMD, pLoaderAllocator);
@@ -418,6 +475,12 @@ void Precode::ResetTargetInterlocked()
break;
#endif // HAS_FIXUP_PRECODE
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ case PRECODE_RELATIVE_FIXUP:
+ AsRelativeFixupPrecode()->ResetTargetInterlocked();
+ break;
+#endif // HAS_RELATIVE_FIXUP_PRECODE
+
default:
UnexpectedPrecodeType("Precode::ResetTargetInterlocked", precodeType);
break;
@@ -453,6 +516,12 @@ BOOL Precode::SetTargetInterlocked(PCODE target, BOOL fOnlyRedirectFromPrestub)
break;
#endif // HAS_FIXUP_PRECODE
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ case PRECODE_RELATIVE_FIXUP:
+ ret = AsRelativeFixupPrecode()->SetTargetInterlocked(target, expected);
+ break;
+#endif // HAS_RELATIVE_FIXUP_PRECODE
+
#ifdef HAS_THISPTR_RETBUF_PRECODE
case PRECODE_THISPTR_RETBUF:
ret = AsThisPtrRetBufPrecode()->SetTargetInterlocked(target, expected);
@@ -672,6 +741,10 @@ void Precode::Save(DataImage *image)
_ASSERTE(GetType() != PRECODE_FIXUP);
#endif
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ _ASSERTE(GetType() != PRECODE_RELATIVE_FIXUP);
+#endif // HAS_RELATIVE_FIXUP_PRECODE
+
#if defined(_TARGET_X86_) || defined(_TARGET_AMD64_)
// StubPrecode may have straddlers (relocations crossing pages) on x86 and x64. We need
// to insert padding to eliminate it. To do that, we need to save these using custom ZapNode that can only
@@ -722,9 +795,15 @@ void Precode::Fixup(DataImage *image, MethodDesc * pMD)
break;
#endif // HAS_NDIRECT_IMPORT_PRECODE
#ifdef HAS_FIXUP_PRECODE
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ case PRECODE_RELATIVE_FIXUP:
+ AsRelativeFixupPrecode()->Fixup(image, pMD);
+ break;
+#else // HAS_RELATIVE_FIXUP_PRECODE
case PRECODE_FIXUP:
AsFixupPrecode()->Fixup(image, pMD);
break;
+#endif // HAS_RELATIVE_FIXUP_PRECODE
#endif // HAS_FIXUP_PRECODE
default:
UnexpectedPrecodeType("Precode::Save", precodeType);
@@ -754,6 +833,10 @@ void Precode::SaveChunk::Save(DataImage* image, MethodDesc * pMD)
}
#endif // HAS_FIXUP_PRECODE_CHUNKS
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ _ASSERTE(precodeType != PRECODE_RELATIVE_FIXUP);
+#endif // HAS_RELATIVE_FIXUP_PRECODE
+
SIZE_T size = Precode::SizeOf(precodeType);
Precode* pPrecode = (Precode *)new (image->GetHeap()) BYTE[size];
pPrecode->Init(precodeType, pMD, NULL);
@@ -768,20 +851,38 @@ static void SaveFixupPrecodeChunk(DataImage * image, MethodDesc ** rgMD, COUNT_T
{
STANDARD_VM_CONTRACT;
- ULONG size = sizeof(FixupPrecode) * count + sizeof(PTR_MethodDesc);
- FixupPrecode * pBase = (FixupPrecode *)new (image->GetHeap()) BYTE[size];
+ ULONG sizeSinglePrecode;
+ PrecodeType type;
+
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ sizeSinglePrecode = sizeof(RelativeFixupPrecode);
+ type = PRECODE_RELATIVE_FIXUP;
+#else // HAS_RELATIVE_FIXUP_PRECODE
+ sizeSinglePrecode = sizeof(FixupPrecode);
+ type = PRECODE_FIXUP;
+#endif // HAS_RELATIVE_FIXUP_PRECODE
- ZapStoredStructure * pNode = image->StoreStructure(NULL, size, kind,
- Precode::AlignOf(PRECODE_FIXUP));
+ ULONG size = sizeSinglePrecode * count + sizeof(PTR_MethodDesc);
+ ZapStoredStructure * pNode = image->StoreStructure(NULL, size, kind, Precode::AlignOf(type));
+
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ RelativeFixupPrecode * pBase = (RelativeFixupPrecode *)new (image->GetHeap()) BYTE[size];
+#else // HAS_RELATIVE_FIXUP_PRECODE
+ FixupPrecode * pBase = (FixupPrecode *)new (image->GetHeap()) BYTE[size];
+#endif // HAS_RELATIVE_FIXUP_PRECODE
for (COUNT_T i = 0; i < count; i++)
{
MethodDesc * pMD = rgMD[i];
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ RelativeFixupPrecode * pPrecode = pBase + i;
+#else // HAS_RELATIVE_FIXUP_PRECODE
FixupPrecode * pPrecode = pBase + i;
+#endif // HAS_RELATIVE_FIXUP_PRECODE
pPrecode->InitForSave((count - 1) - i);
- image->BindPointer(pPrecode, pNode, i * sizeof(FixupPrecode));
+ image->BindPointer(pPrecode, pNode, i * sizeSinglePrecode);
// Alias the temporary entrypoint
image->RegisterSurrogate(pMD, pPrecode);
@@ -871,7 +972,14 @@ void Precode::EnumMemoryRegions(CLRDataEnumMemoryFlags flags)
}
#endif
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ if (t == PRECODE_RELATIVE_FIXUP)
+ {
+ AsRelativeFixupPrecode()->EnumMemoryRegions(flags);
+ return;
+ }
+#endif // HAS_RELATIVE_FIXUP_PRECODE
+
DacEnumMemoryRegion(GetStart(), SizeOf(t));
}
#endif
-
diff --git a/src/vm/precode.h b/src/vm/precode.h
index fe55d6f954..ba51f22256 100644
--- a/src/vm/precode.h
+++ b/src/vm/precode.h
@@ -25,6 +25,9 @@ enum PrecodeType {
#ifdef HAS_FIXUP_PRECODE
PRECODE_FIXUP = FixupPrecode::Type,
#endif // HAS_FIXUP_PRECODE
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ PRECODE_RELATIVE_FIXUP = RelativeFixupPrecode::Type,
+#endif // HAS_RELATIVE_FIXUP_PRECODE
#ifdef HAS_THISPTR_RETBUF_PRECODE
PRECODE_THISPTR_RETBUF = ThisPtrRetBufPrecode::Type,
#endif // HAS_THISPTR_RETBUF_PRECODE
@@ -70,6 +73,16 @@ private:
}
#endif // HAS_FIXUP_PRECODE
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ RelativeFixupPrecode* AsRelativeFixupPrecode()
+ {
+ LIMITED_METHOD_CONTRACT;
+ SUPPORTS_DAC;
+
+ return dac_cast<PTR_RelativeFixupPrecode>(this);
+ }
+#endif // HAS_RELATIVE_FIXUP_PRECODE
+
#ifdef HAS_THISPTR_RETBUF_PRECODE
ThisPtrRetBufPrecode* AsThisPtrRetBufPrecode()
{
@@ -293,6 +306,9 @@ public:
#ifdef HAS_FIXUP_PRECODE_CHUNKS
_ASSERTE(t != PRECODE_FIXUP);
#endif
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ _ASSERTE(t != PRECODE_RELATIVE_FIXUP);
+#endif // HAS_RELATIVE_FIXUP_PRECODE
return ALIGN_UP(SizeOf(t), AlignOf(t));
}
@@ -331,21 +347,6 @@ public:
#ifdef DACCESS_COMPILE
void EnumMemoryRegions(CLRDataEnumMemoryFlags flags);
#endif
-
-#ifdef HAS_FIXUP_PRECODE_CHUNKS
- static DWORD GetOffsetOfBase(PrecodeType t, DWORD count)
- {
- assert(t == PRECODE_FIXUP);
- return (DWORD)(count * sizeof(FixupPrecode));
- }
-
- static DWORD GetOffset(PrecodeType t, DWORD index, DWORD count)
- {
- assert(t == PRECODE_FIXUP);
- assert(index < count);
- return (DWORD)((count - index - 1)* sizeof(FixupPrecode));
- }
-#endif
};
#endif // __PRECODE_H__
diff --git a/src/vm/prestub.cpp b/src/vm/prestub.cpp
index a8e35a2842..f121278f1a 100644
--- a/src/vm/prestub.cpp
+++ b/src/vm/prestub.cpp
@@ -2254,8 +2254,12 @@ static PCODE PatchNonVirtualExternalMethod(MethodDesc * pMD, PCODE pCode, PTR_CO
// than code:Precode::TryToSkipFixupPrecode.
//
#ifdef HAS_FIXUP_PRECODE
- if (pMD->HasPrecode() && pMD->GetPrecode()->GetType() == PRECODE_FIXUP
- && pMD->IsNativeCodeStableAfterInit())
+ if (pMD->HasPrecode()
+ && (pMD->GetPrecode()->GetType() == PRECODE_FIXUP
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ || pMD->GetPrecode()->GetType() == PRECODE_RELATIVE_FIXUP
+#endif // HAS_RELATIVE_FIXUP_PRECODE
+ ) && pMD->IsNativeCodeStableAfterInit())
{
PCODE pDirectTarget = pMD->IsFCall() ? ECall::GetFCallImpl(pMD) : pMD->GetNativeCode();
if (pDirectTarget != NULL)
diff --git a/src/vm/stubmgr.cpp b/src/vm/stubmgr.cpp
index 27ab5cf614..ea863c0c91 100644
--- a/src/vm/stubmgr.cpp
+++ b/src/vm/stubmgr.cpp
@@ -1058,6 +1058,11 @@ BOOL PrecodeStubManager::DoTraceStub(PCODE stubStartAddress,
break;
#endif // HAS_FIXUP_PRECODE
+#ifdef HAS_RELATIVE_FIXUP_PRECODE
+ case PRECODE_RELATIVE_FIXUP:
+ break;
+#endif // HAS_RELATIVE_FIXUP_PRECODE
+
#ifdef HAS_THISPTR_RETBUF_PRECODE
case PRECODE_THISPTR_RETBUF:
break;
@@ -2591,4 +2596,3 @@ void TailCallStubManager::DoEnumMemoryRegions(CLRDataEnumMemoryFlags flags)
}
#endif // #ifdef DACCESS_COMPILE
-