Diffstat (limited to 'src/vm')
-rw-r--r--  src/vm/arm/stubs.cpp           88
-rw-r--r--  src/vm/array.cpp                4
-rw-r--r--  src/vm/dataimage.cpp            4
-rw-r--r--  src/vm/generics.cpp             4
-rw-r--r--  src/vm/jitinterface.cpp        16
-rw-r--r--  src/vm/method.cpp              86
-rw-r--r--  src/vm/method.hpp              15
-rw-r--r--  src/vm/methodtable.cpp         27
-rw-r--r--  src/vm/methodtable.h           53
-rw-r--r--  src/vm/methodtable.inl          4
-rw-r--r--  src/vm/methodtablebuilder.cpp   4
11 files changed, 247 insertions, 58 deletions
diff --git a/src/vm/arm/stubs.cpp b/src/vm/arm/stubs.cpp
index 916c949df1..cb9ff602ff 100644
--- a/src/vm/arm/stubs.cpp
+++ b/src/vm/arm/stubs.cpp
@@ -1680,6 +1680,13 @@ VOID StubLinkerCPU::EmitShuffleThunk(ShuffleEntry *pShuffleEntryArray)
void StubLinkerCPU::ThumbEmitCallManagedMethod(MethodDesc *pMD, bool fTailcall)
{
+ bool isRelative = MethodTable::VTableIndir2_t::isRelative
+ && pMD->IsVtableSlot();
+
+#ifndef FEATURE_NGEN_RELOCS_OPTIMIZATIONS
+ _ASSERTE(!isRelative);
+#endif
+
// Use direct call if possible.
if (pMD->HasStableEntryPoint())
{
@@ -1691,14 +1698,47 @@ void StubLinkerCPU::ThumbEmitCallManagedMethod(MethodDesc *pMD, bool fTailcall)
// mov r12, #slotaddress
ThumbEmitMovConstant(ThumbReg(12), (TADDR)pMD->GetAddrOfSlot());
+ if (isRelative)
+ {
+ if (!fTailcall)
+ {
+ // str r4, [sp, 0]
+ ThumbEmitStoreRegIndirect(ThumbReg(4), thumbRegSp, 0);
+ }
+
+ // mov r4, r12
+ ThumbEmitMovRegReg(ThumbReg(4), ThumbReg(12));
+ }
+
// ldr r12, [r12]
ThumbEmitLoadRegIndirect(ThumbReg(12), ThumbReg(12), 0);
+
+ if (isRelative)
+ {
+ // add r12, r4
+ ThumbEmitAddReg(ThumbReg(12), ThumbReg(4));
+
+ if (!fTailcall)
+ {
+ // ldr r4, [sp, 0]
+ ThumbEmitLoadRegIndirect(ThumbReg(4), thumbRegSp, 0);
+ }
+ }
}
if (fTailcall)
{
- // bx r12
- ThumbEmitJumpRegister(ThumbReg(12));
+ if (!isRelative)
+ {
+ // bx r12
+ ThumbEmitJumpRegister(ThumbReg(12));
+ }
+ else
+ {
+ // Replace LR with R12 on stack: hybrid-tail call, same as for EmitShuffleThunk
+ // str r12, [sp, 4]
+ ThumbEmitStoreRegIndirect(ThumbReg(12), thumbRegSp, 4);
+ }
}
else
{
@@ -1835,6 +1875,13 @@ void StubLinkerCPU::ThumbEmitCallWithGenericInstantiationParameter(MethodDesc *p
}
}
+ bool isRelative = MethodTable::VTableIndir2_t::isRelative
+ && pMD->IsVtableSlot();
+
+#ifndef FEATURE_NGEN_RELOCS_OPTIMIZATIONS
+ _ASSERTE(!isRelative);
+#endif
+
// Update descriptor count to the actual number used.
cArgDescriptors = idxCurrentDesc;
@@ -1927,7 +1974,17 @@ void StubLinkerCPU::ThumbEmitCallWithGenericInstantiationParameter(MethodDesc *p
}
// Emit a tail call to the target method.
+ if (isRelative)
+ {
+ ThumbEmitProlog(1, 0, FALSE);
+ }
+
ThumbEmitCallManagedMethod(pMD, true);
+
+ if (isRelative)
+ {
+ ThumbEmitEpilog();
+ }
}
else
{
@@ -1936,7 +1993,9 @@ void StubLinkerCPU::ThumbEmitCallWithGenericInstantiationParameter(MethodDesc *p
// Calculate the size of the new stack frame:
//
//        +------------+
-// SP -> |            | <-+
+// SP -> |            | <-- Space for helper arg, if isRelative is true
+//       +------------+
+//       |            | <-+
//        :            :   | Outgoing arguments
//        |            | <-+
//        +------------+
@@ -1967,6 +2026,12 @@ void StubLinkerCPU::ThumbEmitCallWithGenericInstantiationParameter(MethodDesc *p
DWORD cbStackArgs = (pLastArg->m_idxDst + 1) * 4;
DWORD cbStackFrame = cbStackArgs + sizeof(GSCookie) + sizeof(StubHelperFrame);
cbStackFrame = ALIGN_UP(cbStackFrame, 8);
+
+ if (isRelative)
+ {
+ cbStackFrame += 4;
+ }
+
DWORD cbStackFrameWithoutSavedRegs = cbStackFrame - (13 * 4); // r0-r11,lr
// Prolog:
@@ -2175,8 +2240,25 @@ void StubLinkerCPU::EmitUnboxMethodStub(MethodDesc *pMD)
// add r0, #4
ThumbEmitIncrement(ThumbReg(0), 4);
+ bool isRelative = MethodTable::VTableIndir2_t::isRelative
+ && pMD->IsVtableSlot();
+
+#ifndef FEATURE_NGEN_RELOCS_OPTIMIZATIONS
+ _ASSERTE(!isRelative);
+#endif
+
+ if (isRelative)
+ {
+ ThumbEmitProlog(1, 0, FALSE);
+ }
+
// Tail call the real target.
ThumbEmitCallManagedMethod(pMD, true /* tail call */);
+
+ if (isRelative)
+ {
+ ThumbEmitEpilog();
+ }
}
}
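
The Thumb sequences above spill r4, copy the slot address into it, load the slot contents, and add the two registers. That only makes sense given what a relative vtable slot stores: the target minus the slot's own address. A minimal sketch of that encoding (not CoreCLR code; a pointer-sized delta is assumed):

    #include <cstdint>

    using TADDR = std::uintptr_t;
    using PCODE = std::uintptr_t;

    // A relative slot stores (target - slot address). Reading it back is
    // the moral equivalent of the emitted "ldr r12, [r12]" followed by
    // "add r12, r4", with r4 holding the slot address.
    struct RelativeSlot
    {
        std::intptr_t m_delta;

        PCODE GetValue() const
        {
            return (PCODE)((TADDR)this + m_delta);
        }

        void SetValue(PCODE target)
        {
            m_delta = (std::intptr_t)(target - (TADDR)this);
        }
    };

In the relative tail-call path the stub first pushes a small frame, stores r12 over the saved LR, and lets the epilog's pop transfer control: the "hybrid" tail call noted in the comment above.
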
diff --git a/src/vm/array.cpp b/src/vm/array.cpp
index 6c58fe727b..9295c7cdc8 100644
--- a/src/vm/array.cpp
+++ b/src/vm/array.cpp
@@ -374,7 +374,7 @@ MethodTable* Module::CreateArrayMethodTable(TypeHandle elemTypeHnd, CorElementTy
// If none, we need to allocate space for the slots
if (!canShareVtableChunks)
{
- cbMT += numVirtuals * sizeof(PCODE);
+ cbMT += numVirtuals * sizeof(MethodTable::VTableIndir2_t);
}
// Canonical methodtable has an array of non virtual slots pointed to by the optional member
@@ -544,7 +544,7 @@ MethodTable* Module::CreateArrayMethodTable(TypeHandle elemTypeHnd, CorElementTy
else
{
// Use the locally allocated chunk
- it.SetIndirectionSlot((PTR_PCODE)(pMemory+cbArrayClass+offsetOfUnsharedVtableChunks));
+ it.SetIndirectionSlot((MethodTable::VTableIndir2_t *)(pMemory+cbArrayClass+offsetOfUnsharedVtableChunks));
offsetOfUnsharedVtableChunks += it.GetSize();
}
}
diff --git a/src/vm/dataimage.cpp b/src/vm/dataimage.cpp
index 4e276fe460..854f214aea 100644
--- a/src/vm/dataimage.cpp
+++ b/src/vm/dataimage.cpp
@@ -749,8 +749,10 @@ FORCEINLINE static CorCompileSection GetSectionForNodeType(ZapNodeType type)
return CORCOMPILE_SECTION_READONLY_WARM;
case NodeTypeForItemKind(DataImage::ITEM_DICTIONARY):
+ return CORCOMPILE_SECTION_READONLY_DICTIONARY;
+
case NodeTypeForItemKind(DataImage::ITEM_VTABLE_CHUNK):
- return CORCOMPILE_SECTION_READONLY_VCHUNKS_AND_DICTIONARY;
+ return CORCOMPILE_SECTION_READONLY_VCHUNKS;
// SECTION_CLASS_COLD
case NodeTypeForItemKind(DataImage::ITEM_PARAM_TYPEDESC):
diff --git a/src/vm/generics.cpp b/src/vm/generics.cpp
index 4ff877ed43..b68054985e 100644
--- a/src/vm/generics.cpp
+++ b/src/vm/generics.cpp
@@ -324,7 +324,7 @@ ClassLoader::CreateTypeHandleForNonCanonicalGenericInstantiation(
// If none, we need to allocate space for the slots
if (!canShareVtableChunks)
{
- allocSize += S_SIZE_T( cSlots ) * S_SIZE_T( sizeof(PCODE) );
+ allocSize += S_SIZE_T( cSlots ) * S_SIZE_T( sizeof(MethodTable::VTableIndir2_t) );
}
if (allocSize.IsOverflow())
@@ -446,7 +446,7 @@ ClassLoader::CreateTypeHandleForNonCanonicalGenericInstantiation(
else
{
// Use the locally allocated chunk
- it.SetIndirectionSlot((PTR_PCODE)(pMemory+offsetOfUnsharedVtableChunks));
+ it.SetIndirectionSlot((MethodTable::VTableIndir2_t *)(pMemory+offsetOfUnsharedVtableChunks));
offsetOfUnsharedVtableChunks += it.GetSize();
}
}
diff --git a/src/vm/jitinterface.cpp b/src/vm/jitinterface.cpp
index 502b5ada13..e92f875c4c 100644
--- a/src/vm/jitinterface.cpp
+++ b/src/vm/jitinterface.cpp
@@ -8744,8 +8744,9 @@ void CEEInfo::getMethodVTableOffset (CORINFO_METHOD_HANDLE methodHnd,
_ASSERTE(method->GetSlot() < method->GetMethodTable()->GetNumVirtuals());
*pOffsetOfIndirection = MethodTable::GetVtableOffset() + MethodTable::GetIndexOfVtableIndirection(method->GetSlot()) * sizeof(MethodTable::VTableIndir_t);
- *pOffsetAfterIndirection = MethodTable::GetIndexAfterVtableIndirection(method->GetSlot()) * sizeof(PCODE);
+ *pOffsetAfterIndirection = MethodTable::GetIndexAfterVtableIndirection(method->GetSlot()) * sizeof(MethodTable::VTableIndir2_t);
*isRelative = MethodTable::VTableIndir_t::isRelative ? 1 : 0;
+ _ASSERTE(MethodTable::VTableIndir_t::isRelative == MethodTable::VTableIndir2_t::isRelative);
EE_TO_JIT_TRANSITION_LEAF();
}
@@ -9123,8 +9124,17 @@ void CEEInfo::getFunctionEntryPoint(CORINFO_METHOD_HANDLE ftnHnd,
_ASSERTE((accessFlags & CORINFO_ACCESS_THIS) || !ftn->IsRemotingInterceptedViaVirtualDispatch());
- ret = ftn->GetAddrOfSlot();
- accessType = IAT_PVALUE;
+ ret = (void *)ftn->GetAddrOfSlot();
+
+ if (MethodTable::VTableIndir2_t::isRelative
+ && ftn->IsVtableSlot())
+ {
+ accessType = IAT_RELPVALUE;
+ }
+ else
+ {
+ accessType = IAT_PVALUE;
+ }
}
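
IAT_RELPVALUE tells the JIT that the returned slot holds a self-relative offset rather than an absolute entry point, so the generated call site must rebase whatever it loads. Roughly (illustrative helper, not a JIT API; pointer-sized delta assumed):

    #include <cstdint>

    using TADDR = std::uintptr_t;
    using PCODE = std::uintptr_t;

    // How a caller would resolve the two access types used above.
    PCODE ResolveEntryPoint(TADDR slotAddr, bool isRelPValue)
    {
        if (isRelPValue)
            return (PCODE)(slotAddr + *(std::intptr_t *)slotAddr); // IAT_RELPVALUE
        return *(PCODE *)slotAddr;                                 // IAT_PVALUE
    }
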
diff --git a/src/vm/method.cpp b/src/vm/method.cpp
index e1bd021bc7..2b310c423a 100644
--- a/src/vm/method.cpp
+++ b/src/vm/method.cpp
@@ -563,7 +563,7 @@ PCODE MethodDesc::GetMethodEntryPoint()
return GetMethodTable_NoLogging()->GetSlot(GetSlot());
}
-PTR_PCODE MethodDesc::GetAddrOfSlot()
+TADDR MethodDesc::GetAddrOfSlot()
{
CONTRACTL
{
@@ -584,7 +584,7 @@ PTR_PCODE MethodDesc::GetAddrOfSlot()
SIZE_T size = GetBaseSize();
- return PTR_PCODE(dac_cast<TADDR>(this) + size);
+ return dac_cast<TADDR>(this) + size;
}
_ASSERTE(GetMethodTable()->IsCanonicalMethodTable());
@@ -2342,7 +2342,15 @@ void MethodDesc::Reset()
InterlockedUpdateFlags2(enum_flag2_HasStableEntryPoint | enum_flag2_HasPrecode, FALSE);
- *GetAddrOfSlot() = GetTemporaryEntryPoint();
+ TADDR slot = GetAddrOfSlot();
+ if (IsVtableSlot())
+ {
+ ((MethodTable::VTableIndir2_t *) slot)->SetValue(GetTemporaryEntryPoint());
+ }
+ else
+ {
+ *((PCODE *) slot) = GetTemporaryEntryPoint();
+ }
}
if (HasNativeCodeSlot())
@@ -4711,9 +4719,19 @@ void MethodDesc::SetTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator, Alloc
GetMethodDescChunk()->EnsureTemporaryEntryPointsCreated(pLoaderAllocator, pamTracker);
- PTR_PCODE pSlot = GetAddrOfSlot();
- _ASSERTE(*pSlot == NULL);
- *pSlot = GetTemporaryEntryPoint();
+ TADDR slot = GetAddrOfSlot();
+ if (IsVtableSlot())
+ {
+ MethodTable::VTableIndir2_t *slotPtr = ((MethodTable::VTableIndir2_t *) slot);
+ _ASSERTE(slotPtr->IsNull());
+ slotPtr->SetValue(GetTemporaryEntryPoint());
+ }
+ else
+ {
+ PCODE *slotPtr = (PCODE *) slot;
+ _ASSERTE(*slotPtr == NULL);
+ *slotPtr = GetTemporaryEntryPoint();
+ }
if (RequiresStableEntryPoint())
{
@@ -4776,7 +4794,7 @@ Precode* MethodDesc::GetOrCreatePrecode()
return GetPrecode();
}
- PTR_PCODE pSlot = GetAddrOfSlot();
+ TADDR pSlot = GetAddrOfSlot();
PCODE tempEntry = GetTemporaryEntryPoint();
PrecodeType requiredType = GetPrecodeType();
@@ -4796,14 +4814,40 @@ Precode* MethodDesc::GetOrCreatePrecode()
AllocMemTracker amt;
Precode* pPrecode = Precode::Allocate(requiredType, this, GetLoaderAllocator(), &amt);
- if (FastInterlockCompareExchangePointer(EnsureWritablePages(pSlot), pPrecode->GetEntryPoint(), tempEntry) == tempEntry)
+ PCODE newVal;
+ PCODE oldVal;
+ TADDR *slotAddr;
+
+ if (IsVtableSlot())
+ {
+ newVal = MethodTable::VTableIndir2_t::GetRelative(pSlot, pPrecode->GetEntryPoint());
+ oldVal = MethodTable::VTableIndir2_t::GetRelative(pSlot, tempEntry);
+ slotAddr = (TADDR *) EnsureWritablePages((MethodTable::VTableIndir2_t *) pSlot);
+ }
+ else
+ {
+ newVal = pPrecode->GetEntryPoint();
+ oldVal = tempEntry;
+ slotAddr = (TADDR *) EnsureWritablePages((PCODE *) pSlot);
+ }
+
+ if (FastInterlockCompareExchangePointer(slotAddr, (TADDR) newVal, (TADDR) oldVal) == oldVal)
amt.SuppressRelease();
}
// Set the flags atomically
InterlockedUpdateFlags2(enum_flag2_HasStableEntryPoint | enum_flag2_HasPrecode, TRUE);
- return Precode::GetPrecodeFromEntryPoint(*pSlot);
+ PCODE addr;
+ if (IsVtableSlot())
+ {
+ addr = ((MethodTable::VTableIndir2_t *)pSlot)->GetValue();
+ }
+ else
+ {
+ addr = *((PCODE *)pSlot);
+ }
+ return Precode::GetPrecodeFromEntryPoint(addr);
}
//*******************************************************************************
@@ -4857,10 +4901,28 @@ BOOL MethodDesc::SetStableEntryPointInterlocked(PCODE addr)
_ASSERTE(!HasPrecode());
PCODE pExpected = GetTemporaryEntryPoint();
- PTR_PCODE pSlot = GetAddrOfSlot();
- EnsureWritablePages(pSlot);
+ TADDR pSlot = GetAddrOfSlot();
+
+ BOOL fResult;
+
+ TADDR *slotAddr;
+ PCODE newVal;
+ PCODE oldVal;
+
+ if (IsVtableSlot())
+ {
+ newVal = MethodTable::VTableIndir2_t::GetRelative(pSlot, addr);
+ oldVal = MethodTable::VTableIndir2_t::GetRelative(pSlot, pExpected);
+ slotAddr = (TADDR *) EnsureWritablePages((MethodTable::VTableIndir2_t *) pSlot);
+ }
+ else
+ {
+ newVal = addr;
+ oldVal = pExpected;
+ slotAddr = (TADDR *) EnsureWritablePages((PCODE *) pSlot);
+ }
- BOOL fResult = FastInterlockCompareExchangePointer(pSlot, addr, pExpected) == pExpected;
+ fResult = FastInterlockCompareExchangePointer(slotAddr, (TADDR) newVal, (TADDR) oldVal) == oldVal;
InterlockedUpdateFlags2(enum_flag2_HasStableEntryPoint, TRUE);
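
Both call sites above rebase the expected and the new entry point against the slot's address before the compare-exchange, because a relative slot stores (target - slot) and the raw stored words are what the CAS compares. A sketch of the pattern (illustrative name; pointer-sized delta and the GetRelative semantics are assumptions taken from the diff):

    #include <atomic>
    #include <cstdint>

    using TADDR = std::uintptr_t;
    using PCODE = std::uintptr_t;

    // Atomically retarget a relative slot, failing if another thread
    // got there first. Mirrors the GetRelative + CAS sequence above.
    bool TrySwapRelativeSlot(std::atomic<std::intptr_t> *slot,
                             PCODE expected, PCODE newTarget)
    {
        TADDR base = (TADDR)slot;
        std::intptr_t oldRel = (std::intptr_t)(expected  - base);
        std::intptr_t newRel = (std::intptr_t)(newTarget - base);
        return slot->compare_exchange_strong(oldRel, newRel);
    }
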
diff --git a/src/vm/method.hpp b/src/vm/method.hpp
index c1316d06c3..57258eb47c 100644
--- a/src/vm/method.hpp
+++ b/src/vm/method.hpp
@@ -1133,7 +1133,16 @@ public:
}
}
- PTR_PCODE GetAddrOfSlot();
+ inline BOOL IsVirtualSlot()
+ {
+ return GetSlot() < GetMethodTable()->GetNumVirtuals();
+ }
+ inline BOOL IsVtableSlot()
+ {
+ return IsVirtualSlot() && !HasNonVtableSlot();
+ }
+
+ TADDR GetAddrOfSlot();
PTR_MethodDesc GetDeclMethodDesc(UINT32 slotNumber);
@@ -2566,7 +2575,7 @@ public:
};
// The writeable part of the methoddesc.
-#if defined(PLATFORM_UNIX) && defined(_TARGET_ARM_)
+#if defined(FEATURE_NGEN_RELOCS_OPTIMIZATIONS)
RelativePointer<PTR_NDirectWriteableData> m_pWriteableData;
#else
PlainPointer<PTR_NDirectWriteableData> m_pWriteableData;
@@ -3382,7 +3391,7 @@ public: // <TODO>make private: JITinterface.cpp accesses through this </TODO>
//
// For generic method definitions that are not the typical method definition (e.g. C<int>.m<U>)
// this field is null; to obtain the instantiation use LoadMethodInstantiation
-#if defined(PLATFORM_UNIX) && defined(_TARGET_ARM_)
+#if defined(FEATURE_NGEN_RELOCS_OPTIMIZATIONS)
RelativePointer<PTR_Dictionary> m_pPerInstInfo; //SHARED
#else
PlainPointer<PTR_Dictionary> m_pPerInstInfo; //SHARED
diff --git a/src/vm/methodtable.cpp b/src/vm/methodtable.cpp
index 67656235ef..aecb9a6863 100644
--- a/src/vm/methodtable.cpp
+++ b/src/vm/methodtable.cpp
@@ -4297,7 +4297,8 @@ void MethodTable::Save(DataImage *image, DWORD profilingFlags)
{
if (!image->IsStored(it.GetIndirectionSlot()))
{
- if (CanInternVtableChunk(image, it))
+ if (!MethodTable::VTableIndir2_t::isRelative
+ && CanInternVtableChunk(image, it))
image->StoreInternedStructure(it.GetIndirectionSlot(), it.GetSize(), DataImage::ITEM_VTABLE_CHUNK);
else
image->StoreStructure(it.GetIndirectionSlot(), it.GetSize(), DataImage::ITEM_VTABLE_CHUNK);
@@ -4989,7 +4990,7 @@ void MethodTable::Fixup(DataImage *image)
// Virtual slots live in chunks pointed to by vtable indirections
slotBase = (PVOID) GetVtableIndirections()[GetIndexOfVtableIndirection(slotNumber)].GetValueMaybeNull();
- slotOffset = GetIndexAfterVtableIndirection(slotNumber) * sizeof(PCODE);
+ slotOffset = GetIndexAfterVtableIndirection(slotNumber) * sizeof(MethodTable::VTableIndir2_t);
}
else if (HasSingleNonVirtualSlot())
{
@@ -5016,7 +5017,7 @@ void MethodTable::Fixup(DataImage *image)
if (pMD->GetMethodTable() == this)
{
ZapRelocationType relocType;
- if (slotNumber >= GetNumVirtuals())
+ if (slotNumber >= GetNumVirtuals() || MethodTable::VTableIndir2_t::isRelative)
relocType = IMAGE_REL_BASED_RelativePointer;
else
relocType = IMAGE_REL_BASED_PTR;
@@ -5039,9 +5040,15 @@ void MethodTable::Fixup(DataImage *image)
_ASSERTE(pSourceMT->GetMethodDescForSlot(slotNumber) == pMD);
#endif
+ ZapRelocationType relocType;
+ if (MethodTable::VTableIndir2_t::isRelative)
+ relocType = IMAGE_REL_BASED_RELPTR;
+ else
+ relocType = IMAGE_REL_BASED_PTR;
+
if (image->CanEagerBindToMethodDesc(pMD) && pMD->GetLoaderModule() == pZapModule)
{
- pMD->FixupSlot(image, slotBase, slotOffset);
+ pMD->FixupSlot(image, slotBase, slotOffset, relocType);
}
else
{
@@ -5050,7 +5057,7 @@ void MethodTable::Fixup(DataImage *image)
ZapNode * importThunk = image->GetVirtualImportThunk(pMD->GetMethodTable(), pMD, slotNumber);
// On ARM, make sure that the address to the virtual thunk that we write into the
// vtable "chunk" has the Thumb bit set.
- image->FixupFieldToNode(slotBase, slotOffset, importThunk ARM_ARG(THUMB_CODE));
+ image->FixupFieldToNode(slotBase, slotOffset, importThunk ARM_ARG(THUMB_CODE) NOT_ARM_ARG(0), relocType);
}
else
{
@@ -9790,7 +9797,15 @@ void MethodTable::SetSlot(UINT32 slotNumber, PCODE slotCode)
_ASSERTE(IsThumbCode(slotCode));
#endif
- *GetSlotPtrRaw(slotNumber) = slotCode;
+ TADDR slot = GetSlotPtrRaw(slotNumber);
+ if (slotNumber < GetNumVirtuals())
+ {
+ ((MethodTable::VTableIndir2_t *) slot)->SetValueMaybeNull(slotCode);
+ }
+ else
+ {
+ *((PCODE *)slot) = slotCode;
+ }
}
//==========================================================================================
diff --git a/src/vm/methodtable.h b/src/vm/methodtable.h
index e88fe16644..6c216a2559 100644
--- a/src/vm/methodtable.h
+++ b/src/vm/methodtable.h
@@ -111,7 +111,7 @@ struct InterfaceInfo_t
#endif
// Method table of the interface
-#if defined(PLATFORM_UNIX) && defined(_TARGET_ARM_)
+#if defined(FEATURE_NGEN_RELOCS_OPTIMIZATIONS)
RelativeFixupPointer<PTR_MethodTable> m_pMethodTable;
#else
FixupPointer<PTR_MethodTable> m_pMethodTable;
@@ -1542,13 +1542,18 @@ public:
WRAPPER_NO_CONTRACT;
STATIC_CONTRACT_SO_TOLERANT;
CONSISTENCY_CHECK(slotNumber < GetNumVtableSlots());
- PTR_PCODE pSlot = GetSlotPtrRaw(slotNumber);
- if (IsZapped() && slotNumber >= GetNumVirtuals())
+
+ TADDR pSlot = GetSlotPtrRaw(slotNumber);
+ if (slotNumber < GetNumVirtuals())
+ {
+ return VTableIndir2_t::GetValueMaybeNullAtPtr(pSlot);
+ }
+ else if (IsZapped() && slotNumber >= GetNumVirtuals())
{
// Non-virtual slots in NGened images are relative pointers
- return RelativePointer<PCODE>::GetValueAtPtr(dac_cast<TADDR>(pSlot));
+ return RelativePointer<PCODE>::GetValueAtPtr(pSlot);
}
- return *pSlot;
+ return *dac_cast<PTR_PCODE>(pSlot);
}
// Special-case for when we know that the slot number corresponds
@@ -1562,10 +1567,11 @@ public:
DWORD index = GetIndexOfVtableIndirection(slotNum);
TADDR base = dac_cast<TADDR>(&(GetVtableIndirections()[index]));
- return *(VTableIndir_t::GetValueMaybeNullAtPtr(base) + GetIndexAfterVtableIndirection(slotNum));
+ DPTR(VTableIndir2_t) baseAfterInd = VTableIndir_t::GetValueMaybeNullAtPtr(base) + GetIndexAfterVtableIndirection(slotNum);
+ return VTableIndir2_t::GetValueMaybeNullAtPtr(dac_cast<TADDR>(baseAfterInd));
}
- PTR_PCODE GetSlotPtrRaw(UINT32 slotNum)
+ TADDR GetSlotPtrRaw(UINT32 slotNum)
{
WRAPPER_NO_CONTRACT;
STATIC_CONTRACT_SO_TOLERANT;
@@ -1576,25 +1582,26 @@ public:
// Virtual slots live in chunks pointed to by vtable indirections
DWORD index = GetIndexOfVtableIndirection(slotNum);
TADDR base = dac_cast<TADDR>(&(GetVtableIndirections()[index]));
- return VTableIndir_t::GetValueMaybeNullAtPtr(base) + GetIndexAfterVtableIndirection(slotNum);
+ DPTR(VTableIndir2_t) baseAfterInd = VTableIndir_t::GetValueMaybeNullAtPtr(base) + GetIndexAfterVtableIndirection(slotNum);
+ return dac_cast<TADDR>(baseAfterInd);
}
else if (HasSingleNonVirtualSlot())
{
// Non-virtual slots < GetNumVtableSlots live in a single chunk pointed to by an optional member,
// except when there is only one in which case it lives in the optional member itself
_ASSERTE(slotNum == GetNumVirtuals());
- return dac_cast<PTR_PCODE>(GetNonVirtualSlotsPtr());
+ return GetNonVirtualSlotsPtr();
}
else
{
// Non-virtual slots < GetNumVtableSlots live in a single chunk pointed to by an optional member
_ASSERTE(HasNonVirtualSlotsArray());
g_IBCLogger.LogMethodTableNonVirtualSlotsAccess(this);
- return GetNonVirtualSlotsArray() + (slotNum - GetNumVirtuals());
+ return dac_cast<TADDR>(GetNonVirtualSlotsArray() + (slotNum - GetNumVirtuals()));
}
}
- PTR_PCODE GetSlotPtr(UINT32 slotNum)
+ TADDR GetSlotPtr(UINT32 slotNum)
{
WRAPPER_NO_CONTRACT;
STATIC_CONTRACT_SO_TOLERANT;
@@ -1660,10 +1667,12 @@ public:
#define VTABLE_SLOTS_PER_CHUNK 8
#define VTABLE_SLOTS_PER_CHUNK_LOG2 3
-#if defined(PLATFORM_UNIX) && defined(_TARGET_ARM_)
- typedef RelativePointer<PTR_PCODE> VTableIndir_t;
+#if defined(FEATURE_NGEN_RELOCS_OPTIMIZATIONS)
+ typedef RelativePointer<PCODE> VTableIndir2_t;
+ typedef RelativePointer<DPTR(VTableIndir2_t)> VTableIndir_t;
#else
- typedef PlainPointer<PTR_PCODE> VTableIndir_t;
+ typedef PlainPointer<PCODE> VTableIndir2_t;
+ typedef PlainPointer<DPTR(VTableIndir2_t)> VTableIndir_t;
#endif
static DWORD GetIndexOfVtableIndirection(DWORD slotNum);
@@ -1692,10 +1701,10 @@ public:
BOOL Finished();
DWORD GetIndex();
DWORD GetOffsetFromMethodTable();
- PTR_PCODE GetIndirectionSlot();
+ DPTR(VTableIndir2_t) GetIndirectionSlot();
#ifndef DACCESS_COMPILE
- void SetIndirectionSlot(PTR_PCODE pChunk);
+ void SetIndirectionSlot(DPTR(VTableIndir2_t) pChunk);
#endif
DWORD GetStartSlot();
@@ -2173,7 +2182,7 @@ public:
// THE METHOD TABLE PARENT (SUPERCLASS/BASE CLASS)
//
-#if defined(PLATFORM_UNIX) && defined(_TARGET_ARM_)
+#if defined(FEATURE_NGEN_RELOCS_OPTIMIZATIONS)
#define PARENT_MT_FIXUP_OFFSET (-FIXUP_POINTER_INDIRECTION)
typedef RelativeFixupPointer<PTR_MethodTable> ParentMT_t;
#else
@@ -2205,7 +2214,7 @@ public:
inline static PTR_VOID GetParentMethodTableOrIndirection(PTR_VOID pMT)
{
WRAPPER_NO_CONTRACT;
-#if defined(PLATFORM_UNIX) && defined(_TARGET_ARM_)
+#if defined(FEATURE_NGEN_RELOCS_OPTIMIZATIONS)
PTR_MethodTable pMethodTable = dac_cast<PTR_MethodTable>(pMT);
PTR_MethodTable pParentMT = ReadPointerMaybeNull((MethodTable*) pMethodTable, &MethodTable::m_pParentMethodTable);
return dac_cast<PTR_VOID>(pParentMT);
@@ -3111,7 +3120,7 @@ public:
// must have a dictionary entry. On the other hand, for instantiations shared with Dict<string,double> the opposite holds.
//
-#if defined(PLATFORM_UNIX) && defined(_TARGET_ARM_)
+#if defined(FEATURE_NGEN_RELOCS_OPTIMIZATIONS)
typedef RelativePointer<PTR_Dictionary> PerInstInfoElem_t;
typedef RelativePointer<DPTR(PerInstInfoElem_t)> PerInstInfo_t;
#else
@@ -4182,7 +4191,7 @@ private:
RelativePointer<PTR_Module> m_pLoaderModule; // LoaderModule. It is equal to the ZapModule in ngened images
-#if defined(PLATFORM_UNIX) && defined(_TARGET_ARM_)
+#if defined(FEATURE_NGEN_RELOCS_OPTIMIZATIONS)
RelativePointer<PTR_MethodTableWriteableData> m_pWriteableData;
#else
PlainPointer<PTR_MethodTableWriteableData> m_pWriteableData;
@@ -4198,7 +4207,7 @@ private:
static const TADDR UNION_MASK = 3;
union {
-#if defined(PLATFORM_UNIX) && defined(_TARGET_ARM_)
+#if defined(FEATURE_NGEN_RELOCS_OPTIMIZATIONS)
RelativePointer<DPTR(EEClass)> m_pEEClass;
RelativePointer<TADDR> m_pCanonMT;
#else
@@ -4233,7 +4242,7 @@ private:
public:
union
{
-#if defined(PLATFORM_UNIX) && defined(_TARGET_ARM_)
+#if defined(FEATURE_NGEN_RELOCS_OPTIMIZATIONS)
RelativePointer<PTR_InterfaceInfo> m_pInterfaceMap;
#else
PlainPointer<PTR_InterfaceInfo> m_pInterfaceMap;
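
With FEATURE_NGEN_RELOCS_OPTIMIZATIONS defined, both levels of the vtable become relative: each VTableIndir_t entry is a relative pointer to a chunk, and each VTableIndir2_t entry inside a chunk is a relative code pointer, which is why the slot accessors above resolve twice. A sketch of the double lookup (hypothetical function and vtableBase parameter; pointer-sized deltas assumed):

    #include <cstdint>

    using TADDR = std::uintptr_t;
    using PCODE = std::uintptr_t;

    static const unsigned VTABLE_SLOTS_PER_CHUNK = 8; // matches the #define above

    // vtableBase is assumed to point at the first VTableIndir_t entry.
    PCODE GetSlotSketch(TADDR vtableBase, unsigned slotNum)
    {
        unsigned chunkIndex   = slotNum / VTABLE_SLOTS_PER_CHUNK;
        unsigned indexInChunk = slotNum % VTABLE_SLOTS_PER_CHUNK;

        // First level: MethodTable -> vtable chunk (VTableIndir_t).
        TADDR indir = vtableBase + chunkIndex * sizeof(std::intptr_t);
        TADDR chunk = indir + *(std::intptr_t *)indir;

        // Second level: chunk entry -> code (VTableIndir2_t).
        TADDR slot = chunk + indexInChunk * sizeof(std::intptr_t);
        return (PCODE)(slot + *(std::intptr_t *)slot);
    }
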
diff --git a/src/vm/methodtable.inl b/src/vm/methodtable.inl
index 4fa81c931b..9e5df0262c 100644
--- a/src/vm/methodtable.inl
+++ b/src/vm/methodtable.inl
@@ -955,7 +955,7 @@ inline DWORD MethodTable::VtableIndirectionSlotIterator::GetOffsetFromMethodTabl
}
//==========================================================================================
-inline PTR_PCODE MethodTable::VtableIndirectionSlotIterator::GetIndirectionSlot()
+inline DPTR(MethodTable::VTableIndir2_t) MethodTable::VtableIndirectionSlotIterator::GetIndirectionSlot()
{
LIMITED_METHOD_DAC_CONTRACT;
PRECONDITION(m_i != (DWORD) -1 && m_i < m_count);
@@ -965,7 +965,7 @@ inline PTR_PCODE MethodTable::VtableIndirectionSlotIterator::GetIndirectionSlot(
//==========================================================================================
#ifndef DACCESS_COMPILE
-inline void MethodTable::VtableIndirectionSlotIterator::SetIndirectionSlot(PTR_PCODE pChunk)
+inline void MethodTable::VtableIndirectionSlotIterator::SetIndirectionSlot(DPTR(MethodTable::VTableIndir2_t) pChunk)
{
LIMITED_METHOD_CONTRACT;
m_pSlot->SetValueMaybeNull(pChunk);
diff --git a/src/vm/methodtablebuilder.cpp b/src/vm/methodtablebuilder.cpp
index c57677b316..37ca940499 100644
--- a/src/vm/methodtablebuilder.cpp
+++ b/src/vm/methodtablebuilder.cpp
@@ -9987,7 +9987,7 @@ MethodTable * MethodTableBuilder::AllocateNewMT(Module *pLoaderModule,
else
{
// Use the locally allocated chunk
- it.SetIndirectionSlot((PTR_PCODE)(pData+dwCurrentUnsharedSlotOffset));
+ it.SetIndirectionSlot((MethodTable::VTableIndir2_t *)(pData+dwCurrentUnsharedSlotOffset));
dwCurrentUnsharedSlotOffset += it.GetSize();
}
}
@@ -10553,7 +10553,7 @@ MethodTableBuilder::SetupMethodTable2(
if (pMD->HasNonVtableSlot())
{
- *pMD->GetAddrOfSlot() = addr;
+ *((PCODE *)pMD->GetAddrOfSlot()) = addr;
}
else
{