author     Gleb Balykov <g.balykov@samsung.com>  2018-06-29 17:25:17 +0300
committer  Hyungju Lee (Tizen Platform Lab (SR), Staff Engineer, Samsung Electronics) <leee.lee@samsung.com>  2018-07-06 09:23:22 +0900
commit     1f6ace8289c0b3c62d5ae21deb315470a51dafa3 (patch)
tree       e96ae03e68438ed59d7662d15781ded44f55703d
parent     b192de29e9035467716c59187d774f2105c89521 (diff)
download   coreclr-1f6ace8289c0b3c62d5ae21deb315470a51dafa3.tar.gz
           coreclr-1f6ace8289c0b3c62d5ae21deb315470a51dafa3.tar.bz2
           coreclr-1f6ace8289c0b3c62d5ae21deb315470a51dafa3.zip
Remove relocations for vtable chunks (#17147)
* Separate the READONLY_VCHUNKS and READONLY_DICTIONARY sections.
* Remove relocations for the second-level indirection of the vtable when FEATURE_NGEN_RELOCS_OPTIMIZATIONS is enabled. Introduce FEATURE_NGEN_RELOCS_OPTIMIZATIONS, under which NGEN-specific relocation optimizations are enabled.
* Replace push/pop of R11 in stubs with:
  - str/ldr of R4 into space reserved in the epilog for non-tail calls;
  - use of R4 for hybrid tail calls (same as for EmitShuffleThunk).
* Replace push/pop of R11 in the function epilog with use of LR as a helper register right before LR is restored from the stack.
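For context: a relative pointer stores a self-relative offset instead of an absolute address, so the stored value is valid at whatever base address the NGEN image is mapped and needs no relocation entry. A minimal sketch of the idea, loosely modeled on RelativePointer from src/inc/fixuppointer.h (simplified, not the real implementation):

    #include <cstdint>
    typedef uintptr_t TADDR;
    typedef uintptr_t PCODE;

    struct RelativeSlot
    {
        TADDR m_delta;   // holds (target - address of this slot)

        void SetValue(PCODE target)
        {
            m_delta = (TADDR)target - (TADDR)this;   // store self-relative offset
        }

        PCODE GetValue() const
        {
            return (PCODE)((TADDR)this + m_delta);   // rebase at read time
        }
    };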
-rw-r--r--  clrdefinitions.cmake           |   3
-rw-r--r--  src/debug/daccess/nidump.cpp   |  43
-rw-r--r--  src/debug/daccess/nidump.h     |   9
-rw-r--r--  src/inc/corcompile.h           |   3
-rw-r--r--  src/inc/corinfo.h              |  24
-rw-r--r--  src/inc/fixuppointer.h         |  33
-rw-r--r--  src/jit/codegencommon.cpp      |  72
-rw-r--r--  src/jit/importer.cpp           |   7
-rw-r--r--  src/jit/lower.cpp              |  51
-rw-r--r--  src/jit/lower.h                |   6
-rw-r--r--  src/jit/morph.cpp              |  16
-rw-r--r--  src/vm/arm/stubs.cpp           |  88
-rw-r--r--  src/vm/array.cpp               |   4
-rw-r--r--  src/vm/dataimage.cpp           |   4
-rw-r--r--  src/vm/generics.cpp            |   4
-rw-r--r--  src/vm/jitinterface.cpp        |  16
-rw-r--r--  src/vm/method.cpp              |  86
-rw-r--r--  src/vm/method.hpp              |  15
-rw-r--r--  src/vm/methodtable.cpp         |  27
-rw-r--r--  src/vm/methodtable.h           |  53
-rw-r--r--  src/vm/methodtable.inl         |   4
-rw-r--r--  src/vm/methodtablebuilder.cpp  |   4
-rw-r--r--  src/zap/zapimage.cpp           |   3
23 files changed, 466 insertions(+), 109 deletions(-)
diff --git a/clrdefinitions.cmake b/clrdefinitions.cmake
index 66490e5b69..d21fb85524 100644
--- a/clrdefinitions.cmake
+++ b/clrdefinitions.cmake
@@ -195,6 +195,9 @@ add_definitions(-DFEATURE_STRONGNAME_MIGRATION)
if (CLR_CMAKE_PLATFORM_UNIX OR CLR_CMAKE_TARGET_ARCH_ARM64)
add_definitions(-DFEATURE_STUBS_AS_IL)
endif ()
+if (FEATURE_NGEN_RELOCS_OPTIMIZATIONS)
+ add_definitions(-DFEATURE_NGEN_RELOCS_OPTIMIZATIONS)
+endif(FEATURE_NGEN_RELOCS_OPTIMIZATIONS)
add_definitions(-DFEATURE_SVR_GC)
add_definitions(-DFEATURE_SYMDIFF)
add_definitions(-DFEATURE_TIERED_COMPILATION)
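The new CMake option is a plain pass-through: setting FEATURE_NGEN_RELOCS_OPTIMIZATIONS at configure time becomes a compile definition, and runtime code then selects pointer layouts with the preprocessor. A hedged sketch of the consumption pattern (type names taken from the methodtable.h hunk below; the templates here are stand-ins for the real ones in src/inc/fixuppointer.h):

    #include <cstdint>
    typedef uintptr_t PCODE;

    // Stand-in templates; the real ones live in src/inc/fixuppointer.h.
    template <typename T> struct RelativePointer { intptr_t m_delta; static const bool isRelative = true;  };
    template <typename T> struct PlainPointer    { T m_ptr;          static const bool isRelative = false; };

    #if defined(FEATURE_NGEN_RELOCS_OPTIMIZATIONS)
    typedef RelativePointer<PCODE> VTableIndir2_t;   // relocation-free slots
    #else
    typedef PlainPointer<PCODE>    VTableIndir2_t;   // absolute slots
    #endif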
diff --git a/src/debug/daccess/nidump.cpp b/src/debug/daccess/nidump.cpp
index e948687c0e..b6d7da7090 100644
--- a/src/debug/daccess/nidump.cpp
+++ b/src/debug/daccess/nidump.cpp
@@ -5907,7 +5907,9 @@ void NativeImageDumper::DumpTypes(PTR_Module module)
for (COUNT_T i = 0; i < slotChunkCount; ++i)
{
- DumpMethodTableSlotChunk(m_discoveredSlotChunks[i].addr, m_discoveredSlotChunks[i].nSlots);
+ DumpMethodTableSlotChunk(m_discoveredSlotChunks[i].addr,
+ m_discoveredSlotChunks[i].nSlots,
+ m_discoveredSlotChunks[i].isRelative);
}
}
DisplayEndArray( "Total MethodTableSlotChunks", METHODTABLES );
@@ -7172,8 +7174,9 @@ NativeImageDumper::DumpMethodTable( PTR_MethodTable mt, const char * name,
while (itIndirect.Next())
{
SlotChunk sc;
- sc.addr = itIndirect.GetIndirectionSlot();
+ sc.addr = dac_cast<TADDR>(itIndirect.GetIndirectionSlot());
sc.nSlots = (WORD)itIndirect.GetNumSlots();
+ sc.isRelative = MethodTable::VTableIndir2_t::isRelative;
m_discoveredSlotChunks.AppendEx(sc);
}
@@ -7185,7 +7188,7 @@ NativeImageDumper::DumpMethodTable( PTR_MethodTable mt, const char * name,
DisplayStartElement( "Slot", ALWAYS );
DisplayWriteElementInt( "Index", i, ALWAYS );
TADDR base = dac_cast<TADDR>(&(mt->GetVtableIndirections()[i]));
- PTR_PCODE tgt = MethodTable::VTableIndir_t::GetValueMaybeNullAtPtr(base);
+ DPTR(MethodTable::VTableIndir2_t) tgt = MethodTable::VTableIndir_t::GetValueMaybeNullAtPtr(base);
DisplayWriteElementPointer( "Pointer",
DataPtrToDisplay(dac_cast<TADDR>(tgt)),
ALWAYS );
@@ -7207,8 +7210,9 @@ NativeImageDumper::DumpMethodTable( PTR_MethodTable mt, const char * name,
DisplayEndElement( ALWAYS ); //Slot
SlotChunk sc;
- sc.addr = tgt;
+ sc.addr = dac_cast<TADDR>(tgt);
sc.nSlots = (mt->GetNumVtableSlots() - mt->GetNumVirtuals());
+ sc.isRelative = false;
m_discoveredSlotChunks.AppendEx(sc);
}
else if (mt->HasSingleNonVirtualSlot())
@@ -7344,25 +7348,42 @@ NativeImageDumper::DumpMethodTable( PTR_MethodTable mt, const char * name,
#endif
void
-NativeImageDumper::DumpMethodTableSlotChunk( PTR_PCODE slotChunk, COUNT_T numSlots )
+NativeImageDumper::DumpMethodTableSlotChunk( TADDR slotChunk, COUNT_T numSlots, bool isRelative )
{
IF_OPT( METHODTABLES )
{
- DisplayStartStructure( "MethodTableSlotChunk", DPtrToPreferredAddr(slotChunk), numSlots * sizeof(PCODE),
- METHODTABLES );
+ COUNT_T slotsSize;
+ if (isRelative)
+ {
+ slotsSize = numSlots * sizeof(RelativePointer<PCODE>);
+ }
+ else
+ {
+ slotsSize = numSlots * sizeof(PCODE);
+ }
+ DisplayStartStructure( "MethodTableSlotChunk", DataPtrToDisplay(slotChunk), slotsSize, METHODTABLES );
IF_OPT(VERBOSE_TYPES)
{
DisplayStartList( W("[%-4s]: %s (%s)"), ALWAYS );
for( unsigned i = 0; i < numSlots; ++i )
{
- DumpSlot(i, slotChunk[i]);
+ PCODE target;
+ if (isRelative)
+ {
+ target = RelativePointer<PCODE>::GetValueMaybeNullAtPtr(slotChunk + i * sizeof(RelativePointer<PCODE>));
+ }
+ else
+ {
+ target = dac_cast<PTR_PCODE>(slotChunk)[i];
+ }
+
+ DumpSlot(i, target);
}
DisplayEndList( ALWAYS ); //Slot list
}
else
- CoverageRead( PTR_TO_TADDR(slotChunk),
- numSlots * sizeof(PCODE) );
+ CoverageRead( slotChunk, slotsSize );
DisplayEndStructure(ALWAYS); //Slot chunk
}
}
@@ -7735,7 +7756,7 @@ void NativeImageDumper::DumpMethodDesc( PTR_MethodDesc md, PTR_Module module )
}
if ( md->HasNonVtableSlot() )
{
- DisplayWriteElementInt( "Slot", (DWORD)(PTR_TO_TADDR(md->GetAddrOfSlot()) - PTR_TO_TADDR(md)), ALWAYS);
+ DisplayWriteElementInt( "Slot", (DWORD)(md->GetAddrOfSlot() - PTR_TO_TADDR(md)), ALWAYS);
}
if (md->HasNativeCodeSlot())
{
diff --git a/src/debug/daccess/nidump.h b/src/debug/daccess/nidump.h
index fc57e4bf7f..ac1c093f6a 100644
--- a/src/debug/daccess/nidump.h
+++ b/src/debug/daccess/nidump.h
@@ -194,7 +194,7 @@ public:
PTR_Module module );
#ifndef STUB_DISPATCH_ALL
- void DumpMethodTableSlotChunk( PTR_PCODE slotChunk, COUNT_T size );
+ void DumpMethodTableSlotChunk( TADDR slotChunk, COUNT_T size, bool );
#endif
void DumpSlot( unsigned index, PCODE tgt );
@@ -478,6 +478,8 @@ private:
template<typename T>
TADDR DPtrToPreferredAddr( T ptr );
+ TADDR DPtrToPreferredAddr( TADDR tptr );
+
void DumpAssemblySignature(CORCOMPILE_ASSEMBLY_SIGNATURE & assemblySignature);
SIZE_T CountFields( PTR_MethodTable mt );
@@ -500,12 +502,13 @@ private:
struct SlotChunk
{
- PTR_PCODE addr;
+ TADDR addr;
WORD nSlots;
+ bool isRelative;
inline bool operator==(const SlotChunk& sc) const
{
- return (addr == sc.addr) && (nSlots == sc.nSlots);
+ return (addr == sc.addr) && (nSlots == sc.nSlots) && (isRelative == sc.isRelative);
}
inline bool operator<(const SlotChunk& sc) const
diff --git a/src/inc/corcompile.h b/src/inc/corcompile.h
index 6de9c8e340..03dd981cc5 100644
--- a/src/inc/corcompile.h
+++ b/src/inc/corcompile.h
@@ -1331,7 +1331,8 @@ class ICorCompilePreloader
CORCOMPILE_SECTION(READONLY_HOT) \
CORCOMPILE_SECTION(READONLY_WARM) \
CORCOMPILE_SECTION(READONLY_COLD) \
- CORCOMPILE_SECTION(READONLY_VCHUNKS_AND_DICTIONARY) \
+ CORCOMPILE_SECTION(READONLY_VCHUNKS) \
+ CORCOMPILE_SECTION(READONLY_DICTIONARY) \
CORCOMPILE_SECTION(CLASS_COLD) \
CORCOMPILE_SECTION(CROSS_DOMAIN_INFO) \
CORCOMPILE_SECTION(METHOD_PRECODE_COLD) \
diff --git a/src/inc/corinfo.h b/src/inc/corinfo.h
index d430412f3b..917150e8c8 100644
--- a/src/inc/corinfo.h
+++ b/src/inc/corinfo.h
@@ -213,11 +213,11 @@ TODO: Talk about initializing strutures before use
#define SELECTANY extern __declspec(selectany)
#endif
-SELECTANY const GUID JITEEVersionIdentifier = { /* 0ba106c8-81a0-407f-99a1-928448c1eb62 */
- 0x0ba106c8,
- 0x81a0,
- 0x407f,
- {0x99, 0xa1, 0x92, 0x84, 0x48, 0xc1, 0xeb, 0x62}
+SELECTANY const GUID JITEEVersionIdentifier = { /* 45aafd4d-1d23-4647-9ce1-cf09a2677ca0 */
+ 0x45aafd4d,
+ 0x1d23,
+ 0x4647,
+ {0x9c, 0xe1, 0xcf, 0x09, 0xa2, 0x67, 0x7c, 0xa0}
};
//////////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -977,8 +977,9 @@ enum CorInfoIntrinsics
enum InfoAccessType
{
IAT_VALUE, // The info value is directly available
- IAT_PVALUE, // The value needs to be accessed via an indirection
- IAT_PPVALUE // The value needs to be accessed via a double indirection
+ IAT_PVALUE, // The value needs to be accessed via an indirection
+ IAT_PPVALUE, // The value needs to be accessed via a double indirection
+ IAT_RELPVALUE // The value needs to be accessed via a relative indirection
};
enum CorInfoGCType
@@ -1243,6 +1244,7 @@ struct CORINFO_METHOD_INFO
// Constant Lookups are either:
// IAT_VALUE: immediate (relocatable) values,
// IAT_PVALUE: immediate values access via an indirection through an immediate (relocatable) address
+// IAT_RELPVALUE: immediate values access via a relative indirection through an immediate offset
// IAT_PPVALUE: immediate values access via a double indirection through an immediate (relocatable) address
//
// Runtime Lookups
@@ -1268,9 +1270,10 @@ struct CORINFO_CONST_LOOKUP
// If the handle is obtained at compile-time, then this handle is the "exact" handle (class, method, or field)
// Otherwise, it's a representative...
// If accessType is
- // IAT_VALUE --> "handle" stores the real handle or "addr " stores the computed address
- // IAT_PVALUE --> "addr" stores a pointer to a location which will hold the real handle
- // IAT_PPVALUE --> "addr" stores a double indirection to a location which will hold the real handle
+ // IAT_VALUE --> "handle" stores the real handle or "addr " stores the computed address
+ // IAT_PVALUE --> "addr" stores a pointer to a location which will hold the real handle
+ // IAT_RELPVALUE --> "addr" stores a relative pointer to a location which will hold the real handle
+ // IAT_PPVALUE --> "addr" stores a double indirection to a location which will hold the real handle
InfoAccessType accessType;
union
@@ -1361,6 +1364,7 @@ struct CORINFO_LOOKUP
// Otherwise, it's a representative... If accessType is
// IAT_VALUE --> "handle" stores the real handle or "addr " stores the computed address
// IAT_PVALUE --> "addr" stores a pointer to a location which will hold the real handle
+ // IAT_RELPVALUE --> "addr" stores a relative pointer to a location which will hold the real handle
// IAT_PPVALUE --> "addr" stores a double indirection to a location which will hold the real handle
CORINFO_CONST_LOOKUP constLookup;
};
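The new IAT_RELPVALUE differs from IAT_PVALUE only in that the pointed-to cell holds a self-relative offset rather than an absolute address, so the consumer must add the cell's address back. A hedged illustration (hypothetical helper, not part of the JIT-EE interface):

    #include <cstdint>
    typedef uintptr_t TADDR;
    typedef uintptr_t PCODE;
    enum InfoAccessType { IAT_VALUE, IAT_PVALUE, IAT_PPVALUE, IAT_RELPVALUE };

    PCODE ResolveEntryPoint(InfoAccessType accessType, TADDR addr)
    {
        switch (accessType)
        {
        case IAT_VALUE:     return (PCODE)addr;           // addr is the target itself
        case IAT_PVALUE:    return *(PCODE *)addr;        // one absolute indirection
        case IAT_PPVALUE:   return **(PCODE **)addr;      // double absolute indirection
        case IAT_RELPVALUE: return *(PCODE *)addr + addr; // cell holds (target - cell)
        }
        return 0;
    }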
diff --git a/src/inc/fixuppointer.h b/src/inc/fixuppointer.h
index 5a897e44ea..abed1f96a8 100644
--- a/src/inc/fixuppointer.h
+++ b/src/inc/fixuppointer.h
@@ -156,6 +156,26 @@ public:
}
#endif // DACCESS_COMPILE
+ static TADDR GetRelativeMaybeNull(TADDR base, TADDR addr)
+ {
+ LIMITED_METHOD_DAC_CONTRACT;
+ if (addr == NULL)
+ {
+ return NULL;
+ }
+ else
+ {
+ return addr - base;
+ }
+ }
+
+ static TADDR GetRelative(TADDR base, TADDR addr)
+ {
+ LIMITED_METHOD_DAC_CONTRACT;
+ PRECONDITION(addr != NULL);
+ return addr - base;
+ }
+
private:
#ifndef DACCESS_COMPILE
Volatile<TADDR> m_delta;
@@ -721,6 +741,19 @@ public:
}
#endif
+ static TADDR GetRelativeMaybeNull(TADDR base, TADDR addr)
+ {
+ LIMITED_METHOD_DAC_CONTRACT;
+ return addr;
+ }
+
+ static TADDR GetRelative(TADDR base, TADDR addr)
+ {
+ LIMITED_METHOD_DAC_CONTRACT;
+ PRECONDITION(addr != NULL);
+ return addr;
+ }
+
private:
TADDR m_ptr;
};
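GetRelative/GetRelativeMaybeNull convert an absolute value into the representation a slot at `base` must store; the PlainPointer overloads are identity functions, so call sites compile unchanged under either layout. A small usage sketch:

    #include <cassert>
    #include <cstdint>
    typedef uintptr_t TADDR;

    // Mirrors RelativePointer::GetRelative: the delta a slot at 'base'
    // must store so that base + delta recovers 'addr'.
    static TADDR GetRelative(TADDR base, TADDR addr)
    {
        assert(addr != 0);
        return addr - base;
    }

    int main()
    {
        TADDR slot = 0x1000, target = 0x1040;
        TADDR delta = GetRelative(slot, target); // 0x40
        assert(slot + delta == target);          // a read rebases the delta
        return 0;
    }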
diff --git a/src/jit/codegencommon.cpp b/src/jit/codegencommon.cpp
index 99902dc7bc..7c2869199e 100644
--- a/src/jit/codegencommon.cpp
+++ b/src/jit/codegencommon.cpp
@@ -9679,6 +9679,19 @@ void CodeGen::genFnEpilog(BasicBlock* block)
bool jmpEpilog = ((block->bbFlags & BBF_HAS_JMP) != 0);
+ GenTree* lastNode = block->lastNode();
+
+ // Method handle and address info used in case of jump epilog
+ CORINFO_METHOD_HANDLE methHnd = nullptr;
+ CORINFO_CONST_LOOKUP addrInfo;
+ addrInfo.addr = nullptr;
+
+ if (jmpEpilog && lastNode->gtOper == GT_JMP)
+ {
+ methHnd = (CORINFO_METHOD_HANDLE)lastNode->gtVal.gtVal1;
+ compiler->info.compCompHnd->getFunctionEntryPoint(methHnd, &addrInfo);
+ }
+
#ifdef _TARGET_ARM_
// We delay starting the unwind codes until we have an instruction which we know
// needs an unwind code. In particular, for large stack frames in methods without
@@ -9723,6 +9736,30 @@ void CodeGen::genFnEpilog(BasicBlock* block)
unwindStarted = true;
}
+ if (jmpEpilog && lastNode->gtOper == GT_JMP && addrInfo.accessType == IAT_RELPVALUE)
+ {
+ // IAT_RELPVALUE jump at the end is done using relative indirection, so,
+ // additional helper register is required.
+ // We use LR just before it is going to be restored from stack, i.e.
+ //
+ // movw r12, laddr
+ // movt r12, haddr
+ // mov lr, r12
+ // ldr r12, [r12]
+ // add r12, r12, lr
+ // pop {lr}
+ // ...
+ // bx r12
+
+ regNumber indCallReg = REG_R12;
+ regNumber vptrReg1 = REG_LR;
+
+ instGen_Set_Reg_To_Imm(EA_HANDLE_CNS_RELOC, indCallReg, (ssize_t)addrInfo.addr);
+ getEmitter()->emitIns_R_R(INS_mov, EA_PTRSIZE, vptrReg1, indCallReg);
+ getEmitter()->emitIns_R_R_I(INS_ldr, EA_PTRSIZE, indCallReg, indCallReg, 0);
+ getEmitter()->emitIns_R_R(INS_add, EA_PTRSIZE, indCallReg, vptrReg1);
+ }
+
genPopCalleeSavedRegisters(jmpEpilog);
if (regSet.rsMaskPreSpillRegs(true) != RBM_NONE)
@@ -9735,6 +9772,12 @@ void CodeGen::genFnEpilog(BasicBlock* block)
compiler->unwindAllocStack(preSpillRegArgSize);
}
+ if (jmpEpilog)
+ {
+ // We better not have used a pop PC to return otherwise this will be unreachable code
+ noway_assert(!genUsedPopToReturn);
+ }
+
#else // _TARGET_ARM64_
compiler->unwindBegEpilog();
@@ -9743,20 +9786,13 @@ void CodeGen::genFnEpilog(BasicBlock* block)
if (jmpEpilog)
{
-#ifdef _TARGET_ARMARCH_
hasTailCalls = true;
-#endif // _TARGET_ARMARCH_
noway_assert(block->bbJumpKind == BBJ_RETURN);
noway_assert(block->bbTreeList != nullptr);
-#ifdef _TARGET_ARM_
- // We better not have used a pop PC to return otherwise this will be unreachable code
- noway_assert(!genUsedPopToReturn);
-#endif // _TARGET_ARM_
-
/* figure out what jump we have */
- GenTree* jmpNode = block->lastNode();
+ GenTree* jmpNode = lastNode;
#if !FEATURE_FASTTAILCALL
noway_assert(jmpNode->gtOper == GT_JMP);
#else // FEATURE_FASTTAILCALL
@@ -9775,10 +9811,8 @@ void CodeGen::genFnEpilog(BasicBlock* block)
{
// Simply emit a jump to the methodHnd. This is similar to a call so we can use
// the same descriptor with some minor adjustments.
- CORINFO_METHOD_HANDLE methHnd = (CORINFO_METHOD_HANDLE)jmpNode->gtVal.gtVal1;
-
- CORINFO_CONST_LOOKUP addrInfo;
- compiler->info.compCompHnd->getFunctionEntryPoint(methHnd, &addrInfo);
+ assert(methHnd != nullptr);
+ assert(addrInfo.addr != nullptr);
#ifdef _TARGET_ARM_
emitter::EmitCallType callType;
@@ -9814,6 +9848,20 @@ void CodeGen::genFnEpilog(BasicBlock* block)
}
break;
+ case IAT_RELPVALUE:
+ {
+ // Load the address into a register, load relative indirect and call through a register
+ // We have to use R12 since we assume the argument registers are in use
+ // LR is used as helper register right before it is restored from stack, thus,
+ // all relative address calculations are performed before LR is restored.
+ callType = emitter::EC_INDIR_R;
+ indCallReg = REG_R12;
+ addr = NULL;
+
+ regTracker.rsTrackRegTrash(indCallReg);
+ break;
+ }
+
case IAT_PPVALUE:
default:
NO_WAY("Unsupported JMP indirection");
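For an IAT_RELPVALUE jump epilog, the target must be materialized before the callee-saved registers are popped, and the only scratch registers left are R12 and LR (which is reloaded from the stack an instruction later). A hedged C rendering of what the emitted sequence computes:

    #include <cstdint>
    typedef uintptr_t TADDR;
    typedef uintptr_t PCODE;

    // C model of the ARM sequence in the comment above (movw/movt r12;
    // mov lr, r12; ldr r12, [r12]; add r12, r12, lr; pop {lr}; bx r12).
    PCODE ResolveRelpValueTarget(TADDR cell /* addrInfo.addr */)
    {
        TADDR base  = cell;              // mov lr, r12  (LR as helper)
        TADDR delta = *(TADDR *)cell;    // ldr r12, [r12]
        return (PCODE)(delta + base);    // add r12, r12, lr
    }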
diff --git a/src/jit/importer.cpp b/src/jit/importer.cpp
index 182a049820..80b1e875dd 100644
--- a/src/jit/importer.cpp
+++ b/src/jit/importer.cpp
@@ -1794,7 +1794,7 @@ GenTree* Compiler::impLookupToTree(CORINFO_RESOLVED_TOKEN* pResolvedToken,
CORINFO_GENERIC_HANDLE handle = nullptr;
void* pIndirection = nullptr;
- assert(pLookup->constLookup.accessType != IAT_PPVALUE);
+ assert(pLookup->constLookup.accessType != IAT_PPVALUE && pLookup->constLookup.accessType != IAT_RELPVALUE);
if (pLookup->constLookup.accessType == IAT_VALUE)
{
@@ -1829,7 +1829,7 @@ GenTree* Compiler::impReadyToRunLookupToTree(CORINFO_CONST_LOOKUP* pLookup,
{
CORINFO_GENERIC_HANDLE handle = nullptr;
void* pIndirection = nullptr;
- assert(pLookup->accessType != IAT_PPVALUE);
+ assert(pLookup->accessType != IAT_PPVALUE && pLookup->accessType != IAT_RELPVALUE);
if (pLookup->accessType == IAT_VALUE)
{
@@ -7299,7 +7299,8 @@ var_types Compiler::impImportCall(OPCODE opcode,
call = gtNewCallNode(CT_USER_FUNC, callInfo->hMethod, callRetTyp, nullptr, ilOffset);
call->gtCall.gtStubCallStubAddr = callInfo->stubLookup.constLookup.addr;
call->gtFlags |= GTF_CALL_VIRT_STUB;
- assert(callInfo->stubLookup.constLookup.accessType != IAT_PPVALUE);
+ assert(callInfo->stubLookup.constLookup.accessType != IAT_PPVALUE &&
+ callInfo->stubLookup.constLookup.accessType != IAT_RELPVALUE);
if (callInfo->stubLookup.constLookup.accessType == IAT_PVALUE)
{
call->gtCall.gtCallMoreFlags |= GTF_CALL_M_VIRTSTUB_REL_INDIRECT;
diff --git a/src/jit/lower.cpp b/src/jit/lower.cpp
index 3f609e70e4..1d3eb257d3 100644
--- a/src/jit/lower.cpp
+++ b/src/jit/lower.cpp
@@ -3189,6 +3189,16 @@ GenTree* Lowering::LowerDirectCall(GenTreeCall* call)
result = Ind(Ind(result));
break;
+ case IAT_RELPVALUE:
+ {
+ // Non-virtual direct calls to addresses accessed by
+ // a single relative indirection.
+ GenTree* cellAddr = AddrGen(addr);
+ GenTree* indir = Ind(cellAddr);
+ result = comp->gtNewOperNode(GT_ADD, TYP_I_IMPL, indir, AddrGen(addr));
+ break;
+ }
+
default:
noway_assert(!"Bad accessType");
break;
@@ -3977,6 +3987,9 @@ GenTree* Lowering::LowerNonvirtPinvokeCall(GenTreeCall* call)
case IAT_PPVALUE:
result = Ind(Ind(AddrGen(addr)));
break;
+
+ case IAT_RELPVALUE:
+ unreached();
}
}
@@ -4073,19 +4086,24 @@ GenTree* Lowering::LowerVirtualVtableCall(GenTreeCall* call)
//
// Save relative offset to tmp (vtab is virtual table pointer, vtabOffsOfIndirection is offset of
// vtable-1st-level-indirection):
- // tmp = [vtab + vtabOffsOfIndirection]
+ // tmp = vtab
//
// Save address of method to result (vtabOffsAfterIndirection is offset of vtable-2nd-level-indirection):
- // result = [vtab + vtabOffsOfIndirection + vtabOffsAfterIndirection + tmp]
+ // result = [tmp + vtabOffsOfIndirection + vtabOffsAfterIndirection + [tmp + vtabOffsOfIndirection]]
+ //
+ //
+ // If relative pointers are also in second level indirection, additional temporary is used:
+ // tmp1 = vtab
+ // tmp2 = tmp1 + vtabOffsOfIndirection + vtabOffsAfterIndirection + [tmp1 + vtabOffsOfIndirection]
+ // result = tmp2 + [tmp2]
+ //
unsigned lclNumTmp = comp->lvaGrabTemp(true DEBUGARG("lclNumTmp"));
-
comp->lvaTable[lclNumTmp].incRefCnts(comp->compCurBB->getBBWeight(comp), comp);
- GenTree* lclvNodeStore = comp->gtNewTempAssign(lclNumTmp, result);
- LIR::Range range = LIR::SeqTree(comp, lclvNodeStore);
- JITDUMP("result of obtaining pointer to virtual table:\n");
- DISPRANGE(range);
- BlockRange().InsertBefore(call, std::move(range));
+ unsigned lclNumTmp2 = comp->lvaGrabTemp(true DEBUGARG("lclNumTmp2"));
+ comp->lvaTable[lclNumTmp2].incRefCnts(comp->compCurBB->getBBWeight(comp), comp);
+
+ GenTree* lclvNodeStore = comp->gtNewTempAssign(lclNumTmp, result);
GenTree* tmpTree = comp->gtNewLclvNode(lclNumTmp, result->TypeGet());
tmpTree = Offset(tmpTree, vtabOffsOfIndirection);
@@ -4094,7 +4112,22 @@ GenTree* Lowering::LowerVirtualVtableCall(GenTreeCall* call)
GenTree* offs = comp->gtNewIconNode(vtabOffsOfIndirection + vtabOffsAfterIndirection, TYP_INT);
result = comp->gtNewOperNode(GT_ADD, TYP_I_IMPL, comp->gtNewLclvNode(lclNumTmp, result->TypeGet()), offs);
- result = Ind(OffsetByIndex(result, tmpTree));
+ GenTree* base = OffsetByIndexWithScale(result, tmpTree, 1);
+ GenTree* lclvNodeStore2 = comp->gtNewTempAssign(lclNumTmp2, base);
+
+ LIR::Range range = LIR::SeqTree(comp, lclvNodeStore);
+ JITDUMP("result of obtaining pointer to virtual table:\n");
+ DISPRANGE(range);
+ BlockRange().InsertBefore(call, std::move(range));
+
+ LIR::Range range2 = LIR::SeqTree(comp, lclvNodeStore2);
+ JITDUMP("result of obtaining pointer to virtual table 2nd level indirection:\n");
+ DISPRANGE(range2);
+ BlockRange().InsertAfter(lclvNodeStore, std::move(range2));
+
+ result = Ind(comp->gtNewLclvNode(lclNumTmp2, result->TypeGet()));
+ result =
+ comp->gtNewOperNode(GT_ADD, TYP_I_IMPL, result, comp->gtNewLclvNode(lclNumTmp2, result->TypeGet()));
}
else
{
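The second temporary exists because the address of the second-level slot is needed twice: once to load the stored delta and once to rebase it. In flat pointer arithmetic, the fully relative lookup the new lowering builds is (hedged model of the comment block above):

    #include <cstddef>
    #include <cstdint>
    typedef uintptr_t TADDR;
    typedef uintptr_t PCODE;

    // tmp1 = vtab
    // tmp2 = tmp1 + offs1 + offs2 + [tmp1 + offs1]   (slot within the chunk)
    // result = tmp2 + [tmp2]                         (absolute entry point)
    PCODE ResolveVirtualSlot(TADDR vtab, size_t offs1 /* vtabOffsOfIndirection */,
                             size_t offs2 /* vtabOffsAfterIndirection */)
    {
        TADDR tmp1 = vtab;
        TADDR tmp2 = tmp1 + offs1 + offs2 + *(TADDR *)(tmp1 + offs1);
        return (PCODE)(tmp2 + *(TADDR *)tmp2);
    }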
diff --git a/src/jit/lower.h b/src/jit/lower.h
index 0d298e0a52..63319961be 100644
--- a/src/jit/lower.h
+++ b/src/jit/lower.h
@@ -208,6 +208,12 @@ private:
return new (comp, GT_LEA) GenTreeAddrMode(resultType, base, index, 0, 0);
}
+ GenTree* OffsetByIndexWithScale(GenTree* base, GenTree* index, unsigned scale)
+ {
+ var_types resultType = (base->TypeGet() == TYP_REF) ? TYP_BYREF : base->TypeGet();
+ return new (comp, GT_LEA) GenTreeAddrMode(resultType, base, index, scale, 0);
+ }
+
// Replace the definition of the given use with a lclVar, allocating a new temp
// if 'tempNum' is BAD_VAR_NUM.
unsigned ReplaceWithLclVar(LIR::Use& use, unsigned tempNum = BAD_VAR_NUM)
diff --git a/src/jit/morph.cpp b/src/jit/morph.cpp
index 2c3c280a11..33bbc74001 100644
--- a/src/jit/morph.cpp
+++ b/src/jit/morph.cpp
@@ -7927,9 +7927,23 @@ void Compiler::fgMorphTailCall(GenTreeCall* call)
/* Now the appropriate vtable slot */
- add = gtNewOperNode(GT_ADD, TYP_I_IMPL, vtbl, gtNewIconNode(vtabOffsAfterIndirection, TYP_I_IMPL));
+ add = gtNewOperNode(GT_ADD, TYP_I_IMPL, vtbl, gtNewIconNode(vtabOffsAfterIndirection, TYP_I_IMPL));
+
+ GenTree* indOffTree = nullptr;
+
+ if (isRelative)
+ {
+ indOffTree = impCloneExpr(add, &add, NO_CLASS_HANDLE, (unsigned)CHECK_SPILL_ALL,
+ nullptr DEBUGARG("virtual table call 2"));
+ }
+
vtbl = gtNewOperNode(GT_IND, TYP_I_IMPL, add);
+ if (isRelative)
+ {
+ vtbl = gtNewOperNode(GT_ADD, TYP_I_IMPL, vtbl, indOffTree);
+ }
+
// Switch this to a plain indirect call
call->gtFlags &= ~GTF_CALL_VIRT_KIND_MASK;
assert(!call->IsVirtual());
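fgMorphTailCall performs the same rebase at the IR level: the slot address (`add`) is cloned before the GT_IND consumes it, so a copy survives to feed the GT_ADD that turns the loaded offset back into an absolute address. A hedged flat-code equivalent:

    #include <cstdint>
    typedef uintptr_t TADDR;

    // Why impCloneExpr is needed: the load consumes 'add', so the rebase
    // requires an independent copy of the slot address.
    TADDR LoadRelativeSlot(TADDR add)
    {
        TADDR indOffTree = add;        // impCloneExpr(add, ...)
        TADDR vtbl = *(TADDR *)add;    // GT_IND(add): stored relative offset
        return vtbl + indOffTree;      // GT_ADD: offset + slot address
    }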
diff --git a/src/vm/arm/stubs.cpp b/src/vm/arm/stubs.cpp
index 916c949df1..cb9ff602ff 100644
--- a/src/vm/arm/stubs.cpp
+++ b/src/vm/arm/stubs.cpp
@@ -1680,6 +1680,13 @@ VOID StubLinkerCPU::EmitShuffleThunk(ShuffleEntry *pShuffleEntryArray)
void StubLinkerCPU::ThumbEmitCallManagedMethod(MethodDesc *pMD, bool fTailcall)
{
+ bool isRelative = MethodTable::VTableIndir2_t::isRelative
+ && pMD->IsVtableSlot();
+
+#ifndef FEATURE_NGEN_RELOCS_OPTIMIZATIONS
+ _ASSERTE(!isRelative);
+#endif
+
// Use direct call if possible.
if (pMD->HasStableEntryPoint())
{
@@ -1691,14 +1698,47 @@ void StubLinkerCPU::ThumbEmitCallManagedMethod(MethodDesc *pMD, bool fTailcall)
// mov r12, #slotaddress
ThumbEmitMovConstant(ThumbReg(12), (TADDR)pMD->GetAddrOfSlot());
+ if (isRelative)
+ {
+ if (!fTailcall)
+ {
+ // str r4, [sp, 0]
+ ThumbEmitStoreRegIndirect(ThumbReg(4), thumbRegSp, 0);
+ }
+
+ // mov r4, r12
+ ThumbEmitMovRegReg(ThumbReg(4), ThumbReg(12));
+ }
+
// ldr r12, [r12]
ThumbEmitLoadRegIndirect(ThumbReg(12), ThumbReg(12), 0);
+
+ if (isRelative)
+ {
+ // add r12, r4
+ ThumbEmitAddReg(ThumbReg(12), ThumbReg(4));
+
+ if (!fTailcall)
+ {
+ // ldr r4, [sp, 0]
+ ThumbEmitLoadRegIndirect(ThumbReg(4), thumbRegSp, 0);
+ }
+ }
}
if (fTailcall)
{
- // bx r12
- ThumbEmitJumpRegister(ThumbReg(12));
+ if (!isRelative)
+ {
+ // bx r12
+ ThumbEmitJumpRegister(ThumbReg(12));
+ }
+ else
+ {
+ // Replace LR with R12 on stack: hybrid-tail call, same as for EmitShuffleThunk
+ // str r12, [sp, 4]
+ ThumbEmitStoreRegIndirect(ThumbReg(12), thumbRegSp, 4);
+ }
}
else
{
@@ -1835,6 +1875,13 @@ void StubLinkerCPU::ThumbEmitCallWithGenericInstantiationParameter(MethodDesc *p
}
}
+ bool isRelative = MethodTable::VTableIndir2_t::isRelative
+ && pMD->IsVtableSlot();
+
+#ifndef FEATURE_NGEN_RELOCS_OPTIMIZATIONS
+ _ASSERTE(!isRelative);
+#endif
+
// Update descriptor count to the actual number used.
cArgDescriptors = idxCurrentDesc;
@@ -1927,7 +1974,17 @@ void StubLinkerCPU::ThumbEmitCallWithGenericInstantiationParameter(MethodDesc *p
}
// Emit a tail call to the target method.
+ if (isRelative)
+ {
+ ThumbEmitProlog(1, 0, FALSE);
+ }
+
ThumbEmitCallManagedMethod(pMD, true);
+
+ if (isRelative)
+ {
+ ThumbEmitEpilog();
+ }
}
else
{
@@ -1936,7 +1993,9 @@ void StubLinkerCPU::ThumbEmitCallWithGenericInstantiationParameter(MethodDesc *p
// Calculate the size of the new stack frame:
//
// +------------+
- // SP -> | | <-+
+ // SP -> | | <-- Space for helper arg, if isRelative is true
+ // +------------+
+ // | | <-+
// : : | Outgoing arguments
// | | <-+
// +------------+
@@ -1967,6 +2026,12 @@ void StubLinkerCPU::ThumbEmitCallWithGenericInstantiationParameter(MethodDesc *p
DWORD cbStackArgs = (pLastArg->m_idxDst + 1) * 4;
DWORD cbStackFrame = cbStackArgs + sizeof(GSCookie) + sizeof(StubHelperFrame);
cbStackFrame = ALIGN_UP(cbStackFrame, 8);
+
+ if (isRelative)
+ {
+ cbStackFrame += 4;
+ }
+
DWORD cbStackFrameWithoutSavedRegs = cbStackFrame - (13 * 4); // r0-r11,lr
// Prolog:
@@ -2175,8 +2240,25 @@ void StubLinkerCPU::EmitUnboxMethodStub(MethodDesc *pMD)
// add r0, #4
ThumbEmitIncrement(ThumbReg(0), 4);
+ bool isRelative = MethodTable::VTableIndir2_t::isRelative
+ && pMD->IsVtableSlot();
+
+#ifndef FEATURE_NGEN_RELOCS_OPTIMIZATIONS
+ _ASSERTE(!isRelative);
+#endif
+
+ if (isRelative)
+ {
+ ThumbEmitProlog(1, 0, FALSE);
+ }
+
// Tail call the real target.
ThumbEmitCallManagedMethod(pMD, true /* tail call */);
+
+ if (isRelative)
+ {
+ ThumbEmitEpilog();
+ }
}
}
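The stub-side pattern avoids push/pop of a helper register entirely: for non-tail calls, R4 is spilled into a stack slot the prolog/epilog already reserve; for tail calls, the resolved target overwrites LR's slot on the stack so the final pop restores registers and branches in one step (the "hybrid" tail call). A hedged C model of the non-tail-call path:

    #include <cstdint>
    typedef uintptr_t TADDR;
    typedef uintptr_t PCODE;

    // 'r4' and 'spill' stand for the register and the reserved [sp, #0] slot.
    PCODE CallThroughRelativeSlot(TADDR r12 /* slot address */, TADDR *r4, TADDR *spill)
    {
        *spill = *r4;             // str r4, [sp, #0]
        *r4 = r12;                // mov r4, r12
        r12 = *(TADDR *)r12;      // ldr r12, [r12]  (stored delta)
        r12 += *r4;               // add r12, r4     (rebase to absolute)
        *r4 = *spill;             // ldr r4, [sp, #0]
        return (PCODE)r12;        // then: blx r12
    }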
diff --git a/src/vm/array.cpp b/src/vm/array.cpp
index 6c58fe727b..9295c7cdc8 100644
--- a/src/vm/array.cpp
+++ b/src/vm/array.cpp
@@ -374,7 +374,7 @@ MethodTable* Module::CreateArrayMethodTable(TypeHandle elemTypeHnd, CorElementTy
// If none, we need to allocate space for the slots
if (!canShareVtableChunks)
{
- cbMT += numVirtuals * sizeof(PCODE);
+ cbMT += numVirtuals * sizeof(MethodTable::VTableIndir2_t);
}
// Canonical methodtable has an array of non virtual slots pointed to by the optional member
@@ -544,7 +544,7 @@ MethodTable* Module::CreateArrayMethodTable(TypeHandle elemTypeHnd, CorElementTy
else
{
// Use the locally allocated chunk
- it.SetIndirectionSlot((PTR_PCODE)(pMemory+cbArrayClass+offsetOfUnsharedVtableChunks));
+ it.SetIndirectionSlot((MethodTable::VTableIndir2_t *)(pMemory+cbArrayClass+offsetOfUnsharedVtableChunks));
offsetOfUnsharedVtableChunks += it.GetSize();
}
}
diff --git a/src/vm/dataimage.cpp b/src/vm/dataimage.cpp
index 4e276fe460..854f214aea 100644
--- a/src/vm/dataimage.cpp
+++ b/src/vm/dataimage.cpp
@@ -749,8 +749,10 @@ FORCEINLINE static CorCompileSection GetSectionForNodeType(ZapNodeType type)
return CORCOMPILE_SECTION_READONLY_WARM;
case NodeTypeForItemKind(DataImage::ITEM_DICTIONARY):
+ return CORCOMPILE_SECTION_READONLY_DICTIONARY;
+
case NodeTypeForItemKind(DataImage::ITEM_VTABLE_CHUNK):
- return CORCOMPILE_SECTION_READONLY_VCHUNKS_AND_DICTIONARY;
+ return CORCOMPILE_SECTION_READONLY_VCHUNKS;
// SECTION_CLASS_COLD
case NodeTypeForItemKind(DataImage::ITEM_PARAM_TYPEDESC):
diff --git a/src/vm/generics.cpp b/src/vm/generics.cpp
index 4ff877ed43..b68054985e 100644
--- a/src/vm/generics.cpp
+++ b/src/vm/generics.cpp
@@ -324,7 +324,7 @@ ClassLoader::CreateTypeHandleForNonCanonicalGenericInstantiation(
// If none, we need to allocate space for the slots
if (!canShareVtableChunks)
{
- allocSize += S_SIZE_T( cSlots ) * S_SIZE_T( sizeof(PCODE) );
+ allocSize += S_SIZE_T( cSlots ) * S_SIZE_T( sizeof(MethodTable::VTableIndir2_t) );
}
if (allocSize.IsOverflow())
@@ -446,7 +446,7 @@ ClassLoader::CreateTypeHandleForNonCanonicalGenericInstantiation(
else
{
// Use the locally allocated chunk
- it.SetIndirectionSlot((PTR_PCODE)(pMemory+offsetOfUnsharedVtableChunks));
+ it.SetIndirectionSlot((MethodTable::VTableIndir2_t *)(pMemory+offsetOfUnsharedVtableChunks));
offsetOfUnsharedVtableChunks += it.GetSize();
}
}
diff --git a/src/vm/jitinterface.cpp b/src/vm/jitinterface.cpp
index 502b5ada13..e92f875c4c 100644
--- a/src/vm/jitinterface.cpp
+++ b/src/vm/jitinterface.cpp
@@ -8744,8 +8744,9 @@ void CEEInfo::getMethodVTableOffset (CORINFO_METHOD_HANDLE methodHnd,
_ASSERTE(method->GetSlot() < method->GetMethodTable()->GetNumVirtuals());
*pOffsetOfIndirection = MethodTable::GetVtableOffset() + MethodTable::GetIndexOfVtableIndirection(method->GetSlot()) * sizeof(MethodTable::VTableIndir_t);
- *pOffsetAfterIndirection = MethodTable::GetIndexAfterVtableIndirection(method->GetSlot()) * sizeof(PCODE);
+ *pOffsetAfterIndirection = MethodTable::GetIndexAfterVtableIndirection(method->GetSlot()) * sizeof(MethodTable::VTableIndir2_t);
*isRelative = MethodTable::VTableIndir_t::isRelative ? 1 : 0;
+ _ASSERTE(MethodTable::VTableIndir_t::isRelative == MethodTable::VTableIndir2_t::isRelative);
EE_TO_JIT_TRANSITION_LEAF();
}
@@ -9123,8 +9124,17 @@ void CEEInfo::getFunctionEntryPoint(CORINFO_METHOD_HANDLE ftnHnd,
_ASSERTE((accessFlags & CORINFO_ACCESS_THIS) || !ftn->IsRemotingInterceptedViaVirtualDispatch());
- ret = ftn->GetAddrOfSlot();
- accessType = IAT_PVALUE;
+ ret = (void *)ftn->GetAddrOfSlot();
+
+ if (MethodTable::VTableIndir2_t::isRelative
+ && ftn->IsVtableSlot())
+ {
+ accessType = IAT_RELPVALUE;
+ }
+ else
+ {
+ accessType = IAT_PVALUE;
+ }
}
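getFunctionEntryPoint now reports IAT_RELPVALUE exactly when the returned slot lives in a relative vtable chunk, so the JIT knows an extra rebase is required. A hedged sketch of the selection:

    enum InfoAccessType { IAT_VALUE, IAT_PVALUE, IAT_PPVALUE, IAT_RELPVALUE };

    // 'isRelativeLayout' mirrors MethodTable::VTableIndir2_t::isRelative;
    // 'isVtableSlot' mirrors MethodDesc::IsVtableSlot (a virtual slot with
    // no separate non-vtable slot).
    InfoAccessType SelectAccessType(bool isRelativeLayout, bool isVtableSlot)
    {
        return (isRelativeLayout && isVtableSlot) ? IAT_RELPVALUE : IAT_PVALUE;
    }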
diff --git a/src/vm/method.cpp b/src/vm/method.cpp
index e1bd021bc7..2b310c423a 100644
--- a/src/vm/method.cpp
+++ b/src/vm/method.cpp
@@ -563,7 +563,7 @@ PCODE MethodDesc::GetMethodEntryPoint()
return GetMethodTable_NoLogging()->GetSlot(GetSlot());
}
-PTR_PCODE MethodDesc::GetAddrOfSlot()
+TADDR MethodDesc::GetAddrOfSlot()
{
CONTRACTL
{
@@ -584,7 +584,7 @@ PTR_PCODE MethodDesc::GetAddrOfSlot()
SIZE_T size = GetBaseSize();
- return PTR_PCODE(dac_cast<TADDR>(this) + size);
+ return dac_cast<TADDR>(this) + size;
}
_ASSERTE(GetMethodTable()->IsCanonicalMethodTable());
@@ -2342,7 +2342,15 @@ void MethodDesc::Reset()
InterlockedUpdateFlags2(enum_flag2_HasStableEntryPoint | enum_flag2_HasPrecode, FALSE);
- *GetAddrOfSlot() = GetTemporaryEntryPoint();
+ TADDR slot = GetAddrOfSlot();
+ if (IsVtableSlot())
+ {
+ ((MethodTable::VTableIndir2_t *) slot)->SetValue(GetTemporaryEntryPoint());
+ }
+ else
+ {
+ *((PCODE *) slot) = GetTemporaryEntryPoint();
+ }
}
if (HasNativeCodeSlot())
@@ -4711,9 +4719,19 @@ void MethodDesc::SetTemporaryEntryPoint(LoaderAllocator *pLoaderAllocator, Alloc
GetMethodDescChunk()->EnsureTemporaryEntryPointsCreated(pLoaderAllocator, pamTracker);
- PTR_PCODE pSlot = GetAddrOfSlot();
- _ASSERTE(*pSlot == NULL);
- *pSlot = GetTemporaryEntryPoint();
+ TADDR slot = GetAddrOfSlot();
+ if (IsVtableSlot())
+ {
+ MethodTable::VTableIndir2_t *slotPtr = ((MethodTable::VTableIndir2_t *) slot);
+ _ASSERTE(slotPtr->IsNull());
+ slotPtr->SetValue(GetTemporaryEntryPoint());
+ }
+ else
+ {
+ PCODE *slotPtr = (PCODE *) slot;
+ _ASSERTE(*slotPtr == NULL);
+ *slotPtr = GetTemporaryEntryPoint();
+ }
if (RequiresStableEntryPoint())
{
@@ -4776,7 +4794,7 @@ Precode* MethodDesc::GetOrCreatePrecode()
return GetPrecode();
}
- PTR_PCODE pSlot = GetAddrOfSlot();
+ TADDR pSlot = GetAddrOfSlot();
PCODE tempEntry = GetTemporaryEntryPoint();
PrecodeType requiredType = GetPrecodeType();
@@ -4796,14 +4814,40 @@ Precode* MethodDesc::GetOrCreatePrecode()
AllocMemTracker amt;
Precode* pPrecode = Precode::Allocate(requiredType, this, GetLoaderAllocator(), &amt);
- if (FastInterlockCompareExchangePointer(EnsureWritablePages(pSlot), pPrecode->GetEntryPoint(), tempEntry) == tempEntry)
+ PCODE newVal;
+ PCODE oldVal;
+ TADDR *slotAddr;
+
+ if (IsVtableSlot())
+ {
+ newVal = MethodTable::VTableIndir2_t::GetRelative(pSlot, pPrecode->GetEntryPoint());
+ oldVal = MethodTable::VTableIndir2_t::GetRelative(pSlot, tempEntry);
+ slotAddr = (TADDR *) EnsureWritablePages((MethodTable::VTableIndir2_t *) pSlot);
+ }
+ else
+ {
+ newVal = pPrecode->GetEntryPoint();
+ oldVal = tempEntry;
+ slotAddr = (TADDR *) EnsureWritablePages((PCODE *) pSlot);
+ }
+
+ if (FastInterlockCompareExchangePointer(slotAddr, (TADDR) newVal, (TADDR) oldVal) == oldVal)
amt.SuppressRelease();
}
// Set the flags atomically
InterlockedUpdateFlags2(enum_flag2_HasStableEntryPoint | enum_flag2_HasPrecode, TRUE);
- return Precode::GetPrecodeFromEntryPoint(*pSlot);
+ PCODE addr;
+ if (IsVtableSlot())
+ {
+ addr = ((MethodTable::VTableIndir2_t *)pSlot)->GetValue();
+ }
+ else
+ {
+ addr = *((PCODE *)pSlot);
+ }
+ return Precode::GetPrecodeFromEntryPoint(addr);
}
//*******************************************************************************
@@ -4857,10 +4901,28 @@ BOOL MethodDesc::SetStableEntryPointInterlocked(PCODE addr)
_ASSERTE(!HasPrecode());
PCODE pExpected = GetTemporaryEntryPoint();
- PTR_PCODE pSlot = GetAddrOfSlot();
- EnsureWritablePages(pSlot);
+ TADDR pSlot = GetAddrOfSlot();
+
+ BOOL fResult;
+
+ TADDR *slotAddr;
+ PCODE newVal;
+ PCODE oldVal;
+
+ if (IsVtableSlot())
+ {
+ newVal = MethodTable::VTableIndir2_t::GetRelative(pSlot, addr);
+ oldVal = MethodTable::VTableIndir2_t::GetRelative(pSlot, pExpected);
+ slotAddr = (TADDR *) EnsureWritablePages((MethodTable::VTableIndir2_t *) pSlot);
+ }
+ else
+ {
+ newVal = addr;
+ oldVal = pExpected;
+ slotAddr = (TADDR *) EnsureWritablePages((PCODE *) pSlot);
+ }
- BOOL fResult = FastInterlockCompareExchangePointer(pSlot, addr, pExpected) == pExpected;
+ fResult = FastInterlockCompareExchangePointer(slotAddr, (TADDR) newVal, (TADDR) oldVal) == oldVal;
InterlockedUpdateFlags2(enum_flag2_HasStableEntryPoint, TRUE);
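Because a relative slot stores a delta, the interlocked updates above must compare and exchange deltas rather than absolute addresses: both the expected and the new entry point are first converted with GetRelative against the slot's own address. A minimal sketch of the pattern:

    #include <atomic>
    #include <cstdint>
    typedef uintptr_t TADDR;
    typedef uintptr_t PCODE;

    bool SetSlotInterlocked(std::atomic<TADDR> *slot, PCODE expected, PCODE newAddr, bool isRelative)
    {
        TADDR base = (TADDR)slot;
        // Relative slots store (value - slot address); plain slots store the value.
        TADDR oldVal = isRelative ? (TADDR)expected - base : (TADDR)expected;
        TADDR newVal = isRelative ? (TADDR)newAddr  - base : (TADDR)newAddr;
        return slot->compare_exchange_strong(oldVal, newVal);
    }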
diff --git a/src/vm/method.hpp b/src/vm/method.hpp
index c1316d06c3..57258eb47c 100644
--- a/src/vm/method.hpp
+++ b/src/vm/method.hpp
@@ -1133,7 +1133,16 @@ public:
}
}
- PTR_PCODE GetAddrOfSlot();
+ inline BOOL IsVirtualSlot()
+ {
+ return GetSlot() < GetMethodTable()->GetNumVirtuals();
+ }
+ inline BOOL IsVtableSlot()
+ {
+ return IsVirtualSlot() && !HasNonVtableSlot();
+ }
+
+ TADDR GetAddrOfSlot();
PTR_MethodDesc GetDeclMethodDesc(UINT32 slotNumber);
@@ -2566,7 +2575,7 @@ public:
};
// The writeable part of the methoddesc.
-#if defined(PLATFORM_UNIX) && defined(_TARGET_ARM_)
+#if defined(FEATURE_NGEN_RELOCS_OPTIMIZATIONS)
RelativePointer<PTR_NDirectWriteableData> m_pWriteableData;
#else
PlainPointer<PTR_NDirectWriteableData> m_pWriteableData;
@@ -3382,7 +3391,7 @@ public: // <TODO>make private: JITinterface.cpp accesses through this </TODO>
//
// For generic method definitions that are not the typical method definition (e.g. C<int>.m<U>)
// this field is null; to obtain the instantiation use LoadMethodInstantiation
-#if defined(PLATFORM_UNIX) && defined(_TARGET_ARM_)
+#if defined(FEATURE_NGEN_RELOCS_OPTIMIZATIONS)
RelativePointer<PTR_Dictionary> m_pPerInstInfo; //SHARED
#else
PlainPointer<PTR_Dictionary> m_pPerInstInfo; //SHARED
diff --git a/src/vm/methodtable.cpp b/src/vm/methodtable.cpp
index 67656235ef..aecb9a6863 100644
--- a/src/vm/methodtable.cpp
+++ b/src/vm/methodtable.cpp
@@ -4297,7 +4297,8 @@ void MethodTable::Save(DataImage *image, DWORD profilingFlags)
{
if (!image->IsStored(it.GetIndirectionSlot()))
{
- if (CanInternVtableChunk(image, it))
+ if (!MethodTable::VTableIndir2_t::isRelative
+ && CanInternVtableChunk(image, it))
image->StoreInternedStructure(it.GetIndirectionSlot(), it.GetSize(), DataImage::ITEM_VTABLE_CHUNK);
else
image->StoreStructure(it.GetIndirectionSlot(), it.GetSize(), DataImage::ITEM_VTABLE_CHUNK);
@@ -4989,7 +4990,7 @@ void MethodTable::Fixup(DataImage *image)
// Virtual slots live in chunks pointed to by vtable indirections
slotBase = (PVOID) GetVtableIndirections()[GetIndexOfVtableIndirection(slotNumber)].GetValueMaybeNull();
- slotOffset = GetIndexAfterVtableIndirection(slotNumber) * sizeof(PCODE);
+ slotOffset = GetIndexAfterVtableIndirection(slotNumber) * sizeof(MethodTable::VTableIndir2_t);
}
else if (HasSingleNonVirtualSlot())
{
@@ -5016,7 +5017,7 @@ void MethodTable::Fixup(DataImage *image)
if (pMD->GetMethodTable() == this)
{
ZapRelocationType relocType;
- if (slotNumber >= GetNumVirtuals())
+ if (slotNumber >= GetNumVirtuals() || MethodTable::VTableIndir2_t::isRelative)
relocType = IMAGE_REL_BASED_RelativePointer;
else
relocType = IMAGE_REL_BASED_PTR;
@@ -5039,9 +5040,15 @@ void MethodTable::Fixup(DataImage *image)
_ASSERTE(pSourceMT->GetMethodDescForSlot(slotNumber) == pMD);
#endif
+ ZapRelocationType relocType;
+ if (MethodTable::VTableIndir2_t::isRelative)
+ relocType = IMAGE_REL_BASED_RELPTR;
+ else
+ relocType = IMAGE_REL_BASED_PTR;
+
if (image->CanEagerBindToMethodDesc(pMD) && pMD->GetLoaderModule() == pZapModule)
{
- pMD->FixupSlot(image, slotBase, slotOffset);
+ pMD->FixupSlot(image, slotBase, slotOffset, relocType);
}
else
{
@@ -5050,7 +5057,7 @@ void MethodTable::Fixup(DataImage *image)
ZapNode * importThunk = image->GetVirtualImportThunk(pMD->GetMethodTable(), pMD, slotNumber);
// On ARM, make sure that the address to the virtual thunk that we write into the
// vtable "chunk" has the Thumb bit set.
- image->FixupFieldToNode(slotBase, slotOffset, importThunk ARM_ARG(THUMB_CODE));
+ image->FixupFieldToNode(slotBase, slotOffset, importThunk ARM_ARG(THUMB_CODE) NOT_ARM_ARG(0), relocType);
}
else
{
@@ -9790,7 +9797,15 @@ void MethodTable::SetSlot(UINT32 slotNumber, PCODE slotCode)
_ASSERTE(IsThumbCode(slotCode));
#endif
- *GetSlotPtrRaw(slotNumber) = slotCode;
+ TADDR slot = GetSlotPtrRaw(slotNumber);
+ if (slotNumber < GetNumVirtuals())
+ {
+ ((MethodTable::VTableIndir2_t *) slot)->SetValueMaybeNull(slotCode);
+ }
+ else
+ {
+ *((PCODE *)slot) = slotCode;
+ }
}
//==========================================================================================
diff --git a/src/vm/methodtable.h b/src/vm/methodtable.h
index e88fe16644..6c216a2559 100644
--- a/src/vm/methodtable.h
+++ b/src/vm/methodtable.h
@@ -111,7 +111,7 @@ struct InterfaceInfo_t
#endif
// Method table of the interface
-#if defined(PLATFORM_UNIX) && defined(_TARGET_ARM_)
+#if defined(FEATURE_NGEN_RELOCS_OPTIMIZATIONS)
RelativeFixupPointer<PTR_MethodTable> m_pMethodTable;
#else
FixupPointer<PTR_MethodTable> m_pMethodTable;
@@ -1542,13 +1542,18 @@ public:
WRAPPER_NO_CONTRACT;
STATIC_CONTRACT_SO_TOLERANT;
CONSISTENCY_CHECK(slotNumber < GetNumVtableSlots());
- PTR_PCODE pSlot = GetSlotPtrRaw(slotNumber);
- if (IsZapped() && slotNumber >= GetNumVirtuals())
+
+ TADDR pSlot = GetSlotPtrRaw(slotNumber);
+ if (slotNumber < GetNumVirtuals())
+ {
+ return VTableIndir2_t::GetValueMaybeNullAtPtr(pSlot);
+ }
+ else if (IsZapped() && slotNumber >= GetNumVirtuals())
{
// Non-virtual slots in NGened images are relative pointers
- return RelativePointer<PCODE>::GetValueAtPtr(dac_cast<TADDR>(pSlot));
+ return RelativePointer<PCODE>::GetValueAtPtr(pSlot);
}
- return *pSlot;
+ return *dac_cast<PTR_PCODE>(pSlot);
}
// Special-case for when we know that the slot number corresponds
@@ -1562,10 +1567,11 @@ public:
DWORD index = GetIndexOfVtableIndirection(slotNum);
TADDR base = dac_cast<TADDR>(&(GetVtableIndirections()[index]));
- return *(VTableIndir_t::GetValueMaybeNullAtPtr(base) + GetIndexAfterVtableIndirection(slotNum));
+ DPTR(VTableIndir2_t) baseAfterInd = VTableIndir_t::GetValueMaybeNullAtPtr(base) + GetIndexAfterVtableIndirection(slotNum);
+ return VTableIndir2_t::GetValueMaybeNullAtPtr(dac_cast<TADDR>(baseAfterInd));
}
- PTR_PCODE GetSlotPtrRaw(UINT32 slotNum)
+ TADDR GetSlotPtrRaw(UINT32 slotNum)
{
WRAPPER_NO_CONTRACT;
STATIC_CONTRACT_SO_TOLERANT;
@@ -1576,25 +1582,26 @@ public:
// Virtual slots live in chunks pointed to by vtable indirections
DWORD index = GetIndexOfVtableIndirection(slotNum);
TADDR base = dac_cast<TADDR>(&(GetVtableIndirections()[index]));
- return VTableIndir_t::GetValueMaybeNullAtPtr(base) + GetIndexAfterVtableIndirection(slotNum);
+ DPTR(VTableIndir2_t) baseAfterInd = VTableIndir_t::GetValueMaybeNullAtPtr(base) + GetIndexAfterVtableIndirection(slotNum);
+ return dac_cast<TADDR>(baseAfterInd);
}
else if (HasSingleNonVirtualSlot())
{
// Non-virtual slots < GetNumVtableSlots live in a single chunk pointed to by an optional member,
// except when there is only one in which case it lives in the optional member itself
_ASSERTE(slotNum == GetNumVirtuals());
- return dac_cast<PTR_PCODE>(GetNonVirtualSlotsPtr());
+ return GetNonVirtualSlotsPtr();
}
else
{
// Non-virtual slots < GetNumVtableSlots live in a single chunk pointed to by an optional member
_ASSERTE(HasNonVirtualSlotsArray());
g_IBCLogger.LogMethodTableNonVirtualSlotsAccess(this);
- return GetNonVirtualSlotsArray() + (slotNum - GetNumVirtuals());
+ return dac_cast<TADDR>(GetNonVirtualSlotsArray() + (slotNum - GetNumVirtuals()));
}
}
- PTR_PCODE GetSlotPtr(UINT32 slotNum)
+ TADDR GetSlotPtr(UINT32 slotNum)
{
WRAPPER_NO_CONTRACT;
STATIC_CONTRACT_SO_TOLERANT;
@@ -1660,10 +1667,12 @@ public:
#define VTABLE_SLOTS_PER_CHUNK 8
#define VTABLE_SLOTS_PER_CHUNK_LOG2 3
-#if defined(PLATFORM_UNIX) && defined(_TARGET_ARM_)
- typedef RelativePointer<PTR_PCODE> VTableIndir_t;
+#if defined(FEATURE_NGEN_RELOCS_OPTIMIZATIONS)
+ typedef RelativePointer<PCODE> VTableIndir2_t;
+ typedef RelativePointer<DPTR(VTableIndir2_t)> VTableIndir_t;
#else
- typedef PlainPointer<PTR_PCODE> VTableIndir_t;
+ typedef PlainPointer<PCODE> VTableIndir2_t;
+ typedef PlainPointer<DPTR(VTableIndir2_t)> VTableIndir_t;
#endif
static DWORD GetIndexOfVtableIndirection(DWORD slotNum);
@@ -1692,10 +1701,10 @@ public:
BOOL Finished();
DWORD GetIndex();
DWORD GetOffsetFromMethodTable();
- PTR_PCODE GetIndirectionSlot();
+ DPTR(VTableIndir2_t) GetIndirectionSlot();
#ifndef DACCESS_COMPILE
- void SetIndirectionSlot(PTR_PCODE pChunk);
+ void SetIndirectionSlot(DPTR(VTableIndir2_t) pChunk);
#endif
DWORD GetStartSlot();
@@ -2173,7 +2182,7 @@ public:
// THE METHOD TABLE PARENT (SUPERCLASS/BASE CLASS)
//
-#if defined(PLATFORM_UNIX) && defined(_TARGET_ARM_)
+#if defined(FEATURE_NGEN_RELOCS_OPTIMIZATIONS)
#define PARENT_MT_FIXUP_OFFSET (-FIXUP_POINTER_INDIRECTION)
typedef RelativeFixupPointer<PTR_MethodTable> ParentMT_t;
#else
@@ -2205,7 +2214,7 @@ public:
inline static PTR_VOID GetParentMethodTableOrIndirection(PTR_VOID pMT)
{
WRAPPER_NO_CONTRACT;
-#if defined(PLATFORM_UNIX) && defined(_TARGET_ARM_)
+#if defined(FEATURE_NGEN_RELOCS_OPTIMIZATIONS)
PTR_MethodTable pMethodTable = dac_cast<PTR_MethodTable>(pMT);
PTR_MethodTable pParentMT = ReadPointerMaybeNull((MethodTable*) pMethodTable, &MethodTable::m_pParentMethodTable);
return dac_cast<PTR_VOID>(pParentMT);
@@ -3111,7 +3120,7 @@ public:
// must have a dictionary entry. On the other hand, for instantiations shared with Dict<string,double> the opposite holds.
//
-#if defined(PLATFORM_UNIX) && defined(_TARGET_ARM_)
+#if defined(FEATURE_NGEN_RELOCS_OPTIMIZATIONS)
typedef RelativePointer<PTR_Dictionary> PerInstInfoElem_t;
typedef RelativePointer<DPTR(PerInstInfoElem_t)> PerInstInfo_t;
#else
@@ -4182,7 +4191,7 @@ private:
RelativePointer<PTR_Module> m_pLoaderModule; // LoaderModule. It is equal to the ZapModule in ngened images
-#if defined(PLATFORM_UNIX) && defined(_TARGET_ARM_)
+#if defined(FEATURE_NGEN_RELOCS_OPTIMIZATIONS)
RelativePointer<PTR_MethodTableWriteableData> m_pWriteableData;
#else
PlainPointer<PTR_MethodTableWriteableData> m_pWriteableData;
@@ -4198,7 +4207,7 @@ private:
static const TADDR UNION_MASK = 3;
union {
-#if defined(PLATFORM_UNIX) && defined(_TARGET_ARM_)
+#if defined(FEATURE_NGEN_RELOCS_OPTIMIZATIONS)
RelativePointer<DPTR(EEClass)> m_pEEClass;
RelativePointer<TADDR> m_pCanonMT;
#else
@@ -4233,7 +4242,7 @@ private:
public:
union
{
-#if defined(PLATFORM_UNIX) && defined(_TARGET_ARM_)
+#if defined(FEATURE_NGEN_RELOCS_OPTIMIZATIONS)
RelativePointer<PTR_InterfaceInfo> m_pInterfaceMap;
#else
PlainPointer<PTR_InterfaceInfo> m_pInterfaceMap;
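Under FEATURE_NGEN_RELOCS_OPTIMIZATIONS both vtable levels are now relative: the MethodTable's indirection cell (VTableIndir_t) points to a chunk, and each chunk entry (VTableIndir2_t) points to code, so a slot read rebases twice. A hedged sketch of the arithmetic behind GetRestoredSlot/GetSlotPtrRaw:

    #include <cstdint>
    typedef uintptr_t TADDR;
    typedef uintptr_t PCODE;

    // 'indirCell' is &GetVtableIndirections()[index]; 'slotIndex' is
    // GetIndexAfterVtableIndirection(slotNum).
    PCODE ReadVtableSlot(TADDR indirCell, unsigned slotIndex)
    {
        TADDR chunk = indirCell + *(intptr_t *)indirCell;    // level-1 rebase
        TADDR slot  = chunk + slotIndex * sizeof(intptr_t);  // VTableIndir2_t entry
        return (PCODE)(slot + *(intptr_t *)slot);            // level-2 rebase
    }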
diff --git a/src/vm/methodtable.inl b/src/vm/methodtable.inl
index 4fa81c931b..9e5df0262c 100644
--- a/src/vm/methodtable.inl
+++ b/src/vm/methodtable.inl
@@ -955,7 +955,7 @@ inline DWORD MethodTable::VtableIndirectionSlotIterator::GetOffsetFromMethodTabl
}
//==========================================================================================
-inline PTR_PCODE MethodTable::VtableIndirectionSlotIterator::GetIndirectionSlot()
+inline DPTR(MethodTable::VTableIndir2_t) MethodTable::VtableIndirectionSlotIterator::GetIndirectionSlot()
{
LIMITED_METHOD_DAC_CONTRACT;
PRECONDITION(m_i != (DWORD) -1 && m_i < m_count);
@@ -965,7 +965,7 @@ inline PTR_PCODE MethodTable::VtableIndirectionSlotIterator::GetIndirectionSlot(
//==========================================================================================
#ifndef DACCESS_COMPILE
-inline void MethodTable::VtableIndirectionSlotIterator::SetIndirectionSlot(PTR_PCODE pChunk)
+inline void MethodTable::VtableIndirectionSlotIterator::SetIndirectionSlot(DPTR(MethodTable::VTableIndir2_t) pChunk)
{
LIMITED_METHOD_CONTRACT;
m_pSlot->SetValueMaybeNull(pChunk);
diff --git a/src/vm/methodtablebuilder.cpp b/src/vm/methodtablebuilder.cpp
index c57677b316..37ca940499 100644
--- a/src/vm/methodtablebuilder.cpp
+++ b/src/vm/methodtablebuilder.cpp
@@ -9987,7 +9987,7 @@ MethodTable * MethodTableBuilder::AllocateNewMT(Module *pLoaderModule,
else
{
// Use the locally allocated chunk
- it.SetIndirectionSlot((PTR_PCODE)(pData+dwCurrentUnsharedSlotOffset));
+ it.SetIndirectionSlot((MethodTable::VTableIndir2_t *)(pData+dwCurrentUnsharedSlotOffset));
dwCurrentUnsharedSlotOffset += it.GetSize();
}
}
@@ -10553,7 +10553,7 @@ MethodTableBuilder::SetupMethodTable2(
if (pMD->HasNonVtableSlot())
{
- *pMD->GetAddrOfSlot() = addr;
+ *((PCODE *)pMD->GetAddrOfSlot()) = addr;
}
else
{
diff --git a/src/zap/zapimage.cpp b/src/zap/zapimage.cpp
index 07e512c49b..3ad696720a 100644
--- a/src/zap/zapimage.cpp
+++ b/src/zap/zapimage.cpp
@@ -572,7 +572,8 @@ void ZapImage::AllocateVirtualSections()
#endif // defined(WIN64EXCEPTIONS)
m_pPreloadSections[CORCOMPILE_SECTION_READONLY_WARM] = NewVirtualSection(pTextSection, IBCProfiledSection | WarmRange | ReadonlySection, sizeof(TADDR));
- m_pPreloadSections[CORCOMPILE_SECTION_READONLY_VCHUNKS_AND_DICTIONARY] = NewVirtualSection(pTextSection, IBCProfiledSection | WarmRange | ReadonlySection, sizeof(TADDR));
+ m_pPreloadSections[CORCOMPILE_SECTION_READONLY_VCHUNKS] = NewVirtualSection(pTextSection, IBCProfiledSection | WarmRange | ReadonlySection, sizeof(TADDR));
+ m_pPreloadSections[CORCOMPILE_SECTION_READONLY_DICTIONARY] = NewVirtualSection(pTextSection, IBCProfiledSection | WarmRange | ReadonlySection, sizeof(TADDR));
//
// GC Info for methods which were not touched in profiling