Diffstat (limited to 'src/vm/prestub.cpp')
-rw-r--r--  src/vm/prestub.cpp | 191
1 file changed, 136 insertions(+), 55 deletions(-)
diff --git a/src/vm/prestub.cpp b/src/vm/prestub.cpp
index 29f09ab109..5c08becafa 100644
--- a/src/vm/prestub.cpp
+++ b/src/vm/prestub.cpp
@@ -46,9 +46,8 @@
#include "perfmap.h"
#endif
-#ifdef FEATURE_TIERED_COMPILATION
#include "callcounter.h"
-#endif
+#include "methoddescbackpatchinfo.h"
#if defined(FEATURE_GDBJIT)
#include "gdbjit.h"
@@ -83,58 +82,116 @@ PCODE MethodDesc::DoBackpatch(MethodTable * pMT, MethodTable *pDispatchingMT, BO
{
STANDARD_VM_CHECK;
PRECONDITION(!ContainsGenericVariables());
- PRECONDITION(HasStableEntryPoint());
PRECONDITION(pMT == GetMethodTable());
}
CONTRACTL_END;
- PCODE pTarget = GetStableEntryPoint();
- if (!HasTemporaryEntryPoint())
- return pTarget;
+ bool isVersionableWithVtableSlotBackpatch = IsVersionableWithVtableSlotBackpatch();
+ LoaderAllocator *mdLoaderAllocator = isVersionableWithVtableSlotBackpatch ? GetLoaderAllocator() : nullptr;
+
+ // Only take the lock if the method is versionable with vtable slot backpatch, for recording slots and synchronizing with
+ // backpatching slots
+ MethodDescBackpatchInfoTracker::ConditionalLockHolder lockHolder(isVersionableWithVtableSlotBackpatch);
+
+ // Get the method entry point inside the lock above to synchronize with backpatching in
+ // MethodDesc::BackpatchEntryPointSlots()
+ PCODE pTarget = GetMethodEntryPoint();
+
+ PCODE pExpected;
+ if (isVersionableWithVtableSlotBackpatch)
+ {
+ _ASSERTE(pTarget == GetEntryPointToBackpatch_Locked());
- PCODE pExpected = GetTemporaryEntryPoint();
+ pExpected = GetTemporaryEntryPoint();
+ if (pExpected == pTarget)
+ return pTarget;
- if (pExpected == pTarget)
- return pTarget;
+ // True interface methods are never backpatched and are not versionable with vtable slot backpatch
+ _ASSERTE(!(pMT->IsInterface() && !IsStatic()));
- // True interface methods are never backpatched
- if (pMT->IsInterface() && !IsStatic())
- return pTarget;
+ // Backpatching the funcptr stub:
+ // For methods versionable with vtable slot backpatch, a funcptr stub is guaranteed to point to the at-the-time
+ // current entry point shortly after creation, and backpatching it further is taken care of by
+ // MethodDesc::BackpatchEntryPointSlots()
- if (fFullBackPatch)
+ // Backpatching the temporary entry point:
+ // The temporary entry point is never backpatched for methods versionable with vtable slot backpatch. New vtable
+ // slots inheriting the method will initially point to the temporary entry point and it must point to the prestub
+ // and come here for backpatching such that the new vtable slot can be discovered and recorded for future
+ // backpatching.
+
+ _ASSERTE(!HasNonVtableSlot());
+ }
+ else
{
- FuncPtrStubs * pFuncPtrStubs = GetLoaderAllocator()->GetFuncPtrStubsNoCreate();
- if (pFuncPtrStubs != NULL)
+ _ASSERTE(pTarget == GetStableEntryPoint());
+
+ if (!HasTemporaryEntryPoint())
+ return pTarget;
+
+ pExpected = GetTemporaryEntryPoint();
+ if (pExpected == pTarget)
+ return pTarget;
+
+ // True interface methods are never backpatched
+ if (pMT->IsInterface() && !IsStatic())
+ return pTarget;
+
+ if (fFullBackPatch)
{
- Precode* pFuncPtrPrecode = pFuncPtrStubs->Lookup(this);
- if (pFuncPtrPrecode != NULL)
+ FuncPtrStubs * pFuncPtrStubs = GetLoaderAllocator()->GetFuncPtrStubsNoCreate();
+ if (pFuncPtrStubs != NULL)
{
- // If there is a funcptr precode to patch, we are done for this round.
- if (pFuncPtrPrecode->SetTargetInterlocked(pTarget))
- return pTarget;
+ Precode* pFuncPtrPrecode = pFuncPtrStubs->Lookup(this);
+ if (pFuncPtrPrecode != NULL)
+ {
+ // If there is a funcptr precode to patch, we are done for this round.
+ if (pFuncPtrPrecode->SetTargetInterlocked(pTarget))
+ return pTarget;
+ }
}
- }
#ifndef HAS_COMPACT_ENTRYPOINTS
- // Patch the fake entrypoint if necessary
- Precode::GetPrecodeFromEntryPoint(pExpected)->SetTargetInterlocked(pTarget);
+ // Patch the fake entrypoint if necessary
+ Precode::GetPrecodeFromEntryPoint(pExpected)->SetTargetInterlocked(pTarget);
#endif // HAS_COMPACT_ENTRYPOINTS
+ }
+
+ if (HasNonVtableSlot())
+ return pTarget;
}
- if (HasNonVtableSlot())
- return pTarget;
+ auto RecordAndBackpatchSlot = [&](MethodTable *patchedMT, DWORD slotIndex)
+ {
+ WRAPPER_NO_CONTRACT;
+ _ASSERTE(isVersionableWithVtableSlotBackpatch);
+
+ RecordAndBackpatchEntryPointSlot_Locked(
+ mdLoaderAllocator,
+ patchedMT->GetLoaderAllocator(),
+ patchedMT->GetSlotPtr(slotIndex),
+ EntryPointSlots::SlotType_Vtable,
+ pTarget);
+ };
BOOL fBackpatched = FALSE;
-#define BACKPATCH(pPatchedMT) \
- do \
- { \
- if (pPatchedMT->GetSlot(dwSlot) == pExpected) \
- { \
- pPatchedMT->SetSlot(dwSlot, pTarget); \
- fBackpatched = TRUE; \
- } \
- } \
+#define BACKPATCH(pPatchedMT) \
+ do \
+ { \
+ if (pPatchedMT->GetSlot(dwSlot) == pExpected) \
+ { \
+ if (isVersionableWithVtableSlotBackpatch) \
+ { \
+ RecordAndBackpatchSlot(pPatchedMT, dwSlot); \
+ } \
+ else \
+ { \
+ pPatchedMT->SetSlot(dwSlot, pTarget); \
+ } \
+ fBackpatched = TRUE; \
+ } \
+ } \
while(0)
// The owning slot has been updated already, so there is no need to backpatch it
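
The hunk above takes the MethodDescBackpatchInfoTracker lock only when the method is versionable with vtable slot backpatch, so the entry point read and any slot recording are synchronized with MethodDesc::BackpatchEntryPointSlots() without penalizing other methods. A minimal sketch of that conditional-lock pattern in portable C++, using std::mutex as an illustrative stand-in for the VM's tracker lock:

    #include <mutex>

    // Illustrative stand-in for MethodDescBackpatchInfoTracker; the real lock
    // is a VM crst, not std::mutex.
    struct BackpatchTrackerSketch
    {
        static std::mutex s_lock;

        // Acquires the lock only when 'acquire' is true; releases on destruction.
        struct ConditionalLockHolder
        {
            std::unique_lock<std::mutex> m_lock;

            explicit ConditionalLockHolder(bool acquire)
                : m_lock(s_lock, std::defer_lock)
            {
                if (acquire)
                    m_lock.lock();
            }
        };
    };

    std::mutex BackpatchTrackerSketch::s_lock;

    void DoBackpatchSketch(bool isVersionableWithVtableSlotBackpatch)
    {
        // The lock is taken only for the versionable-with-vtable-slot-backpatch
        // case, so the common path pays no synchronization cost.
        BackpatchTrackerSketch::ConditionalLockHolder lockHolder(isVersionableWithVtableSlotBackpatch);

        // ... read the entry point and record/backpatch slots while holding the lock ...
    }
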
@@ -154,8 +211,10 @@ PCODE MethodDesc::DoBackpatch(MethodTable * pMT, MethodTable *pDispatchingMT, BO
// that it returns the stable entrypoint eventually to avoid going through the slow path all the time.
//
MethodTable * pRestoredSlotMT = pDispatchingMT->GetRestoredSlotMT(dwSlot);
-
- BACKPATCH(pRestoredSlotMT);
+ if (pRestoredSlotMT != pDispatchingMT)
+ {
+ BACKPATCH(pRestoredSlotMT);
+ }
}
}
@@ -168,7 +227,7 @@ PCODE MethodDesc::DoBackpatch(MethodTable * pMT, MethodTable *pDispatchingMT, BO
BACKPATCH(pMT);
- if (pDispatchingMT != NULL)
+ if (pDispatchingMT != NULL && pDispatchingMT != pMT)
{
BACKPATCH(pDispatchingMT);
}
@@ -185,7 +244,7 @@ PCODE MethodDesc::DoBackpatch(MethodTable * pMT, MethodTable *pDispatchingMT, BO
{
BACKPATCH(pMT);
- if (pDispatchingMT != NULL)
+ if (pDispatchingMT != NULL && pDispatchingMT != pMT)
{
BACKPATCH(pDispatchingMT);
}
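
The revised BACKPATCH macro used in the hunks above still compares the slot against the expected (temporary) entry point, but now dispatches between recording the slot for future backpatching and writing the stable target directly. A rough functional equivalent, with illustrative stand-in types rather than the actual CoreCLR MethodTable and MethodDesc:

    #include <cstdint>

    using PCODE = std::uintptr_t;

    // Illustrative stand-in for MethodTable's slot accessors.
    struct MethodTableSketch
    {
        PCODE *m_slots;
        PCODE GetSlot(unsigned i) const { return m_slots[i]; }
        void  SetSlot(unsigned i, PCODE target) { m_slots[i] = target; }
    };

    // Stand-in for the RecordAndBackpatchSlot lambda above: the real code also
    // records the slot with the backpatch tracker so later code-version changes
    // can rewrite it; only the slot write is shown here.
    static void RecordAndBackpatchSlotSketch(MethodTableSketch *pMT, unsigned slot, PCODE target)
    {
        pMT->SetSlot(slot, target);
    }

    // Rough functional equivalent of one BACKPATCH(pPatchedMT) expansion.
    static bool BackpatchSketch(MethodTableSketch *pPatchedMT, unsigned dwSlot,
                                PCODE pExpected, PCODE pTarget,
                                bool isVersionableWithVtableSlotBackpatch)
    {
        if (pPatchedMT->GetSlot(dwSlot) != pExpected)
            return false; // slot no longer points at the temporary entry point

        if (isVersionableWithVtableSlotBackpatch)
            RecordAndBackpatchSlotSketch(pPatchedMT, dwSlot, pTarget); // record for future backpatching
        else
            pPatchedMT->SetSlot(dwSlot, pTarget);                      // one-time patch to the stable target

        return true; // corresponds to fBackpatched = TRUE
    }
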
@@ -1752,8 +1811,10 @@ PCODE MethodDesc::DoPrestub(MethodTable *pDispatchingMT)
/*************************** VERSIONABLE CODE *********************/
BOOL fIsPointingToPrestub = IsPointingToPrestub();
+ bool fIsVersionableWithoutJumpStamp = false;
#ifdef FEATURE_CODE_VERSIONING
- if (IsVersionableWithPrecode() ||
+ fIsVersionableWithoutJumpStamp = IsVersionableWithoutJumpStamp();
+ if (fIsVersionableWithoutJumpStamp ||
(!fIsPointingToPrestub && IsVersionableWithJumpStamp()))
{
pCode = GetCodeVersionManager()->PublishVersionableCodeIfNecessary(this, fCanBackpatchPrestub);
@@ -1803,7 +1864,7 @@ PCODE MethodDesc::DoPrestub(MethodTable *pDispatchingMT)
#endif // defined(FEATURE_SHARE_GENERIC_CODE)
else if (IsIL() || IsNoMetadata())
{
- if (!IsNativeCodeStableAfterInit())
+ if (!IsNativeCodeStableAfterInit() && (!fIsVersionableWithoutJumpStamp || IsVersionableWithPrecode()))
{
GetOrCreatePrecode();
}
@@ -1874,13 +1935,14 @@ PCODE MethodDesc::DoPrestub(MethodTable *pDispatchingMT)
if (pCode != NULL)
{
- if (HasPrecode())
- GetPrecode()->SetTargetInterlocked(pCode);
- else
- if (!HasStableEntryPoint())
- {
- SetStableEntryPointInterlocked(pCode);
- }
+ if (fIsVersionableWithoutJumpStamp)
+ {
+ // Methods versionable without a jump stamp should not get here unless there was a failure. There may have been a
+ // failure to update the code versions above for some reason. Don't backpatch this time and try again next time.
+ return pCode;
+ }
+
+ SetCodeEntryPoint(pCode);
}
else
{
@@ -1888,8 +1950,7 @@ PCODE MethodDesc::DoPrestub(MethodTable *pDispatchingMT)
{
pStub->DecRef();
}
- else
- if (pStub->HasExternalEntryPoint())
+ else if (pStub->HasExternalEntryPoint())
{
// If the Stub wraps code that is outside of the Stub allocation, then we
// need to free the Stub allocation now.
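
In the DoPrestub changes above, the old HasPrecode()/SetStableEntryPointInterlocked() branching is replaced: methods versionable without a jump stamp return early because publication is owned by the code version manager, and everything else goes through SetCodeEntryPoint(). A hedged sketch of that publish step, with placeholder bodies and names that only mirror the diff:

    #include <cstdint>

    using PCODE = std::uintptr_t;

    // Placeholder for MethodDesc::SetCodeEntryPoint; the real method patches the
    // precode or sets the stable entry point as appropriate.
    static void SetCodeEntryPointSketch(PCODE /* pCode */)
    {
    }

    static PCODE PublishAfterJit(PCODE pCode, bool fIsVersionableWithoutJumpStamp)
    {
        if (pCode != 0)
        {
            if (fIsVersionableWithoutJumpStamp)
            {
                // Publication for these methods is owned by the code version
                // manager; reaching here means it failed, so skip backpatching
                // and let the next call through the prestub retry.
                return pCode;
            }

            SetCodeEntryPointSketch(pCode);
        }
        return pCode;
    }
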
@@ -2319,11 +2380,20 @@ EXTERN_C PCODE STDCALL ExternalMethodFixupWorker(TransitionBlock * pTransitionBl
pCode = pMD->GetMethodEntryPoint();
//
- // Note that we do not want to call code:MethodDesc::IsPointingToPrestub() here. It does not take remoting interception
- // into account and so it would cause otherwise intercepted methods to be JITed. It is a compat issue if the JITing fails.
+ // Note that we do not want to call code:MethodDesc::IsPointingToPrestub() here. It does not take remoting
+ // interception into account and so it would cause otherwise intercepted methods to be JITed. It is a compat
+ // issue if the JITing fails.
//
if (!DoesSlotCallPrestub(pCode))
{
+ if (pMD->IsVersionableWithVtableSlotBackpatch())
+ {
+ // The entry point for this method needs to be versionable, so use a FuncPtrStub similarly to what is done
+ // in MethodDesc::GetMultiCallableAddrOfCode()
+ GCX_COOP();
+ pCode = pMD->GetLoaderAllocator()->GetFuncPtrStubs()->GetFuncPtrStub(pMD);
+ }
+
pCode = PatchNonVirtualExternalMethod(pMD, pCode, pImportSection, pIndirection);
}
}
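
For a method versionable with vtable slot backpatch, ExternalMethodFixupWorker above (and VirtualMethodFixupWorker in the next hunk) hands the fixup cell a FuncPtrStub instead of the raw current entry point, because that entry point may change when a new code version is published. A minimal model of the idea, illustrative only and not the actual FuncPtrStubs implementation:

    #include <atomic>
    #include <cstdint>

    using PCODE = std::uintptr_t;

    // The stub gives callers a stable address that forwards through a patchable
    // target slot, so cached pointers stay valid across code-version changes.
    struct FuncPtrStubSketch
    {
        std::atomic<PCODE> m_target; // rewritten when a new code version is published

        explicit FuncPtrStubSketch(PCODE initialTarget) : m_target(initialTarget) {}

        PCODE CurrentTarget() const { return m_target.load(std::memory_order_acquire); }

        // Called by the backpatching machinery; callers that cached the stub's
        // address are unaffected and simply start reaching the new target.
        void Backpatch(PCODE newTarget) { m_target.store(newTarget, std::memory_order_release); }
    };
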
@@ -2380,10 +2450,21 @@ EXTERN_C PCODE VirtualMethodFixupWorker(Object * pThisPtr, CORCOMPILE_VIRTUAL_I
if (!DoesSlotCallPrestub(pCode))
{
- // Skip fixup precode jump for better perf
- PCODE pDirectTarget = Precode::TryToSkipFixupPrecode(pCode);
- if (pDirectTarget != NULL)
- pCode = pDirectTarget;
+ MethodDesc *pMD = MethodTable::GetMethodDescForSlotAddress(pCode);
+ if (pMD->IsVersionableWithVtableSlotBackpatch())
+ {
+ // The entry point for this method needs to be versionable, so use a FuncPtrStub similarly to what is done in
+ // MethodDesc::GetMultiCallableAddrOfCode()
+ GCX_COOP();
+ pCode = pMD->GetLoaderAllocator()->GetFuncPtrStubs()->GetFuncPtrStub(pMD);
+ }
+ else
+ {
+ // Skip fixup precode jump for better perf
+ PCODE pDirectTarget = Precode::TryToSkipFixupPrecode(pCode);
+ if (pDirectTarget != NULL)
+ pCode = pDirectTarget;
+ }
// Patch the thunk to the actual method body
if (EnsureWritableExecutablePagesNoThrow(&pThunk->m_pTarget, sizeof(pThunk->m_pTarget)))