summary | refs | log | tree | commit | diff
path: root/src/vm/i386
diff options
context:
space:
mode:
author    Koundinya Veluri <kouvel@users.noreply.github.com>  2018-03-15 10:05:42 -0700
committer GitHub <noreply@github.com>  2018-03-15 10:05:42 -0700
commit    1c5d0281719f2aad04e6738b99c845b4f95c214a (patch)
tree      66fdb672bfab2618abd670d5744e1ec3a32bedd9 /src/vm/i386
parent    65d0df04b37ec2679e087d813597cc524b2465c7 (diff)
download  coreclr-1c5d0281719f2aad04e6738b99c845b4f95c214a.tar.gz
          coreclr-1c5d0281719f2aad04e6738b99c845b4f95c214a.tar.bz2
          coreclr-1c5d0281719f2aad04e6738b99c845b4f95c214a.zip
Fix to not reuse preallocated jump stubs for dynamic methods (#16941)
Fix to not reuse preallocated jump stubs for dynamic methods. Fixes https://github.com/dotnet/coreclr/issues/16940. - Allocate an extra jump stub per temporary entry points chunk that is shared by all precodes in the chunk. This jump stub always points to PrecodeFixupThunk. - Use that for PrecodeFixupThunk, and use the precode-associated jump stub for pointing to the jitted code. - Considered allocating the extra jump stub only if coreclr is far away, but it involves reallocation, which may be common in some environments/scenarios. Figured 12 extra bytes per dynamic type is not too significant.
Diffstat (limited to 'src/vm/i386')
-rw-r--r--src/vm/i386/stublinkerx86.cpp51
-rw-r--r--src/vm/i386/stublinkerx86.h1
2 files changed, 44 insertions(+), 8 deletions(-)
diff --git a/src/vm/i386/stublinkerx86.cpp b/src/vm/i386/stublinkerx86.cpp
index ff3ec48d60..0b63e68942 100644
--- a/src/vm/i386/stublinkerx86.cpp
+++ b/src/vm/i386/stublinkerx86.cpp
@@ -6454,16 +6454,43 @@ TADDR FixupPrecode::GetMethodDesc()
#endif
#ifdef FIXUP_PRECODE_PREALLOCATE_DYNAMIC_METHOD_JUMP_STUBS
+PCODE FixupPrecode::GetDynamicMethodPrecodeFixupJumpStub()
+{
+ WRAPPER_NO_CONTRACT;
+ _ASSERTE(((PTR_MethodDesc)GetMethodDesc())->IsLCGMethod());
+
+ // The precode fixup jump stub is shared by all fixup precodes in a chunk, and immediately follows the MethodDesc. Jump
+ // stubs cannot be reused currently for the same method:
+ // - The jump stub's target would change separately from the precode being updated from "call Func" to "jmp Func", both
+ // changes would have to be done atomically with runtime suspension, which is not done currently
+ // - When changing the entry point from one version of jitted code to another, the jump stub's target pointer is not
+ // aligned to 8 bytes in order to be able to do an interlocked update of the target address
+ // So, when initially the precode intends to be of the form "call PrecodeFixupThunk", if the target address happens to be
+ // too far for a relative 32-bit jump, it will use the shared precode fixup jump stub. When changing the entry point to
+ // jitted code, the jump stub associated with the precode is patched, and the precode is updated to use that jump stub.
+ //
+ // Notes:
+ // - Dynamic method descs, and hence their precodes and preallocated jump stubs, may be reused for a different method
+ // (along with reinitializing the precode), but only with a transition where the original method is no longer accessible
+ // to user code
+ // - Concurrent calls to a dynamic method that has not yet been jitted may trigger multiple writes to the jump stub
+ // associated with the precode, but only to the same target address (and while the precode is still pointing to
+ // PrecodeFixupThunk)
+ return GetBase() + sizeof(PTR_MethodDesc);
+}
+
PCODE FixupPrecode::GetDynamicMethodEntryJumpStub()
{
+ WRAPPER_NO_CONTRACT;
_ASSERTE(((PTR_MethodDesc)GetMethodDesc())->IsLCGMethod());
// m_PrecodeChunkIndex has a value inverted to the order of precodes in memory (the precode at the lowest address has the
// highest index, and the precode at the highest address has the lowest index). To map a precode to its jump stub by memory
- // order, invert the precode index to get the jump stub index.
+ // order, invert the precode index to get the jump stub index. Also skip the precode fixup jump stub (see
+ // GetDynamicMethodPrecodeFixupJumpStub()).
UINT32 count = ((PTR_MethodDesc)GetMethodDesc())->GetMethodDescChunk()->GetCount();
_ASSERTE(m_PrecodeChunkIndex < count);
- SIZE_T jumpStubIndex = count - 1 - m_PrecodeChunkIndex;
+ SIZE_T jumpStubIndex = count - m_PrecodeChunkIndex;
return GetBase() + sizeof(PTR_MethodDesc) + jumpStubIndex * BACK_TO_BACK_JUMP_ALLOCATE_SIZE;
}
@@ -6607,7 +6634,7 @@ void FixupPrecode::Init(MethodDesc* pMD, LoaderAllocator *pLoaderAllocator, int
#ifdef FIXUP_PRECODE_PREALLOCATE_DYNAMIC_METHOD_JUMP_STUBS
if (pMD->IsLCGMethod())
{
- m_rel32 = rel32UsingPreallocatedJumpStub(&m_rel32, target, GetDynamicMethodEntryJumpStub());
+ m_rel32 = rel32UsingPreallocatedJumpStub(&m_rel32, target, GetDynamicMethodPrecodeFixupJumpStub(), false /* emitJump */);
return;
}
#endif // FIXUP_PRECODE_PREALLOCATE_DYNAMIC_METHOD_JUMP_STUBS
@@ -6632,12 +6659,15 @@ void FixupPrecode::ResetTargetInterlocked()
PCODE target = (PCODE)GetEEFuncEntryPoint(PrecodeFixupThunk);
MethodDesc* pMD = (MethodDesc*)GetMethodDesc();
- newValue.m_rel32 =
#ifdef FIXUP_PRECODE_PREALLOCATE_DYNAMIC_METHOD_JUMP_STUBS
- pMD->IsLCGMethod() ?
- rel32UsingPreallocatedJumpStub(&m_rel32, target, GetDynamicMethodEntryJumpStub()) :
+ // The entry point of LCG methods cannot revert back to the original entry point, as their jump stubs would have to be
+ // reused, which is currently not supported. This method is intended for resetting the entry point while the method is
+ // callable, which implies that the entry point may later be changed again to something else. Currently, this is not done
+ // for LCG methods. See GetDynamicMethodPrecodeFixupJumpStub() for more.
+ _ASSERTE(!pMD->IsLCGMethod());
#endif // FIXUP_PRECODE_PREALLOCATE_DYNAMIC_METHOD_JUMP_STUBS
- rel32UsingJumpStub(&m_rel32, target, pMD);
+
+ newValue.m_rel32 = rel32UsingJumpStub(&m_rel32, target, pMD);
_ASSERTE(IS_ALIGNED(this, sizeof(INT64)));
EnsureWritableExecutablePages(this, sizeof(INT64));
@@ -6659,6 +6689,11 @@ BOOL FixupPrecode::SetTargetInterlocked(TADDR target, TADDR expected)
MethodDesc * pMD = (MethodDesc*)GetMethodDesc();
g_IBCLogger.LogMethodPrecodeWriteAccess(pMD);
+#ifdef FIXUP_PRECODE_PREALLOCATE_DYNAMIC_METHOD_JUMP_STUBS
+ // A different jump stub is used for this case, see Init(). This call is unexpected for resetting the entry point.
+ _ASSERTE(!pMD->IsLCGMethod() || target != (TADDR)GetEEFuncEntryPoint(PrecodeFixupThunk));
+#endif // FIXUP_PRECODE_PREALLOCATE_DYNAMIC_METHOD_JUMP_STUBS
+
INT64 newValue = oldValue;
BYTE* pNewValue = (BYTE*)&newValue;
@@ -6687,7 +6722,7 @@ BOOL FixupPrecode::SetTargetInterlocked(TADDR target, TADDR expected)
*(INT32*)(&pNewValue[offsetof(FixupPrecode, m_rel32)]) =
#ifdef FIXUP_PRECODE_PREALLOCATE_DYNAMIC_METHOD_JUMP_STUBS
pMD->IsLCGMethod() ?
- rel32UsingPreallocatedJumpStub(&m_rel32, target, GetDynamicMethodEntryJumpStub()) :
+ rel32UsingPreallocatedJumpStub(&m_rel32, target, GetDynamicMethodEntryJumpStub(), true /* emitJump */) :
#endif // FIXUP_PRECODE_PREALLOCATE_DYNAMIC_METHOD_JUMP_STUBS
rel32UsingJumpStub(&m_rel32, target, pMD);
diff --git a/src/vm/i386/stublinkerx86.h b/src/vm/i386/stublinkerx86.h
index 229ab0767b..c5378422a1 100644
--- a/src/vm/i386/stublinkerx86.h
+++ b/src/vm/i386/stublinkerx86.h
@@ -720,6 +720,7 @@ struct FixupPrecode {
#endif // HAS_FIXUP_PRECODE_CHUNKS
#ifdef FIXUP_PRECODE_PREALLOCATE_DYNAMIC_METHOD_JUMP_STUBS
+ PCODE GetDynamicMethodPrecodeFixupJumpStub();
PCODE GetDynamicMethodEntryJumpStub();
#endif // FIXUP_PRECODE_PREALLOCATE_DYNAMIC_METHOD_JUMP_STUBS