summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--  src/debug/ee/debugger.cpp            |  9 +
-rw-r--r--  src/vm/methoddescbackpatchinfo.cpp   | 27 +
-rw-r--r--  src/vm/methoddescbackpatchinfo.h     | 44 +-
-rw-r--r--  src/vm/tieredcompilation.cpp         | 12 +-
4 files changed, 89 insertions(+), 3 deletions(-)
diff --git a/src/debug/ee/debugger.cpp b/src/debug/ee/debugger.cpp
index 82af1af568..909589877f 100644
--- a/src/debug/ee/debugger.cpp
+++ b/src/debug/ee/debugger.cpp
@@ -15313,6 +15313,15 @@ HRESULT Debugger::FuncEvalSetup(DebuggerIPCE_FuncEvalInfo *pEvalInfo,
return CORDBG_E_FUNC_EVAL_BAD_START_POINT;
}
+ if (MethodDescBackpatchInfoTracker::IsLockedByAnyThread())
+ {
+ // A thread may have suspended for the debugger while holding the slot backpatching lock while trying to enter
+ // cooperative GC mode. If the FuncEval calls a method that is eligible for slot backpatching (virtual or interface
+ // methods that are eligible for tiering), the FuncEval may deadlock on trying to acquire the same lock. Fail the
+ // FuncEval to avoid the issue.
+ return CORDBG_E_FUNC_EVAL_BAD_START_POINT;
+ }
+
// Create a DebuggerEval to hold info about this eval while its in progress. Constructor copies the thread's
// CONTEXT.
DebuggerEval *pDE = new (interopsafe, nothrow) DebuggerEval(filterContext, pEvalInfo, fInException);
diff --git a/src/vm/methoddescbackpatchinfo.cpp b/src/vm/methoddescbackpatchinfo.cpp
index 571007cdfc..3734b52763 100644
--- a/src/vm/methoddescbackpatchinfo.cpp
+++ b/src/vm/methoddescbackpatchinfo.cpp
@@ -66,6 +66,7 @@ void EntryPointSlots::Backpatch_Locked(TADDR slot, SlotType slotType, PCODE entr
// MethodDescBackpatchInfoTracker
CrstStatic MethodDescBackpatchInfoTracker::s_lock;
+bool MethodDescBackpatchInfoTracker::s_isLocked = false;
#ifndef DACCESS_COMPILE
@@ -140,4 +141,30 @@ bool MethodDescBackpatchInfoTracker::MayHaveEntryPointSlotsToBackpatch(PTR_Metho
#endif // _DEBUG
+#ifndef DACCESS_COMPILE
+void MethodDescBackpatchInfoTracker::PollForDebuggerSuspension()
+{
+ CONTRACTL
+ {
+ NOTHROW;
+ GC_TRIGGERS;
+ MODE_PREEMPTIVE;
+ }
+ CONTRACTL_END;
+
+ _ASSERTE(!IsLockedByCurrentThread());
+
+ // If suspension is pending for the debugger, pulse the GC mode to suspend the thread here. Following this call, typically
+ // the lock is acquired and the GC mode is changed, and suspending there would cause FuncEvals to fail (see
+ // Debugger::FuncEvalSetup() at the reference to IsLockedByAnyThread()). Since this thread is in preemptive mode, the
+ // debugger may think it's already suspended and it would be unfortunate to suspend the thread with the lock held.
+ Thread *thread = GetThread();
+ _ASSERTE(thread != nullptr);
+ if (thread->HasThreadState(Thread::TS_DebugSuspendPending))
+ {
+ GCX_COOP();
+ }
+}
+#endif
+
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
diff --git a/src/vm/methoddescbackpatchinfo.h b/src/vm/methoddescbackpatchinfo.h
index c5d92a29da..bc9d6f895b 100644
--- a/src/vm/methoddescbackpatchinfo.h
+++ b/src/vm/methoddescbackpatchinfo.h
@@ -66,6 +66,7 @@ class MethodDescBackpatchInfoTracker
{
private:
static CrstStatic s_lock;
+ static bool s_isLocked;
class BackpatchInfoTrackerHashTraits : public NoRemoveDefaultCrossLoaderAllocatorHashTraits<MethodDesc *, UINT_PTR>
{
@@ -93,9 +94,23 @@ public:
static bool IsLockedByCurrentThread();
#endif
+#ifndef DACCESS_COMPILE
+public:
+ static bool IsLockedByAnyThread()
+ {
+ LIMITED_METHOD_CONTRACT;
+ return VolatileLoadWithoutBarrier(&s_isLocked);
+ }
+
+ static void PollForDebuggerSuspension();
+#endif
+
public:
class ConditionalLockHolder : CrstHolderWithState
{
+ private:
+ bool m_isLocked;
+
public:
ConditionalLockHolder(bool acquireLock = true)
: CrstHolderWithState(
@@ -104,9 +119,34 @@ public:
#else
nullptr
#endif
- )
+ ),
+ m_isLocked(false)
+ {
+ WRAPPER_NO_CONTRACT;
+
+ #ifndef DACCESS_COMPILE
+ if (acquireLock)
+ {
+ _ASSERTE(IsLockedByCurrentThread());
+ _ASSERTE(!s_isLocked);
+ m_isLocked = true;
+ s_isLocked = true;
+ }
+ #endif
+ }
+
+ ~ConditionalLockHolder()
{
- LIMITED_METHOD_CONTRACT;
+ WRAPPER_NO_CONTRACT;
+
+ #ifndef DACCESS_COMPILE
+ if (m_isLocked)
+ {
+ _ASSERTE(IsLockedByCurrentThread());
+ _ASSERTE(s_isLocked);
+ s_isLocked = false;
+ }
+ #endif
}
};
diff --git a/src/vm/tieredcompilation.cpp b/src/vm/tieredcompilation.cpp
index 8cdfc4815b..6515188701 100644
--- a/src/vm/tieredcompilation.cpp
+++ b/src/vm/tieredcompilation.cpp
@@ -522,7 +522,13 @@ void TieredCompilationManager::ResumeCountingCalls(MethodDesc* pMethodDesc)
{
WRAPPER_NO_CONTRACT;
_ASSERTE(pMethodDesc != nullptr);
- MethodDescBackpatchInfoTracker::ConditionalLockHolder lockHolder(pMethodDesc->MayHaveEntryPointSlotsToBackpatch());
+
+ bool mayHaveEntryPointSlotsToBackpatch = pMethodDesc->MayHaveEntryPointSlotsToBackpatch();
+ if (mayHaveEntryPointSlotsToBackpatch)
+ {
+ MethodDescBackpatchInfoTracker::PollForDebuggerSuspension();
+ }
+ MethodDescBackpatchInfoTracker::ConditionalLockHolder lockHolder(mayHaveEntryPointSlotsToBackpatch);
EX_TRY
{
@@ -746,6 +752,10 @@ void TieredCompilationManager::ActivateCodeVersion(NativeCodeVersion nativeCodeV
ILCodeVersion ilParent;
HRESULT hr = S_OK;
bool mayHaveEntryPointSlotsToBackpatch = pMethod->MayHaveEntryPointSlotsToBackpatch();
+ if (mayHaveEntryPointSlotsToBackpatch)
+ {
+ MethodDescBackpatchInfoTracker::PollForDebuggerSuspension();
+ }
MethodDescBackpatchInfoTracker::ConditionalLockHolder lockHolder(mayHaveEntryPointSlotsToBackpatch);
{