summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorKoundinya Veluri <kouvel@users.noreply.github.com>2020-02-18 16:46:37 -0500
committerGitHub <noreply@github.com>2020-02-18 16:46:37 -0500
commitc5d3d752260383fbed72ba2b4d86d82fea673c76 (patch)
treeccf26422210e03ad5b777c688d7c1cafde875c5f
parentb4b4098794a58914e4a1d2897c59d1fe995d1a0d (diff)
downloadcoreclr-c5d3d752260383fbed72ba2b4d86d82fea673c76.tar.gz
coreclr-c5d3d752260383fbed72ba2b4d86d82fea673c76.tar.bz2
coreclr-c5d3d752260383fbed72ba2b4d86d82fea673c76.zip
[3.1] Fail FuncEval if slot backpatching lock is held by any thread (#28006)
- In many cases cooperative GC mode is entered after acquiring the slot backpatching lock and the thread may block for debugger suspension while holding the lock. A FuncEval may time out on entering the lock if for example it calls a virtual or interface method for the first time. Failing the FuncEval when the lock is held enables the debugger to fall back to other options for expression evaluation. - Also added polls for debugger suspension before acquiring the slot backpatching lock on background threads that often operate in preemptive GC mode. A common case is when the debugger breaks while the tiering delay timer is active, the timer ticks shortly afterwards (after debugger suspension completes) and if a thread pool thread is already available, the background thread would block while holding the lock. The poll checks for debugger suspension and pulses the GC mode to block before acquiring the lock. Risks: - The fix is only a heuristic and lessens the problem when it is detected that the lock is held by some thread. Since the lock is acquired in preemptive GC mode, it is still possible that after the check at the start of a FuncEval, another thread acquires the lock and the FuncEval may time out. The polling makes it less likely for the lock to be taken by background tiering work, for example if a FuncEval starts while rejitting a method. - The expression evaluation experience may be worse when it is detected that the lock is held, and may still happen from unfortunate timing - Low risk for the change itself Port of https://github.com/dotnet/runtime/pull/2380 Fix for https://github.com/dotnet/runtime/issues/1537
-rw-r--r--src/debug/ee/debugger.cpp9
-rw-r--r--src/vm/methoddescbackpatchinfo.cpp27
-rw-r--r--src/vm/methoddescbackpatchinfo.h44
-rw-r--r--src/vm/tieredcompilation.cpp12
4 files changed, 89 insertions, 3 deletions
diff --git a/src/debug/ee/debugger.cpp b/src/debug/ee/debugger.cpp
index 82af1af568..909589877f 100644
--- a/src/debug/ee/debugger.cpp
+++ b/src/debug/ee/debugger.cpp
@@ -15313,6 +15313,15 @@ HRESULT Debugger::FuncEvalSetup(DebuggerIPCE_FuncEvalInfo *pEvalInfo,
return CORDBG_E_FUNC_EVAL_BAD_START_POINT;
}
+ if (MethodDescBackpatchInfoTracker::IsLockedByAnyThread())
+ {
+ // A thread may have suspended for the debugger while holding the slot backpatching lock while trying to enter
+ // cooperative GC mode. If the FuncEval calls a method that is eligible for slot backpatching (virtual or interface
+ // methods that are eligible for tiering), the FuncEval may deadlock on trying to acquire the same lock. Fail the
+ // FuncEval to avoid the issue.
+ return CORDBG_E_FUNC_EVAL_BAD_START_POINT;
+ }
+
// Create a DebuggerEval to hold info about this eval while it's in progress. Constructor copies the thread's
// CONTEXT.
DebuggerEval *pDE = new (interopsafe, nothrow) DebuggerEval(filterContext, pEvalInfo, fInException);
diff --git a/src/vm/methoddescbackpatchinfo.cpp b/src/vm/methoddescbackpatchinfo.cpp
index 571007cdfc..3734b52763 100644
--- a/src/vm/methoddescbackpatchinfo.cpp
+++ b/src/vm/methoddescbackpatchinfo.cpp
@@ -66,6 +66,7 @@ void EntryPointSlots::Backpatch_Locked(TADDR slot, SlotType slotType, PCODE entr
// MethodDescBackpatchInfoTracker
CrstStatic MethodDescBackpatchInfoTracker::s_lock;
+bool MethodDescBackpatchInfoTracker::s_isLocked = false;
#ifndef DACCESS_COMPILE
@@ -140,4 +141,30 @@ bool MethodDescBackpatchInfoTracker::MayHaveEntryPointSlotsToBackpatch(PTR_Metho
#endif // _DEBUG
+#ifndef DACCESS_COMPILE
+void MethodDescBackpatchInfoTracker::PollForDebuggerSuspension()
+{
+ CONTRACTL
+ {
+ NOTHROW;
+ GC_TRIGGERS;
+ MODE_PREEMPTIVE;
+ }
+ CONTRACTL_END;
+
+ _ASSERTE(!IsLockedByCurrentThread());
+
+ // If suspension is pending for the debugger, pulse the GC mode to suspend the thread here. Following this call, typically
+ // the lock is acquired and the GC mode is changed, and suspending there would cause FuncEvals to fail (see
+ Debugger::FuncEvalSetup() at the reference to IsLockedByAnyThread()). Since this thread is in preemptive mode, the
+ // debugger may think it's already suspended and it would be unfortunate to suspend the thread with the lock held.
+ Thread *thread = GetThread();
+ _ASSERTE(thread != nullptr);
+ if (thread->HasThreadState(Thread::TS_DebugSuspendPending))
+ {
+ GCX_COOP();
+ }
+}
+#endif
+
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
diff --git a/src/vm/methoddescbackpatchinfo.h b/src/vm/methoddescbackpatchinfo.h
index c5d92a29da..bc9d6f895b 100644
--- a/src/vm/methoddescbackpatchinfo.h
+++ b/src/vm/methoddescbackpatchinfo.h
@@ -66,6 +66,7 @@ class MethodDescBackpatchInfoTracker
{
private:
static CrstStatic s_lock;
+ static bool s_isLocked;
class BackpatchInfoTrackerHashTraits : public NoRemoveDefaultCrossLoaderAllocatorHashTraits<MethodDesc *, UINT_PTR>
{
@@ -93,9 +94,23 @@ public:
static bool IsLockedByCurrentThread();
#endif
+#ifndef DACCESS_COMPILE
+public:
+ static bool IsLockedByAnyThread()
+ {
+ LIMITED_METHOD_CONTRACT;
+ return VolatileLoadWithoutBarrier(&s_isLocked);
+ }
+
+ static void PollForDebuggerSuspension();
+#endif
+
public:
class ConditionalLockHolder : CrstHolderWithState
{
+ private:
+ bool m_isLocked;
+
public:
ConditionalLockHolder(bool acquireLock = true)
: CrstHolderWithState(
@@ -104,9 +119,34 @@ public:
#else
nullptr
#endif
- )
+ ),
+ m_isLocked(false)
+ {
+ WRAPPER_NO_CONTRACT;
+
+ #ifndef DACCESS_COMPILE
+ if (acquireLock)
+ {
+ _ASSERTE(IsLockedByCurrentThread());
+ _ASSERTE(!s_isLocked);
+ m_isLocked = true;
+ s_isLocked = true;
+ }
+ #endif
+ }
+
+ ~ConditionalLockHolder()
{
- LIMITED_METHOD_CONTRACT;
+ WRAPPER_NO_CONTRACT;
+
+ #ifndef DACCESS_COMPILE
+ if (m_isLocked)
+ {
+ _ASSERTE(IsLockedByCurrentThread());
+ _ASSERTE(s_isLocked);
+ s_isLocked = false;
+ }
+ #endif
}
};
diff --git a/src/vm/tieredcompilation.cpp b/src/vm/tieredcompilation.cpp
index 8cdfc4815b..6515188701 100644
--- a/src/vm/tieredcompilation.cpp
+++ b/src/vm/tieredcompilation.cpp
@@ -522,7 +522,13 @@ void TieredCompilationManager::ResumeCountingCalls(MethodDesc* pMethodDesc)
{
WRAPPER_NO_CONTRACT;
_ASSERTE(pMethodDesc != nullptr);
- MethodDescBackpatchInfoTracker::ConditionalLockHolder lockHolder(pMethodDesc->MayHaveEntryPointSlotsToBackpatch());
+
+ bool mayHaveEntryPointSlotsToBackpatch = pMethodDesc->MayHaveEntryPointSlotsToBackpatch();
+ if (mayHaveEntryPointSlotsToBackpatch)
+ {
+ MethodDescBackpatchInfoTracker::PollForDebuggerSuspension();
+ }
+ MethodDescBackpatchInfoTracker::ConditionalLockHolder lockHolder(mayHaveEntryPointSlotsToBackpatch);
EX_TRY
{
@@ -746,6 +752,10 @@ void TieredCompilationManager::ActivateCodeVersion(NativeCodeVersion nativeCodeV
ILCodeVersion ilParent;
HRESULT hr = S_OK;
bool mayHaveEntryPointSlotsToBackpatch = pMethod->MayHaveEntryPointSlotsToBackpatch();
+ if (mayHaveEntryPointSlotsToBackpatch)
+ {
+ MethodDescBackpatchInfoTracker::PollForDebuggerSuspension();
+ }
MethodDescBackpatchInfoTracker::ConditionalLockHolder lockHolder(mayHaveEntryPointSlotsToBackpatch);
{