diff options
author | Koundinya Veluri <kouvel@users.noreply.github.com> | 2018-01-25 12:01:32 -0800 |
---|---|---|
committer | GitHub <noreply@github.com> | 2018-01-25 12:01:32 -0800 |
commit | 209415618ca5d1a5d1d9e39ca78d643d0935534e (patch) | |
tree | d83c946783390afbb52e3e0f968018c38dfd2560 /src/vm/prestub.cpp | |
parent | e9985126acb0f1efd7c780faac4e66bc798b73c0 (diff) | |
download | coreclr-209415618ca5d1a5d1d9e39ca78d643d0935534e.tar.gz coreclr-209415618ca5d1a5d1d9e39ca78d643d0935534e.tar.bz2 coreclr-209415618ca5d1a5d1d9e39ca78d643d0935534e.zip |
Enable tiered jitting for R2R methods (#15967)
Enable tiered jitting for R2R methods
- Included R2R methods and generics over value types in CoreLib for tiered jitting. Tier 0 for R2R methods is the precompiled code if available, and tier 1 is selectively scheduled based on call counting.
- Added a delay before starting to count calls for tier 1 promotion. The delay is a short duration after frequent tier 0 jitting stops (current heuristic for identifying startup).
- Startup time and steady-state performance have improved on JitBench. There is a regression shortly following startup due to call counting and tier 1 jitting, for a short duration before steady-state performance stabilizes.
- Added two new config values, one for configuring the call count threshold for promoting to tier 1, and another for specifying the delay from the last tier 0 JIT invocation before starting to count calls.
Diffstat (limited to 'src/vm/prestub.cpp')
-rw-r--r-- | src/vm/prestub.cpp | 24 |
1 file changed, 20 insertions, 4 deletions
diff --git a/src/vm/prestub.cpp b/src/vm/prestub.cpp index 8934f25d67..cd857134ab 100644 --- a/src/vm/prestub.cpp +++ b/src/vm/prestub.cpp @@ -730,6 +730,13 @@ PCODE MethodDesc::JitCompileCodeLockedEventWrapper(PrepareCodeConfig* pConfig, J } +#ifdef FEATURE_TIERED_COMPILATION + if (g_pConfig->TieredCompilation() && !flags.IsSet(CORJIT_FLAGS::CORJIT_FLAG_TIER1)) + { + GetAppDomain()->GetTieredCompilationManager()->OnTier0JitInvoked(); + } +#endif // FEATURE_TIERED_COMPILATION + #ifdef FEATURE_STACK_SAMPLING StackSampler::RecordJittingInfo(this, flags); #endif // FEATURE_STACK_SAMPLING @@ -1699,11 +1706,14 @@ PCODE MethodDesc::DoPrestub(MethodTable *pDispatchingMT) // for this method only then do we back-patch it. BOOL fCanBackpatchPrestub = TRUE; #ifdef FEATURE_TIERED_COMPILATION + TieredCompilationManager* pTieredCompilationManager = nullptr; BOOL fEligibleForTieredCompilation = IsEligibleForTieredCompilation(); + BOOL fWasPromotedToTier1 = FALSE; if (fEligibleForTieredCompilation) { + pTieredCompilationManager = GetAppDomain()->GetTieredCompilationManager(); CallCounter * pCallCounter = GetCallCounter(); - fCanBackpatchPrestub = pCallCounter->OnMethodCalled(this); + pCallCounter->OnMethodCalled(this, pTieredCompilationManager, &fCanBackpatchPrestub, &fWasPromotedToTier1); } #endif @@ -1715,6 +1725,12 @@ PCODE MethodDesc::DoPrestub(MethodTable *pDispatchingMT) (!fIsPointingToPrestub && IsVersionableWithJumpStamp())) { pCode = GetCodeVersionManager()->PublishVersionableCodeIfNecessary(this, fCanBackpatchPrestub); + + if (pTieredCompilationManager != nullptr && fCanBackpatchPrestub && pCode != NULL && !fWasPromotedToTier1) + { + pTieredCompilationManager->OnMethodCallCountingStoppedWithoutTier1Promotion(this); + } + fIsPointingToPrestub = IsPointingToPrestub(); } #endif @@ -1733,10 +1749,10 @@ PCODE MethodDesc::DoPrestub(MethodTable *pDispatchingMT) if (pCode) { - // The only reason we are still pointing to prestub is because the call counter - // prevented it. We should still short circuit and return the code without + // The only reasons we are still pointing to prestub is because the call counter + // prevented it or this thread lost the race with another thread in updating the + // entry point. We should still short circuit and return the code without // backpatching. - _ASSERTE(!fCanBackpatchPrestub); RETURN pCode; } |