author     Jan Kotas <jkotas@microsoft.com>  2017-11-30 23:21:40 -0800
committer  GitHub <noreply@github.com>  2017-11-30 23:21:40 -0800
commit     c1e44d9db9cc0d03e5f44af665acefa3589a6883 (patch)
tree       2b9ad535324b85548de84a5eec8eabfe39ca3de5 /src/utilcode
parent     845c2c248c6450107acd37c5469f56d05b391183 (diff)
Jumpstub fixes (#15296)
- Reserve space for jump stubs for precodes and other code fragments at the end of each code heap segment. This tries to ensure that the eventual allocation of jump stubs for precodes and other code fragments succeeds. The accounting is conservative - it reserves more than strictly required - which wastes a bit of address space, but no actual memory. This reserve is not used to allocate jump stubs for JITed code, since JITing can now recover from a failure to allocate a jump stub. Fixes #14996.

- Improve the algorithm for reusing HostCodeHeap segments: maintain an estimate of the size of the largest free block in each HostCodeHeap. The estimate is updated when an allocation request fails, and also when memory is returned to the HostCodeHeap (see the sketch after this list). Fixes #14995.

- Retry JITing on failure to allocate a jump stub. Failure to allocate a jump stub during JITing is no longer fatal. Extra memory is reserved for jump stubs on retry, so that the retry succeeds in allocating the jump stubs it needs with high probability.

- Respect CodeHeapRequestInfo::getRequestSize for HostCodeHeap. CodeHeapRequestInfo::getRequestSize is used to throttle code heap segment size for large workloads. Not respecting it in HostCodeHeap led to too many too-small code heap segments in large workloads.

- Switch the HostCodeHeap nibble map to be allocated on the regular heap as part of the HostCodeHeap instance. This simplified the math required to estimate the nibble map size, and allocating on the regular heap is an overall win since the nibble map does not need to be executable.
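The largest-free-block estimate in the second bullet is a heuristic, and a minimal sketch may help make its update rules concrete. The sketch below illustrates only the behavior described above; the class name, method names, and field are hypothetical stand-ins, not coreclr's actual HostCodeHeap implementation.

#include <cstddef>
#include <algorithm>

// Sketch of the reuse heuristic described above: track an *estimate* of the
// largest free block so the allocator can skip heaps that cannot possibly
// satisfy a request, without walking free lists on every allocation.
class HostCodeHeapSketch // hypothetical type, not the real coreclr class
{
    size_t m_estimatedLargestFreeBlock = 0;

public:
    // Consulted before a real allocation attempt: a heap whose estimate is
    // smaller than the request is skipped entirely.
    bool MayFit(size_t request) const
    {
        return request <= m_estimatedLargestFreeBlock;
    }

    // The estimate turned out to be too optimistic: a real allocation of
    // failedRequest bytes failed, so clamp the estimate below that size.
    void OnAllocationFailed(size_t failedRequest)
    {
        if (failedRequest > 0)
            m_estimatedLargestFreeBlock =
                std::min(m_estimatedLargestFreeBlock, failedRequest - 1);
    }

    // Memory was returned to the heap: the freed block may now be the
    // largest free block, so raise the estimate if needed.
    void OnMemoryReturned(size_t freedBlockSize)
    {
        m_estimatedLargestFreeBlock =
            std::max(m_estimatedLargestFreeBlock, freedBlockSize);
    }
};

The estimate may overstate the true largest free block (freed blocks raise it even when they do not coalesce with neighbors), which is exactly why a failed allocation must feed back into it; it never has to be exact, only good enough to avoid most futile scans.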
Diffstat (limited to 'src/utilcode')
-rw-r--r--  src/utilcode/loaderheap.cpp  |  4
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/src/utilcode/loaderheap.cpp b/src/utilcode/loaderheap.cpp
index ca1ed666f4..90b23c3411 100644
--- a/src/utilcode/loaderheap.cpp
+++ b/src/utilcode/loaderheap.cpp
@@ -1731,7 +1731,7 @@ void *UnlockedLoaderHeap::UnlockedAllocAlignedMem(size_t dwRequestedSize,
-void *UnlockedLoaderHeap::UnlockedAllocMemForCode_NoThrow(size_t dwHeaderSize, size_t dwCodeSize, DWORD dwCodeAlignment)
+void *UnlockedLoaderHeap::UnlockedAllocMemForCode_NoThrow(size_t dwHeaderSize, size_t dwCodeSize, DWORD dwCodeAlignment, size_t dwReserveForJumpStubs)
 {
     CONTRACT(void*)
     {
@@ -1753,7 +1753,7 @@ void *UnlockedLoaderHeap::UnlockedAllocMemForCode_NoThrow(size_t dwHeaderSize, s
     //
     // Thus, we'll request as much heap growth as is needed for the worst case (we request an extra dwCodeAlignment - 1 bytes)
-    S_SIZE_T cbAllocSize = S_SIZE_T(dwHeaderSize) + S_SIZE_T(dwCodeSize) + S_SIZE_T(dwCodeAlignment - 1);
+    S_SIZE_T cbAllocSize = S_SIZE_T(dwHeaderSize) + S_SIZE_T(dwCodeSize) + S_SIZE_T(dwCodeAlignment - 1) + S_SIZE_T(dwReserveForJumpStubs);
     if( cbAllocSize.IsOverflow() )
     {
         RETURN NULL;
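The S_SIZE_T arithmetic in this hunk is an overflow-checked pattern: each partial sum remembers whether it wrapped, so a single IsOverflow() check at the end suffices and the allocator fails cleanly instead of under-allocating. The following is a self-contained sketch of the same pattern in portable C++; CheckedSize and AllocMemForCodeSketch are illustrative stand-ins, not coreclr's actual S_SIZE_T or loader heap code.

#include <cstddef>
#include <cstdint>

// Minimal stand-in for an overflow-tracking size type, mirroring the
// S_SIZE_T usage in the diff above. Once any addition wraps, the value is
// poisoned and IsOverflow() stays true through later operations.
struct CheckedSize
{
    size_t value;
    bool   overflow;

    explicit CheckedSize(size_t v) : value(v), overflow(false) {}

    CheckedSize operator+(CheckedSize rhs) const
    {
        CheckedSize r(value + rhs.value);          // unsigned add wraps on overflow
        r.overflow = overflow || rhs.overflow
                  || r.value < value;              // a + b < a detects the wrap
        return r;
    }

    bool IsOverflow() const { return overflow; }
    size_t Value() const { return value; }
};

// Mirrors the patched computation: header + code + worst-case alignment
// padding + the new jump stub reserve, with a single overflow check at the
// end. Assumes dwCodeAlignment >= 1, as the original code does.
void *AllocMemForCodeSketch(size_t dwHeaderSize, size_t dwCodeSize,
                            uint32_t dwCodeAlignment, size_t dwReserveForJumpStubs)
{
    CheckedSize cbAllocSize = CheckedSize(dwHeaderSize)
                            + CheckedSize(dwCodeSize)
                            + CheckedSize(dwCodeAlignment - 1)
                            + CheckedSize(dwReserveForJumpStubs);
    if (cbAllocSize.IsOverflow())
        return nullptr;                            // fail cleanly, never under-allocate

    // ... a real implementation would grow the heap by cbAllocSize.Value() here ...
    return nullptr;
}

Checking once at the end works because overflow is sticky across the chained additions; no intermediate checks are needed, which keeps the call site as readable as unchecked arithmetic.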