summaryrefslogtreecommitdiff
path: root/src/utilcode
diff options
context:
space:
mode:
authorKonstantin Baladurin <k.baladurin@partner.samsung.com>2018-01-12 19:11:05 +0300
committerJan Kotas <jkotas@microsoft.com>2018-01-12 12:40:37 -0800
commit02f172c7f0476df791ebc364344b73464b83a73c (patch)
tree1f60848b7b8cc31e539edf8940c5be9bcff461d9 /src/utilcode
parentf2e52524fe421ac050b8a693438f662b40214074 (diff)
downloadcoreclr-02f172c7f0476df791ebc364344b73464b83a73c.tar.gz
coreclr-02f172c7f0476df791ebc364344b73464b83a73c.tar.bz2
coreclr-02f172c7f0476df791ebc364344b73464b83a73c.zip
LoaderHeap: remove LHF_ZEROINIT option.
This option was used for UMEntryThunkCode::Poison. Now we use our own free list to store freed thunks and don't return allocated memory to the LoaderHeap. So reused thunks are always uninitialized.
Diffstat (limited to 'src/utilcode')
-rw-r--r--src/utilcode/loaderheap.cpp19
1 file changed, 4 insertions(+), 15 deletions(-)
diff --git a/src/utilcode/loaderheap.cpp b/src/utilcode/loaderheap.cpp
index 90b23c3411..49f8a049f9 100644
--- a/src/utilcode/loaderheap.cpp
+++ b/src/utilcode/loaderheap.cpp
@@ -11,7 +11,6 @@
#include "eventtracebase.h"
#define LHF_EXECUTABLE 0x1
-#define LHF_ZEROINIT 0x2
#ifndef DACCESS_COMPILE
@@ -906,8 +905,7 @@ UnlockedLoaderHeap::UnlockedLoaderHeap(DWORD dwReserveBlockSize,
SIZE_T dwReservedRegionSize,
size_t *pPrivatePerfCounter_LoaderBytes,
RangeList *pRangeList,
- BOOL fMakeExecutable,
- BOOL fZeroInit)
+ BOOL fMakeExecutable)
{
CONTRACTL
{
@@ -950,9 +948,6 @@ UnlockedLoaderHeap::UnlockedLoaderHeap(DWORD dwReserveBlockSize,
m_Options |= LHF_EXECUTABLE;
#endif // CROSSGEN_COMPILE
- if (fZeroInit)
- m_Options |= LHF_ZEROINIT;
-
m_pFirstFreeBlock = NULL;
if (dwReservedRegionAddress != NULL && dwReservedRegionSize > 0)
@@ -1360,7 +1355,7 @@ again:
// Don't fill the memory we allocated - it is assumed to be zeroed - fill the memory after it
memset(pAllocatedBytes + dwRequestedSize, 0xEE, LOADER_HEAP_DEBUG_BOUNDARY);
#endif
- if ((dwRequestedSize > 0) && (m_Options & LHF_ZEROINIT))
+ if (dwRequestedSize > 0)
{
_ASSERTE_MSG(pAllocatedBytes[0] == 0 && memcmp(pAllocatedBytes, pAllocatedBytes + 1, dwRequestedSize - 1) == 0,
"LoaderHeap must return zero-initialized memory");
@@ -1538,8 +1533,7 @@ void UnlockedLoaderHeap::UnlockedBackoutMem(void *pMem,
{
// Cool. This was the last block allocated. We can just undo the allocation instead
// of going to the freelist.
- if (m_Options & LHF_ZEROINIT)
- memset(pMem, 0x00, dwSize); // Fill freed region with 0
+ memset(pMem, 0x00, dwSize); // Fill freed region with 0
m_pAllocPtr = (BYTE*)pMem;
}
else
@@ -1657,7 +1651,7 @@ void *UnlockedLoaderHeap::UnlockedAllocAlignedMem_NoThrow(size_t dwRequestedSiz
memset(pAllocatedBytes + dwRequestedSize, 0xee, LOADER_HEAP_DEBUG_BOUNDARY);
#endif
- if ((dwRequestedSize != 0) && (m_Options & LHF_ZEROINIT))
+ if (dwRequestedSize != 0)
{
_ASSERTE_MSG(pAllocatedBytes[0] == 0 && memcmp(pAllocatedBytes, pAllocatedBytes + 1, dwRequestedSize - 1) == 0,
"LoaderHeap must return zero-initialized memory");
@@ -1782,11 +1776,6 @@ BOOL UnlockedLoaderHeap::IsExecutable()
return (m_Options & LHF_EXECUTABLE);
}
-BOOL UnlockedLoaderHeap::IsZeroInit()
-{
- return (m_Options & LHF_ZEROINIT);
-}
-
#ifdef DACCESS_COMPILE
void UnlockedLoaderHeap::EnumMemoryRegions(CLRDataEnumMemoryFlags flags)