summaryrefslogtreecommitdiff
path: root/src/jit/arraystack.h
diff options
context:
space:
mode:
authormikedn <onemihaid@hotmail.com>2018-06-30 20:05:30 +0300
committerAndy Ayers <andya@microsoft.com>2018-06-30 10:05:30 -0700
commitc2baf04cd2c2211334949ba12df2e49fd9109728 (patch)
tree115f0424beefd7faab021edf734f98cfe1e49237 /src/jit/arraystack.h
parent9f98cc53e8530787e363c038be3993f3231e2708 (diff)
downloadcoreclr-c2baf04cd2c2211334949ba12df2e49fd9109728.tar.gz
coreclr-c2baf04cd2c2211334949ba12df2e49fd9109728.tar.bz2
coreclr-c2baf04cd2c2211334949ba12df2e49fd9109728.zip
Pass CompAllocator by value (#15025)
Passing CompAllocator objects by value is advantageous because it no longer needs to be dynamically allocated and cached. CompAllocator instances can now be freely created, copied and stored, which makes adding new CompMemKind values easier. Together with other cleanup this also improves memory allocation performance by removing some extra levels of indirection that were previously required - jitstd::allocator had a pointer to CompAllocator, CompAllocator had a pointer to Compiler and Compiler finally had a pointer to ArenaAllocator. Without MEASURE_MEM_ALLOC enabled, both jitstd::allocator and CompAllocator now just contain a pointer to ArenaAllocator. When MEASURE_MEM_ALLOC is enabled CompAllocator also contains a pointer but to a MemStatsAllocator object that holds the relevant memory kind. This way CompAllocator is always pointer sized so that enabling MEASURE_MEM_ALLOC does not result in increased memory usage due to objects that store a CompAllocator instance. In order to implement this, 2 additional significant changes have been made: * MemStats has been moved to ArenaAllocator, it's after all the allocator's job to maintain statistics. This also fixes some issues related to memory statistics, such as not tracking the memory allocated by the inlinee compiler (since that one used its own MemStats instance). * Extract the arena page pooling logic out of the allocator. It doesn't make sense to pool an allocator, it has very little state that can actually be reused and everything else (including MemStats) needs to be reset on reuse. What really needs to be pooled is just a page of memory. Since this was touching allocation code the opportunity has been used to perform additional cleanup: * Remove unnecessary LSRA ListElementAllocator * Remove compGetMem and compGetMemArray * Make CompAllocator and HostAllocator more like the std allocator * Update HashTable to use CompAllocator * Update ArrayStack to use CompAllocator * Move CompAllocator & friends to alloc.h
Diffstat (limited to 'src/jit/arraystack.h')
-rw-r--r--src/jit/arraystack.h16
1 files changed, 7 insertions, 9 deletions
diff --git a/src/jit/arraystack.h b/src/jit/arraystack.h
index c6ac6b2628..2565e19856 100644
--- a/src/jit/arraystack.h
+++ b/src/jit/arraystack.h
@@ -11,14 +11,12 @@ class ArrayStack
static const int builtinSize = 8;
public:
- ArrayStack(Compiler* comp, int initialSize = builtinSize)
+ ArrayStack(CompAllocator alloc, int initialSize = builtinSize) : m_alloc(alloc)
{
- compiler = comp;
-
if (initialSize > builtinSize)
{
maxIndex = initialSize;
- data = new (compiler, CMK_ArrayStack) T[initialSize];
+ data = new (alloc) T[initialSize];
}
else
{
@@ -58,7 +56,7 @@ public:
// and copy over
T* oldData = data;
noway_assert(maxIndex * 2 > maxIndex);
- data = new (compiler, CMK_ArrayStack) T[maxIndex * 2];
+ data = new (m_alloc) T[maxIndex * 2];
for (int i = 0; i < maxIndex; i++)
{
data[i] = oldData[i];
@@ -149,10 +147,10 @@ public:
}
private:
- Compiler* compiler; // needed for allocation
- int tosIndex; // first free location
- int maxIndex;
- T* data;
+ CompAllocator m_alloc;
+ int tosIndex; // first free location
+ int maxIndex;
+ T* data;
// initial allocation
T builtinData[builtinSize];
};