author     mikedn <onemihaid@hotmail.com>    2018-06-30 20:05:30 +0300
committer  Andy Ayers <andya@microsoft.com>  2018-06-30 10:05:30 -0700
commit     c2baf04cd2c2211334949ba12df2e49fd9109728 (patch)
tree       115f0424beefd7faab021edf734f98cfe1e49237 /src/jit/jitstd
parent     9f98cc53e8530787e363c038be3993f3231e2708 (diff)
Pass CompAllocator by value (#15025)
Passing CompAllocator objects by value is advantageous because a CompAllocator no longer needs to be dynamically allocated and cached. CompAllocator instances can now be freely created, copied, and stored, which makes adding new CompMemKind values easier. Together with other cleanup, this also improves memory allocation performance by removing some extra levels of indirection that were previously required: jitstd::allocator had a pointer to CompAllocator, CompAllocator had a pointer to Compiler, and Compiler finally had a pointer to ArenaAllocator.

Without MEASURE_MEM_ALLOC enabled, both jitstd::allocator and CompAllocator now contain just a pointer to ArenaAllocator. When MEASURE_MEM_ALLOC is enabled, CompAllocator also contains a single pointer, but to a MemStatsAllocator object that holds the relevant memory kind. This way CompAllocator is always pointer sized, so enabling MEASURE_MEM_ALLOC does not increase memory usage for objects that store a CompAllocator instance.

To implement this, two additional significant changes have been made:

* MemStats has been moved to ArenaAllocator; it is, after all, the allocator's job to maintain statistics. This also fixes some issues with memory statistics, such as not tracking the memory allocated by the inlinee compiler (which previously used its own MemStats instance).
* The arena page pooling logic has been extracted out of the allocator. It does not make sense to pool an allocator: it has very little state that can actually be reused, and everything else (including MemStats) needs to be reset on reuse. What really needs to be pooled is just a page of memory.

Since this work touched allocation code, the opportunity was used to perform additional cleanup:

* Remove the unnecessary LSRA ListElementAllocator
* Remove compGetMem and compGetMemArray
* Make CompAllocator and HostAllocator more like the std allocator
* Update HashTable to use CompAllocator
* Update ArrayStack to use CompAllocator
* Move CompAllocator & friends to alloc.h
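For illustration, here is a minimal, self-contained sketch of the pointer-sized, pass-by-value allocator shape the message describes. Everything in it is an assumption for illustration only (names, members, and the malloc-backed arena are stand-ins), not the actual coreclr sources.

// Minimal sketch of a pointer-sized, pass-by-value allocator wrapper.
// All names and bodies are illustrative assumptions; malloc stands in for
// real page-pooled arena allocation.
#include <cstddef>
#include <cstdlib>

class ArenaAllocator
{
public:
    void* allocateMemory(size_t size)
    {
        // A real arena carves memory out of pooled pages and releases it in bulk.
        return malloc(size);
    }
};

class CompAllocator
{
    // A single pointer, so copying a CompAllocator costs no more than copying
    // the CompAllocator* that used to be passed around. (With MEASURE_MEM_ALLOC
    // this would instead point at a small stats object recording the memory
    // kind, keeping the wrapper pointer sized either way.)
    ArenaAllocator* m_arena;

public:
    explicit CompAllocator(ArenaAllocator* arena) : m_arena(arena)
    {
    }

    // Typed allocation, mirroring the m_alloc.allocate<value_type>(count) call
    // that the diff below introduces in jitstd::allocator<T>::allocate.
    template <typename T>
    T* allocate(size_t count)
    {
        return static_cast<T*>(m_arena->allocateMemory(sizeof(T) * count));
    }

    // Arena memory is freed all at once, so per-object deallocation is a no-op.
    void deallocate(void*)
    {
    }
};

// The property the change relies on: the wrapper stays pointer sized, so objects
// that store a CompAllocator by value do not grow.
static_assert(sizeof(CompAllocator) == sizeof(void*), "CompAllocator should stay pointer sized");

With this shape a caller can create the wrapper on the stack and hand out copies freely, e.g. CompAllocator alloc(&arena); int* data = alloc.allocate<int>(4); which is what makes storing allocators by value, and adding new CompMemKind values, cheap.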
Diffstat (limited to 'src/jit/jitstd')
-rw-r--r--  src/jit/jitstd/allocator.h  32
-rw-r--r--  src/jit/jitstd/utility.h    13
2 files changed, 16 insertions, 29 deletions
diff --git a/src/jit/jitstd/allocator.h b/src/jit/jitstd/allocator.h
index f370af8e9d..a6a25deae4 100644
--- a/src/jit/jitstd/allocator.h
+++ b/src/jit/jitstd/allocator.h
@@ -32,7 +32,7 @@ private:
allocator();
public:
- inline allocator(CompAllocator* pAlloc);
+ inline allocator(CompAllocator alloc);
template <typename U>
inline allocator(const allocator<U>& alloc);
@@ -43,31 +43,31 @@ public:
inline allocator& operator=(const allocator<U>& alloc);
private:
- CompAllocator* m_pAlloc;
+ CompAllocator m_alloc;
template <typename U>
friend class allocator;
};
-allocator<void>::allocator(CompAllocator* pAlloc)
- : m_pAlloc(pAlloc)
+allocator<void>::allocator(CompAllocator alloc)
+ : m_alloc(alloc)
{
}
allocator<void>::allocator(const allocator& alloc)
- : m_pAlloc(alloc.m_pAlloc)
+ : m_alloc(alloc.m_alloc)
{
}
template <typename U>
allocator<void>::allocator(const allocator<U>& alloc)
- : m_pAlloc(alloc.m_pAlloc)
+ : m_alloc(alloc.m_alloc)
{
}
template <typename U>
allocator<void>& allocator<void>::operator=(const allocator<U>& alloc)
{
- m_pAlloc = alloc.m_pAlloc;
+ m_alloc = alloc.m_alloc;
return *this;
}
@@ -86,7 +86,7 @@ public:
private:
allocator();
public:
- allocator(CompAllocator* pAlloc);
+ allocator(CompAllocator alloc);
template <typename U>
allocator(const allocator<U>& alloc);
@@ -110,7 +110,7 @@ public:
};
private:
- CompAllocator* m_pAlloc;
+ CompAllocator m_alloc;
template <typename U>
friend class allocator;
};
@@ -122,21 +122,21 @@ namespace jitstd
{
template <typename T>
-allocator<T>::allocator(CompAllocator* pAlloc)
- : m_pAlloc(pAlloc)
+allocator<T>::allocator(CompAllocator alloc)
+ : m_alloc(alloc)
{
}
template <typename T>
template <typename U>
allocator<T>::allocator(const allocator<U>& alloc)
- : m_pAlloc(alloc.m_pAlloc)
+ : m_alloc(alloc.m_alloc)
{
}
template <typename T>
allocator<T>::allocator(const allocator<T>& alloc)
- : m_pAlloc(alloc.m_pAlloc)
+ : m_alloc(alloc.m_alloc)
{
}
@@ -144,7 +144,7 @@ template <typename T>
template <typename U>
allocator<T>& allocator<T>::operator=(const allocator<U>& alloc)
{
- m_pAlloc = alloc.m_pAlloc;
+ m_alloc = alloc.m_alloc;
return *this;
}
@@ -163,7 +163,7 @@ typename allocator<T>::const_pointer allocator<T>::address(const_reference val)
template <typename T>
T* allocator<T>::allocate(size_type count, allocator<void>::const_pointer hint)
{
- return (pointer) m_pAlloc->Alloc(sizeof(value_type) * count);
+ return m_alloc.allocate<value_type>(count);
}
template <typename T>
@@ -175,7 +175,7 @@ void allocator<T>::construct(pointer ptr, const_reference val)
template <typename T>
void allocator<T>::deallocate(pointer ptr, size_type size)
{
- // m_pAlloc->Free(ptr);
+ m_alloc.deallocate(ptr);
}
template <typename T>
diff --git a/src/jit/jitstd/utility.h b/src/jit/jitstd/utility.h
index 80ce58e4d7..1930be8fbe 100644
--- a/src/jit/jitstd/utility.h
+++ b/src/jit/jitstd/utility.h
@@ -45,19 +45,6 @@ namespace utility
};
- // Helper to allocate objects of any type, given an allocator of void type.
- //
- // @param alloc An allocator of void type used to create an allocator of type T.
- // @param count The number of objects of type T that need to be allocated.
- //
- // @return A pointer to an object or an array of objects that was allocated.
- template <typename T>
- inline
- static T* allocate(jitstd::allocator<void>& alloc, size_t count = 1)
- {
- return jitstd::allocator<T>(alloc).allocate(count);
- }
-
// Ensures that "wset" is the union of the initial state of "wset" and "rset".
// Elements from "rset" that were not in "wset" are added to "cset."
template <typename Set>
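As a closing note on the utility.h change: once the allocator object itself offers a typed allocate<T>, the removed helper's "take an untyped allocator, rebind it, allocate" pattern collapses into a one-liner at the call site. The sketch below uses hypothetical stand-in names (Alloc, example), not the real jitstd or jit types.

// Hypothetical stand-ins, for illustration only.
#include <cstddef>
#include <cstdlib>

struct Alloc // plays the role of the by-value CompAllocator
{
    template <typename T>
    T* allocate(size_t count)
    {
        return static_cast<T*>(malloc(sizeof(T) * count));
    }
};

int* example(Alloc alloc, size_t n)
{
    // Previously something like: utility::allocate<int>(voidAllocator, n);
    // now the typed allocation comes straight from the allocator passed by value.
    return alloc.allocate<int>(n);
}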