Diffstat (limited to 'src/vm')
-rw-r--r--  src/vm/ceeload.cpp            |  14
-rw-r--r--  src/vm/generics.cpp           |   5
-rw-r--r--  src/vm/jithelpers.cpp         |  69
-rw-r--r--  src/vm/loaderallocator.cpp    | 249
-rw-r--r--  src/vm/loaderallocator.hpp    |  37
-rw-r--r--  src/vm/loaderallocator.inl    |  61
-rw-r--r--  src/vm/methodtable.cpp        |   4
-rw-r--r--  src/vm/methodtablebuilder.cpp |   4
-rw-r--r--  src/vm/threads.cpp            |  22
-rw-r--r--  src/vm/threads.h              |   4
-rw-r--r--  src/vm/threadstatics.cpp      | 128
-rw-r--r--  src/vm/threadstatics.h        | 126
-rw-r--r--  src/vm/typedesc.cpp           |   4
13 files changed, 529 insertions(+), 198 deletions(-)
diff --git a/src/vm/ceeload.cpp b/src/vm/ceeload.cpp
index 5160b7953b..01ecacab1c 100644
--- a/src/vm/ceeload.cpp
+++ b/src/vm/ceeload.cpp
@@ -41,7 +41,7 @@
#include "sigbuilder.h"
#include "metadataexports.h"
#include "inlinetracking.h"
-
+#include "threads.h"
#ifdef FEATURE_PREJIT
#include "exceptionhandling.h"
@@ -2904,6 +2904,18 @@ void Module::FreeModuleIndex()
_ASSERTE(!Module::IsEncodedModuleIndex((SIZE_T)m_ModuleID));
_ASSERTE(m_ModuleIndex == m_ModuleID->GetModuleIndex());
+#ifndef CROSSGEN_COMPILE
+ if (IsCollectible())
+ {
+ ThreadStoreLockHolder tsLock;
+ Thread *pThread = NULL;
+ while ((pThread = ThreadStore::GetThreadList(pThread)) != NULL)
+ {
+ pThread->DeleteThreadStaticData(m_ModuleIndex);
+ }
+ }
+#endif // CROSSGEN_COMPILE
+
// Get the ModuleIndex from the DLM and free it
Module::FreeModuleIndex(m_ModuleIndex);
}
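The hunk above frees every thread's copy of a collectible module's thread statics before the module index is recycled, holding the thread-store lock while walking the thread list. A minimal standalone sketch of that pattern, with illustrative names only (std::mutex stands in for ThreadStoreLockHolder; none of these are CLR APIs):

```cpp
// Sketch only: frees one module's slot in every thread's table before the
// module index is reused. All names here are illustrative, not CLR APIs.
#include <cstddef>
#include <mutex>
#include <vector>

struct ThreadLocalTable {
    std::vector<char*> perModuleData;            // one slot per module index
    void FreeSlot(std::size_t index) {
        if (index < perModuleData.size()) {      // tolerate short tables, as FreeTLM does
            delete[] perModuleData[index];
            perModuleData[index] = nullptr;
        }
    }
};

std::mutex g_threadStoreLock;                    // stands in for ThreadStoreLockHolder
std::vector<ThreadLocalTable*> g_allThreads;     // stands in for ThreadStore's thread list

void FreeModuleIndexForAllThreads(std::size_t moduleIndex) {
    std::lock_guard<std::mutex> lock(g_threadStoreLock);
    for (ThreadLocalTable* t : g_allThreads)     // walk every thread, as the hunk does
        t->FreeSlot(moduleIndex);
}
```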
diff --git a/src/vm/generics.cpp b/src/vm/generics.cpp
index b68054985e..a92177d9d0 100644
--- a/src/vm/generics.cpp
+++ b/src/vm/generics.cpp
@@ -227,7 +227,6 @@ ClassLoader::CreateTypeHandleForNonCanonicalGenericInstantiation(
BOOL fHasRemotingVtsInfo = FALSE;
BOOL fHasContextStatics = FALSE;
BOOL fHasGenericsStaticsInfo = pOldMT->HasGenericsStaticsInfo();
- BOOL fHasThreadStatics = (pOldMT->GetNumThreadStaticFields() > 0);
#ifdef FEATURE_COMINTEROP
BOOL fHasDynamicInterfaceMap = pOldMT->HasDynamicInterfaceMap();
@@ -240,11 +239,11 @@ ClassLoader::CreateTypeHandleForNonCanonicalGenericInstantiation(
// Collectible types have some special restrictions
if (pAllocator->IsCollectible())
{
- if (fHasThreadStatics || fHasContextStatics)
+ if (fHasContextStatics)
{
ClassLoader::ThrowTypeLoadException(pTypeKey, IDS_CLASSLOAD_COLLECTIBLESPECIALSTATICS);
}
- else if (pOldMT->HasFixedAddressVTStatics())
+ if (pOldMT->HasFixedAddressVTStatics())
{
ClassLoader::ThrowTypeLoadException(pTypeKey, IDS_CLASSLOAD_COLLECTIBLEFIXEDVTATTR);
}
diff --git a/src/vm/jithelpers.cpp b/src/vm/jithelpers.cpp
index 9bb6d9889e..f92e52b479 100644
--- a/src/vm/jithelpers.cpp
+++ b/src/vm/jithelpers.cpp
@@ -1935,11 +1935,14 @@ HCIMPL2(void*, JIT_GetSharedNonGCThreadStaticBaseDynamicClass, SIZE_T moduleDoma
{
FCALL_CONTRACT;
- // Get the ModuleIndex
- ModuleIndex index =
+ // Obtain the DomainLocalModule
+ DomainLocalModule *pDomainLocalModule =
(Module::IsEncodedModuleIndex(moduleDomainID)) ?
- Module::IDToIndex(moduleDomainID) :
- ((DomainLocalModule *)moduleDomainID)->GetModuleIndex();
+ GetAppDomain()->GetDomainLocalBlock()->GetModuleSlot(Module::IDToIndex(moduleDomainID)) :
+ (DomainLocalModule *)moduleDomainID;
+
+ // Get the ModuleIndex
+ ModuleIndex index = pDomainLocalModule->GetModuleIndex();
// Get the relevant ThreadLocalModule
ThreadLocalModule * pThreadLocalModule = ThreadStatics::GetTLMIfExists(index);
@@ -1950,18 +1953,18 @@ HCIMPL2(void*, JIT_GetSharedNonGCThreadStaticBaseDynamicClass, SIZE_T moduleDoma
{
ThreadLocalModule::PTR_DynamicClassInfo pLocalInfo = pThreadLocalModule->GetDynamicClassInfoIfInitialized(dwDynamicClassDomainID);
if (pLocalInfo != NULL)
- return (void*)pLocalInfo->m_pDynamicEntry->GetNonGCStaticsBasePointer();
+ {
+ PTR_BYTE retval;
+ GET_DYNAMICENTRY_NONGCTHREADSTATICS_BASEPOINTER(pDomainLocalModule->GetDomainFile()->GetModule()->GetLoaderAllocator(),
+ pLocalInfo,
+ &retval);
+ return retval;
+ }
}
// If the TLM was not allocated or if the class was not marked as initialized
// then we have to go through the slow path
- // Obtain the DomainLocalModule
- DomainLocalModule *pDomainLocalModule =
- (Module::IsEncodedModuleIndex(moduleDomainID)) ?
- GetAppDomain()->GetDomainLocalBlock()->GetModuleSlot(Module::IDToIndex(moduleDomainID)) :
- (DomainLocalModule *) moduleDomainID;
-
// Obtain the Module
Module * pModule = pDomainLocalModule->GetDomainFile()->GetModule();
@@ -1986,11 +1989,14 @@ HCIMPL2(void*, JIT_GetSharedGCThreadStaticBaseDynamicClass, SIZE_T moduleDomainI
{
FCALL_CONTRACT;
- // Get the ModuleIndex
- ModuleIndex index =
+ // Obtain the DomainLocalModule
+ DomainLocalModule *pDomainLocalModule =
(Module::IsEncodedModuleIndex(moduleDomainID)) ?
- Module::IDToIndex(moduleDomainID) :
- ((DomainLocalModule *)moduleDomainID)->GetModuleIndex();
+ GetAppDomain()->GetDomainLocalBlock()->GetModuleSlot(Module::IDToIndex(moduleDomainID)) :
+ (DomainLocalModule *)moduleDomainID;
+
+ // Get the ModuleIndex
+ ModuleIndex index = pDomainLocalModule->GetModuleIndex();
// Get the relevant ThreadLocalModule
ThreadLocalModule * pThreadLocalModule = ThreadStatics::GetTLMIfExists(index);
@@ -2001,18 +2007,19 @@ HCIMPL2(void*, JIT_GetSharedGCThreadStaticBaseDynamicClass, SIZE_T moduleDomainI
{
ThreadLocalModule::PTR_DynamicClassInfo pLocalInfo = pThreadLocalModule->GetDynamicClassInfoIfInitialized(dwDynamicClassDomainID);
if (pLocalInfo != NULL)
- return (void*)pLocalInfo->m_pDynamicEntry->GetGCStaticsBasePointer();
+ {
+ PTR_BYTE retval;
+ GET_DYNAMICENTRY_GCTHREADSTATICS_BASEPOINTER(pDomainLocalModule->GetDomainFile()->GetModule()->GetLoaderAllocator(),
+ pLocalInfo,
+ &retval);
+
+ return retval;
+ }
}
// If the TLM was not allocated or if the class was not marked as initialized
// then we have to go through the slow path
- // Obtain the DomainLocalModule
- DomainLocalModule *pDomainLocalModule =
- (Module::IsEncodedModuleIndex(moduleDomainID)) ?
- GetAppDomain()->GetDomainLocalBlock()->GetModuleSlot(Module::IDToIndex(moduleDomainID)) :
- (DomainLocalModule *) moduleDomainID;
-
// Obtain the Module
Module * pModule = pDomainLocalModule->GetDomainFile()->GetModule();
@@ -2060,7 +2067,14 @@ HCIMPL1(void*, JIT_GetGenericsNonGCThreadStaticBase, MethodTable *pMT)
{
ThreadLocalModule::PTR_DynamicClassInfo pLocalInfo = pThreadLocalModule->GetDynamicClassInfoIfInitialized(dwDynamicClassDomainID);
if (pLocalInfo != NULL)
- return (void*)pLocalInfo->m_pDynamicEntry->GetNonGCStaticsBasePointer();
+ {
+ PTR_BYTE retval;
+ GET_DYNAMICENTRY_NONGCSTATICS_BASEPOINTER(pMT->GetLoaderAllocator(),
+ pLocalInfo,
+ &retval);
+
+ return retval;
+ }
}
// If the TLM was not allocated or if the class was not marked as initialized
@@ -2105,7 +2119,14 @@ HCIMPL1(void*, JIT_GetGenericsGCThreadStaticBase, MethodTable *pMT)
{
ThreadLocalModule::PTR_DynamicClassInfo pLocalInfo = pThreadLocalModule->GetDynamicClassInfoIfInitialized(dwDynamicClassDomainID);
if (pLocalInfo != NULL)
- return (void*)pLocalInfo->m_pDynamicEntry->GetGCStaticsBasePointer();
+ {
+ PTR_BYTE retval;
+ GET_DYNAMICENTRY_GCTHREADSTATICS_BASEPOINTER(pMT->GetLoaderAllocator(),
+ pLocalInfo,
+ &retval);
+
+ return retval;
+ }
}
// If the TLM was not allocated or if the class was not marked as initialized
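Both helpers above now resolve the DomainLocalModule before the lockless fast path, because decoding a collectible class's statics requires the module's LoaderAllocator. The moduleDomainID parameter is a tagged SIZE_T: either an encoded ModuleIndex or a direct DomainLocalModule pointer. A sketch of that style of tagged decoding, assuming (purely for illustration) a low-bit tag; the real predicate is Module::IsEncodedModuleIndex:

```cpp
// Sketch of a tagged SIZE_T that is either an encoded index or a pointer.
// The actual CLR encoding lives in Module::IsEncodedModuleIndex/IDToIndex;
// the low-bit tag below is an assumption made for illustration only.
#include <cstdint>

struct DomainLocalModuleSketch { std::uint32_t moduleIndex; };

inline bool IsEncodedIndex(std::uintptr_t id) { return (id & 1) != 0; }

DomainLocalModuleSketch* ResolveModule(std::uintptr_t id,
                                       DomainLocalModuleSketch* (*lookup)(std::uint32_t)) {
    if (IsEncodedIndex(id))
        return lookup(static_cast<std::uint32_t>(id >> 1)); // decode, then table lookup
    return reinterpret_cast<DomainLocalModuleSketch*>(id);  // already a direct pointer
}
```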
diff --git a/src/vm/loaderallocator.cpp b/src/vm/loaderallocator.cpp
index cb5f81752d..46358f79d0 100644
--- a/src/vm/loaderallocator.cpp
+++ b/src/vm/loaderallocator.cpp
@@ -701,6 +701,8 @@ BOOL QCALLTYPE LoaderAllocator::Destroy(QCall::LoaderAllocatorHandle pLoaderAllo
return ret;
} // LoaderAllocator::Destroy
+#define MAX_LOADERALLOCATOR_HANDLE 0x40000000
+
// Returns NULL if the managed LoaderAllocator object was already collected.
LOADERHANDLE LoaderAllocator::AllocateHandle(OBJECTREF value)
{
@@ -714,32 +716,6 @@ LOADERHANDLE LoaderAllocator::AllocateHandle(OBJECTREF value)
LOADERHANDLE retVal;
- GCPROTECT_BEGIN(value);
- CrstHolder ch(&m_crstLoaderAllocator);
-
- retVal = AllocateHandle_Unlocked(value);
- GCPROTECT_END();
-
- return retVal;
-}
-
-#define MAX_LOADERALLOCATOR_HANDLE 0x40000000
-
-// Returns NULL if the managed LoaderAllocator object was already collected.
-LOADERHANDLE LoaderAllocator::AllocateHandle_Unlocked(OBJECTREF valueUNSAFE)
-{
- CONTRACTL
- {
- THROWS;
- GC_TRIGGERS;
- MODE_COOPERATIVE;
- }
- CONTRACTL_END;
-
- _ASSERTE(m_crstLoaderAllocator.OwnedByCurrentThread());
-
- UINT_PTR retVal;
-
struct _gc
{
OBJECTREF value;
@@ -752,57 +728,106 @@ LOADERHANDLE LoaderAllocator::AllocateHandle_Unlocked(OBJECTREF valueUNSAFE)
GCPROTECT_BEGIN(gc);
- gc.value = valueUNSAFE;
+ gc.value = value;
+ // The handle table is read locklessly, be careful
+ if (IsCollectible())
{
- // The handle table is read locklessly, be careful
- if (IsCollectible())
+ gc.loaderAllocator = (LOADERALLOCATORREF)ObjectFromHandle(m_hLoaderAllocatorObjectHandle);
+ if (gc.loaderAllocator == NULL)
+ { // The managed LoaderAllocator has already been collected; we cannot allocate any exposed managed objects for it
+ retVal = NULL;
+ }
+ else
{
- gc.loaderAllocator = (LOADERALLOCATORREF)ObjectFromHandle(m_hLoaderAllocatorObjectHandle);
- if (gc.loaderAllocator == NULL)
- { // The managed LoaderAllocator is already collected, we cannot allocate any exposed managed objects for it
- retVal = NULL;
- }
- else
- {
- DWORD slotsUsed = gc.loaderAllocator->GetSlotsUsed();
+ DWORD slotsUsed;
+ DWORD numComponents;
- if (slotsUsed > MAX_LOADERALLOCATOR_HANDLE)
+ do
+ {
{
- COMPlusThrowOM();
+ CrstHolder ch(&m_crstLoaderAllocator);
+
+ gc.handleTable = gc.loaderAllocator->GetHandleTable();
+
+ if (!m_freeHandleIndexesStack.IsEmpty())
+ {
+ // Reuse a handle slot that was previously freed
+ DWORD freeHandleIndex = m_freeHandleIndexesStack.Pop();
+ gc.handleTable->SetAt(freeHandleIndex, gc.value);
+ retVal = (UINT_PTR)((freeHandleIndex + 1) << 1);
+ break;
+ }
+
+ slotsUsed = gc.loaderAllocator->GetSlotsUsed();
+
+ if (slotsUsed > MAX_LOADERALLOCATOR_HANDLE)
+ {
+ COMPlusThrowOM();
+ }
+
+ numComponents = gc.handleTable->GetNumComponents();
+
+ if (slotsUsed < numComponents)
+ {
+ // The handle table is large enough, allocate next slot from it
+ gc.handleTable->SetAt(slotsUsed, gc.value);
+ gc.loaderAllocator->SetSlotsUsed(slotsUsed + 1);
+ retVal = (UINT_PTR)((slotsUsed + 1) << 1);
+ break;
+ }
}
- gc.handleTable = gc.loaderAllocator->GetHandleTable();
- /* If we need to enlarge the table, do it now. */
- if (slotsUsed >= gc.handleTable->GetNumComponents())
+ // We need to enlarge the handle table
+ gc.handleTableOld = gc.handleTable;
+
+ DWORD newSize = numComponents * 2;
+ gc.handleTable = (PTRARRAYREF)AllocateObjectArray(newSize, g_pObjectClass);
+
{
- gc.handleTableOld = gc.handleTable;
+ CrstHolder ch(&m_crstLoaderAllocator);
- DWORD newSize = gc.handleTable->GetNumComponents() * 2;
- gc.handleTable = (PTRARRAYREF)AllocateObjectArray(newSize, g_pObjectClass);
+ if (gc.loaderAllocator->GetHandleTable() == gc.handleTableOld)
+ {
+ /* Copy out of old array */
+ memmoveGCRefs(gc.handleTable->GetDataPtr(), gc.handleTableOld->GetDataPtr(), slotsUsed * sizeof(Object *));
+ gc.loaderAllocator->SetHandleTable(gc.handleTable);
+ }
+ else
+ {
+ // Another thread has beaten us to enlarging the handle array; use the handle table it allocated
+ gc.handleTable = gc.loaderAllocator->GetHandleTable();
+ }
+
+ numComponents = gc.handleTable->GetNumComponents();
- /* Copy out of old array */
- memmoveGCRefs(gc.handleTable->GetDataPtr(), gc.handleTableOld->GetDataPtr(), slotsUsed * sizeof(Object *));
- gc.loaderAllocator->SetHandleTable(gc.handleTable);
+ if (slotsUsed < numComponents)
+ {
+ // The handle table is large enough, allocate next slot from it
+ gc.handleTable->SetAt(slotsUsed, gc.value);
+ gc.loaderAllocator->SetSlotsUsed(slotsUsed + 1);
+ retVal = (UINT_PTR)((slotsUsed + 1) << 1);
+ break;
+ }
}
- gc.handleTable->SetAt(slotsUsed, gc.value);
- gc.loaderAllocator->SetSlotsUsed(slotsUsed + 1);
- retVal = (UINT_PTR)((slotsUsed + 1) << 1);
- }
- }
- else
- {
- OBJECTREF* pRef = GetDomain()->AllocateObjRefPtrsInLargeTable(1);
- SetObjectReference(pRef, gc.value, IsDomainNeutral() ? NULL : GetDomain()->AsAppDomain());
- retVal = (((UINT_PTR)pRef) + 1);
+ // Loop in the unlikely case that another thread beat us to enlarging the handle table, but
+ // all of its slots were used up before the current thread was scheduled.
+ }
+ while (true);
}
}
+ else
+ {
+ OBJECTREF* pRef = GetDomain()->AllocateObjRefPtrsInLargeTable(1);
+ SetObjectReference(pRef, gc.value, IsDomainNeutral() ? NULL : GetDomain()->AsAppDomain());
+ retVal = (((UINT_PTR)pRef) + 1);
+ }
GCPROTECT_END();
- return (LOADERHANDLE)retVal;
-} // LoaderAllocator::AllocateHandle_Unlocked
+ return retVal;
+}
OBJECTREF LoaderAllocator::GetHandleValue(LOADERHANDLE handle)
{
@@ -820,18 +845,32 @@ OBJECTREF LoaderAllocator::GetHandleValue(LOADERHANDLE handle)
return objRet;
}
-void LoaderAllocator::ClearHandle(LOADERHANDLE handle)
+void LoaderAllocator::FreeHandle(LOADERHANDLE handle)
{
CONTRACTL
{
- THROWS;
- GC_TRIGGERS;
- MODE_COOPERATIVE;
+ NOTHROW;
+ GC_NOTRIGGER;
+ MODE_ANY;
PRECONDITION(handle != NULL);
}
CONTRACTL_END;
SetHandleValue(handle, NULL);
+
+ if ((((UINT_PTR)handle) & 1) == 0)
+ {
+ // The slot value doesn't have the low bit set, so it is an index to the handle table.
+ // In this case, push the index of the handle to the stack of freed indexes for
+ // reuse.
+ CrstHolder ch(&m_crstLoaderAllocator);
+
+ UINT_PTR index = (((UINT_PTR)handle) >> 1) - 1;
+ // The Push can fail due to OOM. Ignore this failure; it is better than crashing. The
+ // only effect is that the slot will not be reused in the future if the runtime survives
+ // the low memory situation.
+ m_freeHandleIndexesStack.Push((DWORD)index);
+ }
}
OBJECTREF LoaderAllocator::CompareExchangeValueInHandle(LOADERHANDLE handle, OBJECTREF valueUNSAFE, OBJECTREF compareUNSAFE)
@@ -860,34 +899,32 @@ OBJECTREF LoaderAllocator::CompareExchangeValueInHandle(LOADERHANDLE handle, OBJ
gc.value = valueUNSAFE;
gc.compare = compareUNSAFE;
- /* The handle table is read locklessly, be careful */
+ if ((((UINT_PTR)handle) & 1) != 0)
{
- CrstHolder ch(&m_crstLoaderAllocator);
-
- if ((((UINT_PTR)handle) & 1) != 0)
+ OBJECTREF *ptr = (OBJECTREF *)(((UINT_PTR)handle) - 1);
+ gc.previous = *ptr;
+ if ((*ptr) == gc.compare)
{
- OBJECTREF *ptr = (OBJECTREF *)(((UINT_PTR)handle) - 1);
- gc.previous = *ptr;
- if ((*ptr) == gc.compare)
- {
- SetObjectReference(ptr, gc.value, IsDomainNeutral() ? NULL : GetDomain()->AsAppDomain());
- }
+ SetObjectReference(ptr, gc.value, IsDomainNeutral() ? NULL : GetDomain()->AsAppDomain());
}
- else
- {
- _ASSERTE(!ObjectHandleIsNull(m_hLoaderAllocatorObjectHandle));
+ }
+ else
+ {
+ /* The handle table is read locklessly, be careful */
+ CrstHolder ch(&m_crstLoaderAllocator);
- UINT_PTR index = (((UINT_PTR)handle) >> 1) - 1;
- LOADERALLOCATORREF loaderAllocator = (LOADERALLOCATORREF)ObjectFromHandle(m_hLoaderAllocatorObjectHandle);
- PTRARRAYREF handleTable = loaderAllocator->GetHandleTable();
+ _ASSERTE(!ObjectHandleIsNull(m_hLoaderAllocatorObjectHandle));
- gc.previous = handleTable->GetAt(index);
- if (gc.previous == gc.compare)
- {
- handleTable->SetAt(index, gc.value);
- }
+ UINT_PTR index = (((UINT_PTR)handle) >> 1) - 1;
+ LOADERALLOCATORREF loaderAllocator = (LOADERALLOCATORREF)ObjectFromHandle(m_hLoaderAllocatorObjectHandle);
+ PTRARRAYREF handleTable = loaderAllocator->GetHandleTable();
+
+ gc.previous = handleTable->GetAt(index);
+ if (gc.previous == gc.compare)
+ {
+ handleTable->SetAt(index, gc.value);
}
- } // End critical section
+ }
retVal = gc.previous;
GCPROTECT_END();
@@ -899,35 +936,35 @@ void LoaderAllocator::SetHandleValue(LOADERHANDLE handle, OBJECTREF value)
{
CONTRACTL
{
- THROWS;
- GC_TRIGGERS;
- MODE_COOPERATIVE;
+ NOTHROW;
+ GC_NOTRIGGER;
+ MODE_ANY;
PRECONDITION(handle != NULL);
}
CONTRACTL_END;
+ GCX_COOP();
+
GCPROTECT_BEGIN(value);
- // The handle table is read locklessly, be careful
+ // If the slot value does have the low bit set, then it is a simple pointer to the value
+ // Otherwise, we will need a more complicated operation to clear the value.
+ if ((((UINT_PTR)handle) & 1) != 0)
+ {
+ OBJECTREF *ptr = (OBJECTREF *)(((UINT_PTR)handle) - 1);
+ SetObjectReference(ptr, value, IsDomainNeutral() ? NULL : GetDomain()->AsAppDomain());
+ }
+ else
{
+ // The handle table is read locklessly, be careful
CrstHolder ch(&m_crstLoaderAllocator);
- // If the slot value does have the low bit set, then it is a simple pointer to the value
- // Otherwise, we will need a more complicated operation to clear the value.
- if ((((UINT_PTR)handle) & 1) != 0)
- {
- OBJECTREF *ptr = (OBJECTREF *)(((UINT_PTR)handle) - 1);
- SetObjectReference(ptr, value, IsDomainNeutral() ? NULL : GetDomain()->AsAppDomain());
- }
- else
- {
- _ASSERTE(!ObjectHandleIsNull(m_hLoaderAllocatorObjectHandle));
+ _ASSERTE(!ObjectHandleIsNull(m_hLoaderAllocatorObjectHandle));
- UINT_PTR index = (((UINT_PTR)handle) >> 1) - 1;
- LOADERALLOCATORREF loaderAllocator = (LOADERALLOCATORREF)ObjectFromHandle(m_hLoaderAllocatorObjectHandle);
- PTRARRAYREF handleTable = loaderAllocator->GetHandleTable();
- handleTable->SetAt(index, value);
- }
+ UINT_PTR index = (((UINT_PTR)handle) >> 1) - 1;
+ LOADERALLOCATORREF loaderAllocator = (LOADERALLOCATORREF)ObjectFromHandle(m_hLoaderAllocatorObjectHandle);
+ PTRARRAYREF handleTable = loaderAllocator->GetHandleTable();
+ handleTable->SetAt(index, value);
}
GCPROTECT_END();
@@ -1001,7 +1038,7 @@ void LoaderAllocator::Init(BaseDomain *pDomain, BYTE *pExecutableHeapMemory)
m_pDomain = pDomain;
- m_crstLoaderAllocator.Init(CrstLoaderAllocator);
+ m_crstLoaderAllocator.Init(CrstLoaderAllocator, (CrstFlags)CRST_UNSAFE_COOPGC);
//
// Initialize the heaps
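The reworked AllocateHandle keeps the lockless-reader invariant: slots are claimed under m_crstLoaderAllocator, but the larger handle array is allocated outside the lock (allocation can trigger a GC) and installed under the lock only if no other thread replaced the table first, retrying otherwise. The handle values themselves carry a low-bit tag: a direct OBJECTREF pointer plus one in the non-collectible case, or (index + 1) << 1 into the managed table, so a valid handle is never NULL. A small self-contained sketch of just that encoding arithmetic (names are illustrative):

```cpp
// Sketch of the two LOADERHANDLE encodings used above (arithmetic only).
// Low bit set   -> direct pointer + 1 (non-collectible case).
// Low bit clear -> (tableIndex + 1) << 1, so a valid handle is never 0/NULL.
#include <cassert>
#include <cstdint>

using Handle = std::uintptr_t;

Handle EncodeDirect(void** slot)      { return reinterpret_cast<Handle>(slot) + 1; }
Handle EncodeIndex(std::uint32_t idx) { return (static_cast<Handle>(idx) + 1) << 1; }

bool   IsDirect(Handle h)             { return (h & 1) != 0; }
void** DecodeDirect(Handle h)         { return reinterpret_cast<void**>(h - 1); }
std::uint32_t DecodeIndex(Handle h)   { return static_cast<std::uint32_t>((h >> 1) - 1); }

int main() {
    assert(DecodeIndex(EncodeIndex(7)) == 7);   // round-trips a table index
    void* slot = nullptr;
    assert(DecodeDirect(EncodeDirect(&slot)) == &slot);
    assert(IsDirect(EncodeDirect(&slot)) && !IsDirect(EncodeIndex(7)));
}
```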
diff --git a/src/vm/loaderallocator.hpp b/src/vm/loaderallocator.hpp
index abfd4d0c45..f047076829 100644
--- a/src/vm/loaderallocator.hpp
+++ b/src/vm/loaderallocator.hpp
@@ -93,6 +93,38 @@ public:
COUNT_T Hash();
};
+// Segmented stack to store freed handle indices
+class SegmentedHandleIndexStack
+{
+ // Segment of the stack
+ struct Segment
+ {
+ static const int Size = 64;
+
+ Segment* m_prev;
+ DWORD m_data[Size];
+ };
+
+ // Segment containing the TOS
+ Segment * m_TOSSegment = NULL;
+ // One cached free segment to avoid a delete / new cycle when pops and pushes
+ // oscillate at the boundary of two segments.
+ Segment * m_freeSegment = NULL;
+ // Index of the top of stack in the TOS segment
+ int m_TOSIndex = Segment::Size;
+
+public:
+
+ // Push the value onto the stack. Returns false if the push cannot be done due to OOM.
+ inline bool Push(DWORD value);
+
+ // Pop the value from the stack
+ inline DWORD Pop();
+
+ // Check if the stack is empty.
+ inline bool IsEmpty();
+};
+
class StringLiteralMap;
class VirtualCallStubManager;
template <typename ELEMENT>
@@ -210,8 +242,9 @@ private:
SList<FailedTypeInitCleanupListItem> m_failedTypeInitCleanupList;
+ SegmentedHandleIndexStack m_freeHandleIndexesStack;
+
#ifndef DACCESS_COMPILE
- LOADERHANDLE AllocateHandle_Unlocked(OBJECTREF value);
public:
// CleanupFailedTypeInit is called from AppDomain
@@ -393,7 +426,7 @@ public:
void SetHandleValue(LOADERHANDLE handle, OBJECTREF value);
OBJECTREF CompareExchangeValueInHandle(LOADERHANDLE handle, OBJECTREF value, OBJECTREF compare);
- void ClearHandle(LOADERHANDLE handle);
+ void FreeHandle(LOADERHANDLE handle);
// The default implementation is a no-op. Only collectible loader allocators implement this method.
virtual void RegisterHandleForCleanup(OBJECTHANDLE /* objHandle */) { }
diff --git a/src/vm/loaderallocator.inl b/src/vm/loaderallocator.inl
index 46c253f4bd..327dd3e432 100644
--- a/src/vm/loaderallocator.inl
+++ b/src/vm/loaderallocator.inl
@@ -179,5 +179,66 @@ FORCEINLINE OBJECTREF LoaderAllocator::GetHandleValueFastCannotFailType2(LOADERH
return handleTable->GetAt(index);
}
+
+inline bool SegmentedHandleIndexStack::Push(DWORD value)
+{
+ LIMITED_METHOD_CONTRACT;
+
+ if (m_TOSIndex == Segment::Size)
+ {
+ Segment* segment;
+
+ if (m_freeSegment == NULL)
+ {
+ segment = new (nothrow) Segment();
+ if (segment == NULL)
+ {
+ return false;
+ }
+ }
+ else
+ {
+ segment = m_freeSegment;
+ m_freeSegment = NULL;
+ }
+
+ segment->m_prev = m_TOSSegment;
+ m_TOSSegment = segment;
+
+ m_TOSIndex = 0;
+ }
+
+ m_TOSSegment->m_data[m_TOSIndex++] = value;
+ return true;
+}
+
+inline DWORD SegmentedHandleIndexStack::Pop()
+{
+ LIMITED_METHOD_CONTRACT;
+
+ _ASSERTE(!IsEmpty());
+
+ if (m_TOSIndex == 0)
+ {
+ Segment* prevSegment = m_TOSSegment->m_prev;
+ _ASSERTE(prevSegment != NULL);
+
+ delete m_freeSegment;
+ m_freeSegment = m_TOSSegment;
+
+ m_TOSSegment = prevSegment;
+ m_TOSIndex = Segment::Size;
+ }
+
+ return m_TOSSegment->m_data[--m_TOSIndex];
+}
+
+inline bool SegmentedHandleIndexStack::IsEmpty()
+{
+ LIMITED_METHOD_CONTRACT;
+
+ return (m_TOSSegment == NULL) || ((m_TOSIndex == 0) && (m_TOSSegment->m_prev == NULL));
+}
+
#endif // _LOADER_ALLOCATOR_I
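A hypothetical usage of the stack defined above (it assumes the surrounding CLR headers for DWORD). Pop recycles the most recently freed index first, and the single cached free segment means a push/pop sequence oscillating across a segment boundary does not allocate and free a segment each time:

```cpp
// Hypothetical usage of SegmentedHandleIndexStack; Push reports allocation
// failure instead of throwing, which is why FreeHandle can ignore OOM.
SegmentedHandleIndexStack g_freedIndexes;

void OnHandleFreed(DWORD index) {
    g_freedIndexes.Push(index);       // may return false under OOM; reuse is then skipped
}

bool TryReuseHandle(DWORD* outIndex) {
    if (g_freedIndexes.IsEmpty())
        return false;
    *outIndex = g_freedIndexes.Pop(); // LIFO: most recently freed slot is reused first
    return true;
}
```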
diff --git a/src/vm/methodtable.cpp b/src/vm/methodtable.cpp
index 53a0be79a5..c3441d7d16 100644
--- a/src/vm/methodtable.cpp
+++ b/src/vm/methodtable.cpp
@@ -3489,7 +3489,7 @@ void MethodTable::DoRunClassInitThrowing()
if (hNewInitException != NULL &&
InterlockedCompareExchangeT((&pEntry->m_hInitException), hNewInitException, hOrigInitException) != hOrigInitException)
{
- pEntry->m_pLoaderAllocator->ClearHandle(hNewInitException);
+ pEntry->m_pLoaderAllocator->FreeHandle(hNewInitException);
}
}
}
@@ -4042,7 +4042,7 @@ OBJECTREF MethodTable::GetManagedClassObject()
if (FastInterlockCompareExchangePointer(&(EnsureWritablePages(GetWriteableDataForWrite())->m_hExposedClassObject), exposedClassObjectHandle, static_cast<LOADERHANDLE>(NULL)))
{
- pLoaderAllocator->ClearHandle(exposedClassObjectHandle);
+ pLoaderAllocator->FreeHandle(exposedClassObjectHandle);
}
GCPROTECT_END();
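The call sites above follow a publish-or-free pattern: each racing thread allocates its own handle, one thread wins the interlocked compare-exchange, and the losers hand their handles back via FreeHandle (with the old ClearHandle, a loser's table slot was nulled but never reusable). A minimal sketch of the pattern with stand-in names:

```cpp
// Sketch of the publish-or-free pattern at the FreeHandle call sites.
// FreeHandleSketch is a stand-in; in the runtime this is LoaderAllocator::FreeHandle.
#include <atomic>
#include <cstdint>

using Handle = std::uintptr_t;

inline void FreeHandleSketch(Handle) { /* return the slot to a free list */ }

void PublishOnce(std::atomic<Handle>& slot, Handle fresh) {
    Handle expected = 0;
    if (!slot.compare_exchange_strong(expected, fresh))
        FreeHandleSketch(fresh);   // another thread won the race; recycle ours
}
```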
diff --git a/src/vm/methodtablebuilder.cpp b/src/vm/methodtablebuilder.cpp
index 56397ca267..15ce7d2af3 100644
--- a/src/vm/methodtablebuilder.cpp
+++ b/src/vm/methodtablebuilder.cpp
@@ -3822,7 +3822,7 @@ VOID MethodTableBuilder::InitializeFieldDescs(FieldDesc *pFieldDescList,
IfFailThrow(COR_E_TYPELOAD);
}
- if ((fIsThreadStatic || fIsContextStatic || bmtFP->fHasFixedAddressValueTypes) && GetAssembly()->IsCollectible())
+ if ((fIsContextStatic || bmtFP->fHasFixedAddressValueTypes) && GetAssembly()->IsCollectible())
{
if (bmtFP->fHasFixedAddressValueTypes)
{
@@ -6529,7 +6529,7 @@ VOID MethodTableBuilder::PlaceInterfaceDeclarationOnClass(
}
}
#endif
-
+
#ifdef _DEBUG
if (bmtInterface->dbg_fShouldInjectInterfaceDuplicates)
{ // We injected interface duplicates
diff --git a/src/vm/threads.cpp b/src/vm/threads.cpp
index 9a0ebdc7ca..8557f9c9f3 100644
--- a/src/vm/threads.cpp
+++ b/src/vm/threads.cpp
@@ -9136,6 +9136,28 @@ void Thread::DeleteThreadStaticData()
//+----------------------------------------------------------------------------
//
+// Method: Thread::DeleteThreadStaticData public
+//
+// Synopsis: Delete the static data for the given module. This is called
+// when the AssemblyLoadContext unloads.
+//
+//
+//+----------------------------------------------------------------------------
+
+void Thread::DeleteThreadStaticData(ModuleIndex index)
+{
+ for (SIZE_T i = 0; i < m_TLBTableSize; ++i)
+ {
+ ThreadLocalBlock * pTLB = m_pTLBTable[i];
+ if (pTLB != NULL)
+ {
+ pTLB->FreeTLM(index.m_dwIndex, FALSE /* isThreadShuttingDown */);
+ }
+ }
+}
+
+//+----------------------------------------------------------------------------
+//
// Method: Thread::DeleteThreadStaticData protected
//
// Synopsis: Delete the static data for the given appdomain. This is called
diff --git a/src/vm/threads.h b/src/vm/threads.h
index dc468630f5..3f281c0274 100644
--- a/src/vm/threads.h
+++ b/src/vm/threads.h
@@ -4526,6 +4526,10 @@ public:
}
*/
+ // Called during AssemblyLoadContext teardown to clean up all structures
+ // associated with thread statics for the specific Module
+ void DeleteThreadStaticData(ModuleIndex index);
+
protected:
// Called during AD teardown to clean up any references this
diff --git a/src/vm/threadstatics.cpp b/src/vm/threadstatics.cpp
index 501cbbcba8..7e9a9da987 100644
--- a/src/vm/threadstatics.cpp
+++ b/src/vm/threadstatics.cpp
@@ -17,7 +17,7 @@
#ifndef DACCESS_COMPILE
-void ThreadLocalBlock::FreeTLM(SIZE_T i)
+void ThreadLocalBlock::FreeTLM(SIZE_T i, BOOL isThreadShuttingDown)
{
CONTRACTL
{
@@ -27,10 +27,20 @@ void ThreadLocalBlock::FreeTLM(SIZE_T i)
MODE_ANY;
}
CONTRACTL_END;
- _ASSERTE(m_pTLMTable != NULL);
- PTR_ThreadLocalModule pThreadLocalModule = m_pTLMTable[i].pTLM;
- m_pTLMTable[i].pTLM = NULL;
+ PTR_ThreadLocalModule pThreadLocalModule;
+
+ {
+ SpinLock::Holder lock(&m_TLMTableLock);
+
+ _ASSERTE(m_pTLMTable != NULL);
+ if (i >= m_TLMTableSize)
+ {
+ return;
+ }
+ pThreadLocalModule = m_pTLMTable[i].pTLM;
+ m_pTLMTable[i].pTLM = NULL;
+ }
if (pThreadLocalModule != NULL)
{
@@ -40,6 +50,20 @@ void ThreadLocalBlock::FreeTLM(SIZE_T i)
{
if (pThreadLocalModule->m_pDynamicClassTable[k].m_pDynamicEntry != NULL)
{
+ if (isThreadShuttingDown && (pThreadLocalModule->m_pDynamicClassTable[k].m_dwFlags & ClassInitFlags::COLLECTIBLE_FLAG))
+ {
+ ThreadLocalModule::CollectibleDynamicEntry *entry = (ThreadLocalModule::CollectibleDynamicEntry*)pThreadLocalModule->m_pDynamicClassTable[k].m_pDynamicEntry;
+ PTR_LoaderAllocator pLoaderAllocator = entry->m_pLoaderAllocator;
+
+ if (entry->m_hGCStatics != NULL)
+ {
+ pLoaderAllocator->FreeHandle(entry->m_hGCStatics);
+ }
+ if (entry->m_hNonGCStatics != NULL)
+ {
+ pLoaderAllocator->FreeHandle(entry->m_hNonGCStatics);
+ }
+ }
delete pThreadLocalModule->m_pDynamicClassTable[k].m_pDynamicEntry;
pThreadLocalModule->m_pDynamicClassTable[k].m_pDynamicEntry = NULL;
}
@@ -70,7 +94,7 @@ void ThreadLocalBlock::FreeTable()
{
if (m_pTLMTable[i].pTLM != NULL)
{
- FreeTLM(i);
+ FreeTLM(i, TRUE /* isThreadShuttingDown */);
}
}
@@ -119,19 +143,23 @@ void ThreadLocalBlock::EnsureModuleIndex(ModuleIndex index)
// Zero out the new TLM table
memset(pNewModuleSlots, 0 , sizeof(TLMTableEntry) * aModuleIndices);
- if (m_pTLMTable != NULL)
- {
- memcpy(pNewModuleSlots, m_pTLMTable, sizeof(TLMTableEntry) * m_TLMTableSize);
- }
- else
+ PTR_TLMTableEntry pOldModuleSlots = m_pTLMTable;
+
{
- _ASSERTE(m_TLMTableSize == 0);
- }
+ SpinLock::Holder lock(&m_TLMTableLock);
- PTR_TLMTableEntry pOldModuleSlots = m_pTLMTable;
-
- m_pTLMTable = pNewModuleSlots;
- m_TLMTableSize = aModuleIndices;
+ if (m_pTLMTable != NULL)
+ {
+ memcpy(pNewModuleSlots, m_pTLMTable, sizeof(TLMTableEntry) * m_TLMTableSize);
+ }
+ else
+ {
+ _ASSERTE(m_TLMTableSize == 0);
+ }
+
+ m_pTLMTable = pNewModuleSlots;
+ m_TLMTableSize = aModuleIndices;
+ }
if (pOldModuleSlots != NULL)
delete pOldModuleSlots;
@@ -500,34 +528,72 @@ void ThreadLocalModule::AllocateDynamicClass(MethodTable *pMT)
// We need this check because a class may have a cctor but no statics
if (dwStaticBytes > 0 || dwNumHandleStatics > 0)
{
- // Collectible types do not support static fields yet
- if (pMT->Collectible())
- COMPlusThrow(kNotSupportedException, W("NotSupported_CollectibleNotYet"));
-
if (pDynamicStatics == NULL)
{
+ SIZE_T dynamicEntrySize;
+ if (pMT->Collectible())
+ {
+ dynamicEntrySize = sizeof(CollectibleDynamicEntry);
+ }
+ else
+ {
+ dynamicEntrySize = DynamicEntry::GetOffsetOfDataBlob() + dwStaticBytes;
+ }
+
// If this allocation fails, we will throw
- pDynamicStatics = (DynamicEntry*)new BYTE[sizeof(DynamicEntry) + dwStaticBytes];
+ pDynamicStatics = (DynamicEntry*)new BYTE[dynamicEntrySize];
#ifdef FEATURE_64BIT_ALIGNMENT
// The memory block has to be aligned at MAX_PRIMITIVE_FIELD_SIZE to guarantee alignment of statics
- static_assert_no_msg(sizeof(DynamicEntry) % MAX_PRIMITIVE_FIELD_SIZE == 0);
+ static_assert_no_msg(sizeof(NormalDynamicEntry) % MAX_PRIMITIVE_FIELD_SIZE == 0);
_ASSERTE(IS_ALIGNED(pDynamicStatics, MAX_PRIMITIVE_FIELD_SIZE));
#endif
// Zero out the new DynamicEntry
- memset((BYTE*)pDynamicStatics, 0, sizeof(DynamicEntry) + dwStaticBytes);
+ memset((BYTE*)pDynamicStatics, 0, dynamicEntrySize);
+
+ if (pMT->Collectible())
+ {
+ ((CollectibleDynamicEntry*)pDynamicStatics)->m_pLoaderAllocator = pMT->GetLoaderAllocator();
+ }
// Save the DynamicEntry in the DynamicClassTable
m_pDynamicClassTable[dwID].m_pDynamicEntry = pDynamicStatics;
}
+ if (pMT->Collectible() && (dwStaticBytes != 0))
+ {
+ OBJECTREF nongcStaticsArray = NULL;
+ GCPROTECT_BEGIN(nongcStaticsArray);
+#ifdef FEATURE_64BIT_ALIGNMENT
+ // Allocate memory with extra alignment only if it is really necessary
+ if (dwStaticBytes >= MAX_PRIMITIVE_FIELD_SIZE)
+ nongcStaticsArray = AllocatePrimitiveArray(ELEMENT_TYPE_I8, (dwStaticBytes + (sizeof(CLR_I8) - 1)) / (sizeof(CLR_I8)));
+ else
+#endif
+ nongcStaticsArray = AllocatePrimitiveArray(ELEMENT_TYPE_U1, dwStaticBytes);
+
+ ((CollectibleDynamicEntry *)pDynamicStatics)->m_hNonGCStatics = pMT->GetLoaderAllocator()->AllocateHandle(nongcStaticsArray);
+ GCPROTECT_END();
+ }
+
if (dwNumHandleStatics > 0)
{
- PTR_ThreadLocalBlock pThreadLocalBlock = GetThread()->m_pThreadLocalBlock;
- _ASSERTE(pThreadLocalBlock != NULL);
- pThreadLocalBlock->AllocateStaticFieldObjRefPtrs(dwNumHandleStatics,
- &pDynamicStatics->m_pGCStatics);
+ if (!pMT->Collectible())
+ {
+ PTR_ThreadLocalBlock pThreadLocalBlock = GetThread()->m_pThreadLocalBlock;
+ _ASSERTE(pThreadLocalBlock != NULL);
+ pThreadLocalBlock->AllocateStaticFieldObjRefPtrs(dwNumHandleStatics,
+ &((NormalDynamicEntry *)pDynamicStatics)->m_pGCStatics);
+ }
+ else
+ {
+ OBJECTREF gcStaticsArray = NULL;
+ GCPROTECT_BEGIN(gcStaticsArray);
+ gcStaticsArray = AllocateObjectArray(dwNumHandleStatics, g_pObjectClass);
+ ((CollectibleDynamicEntry *)pDynamicStatics)->m_hGCStatics = pMT->GetLoaderAllocator()->AllocateHandle(gcStaticsArray);
+ GCPROTECT_END();
+ }
}
}
}
@@ -552,6 +618,11 @@ void ThreadLocalModule::PopulateClass(MethodTable *pMT)
if (pMT->IsDynamicStatics())
AllocateDynamicClass(pMT);
+ if (pMT->Collectible())
+ {
+ SetClassFlags(pMT, ClassInitFlags::COLLECTIBLE_FLAG);
+ }
+
// We need to allocate boxes for any value-type statics that are not
// primitives or enums, because these statics may contain references
// to objects on the GC heap
@@ -668,6 +739,7 @@ PTR_ThreadLocalModule ThreadStatics::AllocateTLM(Module * pModule)
}
CONTRACTL_END;
+
SIZE_T size = pModule->GetThreadLocalModuleSize();
_ASSERTE(size >= ThreadLocalModule::OffsetOfDataBlob());
@@ -681,7 +753,7 @@ PTR_ThreadLocalModule ThreadStatics::AllocateTLM(Module * pModule)
// Zero out the part of memory where the TLM resides
memset(pThreadLocalModule, 0, size);
-
+
return pThreadLocalModule;
}
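AllocateDynamicClass above now sizes the entry by kind: a NormalDynamicEntry embeds the statics blob inline, so its size is GetOffsetOfDataBlob() plus dwStaticBytes, while a CollectibleDynamicEntry is fixed-size and instead holds LOADERHANDLEs to GC-heap arrays (a primitive array for non-GC statics, an object[] for GC statics), letting the storage die with its collectible LoaderAllocator. A sketch of the two layouts and the size computation (stand-in types, not the CLR definitions):

```cpp
// Sketch of the two dynamic-entry layouts and the sizing decision above.
#include <cstddef>
#include <cstdint>

using Handle = std::uintptr_t;   // stands in for LOADERHANDLE

struct CollectibleEntry {        // fixed size: statics live in GC arrays held via handles
    Handle gcStatics;
    Handle nonGCStatics;
    void*  loaderAllocator;      // owning allocator, as in CollectibleDynamicEntry
};

struct NormalEntry {             // variable size: statics blob embedded inline
    void*         gcStatics;
    unsigned char dataBlob[1];   // first byte of the trailing blob
};

std::size_t EntrySize(bool collectible, std::size_t staticBytes) {
    return collectible ? sizeof(CollectibleEntry)
                       : offsetof(NormalEntry, dataBlob) + staticBytes;
}
```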
diff --git a/src/vm/threadstatics.h b/src/vm/threadstatics.h
index 3e61049f59..b6cd7db4d7 100644
--- a/src/vm/threadstatics.h
+++ b/src/vm/threadstatics.h
@@ -29,6 +29,7 @@
#include "field.h"
#include "methodtable.h"
#include "threads.h"
+#include "spinlock.h"
// Defines ObjectHandeList type
#include "specialstatics.h"
@@ -42,8 +43,63 @@ struct ThreadLocalModule
friend class CheckAsmOffsets;
friend struct ThreadLocalBlock;
+ // After these macros complete, they may have returned an interior pointer into a GC object, cast to a byte pointer.
+ // It is critically important that no GC is allowed to occur before this pointer is used.
+#define GET_DYNAMICENTRY_GCTHREADSTATICS_BASEPOINTER(pLoaderAllocator, dynamicClassInfoParam, pGCStatics) \
+ {\
+ ThreadLocalModule::PTR_DynamicClassInfo dynamicClassInfo = dac_cast<ThreadLocalModule::PTR_DynamicClassInfo>(dynamicClassInfoParam);\
+ ThreadLocalModule::PTR_DynamicEntry pDynamicEntry = dac_cast<ThreadLocalModule::PTR_DynamicEntry>((ThreadLocalModule::DynamicEntry*)dynamicClassInfo->m_pDynamicEntry); \
+ if ((dynamicClassInfo->m_dwFlags) & ClassInitFlags::COLLECTIBLE_FLAG) \
+ {\
+ PTRARRAYREF objArray;\
+ objArray = (PTRARRAYREF)pLoaderAllocator->GetHandleValueFastCannotFailType2( \
+ (dac_cast<ThreadLocalModule::PTR_CollectibleDynamicEntry>(pDynamicEntry))->m_hGCStatics);\
+ *(pGCStatics) = dac_cast<PTR_BYTE>(PTR_READ(PTR_TO_TADDR(OBJECTREFToObject( objArray )) + offsetof(PtrArray, m_Array), objArray->GetNumComponents() * sizeof(void*))) ;\
+ }\
+ else\
+ {\
+ *(pGCStatics) = (dac_cast<ThreadLocalModule::PTR_NormalDynamicEntry>(pDynamicEntry))->GetGCStaticsBasePointer();\
+ }\
+ }\
+
+#define GET_DYNAMICENTRY_NONGCTHREADSTATICS_BASEPOINTER(pLoaderAllocator, dynamicClassInfoParam, pNonGCStatics) \
+ {\
+ ThreadLocalModule::PTR_DynamicClassInfo dynamicClassInfo = dac_cast<ThreadLocalModule::PTR_DynamicClassInfo>(dynamicClassInfoParam);\
+ ThreadLocalModule::PTR_DynamicEntry pDynamicEntry = dac_cast<ThreadLocalModule::PTR_DynamicEntry>((ThreadLocalModule::DynamicEntry*)(dynamicClassInfo)->m_pDynamicEntry); \
+ if (((dynamicClassInfo)->m_dwFlags) & ClassInitFlags::COLLECTIBLE_FLAG) \
+ {\
+ if ((dac_cast<ThreadLocalModule::PTR_CollectibleDynamicEntry>(pDynamicEntry))->m_hNonGCStatics != 0) \
+ { \
+ U1ARRAYREF objArray;\
+ objArray = (U1ARRAYREF)pLoaderAllocator->GetHandleValueFastCannotFailType2( \
+ (dac_cast<ThreadLocalModule::PTR_CollectibleDynamicEntry>(pDynamicEntry))->m_hNonGCStatics);\
+ *(pNonGCStatics) = dac_cast<PTR_BYTE>(PTR_READ( \
+ PTR_TO_TADDR(OBJECTREFToObject( objArray )) + sizeof(ArrayBase) - ThreadLocalModule::DynamicEntry::GetOffsetOfDataBlob(), \
+ objArray->GetNumComponents() * (DWORD)objArray->GetComponentSize() + ThreadLocalModule::DynamicEntry::GetOffsetOfDataBlob())); \
+ } else (*pNonGCStatics) = NULL; \
+ }\
+ else\
+ {\
+ *(pNonGCStatics) = dac_cast<ThreadLocalModule::PTR_NormalDynamicEntry>(pDynamicEntry)->GetNonGCStaticsBasePointer();\
+ }\
+ }\
+
struct DynamicEntry
{
+ static DWORD GetOffsetOfDataBlob();
+ };
+ typedef DPTR(DynamicEntry) PTR_DynamicEntry;
+
+ struct CollectibleDynamicEntry : public DynamicEntry
+ {
+ LOADERHANDLE m_hGCStatics;
+ LOADERHANDLE m_hNonGCStatics;
+ PTR_LoaderAllocator m_pLoaderAllocator;
+ };
+ typedef DPTR(CollectibleDynamicEntry) PTR_CollectibleDynamicEntry;
+
+ struct NormalDynamicEntry : public DynamicEntry
+ {
OBJECTHANDLE m_pGCStatics;
#ifdef FEATURE_64BIT_ALIGNMENT
// Padding to make m_pDataBlob aligned at MAX_PRIMITIVE_FIELD_SIZE.
@@ -80,13 +136,8 @@ struct ThreadLocalModule
SUPPORTS_DAC;
return dac_cast<PTR_BYTE>(this);
}
- static DWORD GetOffsetOfDataBlob()
- {
- LIMITED_METHOD_CONTRACT;
- return offsetof(DynamicEntry, m_pDataBlob);
- }
};
- typedef DPTR(DynamicEntry) PTR_DynamicEntry;
+ typedef DPTR(NormalDynamicEntry) PTR_NormalDynamicEntry;
struct DynamicClassInfo
{
@@ -168,7 +219,7 @@ struct ThreadLocalModule
if (pMT->IsDynamicStatics())
{
- return GetDynamicEntryGCStaticsBasePointer(pMT->GetModuleDynamicEntryID());
+ return GetDynamicEntryGCStaticsBasePointer(pMT->GetModuleDynamicEntryID(), pMT->GetLoaderAllocator());
}
else
{
@@ -189,7 +240,7 @@ struct ThreadLocalModule
if (pMT->IsDynamicStatics())
{
- return GetDynamicEntryNonGCStaticsBasePointer(pMT->GetModuleDynamicEntryID());
+ return GetDynamicEntryNonGCStaticsBasePointer(pMT->GetModuleDynamicEntryID(), pMT->GetLoaderAllocator());
}
else
{
@@ -207,9 +258,19 @@ struct ThreadLocalModule
return pEntry;
}
+ inline DynamicClassInfo* GetDynamicClassInfo(DWORD n)
+ {
+ LIMITED_METHOD_CONTRACT
+ SUPPORTS_DAC;
+ _ASSERTE(m_pDynamicClassTable && m_aDynamicEntries > n);
+ dac_cast<PTR_DynamicEntry>(m_pDynamicClassTable[n].m_pDynamicEntry);
+
+ return &m_pDynamicClassTable[n];
+ }
+
// These helpers can now return null, as the debugger may do queries on a type
// before the calls to PopulateClass happen
- inline PTR_BYTE GetDynamicEntryGCStaticsBasePointer(DWORD n)
+ inline PTR_BYTE GetDynamicEntryGCStaticsBasePointer(DWORD n, PTR_LoaderAllocator pLoaderAllocator)
{
CONTRACTL
{
@@ -225,16 +286,20 @@ struct ThreadLocalModule
return NULL;
}
- DynamicEntry* pEntry = GetDynamicEntry(n);
- if (!pEntry)
+ DynamicClassInfo* pClassInfo = GetDynamicClassInfo(n);
+ if (!pClassInfo->m_pDynamicEntry)
{
return NULL;
}
- return pEntry->GetGCStaticsBasePointer();
+ PTR_BYTE retval = NULL;
+
+ GET_DYNAMICENTRY_GCTHREADSTATICS_BASEPOINTER(pLoaderAllocator, pClassInfo, &retval);
+
+ return retval;
}
- inline PTR_BYTE GetDynamicEntryNonGCStaticsBasePointer(DWORD n)
+ inline PTR_BYTE GetDynamicEntryNonGCStaticsBasePointer(DWORD n, PTR_LoaderAllocator pLoaderAllocator)
{
CONTRACTL
{
@@ -250,13 +315,17 @@ struct ThreadLocalModule
return NULL;
}
- DynamicEntry* pEntry = GetDynamicEntry(n);
- if (!pEntry)
+ DynamicClassInfo* pClassInfo = GetDynamicClassInfo(n);
+ if (!pClassInfo->m_pDynamicEntry)
{
return NULL;
}
- return pEntry->GetNonGCStaticsBasePointer();
+ PTR_BYTE retval = NULL;
+
+ GET_DYNAMICENTRY_NONGCTHREADSTATICS_BASEPOINTER(pLoaderAllocator, pClassInfo, &retval);
+
+ return retval;
}
FORCEINLINE PTR_DynamicClassInfo GetDynamicClassInfoIfInitialized(DWORD n)
@@ -320,16 +389,6 @@ struct ThreadLocalModule
SetClassFlags(pMT, ClassInitFlags::INITIALIZED_FLAG);
}
- void SetClassAllocatedAndInitialized(MethodTable* pMT)
- {
- WRAPPER_NO_CONTRACT;
-
- _ASSERTE(!IsClassInitialized(pMT));
- _ASSERTE(!IsClassInitError(pMT));
-
- SetClassFlags(pMT, ClassInitFlags::ALLOCATECLASS_FLAG | ClassInitFlags::INITIALIZED_FLAG);
- }
-
void SetClassAllocated(MethodTable* pMT)
{
WRAPPER_NO_CONTRACT;
@@ -465,6 +524,7 @@ struct ThreadLocalBlock
private:
PTR_TLMTableEntry m_pTLMTable; // Table of ThreadLocalModules
SIZE_T m_TLMTableSize; // Current size of table
+ SpinLock m_TLMTableLock; // Spinlock synchronizing growth of the table with other threads freeing TLMs
// Each ThreadLocalBlock has its own ThreadStaticHandleTable. The ThreadStaticHandleTable works
// by allocating Object arrays on the GC heap and keeping them alive with pinning handles.
@@ -498,9 +558,12 @@ public:
#ifndef DACCESS_COMPILE
ThreadLocalBlock()
- : m_pTLMTable(NULL), m_TLMTableSize(0), m_pThreadStaticHandleTable(NULL) {}
+ : m_pTLMTable(NULL), m_TLMTableSize(0), m_pThreadStaticHandleTable(NULL)
+ {
+ m_TLMTableLock.Init(LOCK_TYPE_DEFAULT);
+ }
- void FreeTLM(SIZE_T i);
+ void FreeTLM(SIZE_T i, BOOL isThreadShuttingDown);
void FreeTable();
@@ -676,5 +739,12 @@ class ThreadStatics
};
+/* static */
+inline DWORD ThreadLocalModule::DynamicEntry::GetOffsetOfDataBlob()
+{
+ LIMITED_METHOD_CONTRACT;
+ _ASSERTE(DWORD(offsetof(NormalDynamicEntry, m_pDataBlob)) == offsetof(NormalDynamicEntry, m_pDataBlob));
+ return (DWORD)offsetof(NormalDynamicEntry, m_pDataBlob);
+}
#endif
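In the collectible non-GC case, the macro above biases the returned base pointer by sizeof(ArrayBase) - GetOffsetOfDataBlob(): statics offsets are laid out against the NormalDynamicEntry data blob, so adding the data-blob offset to the biased base lands exactly on the first data byte of the backing U1 array. A tiny arithmetic check of that bias (constants are illustrative, not the real layouts):

```cpp
// Pointer arithmetic behind the non-GC collectible case in the macro above.
// The concrete values are stand-ins; the runtime uses sizeof(ArrayBase) and
// NormalDynamicEntry's real data-blob offset.
#include <cassert>
#include <cstdint>

int main() {
    const std::uintptr_t arrayObj   = 0x1000; // address of the U1ARRAYREF object
    const std::uintptr_t arrayBase  = 16;     // stand-in for sizeof(ArrayBase)
    const std::uintptr_t blobOffset = 8;      // stand-in for GetOffsetOfDataBlob()

    // Biased base handed back by the macro:
    std::uintptr_t base = arrayObj + arrayBase - blobOffset;

    // Statics are addressed relative to the data blob, so base + blobOffset
    // must land on the array's first data byte.
    assert(base + blobOffset == arrayObj + arrayBase);
}
```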
diff --git a/src/vm/typedesc.cpp b/src/vm/typedesc.cpp
index b244f4503c..1b3d5f5783 100644
--- a/src/vm/typedesc.cpp
+++ b/src/vm/typedesc.cpp
@@ -835,7 +835,7 @@ OBJECTREF ParamTypeDesc::GetManagedClassObject()
EnsureWritablePages(this);
if (FastInterlockCompareExchangePointer(&m_hExposedClassObject, hExposedClassObject, static_cast<LOADERHANDLE>(NULL)))
{
- pLoaderAllocator->ClearHandle(hExposedClassObject);
+ pLoaderAllocator->FreeHandle(hExposedClassObject);
}
if (OwnsTemplateMethodTable())
@@ -2271,7 +2271,7 @@ OBJECTREF TypeVarTypeDesc::GetManagedClassObject()
if (FastInterlockCompareExchangePointer(EnsureWritablePages(&m_hExposedClassObject), hExposedClassObject, static_cast<LOADERHANDLE>(NULL)))
{
- pLoaderAllocator->ClearHandle(hExposedClassObject);
+ pLoaderAllocator->FreeHandle(hExposedClassObject);
}
GCPROTECT_END();