summaryrefslogtreecommitdiff
path: root/src/vm
diff options
context:
space:
mode:
Diffstat (limited to 'src/vm')
-rw-r--r--src/vm/appdomain.cpp144
-rw-r--r--src/vm/appdomain.hpp11
-rw-r--r--src/vm/arm/cgencpu.h1
-rw-r--r--src/vm/arm/stubs.cpp5
-rw-r--r--src/vm/assembly.cpp54
-rw-r--r--src/vm/assemblynative.cpp57
-rw-r--r--src/vm/assemblynative.hpp3
-rw-r--r--src/vm/assemblyspec.cpp103
-rw-r--r--src/vm/assemblyspec.hpp6
-rw-r--r--src/vm/ceeload.cpp5
-rw-r--r--src/vm/clrprivbinderloadfile.h12
-rw-r--r--src/vm/clrprivbinderwinrt.h13
-rw-r--r--src/vm/comdelegate.cpp57
-rw-r--r--src/vm/comdelegate.h49
-rw-r--r--src/vm/coreassemblyspec.cpp2
-rw-r--r--src/vm/domainfile.cpp17
-rw-r--r--src/vm/domainfile.h21
-rw-r--r--src/vm/dynamicmethod.cpp2
-rw-r--r--src/vm/ecalllist.h1
-rw-r--r--src/vm/eventtrace.cpp29
-rw-r--r--src/vm/excep.cpp3
-rw-r--r--src/vm/hash.h8
-rw-r--r--src/vm/i386/stublinkerx86.cpp4
-rw-r--r--src/vm/i386/stublinkerx86.h1
-rw-r--r--src/vm/jithelpers.cpp6
-rw-r--r--src/vm/jitinterface.cpp7
-rw-r--r--src/vm/loaderallocator.cpp270
-rw-r--r--src/vm/loaderallocator.hpp93
-rw-r--r--src/vm/loaderallocator.inl28
-rw-r--r--src/vm/methodtable.inl2
-rw-r--r--src/vm/methodtablebuilder.cpp8
-rw-r--r--src/vm/stublink.cpp84
-rw-r--r--src/vm/stublink.h5
33 files changed, 791 insertions, 320 deletions
diff --git a/src/vm/appdomain.cpp b/src/vm/appdomain.cpp
index 63d077e4ff..f1c7d0a81f 100644
--- a/src/vm/appdomain.cpp
+++ b/src/vm/appdomain.cpp
@@ -4834,7 +4834,7 @@ void AppDomain::AddAssembly(DomainAssembly * assem)
}
}
-void AppDomain::RemoveAssembly_Unlocked(DomainAssembly * pAsm)
+void AppDomain::RemoveAssembly(DomainAssembly * pAsm)
{
CONTRACTL
{
@@ -4843,8 +4843,7 @@ void AppDomain::RemoveAssembly_Unlocked(DomainAssembly * pAsm)
}
CONTRACTL_END;
- _ASSERTE(GetAssemblyListLock()->OwnedByCurrentThread());
-
+ CrstHolder ch(GetAssemblyListLock());
DWORD asmCount = m_Assemblies.GetCount_Unlocked();
for (DWORD i = 0; i < asmCount; ++i)
{
@@ -5561,11 +5560,26 @@ DomainAssembly *AppDomain::LoadDomainAssemblyInternal(AssemblySpec* pIdentity,
if (result == NULL)
{
+ LoaderAllocator *pLoaderAllocator = NULL;
+
+#ifndef CROSSGEN_COMPILE
+ ICLRPrivBinder *pFileBinder = pFile->GetBindingContext();
+ if (pFileBinder != NULL)
+ {
+ // Assemblies loaded with AssemblyLoadContext need to use a different LoaderAllocator if
+ // marked as collectible
+ pFileBinder->GetLoaderAllocator((LPVOID*)&pLoaderAllocator);
+ }
+#endif // !CROSSGEN_COMPILE
+
+ if (pLoaderAllocator == NULL)
+ {
+ pLoaderAllocator = this->GetLoaderAllocator();
+ }
+
// Allocate the DomainAssembly a bit early to avoid GC mode problems. We could potentially avoid
// a rare redundant allocation by moving this closer to FileLoadLock::Create, but it's not worth it.
-
- NewHolder<DomainAssembly> pDomainAssembly;
- pDomainAssembly = new DomainAssembly(this, pFile, this->GetLoaderAllocator());
+ NewHolder<DomainAssembly> pDomainAssembly = new DomainAssembly(this, pFile, pLoaderAllocator);
LoadLockHolder lock(this);
@@ -5580,6 +5594,14 @@ DomainAssembly *AppDomain::LoadDomainAssemblyInternal(AssemblySpec* pIdentity,
// We are the first one in - create the DomainAssembly
fileLock = FileLoadLock::Create(lock, pFile, pDomainAssembly);
pDomainAssembly.SuppressRelease();
+#ifndef CROSSGEN_COMPILE
+ if (pDomainAssembly->IsCollectible())
+ {
+ // We add the assembly to the LoaderAllocator only when we are sure that it can be added
+ // and won't be deleted in case of a concurrent load from the same ALC
+ ((AssemblyLoaderAllocator *)pLoaderAllocator)->AddDomainAssembly(pDomainAssembly);
+ }
+#endif // !CROSSGEN_COMPILE
}
}
else
@@ -6003,17 +6025,15 @@ AppDomain::SharePolicy AppDomain::GetSharePolicy()
#endif // FEATURE_LOADER_OPTIMIZATION
-void AppDomain::CheckForMismatchedNativeImages(AssemblySpec * pSpec, const GUID * pGuid)
+static void NormalizeAssemblySpecForNativeDependencies(AssemblySpec * pSpec)
{
- STANDARD_VM_CONTRACT;
-
- //
- // The native images are ever used only for trusted images in CoreCLR.
- // We don't wish to open the IL file at runtime so we just forgo any
- // eager consistency checking. But we still want to prevent mistmatched
- // NGen images from being used. We record all mappings between assembly
- // names and MVID, and fail once we detect mismatch.
- //
+ CONTRACTL
+ {
+ THROWS;
+ GC_NOTRIGGER;
+ MODE_ANY;
+ }
+ CONTRACTL_END;
if (pSpec->IsStrongNamed() && pSpec->HasPublicKey())
{
@@ -6031,7 +6051,21 @@ void AppDomain::CheckForMismatchedNativeImages(AssemblySpec * pSpec, const GUID
pContext->usRevisionNumber = (USHORT)-1;
// Ignore the WinRT type while considering if two assemblies have the same identity.
- pSpec->SetWindowsRuntimeType(NULL, NULL);
+ pSpec->SetWindowsRuntimeType(NULL, NULL);
+}
+
+void AppDomain::CheckForMismatchedNativeImages(AssemblySpec * pSpec, const GUID * pGuid)
+{
+ STANDARD_VM_CONTRACT;
+
+ //
+ // The native images are ever used only for trusted images in CoreCLR.
+ // We don't wish to open the IL file at runtime so we just forgo any
+ // eager consistency checking. But we still want to prevent mistmatched
+ // NGen images from being used. We record all mappings between assembly
+ // names and MVID, and fail once we detect mismatch.
+ //
+ NormalizeAssemblySpecForNativeDependencies(pSpec);
CrstHolder ch(&m_DomainCrst);
@@ -6052,23 +6086,39 @@ void AppDomain::CheckForMismatchedNativeImages(AssemblySpec * pSpec, const GUID
//
// No entry yet - create one
//
- AllocMemTracker amTracker;
- AllocMemTracker *pamTracker = &amTracker;
-
- NativeImageDependenciesEntry * pNewEntry =
- new (pamTracker->Track(GetLowFrequencyHeap()->AllocMem(S_SIZE_T(sizeof(NativeImageDependenciesEntry)))))
- NativeImageDependenciesEntry();
-
+ NativeImageDependenciesEntry * pNewEntry = new NativeImageDependenciesEntry();
pNewEntry->m_AssemblySpec.CopyFrom(pSpec);
- pNewEntry->m_AssemblySpec.CloneFieldsToLoaderHeap(AssemblySpec::ALL_OWNED, GetLowFrequencyHeap(), pamTracker);
-
+ pNewEntry->m_AssemblySpec.CloneFields(AssemblySpec::ALL_OWNED);
pNewEntry->m_guidMVID = *pGuid;
-
m_NativeImageDependencies.Add(pNewEntry);
- amTracker.SuppressRelease();
}
}
+BOOL AppDomain::RemoveNativeImageDependency(AssemblySpec * pSpec)
+{
+ CONTRACTL
+ {
+ GC_NOTRIGGER;
+ PRECONDITION(CheckPointer(pSpec));
+ }
+ CONTRACTL_END;
+
+ BOOL result = FALSE;
+ NormalizeAssemblySpecForNativeDependencies(pSpec);
+
+ CrstHolder ch(&m_DomainCrst);
+
+ const NativeImageDependenciesEntry * pEntry = m_NativeImageDependencies.Lookup(pSpec);
+
+ if (pEntry != NULL)
+ {
+ m_NativeImageDependencies.Remove(pSpec);
+ delete pEntry;
+ result = TRUE;
+ }
+
+ return result;
+}
void AppDomain::SetupSharedStatics()
{
@@ -6484,6 +6534,44 @@ HMODULE AppDomain::FindUnmanagedImageInCache(LPCWSTR libraryName)
RETURN (HMODULE) m_UnmanagedCache.LookupEntry(&spec, 0);
}
+BOOL AppDomain::RemoveFileFromCache(PEAssembly *pFile)
+{
+ CONTRACTL
+ {
+ GC_TRIGGERS;
+ PRECONDITION(CheckPointer(pFile));
+ }
+ CONTRACTL_END;
+
+ LoadLockHolder lock(this);
+ FileLoadLock *fileLock = (FileLoadLock *)lock->FindFileLock(pFile);
+
+ if (fileLock == NULL)
+ return FALSE;
+
+ VERIFY(lock->Unlink(fileLock));
+
+ fileLock->Release();
+
+ return TRUE;
+}
+
+BOOL AppDomain::RemoveAssemblyFromCache(DomainAssembly* pAssembly)
+{
+ CONTRACTL
+ {
+ THROWS;
+ GC_TRIGGERS;
+ MODE_ANY;
+ PRECONDITION(CheckPointer(pAssembly));
+ INJECT_FAULT(COMPlusThrowOM(););
+ }
+ CONTRACTL_END;
+
+ CrstHolder holder(&m_DomainCacheCrst);
+
+ return m_AssemblyCache.RemoveAssembly(pAssembly);
+}
BOOL AppDomain::IsCached(AssemblySpec *pSpec)
{
diff --git a/src/vm/appdomain.hpp b/src/vm/appdomain.hpp
index 4990f75f33..eb28f3ae11 100644
--- a/src/vm/appdomain.hpp
+++ b/src/vm/appdomain.hpp
@@ -2295,7 +2295,7 @@ private:
GUID m_guidMVID;
};
- class NativeImageDependenciesTraits : public NoRemoveSHashTraits<DefaultSHashTraits<NativeImageDependenciesEntry *> >
+ class NativeImageDependenciesTraits : public DeleteElementsOnDestructSHashTraits<DefaultSHashTraits<NativeImageDependenciesEntry *> >
{
public:
typedef BaseAssemblySpec *key_t;
@@ -2316,6 +2316,7 @@ private:
public:
void CheckForMismatchedNativeImages(AssemblySpec * pSpec, const GUID * pGuid);
+ BOOL RemoveNativeImageDependency(AssemblySpec* pSpec);
public:
class PathIterator
@@ -2425,15 +2426,19 @@ public:
void CacheStringsForDAC();
BOOL AddFileToCache(AssemblySpec* pSpec, PEAssembly *pFile, BOOL fAllowFailure = FALSE);
+ BOOL RemoveFileFromCache(PEAssembly *pFile);
+
BOOL AddAssemblyToCache(AssemblySpec* pSpec, DomainAssembly *pAssembly);
+ BOOL RemoveAssemblyFromCache(DomainAssembly* pAssembly);
+
BOOL AddExceptionToCache(AssemblySpec* pSpec, Exception *ex);
void AddUnmanagedImageToCache(LPCWSTR libraryName, HMODULE hMod);
HMODULE FindUnmanagedImageInCache(LPCWSTR libraryName);
//****************************************************************************************
//
- // Adds an assembly to the domain.
+ // Adds or removes an assembly to the domain.
void AddAssembly(DomainAssembly * assem);
- void RemoveAssembly_Unlocked(DomainAssembly * pAsm);
+ void RemoveAssembly(DomainAssembly * pAsm);
BOOL ContainsAssembly(Assembly * assem);
diff --git a/src/vm/arm/cgencpu.h b/src/vm/arm/cgencpu.h
index 7f02b7b090..745626bd40 100644
--- a/src/vm/arm/cgencpu.h
+++ b/src/vm/arm/cgencpu.h
@@ -964,6 +964,7 @@ public:
#endif // FEATURE_SHARE_GENERIC_CODE
static Stub * CreateTailCallCopyArgsThunk(CORINFO_SIG_INFO * pSig,
+ MethodDesc* pMD,
CorInfoHelperTailCallSpecialHandling flags);
private:
diff --git a/src/vm/arm/stubs.cpp b/src/vm/arm/stubs.cpp
index cb9ff602ff..d97062d40f 100644
--- a/src/vm/arm/stubs.cpp
+++ b/src/vm/arm/stubs.cpp
@@ -3161,6 +3161,7 @@ void StubLinkerCPU::ThumbCopyOneTailCallArg(UINT * pnSrcAlign, const ArgLocDesc
Stub * StubLinkerCPU::CreateTailCallCopyArgsThunk(CORINFO_SIG_INFO * pSig,
+ MethodDesc* pMD,
CorInfoHelperTailCallSpecialHandling flags)
{
STANDARD_VM_CONTRACT;
@@ -3407,8 +3408,8 @@ Stub * StubLinkerCPU::CreateTailCallCopyArgsThunk(CORINFO_SIG_INFO * pSig,
pSl->ThumbEmitJumpRegister(thumbRegLr);
}
-
- return pSl->Link();
+ LoaderHeap* pHeap = pMD->GetLoaderAllocatorForCode()->GetStubHeap();
+ return pSl->Link(pHeap);
}
diff --git a/src/vm/assembly.cpp b/src/vm/assembly.cpp
index 4f9f13cf01..0eb1e2a8c6 100644
--- a/src/vm/assembly.cpp
+++ b/src/vm/assembly.cpp
@@ -402,53 +402,8 @@ void Assembly::Terminate( BOOL signalProfiler )
m_pClassLoader = NULL;
}
- if (m_pLoaderAllocator != NULL)
- {
- if (IsCollectible())
- {
- // This cleanup code starts resembling parts of AppDomain::Terminate too much.
- // It would be useful to reduce duplication and also establish clear responsibilites
- // for LoaderAllocator::Destroy, Assembly::Terminate, LoaderAllocator::Terminate
- // and LoaderAllocator::~LoaderAllocator. We need to establish how these
- // cleanup paths interact with app-domain unload and process tear-down, too.
-
- if (!IsAtProcessExit())
- {
- // Suspend the EE to do some clean up that can only occur
- // while no threads are running.
- GCX_COOP (); // SuspendEE may require current thread to be in Coop mode
- // SuspendEE cares about the reason flag only when invoked for a GC
- // Other values are typically ignored. If using SUSPEND_FOR_APPDOMAIN_SHUTDOWN
- // is inappropriate, we can introduce a new flag or hijack an unused one.
- ThreadSuspend::SuspendEE(ThreadSuspend::SUSPEND_FOR_APPDOMAIN_SHUTDOWN);
- }
-
- ExecutionManager::Unload(m_pLoaderAllocator);
-
- m_pLoaderAllocator->UninitVirtualCallStubManager();
- MethodTable::ClearMethodDataCache();
- _ASSERTE(m_pDomain->IsAppDomain());
- AppDomain *pAppDomain = m_pDomain->AsAppDomain();
- ClearJitGenericHandleCache(pAppDomain);
-
- if (!IsAtProcessExit())
- {
- // Resume the EE.
- ThreadSuspend::RestartEE(FALSE, TRUE);
- }
-
- // Once the manifest file is tenured, the managed LoaderAllocatorScout is responsible for cleanup.
- if (m_pManifest != NULL && m_pManifest->IsTenured())
- {
- pAppDomain->RegisterLoaderAllocatorForDeletion(m_pLoaderAllocator);
- }
- }
- m_pLoaderAllocator = NULL;
- }
-
COUNTER_ONLY(GetPerfCounters().m_Loading.cAssemblies--);
-
#ifdef PROFILING_SUPPORTED
if (CORProfilerTrackAssemblyLoads())
{
@@ -709,6 +664,7 @@ Assembly *Assembly::CreateDynamic(AppDomain *pDomain, CreateDynamicAssemblyArgs
if ((args->access & ASSEMBLY_ACCESS_COLLECT) != 0)
{
AssemblyLoaderAllocator *pAssemblyLoaderAllocator = new AssemblyLoaderAllocator();
+ pAssemblyLoaderAllocator->SetCollectible();
pLoaderAllocator = pAssemblyLoaderAllocator;
// Some of the initialization functions are not virtual. Call through the derived class
@@ -728,6 +684,12 @@ Assembly *Assembly::CreateDynamic(AppDomain *pDomain, CreateDynamicAssemblyArgs
// Create a domain assembly
pDomainAssembly = new DomainAssembly(pDomain, pFile, pLoaderAllocator);
+ if (pDomainAssembly->IsCollectible())
+ {
+ // We add the assembly to the LoaderAllocator only when we are sure that it can be added
+ // and won't be deleted in case of a concurrent load from the same ALC
+ ((AssemblyLoaderAllocator *)(LoaderAllocator *)pLoaderAllocator)->AddDomainAssembly(pDomainAssembly);
+ }
}
// Start loading process
@@ -749,7 +711,7 @@ Assembly *Assembly::CreateDynamic(AppDomain *pDomain, CreateDynamicAssemblyArgs
{
// Initializing the virtual call stub manager is delayed to remove the need for the LoaderAllocator destructor to properly handle
// uninitializing the VSD system. (There is a need to suspend the runtime, and that's tricky)
- pLoaderAllocator->InitVirtualCallStubManager(pDomain, TRUE);
+ pLoaderAllocator->InitVirtualCallStubManager(pDomain);
}
}
diff --git a/src/vm/assemblynative.cpp b/src/vm/assemblynative.cpp
index be1cbc39b9..e658f25822 100644
--- a/src/vm/assemblynative.cpp
+++ b/src/vm/assemblynative.cpp
@@ -1184,7 +1184,8 @@ void QCALLTYPE AssemblyNative::GetImageRuntimeVersion(QCall::AssemblyHandle pAss
}
/*static*/
-INT_PTR QCALLTYPE AssemblyNative::InitializeAssemblyLoadContext(INT_PTR ptrManagedAssemblyLoadContext, BOOL fRepresentsTPALoadContext)
+
+INT_PTR QCALLTYPE AssemblyNative::InitializeAssemblyLoadContext(INT_PTR ptrManagedAssemblyLoadContext, BOOL fRepresentsTPALoadContext, BOOL fIsCollectible)
{
QCALL_CONTRACT;
@@ -1203,7 +1204,41 @@ INT_PTR QCALLTYPE AssemblyNative::InitializeAssemblyLoadContext(INT_PTR ptrManag
{
// Initialize a custom Assembly Load Context
CLRPrivBinderAssemblyLoadContext *pBindContext = NULL;
- IfFailThrow(CLRPrivBinderAssemblyLoadContext::SetupContext(pCurDomain->GetId().m_dwId, pTPABinderContext, ptrManagedAssemblyLoadContext, &pBindContext));
+
+ AssemblyLoaderAllocator* loaderAllocator = NULL;
+ OBJECTHANDLE loaderAllocatorHandle = NULL;
+
+ if (fIsCollectible)
+ {
+ // Create a new AssemblyLoaderAllocator for an AssemblyLoadContext
+ loaderAllocator = new AssemblyLoaderAllocator();
+ loaderAllocator->SetCollectible();
+
+ GCX_COOP();
+ LOADERALLOCATORREF pManagedLoaderAllocator = NULL;
+ GCPROTECT_BEGIN(pManagedLoaderAllocator);
+ {
+ GCX_PREEMP();
+ // Some of the initialization functions are not virtual. Call through the derived class
+ // to prevent calling the base class version.
+ loaderAllocator->Init(pCurDomain);
+ loaderAllocator->InitVirtualCallStubManager(pCurDomain);
+
+ // Setup the managed proxy now, but do not actually transfer ownership to it.
+ // Once everything is setup and nothing can fail anymore, the ownership will be
+ // atomically transfered by call to LoaderAllocator::ActivateManagedTracking().
+ loaderAllocator->SetupManagedTracking(&pManagedLoaderAllocator);
+ }
+
+ // Create a strong handle to the LoaderAllocator
+ loaderAllocatorHandle = pCurDomain->CreateHandle(pManagedLoaderAllocator);
+
+ GCPROTECT_END();
+
+ loaderAllocator->ActivateManagedTracking();
+ }
+
+ IfFailThrow(CLRPrivBinderAssemblyLoadContext::SetupContext(pCurDomain->GetId().m_dwId, pTPABinderContext, loaderAllocator, loaderAllocatorHandle, ptrManagedAssemblyLoadContext, &pBindContext));
ptrNativeAssemblyLoadContext = reinterpret_cast<INT_PTR>(pBindContext);
}
else
@@ -1227,6 +1262,24 @@ INT_PTR QCALLTYPE AssemblyNative::InitializeAssemblyLoadContext(INT_PTR ptrManag
}
/*static*/
+void QCALLTYPE AssemblyNative::PrepareForAssemblyLoadContextRelease(INT_PTR ptrNativeAssemblyLoadContext, INT_PTR ptrManagedStrongAssemblyLoadContext)
+{
+ QCALL_CONTRACT;
+
+ BOOL fDestroyed = FALSE;
+
+ BEGIN_QCALL;
+
+
+ {
+ GCX_COOP();
+ reinterpret_cast<CLRPrivBinderAssemblyLoadContext *>(ptrNativeAssemblyLoadContext)->PrepareForLoadContextRelease(ptrManagedStrongAssemblyLoadContext);
+ }
+
+ END_QCALL;
+}
+
+/*static*/
BOOL QCALLTYPE AssemblyNative::OverrideDefaultAssemblyLoadContextForCurrentDomain(INT_PTR ptrNativeAssemblyLoadContext)
{
QCALL_CONTRACT;
diff --git a/src/vm/assemblynative.hpp b/src/vm/assemblynative.hpp
index 655f5c7ff4..0bdb2c31ed 100644
--- a/src/vm/assemblynative.hpp
+++ b/src/vm/assemblynative.hpp
@@ -117,7 +117,8 @@ public:
// PEFile QCalls
//
- static INT_PTR QCALLTYPE InitializeAssemblyLoadContext(INT_PTR ptrManagedAssemblyLoadContext, BOOL fRepresentsTPALoadContext);
+ static INT_PTR QCALLTYPE InitializeAssemblyLoadContext(INT_PTR ptrManagedAssemblyLoadContext, BOOL fRepresentsTPALoadContext, BOOL fIsCollectible);
+ static void QCALLTYPE PrepareForAssemblyLoadContextRelease(INT_PTR ptrNativeAssemblyLoadContext, INT_PTR ptrManagedStrongAssemblyLoadContext);
static BOOL QCALLTYPE OverrideDefaultAssemblyLoadContextForCurrentDomain(INT_PTR ptrNativeAssemblyLoadContext);
static BOOL QCALLTYPE CanUseAppPathAssemblyLoadContextInCurrentDomain();
static void QCALLTYPE LoadFromPath(INT_PTR ptrNativeAssemblyLoadContext, LPCWSTR pwzILPath, LPCWSTR pwzNIPath, QCall::ObjectHandleOnStack retLoadedAssembly);
diff --git a/src/vm/assemblyspec.cpp b/src/vm/assemblyspec.cpp
index f274c13c61..2d3d76cf03 100644
--- a/src/vm/assemblyspec.cpp
+++ b/src/vm/assemblyspec.cpp
@@ -1267,7 +1267,7 @@ void AssemblySpecBindingCache::Init(CrstBase *pCrst, LoaderHeap *pHeap)
m_pHeap = pHeap;
}
-AssemblySpecBindingCache::AssemblyBinding* AssemblySpecBindingCache::GetAssemblyBindingEntryForAssemblySpec(AssemblySpec* pSpec, BOOL fThrow)
+AssemblySpecBindingCache::AssemblyBinding* AssemblySpecBindingCache::LookupInternal(AssemblySpec* pSpec, BOOL fThrow)
{
CONTRACTL
{
@@ -1288,9 +1288,9 @@ AssemblySpecBindingCache::AssemblyBinding* AssemblySpecBindingCache::GetAssembly
}
CONTRACTL_END;
- AssemblyBinding* pEntry = (AssemblyBinding *) INVALIDENTRY;
UPTR key = (UPTR)pSpec->Hash();
-
+ UPTR lookupKey = key;
+
// On CoreCLR, we will use the BinderID as the key
ICLRPrivBinder *pBinderContextForLookup = NULL;
AppDomain *pSpecDomain = pSpec->GetAppDomain();
@@ -1309,7 +1309,7 @@ AssemblySpecBindingCache::AssemblyBinding* AssemblySpecBindingCache::GetAssembly
if (pBinderContextForLookup != NULL)
{
// We are working with the actual binding context in which the assembly was expected to be loaded.
- // Thus, we dont need to get it from the parent assembly.
+ // Thus, we don't need to get it from the parent assembly.
fGetBindingContextFromParent = false;
}
@@ -1324,7 +1324,6 @@ AssemblySpecBindingCache::AssemblyBinding* AssemblySpecBindingCache::GetAssembly
}
}
- UPTR lookupKey = key;
if (pBinderContextForLookup)
{
UINT_PTR binderID = 0;
@@ -1332,9 +1331,9 @@ AssemblySpecBindingCache::AssemblyBinding* AssemblySpecBindingCache::GetAssembly
_ASSERTE(SUCCEEDED(hr));
lookupKey = key^binderID;
}
-
- pEntry = (AssemblyBinding *) m_map.LookupValue(lookupKey, pSpec);
-
+
+ AssemblyBinding* pEntry = (AssemblyBinding *)m_map.LookupValue(lookupKey, pSpec);
+
// Reset the binding context if one was originally never present in the AssemblySpec and we didnt find any entry
// in the cache.
if (fGetBindingContextFromParent)
@@ -1351,8 +1350,7 @@ AssemblySpecBindingCache::AssemblyBinding* AssemblySpecBindingCache::GetAssembly
BOOL AssemblySpecBindingCache::Contains(AssemblySpec *pSpec)
{
WRAPPER_NO_CONTRACT;
-
- return (GetAssemblyBindingEntryForAssemblySpec(pSpec, TRUE) != (AssemblyBinding *) INVALIDENTRY);
+ return (LookupInternal(pSpec, TRUE) != (AssemblyBinding *) INVALIDENTRY);
}
DomainAssembly *AssemblySpecBindingCache::LookupAssembly(AssemblySpec *pSpec,
@@ -1378,7 +1376,7 @@ DomainAssembly *AssemblySpecBindingCache::LookupAssembly(AssemblySpec *pSpec,
AssemblyBinding *entry = (AssemblyBinding *) INVALIDENTRY;
- entry = GetAssemblyBindingEntryForAssemblySpec(pSpec, fThrow);
+ entry = LookupInternal(pSpec, fThrow);
if (entry == (AssemblyBinding *) INVALIDENTRY)
RETURN NULL;
@@ -1414,9 +1412,8 @@ PEAssembly *AssemblySpecBindingCache::LookupFile(AssemblySpec *pSpec, BOOL fThro
}
CONTRACT_END;
- AssemblyBinding *entry = (AssemblyBinding *) INVALIDENTRY;
-
- entry = GetAssemblyBindingEntryForAssemblySpec(pSpec, fThrow);
+ AssemblyBinding *entry = (AssemblyBinding *) INVALIDENTRY;
+ entry = LookupInternal(pSpec, fThrow);
if (entry == (AssemblyBinding *) INVALIDENTRY)
RETURN NULL;
@@ -1545,6 +1542,7 @@ BOOL AssemblySpecBindingCache::StoreAssembly(AssemblySpec *pSpec, DomainAssembly
// On CoreCLR, we will use the BinderID as the key
ICLRPrivBinder* pBinderContextForLookup = pAssembly->GetFile()->GetBindingContext();
+
_ASSERTE(pBinderContextForLookup || pAssembly->GetFile()->IsSystem());
if (pBinderContextForLookup)
{
@@ -1558,15 +1556,21 @@ BOOL AssemblySpecBindingCache::StoreAssembly(AssemblySpec *pSpec, DomainAssembly
pSpec->SetBindingContext(pBinderContextForLookup);
}
}
-
+
AssemblyBinding *entry = (AssemblyBinding *) m_map.LookupValue(key, pSpec);
if (entry == (AssemblyBinding *) INVALIDENTRY)
{
AssemblyBindingHolder abHolder;
- entry = abHolder.CreateAssemblyBinding(m_pHeap);
- entry->Init(pSpec,pAssembly->GetFile(),pAssembly,NULL,m_pHeap, abHolder.GetPamTracker());
+ LoaderHeap* pHeap = m_pHeap;
+ if (pAssembly->IsCollectible())
+ {
+ pHeap = pAssembly->GetLoaderAllocator()->GetHighFrequencyHeap();
+ }
+
+ entry = abHolder.CreateAssemblyBinding(pHeap);
+ entry->Init(pSpec,pAssembly->GetFile(),pAssembly,NULL,pHeap, abHolder.GetPamTracker());
m_map.InsertValue(key, entry);
@@ -1625,6 +1629,7 @@ BOOL AssemblySpecBindingCache::StoreFile(AssemblySpec *pSpec, PEAssembly *pFile)
// On CoreCLR, we will use the BinderID as the key
ICLRPrivBinder* pBinderContextForLookup = pFile->GetBindingContext();
+
_ASSERTE(pBinderContextForLookup || pFile->IsSystem());
if (pBinderContextForLookup)
{
@@ -1644,9 +1649,27 @@ BOOL AssemblySpecBindingCache::StoreFile(AssemblySpec *pSpec, PEAssembly *pFile)
if (entry == (AssemblyBinding *) INVALIDENTRY)
{
AssemblyBindingHolder abHolder;
- entry = abHolder.CreateAssemblyBinding(m_pHeap);
- entry->Init(pSpec,pFile,NULL,NULL,m_pHeap, abHolder.GetPamTracker());
+ LoaderHeap* pHeap = m_pHeap;
+
+#ifndef CROSSGEN_COMPILE
+ if (pBinderContextForLookup != NULL)
+ {
+ LoaderAllocator* pLoaderAllocator = NULL;
+
+ // Assemblies loaded with AssemblyLoadContext need to use a different heap if
+ // marked as collectible
+ if (SUCCEEDED(pBinderContextForLookup->GetLoaderAllocator((LPVOID*)&pLoaderAllocator)))
+ {
+ _ASSERTE(pLoaderAllocator != NULL);
+ pHeap = pLoaderAllocator->GetHighFrequencyHeap();
+ }
+ }
+#endif // !CROSSGEN_COMPILE
+
+ entry = abHolder.CreateAssemblyBinding(pHeap);
+
+ entry->Init(pSpec,pFile,NULL,NULL,pHeap, abHolder.GetPamTracker());
m_map.InsertValue(key, entry);
abHolder.SuppressRelease();
@@ -1694,7 +1717,7 @@ BOOL AssemblySpecBindingCache::StoreException(AssemblySpec *pSpec, Exception* pE
UPTR key = (UPTR)pSpec->Hash();
- AssemblyBinding *entry = GetAssemblyBindingEntryForAssemblySpec(pSpec, TRUE);
+ AssemblyBinding *entry = LookupInternal(pSpec, TRUE);
if (entry == (AssemblyBinding *) INVALIDENTRY)
{
// TODO: Merge this with the failure lookup in the binder
@@ -1751,6 +1774,40 @@ BOOL AssemblySpecBindingCache::StoreException(AssemblySpec *pSpec, Exception* pE
}
}
+BOOL AssemblySpecBindingCache::RemoveAssembly(DomainAssembly* pAssembly)
+{
+ CONTRACT(BOOL)
+ {
+ INSTANCE_CHECK;
+ NOTHROW;
+ GC_TRIGGERS;
+ MODE_ANY;
+ PRECONDITION(pAssembly != NULL);
+ }
+ CONTRACT_END;
+ BOOL result = FALSE;
+ PtrHashMap::PtrIterator i = m_map.begin();
+ while (!i.end())
+ {
+ AssemblyBinding* entry = (AssemblyBinding*)i.GetValue();
+ if (entry->GetAssembly() == pAssembly)
+ {
+ UPTR key = i.GetKey();
+ m_map.DeleteValue(key, entry);
+
+ if (m_pHeap == NULL)
+ delete entry;
+ else
+ entry->~AssemblyBinding();
+
+ result = TRUE;
+ }
+ ++i;
+ }
+
+ RETURN result;
+}
+
/* static */
BOOL AssemblySpecHash::CompareSpecs(UPTR u1, UPTR u2)
{
@@ -1759,9 +1816,6 @@ BOOL AssemblySpecHash::CompareSpecs(UPTR u1, UPTR u2)
return AssemblySpecBindingCache::CompareSpecs(u1,u2);
}
-
-
-
/* static */
BOOL AssemblySpecBindingCache::CompareSpecs(UPTR u1, UPTR u2)
{
@@ -1772,8 +1826,6 @@ BOOL AssemblySpecBindingCache::CompareSpecs(UPTR u1, UPTR u2)
return a1->CompareEx(a2);
}
-
-
/* static */
BOOL DomainAssemblyCache::CompareBindingSpec(UPTR spec1, UPTR spec2)
{
@@ -1785,7 +1837,6 @@ BOOL DomainAssemblyCache::CompareBindingSpec(UPTR spec1, UPTR spec2)
return pSpec1->CompareEx(&pEntry2->spec);
}
-
DomainAssemblyCache::AssemblyEntry* DomainAssemblyCache::LookupEntry(AssemblySpec* pSpec)
{
CONTRACT (DomainAssemblyCache::AssemblyEntry*)
diff --git a/src/vm/assemblyspec.hpp b/src/vm/assemblyspec.hpp
index 6db0d1ac8f..bbcc2ea2b4 100644
--- a/src/vm/assemblyspec.hpp
+++ b/src/vm/assemblyspec.hpp
@@ -573,7 +573,7 @@ class AssemblySpecBindingCache
PtrHashMap m_map;
LoaderHeap *m_pHeap;
- AssemblySpecBindingCache::AssemblyBinding* GetAssemblyBindingEntryForAssemblySpec(AssemblySpec* pSpec, BOOL fThrow);
+ AssemblySpecBindingCache::AssemblyBinding* LookupInternal(AssemblySpec* pSpec, BOOL fThrow = FALSE);
public:
@@ -595,12 +595,14 @@ class AssemblySpecBindingCache
BOOL StoreException(AssemblySpec *pSpec, Exception* pEx);
+ BOOL RemoveAssembly(DomainAssembly* pAssembly);
+
DWORD Hash(AssemblySpec *pSpec)
{
WRAPPER_NO_CONTRACT;
return pSpec->Hash();
}
-
+
#if !defined(DACCESS_COMPILE)
void GetAllAssemblies(SetSHash<PTR_DomainAssembly>& assemblyList)
{
diff --git a/src/vm/ceeload.cpp b/src/vm/ceeload.cpp
index 8aaaae4713..cd9e485af7 100644
--- a/src/vm/ceeload.cpp
+++ b/src/vm/ceeload.cpp
@@ -3195,7 +3195,10 @@ void Module::SetDomainFile(DomainFile *pDomainFile)
// Allocate static handles now.
// NOTE: Bootstrapping issue with mscorlib - we will manually allocate later
- if (g_pPredefinedArrayTypes[ELEMENT_TYPE_OBJECT] != NULL)
+ // If the assembly is collectible, we don't initialize static handles for them
+ // as it is currently initialized through the DomainLocalModule::PopulateClass in MethodTable::CheckRunClassInitThrowing
+ // (If we don't do this, it would allocate here unused regular static handles that will be overridden later)
+ if (g_pPredefinedArrayTypes[ELEMENT_TYPE_OBJECT] != NULL && !GetAssembly()->IsCollectible())
AllocateRegularStaticHandles(pDomainFile->GetAppDomain());
}
diff --git a/src/vm/clrprivbinderloadfile.h b/src/vm/clrprivbinderloadfile.h
index 02c4053ff4..0aa2f33ef6 100644
--- a/src/vm/clrprivbinderloadfile.h
+++ b/src/vm/clrprivbinderloadfile.h
@@ -57,6 +57,10 @@ public:
ICLRPrivAssembly ** ppAssembly)
{ STATIC_CONTRACT_WRAPPER; return E_FAIL; }
+ STDMETHOD(GetLoaderAllocator)(
+ /* [retval][out] */ LoaderAllocator** pLoaderAllocator)
+ { STATIC_CONTRACT_WRAPPER; return E_FAIL; }
+
//=============================================================================================
// Class methods
//---------------------------------------------------------------------------------------------
@@ -145,4 +149,12 @@ public:
HRESULT * pResult,
ICLRPrivAssembly ** ppAssembly)
{ STATIC_CONTRACT_WRAPPER; return m_pBinder->FindAssemblyBySpec(pvAppDomain, pvAssemblySpec, pResult, ppAssembly); }
+
+ //---------------------------------------------------------------------------------------------
+ STDMETHOD(GetLoaderAllocator)(
+ LoaderAllocator** pLoaderAllocator)
+ {
+ WRAPPER_NO_CONTRACT;
+ return m_pBinder->GetLoaderAllocator(pLoaderAllocator);
+ }
};
diff --git a/src/vm/clrprivbinderwinrt.h b/src/vm/clrprivbinderwinrt.h
index 14c467044b..2bf1061d18 100644
--- a/src/vm/clrprivbinderwinrt.h
+++ b/src/vm/clrprivbinderwinrt.h
@@ -176,6 +176,12 @@ public:
}
}
+ STDMETHOD(GetLoaderAllocator)(
+ LPVOID * pLoaderAllocator)
+ {
+ return E_FAIL;
+ }
+
HRESULT FindWinRTAssemblyBySpec(
LPVOID pvAppDomain,
LPVOID pvAssemblySpec,
@@ -382,6 +388,13 @@ public:
return m_pBinder->FindAssemblyBySpec(pvAppDomain, pvAssemblySpec, pResult, ppAssembly);
}
+ STDMETHOD(GetLoaderAllocator)(
+ LPVOID * pLoaderAllocator)
+ {
+ WRAPPER_NO_CONTRACT;
+ return m_pBinder->GetLoaderAllocator(pLoaderAllocator);
+ }
+
//=============================================================================================
// ICLRPrivAssembly interface methods
diff --git a/src/vm/comdelegate.cpp b/src/vm/comdelegate.cpp
index d80d0deb89..749310bb67 100644
--- a/src/vm/comdelegate.cpp
+++ b/src/vm/comdelegate.cpp
@@ -508,51 +508,6 @@ VOID GenerateShuffleArray(MethodDesc* pInvoke, MethodDesc *pTargetMeth, SArray<S
}
-class ShuffleThunkCache : public StubCacheBase
-{
-private:
- //---------------------------------------------------------
- // Compile a static delegate shufflethunk. Always returns
- // STANDALONE since we don't interpret these things.
- //---------------------------------------------------------
- virtual void CompileStub(const BYTE *pRawStub,
- StubLinker *pstublinker)
- {
- STANDARD_VM_CONTRACT;
-
- ((CPUSTUBLINKER*)pstublinker)->EmitShuffleThunk((ShuffleEntry*)pRawStub);
- }
-
- //---------------------------------------------------------
- // Tells the StubCacheBase the length of a ShuffleEntryArray.
- //---------------------------------------------------------
- virtual UINT Length(const BYTE *pRawStub)
- {
- LIMITED_METHOD_CONTRACT;
- ShuffleEntry *pse = (ShuffleEntry*)pRawStub;
- while (pse->srcofs != ShuffleEntry::SENTINEL)
- {
- pse++;
- }
- return sizeof(ShuffleEntry) * (UINT)(1 + (pse - (ShuffleEntry*)pRawStub));
- }
-
- virtual void AddStub(const BYTE* pRawStub, Stub* pNewStub)
- {
- CONTRACTL
- {
- THROWS;
- GC_NOTRIGGER;
- MODE_ANY;
- }
- CONTRACTL_END;
-
-#ifndef CROSSGEN_COMPILE
- DelegateInvokeStubManager::g_pManager->AddStub(pNewStub);
-#endif
- }
-};
-
ShuffleThunkCache *COMDelegate::m_pShuffleThunkCache = NULL;
MulticastStubCache *COMDelegate::m_pSecureDelegateStubCache = NULL;
MulticastStubCache *COMDelegate::m_pMulticastStubCache = NULL;
@@ -579,7 +534,7 @@ void COMDelegate::Init()
LockOwner lock = {&COMDelegate::s_DelegateToFPtrHashCrst, IsOwnerOfCrst};
s_pDelegateToFPtrHash->Init(TRUE, &lock);
- m_pShuffleThunkCache = new ShuffleThunkCache();
+ m_pShuffleThunkCache = new ShuffleThunkCache(SystemDomain::GetGlobalLoaderAllocator()->GetStubHeap());
m_pMulticastStubCache = new MulticastStubCache();
m_pSecureDelegateStubCache = new MulticastStubCache();
}
@@ -642,7 +597,15 @@ Stub* COMDelegate::SetupShuffleThunk(MethodTable * pDelMT, MethodDesc *pTargetMe
StackSArray<ShuffleEntry> rShuffleEntryArray;
GenerateShuffleArray(pMD, pTargetMeth, &rShuffleEntryArray);
- Stub* pShuffleThunk = m_pShuffleThunkCache->Canonicalize((const BYTE *)&rShuffleEntryArray[0]);
+ ShuffleThunkCache* pShuffleThunkCache = m_pShuffleThunkCache;
+
+ LoaderAllocator* pLoaderAllocator = pDelMT->GetLoaderAllocator();
+ if (pLoaderAllocator->IsCollectible())
+ {
+ pShuffleThunkCache = ((AssemblyLoaderAllocator*)pLoaderAllocator)->GetShuffleThunkCache();
+ }
+
+ Stub* pShuffleThunk = pShuffleThunkCache->Canonicalize((const BYTE *)&rShuffleEntryArray[0]);
if (!pShuffleThunk)
{
COMPlusThrowOM();
diff --git a/src/vm/comdelegate.h b/src/vm/comdelegate.h
index 1f6d10b907..5877b48185 100644
--- a/src/vm/comdelegate.h
+++ b/src/vm/comdelegate.h
@@ -224,4 +224,53 @@ struct ShuffleEntry
#include <poppack.h>
+class ShuffleThunkCache : public StubCacheBase
+{
+public:
+ ShuffleThunkCache(LoaderHeap* heap) : StubCacheBase(heap)
+ {
+ }
+private:
+ //---------------------------------------------------------
+ // Compile a static delegate shufflethunk. Always returns
+ // STANDALONE since we don't interpret these things.
+ //---------------------------------------------------------
+ virtual void CompileStub(const BYTE *pRawStub,
+ StubLinker *pstublinker)
+ {
+ STANDARD_VM_CONTRACT;
+
+ ((CPUSTUBLINKER*)pstublinker)->EmitShuffleThunk((ShuffleEntry*)pRawStub);
+ }
+
+ //---------------------------------------------------------
+ // Tells the StubCacheBase the length of a ShuffleEntryArray.
+ //---------------------------------------------------------
+ virtual UINT Length(const BYTE *pRawStub)
+ {
+ LIMITED_METHOD_CONTRACT;
+ ShuffleEntry *pse = (ShuffleEntry*)pRawStub;
+ while (pse->srcofs != ShuffleEntry::SENTINEL)
+ {
+ pse++;
+ }
+ return sizeof(ShuffleEntry) * (UINT)(1 + (pse - (ShuffleEntry*)pRawStub));
+ }
+
+ virtual void AddStub(const BYTE* pRawStub, Stub* pNewStub)
+ {
+ CONTRACTL
+ {
+ THROWS;
+ GC_NOTRIGGER;
+ MODE_ANY;
+ }
+ CONTRACTL_END;
+
+#ifndef CROSSGEN_COMPILE
+ DelegateInvokeStubManager::g_pManager->AddStub(pNewStub);
+#endif
+ }
+};
+
#endif // _COMDELEGATE_H_
diff --git a/src/vm/coreassemblyspec.cpp b/src/vm/coreassemblyspec.cpp
index 1d3567e769..5b606dcd38 100644
--- a/src/vm/coreassemblyspec.cpp
+++ b/src/vm/coreassemblyspec.cpp
@@ -379,7 +379,7 @@ HRESULT BaseAssemblySpec::ParseName()
#if !defined(DACCESS_COMPILE) && !defined(CROSSGEN_COMPILE)
if (pDomain->GetFusionContext() != pDomain->GetTPABinderContext())
{
- pAppContext = (static_cast<CLRPrivBinderAssemblyLoadContext *>(pIUnknownBinder))->GetAppContext();
+ pAppContext = (static_cast<CLRPrivBinderAssemblyLoadContext *>(static_cast<ICLRPrivBinder*>(pIUnknownBinder)))->GetAppContext();
}
else
#endif // !defined(DACCESS_COMPILE) && !defined(CROSSGEN_COMPILE)
diff --git a/src/vm/domainfile.cpp b/src/vm/domainfile.cpp
index 314f165eea..6b30ee7c3a 100644
--- a/src/vm/domainfile.cpp
+++ b/src/vm/domainfile.cpp
@@ -1473,7 +1473,9 @@ DomainAssembly::DomainAssembly(AppDomain *pDomain, PEFile *pFile, LoaderAllocato
m_fCollectible(pLoaderAllocator->IsCollectible()),
m_fHostAssemblyPublished(false),
m_fCalculatedShouldLoadDomainNeutral(false),
- m_fShouldLoadDomainNeutral(false)
+ m_fShouldLoadDomainNeutral(false),
+ m_pLoaderAllocator(pLoaderAllocator),
+ m_NextDomainAssemblyInSameALC(NULL)
{
CONTRACTL
{
@@ -1485,13 +1487,6 @@ DomainAssembly::DomainAssembly(AppDomain *pDomain, PEFile *pFile, LoaderAllocato
pFile->ValidateForExecution();
-#ifndef CROSSGEN_COMPILE
- if (m_fCollectible)
- {
- ((AssemblyLoaderAllocator *)pLoaderAllocator)->SetDomainAssembly(this);
- }
-#endif
-
// !!! backout
m_hExposedAssemblyObject = NULL;
@@ -2005,7 +2000,7 @@ void DomainAssembly::Allocate()
// Go ahead and create new shared version of the assembly if possible
// <TODO> We will need to pass a valid OBJECREF* here in the future when we implement SCU </TODO>
- assemblyHolder = pAssembly = Assembly::Create(pSharedDomain, GetFile(), GetDebuggerInfoBits(), FALSE, pamTracker, NULL);
+ assemblyHolder = pAssembly = Assembly::Create(pSharedDomain, GetFile(), GetDebuggerInfoBits(), this->IsCollectible(), pamTracker, this->IsCollectible() ? this->GetLoaderAllocator() : NULL);
if (MissingDependenciesCheckDone())
pAssembly->SetMissingDependenciesCheckDone();
@@ -2040,7 +2035,7 @@ void DomainAssembly::Allocate()
// <TODO> We will need to pass a valid OBJECTREF* here in the future when we implement SCU </TODO>
SharedDomain * pSharedDomain = SharedDomain::GetDomain();
- assemblyHolder = pAssembly = Assembly::Create(pSharedDomain, GetFile(), GetDebuggerInfoBits(), FALSE, pamTracker, NULL);
+ assemblyHolder = pAssembly = Assembly::Create(pSharedDomain, GetFile(), GetDebuggerInfoBits(), this->IsCollectible(), pamTracker, this->IsCollectible() ? this->GetLoaderAllocator() : NULL);
pAssembly->SetIsTenured();
}
#endif // FEATURE_LOADER_OPTIMIZATION
@@ -2051,7 +2046,7 @@ void DomainAssembly::Allocate()
GetFile()->MakeMDImportPersistent();
// <TODO> We will need to pass a valid OBJECTREF* here in the future when we implement SCU </TODO>
- assemblyHolder = pAssembly = Assembly::Create(m_pDomain, GetFile(), GetDebuggerInfoBits(), FALSE, pamTracker, NULL);
+ assemblyHolder = pAssembly = Assembly::Create(m_pDomain, GetFile(), GetDebuggerInfoBits(), this->IsCollectible(), pamTracker, this->IsCollectible() ? this->GetLoaderAllocator() : NULL);
assemblyHolder->SetIsTenured();
}
diff --git a/src/vm/domainfile.h b/src/vm/domainfile.h
index b163da87c6..070c616ecb 100644
--- a/src/vm/domainfile.h
+++ b/src/vm/domainfile.h
@@ -81,7 +81,7 @@ class DomainFile
DomainFile() {LIMITED_METHOD_CONTRACT;};
#endif
- LoaderAllocator *GetLoaderAllocator();
+ virtual LoaderAllocator *GetLoaderAllocator();
PTR_AppDomain GetAppDomain()
{
@@ -513,6 +513,12 @@ public:
return PTR_PEAssembly(m_pFile);
}
+ LoaderAllocator *GetLoaderAllocator()
+ {
+ LIMITED_METHOD_CONTRACT;
+ return m_pLoaderAllocator;
+ }
+
#ifdef FEATURE_LOADER_OPTIMIZATION
public:
@@ -776,8 +782,21 @@ private:
Volatile<bool> m_fHostAssemblyPublished;
Volatile<bool> m_fCalculatedShouldLoadDomainNeutral;
Volatile<bool> m_fShouldLoadDomainNeutral;
+ PTR_LoaderAllocator m_pLoaderAllocator;
+ DomainAssembly* m_NextDomainAssemblyInSameALC;
public:
+ DomainAssembly* GetNextDomainAssemblyInSameALC()
+ {
+ return m_NextDomainAssemblyInSameALC;
+ }
+
+ void SetNextDomainAssemblyInSameALC(DomainAssembly* domainAssembly)
+ {
+ _ASSERTE(m_NextDomainAssemblyInSameALC == NULL);
+ m_NextDomainAssemblyInSameALC = domainAssembly;
+ }
+
// Indicates if the assembly can be cached in a binding cache such as AssemblySpecBindingCache.
inline bool CanUseWithBindingCache()
{ STATIC_CONTRACT_WRAPPER; return GetFile()->CanUseWithBindingCache(); }
diff --git a/src/vm/dynamicmethod.cpp b/src/vm/dynamicmethod.cpp
index 5fd552a0a1..ba97b3de5d 100644
--- a/src/vm/dynamicmethod.cpp
+++ b/src/vm/dynamicmethod.cpp
@@ -869,7 +869,7 @@ void DynamicMethodDesc::Destroy(BOOL fDomainUnload)
if (pLoaderAllocator->Release())
{
GCX_PREEMP();
- LoaderAllocator::GCLoaderAllocators(pLoaderAllocator->GetDomain()->AsAppDomain());
+ LoaderAllocator::GCLoaderAllocators(pLoaderAllocator);
}
}
}
diff --git a/src/vm/ecalllist.h b/src/vm/ecalllist.h
index c147ae716e..7a6cb315cf 100644
--- a/src/vm/ecalllist.h
+++ b/src/vm/ecalllist.h
@@ -548,6 +548,7 @@ FCFuncEnd()
FCFuncStart(gAssemblyLoadContextFuncs)
QCFuncElement("InitializeAssemblyLoadContext", AssemblyNative::InitializeAssemblyLoadContext)
+ QCFuncElement("PrepareForAssemblyLoadContextRelease", AssemblyNative::PrepareForAssemblyLoadContextRelease)
QCFuncElement("LoadFromPath", AssemblyNative::LoadFromPath)
QCFuncElement("InternalLoadUnmanagedDllFromPath", AssemblyNative::InternalLoadUnmanagedDllFromPath)
QCFuncElement("CanUseAppPathAssemblyLoadContextInCurrentDomain", AssemblyNative::CanUseAppPathAssemblyLoadContextInCurrentDomain)
diff --git a/src/vm/eventtrace.cpp b/src/vm/eventtrace.cpp
index 00d519d2b8..3e11983a56 100644
--- a/src/vm/eventtrace.cpp
+++ b/src/vm/eventtrace.cpp
@@ -7228,19 +7228,26 @@ VOID ETW::EnumerationLog::IterateCollectibleLoaderAllocator(AssemblyLoaderAlloca
ETW::MethodLog::SendEventsForJitMethods(NULL, pLoaderAllocator, enumerationOptions);
}
- Assembly *pAssembly = pLoaderAllocator->Id()->GetDomainAssembly()->GetAssembly();
- _ASSERTE(!pAssembly->IsDomainNeutral()); // Collectible Assemblies are not domain neutral.
-
- DomainModuleIterator domainModuleIterator = pLoaderAllocator->Id()->GetDomainAssembly()->IterateModules(kModIterIncludeLoaded);
- while (domainModuleIterator.Next())
+ // Iterate on all DomainAssembly loaded from the same AssemblyLoaderAllocator
+ DomainAssemblyIterator domainAssemblyIt = pLoaderAllocator->Id()->GetDomainAssemblyIterator();
+ while (!domainAssemblyIt.end())
{
- Module *pModule = domainModuleIterator.GetModule();
- ETW::EnumerationLog::IterateModule(pModule, enumerationOptions);
- }
+ Assembly *pAssembly = domainAssemblyIt->GetAssembly(); // TODO: handle iterator
+ _ASSERTE(!pAssembly->IsDomainNeutral()); // Collectible Assemblies are not domain neutral.
- if (enumerationOptions & ETW::EnumerationLog::EnumerationStructs::DomainAssemblyModuleUnload)
- {
- ETW::EnumerationLog::IterateAssembly(pAssembly, enumerationOptions);
+ DomainModuleIterator domainModuleIterator = domainAssemblyIt->IterateModules(kModIterIncludeLoaded);
+ while (domainModuleIterator.Next())
+ {
+ Module *pModule = domainModuleIterator.GetModule();
+ ETW::EnumerationLog::IterateModule(pModule, enumerationOptions);
+ }
+
+ if (enumerationOptions & ETW::EnumerationLog::EnumerationStructs::DomainAssemblyModuleUnload)
+ {
+ ETW::EnumerationLog::IterateAssembly(pAssembly, enumerationOptions);
+ }
+
+ domainAssemblyIt++;
}
// Load Jit Method events
diff --git a/src/vm/excep.cpp b/src/vm/excep.cpp
index da07c5ad99..306e4d0d83 100644
--- a/src/vm/excep.cpp
+++ b/src/vm/excep.cpp
@@ -6357,7 +6357,8 @@ static STRINGREF MissingMemberException_FormatSignature_Internal(I1ARRAYREF* ppP
psl->EmitUtf8(")");
}
psl->Emit8('\0');
- pstub = psl->Link();
+
+ pstub = psl->Link(NULL);
}
pString = StringObject::NewString( (LPCUTF8)(pstub->GetEntryPoint()) );
diff --git a/src/vm/hash.h b/src/vm/hash.h
index 0855ed481c..404392fff1 100644
--- a/src/vm/hash.h
+++ b/src/vm/hash.h
@@ -722,6 +722,14 @@ public:
return iter.end();
}
+ UPTR GetKey()
+ {
+ WRAPPER_NO_CONTRACT;
+ SUPPORTS_DAC;
+
+ return iter.GetKey();
+ }
+
PTR_VOID GetValue()
{
WRAPPER_NO_CONTRACT;
diff --git a/src/vm/i386/stublinkerx86.cpp b/src/vm/i386/stublinkerx86.cpp
index 02f3a61c63..ac5006eb8f 100644
--- a/src/vm/i386/stublinkerx86.cpp
+++ b/src/vm/i386/stublinkerx86.cpp
@@ -6040,6 +6040,7 @@ static void AppendGCLayout(ULONGARRAY &gcLayout, size_t baseOffset, BOOL fIsType
}
Stub * StubLinkerCPU::CreateTailCallCopyArgsThunk(CORINFO_SIG_INFO * pSig,
+ MethodDesc* pMD,
CorInfoHelperTailCallSpecialHandling flags)
{
STANDARD_VM_CONTRACT;
@@ -6429,7 +6430,8 @@ Stub * StubLinkerCPU::CreateTailCallCopyArgsThunk(CORINFO_SIG_INFO * pSig,
EncodeGCOffsets(pSl, gcLayout);
}
- return pSl->Link();
+ LoaderHeap* pHeap = pMD->GetLoaderAllocatorForCode()->GetStubHeap();
+ return pSl->Link(pHeap);
}
#endif // DACCESS_COMPILE
diff --git a/src/vm/i386/stublinkerx86.h b/src/vm/i386/stublinkerx86.h
index 55ba942920..44bfc79fd2 100644
--- a/src/vm/i386/stublinkerx86.h
+++ b/src/vm/i386/stublinkerx86.h
@@ -437,6 +437,7 @@ class StubLinkerCPU : public StubLinker
#ifdef _TARGET_AMD64_
static Stub * CreateTailCallCopyArgsThunk(CORINFO_SIG_INFO * pSig,
+ MethodDesc* pMD,
CorInfoHelperTailCallSpecialHandling flags);
#endif // _TARGET_AMD64_
diff --git a/src/vm/jithelpers.cpp b/src/vm/jithelpers.cpp
index 6d31a18729..9bb6d9889e 100644
--- a/src/vm/jithelpers.cpp
+++ b/src/vm/jithelpers.cpp
@@ -6238,6 +6238,8 @@ void InitJitHelperLogging()
ThrowLastError();
}
+ LoaderHeap* pHeap = SystemDomain::GetGlobalLoaderAllocator()->GetStubHeap();
+
// iterate through the jit helper tables replacing helpers with logging thunks
//
// NOTE: if NGEN'd images were NGEN'd with hard binding on then static helper
@@ -6278,7 +6280,7 @@ void InitJitHelperLogging()
#endif // _TARGET_AMD64_
pSl->EmitJITHelperLoggingThunk(GetEEFuncEntryPoint(hlpFunc->pfnHelper), (LPVOID)hlpFuncCount);
- Stub* pStub = pSl->Link();
+ Stub* pStub = pSl->Link(pHeap);
hlpFunc->pfnHelper = (void*)pStub->GetEntryPoint();
}
else
@@ -6335,7 +6337,7 @@ void InitJitHelperLogging()
#endif // _TARGET_AMD64_
pSl->EmitJITHelperLoggingThunk(GetEEFuncEntryPoint(dynamicHlpFunc->pfnHelper), (LPVOID)hlpFuncCount);
- Stub* pStub = pSl->Link();
+ Stub* pStub = pSl->Link(pHeap);
dynamicHlpFunc->pfnHelper = (void*)pStub->GetEntryPoint();
}
}
diff --git a/src/vm/jitinterface.cpp b/src/vm/jitinterface.cpp
index 0697adc0a7..4391167067 100644
--- a/src/vm/jitinterface.cpp
+++ b/src/vm/jitinterface.cpp
@@ -11731,14 +11731,13 @@ void* CEEJitInfo::getFieldAddress(CORINFO_FIELD_HANDLE fieldHnd,
_ASSERTE(!pMT->ContainsGenericVariables());
- // We must not call here for statics of collectible types.
- _ASSERTE(!pMT->Collectible());
-
void *base = NULL;
if (!field->IsRVA())
{
// <REVISIT_TODO>@todo: assert that the current method being compiled is unshared</REVISIT_TODO>
+ // We must not call here for statics of collectible types.
+ _ASSERTE(!pMT->Collectible());
// Allocate space for the local class if necessary, but don't trigger
// class construction.
@@ -13760,7 +13759,7 @@ void* CEEInfo::getTailCallCopyArgsThunk(CORINFO_SIG_INFO *pSig,
JIT_TO_EE_TRANSITION();
- Stub* pStub = CPUSTUBLINKER::CreateTailCallCopyArgsThunk(pSig, flags);
+ Stub* pStub = CPUSTUBLINKER::CreateTailCallCopyArgsThunk(pSig, m_pMethodBeingCompiled, flags);
ftn = (void*)pStub->GetEntryPoint();
diff --git a/src/vm/loaderallocator.cpp b/src/vm/loaderallocator.cpp
index 6b118ad92b..77f0c9e3c1 100644
--- a/src/vm/loaderallocator.cpp
+++ b/src/vm/loaderallocator.cpp
@@ -6,6 +6,10 @@
#include "common.h"
#include "stringliteralmap.h"
#include "virtualcallstub.h"
+#include "threadsuspend.h"
+#ifndef DACCESS_COMPILE
+#include "comdelegate.h"
+#endif
//*****************************************************************************
// Used by LoaderAllocator::Init for easier readability.
@@ -43,7 +47,7 @@ LoaderAllocator::LoaderAllocator()
m_cReferences = (UINT32)-1;
- m_pDomainAssemblyToDelete = NULL;
+ m_pFirstDomainAssemblyFromSameALCToDelete = NULL;
#ifdef FAT_DISPATCH_TOKENS
// DispatchTokenFat pointer table for token overflow scenarios. Lazily allocated.
@@ -66,6 +70,7 @@ LoaderAllocator::LoaderAllocator()
m_pLastUsedCodeHeap = NULL;
m_pLastUsedDynamicCodeHeap = NULL;
m_pJumpStubCache = NULL;
+ m_IsCollectible = false;
m_nLoaderAllocator = InterlockedIncrement64((LONGLONG *)&LoaderAllocator::cLoaderAllocatorsCreated);
}
@@ -344,15 +349,11 @@ LoaderAllocator * LoaderAllocator::GCLoaderAllocators_RemoveAssemblies(AppDomain
CONTRACTL
{
THROWS;
- GC_NOTRIGGER; // Because we are holding assembly list lock code:BaseDomain#AssemblyListLock
+ GC_TRIGGERS;
MODE_PREEMPTIVE;
SO_INTOLERANT;
}
CONTRACTL_END;
-
- _ASSERTE(pAppDomain->GetLoaderAllocatorReferencesLock()->OwnedByCurrentThread());
- _ASSERTE(pAppDomain->GetAssemblyListLock()->OwnedByCurrentThread());
-
// List of LoaderAllocators being deleted
LoaderAllocator * pFirstDestroyedLoaderAllocator = NULL;
@@ -392,6 +393,8 @@ LoaderAllocator * LoaderAllocator::GCLoaderAllocators_RemoveAssemblies(AppDomain
AppDomain::AssemblyIterator i;
// Iterate through every loader allocator, marking as we go
{
+ CrstHolder chAssemblyListLock(pAppDomain->GetAssemblyListLock());
+
i = pAppDomain->IterateAssembliesEx((AssemblyIterationFlags)(
kIncludeExecution | kIncludeLoaded | kIncludeCollected));
CollectibleAssemblyHolder<DomainAssembly *> pDomainAssembly;
@@ -416,6 +419,9 @@ LoaderAllocator * LoaderAllocator::GCLoaderAllocators_RemoveAssemblies(AppDomain
// Iterate through every loader allocator, unmarking marked loaderallocators, and
// build a free list of unmarked ones
{
+ CrstHolder chLoaderAllocatorReferencesLock(pAppDomain->GetLoaderAllocatorReferencesLock());
+ CrstHolder chAssemblyListLock(pAppDomain->GetAssemblyListLock());
+
i = pAppDomain->IterateAssembliesEx((AssemblyIterationFlags)(
kIncludeExecution | kIncludeLoaded | kIncludeCollected));
CollectibleAssemblyHolder<DomainAssembly *> pDomainAssembly;
@@ -436,10 +442,29 @@ LoaderAllocator * LoaderAllocator::GCLoaderAllocators_RemoveAssemblies(AppDomain
}
else if (!pLoaderAllocator->IsAlive())
{
- pLoaderAllocator->m_pLoaderAllocatorDestroyNext = pFirstDestroyedLoaderAllocator;
- // We will store a reference to this assembly, and use it later in this function
- pFirstDestroyedLoaderAllocator = pLoaderAllocator;
- _ASSERTE(pLoaderAllocator->m_pDomainAssemblyToDelete != NULL);
+ // Check that we don't have already this LoaderAllocator in the list to destroy
+ // (in case multiple assemblies are loaded in the same LoaderAllocator)
+ bool addAllocator = true;
+ LoaderAllocator * pCheckAllocatorToDestroy = pFirstDestroyedLoaderAllocator;
+ while (pCheckAllocatorToDestroy != NULL)
+ {
+ if (pCheckAllocatorToDestroy == pLoaderAllocator)
+ {
+ addAllocator = false;
+ break;
+ }
+
+ pCheckAllocatorToDestroy = pCheckAllocatorToDestroy->m_pLoaderAllocatorDestroyNext;
+ }
+
+ // Otherwise, we have a LoaderAllocator that we add to the list
+ if (addAllocator)
+ {
+ pLoaderAllocator->m_pLoaderAllocatorDestroyNext = pFirstDestroyedLoaderAllocator;
+ // We will store a reference to this assembly, and use it later in this function
+ pFirstDestroyedLoaderAllocator = pLoaderAllocator;
+ _ASSERTE(pLoaderAllocator->m_pFirstDomainAssemblyFromSameALCToDelete != NULL);
+ }
}
}
}
@@ -452,10 +477,27 @@ LoaderAllocator * LoaderAllocator::GCLoaderAllocators_RemoveAssemblies(AppDomain
while (pDomainLoaderAllocatorDestroyIterator != NULL)
{
_ASSERTE(!pDomainLoaderAllocatorDestroyIterator->IsAlive());
- _ASSERTE(pDomainLoaderAllocatorDestroyIterator->m_pDomainAssemblyToDelete != NULL);
-
- pAppDomain->RemoveAssembly_Unlocked(pDomainLoaderAllocatorDestroyIterator->m_pDomainAssemblyToDelete);
-
+
+ DomainAssemblyIterator domainAssemblyIt(pDomainLoaderAllocatorDestroyIterator->m_pFirstDomainAssemblyFromSameALCToDelete);
+
+ // Release all assemblies from the same ALC
+ while (!domainAssemblyIt.end())
+ {
+ DomainAssembly* domainAssemblyToRemove = domainAssemblyIt;
+ pAppDomain->RemoveAssembly(domainAssemblyToRemove);
+
+ if (!domainAssemblyToRemove->GetAssembly()->IsDynamic())
+ {
+ pAppDomain->RemoveFileFromCache(domainAssemblyToRemove->GetFile());
+ AssemblySpec spec;
+ spec.InitializeSpec(domainAssemblyToRemove->GetFile());
+ VERIFY(pAppDomain->RemoveAssemblyFromCache(domainAssemblyToRemove));
+ pAppDomain->RemoveNativeImageDependency(&spec);
+ }
+
+ domainAssemblyIt++;
+ }
+
pDomainLoaderAllocatorDestroyIterator = pDomainLoaderAllocatorDestroyIterator->m_pLoaderAllocatorDestroyNext;
}
@@ -467,7 +509,7 @@ LoaderAllocator * LoaderAllocator::GCLoaderAllocators_RemoveAssemblies(AppDomain
// Collect unreferenced assemblies, delete all their remaining resources.
//
//static
-void LoaderAllocator::GCLoaderAllocators(AppDomain * pAppDomain)
+void LoaderAllocator::GCLoaderAllocators(LoaderAllocator* pOriginalLoaderAllocator)
{
CONTRACTL
{
@@ -481,20 +523,16 @@ void LoaderAllocator::GCLoaderAllocators(AppDomain * pAppDomain)
// List of LoaderAllocators being deleted
LoaderAllocator * pFirstDestroyedLoaderAllocator = NULL;
- {
- CrstHolder chLoaderAllocatorReferencesLock(pAppDomain->GetLoaderAllocatorReferencesLock());
-
- // We will lock the assembly list, so no other thread can delete items from it while we are deleting
- // them.
- // Note: Because of the previously taken lock we could just lock during every enumeration, but this
- // is more robust for the future.
- // This lock switches thread to GC_NOTRIGGER (see code:BaseDomain#AssemblyListLock).
- CrstHolder chAssemblyListLock(pAppDomain->GetAssemblyListLock());
-
- pFirstDestroyedLoaderAllocator = GCLoaderAllocators_RemoveAssemblies(pAppDomain);
- }
+ AppDomain* pAppDomain = (AppDomain*)pOriginalLoaderAllocator->GetDomain();
+
+ // Collect all LoaderAllocators that don't have any more DomainAssemblies alive
+ // Note that it may not collect our pOriginalLoaderAllocator in case this
+ // LoaderAllocator hasn't loaded any DomainAssembly. We handle this case in the next loop.
// Note: The removed LoaderAllocators are not reachable outside of this function anymore, because we
// removed them from the assembly list
+ pFirstDestroyedLoaderAllocator = GCLoaderAllocators_RemoveAssemblies(pAppDomain);
+
+ bool isOriginalLoaderAllocatorFound = false;
// Iterate through free list, firing ETW events and notifying the debugger
LoaderAllocator * pDomainLoaderAllocatorDestroyIterator = pFirstDestroyedLoaderAllocator;
@@ -507,27 +545,90 @@ void LoaderAllocator::GCLoaderAllocators(AppDomain * pAppDomain)
// Set the unloaded flag before notifying the debugger
pDomainLoaderAllocatorDestroyIterator->SetIsUnloaded();
- DomainAssembly * pDomainAssembly = pDomainLoaderAllocatorDestroyIterator->m_pDomainAssemblyToDelete;
- _ASSERTE(pDomainAssembly != NULL);
- // Notify the debugger
- pDomainAssembly->NotifyDebuggerUnload();
-
+ DomainAssemblyIterator domainAssemblyIt(pDomainLoaderAllocatorDestroyIterator->m_pFirstDomainAssemblyFromSameALCToDelete);
+ while (!domainAssemblyIt.end())
+ {
+ // Notify the debugger
+ domainAssemblyIt->NotifyDebuggerUnload();
+ domainAssemblyIt++;
+ }
+
+ if (pDomainLoaderAllocatorDestroyIterator == pOriginalLoaderAllocator)
+ {
+ isOriginalLoaderAllocatorFound = true;
+ }
pDomainLoaderAllocatorDestroyIterator = pDomainLoaderAllocatorDestroyIterator->m_pLoaderAllocatorDestroyNext;
}
+
+ // If the original LoaderAllocator was not processed, it is most likely a LoaderAllocator without any loaded DomainAssembly
+ // But we still want to collect it so we add it to the list of LoaderAllocator to destroy
+ if (!isOriginalLoaderAllocatorFound && !pOriginalLoaderAllocator->IsAlive())
+ {
+ pOriginalLoaderAllocator->m_pLoaderAllocatorDestroyNext = pFirstDestroyedLoaderAllocator;
+ pFirstDestroyedLoaderAllocator = pOriginalLoaderAllocator;
+ }
// Iterate through free list, deleting DomainAssemblies
pDomainLoaderAllocatorDestroyIterator = pFirstDestroyedLoaderAllocator;
while (pDomainLoaderAllocatorDestroyIterator != NULL)
{
_ASSERTE(!pDomainLoaderAllocatorDestroyIterator->IsAlive());
- _ASSERTE(pDomainLoaderAllocatorDestroyIterator->m_pDomainAssemblyToDelete != NULL);
-
- delete pDomainLoaderAllocatorDestroyIterator->m_pDomainAssemblyToDelete;
+
+ DomainAssemblyIterator domainAssemblyIt(pDomainLoaderAllocatorDestroyIterator->m_pFirstDomainAssemblyFromSameALCToDelete);
+ while (!domainAssemblyIt.end())
+ {
+ delete (DomainAssembly*)domainAssemblyIt;
+ domainAssemblyIt++;
+ }
// We really don't have to set it to NULL as the assembly is not reachable anymore, but just in case ...
// (Also debugging NULL AVs if someone uses it accidentally is so much easier)
- pDomainLoaderAllocatorDestroyIterator->m_pDomainAssemblyToDelete = NULL;
-
- pDomainLoaderAllocatorDestroyIterator = pDomainLoaderAllocatorDestroyIterator->m_pLoaderAllocatorDestroyNext;
+ pDomainLoaderAllocatorDestroyIterator->m_pFirstDomainAssemblyFromSameALCToDelete = NULL;
+
+ pDomainLoaderAllocatorDestroyIterator->ReleaseManagedAssemblyLoadContext();
+
+ // The following code was previously happening on delete ~DomainAssembly->Terminate
+ // We are moving this part here in order to make sure that we can unload a LoaderAllocator
+ // that didn't have a DomainAssembly
+ // (we have now a LoaderAllocator with 0-n DomainAssembly)
+
+ // This cleanup code starts resembling parts of AppDomain::Terminate too much.
+ // It would be useful to reduce duplication and also establish clear responsibilities
+ // for LoaderAllocator::Destroy, Assembly::Terminate, LoaderAllocator::Terminate
+ // and LoaderAllocator::~LoaderAllocator. We need to establish how these
+ // cleanup paths interact with app-domain unload and process tear-down, too.
+
+ if (!IsAtProcessExit())
+ {
+ // Suspend the EE to do some clean up that can only occur
+ // while no threads are running.
+ GCX_COOP(); // SuspendEE may require current thread to be in Coop mode
+ // SuspendEE cares about the reason flag only when invoked for a GC
+ // Other values are typically ignored. If using SUSPEND_FOR_APPDOMAIN_SHUTDOWN
+ // is inappropriate, we can introduce a new flag or hijack an unused one.
+ ThreadSuspend::SuspendEE(ThreadSuspend::SUSPEND_FOR_APPDOMAIN_SHUTDOWN);
+ }
+
+ ExecutionManager::Unload(pDomainLoaderAllocatorDestroyIterator);
+ pDomainLoaderAllocatorDestroyIterator->UninitVirtualCallStubManager();
+
+ // TODO: Do we really want to perform this on each LoaderAllocator?
+ MethodTable::ClearMethodDataCache();
+ ClearJitGenericHandleCache(pAppDomain);
+
+ if (!IsAtProcessExit())
+ {
+ // Resume the EE.
+ ThreadSuspend::RestartEE(FALSE, TRUE);
+ }
+
+ // Because RegisterLoaderAllocatorForDeletion is modifying m_pLoaderAllocatorDestroyNext, we are saving it here
+ LoaderAllocator* pLoaderAllocatorDestroyNext = pDomainLoaderAllocatorDestroyIterator->m_pLoaderAllocatorDestroyNext;
+
+ // Register this LoaderAllocator for cleanup
+ pAppDomain->RegisterLoaderAllocatorForDeletion(pDomainLoaderAllocatorDestroyIterator);
+
+ // Go to next
+ pDomainLoaderAllocatorDestroyIterator = pLoaderAllocatorDestroyNext;
}
// Deleting the DomainAssemblies will have created a list of LoaderAllocator's on the AppDomain
@@ -554,19 +655,20 @@ BOOL QCALLTYPE LoaderAllocator::Destroy(QCall::LoaderAllocatorHandle pLoaderAllo
// This will probably change for shared code unloading
_ASSERTE(pID->GetType() == LAT_Assembly);
- Assembly *pAssembly = pID->GetDomainAssembly()->GetCurrentAssembly();
-
- //if not fully loaded, it is still domain specific, so just get one from DomainAssembly
- BaseDomain *pDomain = pAssembly ? pAssembly->Parent() : pID->GetDomainAssembly()->GetAppDomain();
-
- pLoaderAllocator->CleanupStringLiteralMap();
+ DomainAssembly* pDomainAssembly = (DomainAssembly*)(pID->GetDomainAssemblyIterator());
+ if (pDomainAssembly != NULL)
+ {
+ Assembly *pAssembly = pDomainAssembly->GetCurrentAssembly();
- // This will probably change for shared code unloading
- _ASSERTE(pDomain->IsAppDomain());
+ //if not fully loaded, it is still domain specific, so just get one from DomainAssembly
+ BaseDomain *pDomain = pAssembly ? pAssembly->Parent() : pDomainAssembly->GetAppDomain();
- AppDomain *pAppDomain = pDomain->AsAppDomain();
+ // This will probably change for shared code unloading
+ _ASSERTE(pDomain->IsAppDomain());
- pLoaderAllocator->m_pDomainAssemblyToDelete = pAssembly->GetDomainAssembly(pAppDomain);
+ AppDomain *pAppDomain = pDomain->AsAppDomain();
+ pLoaderAllocator->m_pFirstDomainAssemblyFromSameALCToDelete = pAssembly->GetDomainAssembly(pAppDomain);
+ }
// Iterate through all references to other loader allocators and decrement their reference
// count
@@ -587,7 +689,7 @@ BOOL QCALLTYPE LoaderAllocator::Destroy(QCall::LoaderAllocatorHandle pLoaderAllo
// may hit zero early.
if (fIsLastReferenceReleased)
{
- LoaderAllocator::GCLoaderAllocators(pAppDomain);
+ LoaderAllocator::GCLoaderAllocators(pLoaderAllocator);
}
STRESS_LOG1(LF_CLASSLOADER, LL_INFO100, "End LoaderAllocator::Destroy for loader allocator %p\n", reinterpret_cast<void *>(static_cast<PTR_LoaderAllocator>(pLoaderAllocator)));
@@ -1217,6 +1319,8 @@ void LoaderAllocator::Terminate()
}
#endif // FAT_DISPATCH_TOKENS
+ CleanupStringLiteralMap();
+
LOG((LF_CLASSLOADER, LL_INFO100, "End LoaderAllocator::Terminate for loader allocator %p\n", reinterpret_cast<void *>(static_cast<PTR_LoaderAllocator>(this))));
}
@@ -1400,7 +1504,7 @@ DispatchToken LoaderAllocator::TryLookupDispatchToken(UINT32 typeId, UINT32 slot
}
}
-void LoaderAllocator::InitVirtualCallStubManager(BaseDomain * pDomain, BOOL fCollectible /* = FALSE */)
+void LoaderAllocator::InitVirtualCallStubManager(BaseDomain * pDomain)
{
STANDARD_VM_CONTRACT;
@@ -1468,9 +1572,56 @@ BOOL LoaderAllocator::IsDomainNeutral()
return GetDomain()->IsSharedDomain();
}
+DomainAssemblyIterator::DomainAssemblyIterator(DomainAssembly* pFirstAssembly)
+{
+ pCurrentAssembly = pFirstAssembly;
+ pNextAssembly = pCurrentAssembly ? pCurrentAssembly->GetNextDomainAssemblyInSameALC() : NULL;
+}
+
+void DomainAssemblyIterator::operator++()
+{
+ pCurrentAssembly = pNextAssembly;
+ pNextAssembly = pCurrentAssembly ? pCurrentAssembly->GetNextDomainAssemblyInSameALC() : NULL;
+}
+
+void AssemblyLoaderAllocator::SetCollectible()
+{
+ CONTRACTL
+ {
+ THROWS;
+ }
+ CONTRACTL_END;
+
+ m_IsCollectible = true;
+#ifndef DACCESS_COMPILE
+ m_pShuffleThunkCache = new ShuffleThunkCache(m_pStubHeap);
+#endif
+}
+
#ifndef DACCESS_COMPILE
#ifndef CROSSGEN_COMPILE
+
+AssemblyLoaderAllocator::~AssemblyLoaderAllocator()
+{
+ if (m_binderToRelease != NULL)
+ {
+ VERIFY(m_binderToRelease->Release() == 0);
+ m_binderToRelease = NULL;
+ }
+
+ delete m_pShuffleThunkCache;
+ m_pShuffleThunkCache = NULL;
+}
+
+void AssemblyLoaderAllocator::RegisterBinder(CLRPrivBinderAssemblyLoadContext* binderToRelease)
+{
+ // When the binder is registered it will be released by the destructor
+ // of this instance
+ _ASSERTE(m_binderToRelease == NULL);
+ m_binderToRelease = binderToRelease;
+}
+
STRINGREF *LoaderAllocator::GetStringObjRefPtrFromUnicodeString(EEStringData *pStringData)
{
CONTRACTL
@@ -1666,6 +1817,25 @@ void LoaderAllocator::CleanupFailedTypeInit()
pLock->Unlink(pItem->m_pListLockEntry);
}
}
+
+void AssemblyLoaderAllocator::ReleaseManagedAssemblyLoadContext()
+{
+ CONTRACTL
+ {
+ THROWS;
+ GC_TRIGGERS;
+ MODE_ANY;
+ SO_INTOLERANT;
+ }
+ CONTRACTL_END;
+
+ if (m_binderToRelease != NULL)
+ {
+ // Release the managed ALC
+ m_binderToRelease->ReleaseLoadContext();
+ }
+}
+
#endif // !CROSSGEN_COMPILE
#endif // !DACCESS_COMPILE
diff --git a/src/vm/loaderallocator.hpp b/src/vm/loaderallocator.hpp
index b057283136..abfd4d0c45 100644
--- a/src/vm/loaderallocator.hpp
+++ b/src/vm/loaderallocator.hpp
@@ -29,6 +29,40 @@ enum LoaderAllocatorType
LAT_Assembly
};
+class CLRPrivBinderAssemblyLoadContext;
+
+// Iterator over a DomainAssembly in the same ALC
+class DomainAssemblyIterator
+{
+ DomainAssembly* pCurrentAssembly;
+ DomainAssembly* pNextAssembly;
+
+public:
+ DomainAssemblyIterator(DomainAssembly* pFirstAssembly);
+
+ bool end() const
+ {
+ return pCurrentAssembly == NULL;
+ }
+
+ operator DomainAssembly*() const
+ {
+ return pCurrentAssembly;
+ }
+
+ DomainAssembly* operator ->() const
+ {
+ return pCurrentAssembly;
+ }
+
+ void operator++();
+
+ void operator++(int dummy)
+ {
+ this->operator++();
+ }
+};
+
class LoaderAllocatorID
{
@@ -52,12 +86,11 @@ public:
VOID Init();
VOID Init(AppDomain* pAppDomain);
LoaderAllocatorType GetType();
- VOID SetDomainAssembly(DomainAssembly* pDomainAssembly);
- DomainAssembly* GetDomainAssembly();
+ VOID AddDomainAssembly(DomainAssembly* pDomainAssembly);
+ DomainAssemblyIterator GetDomainAssemblyIterator();
AppDomain* GetAppDomain();
BOOL Equals(LoaderAllocatorID* pId);
COUNT_T Hash();
- BOOL IsCollectible();
};
class StringLiteralMap;
@@ -101,6 +134,7 @@ protected:
bool m_fTerminated;
bool m_fMarked;
int m_nGCCount;
+ bool m_IsCollectible;
// Pre-allocated blocks of heap for collectible assemblies. Will be set to NULL as soon as it is
// used. See code in GetVSDHeapInitialBlock and GetCodeHeapInitialBlock
@@ -156,7 +190,7 @@ private:
Volatile<UINT32> m_cReferences;
// This will be set by code:LoaderAllocator::Destroy (from managed scout finalizer) and signalizes that
// the assembly was collected
- DomainAssembly * m_pDomainAssemblyToDelete;
+ DomainAssembly * m_pFirstDomainAssemblyFromSameALCToDelete;
BOOL CheckAddReference_Unlocked(LoaderAllocator *pOtherLA);
@@ -271,11 +305,11 @@ public:
// Checks if managed scout is alive - see code:#AssemblyPhases.
BOOL IsManagedScoutAlive()
{
- return (m_pDomainAssemblyToDelete == NULL);
+ return (m_pFirstDomainAssemblyFromSameALCToDelete == NULL);
}
// Collect unreferenced assemblies, delete all their remaining resources.
- static void GCLoaderAllocators(AppDomain *pAppDomain);
+ static void GCLoaderAllocators(LoaderAllocator* firstLoaderAllocator);
UINT64 GetCreationNumber() { LIMITED_METHOD_DAC_CONTRACT; return m_nLoaderAllocator; }
@@ -298,7 +332,7 @@ public:
DispatchToken TryLookupDispatchToken(UINT32 typeId, UINT32 slotNumber);
virtual LoaderAllocatorID* Id() =0;
- BOOL IsCollectible() { WRAPPER_NO_CONTRACT; return Id()->IsCollectible(); }
+ BOOL IsCollectible() { WRAPPER_NO_CONTRACT; return m_IsCollectible; }
#ifdef DACCESS_COMPILE
void EnumMemoryRegions(CLRDataEnumMemoryFlags flags);
@@ -400,6 +434,8 @@ public:
BOOL IsDomainNeutral();
void Init(BaseDomain *pDomain, BYTE *pExecutableHeapMemory = NULL);
void Terminate();
+ virtual void ReleaseManagedAssemblyLoadContext() {}
+
SIZE_T EstimateSize();
void SetupManagedTracking(LOADERALLOCATORREF *pLoaderAllocatorKeepAlive);
@@ -433,7 +469,7 @@ public:
STRINGREF *GetOrInternString(STRINGREF *pString);
void CleanupStringLiteralMap();
- void InitVirtualCallStubManager(BaseDomain *pDomain, BOOL fCollectible = FALSE);
+ void InitVirtualCallStubManager(BaseDomain *pDomain);
void UninitVirtualCallStubManager();
#ifndef CROSSGEN_COMPILE
inline VirtualCallStubManager *GetVirtualCallStubManager()
@@ -449,7 +485,7 @@ typedef VPTR(LoaderAllocator) PTR_LoaderAllocator;
class GlobalLoaderAllocator : public LoaderAllocator
{
VPTR_VTABLE_CLASS(GlobalLoaderAllocator, LoaderAllocator)
- VPTR_UNIQUE(VPTRU_LoaderAllocator+1);
+ VPTR_UNIQUE(VPTRU_LoaderAllocator+1)
BYTE m_ExecutableHeapInstance[sizeof(LoaderHeap)];
@@ -469,7 +505,7 @@ typedef VPTR(GlobalLoaderAllocator) PTR_GlobalLoaderAllocator;
class AppDomainLoaderAllocator : public LoaderAllocator
{
VPTR_VTABLE_CLASS(AppDomainLoaderAllocator, LoaderAllocator)
- VPTR_UNIQUE(VPTRU_LoaderAllocator+2);
+ VPTR_UNIQUE(VPTRU_LoaderAllocator+2)
protected:
LoaderAllocatorID m_Id;
@@ -482,23 +518,49 @@ public:
typedef VPTR(AppDomainLoaderAllocator) PTR_AppDomainLoaderAllocator;
+class ShuffleThunkCache;
+
class AssemblyLoaderAllocator : public LoaderAllocator
{
VPTR_VTABLE_CLASS(AssemblyLoaderAllocator, LoaderAllocator)
- VPTR_UNIQUE(VPTRU_LoaderAllocator+3);
+ VPTR_UNIQUE(VPTRU_LoaderAllocator+3)
protected:
- LoaderAllocatorID m_Id;
+ LoaderAllocatorID m_Id;
+ ShuffleThunkCache* m_pShuffleThunkCache;
public:
virtual LoaderAllocatorID* Id();
- AssemblyLoaderAllocator() : m_Id(LAT_Assembly) { LIMITED_METHOD_CONTRACT; }
+ AssemblyLoaderAllocator() : m_Id(LAT_Assembly), m_pShuffleThunkCache(NULL)
+#if !defined(DACCESS_COMPILE) && !defined(CROSSGEN_COMPILE)
+ , m_binderToRelease(NULL)
+#endif
+ { LIMITED_METHOD_CONTRACT; }
void Init(AppDomain *pAppDomain);
virtual BOOL CanUnload();
- void SetDomainAssembly(DomainAssembly *pDomainAssembly) { WRAPPER_NO_CONTRACT; m_Id.SetDomainAssembly(pDomainAssembly); }
+
+ void SetCollectible();
+
+ void AddDomainAssembly(DomainAssembly *pDomainAssembly)
+ {
+ WRAPPER_NO_CONTRACT;
+ m_Id.AddDomainAssembly(pDomainAssembly);
+ }
+
+ ShuffleThunkCache* GetShuffleThunkCache()
+ {
+ return m_pShuffleThunkCache;
+ }
#if !defined(DACCESS_COMPILE) && !defined(CROSSGEN_COMPILE)
virtual void RegisterHandleForCleanup(OBJECTHANDLE objHandle);
virtual void CleanupHandles();
+ CLRPrivBinderAssemblyLoadContext* GetBinder()
+ {
+ return m_binderToRelease;
+ }
+ virtual ~AssemblyLoaderAllocator();
+ void RegisterBinder(CLRPrivBinderAssemblyLoadContext* binderToRelease);
+ virtual void ReleaseManagedAssemblyLoadContext();
#endif // !defined(DACCESS_COMPILE) && !defined(CROSSGEN_COMPILE)
private:
@@ -514,6 +576,9 @@ private:
};
SList<HandleCleanupListItem> m_handleCleanupList;
+#if !defined(DACCESS_COMPILE) && !defined(CROSSGEN_COMPILE)
+ CLRPrivBinderAssemblyLoadContext* m_binderToRelease;
+#endif
};
typedef VPTR(AssemblyLoaderAllocator) PTR_AssemblyLoaderAllocator;
diff --git a/src/vm/loaderallocator.inl b/src/vm/loaderallocator.inl
index 3f23ac9c8c..46c253f4bd 100644
--- a/src/vm/loaderallocator.inl
+++ b/src/vm/loaderallocator.inl
@@ -56,51 +56,51 @@ inline void LoaderAllocatorID::Init()
m_type = LAT_Assembly;
};
-inline void LoaderAllocatorID::SetDomainAssembly(DomainAssembly* pAssembly)
+inline void LoaderAllocatorID::AddDomainAssembly(DomainAssembly* pAssembly)
{
LIMITED_METHOD_CONTRACT;
_ASSERTE(m_type == LAT_Assembly);
+
+    // Link the domain assemblies from the same ALC together in a singly-linked list
+ if (m_pDomainAssembly != NULL)
+ {
+ pAssembly->SetNextDomainAssemblyInSameALC(m_pDomainAssembly);
+ }
m_pDomainAssembly = pAssembly;
}
inline VOID* LoaderAllocatorID::GetValue()
{
- LIMITED_METHOD_DAC_CONTRACT;
+ LIMITED_METHOD_DAC_CONTRACT;
return m_pValue;
}
inline COUNT_T LoaderAllocatorID::Hash()
{
- LIMITED_METHOD_DAC_CONTRACT;
+ LIMITED_METHOD_DAC_CONTRACT;
return (COUNT_T)(SIZE_T)GetValue();
}
inline LoaderAllocatorType LoaderAllocatorID::GetType()
{
- LIMITED_METHOD_DAC_CONTRACT;
+ LIMITED_METHOD_DAC_CONTRACT;
return m_type;
}
-inline DomainAssembly* LoaderAllocatorID::GetDomainAssembly()
+inline DomainAssemblyIterator LoaderAllocatorID::GetDomainAssemblyIterator()
{
- LIMITED_METHOD_DAC_CONTRACT;
+ LIMITED_METHOD_DAC_CONTRACT;
_ASSERTE(m_type == LAT_Assembly);
- return m_pDomainAssembly;
+ return DomainAssemblyIterator(m_pDomainAssembly);
}
inline AppDomain *LoaderAllocatorID::GetAppDomain()
{
- LIMITED_METHOD_DAC_CONTRACT;
+ LIMITED_METHOD_DAC_CONTRACT;
_ASSERTE(m_type == LAT_AppDomain);
return m_pAppDomain;
}
-inline BOOL LoaderAllocatorID::IsCollectible()
-{
- LIMITED_METHOD_DAC_CONTRACT;
- return m_type == LAT_Assembly;
-}
-
inline LoaderAllocatorID* AssemblyLoaderAllocator::Id()
{
LIMITED_METHOD_DAC_CONTRACT;
diff --git a/src/vm/methodtable.inl b/src/vm/methodtable.inl
index 9e5df0262c..f669f23a98 100644
--- a/src/vm/methodtable.inl
+++ b/src/vm/methodtable.inl
@@ -197,7 +197,7 @@ inline void MethodTable::SetLoaderAllocator(LoaderAllocator* pAllocator)
LIMITED_METHOD_CONTRACT;
_ASSERTE(pAllocator == GetLoaderAllocator());
- if (pAllocator->Id()->IsCollectible())
+ if (pAllocator->IsCollectible())
{
SetFlag(enum_flag_Collectible);
}
diff --git a/src/vm/methodtablebuilder.cpp b/src/vm/methodtablebuilder.cpp
index ef9c37c5d5..08474ad3cf 100644
--- a/src/vm/methodtablebuilder.cpp
+++ b/src/vm/methodtablebuilder.cpp
@@ -1723,7 +1723,7 @@ MethodTableBuilder::BuildMethodTableThrowing(
// the offsets of our fields will depend on this. For the dynamic case (which requires
// an extra indirection (indirect depending of methodtable) we'll allocate the slot
// in setupmethodtable
- if (((pModule->IsReflection() || bmtGenerics->HasInstantiation() || !pModule->IsStaticStoragePrepared(cl)) &&
+ if (((pAllocator->IsCollectible() || pModule->IsReflection() || bmtGenerics->HasInstantiation() || !pModule->IsStaticStoragePrepared(cl)) &&
(bmtVT->GetClassCtorSlotIndex() != INVALID_SLOT_INDEX || bmtEnumFields->dwNumStaticFields !=0))
#ifdef EnC_SUPPORTED
// Classes in modules that have been edited (would do on class level if there were a
@@ -3187,12 +3187,6 @@ MethodTableBuilder::EnumerateClassMethods()
type = METHOD_TYPE_NORMAL;
}
- // PInvoke methods are not permitted on collectible types
- if ((type == METHOD_TYPE_NDIRECT) && GetAssembly()->IsCollectible())
- {
- BuildMethodTableThrowException(IDS_CLASSLOAD_COLLECTIBLEPINVOKE);
- }
-
// Generic methods should always be METHOD_TYPE_INSTANTIATED
if ((numGenericMethodArgs != 0) && (type != METHOD_TYPE_INSTANTIATED))
{
diff --git a/src/vm/stublink.cpp b/src/vm/stublink.cpp
index 631f51150b..d9715b7630 100644
--- a/src/vm/stublink.cpp
+++ b/src/vm/stublink.cpp
@@ -195,7 +195,7 @@ FindStubFunctionEntry (
}
-void UnregisterUnwindInfoInLoaderHeapCallback (PVOID pvAllocationBase, SIZE_T cbReserved)
+bool UnregisterUnwindInfoInLoaderHeapCallback (PVOID pvArgs, PVOID pvAllocationBase, SIZE_T cbReserved)
{
CONTRACTL
{
@@ -251,6 +251,8 @@ void UnregisterUnwindInfoInLoaderHeapCallback (PVOID pvAllocationBase, SIZE_T cb
ppPrevStubHeapSegment = &pStubHeapSegment->pNext;
}
}
+
+ return false; // Keep enumerating
}
@@ -264,7 +266,7 @@ VOID UnregisterUnwindInfoInLoaderHeap (UnlockedLoaderHeap *pHeap)
}
CONTRACTL_END;
- pHeap->EnumPageRegions(&UnregisterUnwindInfoInLoaderHeapCallback);
+ pHeap->EnumPageRegions(&UnregisterUnwindInfoInLoaderHeapCallback, NULL /* pvArgs */);
#ifdef _DEBUG
pHeap->m_fStubUnwindInfoUnregistered = TRUE;
@@ -854,7 +856,7 @@ Stub *StubLinker::LinkInterceptor(LoaderHeap *pHeap, Stub* interceptee, void *pR
, UnwindInfoSize(globalsize)
#endif
);
- bool fSuccess; fSuccess = EmitStub(pStub, globalsize);
+ bool fSuccess; fSuccess = EmitStub(pStub, globalsize, pHeap);
#ifdef STUBLINKER_GENERATES_UNWIND_INFO
if (fSuccess)
@@ -910,7 +912,7 @@ Stub *StubLinker::Link(LoaderHeap *pHeap, DWORD flags)
);
ASSERT(pStub != NULL);
- bool fSuccess; fSuccess = EmitStub(pStub, globalsize);
+ bool fSuccess; fSuccess = EmitStub(pStub, globalsize, pHeap);
#ifdef STUBLINKER_GENERATES_UNWIND_INFO
if (fSuccess)
@@ -1072,7 +1074,7 @@ int StubLinker::CalculateSize(int* pGlobalSize)
return globalsize + datasize;
}
-bool StubLinker::EmitStub(Stub* pStub, int globalsize)
+bool StubLinker::EmitStub(Stub* pStub, int globalsize, LoaderHeap* pHeap)
{
STANDARD_VM_CONTRACT;
@@ -1157,7 +1159,7 @@ bool StubLinker::EmitStub(Stub* pStub, int globalsize)
#ifdef STUBLINKER_GENERATES_UNWIND_INFO
if (pStub->HasUnwindInfo())
{
- if (!EmitUnwindInfo(pStub, globalsize))
+ if (!EmitUnwindInfo(pStub, globalsize, pHeap))
return false;
}
#endif // STUBLINKER_GENERATES_UNWIND_INFO
@@ -1308,7 +1310,34 @@ UNWIND_CODE *StubLinker::AllocUnwindInfo (UCHAR Op, UCHAR nExtraSlots /*= 0*/)
}
#endif // defined(_TARGET_AMD64_)
-bool StubLinker::EmitUnwindInfo(Stub* pStub, int globalsize)
+struct FindBlockArgs
+{
+ BYTE *pCode;
+ BYTE *pBlockBase;
+ SIZE_T cbBlockSize;
+};
+
+bool FindBlockCallback (PTR_VOID pvArgs, PTR_VOID pvAllocationBase, SIZE_T cbReserved)
+{
+ CONTRACTL
+ {
+ NOTHROW;
+ GC_TRIGGERS;
+ }
+ CONTRACTL_END;
+
+ FindBlockArgs* pArgs = (FindBlockArgs*)pvArgs;
+ if (pArgs->pCode >= pvAllocationBase && (pArgs->pCode < ((BYTE *)pvAllocationBase + cbReserved)))
+ {
+ pArgs->pBlockBase = (BYTE*)pvAllocationBase;
+ pArgs->cbBlockSize = cbReserved;
+ return true;
+ }
+
+ return false;
+}
+
+bool StubLinker::EmitUnwindInfo(Stub* pStub, int globalsize, LoaderHeap* pHeap)
{
STANDARD_VM_CONTRACT;
@@ -1316,19 +1345,21 @@ bool StubLinker::EmitUnwindInfo(Stub* pStub, int globalsize)
//
// Determine the lower bound of the address space containing the stub.
- // The properties of individual pages may change, but the bounds of a
- // VirtualAlloc(MEM_RESERVE)'d region will never change.
//
- MEMORY_BASIC_INFORMATION mbi;
+ FindBlockArgs findBlockArgs;
+ findBlockArgs.pCode = pCode;
+ findBlockArgs.pBlockBase = NULL;
+
+ pHeap->EnumPageRegions(&FindBlockCallback, &findBlockArgs);
- if (sizeof(mbi) != ClrVirtualQuery(pCode, &mbi, sizeof(mbi)))
+ if (findBlockArgs.pBlockBase == NULL)
{
// REVISIT_TODO better exception
COMPlusThrowOM();
}
- BYTE *pbRegionBaseAddress = (BYTE*)mbi.AllocationBase;
+ BYTE *pbRegionBaseAddress = findBlockArgs.pBlockBase;
#ifdef _DEBUG
static SIZE_T MaxSegmentSize = -1;
@@ -1805,39 +1836,12 @@ bool StubLinker::EmitUnwindInfo(Stub* pStub, int globalsize)
if (!pStubHeapSegment)
{
//
- // Determine the upper bound of the address space containing the stub.
- // Start with stub region's allocation base, and query for each
- // successive region's allocation base until it changes or we hit an
- // unreserved region.
- //
-
- PBYTE pbCurrentBase = pbBaseAddress;
-
- for (;;)
- {
- if (sizeof(mbi) != ClrVirtualQuery(pbCurrentBase, &mbi, sizeof(mbi)))
- {
- // REVISIT_TODO better exception
- COMPlusThrowOM();
- }
-
- // AllocationBase is undefined if this is set.
- if (mbi.State & MEM_FREE)
- break;
-
- if (pbRegionBaseAddress != mbi.AllocationBase)
- break;
-
- pbCurrentBase += mbi.RegionSize;
- }
-
- //
// RtlInstallFunctionTableCallback will only accept a ULONG for the
// region size. We've already checked above that the RUNTIME_FUNCTION
// offsets will work relative to pbBaseAddress.
//
- SIZE_T cbSegment = pbCurrentBase - pbBaseAddress;
+ SIZE_T cbSegment = findBlockArgs.cbBlockSize;
if (cbSegment > MaxSegmentSize)
cbSegment = MaxSegmentSize;
diff --git a/src/vm/stublink.h b/src/vm/stublink.h
index d7f0034587..41c11ebb49 100644
--- a/src/vm/stublink.h
+++ b/src/vm/stublink.h
@@ -281,7 +281,6 @@ public:
//
// Throws exception on failure.
//---------------------------------------------------------------
- Stub *Link(DWORD flags = 0) { WRAPPER_NO_CONTRACT; return Link(NULL, flags); }
Stub *Link(LoaderHeap *heap, DWORD flags = 0);
//---------------------------------------------------------------
@@ -411,11 +410,11 @@ private:
// Writes out the code element into memory following the
// stub object.
- bool EmitStub(Stub* pStub, int globalsize);
+ bool EmitStub(Stub* pStub, int globalsize, LoaderHeap* pHeap);
CodeRun *GetLastCodeRunIfAny();
- bool EmitUnwindInfo(Stub* pStub, int globalsize);
+ bool EmitUnwindInfo(Stub* pStub, int globalsize, LoaderHeap* pHeap);
#if defined(_TARGET_AMD64_) && defined(STUBLINKER_GENERATES_UNWIND_INFO)
UNWIND_CODE *AllocUnwindInfo (UCHAR Op, UCHAR nExtraSlots = 0);