author    Koundinya Veluri <kouvel@microsoft.com>  2017-03-01 20:52:15 -0800
committer GitHub <noreply@github.com>              2017-03-01 20:52:15 -0800
commit    4bafc1004b99013eaa58450e4f974dc7169b5af1 (patch)
tree      c0238dcc72437a366a7f96e1aa5af4d5bc87e365 /src/vm/amd64
parent    1bf3bbb44397875e94ae95b8527fd1928b5373fe (diff)
Preallocate jump stubs for dynamic methods (#9883)
- This eliminates the possibility of running into an out-of-memory situation after compiling the method
- The temporary entry points block containing FixupPrecodes is extended for dynamic methods to include sufficient space for jump stubs
- When the target is too far for the FixupPrecode to encode a short relative jump, it instead does a short relative call or jump to the corresponding jump stub, which does an absolute jump to the target
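For context on the mechanism described above: a jump stub on amd64 is a small, fixed-size code fragment that can reach any 64-bit address, so a rel32 call or jump that cannot span the distance to the real target can point at the nearby stub instead. The sketch below is illustrative and not taken from this commit; it shows one common absolute-jump encoding (mov rax, imm64; jmp rax), and the exact bytes emitted by emitJump/emitBackToBackJump may differ.

    #include <cstdint>
    #include <cstring>

    // Illustrative sketch: write a 12-byte absolute jump at 'stub' that
    // transfers control to 'target' no matter how far away it is.
    // Clobbers rax, which is acceptable at a call/jump boundary on amd64.
    //   48 B8 <imm64>   mov rax, imm64
    //   FF E0           jmp rax
    static void WriteAbsoluteJumpStub(uint8_t* stub, uint64_t target)
    {
        stub[0] = 0x48;                    // REX.W prefix
        stub[1] = 0xB8;                    // opcode: mov rax, imm64
        std::memcpy(&stub[2], &target, 8); // 64-bit target address
        stub[10] = 0xFF;                   // opcode: jmp r/m64
        stub[11] = 0xE0;                   // ModRM: jmp rax
    }

Because the temporary entry point block for a dynamic method now reserves space for such a stub up front, patching the FixupPrecode after JIT compilation can no longer fail for lack of memory.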
Diffstat (limited to 'src/vm/amd64')
-rw-r--r--  src/vm/amd64/cgenamd64.cpp  29
-rw-r--r--  src/vm/amd64/cgencpu.h       4
2 files changed, 33 insertions, 0 deletions
diff --git a/src/vm/amd64/cgenamd64.cpp b/src/vm/amd64/cgenamd64.cpp
index 51aac1ebc6..497abcd502 100644
--- a/src/vm/amd64/cgenamd64.cpp
+++ b/src/vm/amd64/cgenamd64.cpp
@@ -727,6 +727,35 @@ INT32 rel32UsingJumpStub(INT32 UNALIGNED * pRel32, PCODE target, MethodDesc *pMe
     return static_cast<INT32>(offset);
 }
 
+INT32 rel32UsingPreallocatedJumpStub(INT32 UNALIGNED * pRel32, PCODE target, PCODE jumpStubAddr)
+{
+    CONTRACTL
+    {
+        THROWS; // emitBackToBackJump may throw (see emitJump)
+        GC_NOTRIGGER;
+    }
+    CONTRACTL_END;
+
+    TADDR baseAddr = (TADDR)pRel32 + 4;
+    _ASSERTE(FitsInI4(jumpStubAddr - baseAddr));
+
+    INT_PTR offset = target - baseAddr;
+    if (!FitsInI4(offset) INDEBUG(|| PEDecoder::GetForceRelocs()))
+    {
+        offset = jumpStubAddr - baseAddr;
+        if (!FitsInI4(offset))
+        {
+            _ASSERTE(!"jump stub was not in expected range");
+            EEPOLICY_HANDLE_FATAL_ERROR(COR_E_EXECUTIONENGINE);
+        }
+
+        emitBackToBackJump((LPBYTE)jumpStubAddr, (LPVOID)target);
+    }
+
+    _ASSERTE(FitsInI4(offset));
+    return static_cast<INT32>(offset);
+}
+
 BOOL DoesSlotCallPrestub(PCODE pCode)
 {
     CONTRACTL {
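A brief aside on the range check in rel32UsingPreallocatedJumpStub above (illustrative, not from the commit): the displacement is measured from the first byte after the 4-byte rel32 field, and the direct form is usable only if that displacement fits in a signed 32 bits. A minimal standalone version of the check, with a hypothetical FitsInInt32 helper standing in for FitsInI4:

    #include <cstdint>

    // Hypothetical stand-in for FitsInI4: true if 'value' is representable
    // as a signed 32-bit displacement.
    static bool FitsInInt32(intptr_t value)
    {
        return value == static_cast<intptr_t>(static_cast<int32_t>(value));
    }

    // A rel32 jmp/call reaches 'target' only if the distance from the byte
    // after the rel32 operand is within +/-2GB.
    static bool Rel32Reaches(const int32_t *pRel32, uintptr_t target)
    {
        uintptr_t baseAddr = (uintptr_t)pRel32 + 4;       // end of the rel32 field
        intptr_t offset = (intptr_t)(target - baseAddr);  // wraps like TADDR math
        return FitsInInt32(offset);
    }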
diff --git a/src/vm/amd64/cgencpu.h b/src/vm/amd64/cgencpu.h
index 769f4029ee..2d4dce0e6e 100644
--- a/src/vm/amd64/cgencpu.h
+++ b/src/vm/amd64/cgencpu.h
@@ -57,6 +57,7 @@ EXTERN_C void FastCallFinalizeWorker(Object *obj, PCODE funcPtr);
 //#define HAS_REMOTING_PRECODE 1 // TODO: Implement
 #define HAS_FIXUP_PRECODE 1
 #define HAS_FIXUP_PRECODE_CHUNKS 1
+#define FIXUP_PRECODE_PREALLOCATE_DYNAMIC_METHOD_JUMP_STUBS 1
 
 // ThisPtrRetBufPrecode one is necessary for closed delegates over static methods with return buffer
 #define HAS_THISPTR_RETBUF_PRECODE 1
@@ -381,6 +382,9 @@ void EncodeLoadAndJumpThunk (LPBYTE pBuffer, LPVOID pv, LPVOID pTarget);
 // Get Rel32 destination, emit jumpStub if necessary
 INT32 rel32UsingJumpStub(INT32 UNALIGNED * pRel32, PCODE target, MethodDesc *pMethod, LoaderAllocator *pLoaderAllocator = NULL);
 
+// Get Rel32 destination, emit jumpStub if necessary into a preallocated location
+INT32 rel32UsingPreallocatedJumpStub(INT32 UNALIGNED * pRel32, PCODE target, PCODE jumpStubAddr);
+
 void emitCOMStubCall (ComCallMethodDesc *pCOMMethod, PCODE target);
 void emitJump(LPBYTE pBuffer, LPVOID target);
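As a usage note (hypothetical, not part of this commit): a caller that owns the rel32 operand of a precode's jmp or call would store the returned displacement back into that operand; the hardware then resolves the destination as the address after the rel32 field plus the displacement, matching the baseAddr computed in cgenamd64.cpp above. The helper name and the plain store below are assumptions; the runtime's real call sites additionally handle writable code pages and interlocked updates, and the sketch assumes the declarations from cgencpu.h are in scope.

    // Hypothetical caller: make the rel32 operand of a jmp/call reach
    // 'target', falling back to the jump stub preallocated next to the
    // precode when 'target' is outside the +/-2GB rel32 range.
    void PatchRel32Site(INT32 UNALIGNED *pRel32, PCODE target, PCODE preallocatedJumpStub)
    {
        INT32 rel32 = rel32UsingPreallocatedJumpStub(pRel32, target, preallocatedJumpStub);
        *pRel32 = rel32; // destination = (address after rel32 field) + rel32
    }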