// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
.intel_syntax noprefix
#include "unixasmmacros.inc"
#include "asmconstants.h"
//
// METHODDESC_REGISTER: UMEntryThunk*
//
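//
// Summary (derived from the body below): spills all integer and floating
// point argument registers, calls TheUMEntryPrestubWorker(UMEntryThunk*)
// to resolve the real entry point (returned in rax), restores the argument
// registers untouched, and tail-calls that entry point.
//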
NESTED_ENTRY TheUMEntryPrestub, _TEXT, UnhandledExceptionHandlerUnix
PUSH_ARGUMENT_REGISTERS
// +8 to realign the stack: the return address plus the six pushed integer
// argument registers leave RSP at 8 mod 16
alloc_stack (SIZEOF_MAX_FP_ARG_SPILL + 8)
SAVE_FLOAT_ARGUMENT_REGISTERS 0
END_PROLOGUE
mov rdi, METHODDESC_REGISTER
call C_FUNC(TheUMEntryPrestubWorker)
// we are going to tail call the exec stub that we just set up
RESTORE_FLOAT_ARGUMENT_REGISTERS 0
free_stack (SIZEOF_MAX_FP_ARG_SPILL + 8)
POP_ARGUMENT_REGISTERS
TAILJMP_RAX
NESTED_END TheUMEntryPrestub, _TEXT
//
// METHODDESC_REGISTER: UMEntryThunk*
//
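//
// Rough control flow of this stub, as a C-style sketch (illustrative only;
// the names below match the labels and helpers used in the body):
//
//   Thread *pThread = GetThread();
//   if (pThread == NULL)
//       pThread = CreateThreadBlockThrow();              // DoThreadSetup
//   if (pThread->m_fPreemptiveGCDisabled)
//       ReversePInvokeBadTransition();                   // InvalidTransition
//   pThread->m_fPreemptiveGCDisabled = 1;                // enter cooperative mode
//   if (g_TrapReturningThreads)
//       UMThunkStubRareDisableWorker(pThread, pUMEntry); // rare GC suspension path
//   // copy any stack args, reload the spilled register args,
//   // then call the thunk's IL marshaling stub
//   pThread->m_fPreemptiveGCDisabled = 0;                // back to preemptive mode
//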
NESTED_ENTRY UMThunkStub, _TEXT, UnhandledExceptionHandlerUnix
#define UMThunkStubAMD64_FIXED_STACK_ALLOC_SIZE (SIZEOF_MAX_INT_ARG_SPILL + SIZEOF_MAX_FP_ARG_SPILL + 0x8)
#define UMThunkStubAMD64_XMM_SAVE_OFFSET 0x0
#define UMThunkStubAMD64_INT_ARG_OFFSET (SIZEOF_MAX_FP_ARG_SPILL + 0x8)
#define UMThunkStubAMD64_METHODDESC_OFFSET SIZEOF_MAX_FP_ARG_SPILL
#define UMThunkStubAMD64_RBP_OFFSET (UMThunkStubAMD64_FIXED_STACK_ALLOC_SIZE + 8)
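// Stack frame layout after the prologue, lowest address first: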
// {optional stack args passed to callee} <-- new RSP
// xmm0 <-- RBP
// xmm1
// xmm2
// xmm3
// xmm4
// xmm5
// xmm6
// xmm7
// METHODDESC_REGISTER
// rdi
// rsi
// rcx
// rdx
// r8
// r9
// r12
// rbp
// return address <-- entry RSP
push_nonvol_reg rbp
mov rbp, rsp
push_nonvol_reg r12 // stack_args
alloc_stack UMThunkStubAMD64_FIXED_STACK_ALLOC_SIZE
save_reg_postrsp rdi, (UMThunkStubAMD64_INT_ARG_OFFSET)
save_reg_postrsp rsi, (UMThunkStubAMD64_INT_ARG_OFFSET + 0x08)
save_reg_postrsp rdx, (UMThunkStubAMD64_INT_ARG_OFFSET + 0x10)
save_reg_postrsp rcx, (UMThunkStubAMD64_INT_ARG_OFFSET + 0x18)
save_reg_postrsp r8, (UMThunkStubAMD64_INT_ARG_OFFSET + 0x20)
save_reg_postrsp r9, (UMThunkStubAMD64_INT_ARG_OFFSET + 0x28)
save_reg_postrsp METHODDESC_REGISTER, UMThunkStubAMD64_METHODDESC_OFFSET
SAVE_FLOAT_ARGUMENT_REGISTERS UMThunkStubAMD64_XMM_SAVE_OFFSET
set_cfa_register rbp, (2*8)
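// CFA = rbp + 0x10 (past the saved rbp and the return address), i.e. the caller's RSP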
END_PROLOGUE
//
// Call GetThread()
//
call C_FUNC(GetThread)
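// rax <- Thread* for the current thread, or NULL if none has been set up yet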
test rax, rax
jz LOCAL_LABEL(DoThreadSetup)
LOCAL_LABEL(HaveThread):
mov r12, rax // r12 <- Thread*
// Fail fast if a native-callable method is invoked via ldftn and calli.
cmp dword ptr [r12 + OFFSETOF__Thread__m_fPreemptiveGCDisabled], 1
jz LOCAL_LABEL(InvalidTransition)
//
// disable preemptive GC
//
mov dword ptr [r12 + OFFSETOF__Thread__m_fPreemptiveGCDisabled], 1
//
// catch returning thread here if a GC is in progress
//
PREPARE_EXTERNAL_VAR g_TrapReturningThreads, rax
cmp dword ptr [rax], 0
jnz LOCAL_LABEL(DoTrapReturningThreadsTHROW)
LOCAL_LABEL(InCooperativeMode):
mov METHODDESC_REGISTER, [rbp - UMThunkStubAMD64_RBP_OFFSET + UMThunkStubAMD64_METHODDESC_OFFSET]
#if _DEBUG
mov rax, [r12 + OFFSETOF__Thread__m_pDomain]
mov eax, [rax + OFFSETOF__AppDomain__m_dwId]
mov r11d, [METHODDESC_REGISTER + OFFSETOF__UMEntryThunk__m_dwDomainId]
cmp rax, r11
jne LOCAL_LABEL(WrongAppDomain)
#endif
mov r11, [METHODDESC_REGISTER + OFFSETOF__UMEntryThunk__m_pUMThunkMarshInfo]
mov eax, [r11 + OFFSETOF__UMThunkMarshInfo__m_cbActualArgSize] // stack_args
test rax, rax // stack_args
jnz LOCAL_LABEL(UMThunkStub_CopyStackArgs) // stack_args
LOCAL_LABEL(UMThunkStub_ArgumentsSetup):
mov rdi, [rbp - UMThunkStubAMD64_RBP_OFFSET + UMThunkStubAMD64_INT_ARG_OFFSET]
mov rsi, [rbp - UMThunkStubAMD64_RBP_OFFSET + UMThunkStubAMD64_INT_ARG_OFFSET + 0x08]
mov rdx, [rbp - UMThunkStubAMD64_RBP_OFFSET + UMThunkStubAMD64_INT_ARG_OFFSET + 0x10]
mov rcx, [rbp - UMThunkStubAMD64_RBP_OFFSET + UMThunkStubAMD64_INT_ARG_OFFSET + 0x18]
mov r8, [rbp - UMThunkStubAMD64_RBP_OFFSET + UMThunkStubAMD64_INT_ARG_OFFSET + 0x20]
mov r9, [rbp - UMThunkStubAMD64_RBP_OFFSET + UMThunkStubAMD64_INT_ARG_OFFSET + 0x28]
movdqa xmm0, xmmword ptr [rbp - UMThunkStubAMD64_RBP_OFFSET + UMThunkStubAMD64_XMM_SAVE_OFFSET]
movdqa xmm1, xmmword ptr [rbp - UMThunkStubAMD64_RBP_OFFSET + UMThunkStubAMD64_XMM_SAVE_OFFSET + 0x10]
movdqa xmm2, xmmword ptr [rbp - UMThunkStubAMD64_RBP_OFFSET + UMThunkStubAMD64_XMM_SAVE_OFFSET + 0x20]
movdqa xmm3, xmmword ptr [rbp - UMThunkStubAMD64_RBP_OFFSET + UMThunkStubAMD64_XMM_SAVE_OFFSET + 0x30]
movdqa xmm4, xmmword ptr [rbp - UMThunkStubAMD64_RBP_OFFSET + UMThunkStubAMD64_XMM_SAVE_OFFSET + 0x40]
movdqa xmm5, xmmword ptr [rbp - UMThunkStubAMD64_RBP_OFFSET + UMThunkStubAMD64_XMM_SAVE_OFFSET + 0x50]
movdqa xmm6, xmmword ptr [rbp - UMThunkStubAMD64_RBP_OFFSET + UMThunkStubAMD64_XMM_SAVE_OFFSET + 0x60]
movdqa xmm7, xmmword ptr [rbp - UMThunkStubAMD64_RBP_OFFSET + UMThunkStubAMD64_XMM_SAVE_OFFSET + 0x70]
mov rax, [r11 + OFFSETOF__UMThunkMarshInfo__m_pILStub] // rax <- Stub*
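// the IL stub marshals the arguments and dispatches to the managed target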
call rax
LOCAL_LABEL(PostCall):
//
// enable preemptive GC
//
mov dword ptr [r12 + OFFSETOF__Thread__m_fPreemptiveGCDisabled], 0
// epilog
lea rsp, [rbp - 8] // deallocate arguments
set_cfa_register rsp, (3*8)
pop_nonvol_reg r12
pop_nonvol_reg rbp
ret
LOCAL_LABEL(DoThreadSetup):
call C_FUNC(CreateThreadBlockThrow)
jmp LOCAL_LABEL(HaveThread)
LOCAL_LABEL(InvalidTransition):
// No arguments to set up; ReversePInvokeBadTransition will fail fast
call C_FUNC(ReversePInvokeBadTransition)
LOCAL_LABEL(DoTrapReturningThreadsTHROW):
mov rdi, r12 // Thread* pThread
mov rsi, [rbp - UMThunkStubAMD64_RBP_OFFSET + UMThunkStubAMD64_METHODDESC_OFFSET] // UMEntryThunk* pUMEntry
call C_FUNC(UMThunkStubRareDisableWorker)
jmp LOCAL_LABEL(InCooperativeMode)
LOCAL_LABEL(UMThunkStub_CopyStackArgs):
// rax = cbStackArgs
sub rsp, rax
and rsp, -16
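// keep RSP 16-byte aligned for the call; rax is a multiple of 8 (the copy
// loop below relies on this), so rounding down may over-allocate up to 8
// bytes, which is harmless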
// rax = number of bytes
lea rdi, [rbp + 0x10] // src = first stack arg, past the saved rbp and return address
lea rsi, [rsp]
LOCAL_LABEL(CopyLoop):
// rax = number of bytes
// rdi = src
// rsi = dest
// rdx = scratch
add rax, -8
mov rdx, [rdi + rax]
mov [rsi + rax], rdx
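// mov does not modify flags, so jnz consumes the ZF set by the add above;
// loop until rax reaches zero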
jnz LOCAL_LABEL(CopyLoop)
jmp LOCAL_LABEL(UMThunkStub_ArgumentsSetup)
#if _DEBUG
LOCAL_LABEL(WrongAppDomain):
int3
#endif
NESTED_END UMThunkStub, _TEXT
//
// EXTERN_C void __stdcall UM2MThunk_WrapperHelper(
// void *pThunkArgs, // rdi
// int argLen, // rsi
// void *pAddr, // rdx // not used
// UMEntryThunk *pEntryThunk, // rcx
// Thread *pThread); // r8
//
NESTED_ENTRY UM2MThunk_WrapperHelper, _TEXT, NoHandler
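// Not implemented on this platform; int3 traps if this is ever reached.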
int3
NESTED_END UM2MThunk_WrapperHelper, _TEXT