1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
1083
1084
1085
1086
1087
1088
1089
1090
1091
1092
1093
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
1118
1119
1120
1121
1122
1123
1124
1125
1126
1127
1128
1129
1130
1131
1132
1133
1134
1135
1136
1137
1138
1139
1140
1141
1142
1143
1144
1145
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
.intel_syntax noprefix
#include "unixasmmacros.inc"
#include "asmconstants.h"
//
// FramedMethodFrame prolog
//
// Builds the register save area expected by a FramedMethodFrame /
// TransitionBlock: ebp frame, callee-saved ebx/esi/edi, then the x86
// argument registers ecx/edx. Must be unwound with STUB_EPILOG or
// STUB_EPILOG_RETURN so the layout stays in sync.
.macro STUB_PROLOG
// push ebp-frame
PROLOG_BEG
// save CalleeSavedRegisters
PROLOG_PUSH ebx
PROLOG_PUSH esi
PROLOG_PUSH edi
// push ArgumentRegisters
PROLOG_PUSH ecx
PROLOG_PUSH edx
// set frame pointer
PROLOG_END
.endm
//
// FramedMethodFrame epilog
//
// Exact mirror of STUB_PROLOG: restores the argument registers and the
// callee-saved registers in reverse push order, then tears down the
// ebp frame.
.macro STUB_EPILOG
// restore stack pointer
EPILOG_BEG
// pop ArgumentRegisters
EPILOG_POP edx
EPILOG_POP ecx
// pop CalleeSavedRegisters
EPILOG_POP edi
EPILOG_POP esi
EPILOG_POP ebx
// pop ebp-frame
EPILOG_END
.endm
//
// FramedMethodFrame epilog
//
// Variant of STUB_EPILOG that DISCARDS the saved argument-register
// slots (add esp, 8) instead of restoring them, leaving the current
// ecx/edx values intact (used when they carry a result, see
// DYNAMICHELPER). Restores callee-saved registers and ebp with plain
// pops rather than the EPILOG_* unwind macros.
.macro STUB_EPILOG_RETURN
// pop ArgumentRegisters
add esp, 8
// pop CalleeSavedRegisters
pop edi
pop esi
pop ebx
pop ebp
.endm
// Prolog for stubs entered with two hidden stack arguments sitting where
// the ebx/ebp save slots of a normal STUB_PROLOG frame would go. The
// hidden args are loaded into ecx/edx, then their slots are overwritten
// with ebx/ebp so the final frame layout matches STUB_PROLOG exactly.
.macro STUB_PROLOG_2_HIDDEN_ARGS
//
// The stub arguments are where we want to setup the TransitionBlock. We will
// setup the TransitionBlock later once we can trash them
//
// push ebp-frame
// push ebp
// mov ebp,esp
// save CalleeSavedRegisters
// push ebx
push esi
push edi
// push ArgumentRegisters
push ecx
push edx
// fetch the two hidden stub arguments (above the 4 registers just pushed)
mov ecx, [esp + 4*4]
mov edx, [esp + 5*4]
// Setup up proper EBP frame now that the stub arguments can be trashed
mov [esp + 4*4], ebx
mov [esp + 5*4], ebp
lea ebp, [esp + 5*4]
.endm
// Resets CPU state to a well-known configuration: clears the direction
// flag, reinitializes the x87 FPU, then installs a control word that
// keeps the caller's precision and rounding control but masks all
// floating-point exceptions. Preserves eax.
LEAF_ENTRY ResetCurrentContext, _TEXT
push eax
// clear the direction flag (used for rep instructions)
cld
// load the current FPU control word into AX
fnstcw [esp - 2]
mov ax, [esp - 2]
fninit // reset FPU
and ax, 0f00h // preserve precision and rounding control
or ax, 007fh // mask all exceptions
// store the updated control word and make it effective
// (bug fix: operands were reversed as "mov ax, [esp - 2]", which
// reloaded the stale word and discarded the and/or computation above)
mov [esp - 2], ax
fldcw [esp - 2]
pop eax
ret
LEAF_END ResetCurrentContext, _TEXT
// Incoming:
// ESP+4: Pointer to buffer to which FPU state should be saved
// Stores the x87 FPU environment (control/status/tag words etc.) into
// the caller-supplied buffer; fnstenv does not wait for pending
// unmasked exceptions.
LEAF_ENTRY CaptureFPUContext, _TEXT
mov ecx, [esp + 4]
fnstenv [ecx]
ret 4 // stdcall: pop the single pointer argument
LEAF_END CaptureFPUContext, _TEXT
// Incoming:
// ESP+4: Pointer to buffer from which FPU state should be restored
// Reloads an x87 FPU environment previously saved with fnstenv from the
// caller-supplied buffer.
LEAF_ENTRY RestoreFPUContext, _TEXT
mov ecx, [esp + 4]
fldenv [ecx]
ret 4 // stdcall: pop the single pointer argument
LEAF_END RestoreFPUContext, _TEXT
// Returns (in EAX) the CPU family/model bits from CPUID leaf 1, masked
// to 0ff0h; reports 0400h (a 486) when CPUID is not available. CPUID
// support is detected by toggling the ID bit (bit 21) in EFLAGS.
LEAF_ENTRY GetSpecificCpuTypeAsm, _TEXT
push ebx // ebx is trashed by the cpuid calls
// See if the chip supports CPUID
pushfd
pop ecx // Get the EFLAGS
mov eax, ecx // Save for later testing
xor ecx, 200000h // Invert the ID bit
push ecx
popfd // Save the updated flags
pushfd
pop ecx // Retrieve the updated flags
xor ecx, eax // Test if it actually changed (bit set means yes)
push eax
popfd // Restore the flags
test ecx, 200000h
jz LOCAL_LABEL(Assume486)
// CPUID leaf 0 returns the max supported leaf in eax
xor eax, eax
cpuid
test eax, eax
jz LOCAL_LABEL(Assume486) // brif CPUID1 not allowed
mov eax, 1
cpuid
// filter out everything except family and model
// Note that some multi-procs have different stepping number for each proc
and eax, 0ff0h
jmp LOCAL_LABEL(CpuTypeDone)
LOCAL_LABEL(Assume486):
mov eax, 0400h // report 486
LOCAL_LABEL(CpuTypeDone):
pop ebx
ret
LEAF_END GetSpecificCpuTypeAsm, _TEXT
// DWORD __stdcall GetSpecificCpuFeaturesAsm(DWORD *pInfo);
// Returns (in EAX) the EDX feature flags from CPUID leaf 1, or 0 when
// CPUID is unavailable. If pInfo (the single stack argument) is
// non-NULL, the EBX value from CPUID leaf 1 is stored through it.
LEAF_ENTRY GetSpecificCpuFeaturesAsm, _TEXT
push ebx // ebx is trashed by the cpuid calls
// See if the chip supports CPUID
pushfd
pop ecx // Get the EFLAGS
mov eax, ecx // Save for later testing
xor ecx, 200000h // Invert the ID bit.
push ecx
popfd // Save the updated flags.
pushfd
pop ecx // Retrieve the updated flags
xor ecx, eax // Test if it actually changed (bit set means yes)
push eax
popfd // Restore the flags
test ecx, 200000h
jz LOCAL_LABEL(CpuFeaturesFail)
// CPUID leaf 0 returns the max supported leaf in eax
xor eax, eax
cpuid
test eax, eax
jz LOCAL_LABEL(CpuFeaturesDone) // br if CPUID1 not allowed
mov eax, 1
cpuid
mov eax, edx // return all feature flags
mov edx, [esp + 8] // edx = pInfo ([esp] = saved ebx, [esp+4] = retaddr)
test edx, edx
jz LOCAL_LABEL(CpuFeaturesDone)
mov [edx],ebx // return additional useful information
jmp LOCAL_LABEL(CpuFeaturesDone)
LOCAL_LABEL(CpuFeaturesFail):
xor eax, eax // Nothing to report
LOCAL_LABEL(CpuFeaturesDone):
pop ebx
ret 4 // stdcall: pop the pInfo argument
LEAF_END GetSpecificCpuFeaturesAsm, _TEXT
// -----------------------------------------------------------------------
// The out-of-line portion of the code to enable preemptive GC.
// After the work is done, the code jumps back to the "pRejoinPoint"
// which should be emitted right after the inline part is generated.
//
// Assumptions:
// ebx = Thread
// Preserves
// all registers except ecx.
//
// -----------------------------------------------------------------------
NESTED_ENTRY StubRareEnable, _TEXT, NoHandler
// preserve volatile registers that are live across the worker call
push eax
push edx
push ebx // Thread (see assumptions in the header comment above)
call C_FUNC(StubRareEnableWorker)
// NOTE(review): the pops below balance only if the worker pops its own
// Thread argument (stdcall-style) - confirm against the declaration of
// StubRareEnableWorker.
pop edx
pop eax
ret
NESTED_END StubRareEnable, _TEXT
// Out-of-line slow path for disabling preemptive GC; may throw.
// Mirrors StubRareEnable: ebx = Thread, eax/edx preserved.
NESTED_ENTRY StubRareDisableTHROW, _TEXT, NoHandler
push eax
push edx
push ebx // Thread
call C_FUNC(StubRareDisableTHROWWorker)
// NOTE(review): assumes the worker pops its Thread argument
// (stdcall-style), so these pops restore edx/eax - confirm.
pop edx
pop eax
ret
NESTED_END StubRareDisableTHROW, _TEXT
// Common tail for the ArrayOpStub*Exception helpers below.
// EAX = number of caller argument bytes to strip from the stack;
// ECX = exception code expected by JIT_InternalThrow.
LEAF_ENTRY InternalExceptionWorker, _TEXT
pop edx // recover RETADDR
add esp, eax // release caller's args
push edx // restore RETADDR
jmp C_FUNC(JIT_InternalThrow)
LEAF_END InternalExceptionWorker, _TEXT
// EAX -> number of caller arg bytes on the stack that we must remove before going
// to the throw helper, which assumes the stack is clean.
// Throws NullReferenceException out of an array-op stub.
// EAX = caller arg bytes to strip (consumed by InternalExceptionWorker).
LEAF_ENTRY ArrayOpStubNullException, _TEXT
// kFactorReg and kTotalReg could not have been modified, but let's pop
// them anyway for consistency and to avoid future bugs.
pop esi
pop edi
mov ecx, CORINFO_NullReferenceException_ASM
jmp C_FUNC(InternalExceptionWorker)
LEAF_END ArrayOpStubNullException, _TEXT
// EAX -> number of caller arg bytes on the stack that we must remove before going
// to the throw helper, which assumes the stack is clean.
// Throws IndexOutOfRangeException out of an array-op stub.
// EAX = caller arg bytes to strip (consumed by InternalExceptionWorker).
LEAF_ENTRY ArrayOpStubRangeException, _TEXT
// kFactorReg and kTotalReg could not have been modified, but let's pop
// them anyway for consistency and to avoid future bugs.
pop esi
pop edi
mov ecx, CORINFO_IndexOutOfRangeException_ASM
jmp C_FUNC(InternalExceptionWorker)
LEAF_END ArrayOpStubRangeException, _TEXT
// EAX -> number of caller arg bytes on the stack that we must remove before going
// to the throw helper, which assumes the stack is clean.
// Throws ArrayTypeMismatchException out of an array-op stub.
// EAX = caller arg bytes to strip (consumed by InternalExceptionWorker).
LEAF_ENTRY ArrayOpStubTypeMismatchException, _TEXT
// kFactorReg and kTotalReg could not have been modified, but let's pop
// them anyway for consistency and to avoid future bugs.
pop esi
pop edi
mov ecx, CORINFO_ArrayTypeMismatchException_ASM
jmp C_FUNC(InternalExceptionWorker)
LEAF_END ArrayOpStubTypeMismatchException, _TEXT
// ------------------------------------------------------------------------------
// This helper routine enregisters the appropriate arguments and makes the
// actual call.
// ------------------------------------------------------------------------------
// void STDCALL CallDescrWorkerInternal(CallDescrWorkerParams * pParams)
// Copies the outgoing stack arguments from pSrc, loads the two argument
// registers (edx/ecx), calls pTarget, and stores the integer or FP
// result back into the CallDescrData. ebx = pParams for the whole body.
NESTED_ENTRY CallDescrWorkerInternal, _TEXT, NoHandler
PROLOG_BEG
PROLOG_PUSH ebx
PROLOG_END
// ebx <- pParams (survives the call; ebx is callee-saved)
mov ebx, [esp + ((2 + 1) * 4)]
// compute padding size
// so that esp lands 16-byte aligned once numStackSlots dwords are pushed
mov eax, esp
mov ecx, [ebx + CallDescrData__numStackSlots]
shl ecx, 2
sub eax, ecx
and eax, 15
// adjust stack offset
sub esp, eax
// copy the stack
// arguments are pushed last-to-first; the first two iterations are
// peeled off the loop
mov ecx, [ebx +CallDescrData__numStackSlots]
mov eax, [ebx +CallDescrData__pSrc]
test ecx, ecx
jz LOCAL_LABEL(donestack)
lea eax, [eax + 4*ecx - 4] // last argument
push DWORD PTR [eax]
dec ecx
jz LOCAL_LABEL(donestack)
sub eax, 4
push DWORD PTR [eax]
dec ecx
jz LOCAL_LABEL(donestack)
LOCAL_LABEL(stackloop):
sub eax, 4
push DWORD PTR [eax]
dec ecx
jnz LOCAL_LABEL(stackloop)
LOCAL_LABEL(donestack):
// now we must push each field of the ArgumentRegister structure
mov eax, [ebx + CallDescrData__pArgumentRegisters]
mov edx, DWORD PTR [eax]
mov ecx, DWORD PTR [eax + 4]
CHECK_STACK_ALIGNMENT
call [ebx + CallDescrData__pTarget]
#ifdef _DEBUG
nop // This is a tag that we use in an assert. Fcalls expect to
// be called from Jitted code or from certain blessed call sites like
// this one. (See HelperMethodFrame::InsureInit)
#endif
// Save FP return value if necessary
mov ecx, [ebx + CallDescrData__fpReturnSize]
cmp ecx, 0
je LOCAL_LABEL(ReturnsInt)
cmp ecx, 4
je LOCAL_LABEL(ReturnsFloat)
cmp ecx, 8
je LOCAL_LABEL(ReturnsDouble)
// unexpected
jmp LOCAL_LABEL(Epilog)
LOCAL_LABEL(ReturnsInt):
// store the (up to 64-bit) integer result from eax:edx
mov [ebx + CallDescrData__returnValue], eax
mov [ebx + CallDescrData__returnValue + 4], edx
LOCAL_LABEL(Epilog):
// restore the stack pointer (ebp-4 is the saved-ebx slot)
lea esp, [ebp - 4]
EPILOG_BEG
EPILOG_POP ebx
EPILOG_END
ret 4 // stdcall: pop pParams
LOCAL_LABEL(ReturnsFloat):
fstp DWORD PTR [ebx + CallDescrData__returnValue] // Spill the Float return value
jmp LOCAL_LABEL(Epilog)
LOCAL_LABEL(ReturnsDouble):
fstp QWORD PTR [ebx + CallDescrData__returnValue] // Spill the Double return value
jmp LOCAL_LABEL(Epilog)
NESTED_END CallDescrWorkerInternal, _TEXT
#ifdef _DEBUG
// int __fastcall HelperMethodFrameRestoreState(HelperMethodFrame*, struct MachState *)
LEAF_ENTRY HelperMethodFrameRestoreState, _TEXT
mov eax, edx // eax = MachState*
#else // _DEBUG
// int __fastcall HelperMethodFrameRestoreState(struct MachState *)
LEAF_ENTRY HelperMethodFrameRestoreState, _TEXT
mov eax, ecx // eax = MachState*
#endif // _DEBUG
// restore the registers from the m_MachState structure. Note that
// we only do this for registers that were not saved on the stack
// at the time the machine state snapshot was taken.
cmp dword ptr [eax+MachState__pRetAddr], 0
#ifdef _DEBUG
// debug only: if pRetAddr is 0, have HelperMethodFrameConfirmState
// validate/compute the state first (it returns the MachState* in eax)
jnz LOCAL_LABEL(noConfirm)
push ebp
push ebx
push edi
push esi
push ecx // HelperFrame*
call C_FUNC(HelperMethodFrameConfirmState)
// on return, eax = MachState*
cmp DWORD PTR [eax + MachState__pRetAddr], 0
LOCAL_LABEL(noConfirm):
#endif // _DEBUG
jz LOCAL_LABEL(doRet)
// For each callee-saved register: if the MachState's pointer field
// still points at the MachState's own capture slot, the value was
// spilled there and must be reloaded; otherwise skip it.
lea edx, [eax + MachState__esi] // Did we have to spill ESI
cmp [eax + MachState__pEsi], edx
jnz LOCAL_LABEL(SkipESI)
mov esi, [edx] // Then restore it
LOCAL_LABEL(SkipESI):
lea edx, [eax + MachState__edi] // Did we have to spill EDI
cmp [eax + MachState__pEdi], edx
jnz LOCAL_LABEL(SkipEDI)
mov edi, [edx] // Then restore it
LOCAL_LABEL(SkipEDI):
lea edx, [eax + MachState__ebx] // Did we have to spill EBX
cmp [eax + MachState__pEbx], edx
jnz LOCAL_LABEL(SkipEBX)
mov ebx, [edx] // Then restore it
LOCAL_LABEL(SkipEBX):
lea edx, [eax + MachState__ebp] // Did we have to spill EBP
cmp [eax + MachState__pEbp], edx
jnz LOCAL_LABEL(SkipEBP)
mov ebp, [edx] // Then restore it
LOCAL_LABEL(SkipEBP):
LOCAL_LABEL(doRet):
xor eax, eax // return 0
ret
LEAF_END HelperMethodFrameRestoreState, _TEXT
#ifdef FEATURE_HIJACK
// A JITted method's return address was hijacked to return to us here.
// VOID OnHijackTripThread()
NESTED_ENTRY OnHijackTripThread, _TEXT, NoHandler
// Don't fiddle with this unless you change HijackFrame::UpdateRegDisplay
// and HijackArgs
push eax // make room for the real return address (Eip)
push ebp
push eax
push ecx
push edx
push ebx
push esi
push edi
// unused space for floating point state
sub esp,12
// pass the address of the register block just built (HijackArgs*)
push esp
call C_FUNC(OnHijackWorker)
// unused space for floating point state
add esp,12
pop edi
pop esi
pop ebx
pop edx
pop ecx
pop eax
pop ebp
ret // return to the correct place, adjusted by our caller
NESTED_END OnHijackTripThread, _TEXT
// VOID OnHijackFPTripThread()
NESTED_ENTRY OnHijackFPTripThread, _TEXT, NoHandler
// Don't fiddle with this unless you change HijackFrame::UpdateRegDisplay
// and HijackArgs
push eax // make room for the real return address (Eip)
push ebp
push eax
push ecx
push edx
push ebx
push esi
push edi
sub esp,12
// save top of the floating point stack (there is return value passed in it)
// save full 10 bytes to avoid precision loss
// NOTE(review): fstp QWORD stores 8 bytes, not the full 10-byte
// extended value the comment describes - confirm intent.
fstp QWORD PTR [esp]
// pass the address of the register block just built (HijackArgs*)
push esp
call C_FUNC(OnHijackWorker)
// restore top of the floating point stack
fld QWORD PTR [esp]
add esp,12
pop edi
pop esi
pop ebx
pop edx
pop ecx
pop eax
pop ebp
ret // return to the correct place, adjusted by our caller
NESTED_END OnHijackFPTripThread, _TEXT
#endif // FEATURE_HIJACK
// ==========================================================================
// This function is reached only via the embedded ImportThunkGlue code inside
// an NDirectMethodDesc. Its purpose is to load the DLL associated with an
// N/Direct method, then backpatch the DLL target into the methoddesc.
//
// Initial state:
//
// Preemptive GC is *enabled*: we are actually in an unmanaged state.
//
//
// [esp+...] - The *unmanaged* parameters to the DLL target.
// [esp+4] - Return address back into the JIT'ted code that made
// the DLL call.
// [esp] - Contains the "return address." Because we got here
// thru a call embedded inside a MD, this "return address"
// gives us an easy to way to find the MD (which was the
// whole purpose of the embedded call maneuver.)
//
//
//
// ==========================================================================
LEAF_ENTRY NDirectImportThunk, _TEXT
// EAX holds the value set up by the embedded ImportThunkGlue call
// (see the header comment above).
// Preserve argument registers
push ecx
push edx
// Invoke the function that does the real work.
push eax
call C_FUNC(NDirectImportWorker)
// NOTE(review): assumes NDirectImportWorker pops its own argument
// (stdcall-style) so the pops below restore edx/ecx - confirm.
// Restore argument registers
pop edx
pop ecx
// If we got back from NDirectImportWorker, the MD has been successfully
// linked and "eax" contains the DLL target. Proceed to execute the
// original DLL call.
jmp eax // Jump to DLL target
LEAF_END NDirectImportThunk, _TEXT
// ==========================================================================
// The call in the fixup precode initially points to this function.
// The purpose of this function is to load the MethodDesc and forward the call to the prestub.
LEAF_ENTRY PrecodeFixupThunk, _TEXT
// Pop the return address. It points right after the call instruction in the precode.
pop eax
push esi
push edi
// Inline computation done by FixupPrecode::GetMethodDesc()
// (byte offsets +1/+2 and the dword at +esi*8+3 are fields of the
// FixupPrecode the return address points into)
movzx esi, BYTE PTR [eax + 2] // m_PrecodeChunkIndex
movzx edi, BYTE PTR [eax + 1] // m_MethodDescChunkIndex
mov eax, DWORD PTR [eax + esi*8 +3]
lea eax, [eax + edi*4] // eax = MethodDesc* for ThePreStub
pop edi
pop esi
jmp C_FUNC(ThePreStub)
LEAF_END PrecodeFixupThunk, _TEXT
// void __stdcall UM2MThunk_WrapperHelper(void *pThunkArgs,
// int argLen,
// void *pAddr,
// UMEntryThunk *pEntryThunk,
// Thread *pThread)
NESTED_ENTRY UM2MThunk_WrapperHelper, _TEXT, NoHandler
push ebx
// stack offsets below account for the pushed ebx (+4) and retaddr (+4)
mov eax, [esp + 20] // pEntryThunk
mov ecx, [esp + 24] // pThread
mov ebx, [esp + 8] // pThunkArgs
call [esp + 16] // pAddr
pop ebx
ret 20 // stdcall: pop the 5 dword arguments
NESTED_END UM2MThunk_WrapperHelper, _TEXT
// Rare path for disabling preemptive GC on the reverse-P/Invoke route.
// EAX = UMEntryThunk, ECX = Thread; both are preserved.
NESTED_ENTRY UMThunkStubRareDisable, _TEXT, NoHandler
push eax
push ecx
push eax // Push the UMEntryThunk
push ecx // Push thread
call C_FUNC(UMThunkStubRareDisableWorker)
// NOTE(review): assumes the worker pops its two arguments
// (stdcall-style) so the pops below restore ecx/eax - confirm.
pop ecx
pop eax
ret
NESTED_END UMThunkStubRareDisable, _TEXT
//
// Used to get the current instruction pointer value
//
// UINT_PTR __stdcall GetCurrentIP(void);
LEAF_ENTRY GetCurrentIP, _TEXT
// eax = the caller's return address, i.e. the IP just after the call
mov eax, [esp]
ret
LEAF_END GetCurrentIP, _TEXT
// LPVOID __stdcall GetCurrentSP(void);
LEAF_ENTRY GetCurrentSP, _TEXT
// eax = current esp (points at our return-address slot)
mov eax, esp
ret
LEAF_END GetCurrentSP, _TEXT
// ==========================================================================
// Invoked for vararg forward P/Invoke calls as a stub.
// Except for secret return buffer, arguments come on the stack so EDX is available as scratch.
// EAX - the NDirectMethodDesc
// ECX - may be return buffer address
// [ESP + 4] - the VASigCookie
//
NESTED_ENTRY VarargPInvokeStub, _TEXT, NoHandler
// EDX <- VASigCookie
mov edx, [esp + 4] // skip retaddr
mov edx, [edx + VASigCookie__StubOffset]
test edx, edx
jz LOCAL_LABEL(GoCallVarargWorker)
// ---------------------------------------
// an IL stub already exists - tail-call it
// EAX contains MD ptr for the IL stub
jmp edx
LOCAL_LABEL(GoCallVarargWorker):
//
// MD ptr in EAX, VASigCookie ptr at [esp+4]
//
STUB_PROLOG
mov esi, esp
// save pMD
push eax
push eax // pMD
push dword ptr [esi + 4*7] // pVaSigCookie (7 slots up: 6 saved regs + retaddr)
push esi // pTransitionBlock
call C_FUNC(VarargPInvokeStubWorker)
// restore pMD
pop eax
STUB_EPILOG
// jump back to the helper - this time it won't come back here as the stub already exists
jmp C_FUNC(VarargPInvokeStub)
NESTED_END VarargPInvokeStub, _TEXT
// ==========================================================================
// Invoked for marshaling-required unmanaged CALLI calls as a stub.
// EAX - the unmanaged target
// ECX, EDX - arguments
// [ESP + 4] - the VASigCookie
//
LEAF_ENTRY GenericPInvokeCalliHelper, _TEXT
// save the target
push eax
// EAX <- VASigCookie
mov eax, [esp + 8] // skip target and retaddr
mov eax, [eax + VASigCookie__StubOffset]
test eax, eax
jz LOCAL_LABEL(GoCallCalliWorker)
// ---------------------------------------
// a stub already exists: shuffle the stack so the stub entry point
// replaces the VASigCookie slot, then "return" into the stub
push eax
// stack layout at this point:
//
// | ... |
// | stack arguments | ESP + 16
// +----------------------+
// | VASigCookie* | ESP + 12
// +----------------------+
// | return address | ESP + 8
// +----------------------+
// | CALLI target address | ESP + 4
// +----------------------+
// | stub entry point | ESP + 0
// ------------------------
// remove VASigCookie from the stack
mov eax, [esp + 8]
mov [esp + 12], eax
// move stub entry point below the RA
mov eax, [esp]
mov [esp + 8], eax
// load EAX with the target address
pop eax
pop eax
// stack layout at this point:
//
// | ... |
// | stack arguments | ESP + 8
// +----------------------+
// | return address | ESP + 4
// +----------------------+
// | stub entry point | ESP + 0
// ------------------------
// CALLI target address is in EAX
ret // pops the stub entry point into EIP
LOCAL_LABEL(GoCallCalliWorker):
// the target is on the stack and will become m_Datum of PInvokeCalliFrame
// call the stub generating worker
pop eax
//
// target ptr in EAX, VASigCookie ptr in EDX
//
STUB_PROLOG
mov esi, esp
// save target
push eax
push eax // unmanaged target
push dword ptr [esi + 4*7] // pVaSigCookie (first stack argument)
push esi // pTransitionBlock
call C_FUNC(GenericPInvokeCalliStubWorker)
// restore target
pop eax
STUB_EPILOG
// jump back to the helper - this time it won't come back here as the stub already exists
jmp C_FUNC(GenericPInvokeCalliHelper)
LEAF_END GenericPInvokeCalliHelper, _TEXT
#ifdef FEATURE_PREJIT
// =========================================================================
NESTED_ENTRY StubDispatchFixupStub, _TEXT, NoHandler
STUB_PROLOG
mov esi, esp
// two zero arguments for StubDispatchFixupWorker
push 0
push 0
push eax // siteAddrForRegisterIndirect (for tailcalls)
push esi // pTransitionBlock
call C_FUNC(StubDispatchFixupWorker)
// eax = resolved target; must not be trashed from here on
STUB_EPILOG
PATCH_LABEL StubDispatchFixupPatchLabel
// Tailcall target
jmp eax
// This will never be executed. It is just to help out stack-walking logic
// which disassembles the epilog to unwind the stack.
ret
NESTED_END StubDispatchFixupStub, _TEXT
// ==========================================================================
// Resolves an external method thunk on first call: computes the thunk
// start address from the stub return address, calls
// ExternalMethodFixupWorker, then tail-calls the resulting stub.
// (bug fix: the NESTED_ENTRY operands were fused as "_TEXT_ NoHandler";
// the section and handler must be separate comma-separated arguments,
// matching every other NESTED_ENTRY in this file.)
NESTED_ENTRY ExternalMethodFixupStub, _TEXT, NoHandler
// pop off the return address to the stub
// leaving the actual caller's return address on top of the stack
pop eax
STUB_PROLOG
mov esi, esp
// EAX is return address into CORCOMPILE_EXTERNAL_METHOD_THUNK. Subtract 5 to get start address.
sub eax, 5
// two zero arguments for ExternalMethodFixupWorker
push 0
push 0
push eax // thunk start address
// pTransitionBlock
push esi
call C_FUNC(ExternalMethodFixupWorker)
// eax now contains replacement stub. PreStubWorker will never return
// NULL (it throws an exception if stub creation fails.)
// From here on, mustn't trash eax
STUB_EPILOG
PATCH_LABEL ExternalMethodFixupPatchLabel
// Tailcall target
jmp eax
// This will never be executed. It is just to help out stack-walking logic
// which disassembles the epilog to unwind the stack.
ret
NESTED_END ExternalMethodFixupStub, _TEXT
#ifdef FEATURE_READYTORUN
// ==========================================================================
// ReadyToRun delay-load stub for method calls. Entered with two hidden
// stack arguments (consumed by STUB_PROLOG_2_HIDDEN_ARGS into ecx/edx);
// eax carries the indirection cell value. Shares the fixup/patch path
// with ExternalMethodFixupStub.
NESTED_ENTRY DelayLoad_MethodCall, _TEXT, NoHandler
STUB_PROLOG_2_HIDDEN_ARGS
mov esi, esp
push ecx
push edx
push eax
// pTransitionBlock
push esi
call C_FUNC(ExternalMethodFixupWorker)
// eax now contains replacement stub. PreStubWorker will never return
// NULL (it throws an exception if stub creation fails.)
// From here on, mustn't trash eax
STUB_EPILOG
// Share the patch label
jmp C_FUNC(ExternalMethodFixupPatchLabel)
// This will never be executed. It is just to help out stack-walking logic
// which disassembles the epilog to unwind the stack.
ret
NESTED_END DelayLoad_MethodCall, _TEXT
#endif // FEATURE_READYTORUN
// =======================================================================================
// The call in softbound vtable slots initially points to this function.
// The purpose of this function is to transfer the control to the right target and
// to optionally patch the target of the jump so that we do not take this slow path again.
//
NESTED_ENTRY VirtualMethodFixupStub, _TEXT, NoHandler
// Pop the return address. It points right after the call instruction in the thunk.
pop eax
// Calculate the address of the thunk
sub eax, 5
// Push ebp frame to get good callstack under debugger
PROLOG_BEG
// Preserve argument registers
PROLOG_PUSH ecx
PROLOG_PUSH edx
// Set frame pointer
PROLOG_END
push eax // address of the thunk
push ecx // this ptr
call C_FUNC(VirtualMethodFixupWorker)
// eax = real method target; must survive the epilog below
// Restore stack pointer
EPILOG_BEG
// Restore argument registers
EPILOG_POP edx
EPILOG_POP ecx
// Pop ebp frame
EPILOG_END
PATCH_LABEL VirtualMethodFixupPatchLabel
// Proceed to execute the actual method.
jmp eax
// This will never be executed. It is just to help out stack-walking logic
// which disassembles the epilog to unwind the stack.
ret
NESTED_END VirtualMethodFixupStub, _TEXT
#endif // FEATURE_PREJIT
// The generic prestub: builds a TransitionBlock, calls PreStubWorker to
// JIT/locate the real code, then tail-calls it.
NESTED_ENTRY ThePreStub, _TEXT, NoHandler
STUB_PROLOG
mov esi, esp
// Compute padding size
// (so esp is 16-byte aligned after the two pushes below)
lea ebx, [esp - 8]
and ebx, 15
// Adjust stack offset
sub esp, ebx
// EAX contains MethodDesc* from the precode. Push it here as argument
// for PreStubWorker
push eax
push esi
CHECK_STACK_ALIGNMENT
call C_FUNC(PreStubWorker)
// eax now contains replacement stub. PreStubWorker will never return
// NULL (it throws an exception if stub creation fails.)
// From here on, mustn't trash eax
// Restore stack pointer
mov esp, esi
STUB_EPILOG
// Tailcall target
jmp eax
// This will never be executed. It is just to help out stack-walking logic
// which disassembles the epilog to unwind the stack.
ret
NESTED_END ThePreStub, _TEXT
// This method does nothing. It's just a fixed function for the debugger to put a breakpoint
// on so that it can trace a call target.
LEAF_ENTRY ThePreStubPatch, _TEXT
// make sure that the basic block is unique
// (the harmless test keeps the compiler/linker from folding this
// function with another identical ret-only function)
test eax,34
PATCH_LABEL ThePreStubPatchLabel
ret
LEAF_END ThePreStubPatch, _TEXT
#ifdef FEATURE_READYTORUN
// ==========================================================================
// Define helpers for delay loading of readytorun helpers
// Expands to a DelayLoad_Helper\suffix stub. The stub calls
// DynamicHelperWorker; if the worker returns 0 the helper's result is
// read out of the transition block and returned in eax, otherwise the
// returned address is tail-called.
.macro DYNAMICHELPER frameFlags, suffix
NESTED_ENTRY DelayLoad_Helper\suffix, _TEXT, NoHandler
STUB_PROLOG_2_HIDDEN_ARGS
mov esi, esp
push \frameFlags
push ecx // module
push edx // section index
push eax // indirection cell address.
push esi // pTransitionBlock
call C_FUNC(DynamicHelperWorker)
test eax,eax
jnz LOCAL_LABEL(TailCallDelayLoad_Helper\suffix)
mov eax, [esi] // The result is stored in the argument area of the transition block
STUB_EPILOG_RETURN
ret
LOCAL_LABEL(TailCallDelayLoad_Helper\suffix):
STUB_EPILOG
jmp eax
NESTED_END DelayLoad_Helper\suffix, _TEXT
.endm
// Instantiate the three helper flavors (no object args, one, and two)
DYNAMICHELPER DynamicHelperFrameFlags_Default
DYNAMICHELPER DynamicHelperFrameFlags_ObjectArg, _Obj
DYNAMICHELPER (DynamicHelperFrameFlags_ObjectArg | DynamicHelperFrameFlags_ObjectArg2), _ObjObj
#endif // FEATURE_READYTORUN
// Slow-path virtual stub dispatch resolver. Entered with the dispatch
// token and siteAddrForRegisterIndirect on the stack above the return
// address (see ResolveWorkerChainLookupAsmStub's entry-stack comment);
// builds a TransitionBlock in-place and calls VSD_ResolveWorker, then
// jumps to the resolved target in eax.
NESTED_ENTRY ResolveWorkerAsmStub, _TEXT, NoHandler
//
// The stub arguments are where we want to setup the TransitionBlock. We will
// setup the TransitionBlock later once we can trash them
//
// push ebp-frame
// push ebp
// mov ebp,esp
// save CalleeSavedRegisters
// push ebx
push esi
push edi
// push ArgumentRegisters
push ecx
push edx
mov esi, esp
push [esi + 4*4] // dispatch token
push [esi + 5*4] // siteAddrForRegisterIndirect
push esi // pTransitionBlock
// Setup up proper EBP frame now that the stub arguments can be trashed
mov [esi + 4*4],ebx
mov [esi + 5*4],ebp
lea ebp, [esi + 5*4]
// Make the call
// NOTE(review): the pops below assume VSD_ResolveWorker removes its
// three arguments (stdcall-style) - confirm against its declaration.
call C_FUNC(VSD_ResolveWorker)
// From here on, mustn't trash eax
// pop ArgumentRegisters
pop edx
pop ecx
// pop CalleeSavedRegisters
pop edi
pop esi
pop ebx
pop ebp
// Now jump to the target
jmp eax // continue on into the method
NESTED_END ResolveWorkerAsmStub, _TEXT
#ifdef FEATURE_STUBS_AS_IL
// ==========================================================================
// void SinglecastDelegateInvokeStub();
//
LEAF_ENTRY SinglecastDelegateInvokeStub, _TEXT
// ecx = delegate object ("this"); throw NullReference if it is null
test ecx, ecx
jz LOCAL_LABEL(NullObject)
// tail-call the delegate's method with the stored target as "this"
mov eax, [ecx + DelegateObject___methodPtr]
mov ecx, [ecx + DelegateObject___target] // replace "this" pointer
jmp eax
LOCAL_LABEL(NullObject):
mov ecx, CORINFO_NullReferenceException_ASM
jmp C_FUNC(JIT_InternalThrow)
LEAF_END SinglecastDelegateInvokeStub, _TEXT
#endif // FEATURE_STUBS_AS_IL
#ifndef CROSSGEN_COMPILE
// =======================================================================================
// void ResolveWorkerChainLookupAsmStub();
//
// This will perform a chained lookup of the entry if the initial cache lookup fails
//
// Entry stack:
// dispatch token
// siteAddrForRegisterIndirect (used only if this is a RegisterIndirect dispatch call)
// return address of caller to stub
// Also, EAX contains the pointer to the first ResolveCacheElem pointer for the calculated
// bucket in the cache table.
//
// Walks the chained ResolveCacheElem list for the (MethodTable, token)
// pair; on a hit it may promote the entry to the chain head and then
// jumps to the cached target, otherwise it falls back to
// ResolveWorkerAsmStub. See the entry-stack description above.
NESTED_ENTRY ResolveWorkerChainLookupAsmStub, _TEXT, NoHandler
#define CALL_STUB_CACHE_INITIAL_SUCCESS_COUNT 0x100
// this is the part of the stack that is present as we enter this function:
#define ChainLookup__token 0x00
#define ChainLookup__indirect_addr 0x04
#define ChainLookup__caller_ret_addr 0x08
#define ChainLookup__ret_esp 0x0c
#define ChainLookup_spilled_reg_size 8
// spill regs
push edx
push ecx
// move the token into edx
mov edx, [esp + ChainLookup_spilled_reg_size + ChainLookup__token]
// move the MT into ecx (first slot of the object "this" holds in ecx)
mov ecx, [ecx]
LOCAL_LABEL(main_loop):
// get the next entry in the chain (don't bother checking the first entry again)
mov eax, [eax + ResolveCacheElem__pNext]
// test if we hit a terminating NULL
test eax, eax
jz LOCAL_LABEL(fail)
// compare the MT of the ResolveCacheElem
cmp ecx, [eax + ResolveCacheElem__pMT]
jne LOCAL_LABEL(main_loop)
// compare the token of the ResolveCacheElem
cmp edx, [eax + ResolveCacheElem__token]
jne LOCAL_LABEL(main_loop)
// success
// decrement success counter and move entry to start if necessary
PREPARE_EXTERNAL_VAR g_dispatch_cache_chain_success_counter, edx
mov ecx, dword ptr [edx]
sub ecx, 1
mov dword ptr [edx], ecx
//@TODO: Perhaps this should be a jl for better branch prediction?
jge LOCAL_LABEL(nopromote)
// be quick to reset the counter so we don't get a bunch of contending threads
mov dword ptr [edx], CALL_STUB_CACHE_INITIAL_SUCCESS_COUNT
// promote the entry to the beginning of the chain
mov ecx, eax
// call VirtualCallStubManager::PromoteChainEntry(ResolveCacheElem*)
call C_FUNC(_ZN22VirtualCallStubManager17PromoteChainEntryEP16ResolveCacheElem)
LOCAL_LABEL(nopromote):
pop ecx
pop edx
// drop the token/siteAddr slots so the caller's return address is on top
add esp, (ChainLookup__caller_ret_addr - ChainLookup__token)
mov eax, [eax + ResolveCacheElem__target]
jmp eax
LOCAL_LABEL(fail):
// restore registers
pop ecx
pop edx
// fall back to the generic resolver
// (bug fix: wrap the target in C_FUNC so it picks up any platform
// symbol decoration, consistent with every other cross-function jump
// in this file)
jmp C_FUNC(ResolveWorkerAsmStub)
NESTED_END ResolveWorkerChainLookupAsmStub, _TEXT
#endif // CROSSGEN_COMPILE
|