summaryrefslogtreecommitdiff
path: root/src/inc/genericstackprobe.h
blob: cac6f457b81ed4627ab3f3ce11a230786521f50d (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
//

//
//-----------------------------------------------------------------------------
// Generic Stack Probe Code
// Used to setup stack guards and probes outside the VM tree
//-----------------------------------------------------------------------------

#ifndef __GENERICSTACKPROBE_h__
#define __GENERICSTACKPROBE_h__

#include "staticcontract.h"
#include "predeftlsslot.h"

// Stack probes share infrastructure with contracts; when contracts are
// disabled, probe support is turned off as well.
#if defined(DISABLE_CONTRACTS)
#undef FEATURE_STACK_PROBE
#endif

// Select the flavor of stack-guard support: full debugging support (cookies,
// markers, page protection) in checked builds, lightweight probing otherwise.
#if defined(FEATURE_STACK_PROBE)
#ifdef _DEBUG
#define STACK_GUARDS_DEBUG
#else
#define STACK_GUARDS_RELEASE
#endif
#endif

// SO_INFRASTRUCTURE_CODE(x) compiles 'x' only when stack probing is enabled;
// NO_SO_INFRASTRUCTURE_CODE_ASSERTE(x) asserts 'x' only when it is disabled.
#ifdef FEATURE_STACK_PROBE
#define SO_INFRASTRUCTURE_CODE(x) x
#define NO_SO_INFRASTRUCTURE_CODE_ASSERTE(x)
#else
#define SO_INFRASTRUCTURE_CODE(x)
#define NO_SO_INFRASTRUCTURE_CODE_ASSERTE(x) _ASSERTE(x);
#endif

/* This macro is redefined in stackprobe.h
 * so that code expanded using this macro is present only for files
 * within the VM directory. See StackProbe.h for more details.
 */
#define VM_NO_SO_INFRASTRUCTURE_CODE(x)

// The types of stack validation we support in holders.
enum HolderStackValidation
{
    HSV_NoValidation,                  // Perform no stack validation.
    HSV_ValidateMinimumStackReq,       // Validate against HOLDER_CODE_MINIMUM_STACK_LIMIT pages.
    HSV_ValidateNormalStackReq,        // Validate against HOLDER_CODE_NORMAL_STACK_LIMIT pages.
};

// Used to track transitions into the profiler.  Currently just removes the
// current stack guard for the duration of the call (see REMOVE_STACK_GUARD).
#define REMOVE_STACK_GUARD_FOR_PROFILER_CALL \
        REMOVE_STACK_GUARD

// For AMD64, the page size is 4K, same as X86, but the pointer size is 64 bits, so the
// stack tends to grow a lot faster than X86; probe requests are doubled to compensate.
#ifdef _TARGET_AMD64_
#define ADJUST_PROBE(n)  (2 * (n))
#else 
#define ADJUST_PROBE(n)  (n)
#endif

#if defined(FEATURE_STACK_PROBE)

#ifdef STACK_GUARDS_DEBUG // DAC and non-DAC - all data structures referenced in DAC'ized code
                          // must be included so we can calculate layout. SO probes are not
                          // active in the DAC but the SO probe structures contribute to layout
                          

// This class is used to place a marker upstack and verify that it was not overrun.  It is
// different from the full blown stack probes in that it does not chain with other probes or
// test for stack overflow.  Its sole purpose is to verify stack consumption.
// It is effectively an implicit probe though, because we are guaranteeing that we have
// enough stack to run and will not take an SO.  So we enter SO-intolerant code when
// we install one of these.

class StackMarkerStack;
struct ClrDebugState;

// Places a cookie marker a given number of pages below the current stack
// pointer (SetMarker) and later verifies (CheckMarker) that the cookie was
// not overwritten, i.e. the validated code stayed within its stack budget.
class BaseStackMarker
{
    friend StackMarkerStack;

    ClrDebugState  *m_pDebugState;         // Per-thread debug state this marker is registered with.
    BOOL            m_prevWasSOTolerant;   // Were we SO-tolerant when we came in? 
    BOOL            m_fMarkerSet;          // Has the marker been set?
    BOOL            m_fTemporarilyDisabled;// Has the marker been temporarily disabled?
    BOOL            m_fAddedToStack;       // Has this BaseStackMarker been added to the stack of markers for the thread.
    float           m_numPages;            // Stack budget being validated, in pages (may be fractional).
    UINT_PTR       *m_pMarker;    // Pointer to where to put our marker cookie on the stack.
    BaseStackMarker*m_pPrevious;           // Next-outer marker in the per-thread marker stack.
    BOOL            m_fProtectedStackPage; // Did we page-protect the marker's page in the debugger?
    BOOL            m_fAllowDisabling;     // May this marker be temporarily disabled?

    BaseStackMarker() {};   // no default construction allowed

    // These should only be called by the ClrDebugState.
    void RareDisableMarker();
    void RareReEnableMarker();        

  public:
    BaseStackMarker(float numPages, BOOL fAllowDisabling); 

    // we have this so that the check of the global can be inlined
    // and we don't make the call to CheckMarker unless we need to.
    void CheckForBackoutViolation();

    void SetMarker(float numPages);
    void CheckMarker();
    
    void ProtectMarkerPageInDebugger();
    void UndoPageProtectionInDebugger();
    
};

// Per-thread LIFO stack of BaseStackMarkers, owned by the ClrDebugState.
class StackMarkerStack
{
public:
    // Since this is used from the ClrDebugState which can't have a default constructor,
    // we need to provide an Init method to initialize the instance instead of having a constructor.
    void Init() 
    {
        m_pTopStackMarker = NULL;
        m_fDisabled = FALSE;
    }
            
    void PushStackMarker(BaseStackMarker *pStackMarker);
    BaseStackMarker *PopStackMarker();
    
    // TRUE when no markers are currently installed for this thread.
    BOOL IsEmpty()
    {
        return (m_pTopStackMarker == NULL);
    } 
    // TRUE while marker checking has been suspended for this thread.
    BOOL IsDisabled()
    {
        return m_fDisabled;
    }

    void RareDisableStackMarkers();
    void RareReEnableStackMarkers();

private:
    BaseStackMarker     *m_pTopStackMarker;     // The top of the stack of stack markers for the current thread.
    BOOL                m_fDisabled;            // Set/cleared by RareDisable/RareReEnableStackMarkers.
};

#endif // STACK_GUARDS_DEBUG

#if !defined(DACCESS_COMPILE)

// In debug builds, we redefine DEFAULT_ENTRY_PROBE_AMOUNT to a global static
// so that we can tune the entry point probe size at runtime.
#define DEFAULT_ENTRY_PROBE_SIZE 12
#define DEFAULT_ENTRY_PROBE_AMOUNT DEFAULT_ENTRY_PROBE_SIZE

// Stack budgets (in pages) used by the backout/holder validation markers.
#define BACKOUT_CODE_STACK_LIMIT 4.0
#define HOLDER_CODE_NORMAL_STACK_LIMIT BACKOUT_CODE_STACK_LIMIT
#define HOLDER_CODE_MINIMUM_STACK_LIMIT 0.25

void DontCallDirectlyForceStackOverflow();
void SOBackoutViolation(const char *szFunction, const char *szFile, int lineNum); 
typedef void *EEThreadHandle;
class SOIntolerantTransitionHandler;

// Function-pointer hooks into the probe implementation.  They are declared
// here so code outside the VM tree can probe without linking the VM directly;
// inline callers below treat a NULL hook as "probing inactive".
extern bool g_StackProbingEnabled;
extern void (*g_fpCheckForSOInSOIntolerantCode)();
extern void (*g_fpSetSOIntolerantTransitionMarker)();
extern BOOL (*g_fpDoProbe)(unsigned int n);
extern void (*g_fpHandleSoftStackOverflow)(BOOL fSkipDebugger);

// Once we enter SO-intolerant code, we can never take a hard SO as we will be 
// in an unknown state. SOIntolerantTransitionHandler is used to detect a hard SO in SO-intolerant
// code and to raise a Fatal Error if one occurs.
class SOIntolerantTransitionHandler
{
private:
    bool   m_exceptionOccured;   // NOTE(review): written only by SetNoException here; presumably
                                 // initialized by CtorImpl -- untouched when probing is disabled.
    void * m_pPreviousHandler;   // Presumably the handler in place before this one; managed by
                                 // the out-of-line CtorImpl/DtorImpl -- TODO confirm.
    
public:
    // Ctor/dtor are FORCEINLINE and test only the global flag, so the
    // probing-disabled fast path costs a single branch; all real work lives
    // in the NOINLINE CtorImpl/DtorImpl.
    FORCEINLINE SOIntolerantTransitionHandler() 
    {
        if (g_StackProbingEnabled)
        {
            CtorImpl();
        }
    }

    FORCEINLINE ~SOIntolerantTransitionHandler()
    {
        if (g_StackProbingEnabled)
        {
            DtorImpl();
        }
    }

    NOINLINE void CtorImpl();
    NOINLINE void DtorImpl();

    // Marks the protected region as having completed without an exception.
    void SetNoException()
    {
        m_exceptionOccured = false;
    }

    bool DidExceptionOccur()
    {
        return m_exceptionOccured;
    }
};


// Hook and wrapper invoked after catching a stack overflow exception
// (see HANDLE_STACKOVERFLOW_AFTER_CATCH).
extern void (*g_fpHandleStackOverflowAfterCatch)();
void HandleStackOverflowAfterCatch();

#if defined(STACK_GUARDS_DEBUG)

// Cookie patterns written at the marker location, sized to the platform's
// pointer width.  The DISABLED_ value presumably replaces the cookie while a
// guard/marker is temporarily disabled -- see RareDisableMarker.
#ifdef _WIN64
#define STACK_COOKIE_VALUE 0x0123456789ABCDEF
#define DISABLED_STACK_COOKIE_VALUE 0xDCDCDCDCDCDCDCDC
#else
#define STACK_COOKIE_VALUE 0x01234567
#define DISABLED_STACK_COOKIE_VALUE 0xDCDCDCDC
#endif

// This allows us to adjust the probe amount at run-time in checked builds
#undef DEFAULT_ENTRY_PROBE_AMOUNT
#define DEFAULT_ENTRY_PROBE_AMOUNT g_EntryPointProbeAmount

class BaseStackGuardGeneric;
class BaseStackGuard;

// Debug-build hooks into the guard implementation; the inline wrappers in
// BaseStackGuardGeneric tolerate these being NULL.
extern void (*g_fpRestoreCurrentStackGuard)(BOOL fDisabled);
extern BOOL (*g_fp_BaseStackGuard_RequiresNStackPages)(BaseStackGuardGeneric *pGuard, unsigned int n, BOOL fThrowOnSO);
extern void (*g_fp_BaseStackGuard_CheckStack)(BaseStackGuardGeneric *pGuard);
extern BOOL (*g_fpCheckNStackPagesAvailable)(unsigned int n);
extern BOOL  g_ProtectStackPagesInDebugger;
void RestoreSOToleranceState();
void EnsureSOTolerant();

extern BOOL g_EnableBackoutStackValidation;
extern DWORD g_EntryPointProbeAmount;

//-----------------------------------------------------------------------------
// Returns TRUE when the cookie at the given marker location no longer holds
// STACK_COOKIE_VALUE, i.e. the validated code wrote past the marker.
//-----------------------------------------------------------------------------
inline  BOOL IsMarkerOverrun(UINT_PTR *pMarker)
{
    return *pMarker != STACK_COOKIE_VALUE;
}

// BaseStackMarker variant that runs the backout-violation check automatically
// from its destructor, so validation happens on every exit path of the scope.
// Always constructed as disablable (fAllowDisabling == TRUE).
class AutoCleanupStackMarker : public BaseStackMarker
{
public:
    DEBUG_NOINLINE AutoCleanupStackMarker(float numPages) : 
        BaseStackMarker(numPages, TRUE)
    {
        SCAN_SCOPE_BEGIN;
        ANNOTATION_FN_SO_INTOLERANT;
    }

    DEBUG_NOINLINE ~AutoCleanupStackMarker()
    {
        SCAN_SCOPE_END;
        CheckForBackoutViolation();
    }
};

// Validates the enclosing scope against the standard backout stack budget.
// NOTE: deliberately declares a named local rather than a do/while block so
// the marker's destructor check runs at the end of the *enclosing* scope.
#define VALIDATE_BACKOUT_STACK_CONSUMPTION \
    AutoCleanupStackMarker __stackMarker(ADJUST_PROBE(BACKOUT_CODE_STACK_LIMIT));

// Same as above, with a caller-supplied page budget.
#define VALIDATE_BACKOUT_STACK_CONSUMPTION_FOR(numPages) \
    AutoCleanupStackMarker __stackMarker(ADJUST_PROBE(numPages));

// Non-disablable marker (fAllowDisabling == FALSE).  BaseStackMarker has no
// checking destructor, so these must be paired with the UNSAFE_END_ macro,
// which performs the check explicitly.
#define UNSAFE_BEGIN_VALIDATE_BACKOUT_STACK_CONSUMPTION_NO_DISABLE \
    BaseStackMarker __stackMarkerNoDisable(ADJUST_PROBE(BACKOUT_CODE_STACK_LIMIT), FALSE);

#define UNSAFE_BEGIN_VALIDATE_BACKOUT_STACK_CONSUMPTION_NO_DISABLE_FOR(numPages) \
    BaseStackMarker __stackMarkerNoDisable(ADJUST_PROBE(numPages), FALSE);

#define UNSAFE_END_VALIDATE_BACKOUT_STACK_CONSUMPTION_NO_DISABLE \
        __stackMarkerNoDisable.CheckForBackoutViolation(); 

// Chooses the holder stack budget from the validation type; HSV_NoValidation
// must have been filtered out by the caller (asserted here).
#define VALIDATE_HOLDER_STACK_CONSUMPTION_FOR_TYPE(validationType) \
    _ASSERTE(validationType != HSV_NoValidation);                  \
    AutoCleanupStackMarker __stackMarker(                          \
        ADJUST_PROBE(validationType == HSV_ValidateNormalStackReq ? HOLDER_CODE_NORMAL_STACK_LIMIT : HOLDER_CODE_MINIMUM_STACK_LIMIT));

// Scoped helper that disables backout stack validation for its lifetime
// (used by DISABLE_BACKOUT_STACK_VALIDATION).  m_fAlreadyDisabled presumably
// lets the destructor restore the prior state -- ctor/dtor are out of line.
class AutoCleanupDisableBackoutStackValidation
{
  public:
    AutoCleanupDisableBackoutStackValidation();
    ~AutoCleanupDisableBackoutStackValidation();
    
private:
    BOOL m_fAlreadyDisabled;   // Was validation already disabled when we were constructed?

};

// This macro disables the backout stack validation in the current scope. It should 
// only be used in very rare situations. If you think you might have such a situation, 
// please talk to the stack overflow devs before using it.
// (Fixed the macro-local variable spelling: "Bacout" -> "Backout", matching the
// macro and class names; the identifier is internal to the expansion.)
#define DISABLE_BACKOUT_STACK_VALIDATION \
    AutoCleanupDisableBackoutStackValidation __disableBackoutStackValidation;

// In debug mode, we want to do a little more work on this transition to note the transition in the thread.
class DebugSOIntolerantTransitionHandler : public SOIntolerantTransitionHandler
{
    BOOL m_prevSOTolerantState;     // SO-tolerance state on entry; presumably restored by the dtor.
    ClrDebugState* m_clrDebugState; // Per-thread debug state the transition is recorded in.

  public: 
    DebugSOIntolerantTransitionHandler(); 
    ~DebugSOIntolerantTransitionHandler();
};

// This is the base class structure for our probe infrastructure.  We declare it here
// so that we can properly declare instances outside of the VM tree.  But we only do the
// probes when we have a managed thread.
class BaseStackGuardGeneric
{
public:
    // Lifecycle state of this guard (maintained for debugging).
    enum
    {
        cPartialInit,       // Not yet initialized
        cInit,              // Initialized and installed
        cUnwound,           // Unwound on a normal path (used for debugging)
        cEHUnwound          // Unwound on an exception path (used for debugging)
    } m_eInitialized;
        
    // *** Following fields must not move.  The fault injection framework depends on them.
    BaseStackGuard *m_pPrevGuard; // Previous guard for this thread.
    UINT_PTR       *m_pMarker;    // Pointer to where to put our marker cookie on the stack.
    unsigned int    m_numPages;        // space needed, specified in number of pages
    BOOL            m_isBoundaryGuard;  // used to mark when we've left the EE
    BOOL            m_fDisabled;       // Used to enable/disable stack guard


    // *** End of fault injection-dependent fields

    // The following fields are really here to provide us with some nice debugging information.
    const char     *m_szFunction;
    const char     *m_szFile;
    unsigned int    m_lineNum;
    const char     *m_szNextFunction;       // Name of the probe that came after us.
    const char     *m_szNextFile;
    unsigned int    m_nextLineNum;
    DWORD           m_UniqueId;             // Initialized to (DWORD)-1, i.e. no id assigned yet.
    unsigned int    m_depth;                // How deep is this guard in the list of guards for this thread?
    BOOL            m_fProtectedStackPage;  // TRUE if we protected a stack page with PAGE_NOACCESS.
    BOOL            m_fEHInProgress;        // Is an EH in progress?  This is cleared on a catch.
    BOOL            m_exceptionOccured;     // Did an exception occur through this probe?

protected:
    // Leaves all members uninitialized; for derived classes that do their own setup.
    BaseStackGuardGeneric()
    {
    }

public:
    // NOTE(review): the member-init-list order below does not match declaration
    // order; members are initialized in declaration order regardless.  All the
    // initializers are independent constants, so this is benign.
    BaseStackGuardGeneric(const char *szFunction, const char *szFile, unsigned int lineNum) :
        m_pPrevGuard(NULL), m_pMarker(NULL), 
        m_szFunction(szFunction), m_szFile(szFile), m_lineNum(lineNum),
        m_szNextFunction(NULL), m_szNextFile(NULL), m_nextLineNum(0),
        m_fProtectedStackPage(FALSE), m_UniqueId(-1), m_numPages(0), 
        m_eInitialized(cPartialInit), m_fDisabled(FALSE),
        m_isBoundaryGuard(FALSE),
        m_fEHInProgress(FALSE),     
        m_exceptionOccured(FALSE)
    { 
        STATIC_CONTRACT_LEAF;
    }

    // Forwards to the registered probe hook to reserve n pages behind this
    // guard; trivially succeeds (returns TRUE) when no hook is registered.
    BOOL RequiresNStackPages(unsigned int n, BOOL fThrowOnSO = TRUE)
    {
        if (g_fp_BaseStackGuard_RequiresNStackPages == NULL)
        {
            return TRUE;
        }
        return g_fp_BaseStackGuard_RequiresNStackPages(this, n, fThrowOnSO);
    }

    // Equivalent to RequiresNStackPages(n, TRUE).
    BOOL RequiresNStackPagesThrowing(unsigned int n)
    {
        if (g_fp_BaseStackGuard_RequiresNStackPages == NULL)
        {
            return TRUE;
        }
        return g_fp_BaseStackGuard_RequiresNStackPages(this, n, TRUE);
    }

    // Equivalent to RequiresNStackPages(n, FALSE).
    BOOL RequiresNStackPagesNoThrow(unsigned int n)
    {
        if (g_fp_BaseStackGuard_RequiresNStackPages == NULL)
        {
            return TRUE;
        }
        return g_fp_BaseStackGuard_RequiresNStackPages(this, n, FALSE);
    }

    // Dispatches only when the guard reached the installed state; cInit is
    // presumably set by the probe implementation, which implies the check
    // hook is non-NULL whenever this condition holds -- TODO confirm.
    void CheckStack()
    {
        if (m_eInitialized == cInit)
        {
            g_fp_BaseStackGuard_CheckStack(this);
        }
    }

    // Marks the guarded region as having completed without an exception.
    void SetNoException()
    {
        m_exceptionOccured = FALSE;
    }

    BOOL DidExceptionOccur()
    {
        return m_exceptionOccured;
    }

    BOOL Enabled()
    {
        return !m_fDisabled;
    }

    void DisableGuard()
    {
        // As long as we don't have threads mucking with other thread's stack
        // guards, we don't need to synchronize this.
        m_fDisabled = TRUE;
    }

    void EnableGuard()
    {
        // As long as we don't have threads mucking with other thread's stack
        // guards, we don't need to synchronize this.
        m_fDisabled = FALSE;
    }

    
};

// Scoped object used by REMOVE_STACK_GUARD to take down the current stack
// guard; presumably restores it on destruction unless NeverRestoreGuard was
// called (ctor/dtor are defined out of line).
class StackGuardDisabler
{
    BOOL m_fDisabledGuard;   // Did we actually disable a guard on construction?

public:
    StackGuardDisabler();
    ~StackGuardDisabler();
    void NeverRestoreGuard();

};



// Derived version; adds a dtor that automatically calls CheckStack -- more convenient, but can't be used with SEH.
class AutoCleanupStackGuardGeneric : public BaseStackGuardGeneric
{
protected:
    // Uninitialized; for derived classes that perform their own setup.
    AutoCleanupStackGuardGeneric()
    {
    }
    
public:
    AutoCleanupStackGuardGeneric(const char *szFunction, const char *szFile, unsigned int lineNum) :
        BaseStackGuardGeneric(szFunction, szFile, lineNum)
    { 
        STATIC_CONTRACT_LEAF;
    }

    // Validates the guard's cookie on every exit path from the scope.
    ~AutoCleanupStackGuardGeneric()
    { 
        STATIC_CONTRACT_WRAPPER;
        CheckStack(); 
    }
};


// Used to remove stack guard... (kind of like a poor man's BEGIN_SO_TOLERANT)
#define REMOVE_STACK_GUARD \
        StackGuardDisabler __guardDisable;

// Used to transition into intolerant code when handling a SO.  Open-brace
// macro: must be closed by END_SO_INTOLERANT_CODE_NOPROBE in the same function.
#define BEGIN_SO_INTOLERANT_CODE_NOPROBE                                                  \
    {                                                                                     \
        DebugSOIntolerantTransitionHandler __soIntolerantTransitionHandler;               \
        /* work around unreachable code warning */                                        \
        if (true)                                                                         \
        {                                                                                 \
            DEBUG_ASSURE_NO_RETURN_BEGIN(SO_INTOLERANT)

#define END_SO_INTOLERANT_CODE_NOPROBE                              \
            ;                                                       \
            DEBUG_ASSURE_NO_RETURN_END(SO_INTOLERANT)               \
        }                                                           \
        __soIntolerantTransitionHandler.SetNoException();           \
    }                                                               \
            


// Probes for g_EntryPointProbeAmount pages without throwing; on probe failure
// runs ActionOnSO instead of the guarded body.  Close with END_SO_INTOLERANT_CODE.
#define BEGIN_SO_INTOLERANT_CODE_NO_THROW_CHECK_THREAD(ActionOnSO)                        \
    {                                                                                       \
        AutoCleanupStackGuardGeneric stack_guard_XXX(__FUNCTION__, __FILE__, __LINE__);         \
        if (! stack_guard_XXX.RequiresNStackPagesNoThrow(ADJUST_PROBE(g_EntryPointProbeAmount))) \
        {                                                                                   \
            ActionOnSO;                                                                     \
        }                                                                                   \
        else                                                                                \
        {                                                                                   \
            DebugSOIntolerantTransitionHandler __soIntolerantTransitionHandler;             \
            ANNOTATION_SO_PROBE_BEGIN(DEFAULT_ENTRY_PROBE_AMOUNT);                          \
            if (true)                                                                       \
            {                                                                               \
                DEBUG_ASSURE_NO_RETURN_BEGIN(SO_INTOLERANT)


#define END_SO_INTOLERANT_CODE                                                              \
                ;                                                                           \
                DEBUG_ASSURE_NO_RETURN_END(SO_INTOLERANT)                                   \
            }                                                                               \
            ANNOTATION_SO_PROBE_END;                                                        \
            __soIntolerantTransitionHandler.SetNoException();                               \
            stack_guard_XXX.SetNoException();                                               \
        }                                                                                   \
    }                                                                                       \


// Asserts we are SO-tolerant, then forces an SO if insufficient stack remains.
#define BEGIN_SO_INTOLERANT_CODE_NO_THROW_CHECK_THREAD_FORCE_SO()                           \
    EnsureSOTolerant();                                                                     \
    BEGIN_SO_INTOLERANT_CODE_NO_THROW_CHECK_THREAD(DontCallDirectlyForceStackOverflow());   \
    

// Restores the SO-tolerance state and the marker for the current guard if any
#define RESTORE_SO_TOLERANCE_STATE \
    RestoreSOToleranceState();

#define HANDLE_STACKOVERFLOW_AFTER_CATCH \
    HandleStackOverflowAfterCatch()

#elif defined(STACK_GUARDS_RELEASE)

// Release flavor: marker-based backout validation is debug-only, so these all
// compile away to nothing.
#define VALIDATE_BACKOUT_STACK_CONSUMPTION
#define VALIDATE_BACKOUT_STACK_CONSUMPTION_FOR
#define UNSAFE_BEGIN_VALIDATE_BACKOUT_STACK_CONSUMPTION_NO_DISABLE
#define UNSAFE_BEGIN_VALIDATE_BACKOUT_STACK_CONSUMPTION_NO_DISABLE_FOR(numPages)
#define UNSAFE_END_VALIDATE_BACKOUT_STACK_CONSUMPTION_NO_DISABLE
#define VALIDATE_HOLDER_STACK_CONSUMPTION_FOR_TYPE(validationType)
#define RESTORE_SO_TOLERANCE_STATE
#define HANDLE_STACKOVERFLOW_AFTER_CATCH \
    HandleStackOverflowAfterCatch()
#define DISABLE_BACKOUT_STACK_VALIDATION
#define BACKOUT_STACK_VALIDATION_VIOLATION
#define BEGIN_SO_INTOLERANT_CODE_NOPROBE
#define END_SO_INTOLERANT_CODE_NOPROBE
#define REMOVE_STACK_GUARD

// Release probe: just the fast g_fpDoProbe check.  On failure run ActionOnSO,
// otherwise enter the SO-intolerant region.  Close with END_SO_INTOLERANT_CODE.
#define BEGIN_SO_INTOLERANT_CODE_NO_THROW_CHECK_THREAD(ActionOnSO)                          \
    {                                                                                       \
        if (g_StackProbingEnabled && !g_fpDoProbe(ADJUST_PROBE(DEFAULT_ENTRY_PROBE_AMOUNT)))\
        {                                                                                   \
            ActionOnSO;                                                                     \
        } else {                                                                            \
            SOIntolerantTransitionHandler __soIntolerantTransitionHandler;                  \
            /* work around unreachable code warning */                                      \
            if (true)                                                                       \
            {                                                                               \
                DEBUG_ASSURE_NO_RETURN_BEGIN(SO_INTOLERANT)

#define BEGIN_SO_INTOLERANT_CODE_NO_THROW_CHECK_THREAD_FORCE_SO()                           \
    BEGIN_SO_INTOLERANT_CODE_NO_THROW_CHECK_THREAD(DontCallDirectlyForceStackOverflow());   \

#define END_SO_INTOLERANT_CODE                                                              \
                ;                                                                           \
                DEBUG_ASSURE_NO_RETURN_END(SO_INTOLERANT)                                   \
            }                                                                               \
            __soIntolerantTransitionHandler.SetNoException();                               \
        }                                                                                   \
    }

#endif

#endif // !DACCESS_COMPILE
#endif // FEATURE_STACK_PROBE

// if the feature is off or we are compiling for DAC, disable all the probes
#if !defined(FEATURE_STACK_PROBE) || defined(DACCESS_COMPILE)

// All probe and validation macros become no-ops in this configuration.
#define VALIDATE_BACKOUT_STACK_CONSUMPTION
#define VALIDATE_BACKOUT_STACK_CONSUMPTION_FOR
#define UNSAFE_BEGIN_VALIDATE_BACKOUT_STACK_CONSUMPTION_NO_DISABLE
#define UNSAFE_BEGIN_VALIDATE_BACKOUT_STACK_CONSUMPTION_NO_DISABLE_FOR(numPages)
#define UNSAFE_END_VALIDATE_BACKOUT_STACK_CONSUMPTION_NO_DISABLE
#define VALIDATE_HOLDER_STACK_CONSUMPTION_FOR_TYPE(validationType)
#define BEGIN_SO_INTOLERANT_CODE_NO_THROW_CHECK_THREAD(ActionOnSO)
#define BEGIN_SO_INTOLERANT_CODE_NO_THROW_CHECK_THREAD_FORCE_SO()
#define END_SO_INTOLERANT_CODE
#define RESTORE_SO_TOLERANCE_STATE

#define HANDLE_STACKOVERFLOW_AFTER_CATCH

#define DISABLE_BACKOUT_STACK_VALIDATION
#define BACKOUT_STACK_VALIDATION_VIOLATION
#define BEGIN_SO_INTOLERANT_CODE_NOPROBE
#define END_SO_INTOLERANT_CODE_NOPROBE
#define REMOVE_STACK_GUARD

// Probe size is 0 as Stack Overflow probing is not enabled
#define DEFAULT_ENTRY_PROBE_AMOUNT 0

#define BACKOUT_CODE_STACK_LIMIT 0

#endif //!FEATURE_STACK_PROBE || DACCESS_COMPILE

#endif  // __GENERICSTACKPROBE_h__