Diffstat (limited to 'src/jit/importer.cpp')
-rw-r--r--  src/jit/importer.cpp | 32
1 file changed, 13 insertions(+), 19 deletions(-)
diff --git a/src/jit/importer.cpp b/src/jit/importer.cpp
index 22bc79e7d6..30812737ad 100644
--- a/src/jit/importer.cpp
+++ b/src/jit/importer.cpp
@@ -3533,7 +3533,7 @@ GenTree* Compiler::impIntrinsic(GenTree* newobjThis,
GenTree* retNode = nullptr;
// Under debug and minopts, only expand what is required.
- if (!mustExpand && (opts.compDbgCode || opts.MinOpts()))
+ if (!mustExpand && opts.OptimizationDisabled())
{
*pIntrinsicID = CORINFO_INTRINSIC_Illegal;
return retNode;
@@ -3654,7 +3654,7 @@ GenTree* Compiler::impIntrinsic(GenTree* newobjThis,
case CORINFO_INTRINSIC_StringLength:
op1 = impPopStack().val;
- if (!opts.MinOpts() && !opts.compDbgCode)
+ if (opts.OptimizationEnabled())
{
GenTreeArrLen* arrLen = gtNewArrLen(TYP_INT, op1, OFFSETOF__CORINFO_String__stringLen);
op1 = arrLen;
@@ -6263,7 +6263,7 @@ void Compiler::impImportAndPushBox(CORINFO_RESOLVED_TOKEN* pResolvedToken)
// structs is cheap.
JITDUMP("\nCompiler::impImportAndPushBox -- handling BOX(value class) via");
bool canExpandInline = (boxHelper == CORINFO_HELP_BOX);
- bool optForSize = !exprToBox->IsCall() && (operCls != nullptr) && (opts.compDbgCode || opts.MinOpts());
+ bool optForSize = !exprToBox->IsCall() && (operCls != nullptr) && opts.OptimizationDisabled();
bool expandInline = canExpandInline && !optForSize;
if (expandInline)
@@ -6281,7 +6281,7 @@ void Compiler::impImportAndPushBox(CORINFO_RESOLVED_TOKEN* pResolvedToken)
// and the other you get
// *(temp+4) = expr
- if (opts.MinOpts() || opts.compDbgCode)
+ if (opts.OptimizationDisabled())
{
// For minopts/debug code, try and minimize the total number
// of box temps by reusing an existing temp when possible.
@@ -7508,7 +7508,7 @@ bool Compiler::impIsImplicitTailCallCandidate(
return false;
}
- if (opts.compDbgCode || opts.MinOpts())
+ if (opts.OptimizationDisabled())
{
return false;
}
@@ -10530,7 +10530,7 @@ GenTree* Compiler::impOptimizeCastClassOrIsInst(GenTree* op1, CORINFO_RESOLVED_T
assert(op1->TypeGet() == TYP_REF);
// Don't optimize for minopts or debug codegen.
- if (opts.compDbgCode || opts.MinOpts())
+ if (opts.OptimizationDisabled())
{
return nullptr;
}
@@ -10637,7 +10637,7 @@ GenTree* Compiler::impCastClassOrIsInstToTree(GenTree* op1,
// Don't bother with inline expansion when jit is trying to
// generate code quickly, or the cast is in code that won't run very
// often, or the method already is pretty big.
- if (compCurBB->isRunRarely() || opts.compDbgCode || opts.MinOpts())
+ if (compCurBB->isRunRarely() || opts.OptimizationDisabled())
{
// not worth the code expansion if jitting fast or in a rarely run block
shouldExpandInline = false;
@@ -12616,7 +12616,7 @@ void Compiler::impImportBlockCode(BasicBlock* block)
type = op1->TypeGet();
// brfalse and brtrue is only allowed on I4, refs, and byrefs.
- if (!opts.MinOpts() && !opts.compDbgCode && block->bbJumpDest == block->bbNext)
+ if (opts.OptimizationEnabled() && (block->bbJumpDest == block->bbNext))
{
block->bbJumpKind = BBJ_NONE;
@@ -12850,7 +12850,7 @@ void Compiler::impImportBlockCode(BasicBlock* block)
varTypeIsI(op1->TypeGet()) && varTypeIsI(op2->TypeGet()) ||
varTypeIsFloating(op1->gtType) && varTypeIsFloating(op2->gtType));
- if (!opts.MinOpts() && !opts.compDbgCode && block->bbJumpDest == block->bbNext)
+ if (opts.OptimizationEnabled() && (block->bbJumpDest == block->bbNext))
{
block->bbJumpKind = BBJ_NONE;
@@ -15415,7 +15415,7 @@ void Compiler::impImportBlockCode(BasicBlock* block)
// Check legality and profitability of inline expansion for unboxing.
const bool canExpandInline = (helper == CORINFO_HELP_UNBOX);
- const bool shouldExpandInline = !(compCurBB->isRunRarely() || opts.compDbgCode || opts.MinOpts());
+ const bool shouldExpandInline = !compCurBB->isRunRarely() && opts.OptimizationEnabled();
if (canExpandInline && shouldExpandInline)
{
@@ -16237,7 +16237,7 @@ void Compiler::impImportBlockCode(BasicBlock* block)
}
op1 = impPopStack().val;
- if (!opts.MinOpts() && !opts.compDbgCode)
+ if (opts.OptimizationEnabled())
{
/* Use GT_ARR_LENGTH operator so rng check opts see this */
GenTreeArrLen* arrLen = gtNewArrLen(TYP_INT, op1, OFFSETOF__CORINFO_Array__length);
@@ -20225,13 +20225,7 @@ void Compiler::impDevirtualizeCall(GenTreeCall* call,
assert(call->IsVirtual());
// Bail if not optimizing
- if (opts.MinOpts())
- {
- return;
- }
-
- // Bail if debuggable codegen
- if (opts.compDbgCode)
+ if (opts.OptimizationDisabled())
{
return;
}
@@ -20937,7 +20931,7 @@ void Compiler::addGuardedDevirtualizationCandidate(GenTreeCall* call,
}
// Bail if not optimizing or the call site is very likely cold
- if (compCurBB->isRunRarely() || opts.compDbgCode || opts.MinOpts())
+ if (compCurBB->isRunRarely() || opts.OptimizationDisabled())
{
JITDUMP("NOT Marking call [%06u] as guarded devirtualization candidate -- rare / dbg / minopts\n",
dspTreeID(call));
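
For reference, a minimal sketch of what the new Compiler::Options accessors are assumed to look like. These definitions are not part of this diff (which only touches importer.cpp); the sketch simply assumes they fold the old opts.compDbgCode || opts.MinOpts() predicate into two named helpers:

    // Sketch only: presumed shape of the accessors on the Options struct (opts).
    // Assumption: OptimizationDisabled() is true when generating debuggable code
    // or running under MinOpts, and OptimizationEnabled() is its negation.
    bool OptimizationDisabled() const
    {
        return MinOpts() || compDbgCode;
    }

    bool OptimizationEnabled() const
    {
        return !OptimizationDisabled();
    }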