summary refs log tree commit diff
path: root/src/jit/importer.cpp
diff options
context:
space:
mode:
Diffstat (limited to 'src/jit/importer.cpp')
-rw-r--r-- src/jit/importer.cpp 67
1 files changed, 53 insertions, 14 deletions
diff --git a/src/jit/importer.cpp b/src/jit/importer.cpp
index a991598258..74018c48d4 100644
--- a/src/jit/importer.cpp
+++ b/src/jit/importer.cpp
@@ -2402,7 +2402,7 @@ void Compiler::impSpillLclRefs(ssize_t lclNum)
* Returns the basic block of the actual handler.
*/
-BasicBlock* Compiler::impPushCatchArgOnStack(BasicBlock* hndBlk, CORINFO_CLASS_HANDLE clsHnd)
+BasicBlock* Compiler::impPushCatchArgOnStack(BasicBlock* hndBlk, CORINFO_CLASS_HANDLE clsHnd, bool isSingleBlockFilter)
{
// Do not inject the basic block twice on reimport. This should be
// hit only under JIT stress. See if the block is the one we injected.
@@ -2440,8 +2440,14 @@ BasicBlock* Compiler::impPushCatchArgOnStack(BasicBlock* hndBlk, CORINFO_CLASS_H
* moved around since it is tied to a fixed location (EAX) */
arg->gtFlags |= GTF_ORDER_SIDEEFF;
+#if defined(JIT32_GCENCODER)
+ const bool forceInsertNewBlock = isSingleBlockFilter || compStressCompile(STRESS_CATCH_ARG, 5);
+#else
+ const bool forceInsertNewBlock = compStressCompile(STRESS_CATCH_ARG, 5);
+#endif // defined(JIT32_GCENCODER)
+
/* Spill GT_CATCH_ARG to a temp if there are jumps to the beginning of the handler */
- if (hndBlk->bbRefs > 1 || compStressCompile(STRESS_CATCH_ARG, 5))
+ if (hndBlk->bbRefs > 1 || forceInsertNewBlock)
{
if (hndBlk->bbRefs == 1)
{
@@ -3520,6 +3526,10 @@ GenTreePtr Compiler::impIntrinsic(GenTreePtr newobjThis,
gtNewIconNode(offsetof(CORINFO_String, stringLen), TYP_I_IMPL));
op1 = gtNewOperNode(GT_IND, TYP_INT, op1);
}
+
+ // Getting the length of a null string should throw
+ op1->gtFlags |= GTF_EXCEPT;
+
retNode = op1;
break;
@@ -6047,6 +6057,11 @@ GenTreePtr Compiler::impImportStaticFieldAccess(CORINFO_RESOLVED_TOKEN* pResolve
// In future, it may be better to just create the right tree here instead of folding it later.
op1 = gtNewFieldRef(lclTyp, pResolvedToken->hField);
+ if (pFieldInfo->fieldFlags & CORINFO_FLG_FIELD_INITCLASS)
+ {
+ op1->gtFlags |= GTF_FLD_INITCLASS;
+ }
+
if (pFieldInfo->fieldFlags & CORINFO_FLG_FIELD_STATIC_IN_HEAP)
{
op1->gtType = TYP_REF; // points at boxed object
@@ -6078,14 +6093,16 @@ GenTreePtr Compiler::impImportStaticFieldAccess(CORINFO_RESOLVED_TOKEN* pResolve
FieldSeqNode* fldSeq = GetFieldSeqStore()->CreateSingleton(pResolvedToken->hField);
/* Create the data member node */
- if (pFldAddr == nullptr)
+ op1 = gtNewIconHandleNode(pFldAddr == nullptr ? (size_t)fldAddr : (size_t)pFldAddr, GTF_ICON_STATIC_HDL,
+ fldSeq);
+
+ if (pFieldInfo->fieldFlags & CORINFO_FLG_FIELD_INITCLASS)
{
- op1 = gtNewIconHandleNode((size_t)fldAddr, GTF_ICON_STATIC_HDL, fldSeq);
+ op1->gtFlags |= GTF_ICON_INITCLASS;
}
- else
- {
- op1 = gtNewIconHandleNode((size_t)pFldAddr, GTF_ICON_STATIC_HDL, fldSeq);
+ if (pFldAddr != nullptr)
+ {
// There are two cases here, either the static is RVA based,
// in which case the type of the FIELD node is not a GC type
// and the handle to the RVA is a TYP_I_IMPL. Or the FIELD node is
@@ -7325,8 +7342,7 @@ var_types Compiler::impImportCall(OPCODE opcode,
// instParam.
instParam = gtNewIconNode(0, TYP_REF);
}
-
- if (!exactContextNeedsRuntimeLookup)
+ else if (!exactContextNeedsRuntimeLookup)
{
#ifdef FEATURE_READYTORUN_COMPILER
if (opts.IsReadyToRun())
@@ -14806,6 +14822,11 @@ void Compiler::impImportBlockCode(BasicBlock* block)
// Could point anywhere, example a boxed class static int
op1->gtFlags |= GTF_IND_TGTANYWHERE | GTF_GLOB_REF;
assertImp(varTypeIsArithmetic(op1->gtType));
+
+ if (prefixFlags & PREFIX_UNALIGNED)
+ {
+ op1->gtFlags |= GTF_IND_UNALIGNED;
+ }
}
else
{
@@ -15616,7 +15637,7 @@ void Compiler::impVerifyEHBlock(BasicBlock* block, bool isTryStart)
// push catch arg the stack, spill to a temp if necessary
// Note: can update HBtab->ebdHndBeg!
- hndBegBB = impPushCatchArgOnStack(hndBegBB, clsHnd);
+ hndBegBB = impPushCatchArgOnStack(hndBegBB, clsHnd, false);
}
// Queue up the handler for importing
@@ -15637,7 +15658,8 @@ void Compiler::impVerifyEHBlock(BasicBlock* block, bool isTryStart)
// push catch arg the stack, spill to a temp if necessary
// Note: can update HBtab->ebdFilter!
- filterBB = impPushCatchArgOnStack(filterBB, impGetObjectClass());
+ const bool isSingleBlockFilter = (filterBB->bbNext == hndBegBB);
+ filterBB = impPushCatchArgOnStack(filterBB, impGetObjectClass(), isSingleBlockFilter);
impImportBlockPending(filterBB);
}
@@ -17954,8 +17976,12 @@ GenTreePtr Compiler::impInlineFetchArg(unsigned lclNum, InlArgInfo* inlArgInfo,
op1 = argInfo.argNode;
argInfo.argTmpNum = op1->gtLclVarCommon.gtLclNum;
- // Use an equivalent copy if this is the second or subsequent use.
- if (argInfo.argIsUsed)
+ // Use an equivalent copy if this is the second or subsequent
+ // use, or if we need to retype.
+ //
+ // Note argument type mismatches that prevent inlining should
+ // have been caught in impInlineInitVars.
+ if (argInfo.argIsUsed || (op1->TypeGet() != lclTyp))
{
assert(op1->gtOper == GT_LCL_VAR);
assert(lclNum == op1->gtLclVar.gtLclILoffs);
@@ -18568,7 +18594,20 @@ void Compiler::impDevirtualizeCall(GenTreeCall* call,
#if defined(DEBUG)
// Validate that callInfo has up to date method flags
const DWORD freshBaseMethodAttribs = info.compCompHnd->getMethodAttribs(baseMethod);
- assert(freshBaseMethodAttribs == baseMethodAttribs);
+
+ // All the base method attributes should agree, save that
+ // CORINFO_FLG_DONT_INLINE may have changed from 0 to 1
+ // because of concurrent jitting activity.
+ //
+ // Note we don't look at this particular flag bit below, and
+ // later on (if we do try and inline) we will rediscover why
+ // the method can't be inlined, so there's no danger here in
+ // seeing this particular flag bit in different states between
+ // the cached and fresh values.
+ if ((freshBaseMethodAttribs & ~CORINFO_FLG_DONT_INLINE) != (baseMethodAttribs & ~CORINFO_FLG_DONT_INLINE))
+ {
+ assert(!"mismatched method attributes");
+ }
#endif // DEBUG
}