author     Andy Ayers <andya@microsoft.com>  2018-07-18 14:35:08 -0700
committer  GitHub <noreply@github.com>       2018-07-18 14:35:08 -0700
commit     b2842bbef5162383f7bf67de2976f2c21dbfdb1d (patch)
tree       3b03b3f0a166eb8518804b99913ef0311322fa2f /src/jit
parent     4037baf5675ec275c624395ef2b0337cf58836cd (diff)
JIT: force all local var ref counts to be accessed via API (#18979)
This is a preparatory change for auditing and controlling how local variable ref counts are observed and manipulated. See #18969 for context.

No diffs seen locally. No throughput (TP) impact expected. There is a small chance we may see some asserts in broader testing, as there were places in the original code where local ref counts were incremented without checking for possible overflow. The new APIs will assert in these overflow cases.
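For context, here is a minimal standalone sketch of the accessor pattern this patch adds to LclVarDsc (see the compiler.h hunk below): the raw count fields become private, and all updates go through helpers that assert on overflow and underflow. The class name, the use of plain <cassert>, and the small driver are simplified stand-ins for illustration only; the real code lives on LclVarDsc and uses the JIT's own assert machinery.

    #include <cassert>
    #include <cstdio>

    class RefCountedLocal
    {
    private:
        unsigned short m_lvRefCnt;    // unweighted (real) reference count
        unsigned       m_lvRefCntWtd; // weighted reference count

    public:
        RefCountedLocal() : m_lvRefCnt(0), m_lvRefCntWtd(0)
        {
        }

        unsigned short lvRefCnt() const
        {
            return m_lvRefCnt;
        }

        void incLvRefCnt(unsigned short delta)
        {
            unsigned short oldRefCnt = m_lvRefCnt;
            m_lvRefCnt += delta;
            assert(m_lvRefCnt >= oldRefCnt); // fires if the unsigned short wrapped
        }

        void decLvRefCnt(unsigned short delta)
        {
            assert(m_lvRefCnt >= delta); // fires on underflow
            m_lvRefCnt -= delta;
        }

        unsigned lvRefCntWtd() const
        {
            return m_lvRefCntWtd;
        }

        void incLvRefCntWtd(unsigned delta)
        {
            unsigned oldRefCntWtd = m_lvRefCntWtd;
            m_lvRefCntWtd += delta;
            assert(m_lvRefCntWtd >= oldRefCntWtd); // fires if the unsigned wrapped
        }
    };

    int main()
    {
        RefCountedLocal dsc;
        dsc.incLvRefCnt(1);
        dsc.incLvRefCnt(1);
        dsc.decLvRefCnt(1);
        dsc.incLvRefCntWtd(100);
        printf("refCnt = %u, refCntWtd = %u\n", (unsigned)dsc.lvRefCnt(), dsc.lvRefCntWtd());
        return 0;
    }

Note the design trade-off visible in the diff: the new raw helpers assert on wraparound in debug builds, while the existing incRefCnts/decRefCnts logic in compiler.hpp keeps its saturating behavior (capping the weighted count at ULONG_MAX and refusing to overflow the unsigned short count).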
Diffstat (limited to 'src/jit')
-rw-r--r--  src/jit/assertionprop.cpp    4
-rw-r--r--  src/jit/codegencommon.cpp   10
-rw-r--r--  src/jit/compiler.h          58
-rw-r--r--  src/jit/compiler.hpp        38
-rw-r--r--  src/jit/emit.cpp             2
-rw-r--r--  src/jit/gcencode.cpp         4
-rw-r--r--  src/jit/gcinfo.cpp           2
-rw-r--r--  src/jit/gentree.cpp          2
-rw-r--r--  src/jit/lclvars.cpp         54
-rw-r--r--  src/jit/liveness.cpp         8
-rw-r--r--  src/jit/lower.cpp           14
-rw-r--r--  src/jit/lowerxarch.cpp       2
-rw-r--r--  src/jit/lsra.cpp            24
-rw-r--r--  src/jit/lsrabuild.cpp        4
-rw-r--r--  src/jit/morph.cpp           48
-rw-r--r--  src/jit/optcse.cpp          12
-rw-r--r--  src/jit/regalloc.cpp         8
-rw-r--r--  src/jit/scopeinfo.cpp        2
18 files changed, 174 insertions, 122 deletions
diff --git a/src/jit/assertionprop.cpp b/src/jit/assertionprop.cpp
index 4e4924fb7d..93ca437519 100644
--- a/src/jit/assertionprop.cpp
+++ b/src/jit/assertionprop.cpp
@@ -130,7 +130,7 @@ void Compiler::optAddCopies()
}
// We require that the weighted ref count be significant.
- if (varDsc->lvRefCntWtd <= (BB_LOOP_WEIGHT * BB_UNITY_WEIGHT / 2))
+ if (varDsc->lvRefCntWtd() <= (BB_LOOP_WEIGHT * BB_UNITY_WEIGHT / 2))
{
continue;
}
@@ -144,7 +144,7 @@ void Compiler::optAddCopies()
BlockSet paramImportantUseDom(BlockSetOps::MakeFull(this));
// This will be threshold for determining heavier-than-average uses
- unsigned paramAvgWtdRefDiv2 = (varDsc->lvRefCntWtd + varDsc->lvRefCnt / 2) / (varDsc->lvRefCnt * 2);
+ unsigned paramAvgWtdRefDiv2 = (varDsc->lvRefCntWtd() + varDsc->lvRefCnt() / 2) / (varDsc->lvRefCnt() * 2);
bool paramFoundImportantUse = false;
diff --git a/src/jit/codegencommon.cpp b/src/jit/codegencommon.cpp
index bbe1dc7904..8572f288ca 100644
--- a/src/jit/codegencommon.cpp
+++ b/src/jit/codegencommon.cpp
@@ -3678,7 +3678,7 @@ void CodeGen::genFnPrologCalleeRegArgs(regNumber xtraReg, bool* pXtraRegClobbere
// For LSRA, it may not be in regArgMaskLive if it has a zero
// refcnt. This is in contrast with the non-LSRA case in which all
// non-tracked args are assumed live on entry.
- noway_assert((varDsc->lvRefCnt == 0) || (varDsc->lvType == TYP_STRUCT) ||
+ noway_assert((varDsc->lvRefCnt() == 0) || (varDsc->lvType == TYP_STRUCT) ||
(varDsc->lvAddrExposed && compiler->info.compIsVarArgs) ||
(varDsc->lvAddrExposed && compiler->opts.compUseSoftFP));
#endif // !_TARGET_X86_
@@ -3979,7 +3979,7 @@ void CodeGen::genFnPrologCalleeRegArgs(regNumber xtraReg, bool* pXtraRegClobbere
if (!varDsc->lvOnFrame)
{
- noway_assert(varDsc->lvRefCnt == 0);
+ noway_assert(varDsc->lvRefCnt() == 0);
}
else
{
@@ -4638,7 +4638,7 @@ void CodeGen::genCheckUseBlockInit()
if (!varDsc->lvIsInReg() && !varDsc->lvOnFrame)
{
- noway_assert(varDsc->lvRefCnt == 0);
+ noway_assert(varDsc->lvRefCnt() == 0);
continue;
}
@@ -7876,7 +7876,7 @@ void CodeGen::genFnProlog()
if (!varDsc->lvIsInReg() && !varDsc->lvOnFrame)
{
- noway_assert(varDsc->lvRefCnt == 0);
+ noway_assert(varDsc->lvRefCnt() == 0);
continue;
}
@@ -8492,7 +8492,7 @@ void CodeGen::genFnProlog()
// (our argument pointer register has a refcount > 0).
unsigned argsStartVar = compiler->lvaVarargsBaseOfStkArgs;
- if (compiler->info.compIsVarArgs && compiler->lvaTable[argsStartVar].lvRefCnt > 0)
+ if (compiler->info.compIsVarArgs && compiler->lvaTable[argsStartVar].lvRefCnt() > 0)
{
varDsc = &compiler->lvaTable[argsStartVar];
diff --git a/src/jit/compiler.h b/src/jit/compiler.h
index 2248b32649..e5d4d67efe 100644
--- a/src/jit/compiler.h
+++ b/src/jit/compiler.h
@@ -591,15 +591,65 @@ public:
regMaskSmall lvPrefReg; // set of regs it prefers to live in
unsigned short lvVarIndex; // variable tracking index
- unsigned short lvRefCnt; // unweighted (real) reference count. For implicit by reference
+
+private:
+ unsigned short m_lvRefCnt; // unweighted (real) reference count. For implicit by reference
// parameters, this gets hijacked from fgMarkImplicitByRefArgs
// through fgMarkDemotedImplicitByRefArgs, to provide a static
// appearance count (computed during address-exposed analysis)
// that fgMakeOutgoingStructArgCopy consults during global morph
// to determine if eliding its copy is legal.
- unsigned lvRefCntWtd; // weighted reference count
- int lvStkOffs; // stack offset of home
- unsigned lvExactSize; // (exact) size of the type in bytes
+ unsigned m_lvRefCntWtd; // weighted reference count
+
+public:
+ unsigned short lvRefCnt() const
+ {
+ return m_lvRefCnt;
+ }
+
+ void incLvRefCnt(unsigned short delta)
+ {
+ unsigned short oldRefCnt = m_lvRefCnt;
+ m_lvRefCnt += delta;
+ assert(m_lvRefCnt >= oldRefCnt);
+ }
+
+ void decLvRefCnt(unsigned short delta)
+ {
+ assert(m_lvRefCnt >= delta);
+ m_lvRefCnt -= delta;
+ }
+
+ void setLvRefCnt(unsigned short newValue)
+ {
+ m_lvRefCnt = newValue;
+ }
+
+ unsigned lvRefCntWtd() const
+ {
+ return m_lvRefCntWtd;
+ }
+
+ void incLvRefCntWtd(unsigned delta)
+ {
+ unsigned oldRefCntWtd = m_lvRefCntWtd;
+ m_lvRefCntWtd += delta;
+ assert(m_lvRefCntWtd >= oldRefCntWtd);
+ }
+
+ void decLvRefCntWtd(unsigned delta)
+ {
+ assert(m_lvRefCntWtd >= delta);
+ m_lvRefCntWtd -= delta;
+ }
+
+ void setLvRefCntWtd(unsigned newValue)
+ {
+ m_lvRefCntWtd = newValue;
+ }
+
+ int lvStkOffs; // stack offset of home
+ unsigned lvExactSize; // (exact) size of the type in bytes
// Is this a promoted struct?
// This method returns true only for structs (including SIMD structs), not for
diff --git a/src/jit/compiler.hpp b/src/jit/compiler.hpp
index 270a5932df..c4a50c0fa9 100644
--- a/src/jit/compiler.hpp
+++ b/src/jit/compiler.hpp
@@ -1798,8 +1798,8 @@ inline unsigned Compiler::lvaGrabTempWithImplicitUse(bool shortLifetime DEBUGARG
lvaSetVarAddrExposed(lclNum);
// We need lvRefCnt to be non-zero to prevent various asserts from firing.
- varDsc->lvRefCnt = 1;
- varDsc->lvRefCntWtd = BB_UNITY_WEIGHT;
+ varDsc->setLvRefCnt(1);
+ varDsc->setLvRefCntWtd(BB_UNITY_WEIGHT);
return lclNum;
}
@@ -1819,9 +1819,9 @@ inline void LclVarDsc::lvaResetSortAgainFlag(Compiler* comp)
comp->lvaSortAgain = true;
}
/* Set weighted ref count to zero if ref count is zero */
- if (lvRefCnt == 0)
+ if (lvRefCnt() == 0)
{
- lvRefCntWtd = 0;
+ setLvRefCntWtd(0);
}
}
@@ -1844,17 +1844,17 @@ inline void LclVarDsc::decRefCnts(BasicBlock::weight_t weight, Compiler* comp, b
//
if (lvType != TYP_STRUCT || promotionType != Compiler::PROMOTION_TYPE_INDEPENDENT)
{
- assert(lvRefCnt); // Can't decrement below zero
+ assert(lvRefCnt()); // Can't decrement below zero
// TODO: Well, the assert above could be bogus.
// If lvRefCnt has overflowed before, then might drop to 0.
// Therefore we do need the following check to keep lvRefCnt from underflow:
- if (lvRefCnt > 0)
+ if (lvRefCnt() > 0)
{
//
// Decrement lvRefCnt
//
- lvRefCnt--;
+ decLvRefCnt(1);
//
// Decrement lvRefCntWtd
@@ -1866,13 +1866,13 @@ inline void LclVarDsc::decRefCnts(BasicBlock::weight_t weight, Compiler* comp, b
weight *= 2;
}
- if (lvRefCntWtd <= weight)
+ if (lvRefCntWtd() <= weight)
{ // Can't go below zero
- lvRefCntWtd = 0;
+ setLvRefCntWtd(0);
}
else
{
- lvRefCntWtd -= weight;
+ decLvRefCntWtd(weight);
}
}
}
@@ -1910,7 +1910,8 @@ inline void LclVarDsc::decRefCnts(BasicBlock::weight_t weight, Compiler* comp, b
{
unsigned varNum = (unsigned)(this - comp->lvaTable);
assert(&comp->lvaTable[varNum] == this);
- printf("New refCnts for V%02u: refCnt = %2u, refCntWtd = %s\n", varNum, lvRefCnt, refCntWtd2str(lvRefCntWtd));
+ printf("New refCnts for V%02u: refCnt = %2u, refCntWtd = %s\n", varNum, lvRefCnt(),
+ refCntWtd2str(lvRefCntWtd()));
}
#endif
}
@@ -1936,10 +1937,10 @@ inline void LclVarDsc::incRefCnts(BasicBlock::weight_t weight, Compiler* comp, b
//
// Increment lvRefCnt
//
- int newRefCnt = lvRefCnt + 1;
+ int newRefCnt = lvRefCnt() + 1;
if (newRefCnt == (unsigned short)newRefCnt) // lvRefCnt is an "unsigned short". Don't overflow it.
{
- lvRefCnt = (unsigned short)newRefCnt;
+ setLvRefCnt((unsigned short)newRefCnt);
}
// This fires when an uninitialize value for 'weight' is used (see lvaMarkRefsWeight)
@@ -1956,14 +1957,14 @@ inline void LclVarDsc::incRefCnts(BasicBlock::weight_t weight, Compiler* comp, b
weight *= 2;
}
- unsigned newWeight = lvRefCntWtd + weight;
- if (newWeight >= lvRefCntWtd)
+ unsigned newWeight = lvRefCntWtd() + weight;
+ if (newWeight >= lvRefCntWtd())
{ // lvRefCntWtd is an "unsigned". Don't overflow it
- lvRefCntWtd = newWeight;
+ setLvRefCntWtd(newWeight);
}
else
{ // On overflow we assign ULONG_MAX
- lvRefCntWtd = ULONG_MAX;
+ setLvRefCntWtd(ULONG_MAX);
}
}
}
@@ -2000,7 +2001,8 @@ inline void LclVarDsc::incRefCnts(BasicBlock::weight_t weight, Compiler* comp, b
{
unsigned varNum = (unsigned)(this - comp->lvaTable);
assert(&comp->lvaTable[varNum] == this);
- printf("New refCnts for V%02u: refCnt = %2u, refCntWtd = %s\n", varNum, lvRefCnt, refCntWtd2str(lvRefCntWtd));
+ printf("New refCnts for V%02u: refCnt = %2u, refCntWtd = %s\n", varNum, lvRefCnt(),
+ refCntWtd2str(lvRefCntWtd()));
}
#endif
}
diff --git a/src/jit/emit.cpp b/src/jit/emit.cpp
index 3492f2fee4..748e1c3c00 100644
--- a/src/jit/emit.cpp
+++ b/src/jit/emit.cpp
@@ -4656,7 +4656,7 @@ unsigned emitter::emitEndCodeGen(Compiler* comp,
assert(!dsc->lvRegister);
assert(dsc->lvTracked);
- assert(dsc->lvRefCnt != 0);
+ assert(dsc->lvRefCnt() != 0);
assert(dsc->TypeGet() == TYP_REF || dsc->TypeGet() == TYP_BYREF);
diff --git a/src/jit/gcencode.cpp b/src/jit/gcencode.cpp
index bc38f64b54..d742822511 100644
--- a/src/jit/gcencode.cpp
+++ b/src/jit/gcencode.cpp
@@ -2240,7 +2240,7 @@ size_t GCInfo::gcMakeRegPtrTable(BYTE* dest, int mask, const InfoHdr& header, un
/* If this non-enregistered pointer arg is never
* used, we don't need to report it
*/
- assert(varDsc->lvRefCnt == 0); // This assert is currently a known issue for X86-RyuJit
+ assert(varDsc->lvRefCnt() == 0); // This assert is currently a known issue for X86-RyuJit
continue;
}
else if (varDsc->lvIsRegArg && varDsc->lvTracked)
@@ -4220,7 +4220,7 @@ void GCInfo::gcMakeRegPtrTable(
{
// If this non-enregistered pointer arg is never
// used, we don't need to report it.
- assert(varDsc->lvRefCnt == 0);
+ assert(varDsc->lvRefCnt() == 0);
continue;
}
else if (varDsc->lvIsRegArg && varDsc->lvTracked)
diff --git a/src/jit/gcinfo.cpp b/src/jit/gcinfo.cpp
index ee9f91df7e..0ec4933001 100644
--- a/src/jit/gcinfo.cpp
+++ b/src/jit/gcinfo.cpp
@@ -427,7 +427,7 @@ void GCInfo::gcCountForHeader(UNALIGNED unsigned int* untrackedCount, UNALIGNED
/* If this non-enregistered pointer arg is never
* used, we don't need to report it
*/
- assert(varDsc->lvRefCnt == 0);
+ assert(varDsc->lvRefCnt() == 0);
continue;
}
else if (varDsc->lvIsRegArg && varDsc->lvTracked)
diff --git a/src/jit/gentree.cpp b/src/jit/gentree.cpp
index fc1250ccac..1f60fe6169 100644
--- a/src/jit/gentree.cpp
+++ b/src/jit/gentree.cpp
@@ -2983,7 +2983,7 @@ bool Compiler::gtIsLikelyRegVar(GenTree* tree)
return false;
}
- if (varDsc->lvRefCntWtd < (BB_UNITY_WEIGHT * 3))
+ if (varDsc->lvRefCntWtd() < (BB_UNITY_WEIGHT * 3))
{
return false;
}
diff --git a/src/jit/lclvars.cpp b/src/jit/lclvars.cpp
index b9b5038650..e72ae11cf5 100644
--- a/src/jit/lclvars.cpp
+++ b/src/jit/lclvars.cpp
@@ -2070,7 +2070,7 @@ void Compiler::lvaPromoteLongVars()
{
LclVarDsc* varDsc = &lvaTable[lclNum];
if (!varTypeIsLong(varDsc) || varDsc->lvDoNotEnregister || varDsc->lvIsMultiRegArgOrRet() ||
- (varDsc->lvRefCnt == 0) || varDsc->lvIsStructField || (fgNoStructPromotion && varDsc->lvIsParam))
+ (varDsc->lvRefCnt() == 0) || varDsc->lvIsStructField || (fgNoStructPromotion && varDsc->lvIsParam))
{
continue;
}
@@ -3005,8 +3005,8 @@ int __cdecl Compiler::RefCntCmp(const void* op1, const void* op2)
return (dsc2->lvTracked) ? +1 : -1;
}
- unsigned weight1 = dsc1->lvRefCnt;
- unsigned weight2 = dsc2->lvRefCnt;
+ unsigned weight1 = dsc1->lvRefCnt();
+ unsigned weight2 = dsc2->lvRefCnt();
#ifndef _TARGET_ARM_
// ARM-TODO: this was disabled for ARM under !FEATURE_FP_REGALLOC; it was probably a left-over from
@@ -3039,7 +3039,7 @@ int __cdecl Compiler::RefCntCmp(const void* op1, const void* op2)
/* The unweighted ref counts were the same */
/* If the weighted ref counts are different then use their difference */
- diff = dsc2->lvRefCntWtd - dsc1->lvRefCntWtd;
+ diff = dsc2->lvRefCntWtd() - dsc1->lvRefCntWtd();
if (diff != 0)
{
@@ -3145,8 +3145,8 @@ int __cdecl Compiler::WtdRefCntCmp(const void* op1, const void* op2)
return (dsc2->lvTracked) ? +1 : -1;
}
- unsigned weight1 = dsc1->lvRefCntWtd;
- unsigned weight2 = dsc2->lvRefCntWtd;
+ unsigned weight1 = dsc1->lvRefCntWtd();
+ unsigned weight2 = dsc2->lvRefCntWtd();
#ifndef _TARGET_ARM_
// ARM-TODO: this was disabled for ARM under !FEATURE_FP_REGALLOC; it was probably a left-over from
@@ -3209,7 +3209,7 @@ int __cdecl Compiler::WtdRefCntCmp(const void* op1, const void* op2)
// Otherwise, we have equal weighted ref counts.
/* If the unweighted ref counts are different then use their difference */
- int diff = (int)dsc2->lvRefCnt - (int)dsc1->lvRefCnt;
+ int diff = (int)dsc2->lvRefCnt() - (int)dsc1->lvRefCnt();
if (diff != 0)
{
@@ -3288,12 +3288,12 @@ void Compiler::lvaDumpRefCounts()
for (unsigned lclNum = 0; lclNum < lvaCount; lclNum++)
{
- unsigned refCnt = lvaRefSorted[lclNum]->lvRefCnt;
+ unsigned refCnt = lvaRefSorted[lclNum]->lvRefCnt();
if (refCnt == 0)
{
break;
}
- unsigned refCntWtd = lvaRefSorted[lclNum]->lvRefCntWtd;
+ unsigned refCntWtd = lvaRefSorted[lclNum]->lvRefCntWtd();
printf(" ");
gtDispLclVar((unsigned)(lvaRefSorted[lclNum] - lvaTable));
@@ -3374,11 +3374,11 @@ void Compiler::lvaSortByRefCount()
varDsc->lvTracked = 1;
/* If the ref count is zero */
- if (varDsc->lvRefCnt == 0)
+ if (varDsc->lvRefCnt() == 0)
{
/* Zero ref count, make this untracked */
- varDsc->lvTracked = 0;
- varDsc->lvRefCntWtd = 0;
+ varDsc->lvTracked = 0;
+ varDsc->setLvRefCntWtd(0);
}
#if !defined(_TARGET_64BIT_)
@@ -3527,7 +3527,7 @@ void Compiler::lvaSortByRefCount()
varDsc = lvaRefSorted[lclNum];
if (varDsc->lvTracked)
{
- noway_assert(varDsc->lvRefCnt > 0);
+ noway_assert(varDsc->lvRefCnt() > 0);
/* This variable will be tracked - assign it an index */
@@ -4028,8 +4028,8 @@ void Compiler::lvaMarkLocalVars()
/* Set the refCnt, it is used in the prolog and return block(s) */
- lvaTable[info.compLvFrameListRoot].lvRefCnt = 2;
- lvaTable[info.compLvFrameListRoot].lvRefCntWtd = 2 * BB_UNITY_WEIGHT;
+ lvaTable[info.compLvFrameListRoot].setLvRefCnt(2);
+ lvaTable[info.compLvFrameListRoot].setLvRefCntWtd(2 * BB_UNITY_WEIGHT);
}
}
@@ -4132,7 +4132,7 @@ void Compiler::lvaMarkLocalVars()
break; // early exit for loop
}
- if ((varDsc->lvIsRegArg) && (varDsc->lvRefCnt > 0))
+ if ((varDsc->lvIsRegArg) && (varDsc->lvRefCnt() > 0))
{
// Fix 388376 ARM JitStress WP7
varDsc->incRefCnts(BB_UNITY_WEIGHT, this);
@@ -4148,16 +4148,16 @@ void Compiler::lvaMarkLocalVars()
}
#endif
- if (lvaKeepAliveAndReportThis() && lvaTable[0].lvRefCnt == 0)
+ if (lvaKeepAliveAndReportThis() && lvaTable[0].lvRefCnt() == 0)
{
- lvaTable[0].lvRefCnt = 1;
+ lvaTable[0].setLvRefCnt(1);
// This isn't strictly needed as we will make a copy of the param-type-arg
// in the prolog. However, this ensures that the LclVarDsc corresponding to
// info.compTypeCtxtArg is valid.
}
- else if (lvaReportParamTypeArg() && lvaTable[info.compTypeCtxtArg].lvRefCnt == 0)
+ else if (lvaReportParamTypeArg() && lvaTable[info.compTypeCtxtArg].lvRefCnt() == 0)
{
- lvaTable[info.compTypeCtxtArg].lvRefCnt = 1;
+ lvaTable[info.compTypeCtxtArg].setLvRefCnt(1);
}
lvaLocalVarRefCounted = true;
@@ -4180,8 +4180,8 @@ void Compiler::lvaAllocOutgoingArgSpaceVar()
/* Set the refCnts */
- lvaTable[lvaOutgoingArgSpaceVar].lvRefCnt = 1;
- lvaTable[lvaOutgoingArgSpaceVar].lvRefCntWtd = BB_UNITY_WEIGHT;
+ lvaTable[lvaOutgoingArgSpaceVar].setLvRefCnt(1);
+ lvaTable[lvaOutgoingArgSpaceVar].setLvRefCntWtd(BB_UNITY_WEIGHT);
}
noway_assert(lvaOutgoingArgSpaceVar >= info.compLocalsCount && lvaOutgoingArgSpaceVar < lvaCount);
@@ -6480,7 +6480,7 @@ void Compiler::lvaAssignFrameOffsetsToPromotedStructs()
else
{
varDsc->lvOnFrame = false;
- noway_assert(varDsc->lvRefCnt == 0);
+ noway_assert(varDsc->lvRefCnt() == 0);
}
}
}
@@ -6702,7 +6702,7 @@ void Compiler::lvaDumpEntry(unsigned lclNum, FrameLayoutState curState, size_t r
}
else
{
- if (varDsc->lvRefCnt == 0)
+ if (varDsc->lvRefCnt() == 0)
{
// Print this with a special indicator that the variable is unused. Even though the
// variable itself is unused, it might be a struct that is promoted, so seeing it
@@ -6737,7 +6737,7 @@ void Compiler::lvaDumpEntry(unsigned lclNum, FrameLayoutState curState, size_t r
printf(" ]");
}
- printf(" (%3u,%*s)", varDsc->lvRefCnt, (int)refCntWtdWidth, refCntWtd2str(varDsc->lvRefCntWtd));
+ printf(" (%3u,%*s)", varDsc->lvRefCnt(), (int)refCntWtdWidth, refCntWtd2str(varDsc->lvRefCntWtd()));
printf(" %7s ", varTypeName(type));
if (genTypeSize(type) == 0)
@@ -6750,7 +6750,7 @@ void Compiler::lvaDumpEntry(unsigned lclNum, FrameLayoutState curState, size_t r
}
// The register or stack location field is 11 characters wide.
- if (varDsc->lvRefCnt == 0)
+ if (varDsc->lvRefCnt() == 0)
{
printf("zero-ref ");
}
@@ -6987,7 +6987,7 @@ void Compiler::lvaTableDump(FrameLayoutState curState)
{
for (lclNum = 0, varDsc = lvaTable; lclNum < lvaCount; lclNum++, varDsc++)
{
- size_t width = strlen(refCntWtd2str(varDsc->lvRefCntWtd));
+ size_t width = strlen(refCntWtd2str(varDsc->lvRefCntWtd()));
if (width > refCntWtdWidth)
{
refCntWtdWidth = width;
diff --git a/src/jit/liveness.cpp b/src/jit/liveness.cpp
index c99df5cc70..3c34681daa 100644
--- a/src/jit/liveness.cpp
+++ b/src/jit/liveness.cpp
@@ -31,11 +31,11 @@ void Compiler::fgMarkUseDef(GenTreeLclVarCommon* tree)
LclVarDsc* const varDsc = &lvaTable[lclNum];
// We should never encounter a reference to a lclVar that has a zero refCnt.
- if (varDsc->lvRefCnt == 0 && (!varTypeIsPromotable(varDsc) || !varDsc->lvPromoted))
+ if (varDsc->lvRefCnt() == 0 && (!varTypeIsPromotable(varDsc) || !varDsc->lvPromoted))
{
JITDUMP("Found reference to V%02u with zero refCnt.\n", lclNum);
assert(!"We should never encounter a reference to a lclVar that has a zero refCnt.");
- varDsc->lvRefCnt = 1;
+ varDsc->setLvRefCnt(1);
}
const bool isDef = (tree->gtFlags & GTF_VAR_DEF) != 0;
@@ -1047,9 +1047,9 @@ void Compiler::fgExtendDbgLifetimes()
unsigned lclNum = 0;
for (LclVarDsc *varDsc = lvaTable; lclNum < lvaCount; lclNum++, varDsc++)
{
- if (varDsc->lvRefCnt == 0 && varDsc->lvIsRegArg)
+ if (varDsc->lvRefCnt() == 0 && varDsc->lvIsRegArg)
{
- varDsc->lvRefCnt = 1;
+ varDsc->setLvRefCnt(1);
}
}
diff --git a/src/jit/lower.cpp b/src/jit/lower.cpp
index f6a0b7cb28..1c4cc1a9b1 100644
--- a/src/jit/lower.cpp
+++ b/src/jit/lower.cpp
@@ -479,10 +479,10 @@ GenTree* Lowering::LowerSwitch(GenTree* node)
// the result of the child subtree to a temp.
GenTree* rhs = node->gtOp.gtOp1;
- unsigned lclNum = comp->lvaGrabTemp(true DEBUGARG("Lowering is creating a new local variable"));
- comp->lvaSortAgain = true;
- comp->lvaTable[lclNum].lvType = rhs->TypeGet();
- comp->lvaTable[lclNum].lvRefCnt = 1;
+ unsigned lclNum = comp->lvaGrabTemp(true DEBUGARG("Lowering is creating a new local variable"));
+ comp->lvaSortAgain = true;
+ comp->lvaTable[lclNum].lvType = rhs->TypeGet();
+ comp->lvaTable[lclNum].setLvRefCnt(1);
GenTreeLclVar* store =
new (comp, GT_STORE_LCL_VAR) GenTreeLclVar(GT_STORE_LCL_VAR, rhs->TypeGet(), lclNum, BAD_IL_OFFSET);
@@ -2052,9 +2052,9 @@ void Lowering::LowerFastTailCall(GenTreeCall* call)
tmpLclNum = comp->lvaGrabTemp(
true DEBUGARG("Fast tail call lowering is creating a new local variable"));
- comp->lvaSortAgain = true;
- comp->lvaTable[tmpLclNum].lvType = tmpType;
- comp->lvaTable[tmpLclNum].lvRefCnt = 1;
+ comp->lvaSortAgain = true;
+ comp->lvaTable[tmpLclNum].lvType = tmpType;
+ comp->lvaTable[tmpLclNum].setLvRefCnt(1);
comp->lvaTable[tmpLclNum].lvDoNotEnregister = comp->lvaTable[lcl->gtLclNum].lvDoNotEnregister;
}
diff --git a/src/jit/lowerxarch.cpp b/src/jit/lowerxarch.cpp
index 938ddb5862..327eb2b8cc 100644
--- a/src/jit/lowerxarch.cpp
+++ b/src/jit/lowerxarch.cpp
@@ -1338,7 +1338,7 @@ GenTree* Lowering::PreferredRegOptionalOperand(GenTree* tree)
// weight as reg optional.
// If either is not tracked, it may be that it was introduced after liveness
// was run, in which case we will always prefer op1 (should we use raw refcnt??).
- if (v1->lvTracked && v2->lvTracked && (v1->lvRefCntWtd >= v2->lvRefCntWtd))
+ if (v1->lvTracked && v2->lvTracked && (v1->lvRefCntWtd() >= v2->lvRefCntWtd()))
{
preferredOp = op2;
}
diff --git a/src/jit/lsra.cpp b/src/jit/lsra.cpp
index f6f749ffa0..36007b8407 100644
--- a/src/jit/lsra.cpp
+++ b/src/jit/lsra.cpp
@@ -180,7 +180,7 @@ unsigned LinearScan::getWeight(RefPosition* refPos)
// ref position.
GenTreeLclVarCommon* lclCommon = treeNode->AsLclVarCommon();
LclVarDsc* varDsc = &(compiler->lvaTable[lclCommon->gtLclNum]);
- weight = varDsc->lvRefCntWtd;
+ weight = varDsc->lvRefCntWtd();
if (refPos->getInterval()->isSpilled)
{
// Decrease the weight if the interval has already been spilled.
@@ -1390,9 +1390,9 @@ bool LinearScan::isRegCandidate(LclVarDsc* varDsc)
}
// Don't enregister if the ref count is zero.
- if (varDsc->lvRefCnt == 0)
+ if (varDsc->lvRefCnt() == 0)
{
- varDsc->lvRefCntWtd = 0;
+ varDsc->setLvRefCntWtd(0);
return false;
}
@@ -1615,22 +1615,22 @@ void LinearScan::identifyCandidates()
{
if (varDsc->lvIsParam && !varDsc->lvIsRegArg)
{
- refCntStkParam += varDsc->lvRefCnt;
+ refCntStkParam += varDsc->lvRefCnt();
}
else if (!isRegCandidate(varDsc) || varDsc->lvDoNotEnregister)
{
- refCntStk += varDsc->lvRefCnt;
+ refCntStk += varDsc->lvRefCnt();
if ((varDsc->lvType == TYP_DOUBLE) ||
((varTypeIsStruct(varDsc) && varDsc->lvStructDoubleAlign &&
(compiler->lvaGetPromotionType(varDsc) != Compiler::PROMOTION_TYPE_INDEPENDENT))))
{
- refCntWtdStkDbl += varDsc->lvRefCntWtd;
+ refCntWtdStkDbl += varDsc->lvRefCntWtd();
}
}
else
{
- refCntReg += varDsc->lvRefCnt;
- refCntWtdReg += varDsc->lvRefCntWtd;
+ refCntReg += varDsc->lvRefCnt();
+ refCntWtdReg += varDsc->lvRefCntWtd();
}
}
#endif // DOUBLE_ALIGN
@@ -1682,7 +1682,7 @@ void LinearScan::identifyCandidates()
{
largeVectorVarCount++;
VarSetOps::AddElemD(compiler, largeVectorVars, varDsc->lvVarIndex);
- unsigned refCntWtd = varDsc->lvRefCntWtd;
+ unsigned refCntWtd = varDsc->lvRefCntWtd();
if (refCntWtd >= thresholdLargeVectorRefCntWtd)
{
VarSetOps::AddElemD(compiler, largeVectorCalleeSaveCandidateVars, varDsc->lvVarIndex);
@@ -1693,7 +1693,7 @@ void LinearScan::identifyCandidates()
if (regType(type) == FloatRegisterType)
{
floatVarCount++;
- unsigned refCntWtd = varDsc->lvRefCntWtd;
+ unsigned refCntWtd = varDsc->lvRefCntWtd();
if (varDsc->lvIsRegArg)
{
// Don't count the initial reference for register params. In those cases,
@@ -5223,7 +5223,7 @@ void LinearScan::allocateRegisters()
// inserting a store.
LclVarDsc* varDsc = currentInterval->getLocalVar(compiler);
assert(varDsc != nullptr);
- if (refType == RefTypeParamDef && varDsc->lvRefCntWtd <= BB_UNITY_WEIGHT)
+ if (refType == RefTypeParamDef && varDsc->lvRefCntWtd() <= BB_UNITY_WEIGHT)
{
INDEBUG(dumpLsraAllocationEvent(LSRA_EVENT_NO_ENTRY_REG_ALLOCATED, currentInterval));
didDump = true;
@@ -6922,7 +6922,7 @@ void LinearScan::resolveRegisters()
{
// Dead interval
varDsc->lvLRACandidate = false;
- if (varDsc->lvRefCnt == 0)
+ if (varDsc->lvRefCnt() == 0)
{
varDsc->lvOnFrame = false;
}
diff --git a/src/jit/lsrabuild.cpp b/src/jit/lsrabuild.cpp
index f42486d5fe..ff746f86ac 100644
--- a/src/jit/lsrabuild.cpp
+++ b/src/jit/lsrabuild.cpp
@@ -1934,7 +1934,7 @@ void LinearScan::buildIntervals()
// Use lvRefCnt instead of checking bbLiveIn because if it's volatile we
// won't have done dataflow on it, but it needs to be marked as live-in so
// it will get saved in the prolog.
- if (!compiler->compJmpOpUsed && argDsc->lvRefCnt == 0 && !compiler->opts.compDbgCode)
+ if (!compiler->compJmpOpUsed && argDsc->lvRefCnt() == 0 && !compiler->opts.compDbgCode)
{
continue;
}
@@ -1976,7 +1976,7 @@ void LinearScan::buildIntervals()
else
{
// We can overwrite the register (i.e. codegen saves it on entry)
- assert(argDsc->lvRefCnt == 0 || !argDsc->lvIsRegArg || argDsc->lvDoNotEnregister ||
+ assert(argDsc->lvRefCnt() == 0 || !argDsc->lvIsRegArg || argDsc->lvDoNotEnregister ||
!argDsc->lvLRACandidate || (varTypeIsFloating(argDsc->TypeGet()) && compiler->opts.compDbgCode));
}
}
diff --git a/src/jit/morph.cpp b/src/jit/morph.cpp
index 2e756ad9c0..f5063b06bd 100644
--- a/src/jit/morph.cpp
+++ b/src/jit/morph.cpp
@@ -2338,7 +2338,7 @@ void fgArgInfo::EvalArgsToTemps()
// We'll reference this temporary variable just once
// when we perform the function call after
// setting up this argument.
- varDsc->lvRefCnt = 1;
+ varDsc->setLvRefCnt(1);
}
var_types lclVarType = genActualType(argx->gtType);
@@ -5224,9 +5224,9 @@ void Compiler::fgMakeOutgoingStructArgCopy(GenTreeCall* call,
// on the caller's frame. If an argument lives on the caller caller's frame, it may get
// overwritten if that frame is reused for the tail call. Therefore, we should always copy
// struct parameters if they are passed as arguments to a tail call.
- if (!call->IsTailCallViaHelper() && (varDsc->lvRefCnt == 1) && !fgMightHaveLoop())
+ if (!call->IsTailCallViaHelper() && (varDsc->lvRefCnt() == 1) && !fgMightHaveLoop())
{
- varDsc->lvRefCnt = 0;
+ varDsc->setLvRefCnt(0);
args->gtOp.gtOp1 = lcl;
fp->node = lcl;
@@ -12576,11 +12576,11 @@ DONE_MORPHING_CHILDREN:
// And then emitter::emitEndCodeGen will assert in the following line:
// noway_assert( dsc->lvTracked);
// </BUGNUM>
- noway_assert(varDsc->lvRefCnt == 0 || // lvRefCnt may not have been set yet.
- varDsc->lvRefCnt == 2 // Or, we assume this tmp should only be used here,
- // and it only shows up twice.
+ noway_assert(varDsc->lvRefCnt() == 0 || // lvRefCnt may not have been set yet.
+ varDsc->lvRefCnt() == 2 // Or, we assume this tmp should only be used here,
+ // and it only shows up twice.
);
- lvaTable[lclNum].lvRefCnt = 0;
+ lvaTable[lclNum].setLvRefCnt(0);
lvaTable[lclNum].lvaResetSortAgainFlag(this);
}
@@ -17342,8 +17342,8 @@ Compiler::fgWalkResult Compiler::fgMorphStructField(GenTree* tree, fgWalkData* f
// chance, so have to check now.
JITDUMP(
"Incrementing ref count from %d to %d for V%02d in fgMorphStructField for promoted struct\n",
- varDsc->lvRefCnt, varDsc->lvRefCnt + 1, lclNum);
- varDsc->lvRefCnt++;
+ varDsc->lvRefCnt(), varDsc->lvRefCnt() + 1, lclNum);
+ varDsc->incLvRefCnt(1);
}
tree->SetOper(GT_LCL_VAR);
@@ -17433,8 +17433,8 @@ Compiler::fgWalkResult Compiler::fgMorphStructField(GenTree* tree, fgWalkData* f
// lclVars, but here we're about to return SKIP_SUBTREES and rob it of the
// chance, so have to check now.
JITDUMP("Incrementing ref count from %d to %d for V%02d in fgMorphStructField for normed struct\n",
- varDsc->lvRefCnt, varDsc->lvRefCnt + 1, lclNum);
- varDsc->lvRefCnt++;
+ varDsc->lvRefCnt(), varDsc->lvRefCnt() + 1, lclNum);
+ varDsc->incLvRefCnt(1);
}
tree->ChangeOper(GT_LCL_VAR);
@@ -17590,7 +17590,7 @@ void Compiler::fgMarkImplicitByRefArgs()
// appearance of implicit-by-ref param so that call arg morphing can do an
// optimization for single-use implicit-by-ref params whose single use is as
// an outgoing call argument.
- varDsc->lvRefCnt = 0;
+ varDsc->setLvRefCnt(0);
}
}
}
@@ -17677,7 +17677,7 @@ void Compiler::fgRetypeImplicitByRefArgs()
// parameter if it weren't promoted at all (otherwise the initialization
// of the new temp would just be a needless memcpy at method entry).
bool undoPromotion = (lvaGetPromotionType(newVarDsc) == PROMOTION_TYPE_DEPENDENT) ||
- (varDsc->lvRefCnt <= varDsc->lvFieldCnt);
+ (varDsc->lvRefCnt() <= varDsc->lvFieldCnt);
if (!undoPromotion)
{
@@ -17715,7 +17715,7 @@ void Compiler::fgRetypeImplicitByRefArgs()
// to the implicit byref parameter when morphing calls that pass the implicit byref
// out as an outgoing argument value, but that doesn't pertain to this field local
// which is now a field of a non-arg local.
- fieldVarDsc->lvRefCnt = 0;
+ fieldVarDsc->setLvRefCnt(0);
}
fieldVarDsc->lvIsParam = false;
@@ -17832,12 +17832,12 @@ void Compiler::fgMarkDemotedImplicitByRefArgs()
// call morphing could identify single-use implicit byrefs; we're done with
// that, and want it to be in its default state of zero when we go to set
// real ref counts for all variables.
- varDsc->lvRefCnt = 0;
+ varDsc->setLvRefCnt(0);
// The temp struct is now unused; set flags appropriately so that we
// won't allocate space for it on the stack.
- LclVarDsc* structVarDsc = &lvaTable[structLclNum];
- structVarDsc->lvRefCnt = 0;
+ LclVarDsc* structVarDsc = &lvaTable[structLclNum];
+ structVarDsc->setLvRefCnt(0);
structVarDsc->lvAddrExposed = false;
#ifdef DEBUG
structVarDsc->lvUnusedStruct = true;
@@ -17856,7 +17856,7 @@ void Compiler::fgMarkDemotedImplicitByRefArgs()
// The field local is now unused; set flags appropriately so that
// we won't allocate stack space for it.
- fieldVarDsc->lvRefCnt = 0;
+ fieldVarDsc->setLvRefCnt(0);
fieldVarDsc->lvAddrExposed = false;
}
}
@@ -18251,10 +18251,10 @@ Compiler::fgWalkResult Compiler::fgMarkAddrTakenLocalsPreCB(GenTree** pTree, fgW
// checks the ref counts for implicit byref params when deciding if it's legal
// to elide certain copies of them.
LclVarDsc* varDsc = &comp->lvaTable[lclNum];
- JITDUMP("Incrementing ref count from %d to %d for V%02d in fgMorphStructField\n", varDsc->lvRefCnt,
- varDsc->lvRefCnt + 1, lclNum);
+ JITDUMP("Incrementing ref count from %d to %d for V%02d in fgMorphStructField\n", varDsc->lvRefCnt(),
+ varDsc->lvRefCnt() + 1, lclNum);
- varDsc->lvRefCnt++;
+ varDsc->incLvRefCnt(1);
}
// This recognizes certain forms, and does all the work. In that case, returns WALK_SKIP_SUBTREES,
// else WALK_CONTINUE. We do the same here.
@@ -18289,10 +18289,10 @@ Compiler::fgWalkResult Compiler::fgMarkAddrTakenLocalsPreCB(GenTree** pTree, fgW
// byref (here during address-exposed analysis); fgMakeOutgoingStructArgCopy
// checks the ref counts for implicit byref params when deciding if it's legal
// to elide certain copies of them.
- JITDUMP("Incrementing ref count from %d to %d for V%02d in fgMorphStructField\n", varDsc->lvRefCnt,
- varDsc->lvRefCnt + 1, lclNum);
+ JITDUMP("Incrementing ref count from %d to %d for V%02d in fgMorphStructField\n", varDsc->lvRefCnt(),
+ varDsc->lvRefCnt() + 1, lclNum);
- varDsc->lvRefCnt++;
+ varDsc->incLvRefCnt(1);
}
if (axc == AXC_Addr || axc == AXC_AddrWide)
diff --git a/src/jit/optcse.cpp b/src/jit/optcse.cpp
index 48aded86fe..f389aa5bee 100644
--- a/src/jit/optcse.cpp
+++ b/src/jit/optcse.cpp
@@ -1327,7 +1327,7 @@ public:
for (lclNum = 0, varDsc = m_pCompiler->lvaTable; lclNum < m_pCompiler->lvaCount; lclNum++, varDsc++)
{
- if (varDsc->lvRefCnt == 0)
+ if (varDsc->lvRefCnt() == 0)
{
continue;
}
@@ -1368,7 +1368,7 @@ public:
// will consider this LclVar as being enregistered.
// Now we reduce the remaining regAvailEstimate by
// an appropriate amount.
- if (varDsc->lvRefCnt <= 2)
+ if (varDsc->lvRefCnt() <= 2)
{
// a single use single def LclVar only uses 1
regAvailEstimate -= 1;
@@ -1435,22 +1435,22 @@ public:
{
if (CodeOptKind() == Compiler::SMALL_CODE)
{
- aggressiveRefCnt = varDsc->lvRefCnt + BB_UNITY_WEIGHT;
+ aggressiveRefCnt = varDsc->lvRefCnt() + BB_UNITY_WEIGHT;
}
else
{
- aggressiveRefCnt = varDsc->lvRefCntWtd + BB_UNITY_WEIGHT;
+ aggressiveRefCnt = varDsc->lvRefCntWtd() + BB_UNITY_WEIGHT;
}
}
if ((moderateRefCnt == 0) && (enregCount > ((CNT_CALLEE_ENREG * 3) + (CNT_CALLEE_TRASH * 2))))
{
if (CodeOptKind() == Compiler::SMALL_CODE)
{
- moderateRefCnt = varDsc->lvRefCnt;
+ moderateRefCnt = varDsc->lvRefCnt();
}
else
{
- moderateRefCnt = varDsc->lvRefCntWtd;
+ moderateRefCnt = varDsc->lvRefCntWtd();
}
}
}
diff --git a/src/jit/regalloc.cpp b/src/jit/regalloc.cpp
index aa3d0f43bf..f80c81bbe1 100644
--- a/src/jit/regalloc.cpp
+++ b/src/jit/regalloc.cpp
@@ -295,7 +295,7 @@ void Compiler::raMarkStkVars()
goto NOT_STK;
}
/* Unused variables typically don't get any frame space */
- else if (varDsc->lvRefCnt == 0)
+ else if (varDsc->lvRefCnt() == 0)
{
bool needSlot = false;
@@ -344,7 +344,7 @@ void Compiler::raMarkStkVars()
if (lvaTypeIsGC(lclNum))
{
- varDsc->lvRefCnt = 1;
+ varDsc->setLvRefCnt(1);
}
if (!varDsc->lvIsParam)
@@ -404,7 +404,7 @@ void Compiler::raMarkStkVars()
// It must be in a register, on frame, or have zero references.
- noway_assert(varDsc->lvIsInReg() || varDsc->lvOnFrame || varDsc->lvRefCnt == 0);
+ noway_assert(varDsc->lvIsInReg() || varDsc->lvOnFrame || varDsc->lvRefCnt() == 0);
// We can't have both lvRegister and lvOnFrame
noway_assert(!varDsc->lvRegister || !varDsc->lvOnFrame);
@@ -424,7 +424,7 @@ void Compiler::raMarkStkVars()
{
if (!varDsc->lvPromoted && !varDsc->lvIsStructField)
{
- noway_assert(varDsc->lvRefCnt == 0 && !varDsc->lvRegister && !varDsc->lvOnFrame);
+ noway_assert(varDsc->lvRefCnt() == 0 && !varDsc->lvRegister && !varDsc->lvOnFrame);
}
}
#endif
diff --git a/src/jit/scopeinfo.cpp b/src/jit/scopeinfo.cpp
index 66f52a2758..ddb5e8ce01 100644
--- a/src/jit/scopeinfo.cpp
+++ b/src/jit/scopeinfo.cpp
@@ -475,7 +475,7 @@ void CodeGen::siBeginBlock(BasicBlock* block)
// So we need to check if this tracked variable is actually used.
if (!compiler->lvaTable[varNum].lvIsInReg() && !compiler->lvaTable[varNum].lvOnFrame)
{
- assert(compiler->lvaTable[varNum].lvRefCnt == 0);
+ assert(compiler->lvaTable[varNum].lvRefCnt() == 0);
continue;
}