author    Sergey Andreenko <seandree@microsoft.com>  2018-01-31 17:52:39 -0800
committer GitHub <noreply@github.com>                2018-01-31 17:52:39 -0800
commit    8dde886767682feac4b5414366dfae7be3c08412 (patch)
tree      c9ffc898bcd1a1f863833d9ce92755739cfed251
parent    45c2c01cb146d2173f0a2f204859e549ad536a34 (diff)
Delete GenTreePtr. (#16027)
* jit sources: Each local pointer variable must be declared on its own line.
  Implements https://github.com/dotnet/coreclr/blob/master/Documentation/coding-guidelines/clr-jit-coding-conventions.md#101-pointer-declarations
  ("Each local pointer variable must be declared on its own line.")
* add constGenTreePtr
* delete GenTreePtr
* delete constGenTreePtr
* fix arm
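
For readers unfamiliar with the convention being cited, a minimal self-contained sketch of the before/after style follows. The GenTree struct here is a simplified stand-in for the real JIT node type, and the old-style line appears only in a comment; neither is taken from the diff itself.

    // Sketch of the pointer-declaration rule this commit enforces: spell the
    // node type as GenTree* instead of the GenTreePtr typedef, and give each
    // local pointer variable its own declaration line.
    struct GenTree // simplified stand-in, not the real JIT node type
    {
        GenTree* gtOp1;
        GenTree* gtOp2;
    };

    void example(GenTree* tree)
    {
        // Old style removed by this change (GenTreePtr typedef, two locals
        // declared in one statement):
        //     GenTreePtr op1 = tree->gtOp1, op2 = tree->gtOp2;

        // New style: explicit GenTree* type, one pointer per line.
        GenTree* op1 = tree->gtOp1;
        GenTree* op2 = tree->gtOp2;
        (void)op1; // silence unused-variable warnings in this illustration
        (void)op2;
    }
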
-rw-r--r--  src/jit/assertionprop.cpp | 210
-rw-r--r--  src/jit/block.cpp | 16
-rw-r--r--  src/jit/codegen.h | 70
-rw-r--r--  src/jit/codegenarm.cpp | 70
-rw-r--r--  src/jit/codegenarm64.cpp | 80
-rw-r--r--  src/jit/codegenarmarch.cpp | 114
-rw-r--r--  src/jit/codegenclassic.h | 280
-rw-r--r--  src/jit/codegencommon.cpp | 94
-rw-r--r--  src/jit/codegeninterface.h | 40
-rw-r--r--  src/jit/codegenlegacy.cpp | 536
-rw-r--r--  src/jit/codegenlinear.cpp | 10
-rw-r--r--  src/jit/codegenlinear.h | 52
-rw-r--r--  src/jit/codegenxarch.cpp | 222
-rw-r--r--  src/jit/compiler.cpp | 30
-rw-r--r--  src/jit/compiler.h | 1073
-rw-r--r--  src/jit/compiler.hpp | 140
-rw-r--r--  src/jit/copyprop.cpp | 16
-rw-r--r--  src/jit/decomposelongs.cpp | 8
-rw-r--r--  src/jit/earlyprop.cpp | 60
-rw-r--r--  src/jit/ee_il_dll.cpp | 2
-rw-r--r--  src/jit/emitxarch.cpp | 8
-rw-r--r--  src/jit/emitxarch.h | 2
-rw-r--r--  src/jit/flowgraph.cpp | 506
-rw-r--r--  src/jit/gcinfo.cpp | 8
-rw-r--r--  src/jit/gentree.cpp | 486
-rw-r--r--  src/jit/gentree.h | 214
-rw-r--r--  src/jit/gschecks.cpp | 24
-rw-r--r--  src/jit/importer.cpp | 606
-rw-r--r--  src/jit/inline.h | 32
-rw-r--r--  src/jit/instr.cpp | 60
-rw-r--r--  src/jit/jitgcinfo.h | 10
-rw-r--r--  src/jit/lclvars.cpp | 34
-rw-r--r--  src/jit/lir.cpp | 8
-rw-r--r--  src/jit/liveness.cpp | 50
-rw-r--r--  src/jit/loopcloning.cpp | 18
-rw-r--r--  src/jit/loopcloning.h | 20
-rw-r--r--  src/jit/lower.cpp | 123
-rw-r--r--  src/jit/lower.h | 22
-rw-r--r--  src/jit/lowerarmarch.cpp | 39
-rw-r--r--  src/jit/lowerxarch.cpp | 88
-rw-r--r--  src/jit/lsra.cpp | 54
-rw-r--r--  src/jit/lsra.h | 2
-rw-r--r--  src/jit/lsraarm.cpp | 12
-rw-r--r--  src/jit/lsraarm64.cpp | 10
-rw-r--r--  src/jit/lsraarmarch.cpp | 36
-rw-r--r--  src/jit/lsraxarch.cpp | 54
-rw-r--r--  src/jit/morph.cpp | 1024
-rw-r--r--  src/jit/objectalloc.cpp | 22
-rw-r--r--  src/jit/objectalloc.h | 10
-rw-r--r--  src/jit/optcse.cpp | 112
-rw-r--r--  src/jit/optimizer.cpp | 276
-rw-r--r--  src/jit/rangecheck.cpp | 38
-rw-r--r--  src/jit/rangecheck.h | 32
-rw-r--r--  src/jit/regalloc.cpp | 66
-rw-r--r--  src/jit/registerfp.cpp | 81
-rw-r--r--  src/jit/regset.cpp | 40
-rw-r--r--  src/jit/regset.h | 59
-rw-r--r--  src/jit/sharedfloat.cpp | 28
-rw-r--r--  src/jit/simd.cpp | 188
-rw-r--r--  src/jit/simdcodegenxarch.cpp | 8
-rw-r--r--  src/jit/ssabuilder.cpp | 60
-rw-r--r--  src/jit/ssabuilder.h | 2
-rw-r--r--  src/jit/stackfp.cpp | 146
-rw-r--r--  src/jit/valuenum.cpp | 92
-rw-r--r--  src/jit/valuenum.h | 4
65 files changed, 3951 insertions, 3986 deletions
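
Nearly every hunk that follows is the same mechanical rewrite. As orientation, here is a hedged sketch of the pattern; the GenTreePtr typedef itself is deleted elsewhere in the commit and its exact declaration is not shown in this excerpt, so its form below is an assumption, and the two walker prototypes are simplified stand-ins rather than the real Compiler methods.

    struct GenTree;              // forward declaration standing in for the JIT node type
    typedef GenTree* GenTreePtr; // assumed form of the typedef this commit deletes

    // Old spelling: the typedef hides one level of indirection, so a pointer
    // to a tree edge is written GenTreePtr* (compare the optAddCopiesCallback
    // hunk just below).
    void walkTreeOld(GenTreePtr* pTree);

    // New spelling: the pointer type is written out, so the same parameter
    // becomes GenTree**; plain GenTreePtr locals become GenTree*.
    void walkTreeNew(GenTree** pTree);
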
diff --git a/src/jit/assertionprop.cpp b/src/jit/assertionprop.cpp
index 9dfac42e40..8193d2902c 100644
--- a/src/jit/assertionprop.cpp
+++ b/src/jit/assertionprop.cpp
@@ -22,14 +22,14 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
*/
/* static */
-Compiler::fgWalkResult Compiler::optAddCopiesCallback(GenTreePtr* pTree, fgWalkData* data)
+Compiler::fgWalkResult Compiler::optAddCopiesCallback(GenTree** pTree, fgWalkData* data)
{
- GenTreePtr tree = *pTree;
+ GenTree* tree = *pTree;
if (tree->OperIsAssignment())
{
- GenTreePtr op1 = tree->gtOp.gtOp1;
- Compiler* comp = data->compiler;
+ GenTree* op1 = tree->gtOp.gtOp1;
+ Compiler* comp = data->compiler;
if ((op1->gtOper == GT_LCL_VAR) && (op1->gtLclVarCommon.gtLclNum == comp->optAddCopyLclNum))
{
@@ -276,8 +276,8 @@ void Compiler::optAddCopies()
continue;
}
- GenTreePtr stmt;
- unsigned copyLclNum = lvaGrabTemp(false DEBUGARG("optAddCopies"));
+ GenTree* stmt;
+ unsigned copyLclNum = lvaGrabTemp(false DEBUGARG("optAddCopies"));
// Because lvaGrabTemp may have reallocated the lvaTable, ensure varDsc
// is still in sync with lvaTable[lclNum];
@@ -298,7 +298,7 @@ void Compiler::optAddCopies()
noway_assert(varDsc->lvDefStmt == nullptr || varDsc->lvIsStructField);
// Create a new copy assignment tree
- GenTreePtr copyAsgn = gtNewTempAssign(copyLclNum, gtNewLclvNode(lclNum, typ));
+ GenTree* copyAsgn = gtNewTempAssign(copyLclNum, gtNewLclvNode(lclNum, typ));
/* Find the best block to insert the new assignment */
/* We will choose the lowest weighted block, and within */
@@ -452,8 +452,8 @@ void Compiler::optAddCopies()
noway_assert(optAddCopyAsgnNode);
- GenTreePtr tree = optAddCopyAsgnNode;
- GenTreePtr op1 = tree->gtOp.gtOp1;
+ GenTree* tree = optAddCopyAsgnNode;
+ GenTree* op1 = tree->gtOp.gtOp1;
noway_assert(tree && op1 && tree->OperIsAssignment() && (op1->gtOper == GT_LCL_VAR) &&
(op1->gtLclVarCommon.gtLclNum == lclNum));
@@ -467,11 +467,11 @@ void Compiler::optAddCopies()
/* Assign the old expression into the new temp */
- GenTreePtr newAsgn = gtNewTempAssign(copyLclNum, tree->gtOp.gtOp2);
+ GenTree* newAsgn = gtNewTempAssign(copyLclNum, tree->gtOp.gtOp2);
/* Copy the new temp to op1 */
- GenTreePtr copyAsgn = gtNewAssignNode(op1, gtNewLclvNode(copyLclNum, typ));
+ GenTree* copyAsgn = gtNewAssignNode(op1, gtNewLclvNode(copyLclNum, typ));
/* Change the tree to a GT_COMMA with the two assignments as child nodes */
@@ -821,7 +821,7 @@ Compiler::AssertionDsc* Compiler::optGetAssertion(AssertionIndex assertIndex)
* if they don't care about it. Refer overloaded method optCreateAssertion.
*
*/
-AssertionIndex Compiler::optCreateAssertion(GenTreePtr op1, GenTreePtr op2, optAssertionKind assertionKind)
+AssertionIndex Compiler::optCreateAssertion(GenTree* op1, GenTree* op2, optAssertionKind assertionKind)
{
AssertionDsc assertionDsc;
return optCreateAssertion(op1, op2, assertionKind, &assertionDsc);
@@ -843,8 +843,8 @@ AssertionIndex Compiler::optCreateAssertion(GenTreePtr op1, GenTreePtr op2, optA
* NO_ASSERTION_INDEX and we could not create the assertion.
*
*/
-AssertionIndex Compiler::optCreateAssertion(GenTreePtr op1,
- GenTreePtr op2,
+AssertionIndex Compiler::optCreateAssertion(GenTree* op1,
+ GenTree* op2,
optAssertionKind assertionKind,
AssertionDsc* assertion)
{
@@ -1409,7 +1409,7 @@ DONE_ASSERTION:
* constant. Set "vnBased" to true to indicate local or global assertion prop.
* "pFlags" indicates if the constant is a handle marked by GTF_ICON_HDL_MASK.
*/
-bool Compiler::optIsTreeKnownIntValue(bool vnBased, GenTreePtr tree, ssize_t* pConstant, unsigned* pFlags)
+bool Compiler::optIsTreeKnownIntValue(bool vnBased, GenTree* tree, ssize_t* pConstant, unsigned* pFlags)
{
// Is Local assertion prop?
if (!vnBased)
@@ -1694,7 +1694,7 @@ void Compiler::optDebugCheckAssertions(AssertionIndex index)
*
*/
-void Compiler::optCreateComplementaryAssertion(AssertionIndex assertionIndex, GenTreePtr op1, GenTreePtr op2)
+void Compiler::optCreateComplementaryAssertion(AssertionIndex assertionIndex, GenTree* op1, GenTree* op2)
{
if (assertionIndex == NO_ASSERTION_INDEX)
{
@@ -1741,9 +1741,7 @@ void Compiler::optCreateComplementaryAssertion(AssertionIndex assertionIndex, Ge
* for the operands.
*/
-AssertionIndex Compiler::optCreateJtrueAssertions(GenTreePtr op1,
- GenTreePtr op2,
- Compiler::optAssertionKind assertionKind)
+AssertionIndex Compiler::optCreateJtrueAssertions(GenTree* op1, GenTree* op2, Compiler::optAssertionKind assertionKind)
{
AssertionDsc candidateAssertion;
AssertionIndex assertionIndex = optCreateAssertion(op1, op2, assertionKind, &candidateAssertion);
@@ -1756,15 +1754,15 @@ AssertionIndex Compiler::optCreateJtrueAssertions(GenTreePtr op1
return assertionIndex;
}
-AssertionInfo Compiler::optCreateJTrueBoundsAssertion(GenTreePtr tree)
+AssertionInfo Compiler::optCreateJTrueBoundsAssertion(GenTree* tree)
{
- GenTreePtr relop = tree->gtGetOp1();
+ GenTree* relop = tree->gtGetOp1();
if ((relop->OperKind() & GTK_RELOP) == 0)
{
return NO_ASSERTION_INDEX;
}
- GenTreePtr op1 = relop->gtGetOp1();
- GenTreePtr op2 = relop->gtGetOp2();
+ GenTree* op1 = relop->gtGetOp1();
+ GenTree* op2 = relop->gtGetOp2();
ValueNum vn = op1->gtVNPair.GetConservative();
@@ -1894,7 +1892,7 @@ AssertionInfo Compiler::optCreateJTrueBoundsAssertion(GenTreePtr tree)
*
* Compute assertions for the JTrue node.
*/
-AssertionInfo Compiler::optAssertionGenJtrue(GenTreePtr tree)
+AssertionInfo Compiler::optAssertionGenJtrue(GenTree* tree)
{
// Only create assertions for JTRUE when we are in the global phase
if (optLocalAssertionProp)
@@ -1902,7 +1900,7 @@ AssertionInfo Compiler::optAssertionGenJtrue(GenTreePtr tree)
return NO_ASSERTION_INDEX;
}
- GenTreePtr relop = tree->gtOp.gtOp1;
+ GenTree* relop = tree->gtOp.gtOp1;
if ((relop->OperKind() & GTK_RELOP) == 0)
{
return NO_ASSERTION_INDEX;
@@ -1910,8 +1908,8 @@ AssertionInfo Compiler::optAssertionGenJtrue(GenTreePtr tree)
Compiler::optAssertionKind assertionKind = OAK_INVALID;
- GenTreePtr op1 = relop->gtOp.gtOp1;
- GenTreePtr op2 = relop->gtOp.gtOp2;
+ GenTree* op1 = relop->gtOp.gtOp1;
+ GenTree* op2 = relop->gtOp.gtOp2;
AssertionInfo info = optCreateJTrueBoundsAssertion(tree);
if (info.HasAssertion())
@@ -1998,14 +1996,14 @@ AssertionInfo Compiler::optAssertionGenJtrue(GenTreePtr tree)
* from all of the constituent phi operands.
*
*/
-AssertionIndex Compiler::optAssertionGenPhiDefn(GenTreePtr tree)
+AssertionIndex Compiler::optAssertionGenPhiDefn(GenTree* tree)
{
if (!tree->IsPhiDefn())
{
return NO_ASSERTION_INDEX;
}
- GenTreePtr phi = tree->gtOp.gtOp2;
+ GenTree* phi = tree->gtOp.gtOp2;
// Try to find if all phi arguments are known to be non-null.
bool isNonNull = true;
@@ -2032,7 +2030,7 @@ AssertionIndex Compiler::optAssertionGenPhiDefn(GenTreePtr tree)
* then assign an index to the given value assignment by adding
* it to the lookup table, if necessary.
*/
-void Compiler::optAssertionGen(GenTreePtr tree)
+void Compiler::optAssertionGen(GenTree* tree)
{
tree->ClearAssertion();
@@ -2095,7 +2093,7 @@ void Compiler::optAssertionGen(GenTreePtr tree)
if ((tree->gtFlags & GTF_CALL_NULLCHECK) || tree->AsCall()->IsVirtual())
{
// Retrieve the 'this' arg
- GenTreePtr thisArg = gtGetThisArg(tree->AsCall());
+ GenTree* thisArg = gtGetThisArg(tree->AsCall());
#if defined(_TARGET_X86_) || defined(_TARGET_AMD64_) || defined(_TARGET_ARM_)
if (thisArg == nullptr)
{
@@ -2205,7 +2203,7 @@ AssertionIndex Compiler::optFindComplementary(AssertionIndex assertIndex)
* if one such assertion could not be found in "assertions."
*/
-AssertionIndex Compiler::optAssertionIsSubrange(GenTreePtr tree, var_types toType, ASSERT_VALARG_TP assertions)
+AssertionIndex Compiler::optAssertionIsSubrange(GenTree* tree, var_types toType, ASSERT_VALARG_TP assertions)
{
if (!optLocalAssertionProp && BitVecOps::IsEmpty(apTraits, assertions))
{
@@ -2270,7 +2268,7 @@ AssertionIndex Compiler::optAssertionIsSubrange(GenTreePtr tree, var_types toTyp
* could not be found, then it returns NO_ASSERTION_INDEX.
*
*/
-AssertionIndex Compiler::optAssertionIsSubtype(GenTreePtr tree, GenTreePtr methodTableArg, ASSERT_VALARG_TP assertions)
+AssertionIndex Compiler::optAssertionIsSubtype(GenTree* tree, GenTree* methodTableArg, ASSERT_VALARG_TP assertions)
{
if (!optLocalAssertionProp && BitVecOps::IsEmpty(apTraits, assertions))
{
@@ -2357,7 +2355,7 @@ AssertionIndex Compiler::optAssertionIsSubtype(GenTreePtr tree, GenTreePtr metho
// appropriately decremented. The ref-counts of variables in the side-effect
// nodes will be retained.
//
-GenTreePtr Compiler::optVNConstantPropOnTree(BasicBlock* block, GenTreePtr stmt, GenTreePtr tree)
+GenTree* Compiler::optVNConstantPropOnTree(BasicBlock* block, GenTree* stmt, GenTree* tree)
{
if (tree->OperGet() == GT_JTRUE)
{
@@ -2381,8 +2379,8 @@ GenTreePtr Compiler::optVNConstantPropOnTree(BasicBlock* block, GenTreePtr stmt,
return nullptr;
}
- GenTreePtr newTree = tree;
- GenTreePtr sideEffList = nullptr;
+ GenTree* newTree = tree;
+ GenTree* sideEffList = nullptr;
switch (vnStore->TypeOfVN(vnCns))
{
case TYP_FLOAT:
@@ -2576,9 +2574,9 @@ GenTreePtr Compiler::optVNConstantPropOnTree(BasicBlock* block, GenTreePtr stmt,
* Perform constant propagation on a tree given the "curAssertion" is true at the point of the "tree."
*
*/
-GenTreePtr Compiler::optConstantAssertionProp(AssertionDsc* curAssertion,
- GenTreePtr tree,
- GenTreePtr stmt DEBUGARG(AssertionIndex index))
+GenTree* Compiler::optConstantAssertionProp(AssertionDsc* curAssertion,
+ GenTree* tree,
+ GenTree* stmt DEBUGARG(AssertionIndex index))
{
unsigned lclNum = tree->gtLclVarCommon.gtLclNum;
@@ -2587,7 +2585,7 @@ GenTreePtr Compiler::optConstantAssertionProp(AssertionDsc* curAssertion,
return nullptr;
}
- GenTreePtr newTree = tree;
+ GenTree* newTree = tree;
// Update 'newTree' with the new value from our table
// Typically newTree == tree and we are updating the node in place
@@ -2714,7 +2712,7 @@ GenTreePtr Compiler::optConstantAssertionProp(AssertionDsc* curAssertion,
* "copyVar." Before substituting "copyVar" for "lclVar", we make sure using "copy" doesn't widen access.
*
*/
-bool Compiler::optAssertionProp_LclVarTypeCheck(GenTreePtr tree, LclVarDsc* lclVarDsc, LclVarDsc* copyVarDsc)
+bool Compiler::optAssertionProp_LclVarTypeCheck(GenTree* tree, LclVarDsc* lclVarDsc, LclVarDsc* copyVarDsc)
{
/*
Small struct field locals are stored using the exact width and loaded widened
@@ -2766,9 +2764,9 @@ bool Compiler::optAssertionProp_LclVarTypeCheck(GenTreePtr tree, LclVarDsc* lclV
* the "curAssertion."
*
*/
-GenTreePtr Compiler::optCopyAssertionProp(AssertionDsc* curAssertion,
- GenTreePtr tree,
- GenTreePtr stmt DEBUGARG(AssertionIndex index))
+GenTree* Compiler::optCopyAssertionProp(AssertionDsc* curAssertion,
+ GenTree* tree,
+ GenTree* stmt DEBUGARG(AssertionIndex index))
{
const AssertionDsc::AssertionDscOp1& op1 = curAssertion->op1;
const AssertionDsc::AssertionDscOp2& op2 = curAssertion->op2;
@@ -2843,7 +2841,7 @@ GenTreePtr Compiler::optCopyAssertionProp(AssertionDsc* curAssertion,
* be nullptr. Returns the modified tree, or nullptr if no assertion prop took place.
*/
-GenTreePtr Compiler::optAssertionProp_LclVar(ASSERT_VALARG_TP assertions, const GenTreePtr tree, const GenTreePtr stmt)
+GenTree* Compiler::optAssertionProp_LclVar(ASSERT_VALARG_TP assertions, GenTree* tree, GenTree* stmt)
{
assert(tree->gtOper == GT_LCL_VAR);
// If we have a var definition then bail or
@@ -2880,7 +2878,7 @@ GenTreePtr Compiler::optAssertionProp_LclVar(ASSERT_VALARG_TP assertions, const
if (optLocalAssertionProp)
{
// Perform copy assertion prop.
- GenTreePtr newTree = optCopyAssertionProp(curAssertion, tree, stmt DEBUGARG(assertionIndex));
+ GenTree* newTree = optCopyAssertionProp(curAssertion, tree, stmt DEBUGARG(assertionIndex));
if (newTree == nullptr)
{
// Skip and try next assertion.
@@ -2966,9 +2964,7 @@ AssertionIndex Compiler::optLocalAssertionIsEqualOrNotEqual(
* "op1" == "op2" or "op1" != "op2." Does a value number based comparison.
*
*/
-AssertionIndex Compiler::optGlobalAssertionIsEqualOrNotEqual(ASSERT_VALARG_TP assertions,
- GenTreePtr op1,
- GenTreePtr op2)
+AssertionIndex Compiler::optGlobalAssertionIsEqualOrNotEqual(ASSERT_VALARG_TP assertions, GenTree* op1, GenTree* op2)
{
if (BitVecOps::IsEmpty(apTraits, assertions))
{
@@ -3006,7 +3002,7 @@ AssertionIndex Compiler::optGlobalAssertionIsEqualOrNotEqual(ASSERT_VALARG_TP as
* Returns the modified tree, or nullptr if no assertion prop took place
*/
-GenTreePtr Compiler::optAssertionProp_RelOp(ASSERT_VALARG_TP assertions, const GenTreePtr tree, const GenTreePtr stmt)
+GenTree* Compiler::optAssertionProp_RelOp(ASSERT_VALARG_TP assertions, GenTree* tree, GenTree* stmt)
{
assert(tree->OperKind() & GTK_RELOP);
@@ -3036,15 +3032,13 @@ GenTreePtr Compiler::optAssertionProp_RelOp(ASSERT_VALARG_TP assertions, const G
* perform Value numbering based relop assertion propagation on the tree.
*
*/
-GenTreePtr Compiler::optAssertionPropGlobal_RelOp(ASSERT_VALARG_TP assertions,
- const GenTreePtr tree,
- const GenTreePtr stmt)
+GenTree* Compiler::optAssertionPropGlobal_RelOp(ASSERT_VALARG_TP assertions, GenTree* tree, GenTree* stmt)
{
assert(tree->OperGet() == GT_EQ || tree->OperGet() == GT_NE);
- GenTreePtr newTree = tree;
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtOp.gtOp2;
+ GenTree* newTree = tree;
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtOp.gtOp2;
if (op1->gtOper != GT_LCL_VAR)
{
@@ -3219,14 +3213,12 @@ GenTreePtr Compiler::optAssertionPropGlobal_RelOp(ASSERT_VALARG_TP assertions,
* perform local variable name based relop assertion propagation on the tree.
*
*/
-GenTreePtr Compiler::optAssertionPropLocal_RelOp(ASSERT_VALARG_TP assertions,
- const GenTreePtr tree,
- const GenTreePtr stmt)
+GenTree* Compiler::optAssertionPropLocal_RelOp(ASSERT_VALARG_TP assertions, GenTree* tree, GenTree* stmt)
{
assert(tree->OperGet() == GT_EQ || tree->OperGet() == GT_NE);
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtOp.gtOp2;
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtOp.gtOp2;
// For Local AssertionProp we only can fold when op1 is a GT_LCL_VAR
if (op1->gtOper != GT_LCL_VAR)
@@ -3315,12 +3307,12 @@ GenTreePtr Compiler::optAssertionPropLocal_RelOp(ASSERT_VALARG_TP assertions,
*
* Returns the modified tree, or nullptr if no assertion prop took place.
*/
-GenTreePtr Compiler::optAssertionProp_Cast(ASSERT_VALARG_TP assertions, const GenTreePtr tree, const GenTreePtr stmt)
+GenTree* Compiler::optAssertionProp_Cast(ASSERT_VALARG_TP assertions, GenTree* tree, GenTree* stmt)
{
assert(tree->gtOper == GT_CAST);
- var_types toType = tree->gtCast.gtCastType;
- GenTreePtr op1 = tree->gtCast.CastOp();
+ var_types toType = tree->gtCast.gtCastType;
+ GenTree* op1 = tree->gtCast.CastOp();
// If we have a cast involving floating point types, then bail.
if (varTypeIsFloating(toType) || varTypeIsFloating(op1->TypeGet()))
@@ -3329,7 +3321,7 @@ GenTreePtr Compiler::optAssertionProp_Cast(ASSERT_VALARG_TP assertions, const Ge
}
// Skip over a GT_COMMA node(s), if necessary to get to the lcl.
- GenTreePtr lcl = op1;
+ GenTree* lcl = op1;
while (lcl->gtOper == GT_COMMA)
{
lcl = lcl->gtOp.gtOp2;
@@ -3385,7 +3377,7 @@ GenTreePtr Compiler::optAssertionProp_Cast(ASSERT_VALARG_TP assertions, const Ge
// Change the "lcl" type to match what the cast wanted, by propagating the type
// change down the comma nodes leading to the "lcl", if we skipped them earlier.
- GenTreePtr tmp = op1;
+ GenTree* tmp = op1;
while (tmp->gtOper == GT_COMMA)
{
tmp->gtType = toType;
@@ -3412,7 +3404,7 @@ GenTreePtr Compiler::optAssertionProp_Cast(ASSERT_VALARG_TP assertions, const Ge
* Given a tree with an array bounds check node, eliminate it because it was
* checked already in the program.
*/
-GenTreePtr Compiler::optAssertionProp_Comma(ASSERT_VALARG_TP assertions, const GenTreePtr tree, const GenTreePtr stmt)
+GenTree* Compiler::optAssertionProp_Comma(ASSERT_VALARG_TP assertions, GenTree* tree, GenTree* stmt)
{
// Remove the bounds check as part of the GT_COMMA node since we need parent pointer to remove nodes.
// When processing visits the bounds check, it sets the throw kind to None if the check is redundant.
@@ -3435,7 +3427,7 @@ GenTreePtr Compiler::optAssertionProp_Comma(ASSERT_VALARG_TP assertions, const G
*
*/
-GenTreePtr Compiler::optAssertionProp_Ind(ASSERT_VALARG_TP assertions, const GenTreePtr tree, const GenTreePtr stmt)
+GenTree* Compiler::optAssertionProp_Ind(ASSERT_VALARG_TP assertions, GenTree* tree, GenTree* stmt)
{
assert(tree->OperIsIndir());
@@ -3445,7 +3437,7 @@ GenTreePtr Compiler::optAssertionProp_Ind(ASSERT_VALARG_TP assertions, const Gen
}
// Check for add of a constant.
- GenTreePtr op1 = tree->AsIndir()->Addr();
+ GenTree* op1 = tree->AsIndir()->Addr();
if ((op1->gtOper == GT_ADD) && (op1->gtOp.gtOp2->gtOper == GT_CNS_INT))
{
op1 = op1->gtOp.gtOp1;
@@ -3494,7 +3486,7 @@ GenTreePtr Compiler::optAssertionProp_Ind(ASSERT_VALARG_TP assertions, const Gen
* Note: If both VN and assertion table yield a matching assertion, "pVnBased"
* is only set and the return value is "NO_ASSERTION_INDEX."
*/
-bool Compiler::optAssertionIsNonNull(GenTreePtr op,
+bool Compiler::optAssertionIsNonNull(GenTree* op,
ASSERT_VALARG_TP assertions DEBUGARG(bool* pVnBased)
DEBUGARG(AssertionIndex* pIndex))
{
@@ -3523,7 +3515,7 @@ bool Compiler::optAssertionIsNonNull(GenTreePtr op,
* from the set of "assertions."
*
*/
-AssertionIndex Compiler::optAssertionIsNonNullInternal(GenTreePtr op, ASSERT_VALARG_TP assertions)
+AssertionIndex Compiler::optAssertionIsNonNullInternal(GenTree* op, ASSERT_VALARG_TP assertions)
{
// If local assertion prop use lcl comparison, else use VN comparison.
if (!optLocalAssertionProp)
@@ -3582,13 +3574,13 @@ AssertionIndex Compiler::optAssertionIsNonNullInternal(GenTreePtr op, ASSERT_VAL
* Returns the modified tree, or nullptr if no assertion prop took place.
*
*/
-GenTreePtr Compiler::optNonNullAssertionProp_Call(ASSERT_VALARG_TP assertions, GenTreeCall* call, const GenTreePtr stmt)
+GenTree* Compiler::optNonNullAssertionProp_Call(ASSERT_VALARG_TP assertions, GenTreeCall* call, GenTree* stmt)
{
if ((call->gtFlags & GTF_CALL_NULLCHECK) == 0)
{
return nullptr;
}
- GenTreePtr op1 = gtGetThisArg(call);
+ GenTree* op1 = gtGetThisArg(call);
noway_assert(op1 != nullptr);
if (op1->gtOper != GT_LCL_VAR)
{
@@ -3629,7 +3621,7 @@ GenTreePtr Compiler::optNonNullAssertionProp_Call(ASSERT_VALARG_TP assertions, G
*
*/
-GenTreePtr Compiler::optAssertionProp_Call(ASSERT_VALARG_TP assertions, GenTreeCall* call, const GenTreePtr stmt)
+GenTree* Compiler::optAssertionProp_Call(ASSERT_VALARG_TP assertions, GenTreeCall* call, GenTree* stmt)
{
if (optNonNullAssertionProp_Call(assertions, call, stmt))
{
@@ -3647,13 +3639,13 @@ GenTreePtr Compiler::optAssertionProp_Call(ASSERT_VALARG_TP assertions, GenTreeC
call->gtCallMethHnd == eeFindHelper(CORINFO_HELP_CHKCASTANY) ||
call->gtCallMethHnd == eeFindHelper(CORINFO_HELP_CHKCASTCLASS_SPECIAL))
{
- GenTreePtr arg1 = gtArgEntryByArgNum(call, 1)->node;
+ GenTree* arg1 = gtArgEntryByArgNum(call, 1)->node;
if (arg1->gtOper != GT_LCL_VAR)
{
return nullptr;
}
- GenTreePtr arg2 = gtArgEntryByArgNum(call, 0)->node;
+ GenTree* arg2 = gtArgEntryByArgNum(call, 0)->node;
unsigned index = optAssertionIsSubtype(arg1, arg2, assertions);
if (index != NO_ASSERTION_INDEX)
@@ -3665,7 +3657,7 @@ GenTreePtr Compiler::optAssertionProp_Call(ASSERT_VALARG_TP assertions, GenTreeC
gtDispTree(call, nullptr, nullptr, true);
}
#endif
- GenTreePtr list = nullptr;
+ GenTree* list = nullptr;
gtExtractSideEffList(call, &list, GTF_SIDE_EFFECT, true);
if (list != nullptr)
{
@@ -3686,7 +3678,7 @@ GenTreePtr Compiler::optAssertionProp_Call(ASSERT_VALARG_TP assertions, GenTreeC
* Given a tree consisting of a comma node with a bounds check, remove any
* redundant bounds check that has already been checked in the program flow.
*/
-GenTreePtr Compiler::optAssertionProp_BndsChk(ASSERT_VALARG_TP assertions, const GenTreePtr tree, const GenTreePtr stmt)
+GenTree* Compiler::optAssertionProp_BndsChk(ASSERT_VALARG_TP assertions, GenTree* tree, GenTree* stmt)
{
if (optLocalAssertionProp)
{
@@ -3821,7 +3813,7 @@ GenTreePtr Compiler::optAssertionProp_BndsChk(ASSERT_VALARG_TP assertions, const
*
*/
-GenTreePtr Compiler::optAssertionProp_Update(const GenTreePtr newTree, const GenTreePtr tree, const GenTreePtr stmt)
+GenTree* Compiler::optAssertionProp_Update(GenTree* newTree, GenTree* tree, GenTree* stmt)
{
noway_assert(newTree != nullptr);
@@ -3837,7 +3829,7 @@ GenTreePtr Compiler::optAssertionProp_Update(const GenTreePtr newTree, const Gen
// locate our parent node and update it so that it points to newTree
if (newTree != tree)
{
- GenTreePtr* link = gtFindLink(stmt, tree);
+ GenTree** link = gtFindLink(stmt, tree);
#ifdef DEBUG
if (link == nullptr)
{
@@ -3880,7 +3872,7 @@ GenTreePtr Compiler::optAssertionProp_Update(const GenTreePtr newTree, const Gen
* Returns the modified tree, or nullptr if no assertion prop took place.
*/
-GenTreePtr Compiler::optAssertionProp(ASSERT_VALARG_TP assertions, const GenTreePtr tree, const GenTreePtr stmt)
+GenTree* Compiler::optAssertionProp(ASSERT_VALARG_TP assertions, GenTree* tree, GenTree* stmt)
{
switch (tree->gtOper)
{
@@ -4434,11 +4426,11 @@ ASSERT_TP* Compiler::optComputeAssertionGen()
GenTree* jtrue = nullptr;
// Walk the statement trees in this basic block.
- for (GenTreePtr stmt = block->bbTreeList; stmt; stmt = stmt->gtNext)
+ for (GenTree* stmt = block->bbTreeList; stmt; stmt = stmt->gtNext)
{
noway_assert(stmt->gtOper == GT_STMT);
- for (GenTreePtr tree = stmt->gtStmt.gtStmtList; tree; tree = tree->gtNext)
+ for (GenTree* tree = stmt->gtStmt.gtStmtList; tree; tree = tree->gtNext)
{
if (tree->gtOper == GT_JTRUE)
{
@@ -4569,9 +4561,9 @@ ASSERT_TP* Compiler::optInitAssertionDataflowFlags()
struct VNAssertionPropVisitorInfo
{
Compiler* pThis;
- GenTreePtr stmt;
+ GenTree* stmt;
BasicBlock* block;
- VNAssertionPropVisitorInfo(Compiler* pThis, BasicBlock* block, GenTreePtr stmt)
+ VNAssertionPropVisitorInfo(Compiler* pThis, BasicBlock* block, GenTree* stmt)
: pThis(pThis), stmt(stmt), block(block)
{
}
@@ -4609,10 +4601,10 @@ struct VNAssertionPropVisitorInfo
// Either the "newTree" is returned when no side effects are present or a comma
// separated side effect list with "newTree" is returned.
//
-GenTreePtr Compiler::optPrepareTreeForReplacement(GenTreePtr oldTree, GenTreePtr newTree)
+GenTree* Compiler::optPrepareTreeForReplacement(GenTree* oldTree, GenTree* newTree)
{
// If we have side effects, extract them and append newTree to the list.
- GenTreePtr sideEffList = nullptr;
+ GenTree* sideEffList = nullptr;
if (oldTree->gtFlags & GTF_PERSISTENT_SIDE_EFFECTS)
{
gtExtractSideEffList(oldTree, &sideEffList, GTF_PERSISTENT_SIDE_EFFECTS_IN_CSE);
@@ -4670,9 +4662,9 @@ GenTreePtr Compiler::optPrepareTreeForReplacement(GenTreePtr oldTree, GenTreePtr
// sensitive to adding new statements. Hence the change is not made directly
// into fgFoldConditional.
//
-GenTreePtr Compiler::optVNConstantPropOnJTrue(BasicBlock* block, GenTreePtr stmt, GenTreePtr test)
+GenTree* Compiler::optVNConstantPropOnJTrue(BasicBlock* block, GenTree* stmt, GenTree* test)
{
- GenTreePtr relop = test->gtGetOp1();
+ GenTree* relop = test->gtGetOp1();
// VN based assertion non-null on this relop has been performed.
if (!relop->OperIsCompare())
@@ -4693,11 +4685,11 @@ GenTreePtr Compiler::optVNConstantPropOnJTrue(BasicBlock* block, GenTreePtr stmt
}
// Prepare the tree for replacement so any side effects can be extracted.
- GenTreePtr sideEffList = optPrepareTreeForReplacement(test, nullptr);
+ GenTree* sideEffList = optPrepareTreeForReplacement(test, nullptr);
while (sideEffList)
{
- GenTreePtr newStmt;
+ GenTree* newStmt;
if (sideEffList->OperGet() == GT_COMMA)
{
newStmt = fgInsertStmtNearEnd(block, sideEffList->gtGetOp1());
@@ -4749,7 +4741,7 @@ GenTreePtr Compiler::optVNConstantPropOnJTrue(BasicBlock* block, GenTreePtr stmt
// evaluates to constant, then the tree is replaced by its side effects and
// the constant node.
//
-Compiler::fgWalkResult Compiler::optVNConstantPropCurStmt(BasicBlock* block, GenTreePtr stmt, GenTreePtr tree)
+Compiler::fgWalkResult Compiler::optVNConstantPropCurStmt(BasicBlock* block, GenTree* stmt, GenTree* tree)
{
// Don't propagate floating-point constants into a TYP_STRUCT LclVar
// This can occur for HFA return values (see hfa_sf3E_r.exe)
@@ -4823,7 +4815,7 @@ Compiler::fgWalkResult Compiler::optVNConstantPropCurStmt(BasicBlock* block, Gen
}
// Perform the constant propagation
- GenTreePtr newTree = optVNConstantPropOnTree(block, stmt, tree);
+ GenTree* newTree = optVNConstantPropOnTree(block, stmt, tree);
if (newTree == nullptr)
{
// Not propagated, keep going.
@@ -4860,10 +4852,10 @@ Compiler::fgWalkResult Compiler::optVNConstantPropCurStmt(BasicBlock* block, Gen
// indirections. This is different from flow based assertions and helps
// unify VN based constant prop and non-null prop in a single pre-order walk.
//
-void Compiler::optVnNonNullPropCurStmt(BasicBlock* block, GenTreePtr stmt, GenTreePtr tree)
+void Compiler::optVnNonNullPropCurStmt(BasicBlock* block, GenTree* stmt, GenTree* tree)
{
- ASSERT_TP empty = BitVecOps::UninitVal();
- GenTreePtr newTree = nullptr;
+ ASSERT_TP empty = BitVecOps::UninitVal();
+ GenTree* newTree = nullptr;
if (tree->OperGet() == GT_CALL)
{
newTree = optNonNullAssertionProp_Call(empty, tree->AsCall(), stmt);
@@ -4895,7 +4887,7 @@ void Compiler::optVnNonNullPropCurStmt(BasicBlock* block, GenTreePtr stmt, GenTr
// value numbers.
//
/* static */
-Compiler::fgWalkResult Compiler::optVNAssertionPropCurStmtVisitor(GenTreePtr* ppTree, fgWalkData* data)
+Compiler::fgWalkResult Compiler::optVNAssertionPropCurStmtVisitor(GenTree** ppTree, fgWalkData* data)
{
VNAssertionPropVisitorInfo* pData = (VNAssertionPropVisitorInfo*)data->pCallbackData;
Compiler* pThis = pData->pThis;
@@ -4914,7 +4906,7 @@ Compiler::fgWalkResult Compiler::optVNAssertionPropCurStmtVisitor(GenTreePtr* pp
* Returns the skipped next stmt if the current statement or next few
* statements got removed, else just returns the incoming stmt.
*/
-GenTreePtr Compiler::optVNAssertionPropCurStmt(BasicBlock* block, GenTreePtr stmt)
+GenTree* Compiler::optVNAssertionPropCurStmt(BasicBlock* block, GenTree* stmt)
{
// TODO-Review: EH successor/predecessor iteration seems broken.
// See: SELF_HOST_TESTS_ARM\jit\Directed\ExcepFilters\fault\fault.exe
@@ -4924,7 +4916,7 @@ GenTreePtr Compiler::optVNAssertionPropCurStmt(BasicBlock* block, GenTreePtr stm
}
// Preserve the prev link before the propagation and morph.
- GenTreePtr prev = (stmt == block->firstStmt()) ? nullptr : stmt->gtPrev;
+ GenTree* prev = (stmt == block->firstStmt()) ? nullptr : stmt->gtPrev;
// Perform VN based assertion prop first, in case we don't find
// anything in assertion gen.
@@ -4940,7 +4932,7 @@ GenTreePtr Compiler::optVNAssertionPropCurStmt(BasicBlock* block, GenTreePtr stm
// Check if propagation removed statements starting from current stmt.
// If so, advance to the next good statement.
- GenTreePtr nextStmt = (prev == nullptr) ? block->firstStmt() : prev->gtNext;
+ GenTree* nextStmt = (prev == nullptr) ? block->firstStmt() : prev->gtNext;
return nextStmt;
}
@@ -4975,7 +4967,7 @@ void Compiler::optAssertionPropMain()
fgRemoveRestOfBlock = false;
- GenTreePtr stmt = block->bbTreeList;
+ GenTree* stmt = block->bbTreeList;
while (stmt)
{
// We need to remove the rest of the block.
@@ -4988,7 +4980,7 @@ void Compiler::optAssertionPropMain()
else
{
// Perform VN based assertion prop before assertion gen.
- GenTreePtr nextStmt = optVNAssertionPropCurStmt(block, stmt);
+ GenTree* nextStmt = optVNAssertionPropCurStmt(block, stmt);
// Propagation resulted in removal of the remaining stmts, perform it.
if (fgRemoveRestOfBlock)
@@ -5006,7 +4998,7 @@ void Compiler::optAssertionPropMain()
}
// Perform assertion gen for control flow based assertions.
- for (GenTreePtr tree = stmt->gtStmt.gtStmtList; tree; tree = tree->gtNext)
+ for (GenTree* tree = stmt->gtStmt.gtStmtList; tree; tree = tree->gtNext)
{
optAssertionGen(tree);
}
@@ -5078,7 +5070,7 @@ void Compiler::optAssertionPropMain()
fgRemoveRestOfBlock = false;
// Walk the statement trees in this basic block
- GenTreePtr stmt = block->FirstNonPhiDef();
+ GenTree* stmt = block->FirstNonPhiDef();
while (stmt)
{
noway_assert(stmt->gtOper == GT_STMT);
@@ -5093,11 +5085,11 @@ void Compiler::optAssertionPropMain()
// Preserve the prev link before the propagation and morph, to check if propagation
// removes the current stmt.
- GenTreePtr prev = (stmt == block->firstStmt()) ? nullptr : stmt->gtPrev;
+ GenTree* prev = (stmt == block->firstStmt()) ? nullptr : stmt->gtPrev;
optAssertionPropagatedCurrentStmt = false; // set to true if a assertion propagation took place
// and thus we must morph, set order, re-link
- for (GenTreePtr tree = stmt->gtStmt.gtStmtList; tree; tree = tree->gtNext)
+ for (GenTree* tree = stmt->gtStmt.gtStmtList; tree; tree = tree->gtNext)
{
if (tree->OperIs(GT_JTRUE))
{
@@ -5110,7 +5102,7 @@ void Compiler::optAssertionPropMain()
BitVecOps::ToString(apTraits, assertions), block->bbNum, dspTreeID(stmt), dspTreeID(tree),
tree->GetAssertionInfo().GetAssertionIndex());
- GenTreePtr newTree = optAssertionProp(assertions, tree, stmt);
+ GenTree* newTree = optAssertionProp(assertions, tree, stmt);
if (newTree)
{
assert(optAssertionPropagatedCurrentStmt == true);
@@ -5142,8 +5134,8 @@ void Compiler::optAssertionPropMain()
// Check if propagation removed statements starting from current stmt.
// If so, advance to the next good statement.
- GenTreePtr nextStmt = (prev == nullptr) ? block->firstStmt() : prev->gtNext;
- stmt = (stmt == nextStmt) ? stmt->gtNext : nextStmt;
+ GenTree* nextStmt = (prev == nullptr) ? block->firstStmt() : prev->gtNext;
+ stmt = (stmt == nextStmt) ? stmt->gtNext : nextStmt;
}
optAssertionPropagatedCurrentStmt = false; // clear it back as we are done with stmts.
}
diff --git a/src/jit/block.cpp b/src/jit/block.cpp
index ab4be46571..b75aa1ea61 100644
--- a/src/jit/block.cpp
+++ b/src/jit/block.cpp
@@ -645,7 +645,7 @@ bool BasicBlock::CloneBlockState(
to->bbTgtStkDepth = from->bbTgtStkDepth;
#endif // DEBUG
- for (GenTreePtr fromStmt = from->bbTreeList; fromStmt != nullptr; fromStmt = fromStmt->gtNext)
+ for (GenTree* fromStmt = from->bbTreeList; fromStmt != nullptr; fromStmt = fromStmt->gtNext)
{
auto newExpr = compiler->gtCloneExpr(fromStmt->gtStmt.gtStmtExpr, 0, varNum, varVal);
if (!newExpr)
@@ -831,12 +831,12 @@ bool BasicBlock::isEmpty()
GenTreeStmt* BasicBlock::FirstNonPhiDef()
{
- GenTreePtr stmt = bbTreeList;
+ GenTree* stmt = bbTreeList;
if (stmt == nullptr)
{
return nullptr;
}
- GenTreePtr tree = stmt->gtStmt.gtStmtExpr;
+ GenTree* tree = stmt->gtStmt.gtStmtExpr;
while ((tree->OperGet() == GT_ASG && tree->gtOp.gtOp2->OperGet() == GT_PHI) ||
(tree->OperGet() == GT_STORE_LCL_VAR && tree->gtOp.gtOp1->OperGet() == GT_PHI))
{
@@ -850,14 +850,14 @@ GenTreeStmt* BasicBlock::FirstNonPhiDef()
return stmt->AsStmt();
}
-GenTreePtr BasicBlock::FirstNonPhiDefOrCatchArgAsg()
+GenTree* BasicBlock::FirstNonPhiDefOrCatchArgAsg()
{
- GenTreePtr stmt = FirstNonPhiDef();
+ GenTree* stmt = FirstNonPhiDef();
if (stmt == nullptr)
{
return nullptr;
}
- GenTreePtr tree = stmt->gtStmt.gtStmtExpr;
+ GenTree* tree = stmt->gtStmt.gtStmtExpr;
if ((tree->OperGet() == GT_ASG && tree->gtOp.gtOp2->OperGet() == GT_CATCH_ARG) ||
(tree->OperGet() == GT_STORE_LCL_VAR && tree->gtOp.gtOp1->OperGet() == GT_CATCH_ARG))
{
@@ -1161,8 +1161,8 @@ bool BasicBlock::endsWithJmpMethod(Compiler* comp)
//
bool BasicBlock::endsWithTailCallOrJmp(Compiler* comp, bool fastTailCallsOnly /*=false*/)
{
- GenTreePtr tailCall = nullptr;
- bool tailCallsConvertibleToLoopOnly = false;
+ GenTree* tailCall = nullptr;
+ bool tailCallsConvertibleToLoopOnly = false;
return endsWithJmpMethod(comp) ||
endsWithTailCall(comp, fastTailCallsOnly, tailCallsConvertibleToLoopOnly, &tailCall);
}
diff --git a/src/jit/codegen.h b/src/jit/codegen.h
index bb8bc50f81..5a086e99fe 100644
--- a/src/jit/codegen.h
+++ b/src/jit/codegen.h
@@ -35,13 +35,13 @@ public:
virtual void genGenerateCode(void** codePtr, ULONG* nativeSizeOfCode);
// TODO-Cleanup: Abstract out the part of this that finds the addressing mode, and
// move it to Lower
- virtual bool genCreateAddrMode(GenTreePtr addr,
- int mode,
- bool fold,
- regMaskTP regMask,
- bool* revPtr,
- GenTreePtr* rv1Ptr,
- GenTreePtr* rv2Ptr,
+ virtual bool genCreateAddrMode(GenTree* addr,
+ int mode,
+ bool fold,
+ regMaskTP regMask,
+ bool* revPtr,
+ GenTree** rv1Ptr,
+ GenTree** rv2Ptr,
#if SCALED_ADDR_MODES
unsigned* mulPtr,
#endif
@@ -64,7 +64,7 @@ private:
CORINFO_FIELD_HANDLE u8ToDblBitmask;
// Generates SSE2 code for the given tree as "Operand BitWiseOp BitMask"
- void genSSE2BitwiseOp(GenTreePtr treeNode);
+ void genSSE2BitwiseOp(GenTree* treeNode);
// Generates SSE41 code for the given tree as a round operation
void genSSE41RoundOp(GenTreeOp* treeNode);
@@ -110,7 +110,7 @@ private:
// 'true' label corresponds to jump target of the current basic block i.e. the target to
// branch to on compare condition being true. 'false' label corresponds to the target to
// branch to on condition being false.
- static void genJumpKindsForTree(GenTreePtr cmpTree, emitJumpKind jmpKind[2], bool jmpToTrueLabel[2]);
+ static void genJumpKindsForTree(GenTree* cmpTree, emitJumpKind jmpKind[2], bool jmpToTrueLabel[2]);
static bool genShouldRoundFP();
@@ -200,13 +200,13 @@ private:
#endif
#ifdef LEGACY_BACKEND
- regMaskTP genNewLiveRegMask(GenTreePtr first, GenTreePtr second);
+ regMaskTP genNewLiveRegMask(GenTree* first, GenTree* second);
// During codegen, determine the LiveSet after tree.
// Preconditions: must be called during codegen, when compCurLife and
// compCurLifeTree are being maintained, and tree must occur in the current
// statement.
- VARSET_VALRET_TP genUpdateLiveSetForward(GenTreePtr tree);
+ VARSET_VALRET_TP genUpdateLiveSetForward(GenTree* tree);
#endif
//-------------------------------------------------------------------------
@@ -254,7 +254,7 @@ protected:
#ifdef DEBUG
static const char* genSizeStr(emitAttr size);
- void genStressRegs(GenTreePtr tree);
+ void genStressRegs(GenTree* tree);
#endif // DEBUG
void genCodeForBBlist();
@@ -262,7 +262,7 @@ protected:
public:
#ifndef LEGACY_BACKEND
// genSpillVar is called by compUpdateLifeVar in the !LEGACY_BACKEND case
- void genSpillVar(GenTreePtr tree);
+ void genSpillVar(GenTree* tree);
#endif // !LEGACY_BACKEND
protected:
@@ -272,7 +272,7 @@ protected:
void genEmitHelperCall(unsigned helper, int argSize, emitAttr retSize);
#endif
- void genGCWriteBarrier(GenTreePtr tgt, GCInfo::WriteBarrierForm wbf);
+ void genGCWriteBarrier(GenTree* tgt, GCInfo::WriteBarrierForm wbf);
BasicBlock* genCreateTempLabel();
@@ -285,12 +285,12 @@ protected:
void genExitCode(BasicBlock* block);
#ifdef LEGACY_BACKEND
- GenTreePtr genMakeConst(const void* cnsAddr, var_types cnsType, GenTreePtr cnsTree, bool dblAlign);
+ GenTree* genMakeConst(const void* cnsAddr, var_types cnsType, GenTree* cnsTree, bool dblAlign);
#endif
- void genJumpToThrowHlpBlk(emitJumpKind jumpKind, SpecialCodeKind codeKind, GenTreePtr failBlk = nullptr);
+ void genJumpToThrowHlpBlk(emitJumpKind jumpKind, SpecialCodeKind codeKind, GenTree* failBlk = nullptr);
- void genCheckOverflow(GenTreePtr tree);
+ void genCheckOverflow(GenTree* tree);
//-------------------------------------------------------------------------
//
@@ -868,25 +868,25 @@ public:
size_t argSize,
emitAttr retSize MULTIREG_HAS_SECOND_GC_RET_ONLY_ARG(emitAttr secondRetSize));
- void instEmit_RM(instruction ins, GenTreePtr tree, GenTreePtr addr, unsigned offs);
+ void instEmit_RM(instruction ins, GenTree* tree, GenTree* addr, unsigned offs);
- void instEmit_RM_RV(instruction ins, emitAttr size, GenTreePtr tree, regNumber reg, unsigned offs);
+ void instEmit_RM_RV(instruction ins, emitAttr size, GenTree* tree, regNumber reg, unsigned offs);
- void instEmit_RV_RM(instruction ins, emitAttr size, regNumber reg, GenTreePtr tree, unsigned offs);
+ void instEmit_RV_RM(instruction ins, emitAttr size, regNumber reg, GenTree* tree, unsigned offs);
void instEmit_RV_RIA(instruction ins, regNumber reg1, regNumber reg2, unsigned offs);
- void inst_TT(instruction ins, GenTreePtr tree, unsigned offs = 0, int shfv = 0, emitAttr size = EA_UNKNOWN);
+ void inst_TT(instruction ins, GenTree* tree, unsigned offs = 0, int shfv = 0, emitAttr size = EA_UNKNOWN);
void inst_TT_RV(instruction ins,
- GenTreePtr tree,
+ GenTree* tree,
regNumber reg,
unsigned offs = 0,
emitAttr size = EA_UNKNOWN,
insFlags flags = INS_FLAGS_DONT_CARE);
void inst_TT_IV(instruction ins,
- GenTreePtr tree,
+ GenTree* tree,
ssize_t val,
unsigned offs = 0,
emitAttr size = EA_UNKNOWN,
@@ -896,30 +896,30 @@ public:
emitAttr size,
var_types type,
regNumber reg,
- GenTreePtr tree,
+ GenTree* tree,
unsigned offs = 0,
insFlags flags = INS_FLAGS_DONT_CARE);
- void inst_AT_IV(instruction ins, emitAttr size, GenTreePtr baseTree, int icon, unsigned offs = 0);
+ void inst_AT_IV(instruction ins, emitAttr size, GenTree* baseTree, int icon, unsigned offs = 0);
void inst_RV_TT(instruction ins,
regNumber reg,
- GenTreePtr tree,
+ GenTree* tree,
unsigned offs = 0,
emitAttr size = EA_UNKNOWN,
insFlags flags = INS_FLAGS_DONT_CARE);
- void inst_RV_TT_IV(instruction ins, regNumber reg, GenTreePtr tree, int val);
+ void inst_RV_TT_IV(instruction ins, regNumber reg, GenTree* tree, int val);
- void inst_FS_TT(instruction ins, GenTreePtr tree);
+ void inst_FS_TT(instruction ins, GenTree* tree);
void inst_RV_SH(instruction ins, emitAttr size, regNumber reg, unsigned val, insFlags flags = INS_FLAGS_DONT_CARE);
- void inst_TT_SH(instruction ins, GenTreePtr tree, unsigned val, unsigned offs = 0);
+ void inst_TT_SH(instruction ins, GenTree* tree, unsigned val, unsigned offs = 0);
void inst_RV_CL(instruction ins, regNumber reg, var_types type = TYP_I_IMPL);
- void inst_TT_CL(instruction ins, GenTreePtr tree, unsigned offs = 0);
+ void inst_TT_CL(instruction ins, GenTree* tree, unsigned offs = 0);
#if defined(_TARGET_XARCH_)
void inst_RV_RV_IV(instruction ins, emitAttr size, regNumber reg1, regNumber reg2, unsigned ival);
@@ -927,13 +927,13 @@ public:
void inst_RV_RR(instruction ins, emitAttr size, regNumber reg1, regNumber reg2);
- void inst_RV_ST(instruction ins, emitAttr size, regNumber reg, GenTreePtr tree);
+ void inst_RV_ST(instruction ins, emitAttr size, regNumber reg, GenTree* tree);
- void inst_mov_RV_ST(regNumber reg, GenTreePtr tree);
+ void inst_mov_RV_ST(regNumber reg, GenTree* tree);
- void instGetAddrMode(GenTreePtr addr, regNumber* baseReg, unsigned* indScale, regNumber* indReg, unsigned* cns);
+ void instGetAddrMode(GenTree* addr, regNumber* baseReg, unsigned* indScale, regNumber* indReg, unsigned* cns);
- void inst_set_SV_var(GenTreePtr tree);
+ void inst_set_SV_var(GenTree* tree);
#ifdef _TARGET_ARM_
bool arm_Valid_Imm_For_Instr(instruction ins, ssize_t imm, insFlags flags);
@@ -1012,7 +1012,7 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
* a tree (which has been made addressable).
*/
-inline void CodeGen::inst_FS_TT(instruction ins, GenTreePtr tree)
+inline void CodeGen::inst_FS_TT(instruction ins, GenTree* tree)
{
assert(instIsFP(ins));
diff --git a/src/jit/codegenarm.cpp b/src/jit/codegenarm.cpp
index 0092ca4c9e..26fba7e2a5 100644
--- a/src/jit/codegenarm.cpp
+++ b/src/jit/codegenarm.cpp
@@ -131,7 +131,7 @@ void CodeGen::instGen_Set_Reg_To_Imm(emitAttr size, regNumber reg, ssize_t imm,
// Notes:
// This does not call genProduceReg() on the target register.
//
-void CodeGen::genSetRegToConst(regNumber targetReg, var_types targetType, GenTreePtr tree)
+void CodeGen::genSetRegToConst(regNumber targetReg, var_types targetType, GenTree* tree)
{
switch (tree->gtOper)
{
@@ -215,8 +215,8 @@ void CodeGen::genCodeForBinary(GenTree* treeNode)
assert(oper == GT_ADD || oper == GT_SUB || oper == GT_MUL || oper == GT_ADD_LO || oper == GT_ADD_HI ||
oper == GT_SUB_LO || oper == GT_SUB_HI || oper == GT_OR || oper == GT_XOR || oper == GT_AND);
- GenTreePtr op1 = treeNode->gtGetOp1();
- GenTreePtr op2 = treeNode->gtGetOp2();
+ GenTree* op1 = treeNode->gtGetOp1();
+ GenTree* op2 = treeNode->gtGetOp2();
instruction ins = genGetInsForOper(oper, targetType);
@@ -249,11 +249,11 @@ void CodeGen::genCodeForBinary(GenTree* treeNode)
// Return Value:
// None
//
-void CodeGen::genReturn(GenTreePtr treeNode)
+void CodeGen::genReturn(GenTree* treeNode)
{
assert(treeNode->OperGet() == GT_RETURN || treeNode->OperGet() == GT_RETFILT);
- GenTreePtr op1 = treeNode->gtGetOp1();
- var_types targetType = treeNode->TypeGet();
+ GenTree* op1 = treeNode->gtGetOp1();
+ var_types targetType = treeNode->TypeGet();
// A void GT_RETFILT is the end of a finally. For non-void filter returns we need to load the result in the return
// register, if it's not already there. The processing is the same as GT_RETURN. For filters, the IL spec says the
@@ -359,11 +359,11 @@ void CodeGen::genLockedInstructions(GenTreeOp* treeNode)
//
// Notes: Size N should be aligned to STACK_ALIGN before any allocation
//
-void CodeGen::genLclHeap(GenTreePtr tree)
+void CodeGen::genLclHeap(GenTree* tree)
{
assert(tree->OperGet() == GT_LCLHEAP);
- GenTreePtr size = tree->gtOp.gtOp1;
+ GenTree* size = tree->gtOp.gtOp1;
noway_assert((genActualType(size->gtType) == TYP_INT) || (genActualType(size->gtType) == TYP_I_IMPL));
// Result of localloc will be returned in regCnt.
@@ -802,7 +802,7 @@ void CodeGen::genCodeForNegNot(GenTree* tree)
// The dst can only be a register.
assert(targetReg != REG_NA);
- GenTreePtr operand = tree->gtGetOp1();
+ GenTree* operand = tree->gtGetOp1();
assert(!operand->isContained());
// The src must be a register.
regNumber operandReg = genConsumeReg(operand);
@@ -842,12 +842,12 @@ void CodeGen::genCodeForNegNot(GenTree* tree)
// str tempReg, [R14, #8]
void CodeGen::genCodeForCpObj(GenTreeObj* cpObjNode)
{
- GenTreePtr dstAddr = cpObjNode->Addr();
- GenTreePtr source = cpObjNode->Data();
- var_types srcAddrType = TYP_BYREF;
- bool sourceIsLocal = false;
- regNumber dstReg = REG_NA;
- regNumber srcReg = REG_NA;
+ GenTree* dstAddr = cpObjNode->Addr();
+ GenTree* source = cpObjNode->Data();
+ var_types srcAddrType = TYP_BYREF;
+ bool sourceIsLocal = false;
+ regNumber dstReg = REG_NA;
+ regNumber srcReg = REG_NA;
assert(source->isContained());
if (source->gtOper == GT_IND)
@@ -957,7 +957,7 @@ void CodeGen::genCodeForCpObj(GenTreeObj* cpObjNode)
// a) All GenTrees are register allocated.
// b) The shift-by-amount in tree->gtOp.gtOp2 is a contained constant
//
-void CodeGen::genCodeForShiftLong(GenTreePtr tree)
+void CodeGen::genCodeForShiftLong(GenTree* tree)
{
// Only the non-RMW case here.
genTreeOps oper = tree->OperGet();
@@ -979,7 +979,7 @@ void CodeGen::genCodeForShiftLong(GenTreePtr tree)
var_types targetType = tree->TypeGet();
instruction ins = genGetInsForOper(oper, targetType);
- GenTreePtr shiftBy = tree->gtGetOp2();
+ GenTree* shiftBy = tree->gtGetOp2();
assert(shiftBy->isContainedIntOrIImmed());
@@ -1059,7 +1059,7 @@ void CodeGen::genCodeForStoreLclFld(GenTreeLclFld* tree)
// Ensure that lclVar nodes are typed correctly.
assert(!varDsc->lvNormalizeOnStore() || targetType == genActualType(varDsc->TypeGet()));
- GenTreePtr data = tree->gtOp1;
+ GenTree* data = tree->gtOp1;
instruction ins = ins_Store(targetType);
emitAttr attr = emitTypeSize(targetType);
if (data->isContainedIntOrIImmed())
@@ -1097,7 +1097,7 @@ void CodeGen::genCodeForStoreLclVar(GenTreeLclVar* tree)
// Ensure that lclVar nodes are typed correctly.
assert(!varDsc->lvNormalizeOnStore() || targetType == genActualType(varDsc->TypeGet()));
- GenTreePtr data = tree->gtOp1;
+ GenTree* data = tree->gtOp1;
// var = call, where call returns a multi-reg return value
// case is handled separately.
@@ -1174,9 +1174,9 @@ void CodeGen::genCodeForDivMod(GenTreeOp* tree)
noway_assert(targetReg != REG_NA);
- GenTreePtr dst = tree;
- GenTreePtr src1 = tree->gtGetOp1();
- GenTreePtr src2 = tree->gtGetOp2();
+ GenTree* dst = tree;
+ GenTree* src1 = tree->gtGetOp1();
+ GenTree* src2 = tree->gtGetOp2();
instruction ins = genGetInsForOper(tree->OperGet(), targetType);
emitAttr attr = emitTypeSize(tree);
regNumber result = REG_NA;
@@ -1215,7 +1215,7 @@ void CodeGen::genCodeForDivMod(GenTreeOp* tree)
// Assumptions:
// GT_CKFINITE node has reserved an internal register.
//
-void CodeGen::genCkfinite(GenTreePtr treeNode)
+void CodeGen::genCkfinite(GenTree* treeNode)
{
assert(treeNode->OperGet() == GT_CKFINITE);
@@ -1262,10 +1262,10 @@ void CodeGen::genCodeForCompare(GenTreeOp* tree)
// TODO-ARM-CQ: Check for the case where we can simply transfer the carry bit to a register
// (signed < or >= where targetReg != REG_NA)
- GenTreePtr op1 = tree->gtOp1;
- GenTreePtr op2 = tree->gtOp2;
- var_types op1Type = op1->TypeGet();
- var_types op2Type = op2->TypeGet();
+ GenTree* op1 = tree->gtOp1;
+ GenTree* op2 = tree->gtOp2;
+ var_types op1Type = op1->TypeGet();
+ var_types op2Type = op2->TypeGet();
assert(!varTypeIsLong(op1Type));
assert(!varTypeIsLong(op2Type));
@@ -1404,7 +1404,7 @@ void CodeGen::genCodeForStoreInd(GenTreeStoreInd* tree)
// Preconditions:
// The condition codes must already have been appropriately set.
//
-void CodeGen::genSetRegToCond(regNumber dstReg, GenTreePtr tree)
+void CodeGen::genSetRegToCond(regNumber dstReg, GenTree* tree)
{
// Emit code like that:
// ...
@@ -1542,7 +1542,7 @@ void CodeGen::genLongToIntCast(GenTree* cast)
// The treeNode must have an assigned register.
// SrcType= int32/uint32/int64/uint64 and DstType=float/double.
//
-void CodeGen::genIntToFloatCast(GenTreePtr treeNode)
+void CodeGen::genIntToFloatCast(GenTree* treeNode)
{
// int --> float/double conversions are always non-overflow ones
assert(treeNode->OperGet() == GT_CAST);
@@ -1551,7 +1551,7 @@ void CodeGen::genIntToFloatCast(GenTreePtr treeNode)
regNumber targetReg = treeNode->gtRegNum;
assert(genIsValidFloatReg(targetReg));
- GenTreePtr op1 = treeNode->gtOp.gtOp1;
+ GenTree* op1 = treeNode->gtOp.gtOp1;
assert(!op1->isContained()); // Cannot be contained
assert(genIsValidIntReg(op1->gtRegNum)); // Must be a valid int reg.
@@ -1606,7 +1606,7 @@ void CodeGen::genIntToFloatCast(GenTreePtr treeNode)
// The treeNode must have an assigned register.
// SrcType=float/double and DstType= int32/uint32/int64/uint64
//
-void CodeGen::genFloatToIntCast(GenTreePtr treeNode)
+void CodeGen::genFloatToIntCast(GenTree* treeNode)
{
// we don't expect to see overflow detecting float/double --> int type conversions here
// as they should have been converted into helper calls by front-end.
@@ -1616,7 +1616,7 @@ void CodeGen::genFloatToIntCast(GenTreePtr treeNode)
regNumber targetReg = treeNode->gtRegNum;
assert(genIsValidIntReg(targetReg)); // Must be a valid int reg.
- GenTreePtr op1 = treeNode->gtOp.gtOp1;
+ GenTree* op1 = treeNode->gtOp.gtOp1;
assert(!op1->isContained()); // Cannot be contained
assert(genIsValidFloatReg(op1->gtRegNum)); // Must be a valid float reg.
@@ -1746,7 +1746,7 @@ void CodeGen::genStoreLongLclVar(GenTree* treeNode)
LclVarDsc* varDsc = &(compiler->lvaTable[lclNum]);
assert(varDsc->TypeGet() == TYP_LONG);
assert(!varDsc->lvPromoted);
- GenTreePtr op1 = treeNode->gtOp.gtOp1;
+ GenTree* op1 = treeNode->gtOp.gtOp1;
noway_assert(op1->OperGet() == GT_LONG || op1->OperGet() == GT_MUL_LONG);
genConsumeRegs(op1);
@@ -1755,8 +1755,8 @@ void CodeGen::genStoreLongLclVar(GenTree* treeNode)
// Definitions of register candidates will have been lowered to 2 int lclVars.
assert(!treeNode->gtHasReg());
- GenTreePtr loVal = op1->gtGetOp1();
- GenTreePtr hiVal = op1->gtGetOp2();
+ GenTree* loVal = op1->gtGetOp1();
+ GenTree* hiVal = op1->gtGetOp2();
// NYI: Contained immediates.
NYI_IF((loVal->gtRegNum == REG_NA) || (hiVal->gtRegNum == REG_NA),
diff --git a/src/jit/codegenarm64.cpp b/src/jit/codegenarm64.cpp
index 0928b6f40e..e418d16c4e 100644
--- a/src/jit/codegenarm64.cpp
+++ b/src/jit/codegenarm64.cpp
@@ -1436,7 +1436,7 @@ void CodeGen::instGen_Set_Reg_To_Imm(emitAttr size, regNumber reg, ssize_t imm,
* specified by the constant (GT_CNS_INT or GT_CNS_DBL) in 'tree'. This does not call
* genProduceReg() on the target register.
*/
-void CodeGen::genSetRegToConst(regNumber targetReg, var_types targetType, GenTreePtr tree)
+void CodeGen::genSetRegToConst(regNumber targetReg, var_types targetType, GenTree* tree)
{
switch (tree->gtOper)
{
@@ -1510,8 +1510,8 @@ void CodeGen::genCodeForMulHi(GenTreeOp* treeNode)
emitAttr attr = emitActualTypeSize(treeNode);
unsigned isUnsigned = (treeNode->gtFlags & GTF_UNSIGNED);
- GenTreePtr op1 = treeNode->gtGetOp1();
- GenTreePtr op2 = treeNode->gtGetOp2();
+ GenTree* op1 = treeNode->gtGetOp1();
+ GenTree* op2 = treeNode->gtGetOp2();
assert(!varTypeIsFloating(targetType));
@@ -1552,8 +1552,8 @@ void CodeGen::genCodeForBinary(GenTree* treeNode)
assert(oper == GT_ADD || oper == GT_SUB || oper == GT_MUL || oper == GT_DIV || oper == GT_UDIV || oper == GT_AND ||
oper == GT_OR || oper == GT_XOR);
- GenTreePtr op1 = treeNode->gtGetOp1();
- GenTreePtr op2 = treeNode->gtGetOp2();
+ GenTree* op1 = treeNode->gtGetOp1();
+ GenTree* op2 = treeNode->gtGetOp2();
instruction ins = genGetInsForOper(treeNode->OperGet(), targetType);
if ((treeNode->gtFlags & GTF_SET_FLAGS) != 0)
@@ -1655,7 +1655,7 @@ void CodeGen::genCodeForStoreLclFld(GenTreeLclFld* tree)
// Ensure that lclVar nodes are typed correctly.
assert(!varDsc->lvNormalizeOnStore() || targetType == genActualType(varDsc->TypeGet()));
- GenTreePtr data = tree->gtOp1;
+ GenTree* data = tree->gtOp1;
genConsumeRegs(data);
regNumber dataReg = REG_NA;
@@ -1703,7 +1703,7 @@ void CodeGen::genCodeForStoreLclVar(GenTreeLclVar* tree)
// Ensure that lclVar nodes are typed correctly.
assert(!varDsc->lvNormalizeOnStore() || targetType == genActualType(varDsc->TypeGet()));
- GenTreePtr data = tree->gtOp1;
+ GenTree* data = tree->gtOp1;
// var = call, where call returns a multi-reg return value
// case is handled separately.
@@ -1784,11 +1784,11 @@ void CodeGen::genCodeForStoreLclVar(GenTreeLclVar* tree)
// Return Value:
// None
//
-void CodeGen::genReturn(GenTreePtr treeNode)
+void CodeGen::genReturn(GenTree* treeNode)
{
assert(treeNode->OperGet() == GT_RETURN || treeNode->OperGet() == GT_RETFILT);
- GenTreePtr op1 = treeNode->gtGetOp1();
- var_types targetType = treeNode->TypeGet();
+ GenTree* op1 = treeNode->gtGetOp1();
+ var_types targetType = treeNode->TypeGet();
// A void GT_RETFILT is the end of a finally. For non-void filter returns we need to load the result in the return
// register, if it's not already there. The processing is the same as GT_RETURN. For filters, the IL spec says the
@@ -1865,11 +1865,11 @@ void CodeGen::genReturn(GenTreePtr treeNode)
/***********************************************************************************************
* Generate code for localloc
*/
-void CodeGen::genLclHeap(GenTreePtr tree)
+void CodeGen::genLclHeap(GenTree* tree)
{
assert(tree->OperGet() == GT_LCLHEAP);
- GenTreePtr size = tree->gtOp.gtOp1;
+ GenTree* size = tree->gtOp.gtOp1;
noway_assert((genActualType(size->gtType) == TYP_INT) || (genActualType(size->gtType) == TYP_I_IMPL));
regNumber targetReg = tree->gtRegNum;
@@ -2223,7 +2223,7 @@ void CodeGen::genCodeForNegNot(GenTree* tree)
// The dst can only be a register.
assert(targetReg != REG_NA);
- GenTreePtr operand = tree->gtGetOp1();
+ GenTree* operand = tree->gtGetOp1();
assert(!operand->isContained());
// The src must be a register.
regNumber operandReg = genConsumeReg(operand);
@@ -2257,8 +2257,8 @@ void CodeGen::genCodeForDivMod(GenTreeOp* tree)
}
else // an integer divide operation
{
- GenTreePtr divisorOp = tree->gtGetOp2();
- emitAttr size = EA_ATTR(genTypeSize(genActualType(tree->TypeGet())));
+ GenTree* divisorOp = tree->gtGetOp2();
+ emitAttr size = EA_ATTR(genTypeSize(genActualType(tree->TypeGet())));
if (divisorOp->IsIntegralConst(0))
{
@@ -2356,9 +2356,9 @@ void CodeGen::genCodeForDivMod(GenTreeOp* tree)
void CodeGen::genCodeForInitBlkUnroll(GenTreeBlk* initBlkNode)
{
// Make sure we got the arguments of the initblk/initobj operation in the right registers
- unsigned size = initBlkNode->Size();
- GenTreePtr dstAddr = initBlkNode->Addr();
- GenTreePtr initVal = initBlkNode->Data();
+ unsigned size = initBlkNode->Size();
+ GenTree* dstAddr = initBlkNode->Addr();
+ GenTree* initVal = initBlkNode->Data();
if (initVal->OperIsInitVal())
{
initVal = initVal->gtGetOp1();
@@ -2486,10 +2486,10 @@ void CodeGen::genCodeForStorePairOffset(regNumber src, regNumber src2, GenTree*
// str tempReg, [R14, #8]
void CodeGen::genCodeForCpObj(GenTreeObj* cpObjNode)
{
- GenTreePtr dstAddr = cpObjNode->Addr();
- GenTreePtr source = cpObjNode->Data();
- var_types srcAddrType = TYP_BYREF;
- bool sourceIsLocal = false;
+ GenTree* dstAddr = cpObjNode->Addr();
+ GenTree* source = cpObjNode->Data();
+ var_types srcAddrType = TYP_BYREF;
+ bool sourceIsLocal = false;
assert(source->isContained());
if (source->gtOper == GT_IND)
@@ -2801,9 +2801,9 @@ void CodeGen::genCodeForCmpXchg(GenTreeCmpXchg* treeNode)
{
assert(treeNode->OperIs(GT_CMPXCHG));
- GenTreePtr addr = treeNode->gtOpLocation; // arg1
- GenTreePtr data = treeNode->gtOpValue; // arg2
- GenTreePtr comparand = treeNode->gtOpComparand; // arg3
+ GenTree* addr = treeNode->gtOpLocation; // arg1
+ GenTree* data = treeNode->gtOpValue; // arg2
+ GenTree* comparand = treeNode->gtOpComparand; // arg3
regNumber targetReg = treeNode->gtRegNum;
regNumber dataReg = data->gtRegNum;
@@ -3215,7 +3215,7 @@ void CodeGen::genCodeForSwap(GenTreeOp* tree)
// A full 64-bit value of either 1 or 0 is setup in the 'dstReg'
//-------------------------------------------------------------------------------------------
-void CodeGen::genSetRegToCond(regNumber dstReg, GenTreePtr tree)
+void CodeGen::genSetRegToCond(regNumber dstReg, GenTree* tree)
{
emitJumpKind jumpKind[2];
bool branchToTrueLabel[2];
@@ -3272,7 +3272,7 @@ void CodeGen::genSetRegToCond(regNumber dstReg, GenTreePtr tree)
// The treeNode must have an assigned register.
// SrcType= int32/uint32/int64/uint64 and DstType=float/double.
//
-void CodeGen::genIntToFloatCast(GenTreePtr treeNode)
+void CodeGen::genIntToFloatCast(GenTree* treeNode)
{
// int type --> float/double conversions are always non-overflow ones
assert(treeNode->OperGet() == GT_CAST);
@@ -3281,7 +3281,7 @@ void CodeGen::genIntToFloatCast(GenTreePtr treeNode)
regNumber targetReg = treeNode->gtRegNum;
assert(genIsValidFloatReg(targetReg));
- GenTreePtr op1 = treeNode->gtOp.gtOp1;
+ GenTree* op1 = treeNode->gtOp.gtOp1;
assert(!op1->isContained()); // Cannot be contained
assert(genIsValidIntReg(op1->gtRegNum)); // Must be a valid int reg.
@@ -3349,7 +3349,7 @@ void CodeGen::genIntToFloatCast(GenTreePtr treeNode)
// The treeNode must have an assigned register.
// SrcType=float/double and DstType= int32/uint32/int64/uint64
//
-void CodeGen::genFloatToIntCast(GenTreePtr treeNode)
+void CodeGen::genFloatToIntCast(GenTree* treeNode)
{
// we don't expect to see overflow detecting float/double --> int type conversions here
// as they should have been converted into helper calls by front-end.
@@ -3359,7 +3359,7 @@ void CodeGen::genFloatToIntCast(GenTreePtr treeNode)
regNumber targetReg = treeNode->gtRegNum;
assert(genIsValidIntReg(targetReg)); // Must be a valid int reg.
- GenTreePtr op1 = treeNode->gtOp.gtOp1;
+ GenTree* op1 = treeNode->gtOp.gtOp1;
assert(!op1->isContained()); // Cannot be contained
assert(genIsValidFloatReg(op1->gtRegNum)); // Must be a valid float reg.
@@ -3427,14 +3427,14 @@ void CodeGen::genFloatToIntCast(GenTreePtr treeNode)
// Assumptions:
// GT_CKFINITE node has reserved an internal register.
//
-void CodeGen::genCkfinite(GenTreePtr treeNode)
+void CodeGen::genCkfinite(GenTree* treeNode)
{
assert(treeNode->OperGet() == GT_CKFINITE);
- GenTreePtr op1 = treeNode->gtOp.gtOp1;
- var_types targetType = treeNode->TypeGet();
- int expMask = (targetType == TYP_FLOAT) ? 0x7F8 : 0x7FF; // Bit mask to extract exponent.
- int shiftAmount = targetType == TYP_FLOAT ? 20 : 52;
+ GenTree* op1 = treeNode->gtOp.gtOp1;
+ var_types targetType = treeNode->TypeGet();
+ int expMask = (targetType == TYP_FLOAT) ? 0x7F8 : 0x7FF; // Bit mask to extract exponent.
+ int shiftAmount = targetType == TYP_FLOAT ? 20 : 52;
emitter* emit = getEmitter();
@@ -3472,10 +3472,10 @@ void CodeGen::genCodeForCompare(GenTreeOp* tree)
regNumber targetReg = tree->gtRegNum;
emitter* emit = getEmitter();
- GenTreePtr op1 = tree->gtOp1;
- GenTreePtr op2 = tree->gtOp2;
- var_types op1Type = genActualType(op1->TypeGet());
- var_types op2Type = genActualType(op2->TypeGet());
+ GenTree* op1 = tree->gtOp1;
+ GenTree* op2 = tree->gtOp2;
+ var_types op1Type = genActualType(op1->TypeGet());
+ var_types op2Type = genActualType(op2->TypeGet());
assert(!op1->isUsedFromMemory());
assert(!op2->isUsedFromMemory());
@@ -4939,7 +4939,7 @@ void CodeGen::genStoreLclTypeSIMD12(GenTree* treeNode)
offs = treeNode->gtLclFld.gtLclOffs;
}
- GenTreePtr op1 = treeNode->gtOp.gtOp1;
+ GenTree* op1 = treeNode->gtOp.gtOp1;
assert(!op1->isContained());
regNumber operandReg = genConsumeReg(op1);
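
The hunks above are typical of the whole change: GenTreePtr is evidently just an alias for GenTree*, since every edit swaps one spelling for the other one-for-one and re-aligns the initializer column. A minimal, self-contained sketch of the before/after shape, using a hypothetical PickFirstNonNull helper rather than code from this commit:

    struct GenTree; // stand-in forward declaration; the sketch only passes pointers around

    // Old spelling:  GenTreePtr op1 = ...;  GenTreePtr op2 = ...;
    // New spelling, with each pointer declared on its own line as in the + lines above:
    GenTree* PickFirstNonNull(GenTree* op1, GenTree* op2)
    {
        GenTree* first  = op1;
        GenTree* second = op2;
        return (first != nullptr) ? first : second;
    }
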
diff --git a/src/jit/codegenarmarch.cpp b/src/jit/codegenarmarch.cpp
index 680ff6db4f..22eb70e40a 100644
--- a/src/jit/codegenarmarch.cpp
+++ b/src/jit/codegenarmarch.cpp
@@ -30,7 +30,7 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
// Preconditions:
// All operands have been evaluated.
//
-void CodeGen::genCodeForTreeNode(GenTreePtr treeNode)
+void CodeGen::genCodeForTreeNode(GenTree* treeNode)
{
regNumber targetReg = treeNode->gtRegNum;
var_types targetType = treeNode->TypeGet();
@@ -486,12 +486,12 @@ void CodeGen::genSetRegToIcon(regNumber reg, ssize_t val, var_types type, insFla
// Return value:
// None
//
-void CodeGen::genIntrinsic(GenTreePtr treeNode)
+void CodeGen::genIntrinsic(GenTree* treeNode)
{
assert(treeNode->OperIs(GT_INTRINSIC));
// Both operand and its result must be of the same floating point type.
- GenTreePtr srcNode = treeNode->gtOp.gtOp1;
+ GenTree* srcNode = treeNode->gtOp.gtOp1;
assert(varTypeIsFloating(srcNode));
assert(srcNode->TypeGet() == treeNode->TypeGet());
@@ -546,9 +546,9 @@ void CodeGen::genIntrinsic(GenTreePtr treeNode)
void CodeGen::genPutArgStk(GenTreePutArgStk* treeNode)
{
assert(treeNode->OperIs(GT_PUTARG_STK));
- GenTreePtr source = treeNode->gtOp1;
- var_types targetType = genActualType(source->TypeGet());
- emitter* emit = getEmitter();
+ GenTree* source = treeNode->gtOp1;
+ var_types targetType = genActualType(source->TypeGet());
+ emitter* emit = getEmitter();
// This is the varNum for our store operations,
// typically this is the varNum for the Outgoing arg space
@@ -660,7 +660,7 @@ void CodeGen::genPutArgStk(GenTreePutArgStk* treeNode)
// and store their register into the outgoing argument area
for (; fieldListPtr != nullptr; fieldListPtr = fieldListPtr->Rest())
{
- GenTreePtr nextArgNode = fieldListPtr->gtOp.gtOp1;
+ GenTree* nextArgNode = fieldListPtr->gtOp.gtOp1;
genConsumeReg(nextArgNode);
regNumber reg = nextArgNode->gtRegNum;
@@ -692,7 +692,7 @@ void CodeGen::genPutArgStk(GenTreePutArgStk* treeNode)
regNumber addrReg = REG_NA;
GenTreeLclVarCommon* varNode = nullptr;
- GenTreePtr addrNode = nullptr;
+ GenTree* addrNode = nullptr;
if (source->OperGet() == GT_LCL_VAR)
{
@@ -998,11 +998,11 @@ void CodeGen::genPutArgSplit(GenTreePutArgSplit* treeNode)
{
assert(treeNode->OperIs(GT_PUTARG_SPLIT));
- GenTreePtr source = treeNode->gtOp1;
- emitter* emit = getEmitter();
- unsigned varNumOut = compiler->lvaOutgoingArgSpaceVar;
- unsigned argOffsetMax = compiler->lvaOutgoingArgSpaceSize;
- unsigned argOffsetOut = treeNode->gtSlotNum * TARGET_POINTER_SIZE;
+ GenTree* source = treeNode->gtOp1;
+ emitter* emit = getEmitter();
+ unsigned varNumOut = compiler->lvaOutgoingArgSpaceVar;
+ unsigned argOffsetMax = compiler->lvaOutgoingArgSpaceSize;
+ unsigned argOffsetOut = treeNode->gtSlotNum * TARGET_POINTER_SIZE;
if (source->OperGet() == GT_FIELD_LIST)
{
@@ -1012,8 +1012,8 @@ void CodeGen::genPutArgSplit(GenTreePutArgSplit* treeNode)
for (GenTreeFieldList* fieldListPtr = source->AsFieldList(); fieldListPtr != nullptr;
fieldListPtr = fieldListPtr->Rest())
{
- GenTreePtr nextArgNode = fieldListPtr->gtGetOp1();
- regNumber fieldReg = nextArgNode->gtRegNum;
+ GenTree* nextArgNode = fieldListPtr->gtGetOp1();
+ regNumber fieldReg = nextArgNode->gtRegNum;
genConsumeReg(nextArgNode);
if (regIndex >= treeNode->gtNumRegs)
@@ -1068,7 +1068,7 @@ void CodeGen::genPutArgSplit(GenTreePutArgSplit* treeNode)
regNumber addrReg = REG_NA;
GenTreeLclVarCommon* varNode = nullptr;
- GenTreePtr addrNode = nullptr;
+ GenTree* addrNode = nullptr;
addrNode = source->gtOp.gtOp1;
@@ -1213,7 +1213,7 @@ void CodeGen::genPutArgSplit(GenTreePutArgSplit* treeNode)
// The child of store is a multi-reg call node.
// genProduceReg() on treeNode is made by caller of this routine.
//
-void CodeGen::genMultiRegCallStoreToLocal(GenTreePtr treeNode)
+void CodeGen::genMultiRegCallStoreToLocal(GenTree* treeNode)
{
assert(treeNode->OperGet() == GT_STORE_LCL_VAR);
@@ -1324,7 +1324,7 @@ void CodeGen::genMultiRegCallStoreToLocal(GenTreePtr treeNode)
//------------------------------------------------------------------------
// genRangeCheck: generate code for GT_ARR_BOUNDS_CHECK node.
//
-void CodeGen::genRangeCheck(GenTreePtr oper)
+void CodeGen::genRangeCheck(GenTree* oper)
{
#ifdef FEATURE_SIMD
noway_assert(oper->OperGet() == GT_ARR_BOUNDS_CHECK || oper->OperGet() == GT_SIMD_CHK);
@@ -1334,10 +1334,10 @@ void CodeGen::genRangeCheck(GenTreePtr oper)
GenTreeBoundsChk* bndsChk = oper->AsBoundsChk();
- GenTreePtr arrLen = bndsChk->gtArrLen;
- GenTreePtr arrIndex = bndsChk->gtIndex;
- GenTreePtr arrRef = NULL;
- int lenOffset = 0;
+ GenTree* arrLen = bndsChk->gtArrLen;
+ GenTree* arrIndex = bndsChk->gtIndex;
+ GenTree* arrRef = NULL;
+ int lenOffset = 0;
GenTree* src1;
GenTree* src2;
@@ -1475,12 +1475,12 @@ unsigned CodeGen::genOffsetOfMDArrayDimensionSize(var_types elemType, unsigned r
//
void CodeGen::genCodeForArrIndex(GenTreeArrIndex* arrIndex)
{
- emitter* emit = getEmitter();
- GenTreePtr arrObj = arrIndex->ArrObj();
- GenTreePtr indexNode = arrIndex->IndexExpr();
- regNumber arrReg = genConsumeReg(arrObj);
- regNumber indexReg = genConsumeReg(indexNode);
- regNumber tgtReg = arrIndex->gtRegNum;
+ emitter* emit = getEmitter();
+ GenTree* arrObj = arrIndex->ArrObj();
+ GenTree* indexNode = arrIndex->IndexExpr();
+ regNumber arrReg = genConsumeReg(arrObj);
+ regNumber indexReg = genConsumeReg(indexNode);
+ regNumber tgtReg = arrIndex->gtRegNum;
noway_assert(tgtReg != REG_NA);
// We will use a temp register to load the lower bound and dimension size values.
@@ -1525,9 +1525,9 @@ void CodeGen::genCodeForArrIndex(GenTreeArrIndex* arrIndex)
void CodeGen::genCodeForArrOffset(GenTreeArrOffs* arrOffset)
{
- GenTreePtr offsetNode = arrOffset->gtOffset;
- GenTreePtr indexNode = arrOffset->gtIndex;
- regNumber tgtReg = arrOffset->gtRegNum;
+ GenTree* offsetNode = arrOffset->gtOffset;
+ GenTree* indexNode = arrOffset->gtIndex;
+ regNumber tgtReg = arrOffset->gtRegNum;
noway_assert(tgtReg != REG_NA);
@@ -1574,7 +1574,7 @@ void CodeGen::genCodeForArrOffset(GenTreeArrOffs* arrOffset)
// Assumptions:
// a) All GenTrees are register allocated.
//
-void CodeGen::genCodeForShift(GenTreePtr tree)
+void CodeGen::genCodeForShift(GenTree* tree)
{
var_types targetType = tree->TypeGet();
genTreeOps oper = tree->OperGet();
@@ -1585,8 +1585,8 @@ void CodeGen::genCodeForShift(GenTreePtr tree)
genConsumeOperands(tree->AsOp());
- GenTreePtr operand = tree->gtGetOp1();
- GenTreePtr shiftBy = tree->gtGetOp2();
+ GenTree* operand = tree->gtGetOp1();
+ GenTree* shiftBy = tree->gtGetOp2();
if (!shiftBy->IsCnsIntOrI())
{
getEmitter()->emitIns_R_R_R(ins, size, tree->gtRegNum, operand->gtRegNum, shiftBy->gtRegNum);
@@ -1840,8 +1840,8 @@ void CodeGen::genCodeForIndir(GenTreeIndir* tree)
void CodeGen::genCodeForCpBlk(GenTreeBlk* cpBlkNode)
{
// Make sure we got the arguments of the cpblk operation in the right registers
- unsigned blockSize = cpBlkNode->Size();
- GenTreePtr dstAddr = cpBlkNode->Addr();
+ unsigned blockSize = cpBlkNode->Size();
+ GenTree* dstAddr = cpBlkNode->Addr();
assert(!dstAddr->isContained());
genConsumeBlockOp(cpBlkNode, REG_ARG_0, REG_ARG_1, REG_ARG_2);
@@ -1888,10 +1888,10 @@ void CodeGen::genCodeForCpBlk(GenTreeBlk* cpBlkNode)
void CodeGen::genCodeForCpBlkUnroll(GenTreeBlk* cpBlkNode)
{
// Make sure we got the arguments of the cpblk operation in the right registers
- unsigned size = cpBlkNode->Size();
- GenTreePtr dstAddr = cpBlkNode->Addr();
- GenTreePtr source = cpBlkNode->Data();
- GenTreePtr srcAddr = nullptr;
+ unsigned size = cpBlkNode->Size();
+ GenTree* dstAddr = cpBlkNode->Addr();
+ GenTree* source = cpBlkNode->Data();
+ GenTree* srcAddr = nullptr;
assert((size != 0) && (size <= CPBLK_UNROLL_LIMIT));
@@ -2026,9 +2026,9 @@ void CodeGen::genCodeForCpBlkUnroll(GenTreeBlk* cpBlkNode)
// b) The size argument of the InitBlk is >= INITBLK_STOS_LIMIT bytes.
void CodeGen::genCodeForInitBlk(GenTreeBlk* initBlkNode)
{
- unsigned size = initBlkNode->Size();
- GenTreePtr dstAddr = initBlkNode->Addr();
- GenTreePtr initVal = initBlkNode->Data();
+ unsigned size = initBlkNode->Size();
+ GenTree* dstAddr = initBlkNode->Addr();
+ GenTree* initVal = initBlkNode->Data();
if (initVal->OperIsInitVal())
{
initVal = initVal->gtGetOp1();
@@ -2194,11 +2194,11 @@ void CodeGen::genCallInstruction(GenTreeCall* call)
assert(!call->IsVirtual() || call->gtControlExpr || call->gtCallAddr);
// Consume all the arg regs
- for (GenTreePtr list = call->gtCallLateArgs; list; list = list->MoveNext())
+ for (GenTree* list = call->gtCallLateArgs; list; list = list->MoveNext())
{
assert(list->OperIsList());
- GenTreePtr argNode = list->Current();
+ GenTree* argNode = list->Current();
fgArgTabEntry* curArgTabEntry = compiler->gtArgEntryByNode(call, argNode);
assert(curArgTabEntry);
@@ -2217,7 +2217,7 @@ void CodeGen::genCallInstruction(GenTreeCall* call)
regNumber argReg = curArgTabEntry->regNum;
for (; argListPtr != nullptr; argListPtr = argListPtr->Rest(), iterationNum++)
{
- GenTreePtr putArgRegNode = argListPtr->gtOp.gtOp1;
+ GenTree* putArgRegNode = argListPtr->gtOp.gtOp1;
assert(putArgRegNode->gtOper == GT_PUTARG_REG);
genConsumeReg(putArgRegNode);
@@ -2543,7 +2543,7 @@ void CodeGen::genCallInstruction(GenTreeCall* call)
// The arguments of the caller needs to be transferred to the callee before exiting caller.
// The actual jump to callee is generated as part of caller epilog sequence.
// Therefore the codegen of GT_JMP is to ensure that the callee arguments are correctly setup.
-void CodeGen::genJmpMethod(GenTreePtr jmp)
+void CodeGen::genJmpMethod(GenTree* jmp)
{
assert(jmp->OperGet() == GT_JMP);
assert(compiler->compJmpOpUsed);
@@ -2884,12 +2884,12 @@ void CodeGen::genJmpMethod(GenTreePtr jmp)
//
// TODO-ARM64-CQ: Allow castOp to be a contained node without an assigned register.
//
-void CodeGen::genIntToIntCast(GenTreePtr treeNode)
+void CodeGen::genIntToIntCast(GenTree* treeNode)
{
assert(treeNode->OperGet() == GT_CAST);
- GenTreePtr castOp = treeNode->gtCast.CastOp();
- emitter* emit = getEmitter();
+ GenTree* castOp = treeNode->gtCast.CastOp();
+ emitter* emit = getEmitter();
var_types dstType = treeNode->CastToType();
var_types srcType = genActualType(castOp->TypeGet());
@@ -3092,7 +3092,7 @@ void CodeGen::genIntToIntCast(GenTreePtr treeNode)
// The treeNode must have an assigned register.
// The cast is between float and double.
//
-void CodeGen::genFloatToFloatCast(GenTreePtr treeNode)
+void CodeGen::genFloatToFloatCast(GenTree* treeNode)
{
// float <--> double conversions are always non-overflow ones
assert(treeNode->OperGet() == GT_CAST);
@@ -3101,7 +3101,7 @@ void CodeGen::genFloatToFloatCast(GenTreePtr treeNode)
regNumber targetReg = treeNode->gtRegNum;
assert(genIsValidFloatReg(targetReg));
- GenTreePtr op1 = treeNode->gtOp.gtOp1;
+ GenTree* op1 = treeNode->gtOp.gtOp1;
assert(!op1->isContained()); // Cannot be contained
assert(genIsValidFloatReg(op1->gtRegNum)); // Must be a valid float reg.
@@ -3229,7 +3229,7 @@ void CodeGen::genCreateAndStoreGCInfo(unsigned codeSize,
// Typically only one conditional branch is needed
// and the second jmpKind[] value is set to EJ_NONE
//
-void CodeGen::genJumpKindsForTree(GenTreePtr cmpTree, emitJumpKind jmpKind[2], bool jmpToTrueLabel[2])
+void CodeGen::genJumpKindsForTree(GenTree* cmpTree, emitJumpKind jmpKind[2], bool jmpToTrueLabel[2])
{
// On ARM both branches will always branch to the true label
jmpToTrueLabel[0] = true;
@@ -3343,7 +3343,7 @@ void CodeGen::genJumpKindsForTree(GenTreePtr cmpTree, emitJumpKind jmpKind[2], b
// Return Value:
// None
//
-void CodeGen::genCodeForJumpTrue(GenTreePtr tree)
+void CodeGen::genCodeForJumpTrue(GenTree* tree)
{
GenTree* cmp = tree->gtOp.gtOp1;
assert(cmp->OperIsCompare());
@@ -3647,7 +3647,7 @@ void CodeGen::genLeaInstruction(GenTreeAddrMode* lea)
// Returns true if the 'treeNode" is a GT_RETURN node of type struct.
// Otherwise returns false.
//
-bool CodeGen::isStructReturn(GenTreePtr treeNode)
+bool CodeGen::isStructReturn(GenTree* treeNode)
{
// This method could be called for 'treeNode' of GT_RET_FILT or GT_RETURN.
// For the GT_RET_FILT, the return is always
@@ -3668,11 +3668,11 @@ bool CodeGen::isStructReturn(GenTreePtr treeNode)
//
// Assumption:
// op1 of GT_RETURN node is either GT_LCL_VAR or multi-reg GT_CALL
-void CodeGen::genStructReturn(GenTreePtr treeNode)
+void CodeGen::genStructReturn(GenTree* treeNode)
{
assert(treeNode->OperGet() == GT_RETURN);
assert(isStructReturn(treeNode));
- GenTreePtr op1 = treeNode->gtGetOp1();
+ GenTree* op1 = treeNode->gtGetOp1();
if (op1->OperGet() == GT_LCL_VAR)
{
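
The loops above (for example the gtCallLateArgs walk) only respell the induction variable's declared type; the traversal itself is untouched. A self-contained analogue with a hypothetical Node type shows why deleting a pointer alias cannot change such code:

    struct Node
    {
        Node* next;
        int   value;
    };

    // With "typedef Node* NodePtr;" the loop header would have read
    // "for (NodePtr cur = head; ...)" -- the alias and the explicit spelling
    // declare exactly the same type, so the behavior is identical.
    int SumList(Node* head)
    {
        int sum = 0;
        for (Node* cur = head; cur != nullptr; cur = cur->next)
        {
            sum += cur->value;
        }
        return sum;
    }

The one place an alias and the raw spelling can diverge is const placement: const NodePtr means Node* const, not const Node*, which is a common argument for writing the pointer out explicitly.
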
diff --git a/src/jit/codegenclassic.h b/src/jit/codegenclassic.h
index babc6a472f..7785a25dba 100644
--- a/src/jit/codegenclassic.h
+++ b/src/jit/codegenclassic.h
@@ -14,13 +14,13 @@
#ifdef LEGACY_BACKEND // Not necessary (it's this way in the #include location), but helpful to IntelliSense
public:
-regNumber genIsEnregisteredIntVariable(GenTreePtr tree);
+regNumber genIsEnregisteredIntVariable(GenTree* tree);
void sched_AM(instruction ins,
emitAttr size,
regNumber ireg,
bool rdst,
- GenTreePtr tree,
+ GenTree* tree,
unsigned offs,
bool cons = false,
int cval = 0,
@@ -37,14 +37,14 @@ unsigned genFPdeadRegCnt; // The dead unpopped part of genFPregCnt
void genSetRegToIcon(regNumber reg, ssize_t val, var_types type = TYP_INT, insFlags flags = INS_FLAGS_DONT_CARE);
regNumber genGetRegSetToIcon(ssize_t val, regMaskTP regBest = 0, var_types type = TYP_INT);
-void genDecRegBy(regNumber reg, ssize_t ival, GenTreePtr tree);
-void genIncRegBy(regNumber reg, ssize_t ival, GenTreePtr tree, var_types dstType = TYP_INT, bool ovfl = false);
+void genDecRegBy(regNumber reg, ssize_t ival, GenTree* tree);
+void genIncRegBy(regNumber reg, ssize_t ival, GenTree* tree, var_types dstType = TYP_INT, bool ovfl = false);
-void genMulRegBy(regNumber reg, ssize_t ival, GenTreePtr tree, var_types dstType = TYP_INT, bool ovfl = false);
+void genMulRegBy(regNumber reg, ssize_t ival, GenTree* tree, var_types dstType = TYP_INT, bool ovfl = false);
//-------------------------------------------------------------------------
-bool genRegTrashable(regNumber reg, GenTreePtr tree);
+bool genRegTrashable(regNumber reg, GenTree* tree);
//
// Prolog functions and data (there are a few exceptions for more generally used things)
@@ -61,47 +61,47 @@ regNumber genPInvokeCallProlog(LclVarDsc* varDsc,
void genPInvokeCallEpilog(LclVarDsc* varDsc, regMaskTP retVal);
-regNumber genLclHeap(GenTreePtr size);
+regNumber genLclHeap(GenTree* size);
void genDyingVars(VARSET_VALARG_TP beforeSet, VARSET_VALARG_TP afterSet);
-bool genContainsVarDeath(GenTreePtr from, GenTreePtr to, unsigned varNum);
+bool genContainsVarDeath(GenTree* from, GenTree* to, unsigned varNum);
void genComputeReg(
- GenTreePtr tree, regMaskTP needReg, RegSet::ExactReg mustReg, RegSet::KeepReg keepReg, bool freeOnly = false);
+ GenTree* tree, regMaskTP needReg, RegSet::ExactReg mustReg, RegSet::KeepReg keepReg, bool freeOnly = false);
-void genCompIntoFreeReg(GenTreePtr tree, regMaskTP needReg, RegSet::KeepReg keepReg);
+void genCompIntoFreeReg(GenTree* tree, regMaskTP needReg, RegSet::KeepReg keepReg);
-void genReleaseReg(GenTreePtr tree);
+void genReleaseReg(GenTree* tree);
-void genRecoverReg(GenTreePtr tree, regMaskTP needReg, RegSet::KeepReg keepReg);
+void genRecoverReg(GenTree* tree, regMaskTP needReg, RegSet::KeepReg keepReg);
-void genMoveRegPairHalf(GenTreePtr tree, regNumber dst, regNumber src, int off = 0);
+void genMoveRegPairHalf(GenTree* tree, regNumber dst, regNumber src, int off = 0);
-void genMoveRegPair(GenTreePtr tree, regMaskTP needReg, regPairNo newPair);
+void genMoveRegPair(GenTree* tree, regMaskTP needReg, regPairNo newPair);
void genComputeRegPair(
- GenTreePtr tree, regPairNo needRegPair, regMaskTP avoidReg, RegSet::KeepReg keepReg, bool freeOnly = false);
+ GenTree* tree, regPairNo needRegPair, regMaskTP avoidReg, RegSet::KeepReg keepReg, bool freeOnly = false);
-void genCompIntoFreeRegPair(GenTreePtr tree, regMaskTP avoidReg, RegSet::KeepReg keepReg);
+void genCompIntoFreeRegPair(GenTree* tree, regMaskTP avoidReg, RegSet::KeepReg keepReg);
-void genComputeAddressable(GenTreePtr tree,
+void genComputeAddressable(GenTree* tree,
regMaskTP addrReg,
RegSet::KeepReg keptReg,
regMaskTP needReg,
RegSet::KeepReg keepReg,
bool freeOnly = false);
-void genReleaseRegPair(GenTreePtr tree);
+void genReleaseRegPair(GenTree* tree);
-void genRecoverRegPair(GenTreePtr tree, regPairNo regPair, RegSet::KeepReg keepReg);
+void genRecoverRegPair(GenTree* tree, regPairNo regPair, RegSet::KeepReg keepReg);
-void genEvalIntoFreeRegPair(GenTreePtr tree, regPairNo regPair, regMaskTP avoidReg);
+void genEvalIntoFreeRegPair(GenTree* tree, regPairNo regPair, regMaskTP avoidReg);
void genMakeRegPairAvailable(regPairNo regPair);
-bool genMakeIndAddrMode(GenTreePtr addr,
- GenTreePtr oper,
+bool genMakeIndAddrMode(GenTree* addr,
+ GenTree* oper,
bool forLea,
regMaskTP regMask,
RegSet::KeepReg keepReg,
@@ -109,14 +109,14 @@ bool genMakeIndAddrMode(GenTreePtr addr,
bool deferOp = false);
regMaskTP genMakeRvalueAddressable(
- GenTreePtr tree, regMaskTP needReg, RegSet::KeepReg keepReg, bool forLoadStore, bool smallOK = false);
+ GenTree* tree, regMaskTP needReg, RegSet::KeepReg keepReg, bool forLoadStore, bool smallOK = false);
regMaskTP genMakeAddressable(
- GenTreePtr tree, regMaskTP needReg, RegSet::KeepReg keepReg, bool smallOK = false, bool deferOK = false);
+ GenTree* tree, regMaskTP needReg, RegSet::KeepReg keepReg, bool smallOK = false, bool deferOK = false);
-regMaskTP genMakeAddrArrElem(GenTreePtr arrElem, GenTreePtr tree, regMaskTP needReg, RegSet::KeepReg keepReg);
+regMaskTP genMakeAddrArrElem(GenTree* arrElem, GenTree* tree, regMaskTP needReg, RegSet::KeepReg keepReg);
-regMaskTP genMakeAddressable2(GenTreePtr tree,
+regMaskTP genMakeAddressable2(GenTree* tree,
regMaskTP needReg,
RegSet::KeepReg keepReg,
bool forLoadStore,
@@ -124,25 +124,25 @@ regMaskTP genMakeAddressable2(GenTreePtr tree,
bool deferOK = false,
bool evalSideEffs = false);
-bool genStillAddressable(GenTreePtr tree);
+bool genStillAddressable(GenTree* tree);
-regMaskTP genRestoreAddrMode(GenTreePtr addr, GenTreePtr tree, bool lockPhase);
+regMaskTP genRestoreAddrMode(GenTree* addr, GenTree* tree, bool lockPhase);
-regMaskTP genRestAddressable(GenTreePtr tree, regMaskTP addrReg, regMaskTP lockMask);
+regMaskTP genRestAddressable(GenTree* tree, regMaskTP addrReg, regMaskTP lockMask);
-regMaskTP genKeepAddressable(GenTreePtr tree, regMaskTP addrReg, regMaskTP avoidMask = RBM_NONE);
+regMaskTP genKeepAddressable(GenTree* tree, regMaskTP addrReg, regMaskTP avoidMask = RBM_NONE);
-void genDoneAddressable(GenTreePtr tree, regMaskTP addrReg, RegSet::KeepReg keptReg);
+void genDoneAddressable(GenTree* tree, regMaskTP addrReg, RegSet::KeepReg keptReg);
-GenTreePtr genMakeAddrOrFPstk(GenTreePtr tree, regMaskTP* regMaskPtr, bool roundResult);
+GenTree* genMakeAddrOrFPstk(GenTree* tree, regMaskTP* regMaskPtr, bool roundResult);
void genEmitGSCookieCheck(bool pushReg);
-void genEvalSideEffects(GenTreePtr tree);
+void genEvalSideEffects(GenTree* tree);
-void genCondJump(GenTreePtr cond, BasicBlock* destTrue = NULL, BasicBlock* destFalse = NULL, bool bStackFPFixup = true);
+void genCondJump(GenTree* cond, BasicBlock* destTrue = NULL, BasicBlock* destFalse = NULL, bool bStackFPFixup = true);
-emitJumpKind genCondSetFlags(GenTreePtr cond);
+emitJumpKind genCondSetFlags(GenTree* cond);
void genJCC(genTreeOps cmp, BasicBlock* block, var_types type);
@@ -150,79 +150,79 @@ void genJccLongHi(genTreeOps cmp, BasicBlock* jumpTrue, BasicBlock* jumpFalse, b
void genJccLongLo(genTreeOps cmp, BasicBlock* jumpTrue, BasicBlock* jumpFalse);
-void genCondJumpLng(GenTreePtr cond, BasicBlock* jumpTrue, BasicBlock* jumpFalse, bool bFPTransition = false);
+void genCondJumpLng(GenTree* cond, BasicBlock* jumpTrue, BasicBlock* jumpFalse, bool bFPTransition = false);
bool genUse_fcomip();
void genTableSwitch(regNumber reg, unsigned jumpCnt, BasicBlock** jumpTab);
-regMaskTP WriteBarrier(GenTreePtr tgt, GenTreePtr assignVal, regMaskTP addrReg);
+regMaskTP WriteBarrier(GenTree* tgt, GenTree* assignVal, regMaskTP addrReg);
-void genCodeForTreeConst(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg = RBM_NONE);
+void genCodeForTreeConst(GenTree* tree, regMaskTP destReg, regMaskTP bestReg = RBM_NONE);
-void genCodeForTreeLeaf(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg = RBM_NONE);
+void genCodeForTreeLeaf(GenTree* tree, regMaskTP destReg, regMaskTP bestReg = RBM_NONE);
// If "tree" is a comma node, generates code for the left comma arguments,
// in order, returning the first right argument in the list that is not
// a comma node.
-GenTreePtr genCodeForCommaTree(GenTreePtr tree);
+GenTree* genCodeForCommaTree(GenTree* tree);
-void genCodeForTreeLeaf_GT_JMP(GenTreePtr tree);
+void genCodeForTreeLeaf_GT_JMP(GenTree* tree);
static Compiler::fgWalkPreFn fgIsVarAssignedTo;
-void genCodeForQmark(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg);
+void genCodeForQmark(GenTree* tree, regMaskTP destReg, regMaskTP bestReg);
-bool genCodeForQmarkWithCMOV(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg);
+bool genCodeForQmarkWithCMOV(GenTree* tree, regMaskTP destReg, regMaskTP bestReg);
#ifdef _TARGET_XARCH_
-void genCodeForMultEAX(GenTreePtr tree);
+void genCodeForMultEAX(GenTree* tree);
#endif
#ifdef _TARGET_ARM_
-void genCodeForMult64(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg);
+void genCodeForMult64(GenTree* tree, regMaskTP destReg, regMaskTP bestReg);
#endif
-void genCodeForTreeSmpBinArithLogOp(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg);
+void genCodeForTreeSmpBinArithLogOp(GenTree* tree, regMaskTP destReg, regMaskTP bestReg);
-void genCodeForTreeSmpBinArithLogAsgOp(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg);
+void genCodeForTreeSmpBinArithLogAsgOp(GenTree* tree, regMaskTP destReg, regMaskTP bestReg);
-void genCodeForUnsignedMod(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg);
+void genCodeForUnsignedMod(GenTree* tree, regMaskTP destReg, regMaskTP bestReg);
-void genCodeForSignedMod(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg);
+void genCodeForSignedMod(GenTree* tree, regMaskTP destReg, regMaskTP bestReg);
-void genCodeForUnsignedDiv(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg);
+void genCodeForUnsignedDiv(GenTree* tree, regMaskTP destReg, regMaskTP bestReg);
-void genCodeForSignedDiv(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg);
+void genCodeForSignedDiv(GenTree* tree, regMaskTP destReg, regMaskTP bestReg);
-void genCodeForGeneralDivide(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg);
+void genCodeForGeneralDivide(GenTree* tree, regMaskTP destReg, regMaskTP bestReg);
-void genCodeForAsgShift(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg);
+void genCodeForAsgShift(GenTree* tree, regMaskTP destReg, regMaskTP bestReg);
-void genCodeForShift(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg);
+void genCodeForShift(GenTree* tree, regMaskTP destReg, regMaskTP bestReg);
-void genCodeForRelop(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg);
+void genCodeForRelop(GenTree* tree, regMaskTP destReg, regMaskTP bestReg);
-void genCodeForCopyObj(GenTreePtr tree, regMaskTP destReg);
+void genCodeForCopyObj(GenTree* tree, regMaskTP destReg);
-void genCodeForBlkOp(GenTreePtr tree, regMaskTP destReg);
+void genCodeForBlkOp(GenTree* tree, regMaskTP destReg);
-void genCodeForTreeSmpOp(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg = RBM_NONE);
+void genCodeForTreeSmpOp(GenTree* tree, regMaskTP destReg, regMaskTP bestReg = RBM_NONE);
regNumber genIntegerCast(GenTree* tree, regMaskTP needReg, regMaskTP bestReg);
-void genCodeForNumericCast(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg);
+void genCodeForNumericCast(GenTree* tree, regMaskTP destReg, regMaskTP bestReg);
-void genCodeForTreeSmpOp_GT_ADDR(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg = RBM_NONE);
+void genCodeForTreeSmpOp_GT_ADDR(GenTree* tree, regMaskTP destReg, regMaskTP bestReg = RBM_NONE);
-void genCodeForTreeSmpOpAsg(GenTreePtr tree);
+void genCodeForTreeSmpOpAsg(GenTree* tree);
-void genCodeForTreeSmpOpAsg_DONE_ASSG(GenTreePtr tree, regMaskTP addrReg, regNumber reg, bool ovfl);
+void genCodeForTreeSmpOpAsg_DONE_ASSG(GenTree* tree, regMaskTP addrReg, regNumber reg, bool ovfl);
-void genCodeForTreeSpecialOp(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg = RBM_NONE);
+void genCodeForTreeSpecialOp(GenTree* tree, regMaskTP destReg, regMaskTP bestReg = RBM_NONE);
-void genCodeForTree(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg = RBM_NONE);
+void genCodeForTree(GenTree* tree, regMaskTP destReg, regMaskTP bestReg = RBM_NONE);
-void genCodeForTree_DONE_LIFE(GenTreePtr tree, regNumber reg)
+void genCodeForTree_DONE_LIFE(GenTree* tree, regNumber reg)
{
/* We've computed the value of 'tree' into 'reg' */
@@ -232,14 +232,14 @@ void genCodeForTree_DONE_LIFE(GenTreePtr tree, regNumber reg)
genMarkTreeInReg(tree, reg);
}
-void genCodeForTree_DONE_LIFE(GenTreePtr tree, regPairNo regPair)
+void genCodeForTree_DONE_LIFE(GenTree* tree, regPairNo regPair)
{
/* We've computed the value of 'tree' into 'regPair' */
genMarkTreeInRegPair(tree, regPair);
}
-void genCodeForTree_DONE(GenTreePtr tree, regNumber reg)
+void genCodeForTree_DONE(GenTree* tree, regNumber reg)
{
/* Check whether this subtree has freed up any variables */
@@ -248,7 +248,7 @@ void genCodeForTree_DONE(GenTreePtr tree, regNumber reg)
genCodeForTree_DONE_LIFE(tree, reg);
}
-void genCodeForTree_REG_VAR1(GenTreePtr tree)
+void genCodeForTree_REG_VAR1(GenTree* tree)
{
/* Value is already in a register */
@@ -259,29 +259,29 @@ void genCodeForTree_REG_VAR1(GenTreePtr tree)
genCodeForTree_DONE(tree, reg);
}
-void genCodeForTreeLng(GenTreePtr tree, regMaskTP needReg, regMaskTP avoidReg);
+void genCodeForTreeLng(GenTree* tree, regMaskTP needReg, regMaskTP avoidReg);
-regPairNo genCodeForLongModInt(GenTreePtr tree, regMaskTP needReg);
+regPairNo genCodeForLongModInt(GenTree* tree, regMaskTP needReg);
-unsigned genRegCountForLiveIntEnregVars(GenTreePtr tree);
+unsigned genRegCountForLiveIntEnregVars(GenTree* tree);
#ifdef _TARGET_ARM_
-void genStoreFromFltRetRegs(GenTreePtr tree);
-void genLoadIntoFltRetRegs(GenTreePtr tree);
+void genStoreFromFltRetRegs(GenTree* tree);
+void genLoadIntoFltRetRegs(GenTree* tree);
void genLdStFltRetRegsPromotedVar(LclVarDsc* varDsc, bool isLoadIntoFltReg);
#endif
#if CPU_HAS_FP_SUPPORT
-void genRoundFpExpression(GenTreePtr op, var_types type = TYP_UNDEF);
-void genCodeForTreeFlt(GenTreePtr tree, regMaskTP needReg = RBM_ALLFLOAT, regMaskTP bestReg = RBM_NONE);
+void genRoundFpExpression(GenTree* op, var_types type = TYP_UNDEF);
+void genCodeForTreeFlt(GenTree* tree, regMaskTP needReg = RBM_ALLFLOAT, regMaskTP bestReg = RBM_NONE);
#endif
// FP stuff
#include "fp.h"
-void genCodeForJumpTable(GenTreePtr tree);
-void genCodeForSwitchTable(GenTreePtr tree);
-void genCodeForSwitch(GenTreePtr tree);
+void genCodeForJumpTable(GenTree* tree);
+void genCodeForSwitchTable(GenTree* tree);
+void genCodeForSwitch(GenTree* tree);
size_t genPushArgList(GenTreeCall* call);
@@ -331,7 +331,7 @@ size_t genPushArgList(GenTreeCall* call);
// allocates a register, uses it, and sets "*pRegTmp" to the allocated register.
//
// Returns "true" iff it filled two slots with an 8-byte value.
-bool genFillSlotFromPromotedStruct(GenTreePtr arg,
+bool genFillSlotFromPromotedStruct(GenTree* arg,
fgArgTabEntry* curArgTabEntry,
LclVarDsc* promotedStructLocalVarDesc,
emitAttr fieldSize,
@@ -349,26 +349,26 @@ bool genFillSlotFromPromotedStruct(GenTreePtr arg,
// then returns a regMaskTP representing the set of registers holding
// fieldVars of the RHS that go dead with this use (as determined by the live set
// of cpBlk).
-regMaskTP genFindDeadFieldRegs(GenTreePtr cpBlk);
+regMaskTP genFindDeadFieldRegs(GenTree* cpBlk);
void SetupLateArgs(GenTreeCall* call);
#ifdef _TARGET_ARM_
-void PushMkRefAnyArg(GenTreePtr mkRefAnyTree, fgArgTabEntry* curArgTabEntry, regMaskTP regNeedMask);
+void PushMkRefAnyArg(GenTree* mkRefAnyTree, fgArgTabEntry* curArgTabEntry, regMaskTP regNeedMask);
#endif // _TARGET_ARM_
regMaskTP genLoadIndirectCallTarget(GenTreeCall* call);
regMaskTP genCodeForCall(GenTreeCall* call, bool valUsed);
-GenTreePtr genGetAddrModeBase(GenTreePtr tree);
+GenTree* genGetAddrModeBase(GenTree* tree);
-GenTreePtr genIsAddrMode(GenTreePtr tree, GenTreePtr* indxPtr);
+GenTree* genIsAddrMode(GenTree* tree, GenTree** indxPtr);
private:
-bool genIsLocalLastUse(GenTreePtr tree);
+bool genIsLocalLastUse(GenTree* tree);
-bool genIsRegCandidateLocal(GenTreePtr tree);
+bool genIsRegCandidateLocal(GenTree* tree);
//=========================================================================
// Debugging support
@@ -401,54 +401,54 @@ void FlatFPX87_Unload(FlatFPStateX87* pState, unsigned iVirtual, bool bEmitCode
#endif
// Codegen functions. This is the API that codegen will use
-regMaskTP genPushArgumentStackFP(GenTreePtr arg);
-void genRoundFpExpressionStackFP(GenTreePtr op, var_types type = TYP_UNDEF);
-void genCodeForTreeStackFP_Const(GenTreePtr tree);
-void genCodeForTreeStackFP_Leaf(GenTreePtr tree);
-void genCodeForTreeStackFP_SmpOp(GenTreePtr tree);
-void genCodeForTreeStackFP_Special(GenTreePtr tree);
-void genCodeForTreeStackFP_Cast(GenTreePtr tree);
-void genCodeForTreeStackFP(GenTreePtr tree);
-void genCondJumpFltStackFP(GenTreePtr cond, BasicBlock* jumpTrue, BasicBlock* jumpFalse, bool bDoTransition = true);
-void genCondJumpFloat(GenTreePtr cond, BasicBlock* jumpTrue, BasicBlock* jumpFalse);
-void genCondJumpLngStackFP(GenTreePtr cond, BasicBlock* jumpTrue, BasicBlock* jumpFalse);
+regMaskTP genPushArgumentStackFP(GenTree* arg);
+void genRoundFpExpressionStackFP(GenTree* op, var_types type = TYP_UNDEF);
+void genCodeForTreeStackFP_Const(GenTree* tree);
+void genCodeForTreeStackFP_Leaf(GenTree* tree);
+void genCodeForTreeStackFP_SmpOp(GenTree* tree);
+void genCodeForTreeStackFP_Special(GenTree* tree);
+void genCodeForTreeStackFP_Cast(GenTree* tree);
+void genCodeForTreeStackFP(GenTree* tree);
+void genCondJumpFltStackFP(GenTree* cond, BasicBlock* jumpTrue, BasicBlock* jumpFalse, bool bDoTransition = true);
+void genCondJumpFloat(GenTree* cond, BasicBlock* jumpTrue, BasicBlock* jumpFalse);
+void genCondJumpLngStackFP(GenTree* cond, BasicBlock* jumpTrue, BasicBlock* jumpFalse);
void genFloatConst(GenTree* tree, RegSet::RegisterPreference* pref);
void genFloatLeaf(GenTree* tree, RegSet::RegisterPreference* pref);
void genFloatSimple(GenTree* tree, RegSet::RegisterPreference* pref);
void genFloatMath(GenTree* tree, RegSet::RegisterPreference* pref);
void genFloatCheckFinite(GenTree* tree, RegSet::RegisterPreference* pref);
-void genLoadFloat(GenTreePtr tree, regNumber reg);
+void genLoadFloat(GenTree* tree, regNumber reg);
void genFloatAssign(GenTree* tree);
void genFloatArith(GenTree* tree, RegSet::RegisterPreference* pref);
void genFloatAsgArith(GenTree* tree);
-regNumber genAssignArithFloat(genTreeOps oper, GenTreePtr dst, regNumber dstreg, GenTreePtr src, regNumber srcreg);
+regNumber genAssignArithFloat(genTreeOps oper, GenTree* dst, regNumber dstreg, GenTree* src, regNumber srcreg);
-GenTreePtr genMakeAddressableFloat(GenTreePtr tree,
- regMaskTP* regMaskIntPtr,
- regMaskTP* regMaskFltPtr,
- bool bCollapseConstantDoubles = true);
+GenTree* genMakeAddressableFloat(GenTree* tree,
+ regMaskTP* regMaskIntPtr,
+ regMaskTP* regMaskFltPtr,
+ bool bCollapseConstantDoubles = true);
-void genCodeForTreeFloat(GenTreePtr tree, RegSet::RegisterPreference* pref = NULL);
+void genCodeForTreeFloat(GenTree* tree, RegSet::RegisterPreference* pref = NULL);
-void genCodeForTreeFloat(GenTreePtr tree, regMaskTP needReg, regMaskTP bestReg);
+void genCodeForTreeFloat(GenTree* tree, regMaskTP needReg, regMaskTP bestReg);
regNumber genArithmFloat(
- genTreeOps oper, GenTreePtr dst, regNumber dstreg, GenTreePtr src, regNumber srcreg, bool bReverse);
-void genCodeForTreeCastFloat(GenTreePtr tree, RegSet::RegisterPreference* pref);
-void genCodeForTreeCastToFloat(GenTreePtr tree, RegSet::RegisterPreference* pref);
-void genCodeForTreeCastFromFloat(GenTreePtr tree, RegSet::RegisterPreference* pref);
-void genKeepAddressableFloat(GenTreePtr tree, regMaskTP* regMaskIntPtr, regMaskTP* regMaskFltPtr);
-void genDoneAddressableFloat(GenTreePtr tree, regMaskTP addrRegInt, regMaskTP addrRegFlt, RegSet::KeepReg keptReg);
-void genComputeAddressableFloat(GenTreePtr tree,
+ genTreeOps oper, GenTree* dst, regNumber dstreg, GenTree* src, regNumber srcreg, bool bReverse);
+void genCodeForTreeCastFloat(GenTree* tree, RegSet::RegisterPreference* pref);
+void genCodeForTreeCastToFloat(GenTree* tree, RegSet::RegisterPreference* pref);
+void genCodeForTreeCastFromFloat(GenTree* tree, RegSet::RegisterPreference* pref);
+void genKeepAddressableFloat(GenTree* tree, regMaskTP* regMaskIntPtr, regMaskTP* regMaskFltPtr);
+void genDoneAddressableFloat(GenTree* tree, regMaskTP addrRegInt, regMaskTP addrRegFlt, RegSet::KeepReg keptReg);
+void genComputeAddressableFloat(GenTree* tree,
regMaskTP addrRegInt,
regMaskTP addrRegFlt,
RegSet::KeepReg keptReg,
regMaskTP needReg,
RegSet::KeepReg keepReg,
bool freeOnly = false);
-void genRoundFloatExpression(GenTreePtr op, var_types type);
+void genRoundFloatExpression(GenTree* op, var_types type);
#if FEATURE_STACK_FP_X87
// Assumes then block will be generated before else block.
@@ -457,25 +457,25 @@ struct QmarkStateStackFP
FlatFPStateX87 stackState;
};
-void genQMarkRegVarTransition(GenTreePtr nextNode, VARSET_VALARG_TP liveset);
-void genQMarkBeforeElseStackFP(QmarkStateStackFP* pState, VARSET_VALARG_TP varsetCond, GenTreePtr nextNode);
-void genQMarkAfterElseBlockStackFP(QmarkStateStackFP* pState, VARSET_VALARG_TP varsetCond, GenTreePtr nextNode);
+void genQMarkRegVarTransition(GenTree* nextNode, VARSET_VALARG_TP liveset);
+void genQMarkBeforeElseStackFP(QmarkStateStackFP* pState, VARSET_VALARG_TP varsetCond, GenTree* nextNode);
+void genQMarkAfterElseBlockStackFP(QmarkStateStackFP* pState, VARSET_VALARG_TP varsetCond, GenTree* nextNode);
void genQMarkAfterThenBlockStackFP(QmarkStateStackFP* pState);
#endif
-GenTreePtr genMakeAddressableStackFP(GenTreePtr tree,
- regMaskTP* regMaskIntPtr,
- regMaskTP* regMaskFltPtr,
- bool bCollapseConstantDoubles = true);
-void genKeepAddressableStackFP(GenTreePtr tree, regMaskTP* regMaskIntPtr, regMaskTP* regMaskFltPtr);
-void genDoneAddressableStackFP(GenTreePtr tree, regMaskTP addrRegInt, regMaskTP addrRegFlt, RegSet::KeepReg keptReg);
+GenTree* genMakeAddressableStackFP(GenTree* tree,
+ regMaskTP* regMaskIntPtr,
+ regMaskTP* regMaskFltPtr,
+ bool bCollapseConstantDoubles = true);
+void genKeepAddressableStackFP(GenTree* tree, regMaskTP* regMaskIntPtr, regMaskTP* regMaskFltPtr);
+void genDoneAddressableStackFP(GenTree* tree, regMaskTP addrRegInt, regMaskTP addrRegFlt, RegSet::KeepReg keptReg);
-void genCodeForTreeStackFP_Asg(GenTreePtr tree);
-void genCodeForTreeStackFP_AsgArithm(GenTreePtr tree);
-void genCodeForTreeStackFP_Arithm(GenTreePtr tree);
-void genCodeForTreeStackFP_DONE(GenTreePtr tree, regNumber reg);
-void genCodeForTreeFloat_DONE(GenTreePtr tree, regNumber reg);
+void genCodeForTreeStackFP_Asg(GenTree* tree);
+void genCodeForTreeStackFP_AsgArithm(GenTree* tree);
+void genCodeForTreeStackFP_Arithm(GenTree* tree);
+void genCodeForTreeStackFP_DONE(GenTree* tree, regNumber reg);
+void genCodeForTreeFloat_DONE(GenTree* tree, regNumber reg);
void genSetupStateStackFP(BasicBlock* block);
regMaskTP genRegMaskFromLivenessStackFP(VARSET_VALARG_TP varset);
@@ -484,7 +484,7 @@ regMaskTP genRegMaskFromLivenessStackFP(VARSET_VALARG_TP varset);
// If op1 or op2 are comma expressions, will do code-gen for their non-last comma parts,
// and set op1 and op2 to the remaining non-comma expressions.
void genSetupForOpStackFP(
- GenTreePtr& op1, GenTreePtr& op2, bool bReverse, bool bMakeOp1Addressable, bool bOp1ReadOnly, bool bOp2ReadOnly);
+ GenTree*& op1, GenTree*& op2, bool bReverse, bool bMakeOp1Addressable, bool bOp1ReadOnly, bool bOp2ReadOnly);
#if FEATURE_STACK_FP_X87
@@ -510,18 +510,18 @@ BasicBlock* genTransitionBlockStackFP(FlatFPStateX87* pState, BasicBlock* pFrom,
// should know about x87 instructions.
int genNumberTemps();
-void genDiscardStackFP(GenTreePtr tree);
+void genDiscardStackFP(GenTree* tree);
void genRegRenameWithMasks(regNumber dstReg, regNumber srcReg);
-void genRegVarBirthStackFP(GenTreePtr tree);
+void genRegVarBirthStackFP(GenTree* tree);
void genRegVarBirthStackFP(LclVarDsc* varDsc);
-void genRegVarDeathStackFP(GenTreePtr tree);
+void genRegVarDeathStackFP(GenTree* tree);
void genRegVarDeathStackFP(LclVarDsc* varDsc);
-void genLoadStackFP(GenTreePtr tree, regNumber reg);
-void genMovStackFP(GenTreePtr dst, regNumber dstreg, GenTreePtr src, regNumber srcreg);
-bool genCompInsStackFP(GenTreePtr tos, GenTreePtr other);
+void genLoadStackFP(GenTree* tree, regNumber reg);
+void genMovStackFP(GenTree* dst, regNumber dstreg, GenTree* src, regNumber srcreg);
+bool genCompInsStackFP(GenTree* tos, GenTree* other);
regNumber genArithmStackFP(
- genTreeOps oper, GenTreePtr dst, regNumber dstreg, GenTreePtr src, regNumber srcreg, bool bReverse);
-regNumber genAsgArithmStackFP(genTreeOps oper, GenTreePtr dst, regNumber dstreg, GenTreePtr src, regNumber srcreg);
+ genTreeOps oper, GenTree* dst, regNumber dstreg, GenTree* src, regNumber srcreg, bool bReverse);
+regNumber genAsgArithmStackFP(genTreeOps oper, GenTree* dst, regNumber dstreg, GenTree* src, regNumber srcreg);
void genCondJmpInsStackFP(emitJumpKind jumpKind,
BasicBlock* jumpTrue,
BasicBlock* jumpFalse,
@@ -536,7 +536,7 @@ void SpillForCallRegisterFP(regMaskTP noSpillMask);
// When bOnlyNoMemAccess = true, the load will be generated only for constant loading that doesn't
// involve memory accesses, (ie: fldz for positive zero, or fld1 for 1). Will return true the function
// did the load
-bool genConstantLoadStackFP(GenTreePtr tree, bool bOnlyNoMemAccess = false);
+bool genConstantLoadStackFP(GenTree* tree, bool bOnlyNoMemAccess = false);
void genEndOfStatement();
#if FEATURE_STACK_FP_X87
@@ -546,12 +546,12 @@ struct genRegVarDiesInSubTreeData
bool result;
};
static Compiler::fgWalkPreFn genRegVarDiesInSubTreeWorker;
-bool genRegVarDiesInSubTree(GenTreePtr tree, regNumber reg);
+bool genRegVarDiesInSubTree(GenTree* tree, regNumber reg);
#endif // FEATURE_STACK_FP_X87
// Float spill
void UnspillFloat(RegSet::SpillDsc* spillDsc);
-void UnspillFloat(GenTreePtr tree);
+void UnspillFloat(GenTree* tree);
void UnspillFloat(LclVarDsc* varDsc);
void UnspillFloatMachineDep(RegSet::SpillDsc* spillDsc);
void UnspillFloatMachineDep(RegSet::SpillDsc* spillDsc, bool useSameReg);
@@ -588,8 +588,8 @@ regNumber genFlagsEqReg;
unsigned genFlagsEqVar;
void genFlagsEqualToNone();
-void genFlagsEqualToReg(GenTreePtr tree, regNumber reg);
-void genFlagsEqualToVar(GenTreePtr tree, unsigned var);
+void genFlagsEqualToReg(GenTree* tree, regNumber reg);
+void genFlagsEqualToVar(GenTree* tree, unsigned var);
bool genFlagsAreReg(regNumber reg);
bool genFlagsAreVar(unsigned var);
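
genSetupForOpStackFP above takes its operands by reference so the callee can replace the caller's pointers, and with the alias gone that parameter is spelled GenTree*& rather than GenTreePtr&. A simpler stand-in with the same parameter shape (hypothetical SwapOperands, not a JIT routine):

    struct GenTree; // opaque here; only pointer values are exchanged

    // Reference-to-pointer parameters let the callee rewrite the caller's variables in place.
    void SwapOperands(GenTree*& op1, GenTree*& op2)
    {
        GenTree* tmp = op1; // hold op1 while it is overwritten
        op1          = op2;
        op2          = tmp;
    }
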
diff --git a/src/jit/codegencommon.cpp b/src/jit/codegencommon.cpp
index e0046d2279..4c0fce504c 100644
--- a/src/jit/codegencommon.cpp
+++ b/src/jit/codegencommon.cpp
@@ -186,7 +186,7 @@ CodeGen::CodeGen(Compiler* theCompiler) : CodeGenInterface(theCompiler)
#endif
}
-void CodeGenInterface::genMarkTreeInReg(GenTreePtr tree, regNumber reg)
+void CodeGenInterface::genMarkTreeInReg(GenTree* tree, regNumber reg)
{
tree->gtRegNum = reg;
#ifdef LEGACY_BACKEND
@@ -195,7 +195,7 @@ void CodeGenInterface::genMarkTreeInReg(GenTreePtr tree, regNumber reg)
}
#if CPU_LONG_USES_REGPAIR
-void CodeGenInterface::genMarkTreeInRegPair(GenTreePtr tree, regPairNo regPair)
+void CodeGenInterface::genMarkTreeInRegPair(GenTree* tree, regPairNo regPair)
{
tree->gtRegPair = regPair;
#ifdef LEGACY_BACKEND
@@ -463,7 +463,7 @@ void CodeGen::genPrepForEHCodegen()
#endif // FEATURE_EH_CALLFINALLY_THUNKS
}
-void CodeGenInterface::genUpdateLife(GenTreePtr tree)
+void CodeGenInterface::genUpdateLife(GenTree* tree)
{
compiler->compUpdateLife</*ForCodeGen*/ true>(tree);
}
@@ -478,10 +478,10 @@ void CodeGenInterface::genUpdateLife(VARSET_VALARG_TP newLife)
// "tree" MUST occur in the current statement, AFTER the most recent
// update of compiler->compCurLifeTree and compiler->compCurLife.
//
-VARSET_VALRET_TP CodeGen::genUpdateLiveSetForward(GenTreePtr tree)
+VARSET_VALRET_TP CodeGen::genUpdateLiveSetForward(GenTree* tree)
{
- VARSET_TP startLiveSet(VarSetOps::MakeCopy(compiler, compiler->compCurLife));
- GenTreePtr startNode;
+ VARSET_TP startLiveSet(VarSetOps::MakeCopy(compiler, compiler->compCurLife));
+ GenTree* startNode;
assert(tree != compiler->compCurLifeTree);
if (compiler->compCurLifeTree == nullptr)
{
@@ -501,7 +501,7 @@ VARSET_VALRET_TP CodeGen::genUpdateLiveSetForward(GenTreePtr tree)
// 1. "first" must occur after compiler->compCurLifeTree in execution order for the current statement
// 2. "second" must occur after "first" in the current statement
//
-regMaskTP CodeGen::genNewLiveRegMask(GenTreePtr first, GenTreePtr second)
+regMaskTP CodeGen::genNewLiveRegMask(GenTree* first, GenTree* second)
{
// First, compute the liveset after "first"
VARSET_TP firstLiveSet = genUpdateLiveSetForward(first);
@@ -537,7 +537,7 @@ regMaskTP CodeGenInterface::genGetRegMask(const LclVarDsc* varDsc)
// Return the register mask for the given lclVar or regVar tree node
// inline
-regMaskTP CodeGenInterface::genGetRegMask(GenTreePtr tree)
+regMaskTP CodeGenInterface::genGetRegMask(GenTree* tree)
{
assert(tree->gtOper == GT_LCL_VAR || tree->gtOper == GT_REG_VAR);
@@ -565,7 +565,7 @@ regMaskTP CodeGenInterface::genGetRegMask(GenTreePtr tree)
// It might be both going live and dying (that is, it is a dead store) under MinOpts.
// Update regSet.rsMaskVars accordingly.
// inline
-void CodeGenInterface::genUpdateRegLife(const LclVarDsc* varDsc, bool isBorn, bool isDying DEBUGARG(GenTreePtr tree))
+void CodeGenInterface::genUpdateRegLife(const LclVarDsc* varDsc, bool isBorn, bool isDying DEBUGARG(GenTree* tree))
{
#if FEATURE_STACK_FP_X87
// The stack fp reg vars are handled elsewhere
@@ -758,13 +758,13 @@ regMaskTP Compiler::compNoGCHelperCallKillSet(CorInfoHelpFunc helper)
// regSet.rsMaskVars as well)
// if the given lclVar (or indir(addr(local)))/regVar node is going live (being born) or dying.
template <bool ForCodeGen>
-void Compiler::compUpdateLifeVar(GenTreePtr tree, VARSET_TP* pLastUseVars)
+void Compiler::compUpdateLifeVar(GenTree* tree, VARSET_TP* pLastUseVars)
{
- GenTreePtr indirAddrLocal = fgIsIndirOfAddrOfLocal(tree);
+ GenTree* indirAddrLocal = fgIsIndirOfAddrOfLocal(tree);
assert(tree->OperIsNonPhiLocal() || indirAddrLocal != nullptr);
// Get the local var tree -- if "tree" is "Ldobj(addr(x))", or "ind(addr(x))" this is "x", else it's "tree".
- GenTreePtr lclVarTree = indirAddrLocal;
+ GenTree* lclVarTree = indirAddrLocal;
if (lclVarTree == nullptr)
{
lclVarTree = tree;
@@ -797,7 +797,7 @@ void Compiler::compUpdateLifeVar(GenTreePtr tree, VARSET_TP* pLastUseVars)
// ifdef'ed out for AMD64).
else if (!varDsc->lvIsStructField)
{
- GenTreePtr prevTree;
+ GenTree* prevTree;
for (prevTree = tree->gtPrev;
prevTree != NULL && prevTree != compCurLifeTree;
prevTree = prevTree->gtPrev)
@@ -1076,10 +1076,10 @@ void Compiler::compUpdateLifeVar(GenTreePtr tree, VARSET_TP* pLastUseVars)
}
// Need an explicit instantiation.
-template void Compiler::compUpdateLifeVar<false>(GenTreePtr tree, VARSET_TP* pLastUseVars);
+template void Compiler::compUpdateLifeVar<false>(GenTree* tree, VARSET_TP* pLastUseVars);
template <bool ForCodeGen>
-void Compiler::compChangeLife(VARSET_VALARG_TP newLife DEBUGARG(GenTreePtr tree))
+void Compiler::compChangeLife(VARSET_VALARG_TP newLife DEBUGARG(GenTree* tree))
{
LclVarDsc* varDsc;
@@ -1219,7 +1219,7 @@ void Compiler::compChangeLife(VARSET_VALARG_TP newLife DEBUGARG(GenTreePtr tree)
}
// Need an explicit instantiation.
-template void Compiler::compChangeLife<true>(VARSET_VALARG_TP newLife DEBUGARG(GenTreePtr tree));
+template void Compiler::compChangeLife<true>(VARSET_VALARG_TP newLife DEBUGARG(GenTree* tree));
#ifdef LEGACY_BACKEND
@@ -1231,11 +1231,11 @@ template void Compiler::compChangeLife<true>(VARSET_VALARG_TP newLife DEBUGARG(G
* The output is the mask of integer registers that are currently
* alive and holding the enregistered local variables.
*/
-regMaskTP CodeGenInterface::genLiveMask(GenTreePtr tree)
+regMaskTP CodeGenInterface::genLiveMask(GenTree* tree)
{
regMaskTP liveMask = regSet.rsMaskVars;
- GenTreePtr nextNode;
+ GenTree* nextNode;
if (compiler->compCurLifeTree == nullptr)
{
assert(compiler->compCurStmt != nullptr);
@@ -1726,7 +1726,7 @@ void CodeGen::genAdjustStackLevel(BasicBlock* block)
#ifdef _TARGET_ARMARCH_
// return size
// alignmentWB is out param
-unsigned CodeGenInterface::InferOpSizeAlign(GenTreePtr op, unsigned* alignmentWB)
+unsigned CodeGenInterface::InferOpSizeAlign(GenTree* op, unsigned* alignmentWB)
{
unsigned alignment = 0;
unsigned opSize = 0;
@@ -1749,7 +1749,7 @@ unsigned CodeGenInterface::InferOpSizeAlign(GenTreePtr op, unsigned* alignmentWB
}
// return size
// alignmentWB is out param
-unsigned CodeGenInterface::InferStructOpSizeAlign(GenTreePtr op, unsigned* alignmentWB)
+unsigned CodeGenInterface::InferStructOpSizeAlign(GenTree* op, unsigned* alignmentWB)
{
unsigned alignment = 0;
unsigned opSize = 0;
@@ -1785,7 +1785,7 @@ unsigned CodeGenInterface::InferStructOpSizeAlign(GenTreePtr op, unsigned* align
}
else if (op->OperIsCopyBlkOp())
{
- GenTreePtr op2 = op->gtOp.gtOp2;
+ GenTree* op2 = op->gtOp.gtOp2;
if (op2->OperGet() == GT_CNS_INT)
{
@@ -1798,10 +1798,10 @@ unsigned CodeGenInterface::InferStructOpSizeAlign(GenTreePtr op, unsigned* align
}
else
{
- opSize = (unsigned)op2->gtIntCon.gtIconVal;
- GenTreePtr op1 = op->gtOp.gtOp1;
+ opSize = (unsigned)op2->gtIntCon.gtIconVal;
+ GenTree* op1 = op->gtOp.gtOp1;
assert(op1->OperGet() == GT_LIST);
- GenTreePtr dstAddr = op1->gtOp.gtOp1;
+ GenTree* dstAddr = op1->gtOp.gtOp1;
if (dstAddr->OperGet() == GT_ADDR)
{
InferStructOpSizeAlign(dstAddr->gtOp.gtOp1, &alignment);
@@ -1890,13 +1890,13 @@ unsigned CodeGenInterface::InferStructOpSizeAlign(GenTreePtr op, unsigned* align
* form an address mode later on.
*/
-bool CodeGen::genCreateAddrMode(GenTreePtr addr,
- int mode,
- bool fold,
- regMaskTP regMask,
- bool* revPtr,
- GenTreePtr* rv1Ptr,
- GenTreePtr* rv2Ptr,
+bool CodeGen::genCreateAddrMode(GenTree* addr,
+ int mode,
+ bool fold,
+ regMaskTP regMask,
+ bool* revPtr,
+ GenTree** rv1Ptr,
+ GenTree** rv2Ptr,
#if SCALED_ADDR_MODES
unsigned* mulPtr,
#endif
@@ -1949,18 +1949,18 @@ bool CodeGen::genCreateAddrMode(GenTreePtr addr,
return false;
}
- GenTreePtr rv1 = nullptr;
- GenTreePtr rv2 = nullptr;
+ GenTree* rv1 = nullptr;
+ GenTree* rv2 = nullptr;
- GenTreePtr op1;
- GenTreePtr op2;
+ GenTree* op1;
+ GenTree* op2;
ssize_t cns;
#if SCALED_ADDR_MODES
unsigned mul;
#endif
- GenTreePtr tmp;
+ GenTree* tmp;
/* What order are the sub-operands to be evaluated */
@@ -2447,8 +2447,8 @@ FOUND_AM:
if (fold)
{
- ssize_t tmpMul;
- GenTreePtr index;
+ ssize_t tmpMul;
+ GenTree* index;
if ((rv2->gtOper == GT_MUL || rv2->gtOper == GT_LSH) && (rv2->gtOp.gtOp2->IsCnsIntOrI()))
{
@@ -2791,7 +2791,7 @@ void CodeGen::genExitCode(BasicBlock* block)
* For non-dbg code, we share the helper blocks created by fgAddCodeRef().
*/
-void CodeGen::genJumpToThrowHlpBlk(emitJumpKind jumpKind, SpecialCodeKind codeKind, GenTreePtr failBlk)
+void CodeGen::genJumpToThrowHlpBlk(emitJumpKind jumpKind, SpecialCodeKind codeKind, GenTree* failBlk)
{
bool useThrowHlpBlk = !compiler->opts.compDbgCode;
@@ -2864,7 +2864,7 @@ void CodeGen::genJumpToThrowHlpBlk(emitJumpKind jumpKind, SpecialCodeKind codeKi
*/
// inline
-void CodeGen::genCheckOverflow(GenTreePtr tree)
+void CodeGen::genCheckOverflow(GenTree* tree)
{
// Overflow-check should be asked for this tree
noway_assert(tree->gtOverflow());
@@ -4068,7 +4068,7 @@ CorInfoHelpFunc CodeGenInterface::genWriteBarrierHelperForWriteBarrierForm(GenTr
// tgt - target tree of write (e.g., GT_STOREIND)
// wbf - already computed write barrier form to use
//
-void CodeGen::genGCWriteBarrier(GenTreePtr tgt, GCInfo::WriteBarrierForm wbf)
+void CodeGen::genGCWriteBarrier(GenTree* tgt, GCInfo::WriteBarrierForm wbf)
{
CorInfoHelpFunc helper = genWriteBarrierHelperForWriteBarrierForm(tgt, wbf);
@@ -4087,9 +4087,9 @@ void CodeGen::genGCWriteBarrier(GenTreePtr tgt, GCInfo::WriteBarrierForm wbf)
CheckedWriteBarrierKinds wbKind = CWBKind_Unclassified;
if (tgt->gtOper == GT_IND)
{
- GenTreePtr lcl = NULL;
+ GenTree* lcl = NULL;
- GenTreePtr indArg = tgt->gtOp.gtOp1;
+ GenTree* indArg = tgt->gtOp.gtOp1;
if (indArg->gtOper == GT_ADDR && indArg->gtOp.gtOp1->gtOper == GT_IND)
{
indArg = indArg->gtOp.gtOp1->gtOp.gtOp1;
@@ -11124,7 +11124,7 @@ void CodeGen::genGenerateStackProbe()
* Record the constant and return a tree node that yields its address.
*/
-GenTreePtr CodeGen::genMakeConst(const void* cnsAddr, var_types cnsType, GenTreePtr cnsTree, bool dblAlign)
+GenTree* CodeGen::genMakeConst(const void* cnsAddr, var_types cnsType, GenTree* cnsTree, bool dblAlign)
{
// Assign the constant an offset in the data section
UNATIVE_OFFSET cnsSize = genTypeSize(cnsType);
@@ -11372,7 +11372,7 @@ bool Compiler::IsHfa(CORINFO_CLASS_HANDLE hClass)
#endif
}
-bool Compiler::IsHfa(GenTreePtr tree)
+bool Compiler::IsHfa(GenTree* tree)
{
#ifdef FEATURE_HFA
return IsHfa(gtGetStructHandleIfPresent(tree));
@@ -11381,7 +11381,7 @@ bool Compiler::IsHfa(GenTreePtr tree)
#endif
}
-var_types Compiler::GetHfaType(GenTreePtr tree)
+var_types Compiler::GetHfaType(GenTree* tree)
{
#ifdef FEATURE_HFA
return GetHfaType(gtGetStructHandleIfPresent(tree));
@@ -11390,7 +11390,7 @@ var_types Compiler::GetHfaType(GenTreePtr tree)
#endif
}
-unsigned Compiler::GetHfaCount(GenTreePtr tree)
+unsigned Compiler::GetHfaCount(GenTree* tree)
{
return GetHfaCount(gtGetStructHandleIfPresent(tree));
}
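
compUpdateLifeVar and compChangeLife above are explicitly instantiated, and those instantiation lines are respelled along with the definitions. Because a typedef is transparent to name mangling, the emitted symbols are unchanged by the rename; a minimal illustration with hypothetical Widget/Touch names:

    struct Widget;
    using WidgetPtr = Widget*; // analogue of the old alias

    template <bool ForCodeGen>
    void Touch(Widget* w) // only copies the pointer, so a forward declaration is enough
    {
        (void)w;
    }

    // Spelling the parameter as Widget* or as WidgetPtr names the same specialization
    // and mangles to the same symbol, so exactly one explicit instantiation may appear.
    template void Touch<false>(Widget* w);
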
diff --git a/src/jit/codegeninterface.h b/src/jit/codegeninterface.h
index 5c84a4d2a8..91c932764c 100644
--- a/src/jit/codegeninterface.h
+++ b/src/jit/codegeninterface.h
@@ -61,7 +61,7 @@ public:
// TODO-Cleanup: We should handle the spill directly in CodeGen, rather than
// calling it from compUpdateLifeVar. Then this can be non-virtual.
- virtual void genSpillVar(GenTreePtr tree) = 0;
+ virtual void genSpillVar(GenTree* tree) = 0;
#endif // !LEGACY_BACKEND
//-------------------------------------------------------------------------
@@ -79,13 +79,13 @@ public:
// TODO-Cleanup: Abstract out the part of this that finds the addressing mode, and
// move it to Lower
- virtual bool genCreateAddrMode(GenTreePtr addr,
- int mode,
- bool fold,
- regMaskTP regMask,
- bool* revPtr,
- GenTreePtr* rv1Ptr,
- GenTreePtr* rv2Ptr,
+ virtual bool genCreateAddrMode(GenTree* addr,
+ int mode,
+ bool fold,
+ regMaskTP regMask,
+ bool* revPtr,
+ GenTree** rv1Ptr,
+ GenTree** rv2Ptr,
#if SCALED_ADDR_MODES
unsigned* mulPtr,
#endif
@@ -126,9 +126,9 @@ public:
//-------------------------------------------------------------------------
// Liveness-related fields & methods
public:
- void genUpdateRegLife(const LclVarDsc* varDsc, bool isBorn, bool isDying DEBUGARG(GenTreePtr tree));
+ void genUpdateRegLife(const LclVarDsc* varDsc, bool isBorn, bool isDying DEBUGARG(GenTree* tree));
#ifndef LEGACY_BACKEND
- void genUpdateVarReg(LclVarDsc* varDsc, GenTreePtr tree);
+ void genUpdateVarReg(LclVarDsc* varDsc, GenTree* tree);
#endif // !LEGACY_BACKEND
protected:
@@ -141,13 +141,13 @@ protected:
regMaskTP genLastLiveMask; // these two are used in genLiveMask
regMaskTP genGetRegMask(const LclVarDsc* varDsc);
- regMaskTP genGetRegMask(GenTreePtr tree);
+ regMaskTP genGetRegMask(GenTree* tree);
- void genUpdateLife(GenTreePtr tree);
+ void genUpdateLife(GenTree* tree);
void genUpdateLife(VARSET_VALARG_TP newLife);
#ifdef LEGACY_BACKEND
- regMaskTP genLiveMask(GenTreePtr tree);
+ regMaskTP genLiveMask(GenTree* tree);
regMaskTP genLiveMask(VARSET_VALARG_TP liveSet);
#endif
@@ -317,23 +317,23 @@ public:
#endif // FEATURE_STACK_FP_X87
#ifndef LEGACY_BACKEND
- regNumber genGetAssignedReg(GenTreePtr tree);
+ regNumber genGetAssignedReg(GenTree* tree);
#endif // !LEGACY_BACKEND
#ifdef LEGACY_BACKEND
// Changes GT_LCL_VAR nodes to GT_REG_VAR nodes if possible.
- bool genMarkLclVar(GenTreePtr tree);
+ bool genMarkLclVar(GenTree* tree);
- void genBashLclVar(GenTreePtr tree, unsigned varNum, LclVarDsc* varDsc);
+ void genBashLclVar(GenTree* tree, unsigned varNum, LclVarDsc* varDsc);
#endif // LEGACY_BACKEND
public:
- unsigned InferStructOpSizeAlign(GenTreePtr op, unsigned* alignmentWB);
- unsigned InferOpSizeAlign(GenTreePtr op, unsigned* alignmentWB);
+ unsigned InferStructOpSizeAlign(GenTree* op, unsigned* alignmentWB);
+ unsigned InferOpSizeAlign(GenTree* op, unsigned* alignmentWB);
- void genMarkTreeInReg(GenTreePtr tree, regNumber reg);
+ void genMarkTreeInReg(GenTree* tree, regNumber reg);
#if CPU_LONG_USES_REGPAIR
- void genMarkTreeInRegPair(GenTreePtr tree, regPairNo regPair);
+ void genMarkTreeInRegPair(GenTree* tree, regPairNo regPair);
#endif
// Methods to abstract target information
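
genCreateAddrMode above returns the two address components through rv1Ptr and rv2Ptr, so those parameters change from GenTreePtr* to GenTree**. A minimal sketch of that out-parameter shape, with assumed names (SplitBase is not a JIT function):

    struct GenTree; // opaque node type for the sketch

    // Fills both out-parameters and reports whether the address could be split;
    // callers pass the addresses of their own GenTree* locals.
    bool SplitBase(GenTree* addr, GenTree** basePtr, GenTree** indexPtr)
    {
        *basePtr  = addr;    // pretend the whole address is the base component
        *indexPtr = nullptr; // and that no index component was found
        return addr != nullptr;
    }

A caller declares GenTree* base; and GenTree* index; on separate lines and passes &base and &index, exactly the shape the rewritten prototypes keep.
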
diff --git a/src/jit/codegenlegacy.cpp b/src/jit/codegenlegacy.cpp
index 945f7bd812..7fb946dbed 100644
--- a/src/jit/codegenlegacy.cpp
+++ b/src/jit/codegenlegacy.cpp
@@ -125,7 +125,7 @@ void CodeGen::genDyingVars(VARSET_VALARG_TP beforeSet, VARSET_VALARG_TP afterSet
* Change the given enregistered local variable node to a register variable node
*/
-void CodeGenInterface::genBashLclVar(GenTreePtr tree, unsigned varNum, LclVarDsc* varDsc)
+void CodeGenInterface::genBashLclVar(GenTree* tree, unsigned varNum, LclVarDsc* varDsc)
{
noway_assert(tree->gtOper == GT_LCL_VAR);
noway_assert(varDsc->lvRegister);
@@ -194,7 +194,7 @@ void CodeGen::checkLiveness(genLivenessSet* ls)
}
// inline
-bool CodeGenInterface::genMarkLclVar(GenTreePtr tree)
+bool CodeGenInterface::genMarkLclVar(GenTree* tree)
{
unsigned varNum;
LclVarDsc* varDsc;
@@ -225,13 +225,13 @@ bool CodeGenInterface::genMarkLclVar(GenTreePtr tree)
}
// inline
-GenTreePtr CodeGen::genGetAddrModeBase(GenTreePtr tree)
+GenTree* CodeGen::genGetAddrModeBase(GenTree* tree)
{
- bool rev;
- unsigned mul;
- unsigned cns;
- GenTreePtr adr;
- GenTreePtr idx;
+ bool rev;
+ unsigned mul;
+ unsigned cns;
+ GenTree* adr;
+ GenTree* idx;
if (genCreateAddrMode(tree, // address
0, // mode
@@ -461,7 +461,7 @@ regNumber CodeGen::genGetRegSetToIcon(ssize_t val, regMaskTP regBest /* = 0 */,
* 'tree' is the resulting tree
*/
-void CodeGen::genIncRegBy(regNumber reg, ssize_t ival, GenTreePtr tree, var_types dstType, bool ovfl)
+void CodeGen::genIncRegBy(regNumber reg, ssize_t ival, GenTree* tree, var_types dstType, bool ovfl)
{
bool setFlags = (tree != NULL) && tree->gtSetFlags();
@@ -529,7 +529,7 @@ UPDATE_LIVENESS:
* 'tree' is the resulting tree.
*/
-void CodeGen::genDecRegBy(regNumber reg, ssize_t ival, GenTreePtr tree)
+void CodeGen::genDecRegBy(regNumber reg, ssize_t ival, GenTree* tree)
{
noway_assert((tree->gtFlags & GTF_OVERFLOW) &&
((tree->gtFlags & GTF_UNSIGNED) || ival == ((tree->gtType == TYP_INT) ? INT32_MIN : SSIZE_T_MIN)));
@@ -558,7 +558,7 @@ void CodeGen::genDecRegBy(regNumber reg, ssize_t ival, GenTreePtr tree)
* 'tree' is the resulting tree
*/
-void CodeGen::genMulRegBy(regNumber reg, ssize_t ival, GenTreePtr tree, var_types dstType, bool ovfl)
+void CodeGen::genMulRegBy(regNumber reg, ssize_t ival, GenTree* tree, var_types dstType, bool ovfl)
{
noway_assert(genActualType(dstType) == TYP_INT || genActualType(dstType) == TYP_I_IMPL);
@@ -615,7 +615,7 @@ void CodeGen::genMulRegBy(regNumber reg, ssize_t ival, GenTreePtr tree, var_type
*/
void CodeGen::genComputeReg(
- GenTreePtr tree, regMaskTP needReg, RegSet::ExactReg mustReg, RegSet::KeepReg keepReg, bool freeOnly)
+ GenTree* tree, regMaskTP needReg, RegSet::ExactReg mustReg, RegSet::KeepReg keepReg, bool freeOnly)
{
noway_assert(tree->gtType != TYP_VOID);
@@ -746,7 +746,7 @@ REG_OK:
*/
// inline
-void CodeGen::genCompIntoFreeReg(GenTreePtr tree, regMaskTP needReg, RegSet::KeepReg keepReg)
+void CodeGen::genCompIntoFreeReg(GenTree* tree, regMaskTP needReg, RegSet::KeepReg keepReg)
{
genComputeReg(tree, needReg, RegSet::ANY_REG, keepReg, true);
}
@@ -757,7 +757,7 @@ void CodeGen::genCompIntoFreeReg(GenTreePtr tree, regMaskTP needReg, RegSet::Kee
* register (but also make sure the value is presently in a register).
*/
-void CodeGen::genReleaseReg(GenTreePtr tree)
+void CodeGen::genReleaseReg(GenTree* tree)
{
if (tree->gtFlags & GTF_SPILLED)
{
@@ -778,7 +778,7 @@ void CodeGen::genReleaseReg(GenTreePtr tree)
* where tree will be recovered to, so we disallow keepReg==RegSet::FREE_REG for GC type trees.
*/
-void CodeGen::genRecoverReg(GenTreePtr tree, regMaskTP needReg, RegSet::KeepReg keepReg)
+void CodeGen::genRecoverReg(GenTree* tree, regMaskTP needReg, RegSet::KeepReg keepReg)
{
if (tree->gtFlags & GTF_SPILLED)
{
@@ -830,7 +830,7 @@ void CodeGen::genRecoverReg(GenTreePtr tree, regMaskTP needReg, RegSet::KeepReg
*/
// inline
-void CodeGen::genMoveRegPairHalf(GenTreePtr tree, regNumber dst, regNumber src, int off)
+void CodeGen::genMoveRegPairHalf(GenTree* tree, regNumber dst, regNumber src, int off)
{
if (src == REG_STK)
{
@@ -863,7 +863,7 @@ void CodeGen::genMoveRegPairHalf(GenTreePtr tree, regNumber dst, regNumber src,
* assume that the current register pair is marked as used and free it.
*/
-void CodeGen::genMoveRegPair(GenTreePtr tree, regMaskTP needReg, regPairNo newPair)
+void CodeGen::genMoveRegPair(GenTree* tree, regMaskTP needReg, regPairNo newPair)
{
regPairNo oldPair;
@@ -1013,7 +1013,7 @@ void CodeGen::genMoveRegPair(GenTreePtr tree, regMaskTP needReg, regPairNo newPa
*/
void CodeGen::genComputeRegPair(
- GenTreePtr tree, regPairNo needRegPair, regMaskTP avoidReg, RegSet::KeepReg keepReg, bool freeOnly)
+ GenTree* tree, regPairNo needRegPair, regMaskTP avoidReg, RegSet::KeepReg keepReg, bool freeOnly)
{
regMaskTP regMask;
regPairNo regPair;
@@ -1177,7 +1177,7 @@ void CodeGen::genComputeRegPair(
*/
// inline
-void CodeGen::genCompIntoFreeRegPair(GenTreePtr tree, regMaskTP avoidReg, RegSet::KeepReg keepReg)
+void CodeGen::genCompIntoFreeRegPair(GenTree* tree, regMaskTP avoidReg, RegSet::KeepReg keepReg)
{
genComputeRegPair(tree, REG_PAIR_NONE, avoidReg, keepReg, true);
}
@@ -1189,7 +1189,7 @@ void CodeGen::genCompIntoFreeRegPair(GenTreePtr tree, regMaskTP avoidReg, RegSet
* pair).
*/
-void CodeGen::genReleaseRegPair(GenTreePtr tree)
+void CodeGen::genReleaseRegPair(GenTree* tree)
{
if (tree->gtFlags & GTF_SPILLED)
{
@@ -1209,7 +1209,7 @@ void CodeGen::genReleaseRegPair(GenTreePtr tree)
* if 'keepReg' is 0, free the register pair.
*/
-void CodeGen::genRecoverRegPair(GenTreePtr tree, regPairNo regPair, RegSet::KeepReg keepReg)
+void CodeGen::genRecoverRegPair(GenTree* tree, regPairNo regPair, RegSet::KeepReg keepReg)
{
if (tree->gtFlags & GTF_SPILLED)
{
@@ -1254,7 +1254,7 @@ void CodeGen::genRecoverRegPair(GenTreePtr tree, regPairNo regPair, RegSet::Keep
*/
// inline
-void CodeGen::genEvalIntoFreeRegPair(GenTreePtr tree, regPairNo regPair, regMaskTP avoidReg)
+void CodeGen::genEvalIntoFreeRegPair(GenTree* tree, regPairNo regPair, regMaskTP avoidReg)
{
genComputeRegPair(tree, regPair, avoidReg, RegSet::KEEP_REG);
genRecoverRegPair(tree, regPair, RegSet::FREE_REG);
@@ -1301,8 +1301,8 @@ void CodeGen::genMakeRegPairAvailable(regPairNo regPair)
* calling genDoneAddressable(addr, *useMaskPtr, RegSet::FREE_REG);
*/
-bool CodeGen::genMakeIndAddrMode(GenTreePtr addr,
- GenTreePtr oper,
+bool CodeGen::genMakeIndAddrMode(GenTree* addr,
+ GenTree* oper,
bool forLea,
regMaskTP regMask,
RegSet::KeepReg keepReg,
@@ -1316,21 +1316,21 @@ bool CodeGen::genMakeIndAddrMode(GenTreePtr addr,
return true;
}
- bool rev;
- GenTreePtr rv1;
- GenTreePtr rv2;
- bool operIsArrIndex; // is oper an array index
- GenTreePtr scaledIndex; // If scaled addressing mode can't be used
+ bool rev;
+ GenTree* rv1;
+ GenTree* rv2;
+ bool operIsArrIndex; // is oper an array index
+ GenTree* scaledIndex; // If scaled addressing mode can't be used
regMaskTP anyMask = RBM_ALLINT;
unsigned cns;
unsigned mul;
- GenTreePtr tmp;
- int ixv = INT_MAX; // unset value
+ GenTree* tmp;
+ int ixv = INT_MAX; // unset value
- GenTreePtr scaledIndexVal;
+ GenTree* scaledIndexVal;
regMaskTP newLiveMask;
regMaskTP rv1Mask;
@@ -1835,17 +1835,17 @@ YES:
* 'oper' is an array bounds check (a GT_ARR_BOUNDS_CHECK node).
*/
-void CodeGen::genRangeCheck(GenTreePtr oper)
+void CodeGen::genRangeCheck(GenTree* oper)
{
noway_assert(oper->OperGet() == GT_ARR_BOUNDS_CHECK);
GenTreeBoundsChk* bndsChk = oper->AsBoundsChk();
- GenTreePtr arrLen = bndsChk->gtArrLen;
- GenTreePtr arrRef = NULL;
- int lenOffset = 0;
+ GenTree* arrLen = bndsChk->gtArrLen;
+ GenTree* arrRef = NULL;
+ int lenOffset = 0;
/* Is the array index a constant value? */
- GenTreePtr index = bndsChk->gtIndex;
+ GenTree* index = bndsChk->gtIndex;
if (!index->IsCnsIntOrI())
{
// No, it's not a constant.
@@ -2030,7 +2030,7 @@ void CodeGen::genRangeCheck(GenTreePtr oper)
// inline
regMaskTP CodeGen::genMakeRvalueAddressable(
- GenTreePtr tree, regMaskTP needReg, RegSet::KeepReg keepReg, bool forLoadStore, bool smallOK)
+ GenTree* tree, regMaskTP needReg, RegSet::KeepReg keepReg, bool forLoadStore, bool smallOK)
{
regNumber reg;
@@ -2055,7 +2055,7 @@ regMaskTP CodeGen::genMakeRvalueAddressable(
/*****************************************************************************/
-bool CodeGen::genIsLocalLastUse(GenTreePtr tree)
+bool CodeGen::genIsLocalLastUse(GenTree* tree)
{
const LclVarDsc* varDsc = &compiler->lvaTable[tree->gtLclVarCommon.gtLclNum];
@@ -2078,7 +2078,7 @@ bool CodeGen::genIsLocalLastUse(GenTreePtr tree)
* where to look for the offset to use.
*/
-regMaskTP CodeGen::genMakeAddrArrElem(GenTreePtr arrElem, GenTreePtr tree, regMaskTP needReg, RegSet::KeepReg keepReg)
+regMaskTP CodeGen::genMakeAddrArrElem(GenTree* arrElem, GenTree* tree, regMaskTP needReg, RegSet::KeepReg keepReg)
{
noway_assert(arrElem->gtOper == GT_ARR_ELEM);
noway_assert(!tree || tree->gtOper == GT_IND || tree == arrElem);
@@ -2096,11 +2096,11 @@ regMaskTP CodeGen::genMakeAddrArrElem(GenTreePtr arrElem, GenTreePtr tree, regMa
applies to all type of tree nodes except for GT_ARR_ELEM.
*/
- GenTreePtr arrObj = arrElem->gtArrElem.gtArrObj;
- unsigned rank = arrElem->gtArrElem.gtArrRank;
- var_types elemType = arrElem->gtArrElem.gtArrElemType;
- regMaskTP addrReg = RBM_NONE;
- regMaskTP regNeed = RBM_ALLINT;
+ GenTree* arrObj = arrElem->gtArrElem.gtArrObj;
+ unsigned rank = arrElem->gtArrElem.gtArrRank;
+ var_types elemType = arrElem->gtArrElem.gtArrElemType;
+ regMaskTP addrReg = RBM_NONE;
+ regMaskTP regNeed = RBM_ALLINT;
#if FEATURE_WRITE_BARRIER && !NOGC_WRITE_BARRIERS
// In CodeGen::WriteBarrier we set up ARG_1 followed by ARG_0
@@ -2161,7 +2161,7 @@ regMaskTP CodeGen::genMakeAddrArrElem(GenTreePtr arrElem, GenTreePtr tree, regMa
for (dim = 0; dim < rank; dim++)
{
- GenTreePtr index = arrElem->gtArrElem.gtArrInds[dim];
+ GenTree* index = arrElem->gtArrElem.gtArrInds[dim];
/* Get the index into a free register (other than the register holding the array) */
@@ -2285,10 +2285,10 @@ regMaskTP CodeGen::genMakeAddrArrElem(GenTreePtr arrElem, GenTreePtr tree, regMa
*/
regMaskTP CodeGen::genMakeAddressable(
- GenTreePtr tree, regMaskTP needReg, RegSet::KeepReg keepReg, bool smallOK, bool deferOK)
+ GenTree* tree, regMaskTP needReg, RegSet::KeepReg keepReg, bool smallOK, bool deferOK)
{
- GenTreePtr addr = NULL;
- regMaskTP regMask;
+ GenTree* addr = NULL;
+ regMaskTP regMask;
/* Is the value simply sitting in a register? */
@@ -2429,7 +2429,7 @@ GOT_VAL:
* freeOnly - target register needs to be a scratch register
*/
-void CodeGen::genComputeAddressable(GenTreePtr tree,
+void CodeGen::genComputeAddressable(GenTree* tree,
regMaskTP addrReg,
RegSet::KeepReg keptReg,
regMaskTP needReg,
@@ -2483,7 +2483,7 @@ void CodeGen::genComputeAddressable(GenTreePtr tree,
* Should be similar to genMakeAddressable() but gives more control.
*/
-regMaskTP CodeGen::genMakeAddressable2(GenTreePtr tree,
+regMaskTP CodeGen::genMakeAddressable2(GenTree* tree,
regMaskTP needReg,
RegSet::KeepReg keepReg,
bool forLoadStore,
@@ -2540,7 +2540,7 @@ regMaskTP CodeGen::genMakeAddressable2(GenTreePtr tree,
*/
// inline
-bool CodeGen::genStillAddressable(GenTreePtr tree)
+bool CodeGen::genStillAddressable(GenTree* tree)
{
/* Has the value (or one or more of its sub-operands) been spilled? */
@@ -2556,7 +2556,7 @@ bool CodeGen::genStillAddressable(GenTreePtr tree)
* argument indicates whether we're in the 'lock' or 'reload' phase.
*/
-regMaskTP CodeGen::genRestoreAddrMode(GenTreePtr addr, GenTreePtr tree, bool lockPhase)
+regMaskTP CodeGen::genRestoreAddrMode(GenTree* addr, GenTree* tree, bool lockPhase)
{
regMaskTP regMask = RBM_NONE;
@@ -2646,7 +2646,7 @@ regMaskTP CodeGen::genRestoreAddrMode(GenTreePtr addr, GenTreePtr tree, bool loc
* registers).
*/
-regMaskTP CodeGen::genRestAddressable(GenTreePtr tree, regMaskTP addrReg, regMaskTP lockMask)
+regMaskTP CodeGen::genRestAddressable(GenTree* tree, regMaskTP addrReg, regMaskTP lockMask)
{
noway_assert((regSet.rsMaskLock & lockMask) == lockMask);
@@ -2720,7 +2720,7 @@ regMaskTP CodeGen::genRestAddressable(GenTreePtr tree, regMaskTP addrReg, regMas
* the address (these will be marked as used on exit).
*/
-regMaskTP CodeGen::genKeepAddressable(GenTreePtr tree, regMaskTP addrReg, regMaskTP avoidMask)
+regMaskTP CodeGen::genKeepAddressable(GenTree* tree, regMaskTP addrReg, regMaskTP avoidMask)
{
/* Is the operand still addressable? */
@@ -2755,7 +2755,7 @@ regMaskTP CodeGen::genKeepAddressable(GenTreePtr tree, regMaskTP addrReg, regMas
* by genMakeAddressable().
*/
-void CodeGen::genDoneAddressable(GenTreePtr tree, regMaskTP addrReg, RegSet::KeepReg keptReg)
+void CodeGen::genDoneAddressable(GenTree* tree, regMaskTP addrReg, RegSet::KeepReg keptReg)
{
if (keptReg == RegSet::FREE_REG)
{
@@ -2788,7 +2788,7 @@ void CodeGen::genDoneAddressable(GenTreePtr tree, regMaskTP addrReg, RegSet::Kee
* to evaluate into the FP stack, we do this and return zero.
*/
-GenTreePtr CodeGen::genMakeAddrOrFPstk(GenTreePtr tree, regMaskTP* regMaskPtr, bool roundResult)
+GenTree* CodeGen::genMakeAddrOrFPstk(GenTree* tree, regMaskTP* regMaskPtr, bool roundResult)
{
*regMaskPtr = 0;
@@ -2964,7 +2964,7 @@ void CodeGen::genEmitGSCookieCheck(bool pushReg)
* Generate any side effects within the given expression tree.
*/
-void CodeGen::genEvalSideEffects(GenTreePtr tree)
+void CodeGen::genEvalSideEffects(GenTree* tree)
{
genTreeOps oper;
unsigned kind;
@@ -3098,7 +3098,7 @@ AGAIN:
* RBM_NONE if a write-barrier is not needed.
*/
-regMaskTP CodeGen::WriteBarrier(GenTreePtr tgt, GenTreePtr assignVal, regMaskTP tgtAddrReg)
+regMaskTP CodeGen::WriteBarrier(GenTree* tgt, GenTree* assignVal, regMaskTP tgtAddrReg)
{
noway_assert(assignVal->InReg());
@@ -3545,14 +3545,14 @@ void CodeGen::genJccLongLo(genTreeOps cmp, BasicBlock* jumpTrue, BasicBlock* jum
* Called by genCondJump() for TYP_LONG.
*/
-void CodeGen::genCondJumpLng(GenTreePtr cond, BasicBlock* jumpTrue, BasicBlock* jumpFalse, bool bFPTransition)
+void CodeGen::genCondJumpLng(GenTree* cond, BasicBlock* jumpTrue, BasicBlock* jumpFalse, bool bFPTransition)
{
noway_assert(jumpTrue && jumpFalse);
noway_assert((cond->gtFlags & GTF_REVERSE_OPS) == false); // Done in genCondJump()
noway_assert(cond->gtOp.gtOp1->gtType == TYP_LONG);
- GenTreePtr op1 = cond->gtOp.gtOp1;
- GenTreePtr op2 = cond->gtOp.gtOp2;
+ GenTree* op1 = cond->gtOp.gtOp1;
+ GenTree* op2 = cond->gtOp.gtOp2;
genTreeOps cmp = cond->OperGet();
regMaskTP addrReg;
@@ -3894,13 +3894,13 @@ bool CodeGen::genUse_fcomip()
* Returns the flags the following jump/set instruction should use.
*/
-emitJumpKind CodeGen::genCondSetFlags(GenTreePtr cond)
+emitJumpKind CodeGen::genCondSetFlags(GenTree* cond)
{
noway_assert(cond->OperIsCompare());
noway_assert(varTypeIsI(genActualType(cond->gtOp.gtOp1->gtType)));
- GenTreePtr op1 = cond->gtOp.gtOp1;
- GenTreePtr op2 = cond->gtOp.gtOp2;
+ GenTree* op1 = cond->gtOp.gtOp1;
+ GenTree* op2 = cond->gtOp.gtOp2;
genTreeOps cmp = cond->OperGet();
if (cond->gtFlags & GTF_REVERSE_OPS)
@@ -3981,8 +3981,8 @@ emitJumpKind CodeGen::genCondSetFlags(GenTreePtr cond)
if (op1->gtOper == GT_AND)
{
- GenTreePtr an1 = op1->gtOp.gtOp1;
- GenTreePtr an2 = op1->gtOp.gtOp2;
+ GenTree* an1 = op1->gtOp.gtOp1;
+ GenTree* an2 = op1->gtOp.gtOp2;
/* Check for the case "expr & icon" */
@@ -4670,13 +4670,13 @@ DONE_FLAGS: // We have determined what jumpKind to use
* the given relational operator yields 'true'.
*/
-void CodeGen::genCondJump(GenTreePtr cond, BasicBlock* destTrue, BasicBlock* destFalse, bool bStackFPFixup)
+void CodeGen::genCondJump(GenTree* cond, BasicBlock* destTrue, BasicBlock* destFalse, bool bStackFPFixup)
{
BasicBlock* jumpTrue;
BasicBlock* jumpFalse;
- GenTreePtr op1 = cond->gtOp.gtOp1;
- GenTreePtr op2 = cond->gtOp.gtOp2;
+ GenTree* op1 = cond->gtOp.gtOp1;
+ GenTree* op2 = cond->gtOp.gtOp2;
genTreeOps cmp = cond->OperGet();
if (destTrue)
@@ -4776,7 +4776,7 @@ void CodeGen::genCondJump(GenTreePtr cond, BasicBlock* destTrue, BasicBlock* des
#ifdef DEBUG
-void CodeGen::genStressRegs(GenTreePtr tree)
+void CodeGen::genStressRegs(GenTree* tree)
{
if (regSet.rsStressRegs() < 2)
return;
@@ -4829,8 +4829,8 @@ void CodeGen::genStressRegs(GenTreePtr tree)
if (tree->gtType == TYP_INT && tree->OperIsSimple())
{
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtOp.gtOp2;
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtOp.gtOp2;
if (op1 && (op1->InReg()))
trashRegs &= ~genRegMask(op1->gtRegNum);
if (op2 && (op2->InReg()))
@@ -4869,7 +4869,7 @@ void CodeGen::genStressRegs(GenTreePtr tree)
* Generate code for a GTK_CONST tree
*/
-void CodeGen::genCodeForTreeConst(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg)
+void CodeGen::genCodeForTreeConst(GenTree* tree, regMaskTP destReg, regMaskTP bestReg)
{
noway_assert(tree->IsCnsIntOrI());
GenTreeIntConCommon* con = tree->AsIntConCommon();
@@ -4950,7 +4950,7 @@ REG_LOADED:
* Generate code for a GTK_LEAF tree
*/
-void CodeGen::genCodeForTreeLeaf(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg)
+void CodeGen::genCodeForTreeLeaf(GenTree* tree, regMaskTP destReg, regMaskTP bestReg)
{
genTreeOps oper = tree->OperGet();
regNumber reg = DUMMY_INIT(REG_CORRUPT);
@@ -5133,11 +5133,11 @@ void CodeGen::genCodeForTreeLeaf(GenTreePtr tree, regMaskTP destReg, regMaskTP b
genCodeForTree_DONE(tree, reg);
}
-GenTreePtr CodeGen::genCodeForCommaTree(GenTreePtr tree)
+GenTree* CodeGen::genCodeForCommaTree(GenTree* tree)
{
while (tree->OperGet() == GT_COMMA)
{
- GenTreePtr op1 = tree->gtOp.gtOp1;
+ GenTree* op1 = tree->gtOp.gtOp1;
genEvalSideEffects(op1);
gcInfo.gcMarkRegPtrVal(op1);
@@ -5151,7 +5151,7 @@ GenTreePtr CodeGen::genCodeForCommaTree(GenTreePtr tree)
* Generate code for the a leaf node of type GT_JMP
*/
-void CodeGen::genCodeForTreeLeaf_GT_JMP(GenTreePtr tree)
+void CodeGen::genCodeForTreeLeaf_GT_JMP(GenTree* tree)
{
noway_assert(compiler->compCurBB->bbFlags & BBF_HAS_JMP);
@@ -5452,9 +5452,9 @@ void CodeGen::genCodeForTreeLeaf_GT_JMP(GenTreePtr tree)
* passed in pCallBackData. If the variable is assigned to, return
* Compiler::WALK_ABORT. Otherwise return Compiler::WALK_CONTINUE.
*/
-Compiler::fgWalkResult CodeGen::fgIsVarAssignedTo(GenTreePtr* pTree, Compiler::fgWalkData* data)
+Compiler::fgWalkResult CodeGen::fgIsVarAssignedTo(GenTree** pTree, Compiler::fgWalkData* data)
{
- GenTreePtr tree = *pTree;
+ GenTree* tree = *pTree;
if ((tree->OperIsAssignment()) && (tree->gtOp.gtOp1->OperGet() == GT_LCL_VAR) &&
(tree->gtOp.gtOp1->gtLclVarCommon.gtLclNum == (unsigned)(size_t)data->pCallbackData))
{
@@ -5464,7 +5464,7 @@ Compiler::fgWalkResult CodeGen::fgIsVarAssignedTo(GenTreePtr* pTree, Compiler::f
return Compiler::WALK_CONTINUE;
}
-regNumber CodeGen::genIsEnregisteredIntVariable(GenTreePtr tree)
+regNumber CodeGen::genIsEnregisteredIntVariable(GenTree* tree)
{
unsigned varNum;
LclVarDsc* varDsc;
@@ -5525,21 +5525,21 @@ void CodeGen::unspillLiveness(genLivenessSet* ls)
* Generate code for a qmark colon
*/
-void CodeGen::genCodeForQmark(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg)
+void CodeGen::genCodeForQmark(GenTree* tree, regMaskTP destReg, regMaskTP bestReg)
{
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtOp.gtOp2;
- regNumber reg;
- regMaskTP regs = regSet.rsMaskUsed;
- regMaskTP needReg = destReg;
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtOp.gtOp2;
+ regNumber reg;
+ regMaskTP regs = regSet.rsMaskUsed;
+ regMaskTP needReg = destReg;
noway_assert(compiler->compQmarkUsed);
noway_assert(tree->gtOper == GT_QMARK);
noway_assert(op1->OperIsCompare());
noway_assert(op2->gtOper == GT_COLON);
- GenTreePtr thenNode = op2->AsColon()->ThenNode();
- GenTreePtr elseNode = op2->AsColon()->ElseNode();
+ GenTree* thenNode = op2->AsColon()->ThenNode();
+ GenTree* elseNode = op2->AsColon()->ElseNode();
/* If elseNode is a Nop node you must reverse the
thenNode and elseNode prior to reaching here!
@@ -6036,17 +6036,18 @@ void CodeGen::genCodeForQmark(GenTreePtr tree, regMaskTP destReg, regMaskTP best
* genCodeForQmark to implement it using branches).
*/
-bool CodeGen::genCodeForQmarkWithCMOV(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg)
+bool CodeGen::genCodeForQmarkWithCMOV(GenTree* tree, regMaskTP destReg, regMaskTP bestReg)
{
#ifdef _TARGET_XARCH_
- GenTreePtr cond = tree->gtOp.gtOp1;
- GenTreePtr colon = tree->gtOp.gtOp2;
+ GenTree* cond = tree->gtOp.gtOp1;
+ GenTree* colon = tree->gtOp.gtOp2;
// Warning: this naming of the local vars is backwards!
- GenTreePtr thenNode = colon->gtOp.gtOp1;
- GenTreePtr elseNode = colon->gtOp.gtOp2;
- GenTreePtr alwaysNode, predicateNode;
- regNumber reg;
- regMaskTP needReg = destReg;
+ GenTree* thenNode = colon->gtOp.gtOp1;
+ GenTree* elseNode = colon->gtOp.gtOp2;
+ GenTree* alwaysNode;
+ GenTree* predicateNode;
+ regNumber reg;
+ regMaskTP needReg = destReg;
noway_assert(tree->gtOper == GT_QMARK);
noway_assert(cond->OperIsCompare());
@@ -6209,13 +6210,13 @@ bool CodeGen::genCodeForQmarkWithCMOV(GenTreePtr tree, regMaskTP destReg, regMas
}
#ifdef _TARGET_XARCH_
-void CodeGen::genCodeForMultEAX(GenTreePtr tree)
+void CodeGen::genCodeForMultEAX(GenTree* tree)
{
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtGetOp2();
- bool ovfl = tree->gtOverflow();
- regNumber reg = DUMMY_INIT(REG_CORRUPT);
- regMaskTP addrReg;
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtGetOp2();
+ bool ovfl = tree->gtOverflow();
+ regNumber reg = DUMMY_INIT(REG_CORRUPT);
+ regMaskTP addrReg;
noway_assert(tree->OperGet() == GT_MUL);
@@ -6346,10 +6347,10 @@ void CodeGen::genCodeForMultEAX(GenTreePtr tree)
#endif // _TARGET_XARCH_
#ifdef _TARGET_ARM_
-void CodeGen::genCodeForMult64(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg)
+void CodeGen::genCodeForMult64(GenTree* tree, regMaskTP destReg, regMaskTP bestReg)
{
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtGetOp2();
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtGetOp2();
noway_assert(tree->OperGet() == GT_MUL);
@@ -6438,13 +6439,13 @@ void CodeGen::genCodeForMult64(GenTreePtr tree, regMaskTP destReg, regMaskTP bes
* Handles GT_AND, GT_OR, GT_XOR, GT_ADD, GT_SUB, GT_MUL.
*/
-void CodeGen::genCodeForTreeSmpBinArithLogOp(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg)
+void CodeGen::genCodeForTreeSmpBinArithLogOp(GenTree* tree, regMaskTP destReg, regMaskTP bestReg)
{
instruction ins;
genTreeOps oper = tree->OperGet();
const var_types treeType = tree->TypeGet();
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtGetOp2();
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtGetOp2();
insFlags flags = tree->gtSetFlags() ? INS_FLAGS_SET : INS_FLAGS_DONT_CARE;
regNumber reg = DUMMY_INIT(REG_CORRUPT);
regMaskTP needReg = destReg;
@@ -7198,13 +7199,13 @@ CHK_OVF:
* Handles GT_ASG_AND, GT_ASG_OR, GT_ASG_XOR, GT_ASG_ADD, GT_ASG_SUB.
*/
-void CodeGen::genCodeForTreeSmpBinArithLogAsgOp(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg)
+void CodeGen::genCodeForTreeSmpBinArithLogAsgOp(GenTree* tree, regMaskTP destReg, regMaskTP bestReg)
{
instruction ins;
const genTreeOps oper = tree->OperGet();
const var_types treeType = tree->TypeGet();
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtGetOp2();
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtGetOp2();
insFlags flags = tree->gtSetFlags() ? INS_FLAGS_SET : INS_FLAGS_DONT_CARE;
regNumber reg = DUMMY_INIT(REG_CORRUPT);
regMaskTP needReg = destReg;
@@ -7705,12 +7706,12 @@ void CodeGen::genCodeForTreeSmpBinArithLogAsgOp(GenTreePtr tree, regMaskTP destR
* Generate code for GT_UMOD.
*/
-void CodeGen::genCodeForUnsignedMod(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg)
+void CodeGen::genCodeForUnsignedMod(GenTree* tree, regMaskTP destReg, regMaskTP bestReg)
{
assert(tree->OperGet() == GT_UMOD);
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtOp.gtOp2;
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtOp.gtOp2;
const var_types treeType = tree->TypeGet();
regMaskTP needReg = destReg;
regNumber reg;
@@ -7747,12 +7748,12 @@ void CodeGen::genCodeForUnsignedMod(GenTreePtr tree, regMaskTP destReg, regMaskT
* Generate code for GT_MOD.
*/
-void CodeGen::genCodeForSignedMod(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg)
+void CodeGen::genCodeForSignedMod(GenTree* tree, regMaskTP destReg, regMaskTP bestReg)
{
assert(tree->OperGet() == GT_MOD);
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtOp.gtOp2;
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtOp.gtOp2;
const var_types treeType = tree->TypeGet();
regMaskTP needReg = destReg;
regNumber reg;
@@ -7816,12 +7817,12 @@ void CodeGen::genCodeForSignedMod(GenTreePtr tree, regMaskTP destReg, regMaskTP
* Generate code for GT_UDIV.
*/
-void CodeGen::genCodeForUnsignedDiv(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg)
+void CodeGen::genCodeForUnsignedDiv(GenTree* tree, regMaskTP destReg, regMaskTP bestReg)
{
assert(tree->OperGet() == GT_UDIV);
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtOp.gtOp2;
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtOp.gtOp2;
const var_types treeType = tree->TypeGet();
regMaskTP needReg = destReg;
regNumber reg;
@@ -7864,12 +7865,12 @@ void CodeGen::genCodeForUnsignedDiv(GenTreePtr tree, regMaskTP destReg, regMaskT
* Generate code for GT_DIV.
*/
-void CodeGen::genCodeForSignedDiv(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg)
+void CodeGen::genCodeForSignedDiv(GenTree* tree, regMaskTP destReg, regMaskTP bestReg)
{
assert(tree->OperGet() == GT_DIV);
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtOp.gtOp2;
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtOp.gtOp2;
const var_types treeType = tree->TypeGet();
regMaskTP needReg = destReg;
regNumber reg;
@@ -7970,13 +7971,13 @@ void CodeGen::genCodeForSignedDiv(GenTreePtr tree, regMaskTP destReg, regMaskTP
* (if op2 is not a power of 2 constant).
*/
-void CodeGen::genCodeForGeneralDivide(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg)
+void CodeGen::genCodeForGeneralDivide(GenTree* tree, regMaskTP destReg, regMaskTP bestReg)
{
assert(tree->OperGet() == GT_UMOD || tree->OperGet() == GT_MOD || tree->OperGet() == GT_UDIV ||
tree->OperGet() == GT_DIV);
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtOp.gtOp2;
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtOp.gtOp2;
const var_types treeType = tree->TypeGet();
regMaskTP needReg = destReg;
regNumber reg;
@@ -8210,13 +8211,13 @@ void CodeGen::genCodeForGeneralDivide(GenTreePtr tree, regMaskTP destReg, regMas
* Generate code for an assignment shift (x <op>= ). Handles GT_ASG_LSH, GT_ASG_RSH, GT_ASG_RSZ.
*/
-void CodeGen::genCodeForAsgShift(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg)
+void CodeGen::genCodeForAsgShift(GenTree* tree, regMaskTP destReg, regMaskTP bestReg)
{
assert(tree->OperGet() == GT_ASG_LSH || tree->OperGet() == GT_ASG_RSH || tree->OperGet() == GT_ASG_RSZ);
const genTreeOps oper = tree->OperGet();
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtOp.gtOp2;
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtOp.gtOp2;
const var_types treeType = tree->TypeGet();
insFlags flags = tree->gtSetFlags() ? INS_FLAGS_SET : INS_FLAGS_DONT_CARE;
regMaskTP needReg = destReg;
@@ -8403,13 +8404,13 @@ void CodeGen::genCodeForAsgShift(GenTreePtr tree, regMaskTP destReg, regMaskTP b
* Generate code for a shift. Handles GT_LSH, GT_RSH, GT_RSZ.
*/
-void CodeGen::genCodeForShift(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg)
+void CodeGen::genCodeForShift(GenTree* tree, regMaskTP destReg, regMaskTP bestReg)
{
assert(tree->OperIsShift());
const genTreeOps oper = tree->OperGet();
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtOp.gtOp2;
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtOp.gtOp2;
const var_types treeType = tree->TypeGet();
insFlags flags = tree->gtSetFlags() ? INS_FLAGS_SET : INS_FLAGS_DONT_CARE;
regMaskTP needReg = destReg;
@@ -8568,13 +8569,13 @@ void CodeGen::genCodeForShift(GenTreePtr tree, regMaskTP destReg, regMaskTP best
* Handles GT_EQ, GT_NE, GT_LT, GT_LE, GT_GE, GT_GT.
*/
-void CodeGen::genCodeForRelop(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg)
+void CodeGen::genCodeForRelop(GenTree* tree, regMaskTP destReg, regMaskTP bestReg)
{
assert(tree->OperGet() == GT_EQ || tree->OperGet() == GT_NE || tree->OperGet() == GT_LT ||
tree->OperGet() == GT_LE || tree->OperGet() == GT_GE || tree->OperGet() == GT_GT);
const genTreeOps oper = tree->OperGet();
- GenTreePtr op1 = tree->gtOp.gtOp1;
+ GenTree* op1 = tree->gtOp.gtOp1;
const var_types treeType = tree->TypeGet();
regMaskTP needReg = destReg;
regNumber reg;
@@ -8677,7 +8678,7 @@ void CodeGen::genCodeForRelop(GenTreePtr tree, regMaskTP destReg, regMaskTP best
// Return Value:
// None
-void CodeGen::genCodeForCopyObj(GenTreePtr tree, regMaskTP destReg)
+void CodeGen::genCodeForCopyObj(GenTree* tree, regMaskTP destReg)
{
// If the value class doesn't have any fields that are GC refs or
// the target isn't on the GC-heap, we can merge it with CPBLK.
@@ -8697,8 +8698,8 @@ void CodeGen::genCodeForCopyObj(GenTreePtr tree, regMaskTP destReg)
}
#endif
assert(tree->gtOp.gtOp2->OperIsIndir());
- GenTreePtr srcObj = tree->gtOp.gtOp2->AsIndir()->Addr();
- GenTreePtr dstObj = cpObjOp->Addr();
+ GenTree* srcObj = tree->gtOp.gtOp2->AsIndir()->Addr();
+ GenTree* dstObj = cpObjOp->Addr();
noway_assert(dstObj->gtType == TYP_BYREF || dstObj->gtType == TYP_I_IMPL);
@@ -8717,8 +8718,9 @@ void CodeGen::genCodeForCopyObj(GenTreePtr tree, regMaskTP destReg)
unsigned gcPtrCount = cpObjOp->gtGcPtrCount;
assert(blkSize == cpObjOp->gtBlkSize);
- GenTreePtr treeFirst, treeSecond;
- regNumber regFirst, regSecond;
+ GenTree* treeFirst;
+ GenTree* treeSecond;
+ regNumber regFirst, regSecond;
// Check what order the object-ptrs have to be evaluated in ?
@@ -8976,26 +8978,26 @@ void CodeGen::genCodeForCopyObj(GenTreePtr tree, regMaskTP destReg)
// tree - The block assignment
// destReg - The expected destination register
//
-void CodeGen::genCodeForBlkOp(GenTreePtr tree, regMaskTP destReg)
+void CodeGen::genCodeForBlkOp(GenTree* tree, regMaskTP destReg)
{
genTreeOps oper = tree->OperGet();
- GenTreePtr dest = tree->gtOp.gtOp1;
- GenTreePtr src = tree->gtGetOp2();
+ GenTree* dest = tree->gtOp.gtOp1;
+ GenTree* src = tree->gtGetOp2();
regMaskTP needReg = destReg;
regMaskTP regs = regSet.rsMaskUsed;
- GenTreePtr opsPtr[3];
+ GenTree* opsPtr[3];
regMaskTP regsPtr[3];
- GenTreePtr destPtr;
- GenTreePtr srcPtrOrVal;
+ GenTree* destPtr;
+ GenTree* srcPtrOrVal;
noway_assert(tree->OperIsBlkOp());
- bool isCopyBlk = false;
- bool isInitBlk = false;
- bool hasGCpointer = false;
- unsigned blockSize = dest->AsBlk()->gtBlkSize;
- GenTreePtr sizeNode = nullptr;
- bool sizeIsConst = true;
+ bool isCopyBlk = false;
+ bool isInitBlk = false;
+ bool hasGCpointer = false;
+ unsigned blockSize = dest->AsBlk()->gtBlkSize;
+ GenTree* sizeNode = nullptr;
+ bool sizeIsConst = true;
if (dest->gtOper == GT_DYN_BLK)
{
sizeNode = dest->AsDynBlk()->gtDynamicSize;
@@ -9402,12 +9404,12 @@ void CodeGen::genCodeForBlkOp(GenTreePtr tree, regMaskTP destReg)
//
if (sizeIsConst && (isCopyBlk || (srcPtrOrVal->OperGet() == GT_CNS_INT)))
{
- GenTreePtr dstOp = destPtr;
- GenTreePtr srcOp = srcPtrOrVal;
- unsigned length = blockSize;
- unsigned fullStoreCount = length / TARGET_POINTER_SIZE;
- unsigned initVal = 0;
- bool useLoop = false;
+ GenTree* dstOp = destPtr;
+ GenTree* srcOp = srcPtrOrVal;
+ unsigned length = blockSize;
+ unsigned fullStoreCount = length / TARGET_POINTER_SIZE;
+ unsigned initVal = 0;
+ bool useLoop = false;
if (isInitBlk)
{
@@ -9708,12 +9710,12 @@ BasicBlock dummyBB;
#pragma warning(push)
#pragma warning(disable : 21000) // Suppress PREFast warning about overly large function
#endif
-void CodeGen::genCodeForTreeSmpOp(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg)
+void CodeGen::genCodeForTreeSmpOp(GenTree* tree, regMaskTP destReg, regMaskTP bestReg)
{
const genTreeOps oper = tree->OperGet();
const var_types treeType = tree->TypeGet();
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtGetOp2IfPresent();
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtGetOp2IfPresent();
regNumber reg = DUMMY_INIT(REG_CORRUPT);
regMaskTP regs = regSet.rsMaskUsed;
regMaskTP needReg = destReg;
@@ -9721,7 +9723,7 @@ void CodeGen::genCodeForTreeSmpOp(GenTreePtr tree, regMaskTP destReg, regMaskTP
emitAttr size;
instruction ins;
regMaskTP addrReg;
- GenTreePtr opsPtr[3];
+ GenTree* opsPtr[3];
regMaskTP regsPtr[3];
#ifdef DEBUG
@@ -10323,8 +10325,8 @@ void CodeGen::genCodeForTreeSmpOp(GenTreePtr tree, regMaskTP destReg, regMaskTP
//
// For reference, gtOp1 is the location. gtOp2 is the addend or the value.
- GenTreePtr location = op1;
- GenTreePtr value = op2;
+ GenTree* location = op1;
+ GenTree* value = op2;
// Again, a friendly reminder. IL calling convention is left to right.
if (tree->gtFlags & GTF_REVERSE_OPS)
@@ -10437,8 +10439,8 @@ void CodeGen::genCodeForTreeSmpOp(GenTreePtr tree, regMaskTP destReg, regMaskTP
case GT_ARR_LENGTH:
{
// Make the corresponding ind(a + c) node, and do codegen for that.
- GenTreePtr addr = compiler->gtNewOperNode(GT_ADD, TYP_BYREF, tree->gtArrLen.ArrRef(),
- compiler->gtNewIconNode(tree->AsArrLen()->ArrLenOffset()));
+ GenTree* addr = compiler->gtNewOperNode(GT_ADD, TYP_BYREF, tree->gtArrLen.ArrRef(),
+ compiler->gtNewIconNode(tree->AsArrLen()->ArrLenOffset()));
tree->SetOper(GT_IND);
tree->gtFlags |= GTF_IND_ARR_LEN; // Record that this node represents an array length expression.
assert(tree->TypeGet() == TYP_INT);
@@ -10471,7 +10473,7 @@ regNumber CodeGen::genIntegerCast(GenTree* tree, regMaskTP needReg, regMaskTP be
bool unsv;
bool andv = false;
regNumber reg;
- GenTreePtr op1 = tree->gtOp.gtOp1->gtEffectiveVal();
+ GenTree* op1 = tree->gtOp.gtOp1->gtEffectiveVal();
var_types dstType = tree->CastToType();
var_types srcType = op1->TypeGet();
@@ -10613,16 +10615,16 @@ regNumber CodeGen::genIntegerCast(GenTree* tree, regMaskTP needReg, regMaskTP be
return reg;
}
-void CodeGen::genCodeForNumericCast(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg)
+void CodeGen::genCodeForNumericCast(GenTree* tree, regMaskTP destReg, regMaskTP bestReg)
{
- GenTreePtr op1 = tree->gtOp.gtOp1;
- var_types dstType = tree->CastToType();
- var_types baseType = TYP_INT;
- regNumber reg = DUMMY_INIT(REG_CORRUPT);
- regMaskTP needReg = destReg;
- regMaskTP addrReg;
- emitAttr size;
- BOOL unsv;
+ GenTree* op1 = tree->gtOp.gtOp1;
+ var_types dstType = tree->CastToType();
+ var_types baseType = TYP_INT;
+ regNumber reg = DUMMY_INIT(REG_CORRUPT);
+ regMaskTP needReg = destReg;
+ regMaskTP addrReg;
+ emitAttr size;
+ BOOL unsv;
/*
* Constant casts should have been folded earlier
@@ -10658,7 +10660,7 @@ void CodeGen::genCodeForNumericCast(GenTreePtr tree, regMaskTP destReg, regMaskT
2) A long constant that is small enough to fit in an integer
*/
- GenTreePtr modop2 = op1->gtOp.gtOp2;
+ GenTree* modop2 = op1->gtOp.gtOp2;
if ((genActualType(modop2->gtType) == TYP_INT) ||
((modop2->gtOper == GT_CNS_LNG) && (modop2->gtLngCon.gtLconVal == (int)modop2->gtLngCon.gtLconVal)))
{
@@ -11061,11 +11063,11 @@ void CodeGen::genCodeForNumericCast(GenTreePtr tree, regMaskTP destReg, regMaskT
* Generate code for a leaf node of type GT_ADDR
*/
-void CodeGen::genCodeForTreeSmpOp_GT_ADDR(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg)
+void CodeGen::genCodeForTreeSmpOp_GT_ADDR(GenTree* tree, regMaskTP destReg, regMaskTP bestReg)
{
genTreeOps oper = tree->OperGet();
const var_types treeType = tree->TypeGet();
- GenTreePtr op1;
+ GenTree* op1;
regNumber reg;
regMaskTP needReg = destReg;
regMaskTP addrReg;
@@ -11196,7 +11198,7 @@ void CodeGen::genLdStFltRetRegsPromotedVar(LclVarDsc* varDsc, bool isLoadIntoFlt
}
}
-void CodeGen::genLoadIntoFltRetRegs(GenTreePtr tree)
+void CodeGen::genLoadIntoFltRetRegs(GenTree* tree)
{
assert(tree->TypeGet() == TYP_STRUCT);
assert(tree->gtOper == GT_LCL_VAR);
@@ -11226,13 +11228,13 @@ void CodeGen::genLoadIntoFltRetRegs(GenTreePtr tree)
genMarkTreeInReg(tree, REG_FLOATRET);
}
-void CodeGen::genStoreFromFltRetRegs(GenTreePtr tree)
+void CodeGen::genStoreFromFltRetRegs(GenTree* tree)
{
assert(tree->TypeGet() == TYP_STRUCT);
assert(tree->OperGet() == GT_ASG);
// LHS should be lcl var or fld.
- GenTreePtr op1 = tree->gtOp.gtOp1;
+ GenTree* op1 = tree->gtOp.gtOp1;
// TODO: We had a bug where op1 was a GT_IND, the result of morphing a GT_BOX, and not properly
// handling multiple levels of inlined functions that return HFA on the right-hand-side.
@@ -11245,7 +11247,7 @@ void CodeGen::genStoreFromFltRetRegs(GenTreePtr tree)
assert(compiler->IsHfa(compiler->lvaGetStruct(varNum)));
// The RHS should be a call.
- GenTreePtr op2 = tree->gtOp.gtOp2;
+ GenTree* op2 = tree->gtOp.gtOp2;
assert(op2->gtOper == GT_CALL);
// Generate code for call and copy the return registers into the local.
@@ -11297,12 +11299,12 @@ void CodeGen::genStoreFromFltRetRegs(GenTreePtr tree)
#pragma warning(push)
#pragma warning(disable : 21000) // Suppress PREFast warning about overly large function
#endif
-void CodeGen::genCodeForTreeSmpOpAsg(GenTreePtr tree)
+void CodeGen::genCodeForTreeSmpOpAsg(GenTree* tree)
{
noway_assert(tree->gtOper == GT_ASG);
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtOp.gtOp2;
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtOp.gtOp2;
regMaskTP needReg = RBM_ALLINT;
regMaskTP bestReg = RBM_CORRUPT;
regMaskTP addrReg = DUMMY_INIT(RBM_CORRUPT);
@@ -12029,11 +12031,11 @@ LExit:
* Generate code to complete the assignment operation
*/
-void CodeGen::genCodeForTreeSmpOpAsg_DONE_ASSG(GenTreePtr tree, regMaskTP addrReg, regNumber reg, bool ovfl)
+void CodeGen::genCodeForTreeSmpOpAsg_DONE_ASSG(GenTree* tree, regMaskTP addrReg, regNumber reg, bool ovfl)
{
const var_types treeType = tree->TypeGet();
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtOp.gtOp2;
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtOp.gtOp2;
noway_assert(op2);
if (op1->gtOper == GT_LCL_VAR || op1->gtOper == GT_REG_VAR)
@@ -12090,7 +12092,7 @@ void CodeGen::genCodeForTreeSmpOpAsg_DONE_ASSG(GenTreePtr tree, regMaskTP addrRe
* Generate code for a special op tree
*/
-void CodeGen::genCodeForTreeSpecialOp(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg)
+void CodeGen::genCodeForTreeSpecialOp(GenTree* tree, regMaskTP destReg, regMaskTP bestReg)
{
genTreeOps oper = tree->OperGet();
regNumber reg = DUMMY_INIT(REG_CORRUPT);
@@ -12142,10 +12144,10 @@ void CodeGen::genCodeForTreeSpecialOp(GenTreePtr tree, regMaskTP destReg, regMas
// As a friendly reminder. IL args are evaluated left to right.
- GenTreePtr location = tree->gtCmpXchg.gtOpLocation; // arg1
- GenTreePtr value = tree->gtCmpXchg.gtOpValue; // arg2
- GenTreePtr comparand = tree->gtCmpXchg.gtOpComparand; // arg3
- regMaskTP addrReg;
+ GenTree* location = tree->gtCmpXchg.gtOpLocation; // arg1
+ GenTree* value = tree->gtCmpXchg.gtOpValue; // arg2
+ GenTree* comparand = tree->gtCmpXchg.gtOpComparand; // arg3
+ regMaskTP addrReg;
bool isAddr = genMakeIndAddrMode(location, tree, false, /* not for LEA */
RBM_ALLINT, RegSet::KEEP_REG, &addrReg);
@@ -12279,7 +12281,7 @@ void CodeGen::genCodeForTreeSpecialOp(GenTreePtr tree, regMaskTP destReg, regMas
* register will not be consumed right away and could possibly be spilled.
*/
-void CodeGen::genCodeForTree(GenTreePtr tree, regMaskTP destReg, regMaskTP bestReg)
+void CodeGen::genCodeForTree(GenTree* tree, regMaskTP destReg, regMaskTP bestReg)
{
#if 0
if (compiler->verbose)
@@ -12690,10 +12692,10 @@ void CodeGen::genCodeForBBlist()
if (handlerGetsXcptnObj(block->bbCatchTyp))
{
- GenTreePtr firstStmt = block->FirstNonPhiDef();
+ GenTree* firstStmt = block->FirstNonPhiDef();
if (firstStmt != NULL)
{
- GenTreePtr firstTree = firstStmt->gtStmt.gtStmtExpr;
+ GenTree* firstTree = firstStmt->gtStmt.gtStmtExpr;
if (compiler->gtHasCatchArg(firstTree))
{
gcInfo.gcRegGCrefSetCur |= RBM_EXCEPTION_OBJECT;
@@ -12793,7 +12795,7 @@ void CodeGen::genCodeForBBlist()
}
#endif // FEATURE_EH_FUNCLETS
- for (GenTreePtr stmt = block->FirstNonPhiDef(); stmt; stmt = stmt->gtNext)
+ for (GenTree* stmt = block->FirstNonPhiDef(); stmt; stmt = stmt->gtNext)
{
noway_assert(stmt->gtOper == GT_STMT);
@@ -12821,7 +12823,7 @@ void CodeGen::genCodeForBBlist()
#endif // DEBUG
/* Get hold of the statement tree */
- GenTreePtr tree = stmt->gtStmt.gtStmtExpr;
+ GenTree* tree = stmt->gtStmt.gtStmtExpr;
#ifdef DEBUG
stmtNum++;
@@ -12838,7 +12840,7 @@ void CodeGen::genCodeForBBlist()
#endif
printf("Execution Order:\n");
- for (GenTreePtr treeNode = stmt->gtStmt.gtStmtList; treeNode != NULL; treeNode = treeNode->gtNext)
+ for (GenTree* treeNode = stmt->gtStmt.gtStmtList; treeNode != NULL; treeNode = treeNode->gtNext)
{
compiler->gtDispTree(treeNode, 0, NULL, true);
}
@@ -13198,7 +13200,7 @@ void CodeGen::genCodeForBBlist()
#pragma warning(push)
#pragma warning(disable : 21000) // Suppress PREFast warning about overly large function
#endif
-void CodeGen::genCodeForTreeLng(GenTreePtr tree, regMaskTP needReg, regMaskTP avoidReg)
+void CodeGen::genCodeForTreeLng(GenTree* tree, regMaskTP needReg, regMaskTP avoidReg)
{
genTreeOps oper;
unsigned kind;
@@ -13387,8 +13389,8 @@ void CodeGen::genCodeForTreeLng(GenTreePtr tree, regMaskTP needReg, regMaskTP av
bool setCarry = false;
int helper;
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtGetOp2IfPresent();
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtGetOp2IfPresent();
switch (oper)
{
@@ -13549,7 +13551,7 @@ void CodeGen::genCodeForTreeLng(GenTreePtr tree, regMaskTP needReg, regMaskTP av
if (regPair != REG_PAIR_NONE)
{
/* Swap the operands */
- GenTreePtr op = op1;
+ GenTree* op = op1;
op1 = op2;
op2 = op;
}
@@ -13572,7 +13574,7 @@ void CodeGen::genCodeForTreeLng(GenTreePtr tree, regMaskTP needReg, regMaskTP av
{
/* Generate the small RHS into a register pair */
- GenTreePtr smallOpr = op2->gtOp.gtOp1;
+ GenTree* smallOpr = op2->gtOp.gtOp1;
genComputeReg(smallOpr, 0, RegSet::ANY_REG, RegSet::KEEP_REG);
@@ -13757,8 +13759,8 @@ void CodeGen::genCodeForTreeLng(GenTreePtr tree, regMaskTP needReg, regMaskTP av
if (oper == GT_OR && op1->gtOper == GT_LSH)
{
- GenTreePtr lshLHS = op1->gtOp.gtOp1;
- GenTreePtr lshRHS = op1->gtOp.gtOp2;
+ GenTree* lshLHS = op1->gtOp.gtOp1;
+ GenTree* lshRHS = op1->gtOp.gtOp2;
if (lshLHS->gtOper == GT_CAST && lshRHS->gtOper == GT_CNS_INT && lshRHS->gtIntCon.gtIconVal == 32 &&
genTypeSize(TYP_INT) == genTypeSize(lshLHS->CastFromType()))
@@ -13780,9 +13782,9 @@ void CodeGen::genCodeForTreeLng(GenTreePtr tree, regMaskTP needReg, regMaskTP av
/* Special case: check op2 for "long(intval) & 0xFFFFFFFF" */
else if (op2->gtOper == GT_AND)
{
- GenTreePtr andLHS;
+ GenTree* andLHS;
andLHS = op2->gtOp.gtOp1;
- GenTreePtr andRHS;
+ GenTree* andRHS;
andRHS = op2->gtOp.gtOp2;
if (andLHS->gtOper == GT_CAST && andRHS->gtOper == GT_CNS_LNG &&
@@ -13876,8 +13878,8 @@ void CodeGen::genCodeForTreeLng(GenTreePtr tree, regMaskTP needReg, regMaskTP av
if (oper == GT_OR && op2->gtOper == GT_LSH)
{
- GenTreePtr lshLHS = op2->gtOp.gtOp1;
- GenTreePtr lshRHS = op2->gtOp.gtOp2;
+ GenTree* lshLHS = op2->gtOp.gtOp1;
+ GenTree* lshRHS = op2->gtOp.gtOp2;
if (lshLHS->gtOper == GT_CAST && lshRHS->gtOper == GT_CNS_INT && lshRHS->gtIntCon.gtIconVal == 32 &&
genTypeSize(TYP_INT) == genTypeSize(lshLHS->CastFromType()))
@@ -13891,8 +13893,8 @@ void CodeGen::genCodeForTreeLng(GenTreePtr tree, regMaskTP needReg, regMaskTP av
if (op1->gtOper == GT_AND)
{
- GenTreePtr andLHS = op1->gtOp.gtOp1;
- GenTreePtr andRHS = op1->gtOp.gtOp2;
+ GenTree* andLHS = op1->gtOp.gtOp1;
+ GenTree* andRHS = op1->gtOp.gtOp2;
if (andLHS->gtOper == GT_CAST && andRHS->gtOper == GT_CNS_LNG &&
andRHS->gtLngCon.gtLconVal == 0x00000000FFFFFFFF &&
@@ -14544,7 +14546,7 @@ void CodeGen::genCodeForTreeLng(GenTreePtr tree, regMaskTP needReg, regMaskTP av
addrReg = genMakeAddressable(tree, availMask, RegSet::FREE_REG);
- GenTreePtr addr = oper == GT_IND ? op1 : tree;
+ GenTree* addr = oper == GT_IND ? op1 : tree;
/* Pick a register for the value */
@@ -15079,7 +15081,7 @@ DONE:
* Generate code for a mod of a long by an int.
*/
-regPairNo CodeGen::genCodeForLongModInt(GenTreePtr tree, regMaskTP needReg)
+regPairNo CodeGen::genCodeForLongModInt(GenTree* tree, regMaskTP needReg)
{
#ifdef _TARGET_X86_
@@ -15087,8 +15089,8 @@ regPairNo CodeGen::genCodeForLongModInt(GenTreePtr tree, regMaskTP needReg)
regMaskTP addrReg;
genTreeOps oper = tree->OperGet();
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtOp.gtOp2;
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtOp.gtOp2;
/* Codegen only for Unsigned MOD */
noway_assert(oper == GT_UMOD);
@@ -15268,7 +15270,7 @@ regPairNo CodeGen::genCodeForLongModInt(GenTreePtr tree, regMaskTP needReg)
// Given a tree, return the number of registers that are currently
// used to hold integer enregistered local variables.
// Note that, an enregistered TYP_LONG can take 1 or 2 registers.
-unsigned CodeGen::genRegCountForLiveIntEnregVars(GenTreePtr tree)
+unsigned CodeGen::genRegCountForLiveIntEnregVars(GenTree* tree)
{
unsigned regCount = 0;
@@ -15309,9 +15311,9 @@ unsigned CodeGen::genRegCountForLiveIntEnregVars(GenTreePtr tree)
* Generate code for a floating-point operation.
*/
-void CodeGen::genCodeForTreeFlt(GenTreePtr tree,
- regMaskTP needReg, /* = RBM_ALLFLOAT */
- regMaskTP bestReg) /* = RBM_NONE */
+void CodeGen::genCodeForTreeFlt(GenTree* tree,
+ regMaskTP needReg, /* = RBM_ALLFLOAT */
+ regMaskTP bestReg) /* = RBM_NONE */
{
genCodeForTreeFloat(tree, needReg, bestReg);
@@ -15542,13 +15544,13 @@ void CodeGen::genTableSwitch(regNumber reg, unsigned jumpCnt, BasicBlock** jumpT
* Generate code for a switch statement.
*/
-void CodeGen::genCodeForSwitch(GenTreePtr tree)
+void CodeGen::genCodeForSwitch(GenTree* tree)
{
unsigned jumpCnt;
BasicBlock** jumpTab;
- GenTreePtr oper;
- regNumber reg;
+ GenTree* oper;
+ regNumber reg;
noway_assert(tree->gtOper == GT_SWITCH);
oper = tree->gtOp.gtOp1;
@@ -15701,9 +15703,9 @@ size_t CodeGen::genPushArgList(GenTreeCall* call)
args = &firstForObjp;
}
- GenTreePtr curr;
- var_types type;
- size_t opsz;
+ GenTree* curr;
+ var_types type;
+ size_t opsz;
for (; args; args = args->Rest())
{
@@ -15881,7 +15883,7 @@ size_t CodeGen::genPushArgList(GenTreeCall* call)
{
while (arg->gtOper == GT_COMMA)
{
- GenTreePtr op1 = arg->gtOp.gtOp1;
+ GenTree* op1 = arg->gtOp.gtOp1;
genEvalSideEffects(op1);
genUpdateLife(op1);
arg = arg->gtOp.gtOp2;
@@ -15910,7 +15912,7 @@ size_t CodeGen::genPushArgList(GenTreeCall* call)
GenTree* arg = curr;
while (arg->gtOper == GT_COMMA)
{
- GenTreePtr op1 = arg->gtOp.gtOp1;
+ GenTree* op1 = arg->gtOp.gtOp1;
genEvalSideEffects(op1);
genUpdateLife(op1);
arg = arg->gtOp.gtOp2;
@@ -15922,8 +15924,8 @@ size_t CodeGen::genPushArgList(GenTreeCall* call)
if (arg->gtOper == GT_MKREFANY)
{
- GenTreePtr op1 = arg->gtOp.gtOp1;
- GenTreePtr op2 = arg->gtOp.gtOp2;
+ GenTree* op1 = arg->gtOp.gtOp1;
+ GenTree* op2 = arg->gtOp.gtOp2;
addrReg = genMakeAddressable(op1, RBM_NONE, RegSet::KEEP_REG);
@@ -15955,7 +15957,7 @@ size_t CodeGen::genPushArgList(GenTreeCall* call)
if (arg->gtObj.gtOp1->gtOper == GT_ADDR && arg->gtObj.gtOp1->gtOp.gtOp1->gtOper == GT_LCL_VAR)
{
- GenTreePtr structLocalTree = arg->gtObj.gtOp1->gtOp.gtOp1;
+ GenTree* structLocalTree = arg->gtObj.gtOp1->gtOp.gtOp1;
unsigned structLclNum = structLocalTree->gtLclVarCommon.gtLclNum;
LclVarDsc* varDsc = &compiler->lvaTable[structLclNum];
@@ -16517,7 +16519,7 @@ size_t CodeGen::genPushArgList(GenTreeCall* call)
size_t CodeGen::genPushArgList(GenTreeCall* call)
{
GenTreeArgList* lateArgs = call->gtCallLateArgs;
- GenTreePtr curr;
+ GenTree* curr;
var_types type;
int argSize;
@@ -16711,8 +16713,8 @@ size_t CodeGen::genPushArgList(GenTreeCall* call)
{
#ifdef _TARGET_ARM_
{
- GenTreePtr curArgNode = curArgTabEntry->node;
- var_types curRegArgType = curArgNode->gtType;
+ GenTree* curArgNode = curArgTabEntry->node;
+ var_types curRegArgType = curArgNode->gtType;
assert(curRegArgType != TYP_UNDEF);
if (curRegArgType == TYP_STRUCT)
@@ -16741,7 +16743,7 @@ size_t CodeGen::genPushArgList(GenTreeCall* call)
{
for (GenTree* arg = curr; arg->gtOper == GT_COMMA; arg = arg->gtOp.gtOp2)
{
- GenTreePtr op1 = arg->gtOp.gtOp1;
+ GenTree* op1 = arg->gtOp.gtOp1;
genEvalSideEffects(op1);
genUpdateLife(op1);
@@ -16756,7 +16758,7 @@ size_t CodeGen::genPushArgList(GenTreeCall* call)
GenTree* arg = curr;
while (arg->gtOper == GT_COMMA)
{
- GenTreePtr op1 = arg->gtOp.gtOp1;
+ GenTree* op1 = arg->gtOp.gtOp1;
genEvalSideEffects(op1);
genUpdateLife(op1);
arg = arg->gtOp.gtOp2;
@@ -16776,7 +16778,7 @@ size_t CodeGen::genPushArgList(GenTreeCall* call)
// that indicate another filled slot, and "nextPromotedStructFieldVar" will be the local
// variable number of the next field variable to be copied.
LclVarDsc* promotedStructLocalVarDesc = NULL;
- GenTreePtr structLocalTree = NULL;
+ GenTree* structLocalTree = NULL;
unsigned bytesOfNextSlotOfCurPromotedStruct = TARGET_POINTER_SIZE; // Size of slot.
unsigned nextPromotedStructFieldVar = BAD_VAR_NUM;
unsigned promotedStructOffsetOfFirstStackSlot = 0;
@@ -16958,7 +16960,7 @@ size_t CodeGen::genPushArgList(GenTreeCall* call)
}
#ifdef _TARGET_ARM_
-bool CodeGen::genFillSlotFromPromotedStruct(GenTreePtr arg,
+bool CodeGen::genFillSlotFromPromotedStruct(GenTree* arg,
fgArgTabEntry* curArgTabEntry,
LclVarDsc* promotedStructLocalVarDesc,
emitAttr fieldSize,
@@ -17450,11 +17452,11 @@ bool CodeGen::genFillSlotFromPromotedStruct(GenTreePtr arg,
}
#endif // _TARGET_ARM_
-regMaskTP CodeGen::genFindDeadFieldRegs(GenTreePtr cpBlk)
+regMaskTP CodeGen::genFindDeadFieldRegs(GenTree* cpBlk)
{
noway_assert(cpBlk->OperIsCopyBlkOp()); // Precondition.
- GenTreePtr rhs = cpBlk->gtOp.gtOp1;
- regMaskTP res = 0;
+ GenTree* rhs = cpBlk->gtOp.gtOp1;
+ regMaskTP res = 0;
if (rhs->OperIsIndir())
{
GenTree* addr = rhs->AsIndir()->Addr();
@@ -17488,7 +17490,7 @@ regMaskTP CodeGen::genFindDeadFieldRegs(GenTreePtr cpBlk)
void CodeGen::SetupLateArgs(GenTreeCall* call)
{
GenTreeArgList* lateArgs;
- GenTreePtr curr;
+ GenTree* curr;
/* Generate the code to move the late arguments into registers */
@@ -17571,7 +17573,7 @@ void CodeGen::SetupLateArgs(GenTreeCall* call)
GenTree* arg = curr;
while (arg->gtOper == GT_COMMA)
{
- GenTreePtr op1 = arg->gtOp.gtOp1;
+ GenTree* op1 = arg->gtOp.gtOp1;
genEvalSideEffects(op1);
genUpdateLife(op1);
arg = arg->gtOp.gtOp2;
@@ -17653,7 +17655,7 @@ void CodeGen::SetupLateArgs(GenTreeCall* call)
LclVarDsc* promotedStructLocalVarDesc = NULL;
unsigned bytesOfNextSlotOfCurPromotedStruct = 0; // Size of slot.
unsigned nextPromotedStructFieldVar = BAD_VAR_NUM;
- GenTreePtr structLocalTree = NULL;
+ GenTree* structLocalTree = NULL;
BYTE* gcLayout = NULL;
regNumber regSrc = REG_NA;
@@ -18084,7 +18086,7 @@ void CodeGen::SetupLateArgs(GenTreeCall* call)
// been pushed onto the stack, but *no* registers have been marked
// as 'in-use', that is the responsibility of the caller.
//
-void CodeGen::PushMkRefAnyArg(GenTreePtr mkRefAnyTree, fgArgTabEntry* curArgTabEntry, regMaskTP regNeedMask)
+void CodeGen::PushMkRefAnyArg(GenTree* mkRefAnyTree, fgArgTabEntry* curArgTabEntry, regMaskTP regNeedMask)
{
regNumber regNum = curArgTabEntry->regNum;
regNumber regNum2;
@@ -18138,7 +18140,7 @@ void CodeGen::PushMkRefAnyArg(GenTreePtr mkRefAnyTree, fgArgTabEntry* curArgTabE
// as being used, so we don't want to double-count this one.
if (arg1RegMask != 0)
{
- GenTreePtr op1 = mkRefAnyTree->gtOp.gtOp1;
+ GenTree* op1 = mkRefAnyTree->gtOp.gtOp1;
if (op1->gtFlags & GTF_SPILLED)
{
/* The register that we loaded arg1 into has been spilled -- reload it back into the correct arg register */
@@ -18168,7 +18170,7 @@ regMaskTP CodeGen::genLoadIndirectCallTarget(GenTreeCall* call)
*/
struct
{
- GenTreePtr node;
+ GenTree* node;
union {
regNumber regNum;
regPairNo regPair;
@@ -18182,9 +18184,9 @@ regMaskTP CodeGen::genLoadIndirectCallTarget(GenTreeCall* call)
regMaskTP argRegs = RBM_NONE;
for (regIndex = 0; regIndex < MAX_REG_ARG; regIndex++)
{
- regMaskTP mask;
- regNumber regNum = genMapRegArgNumToRegNum(regIndex, TYP_INT);
- GenTreePtr argTree = regSet.rsUsedTree[regNum];
+ regMaskTP mask;
+ regNumber regNum = genMapRegArgNumToRegNum(regIndex, TYP_INT);
+ GenTree* argTree = regSet.rsUsedTree[regNum];
regArgTab[regIndex].node = argTree;
if ((argTree != NULL) && (argTree->gtType != TYP_STRUCT)) // We won't spill the struct
{
@@ -18214,7 +18216,7 @@ regMaskTP CodeGen::genLoadIndirectCallTarget(GenTreeCall* call)
for (regIndex = 0; regIndex < MAX_REG_ARG; regIndex++)
{
- GenTreePtr argTree = regArgTab[regIndex].node;
+ GenTree* argTree = regArgTab[regIndex].node;
if ((argTree != NULL) && (argTree->gtFlags & GTF_SPILLED))
{
assert(argTree->gtType != TYP_STRUCT); // We currently don't support spilling structs in argument registers
@@ -19269,8 +19271,8 @@ regMaskTP CodeGen::genCodeForCall(GenTreeCall* call, bool valUsed)
//------------------------------------------------------
// Non-virtual indirect calls via the P/Invoke stub
- GenTreePtr cookie = call->gtCallCookie;
- GenTreePtr target = call->gtCallAddr;
+ GenTree* cookie = call->gtCallCookie;
+ GenTree* target = call->gtCallAddr;
noway_assert((call->gtFlags & GTF_CALL_POP_ARGS) == 0);
@@ -20367,7 +20369,7 @@ void CodeGen::genCreateAndStoreGCInfoX64(unsigned codeSize, unsigned prologSize
* For CEE_LOCALLOC
*/
-regNumber CodeGen::genLclHeap(GenTreePtr size)
+regNumber CodeGen::genLclHeap(GenTree* size)
{
noway_assert((genActualType(size->gtType) == TYP_INT) || (genActualType(size->gtType) == TYP_I_IMPL));
@@ -20732,7 +20734,7 @@ DONE:
* constant operand, and one that's in a register. Thus, the only thing we
* need to determine is whether the register holding op1 is dead.
*/
-bool CodeGen::genRegTrashable(regNumber reg, GenTreePtr tree)
+bool CodeGen::genRegTrashable(regNumber reg, GenTree* tree)
{
regMaskTP vars;
regMaskTP mask = genRegMask(reg);
@@ -20741,7 +20743,7 @@ bool CodeGen::genRegTrashable(regNumber reg, GenTreePtr tree)
return false;
assert(tree->gtOper == GT_ADD);
- GenTreePtr regValTree = tree->gtOp.gtOp1;
+ GenTree* regValTree = tree->gtOp.gtOp1;
if (!tree->gtOp.gtOp2->IsCnsIntOrI())
{
regValTree = tree->gtOp.gtOp2;
@@ -20826,12 +20828,12 @@ bool CodeGen::genRegTrashable(regNumber reg, GenTreePtr tree)
*/
-GenTreePtr Compiler::fgLegacyPerStatementLocalVarLiveness(GenTreePtr startNode, // The node to start walking with.
- GenTreePtr relopNode) // The node before the startNode.
- // (It should either be NULL or
- // a GTF_RELOP_QMARK node.)
+GenTree* Compiler::fgLegacyPerStatementLocalVarLiveness(GenTree* startNode, // The node to start walking with.
+ GenTree* relopNode) // The node before the startNode.
+ // (It should either be NULL or
+ // a GTF_RELOP_QMARK node.)
{
- GenTreePtr tree;
+ GenTree* tree;
VARSET_TP defSet_BeforeSplit(VarSetOps::MakeCopy(this, fgCurDefSet)); // Store the current fgCurDefSet and
// fgCurUseSet so
@@ -20955,7 +20957,7 @@ GenTreePtr Compiler::fgLegacyPerStatementLocalVarLiveness(GenTreePtr startNode,
{
GenTreeLclVarCommon* dummyLclVarTree = NULL;
bool dummyIsEntire = false;
- GenTreePtr addrArg = tree->gtOp.gtOp1->gtEffectiveVal(/*commaOnly*/ true);
+ GenTree* addrArg = tree->gtOp.gtOp1->gtEffectiveVal(/*commaOnly*/ true);
if (!addrArg->DefinesLocalAddr(this, /*width doesn't matter*/ 0, &dummyLclVarTree, &dummyIsEntire))
{
fgCurMemoryUse |= memoryKindSet(GcHeap, ByrefExposed);
@@ -21842,7 +21844,7 @@ void CodeGen::genFlagsEqualToNone()
* contents of the given register.
*/
-void CodeGen::genFlagsEqualToReg(GenTreePtr tree, regNumber reg)
+void CodeGen::genFlagsEqualToReg(GenTree* tree, regNumber reg)
{
genFlagsEqLoc.CaptureLocation(getEmitter());
genFlagsEqReg = reg;
@@ -21866,7 +21868,7 @@ void CodeGen::genFlagsEqualToReg(GenTreePtr tree, regNumber reg)
* contents of the given local variable.
*/
-void CodeGen::genFlagsEqualToVar(GenTreePtr tree, unsigned var)
+void CodeGen::genFlagsEqualToVar(GenTree* tree, unsigned var)
{
genFlagsEqLoc.CaptureLocation(getEmitter());
genFlagsEqVar = var;
@@ -21917,9 +21919,9 @@ bool CodeGen::genFlagsAreVar(unsigned var)
* This utility function returns true iff the execution path from "from"
* (inclusive) to "to" (exclusive) contains a death of the given var
*/
-bool CodeGen::genContainsVarDeath(GenTreePtr from, GenTreePtr to, unsigned varNum)
+bool CodeGen::genContainsVarDeath(GenTree* from, GenTree* to, unsigned varNum)
{
- GenTreePtr tree;
+ GenTree* tree;
for (tree = from; tree != NULL && tree != to; tree = tree->gtNext)
{
if (tree->IsLocal() && (tree->gtFlags & GTF_VAR_DEATH))
diff --git a/src/jit/codegenlinear.cpp b/src/jit/codegenlinear.cpp
index afffe013bd..fb8b6b5fe0 100644
--- a/src/jit/codegenlinear.cpp
+++ b/src/jit/codegenlinear.cpp
@@ -690,7 +690,7 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
// Return Value:
// The assigned regNumber
//
-regNumber CodeGenInterface::genGetAssignedReg(GenTreePtr tree)
+regNumber CodeGenInterface::genGetAssignedReg(GenTree* tree)
{
return tree->gtRegNum;
}
@@ -707,7 +707,7 @@ regNumber CodeGenInterface::genGetAssignedReg(GenTreePtr tree)
// Assumptions:
// The lclVar must be a register candidate (lvRegCandidate)
-void CodeGen::genSpillVar(GenTreePtr tree)
+void CodeGen::genSpillVar(GenTree* tree)
{
unsigned varNum = tree->gtLclVarCommon.gtLclNum;
LclVarDsc* varDsc = &(compiler->lvaTable[varNum]);
@@ -790,7 +790,7 @@ void CodeGen::genSpillVar(GenTreePtr tree)
// tree - the lclVar node
//
// inline
-void CodeGenInterface::genUpdateVarReg(LclVarDsc* varDsc, GenTreePtr tree)
+void CodeGenInterface::genUpdateVarReg(LclVarDsc* varDsc, GenTree* tree)
{
assert(tree->OperIsScalarLocal() || (tree->gtOper == GT_COPY));
varDsc->lvRegNum = tree->gtRegNum;
@@ -816,8 +816,8 @@ GenTree* sameRegAsDst(GenTree* tree, GenTree*& other /*out*/)
return nullptr;
}
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtOp.gtOp2;
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtOp.gtOp2;
if (op1->gtRegNum == tree->gtRegNum)
{
other = op2;
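A short usage note for sameRegAsDst, whose signature appears in the hunk header above: it returns the operand already sitting in the destination register (or nullptr) and hands back the remaining operand through its out-parameter. The call site below is hypothetical, written only to show the shape of the contract with the new GenTree* spelling.

    // Hypothetical caller; 'node' stands for some GenTreeOp being generated.
    GenTree* other = nullptr;
    GenTree* keep  = sameRegAsDst(node, other);
    if (keep != nullptr)
    {
        // 'keep' already occupies node->gtRegNum; only 'other' may need a move.
    }
    else
    {
        // Neither operand shares a register with the destination.
    }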
diff --git a/src/jit/codegenlinear.h b/src/jit/codegenlinear.h
index f7d43d73e5..4cc1f79a70 100644
--- a/src/jit/codegenlinear.h
+++ b/src/jit/codegenlinear.h
@@ -10,9 +10,9 @@
#ifndef LEGACY_BACKEND // Not necessary (it's this way in the #include location), but helpful to IntelliSense
-void genSetRegToConst(regNumber targetReg, var_types targetType, GenTreePtr tree);
-void genCodeForTreeNode(GenTreePtr treeNode);
-void genCodeForBinary(GenTreePtr treeNode);
+void genSetRegToConst(regNumber targetReg, var_types targetType, GenTree* tree);
+void genCodeForTreeNode(GenTree* treeNode);
+void genCodeForBinary(GenTree* treeNode);
#if defined(_TARGET_X86_)
void genCodeForLongUMod(GenTreeOp* node);
@@ -22,7 +22,7 @@ void genCodeForDivMod(GenTreeOp* treeNode);
void genCodeForMul(GenTreeOp* treeNode);
void genCodeForMulHi(GenTreeOp* treeNode);
void genLeaInstruction(GenTreeAddrMode* lea);
-void genSetRegToCond(regNumber dstReg, GenTreePtr tree);
+void genSetRegToCond(regNumber dstReg, GenTree* tree);
#if defined(_TARGET_ARMARCH_)
void genScaledAdd(emitAttr attr, regNumber targetReg, regNumber baseReg, regNumber indexReg, int scale);
@@ -33,16 +33,16 @@ void genCodeForMulLong(GenTreeMultiRegOp* treeNode);
#endif // _TARGET_ARM_
#if !defined(_TARGET_64BIT_)
-void genLongToIntCast(GenTreePtr treeNode);
+void genLongToIntCast(GenTree* treeNode);
#endif
-void genIntToIntCast(GenTreePtr treeNode);
-void genFloatToFloatCast(GenTreePtr treeNode);
-void genFloatToIntCast(GenTreePtr treeNode);
-void genIntToFloatCast(GenTreePtr treeNode);
-void genCkfinite(GenTreePtr treeNode);
+void genIntToIntCast(GenTree* treeNode);
+void genFloatToFloatCast(GenTree* treeNode);
+void genFloatToIntCast(GenTree* treeNode);
+void genIntToFloatCast(GenTree* treeNode);
+void genCkfinite(GenTree* treeNode);
void genCodeForCompare(GenTreeOp* tree);
-void genIntrinsic(GenTreePtr treeNode);
+void genIntrinsic(GenTree* treeNode);
void genPutArgStk(GenTreePutArgStk* treeNode);
void genPutArgReg(GenTreeOp* tree);
#ifdef _TARGET_ARM_
@@ -50,13 +50,13 @@ void genPutArgSplit(GenTreePutArgSplit* treeNode);
#endif
#if defined(_TARGET_XARCH_)
-unsigned getBaseVarForPutArgStk(GenTreePtr treeNode);
+unsigned getBaseVarForPutArgStk(GenTree* treeNode);
#endif // _TARGET_XARCH_
unsigned getFirstArgWithStackSlot();
-void genCompareFloat(GenTreePtr treeNode);
-void genCompareInt(GenTreePtr treeNode);
+void genCompareFloat(GenTree* treeNode);
+void genCompareInt(GenTree* treeNode);
#ifdef FEATURE_SIMD
enum SIMDScalarMoveType
@@ -161,7 +161,7 @@ regNumber genConsumeReg(GenTree* tree);
void genCopyRegIfNeeded(GenTree* tree, regNumber needReg);
void genConsumeRegAndCopy(GenTree* tree, regNumber needReg);
-void genConsumeIfReg(GenTreePtr tree)
+void genConsumeIfReg(GenTree* tree)
{
if (!tree->isContained())
{
@@ -169,7 +169,7 @@ void genConsumeIfReg(GenTreePtr tree)
}
}
-void genRegCopy(GenTreePtr tree);
+void genRegCopy(GenTree* tree);
void genTransferRegGCState(regNumber dst, regNumber src);
void genConsumeAddress(GenTree* addr);
void genConsumeAddrMode(GenTreeAddrMode* mode);
@@ -189,10 +189,10 @@ void genConsumeRegs(GenTree* tree);
void genConsumeOperands(GenTreeOp* tree);
void genEmitGSCookieCheck(bool pushReg);
void genSetRegToIcon(regNumber reg, ssize_t val, var_types type = TYP_INT, insFlags flags = INS_FLAGS_DONT_CARE);
-void genCodeForShift(GenTreePtr tree);
+void genCodeForShift(GenTree* tree);
#if defined(_TARGET_X86_) || defined(_TARGET_ARM_)
-void genCodeForShiftLong(GenTreePtr tree);
+void genCodeForShiftLong(GenTree* tree);
#endif
#ifdef _TARGET_XARCH_
@@ -297,9 +297,9 @@ void genCodeForArrOffset(GenTreeArrOffs* treeNode);
instruction genGetInsForOper(genTreeOps oper, var_types type);
bool genEmitOptimizedGCWriteBarrier(GCInfo::WriteBarrierForm writeBarrierForm, GenTree* addr, GenTree* data);
void genCallInstruction(GenTreeCall* call);
-void genJmpMethod(GenTreePtr jmp);
+void genJmpMethod(GenTree* jmp);
BasicBlock* genCallFinally(BasicBlock* block);
-void genCodeForJumpTrue(GenTreePtr tree);
+void genCodeForJumpTrue(GenTree* tree);
#ifdef _TARGET_ARM64_
void genCodeForJumpCompare(GenTreeOp* tree);
#endif // _TARGET_ARM64_
@@ -310,17 +310,17 @@ void genEHCatchRet(BasicBlock* block);
void genEHFinallyOrFilterRet(BasicBlock* block);
#endif // !FEATURE_EH_FUNCLETS
-void genMultiRegCallStoreToLocal(GenTreePtr treeNode);
+void genMultiRegCallStoreToLocal(GenTree* treeNode);
// Deals with codegen for multi-register struct returns.
-bool isStructReturn(GenTreePtr treeNode);
-void genStructReturn(GenTreePtr treeNode);
+bool isStructReturn(GenTree* treeNode);
+void genStructReturn(GenTree* treeNode);
-void genReturn(GenTreePtr treeNode);
+void genReturn(GenTree* treeNode);
-void genLclHeap(GenTreePtr tree);
+void genLclHeap(GenTree* tree);
-bool genIsRegCandidateLocal(GenTreePtr tree)
+bool genIsRegCandidateLocal(GenTree* tree)
{
if (!tree->IsLocal())
{
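A small usage sketch for the declarations above, e.g. genConsumeIfReg, which (as shown in the codegenlinear.cpp hunks) consumes a node's register only when the node is not contained. The wrapper function below is hypothetical — in the real sources these are CodeGen members — and the gtOp1/gtOp2 field names are taken from other hunks in this change.

    // Hypothetical helper, for illustration only.
    void consumeBinaryOperandsSketch(GenTreeOp* tree)
    {
        GenTree* op1 = tree->gtOp1;
        GenTree* op2 = tree->gtOp2;
        genConsumeIfReg(op1); // consumes op1's register only if op1 is not contained
        genConsumeIfReg(op2); // likewise for op2
    }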
diff --git a/src/jit/codegenxarch.cpp b/src/jit/codegenxarch.cpp
index 32eb48290b..5726f2af88 100644
--- a/src/jit/codegenxarch.cpp
+++ b/src/jit/codegenxarch.cpp
@@ -438,7 +438,7 @@ void CodeGen::instGen_Set_Reg_To_Imm(emitAttr size, regNumber reg, ssize_t imm,
* specified by the constant (GT_CNS_INT or GT_CNS_DBL) in 'tree'. This does not call
* genProduceReg() on the target register.
*/
-void CodeGen::genSetRegToConst(regNumber targetReg, var_types targetType, GenTreePtr tree)
+void CodeGen::genSetRegToConst(regNumber targetReg, var_types targetType, GenTree* tree)
{
switch (tree->gtOper)
{
@@ -506,7 +506,7 @@ void CodeGen::genCodeForNegNot(GenTree* tree)
}
else
{
- GenTreePtr operand = tree->gtGetOp1();
+ GenTree* operand = tree->gtGetOp1();
assert(operand->isUsedFromReg());
regNumber operandReg = genConsumeReg(operand);
@@ -801,8 +801,8 @@ void CodeGen::genCodeForBinary(GenTree* treeNode)
oper == GT_SUB_LO || oper == GT_SUB_HI || oper == GT_ADD || oper == GT_SUB);
#endif // !defined(_TARGET_64BIT_)
- GenTreePtr op1 = treeNode->gtGetOp1();
- GenTreePtr op2 = treeNode->gtGetOp2();
+ GenTree* op1 = treeNode->gtGetOp1();
+ GenTree* op2 = treeNode->gtGetOp2();
// Commutative operations can mark op1 as contained or reg-optional to generate "op reg, memop/immed"
if (!op1->isUsedFromReg())
@@ -823,8 +823,8 @@ void CodeGen::genCodeForBinary(GenTree* treeNode)
regNumber op1reg = op1->isUsedFromReg() ? op1->gtRegNum : REG_NA;
regNumber op2reg = op2->isUsedFromReg() ? op2->gtRegNum : REG_NA;
- GenTreePtr dst;
- GenTreePtr src;
+ GenTree* dst;
+ GenTree* src;
// This is the case of reg1 = reg1 op reg2
// We're ready to emit the instruction without any moves
@@ -1054,7 +1054,7 @@ void CodeGen::genCodeForMul(GenTreeOp* treeNode)
// Otherwise returns false.
// For other platforms always returns false.
//
-bool CodeGen::isStructReturn(GenTreePtr treeNode)
+bool CodeGen::isStructReturn(GenTree* treeNode)
{
// This method could be called for 'treeNode' of GT_RET_FILT or GT_RETURN.
// For the GT_RET_FILT, the return is always
@@ -1084,10 +1084,10 @@ bool CodeGen::isStructReturn(GenTreePtr treeNode)
//
// Assumption:
// op1 of GT_RETURN node is either GT_LCL_VAR or multi-reg GT_CALL
-void CodeGen::genStructReturn(GenTreePtr treeNode)
+void CodeGen::genStructReturn(GenTree* treeNode)
{
assert(treeNode->OperGet() == GT_RETURN);
- GenTreePtr op1 = treeNode->gtGetOp1();
+ GenTree* op1 = treeNode->gtGetOp1();
#ifdef FEATURE_UNIX_AMD64_STRUCT_PASSING
if (op1->OperGet() == GT_LCL_VAR)
@@ -1262,11 +1262,11 @@ void CodeGen::genStructReturn(GenTreePtr treeNode)
// Return Value:
// None
//
-void CodeGen::genReturn(GenTreePtr treeNode)
+void CodeGen::genReturn(GenTree* treeNode)
{
assert(treeNode->OperGet() == GT_RETURN || treeNode->OperGet() == GT_RETFILT);
- GenTreePtr op1 = treeNode->gtGetOp1();
- var_types targetType = treeNode->TypeGet();
+ GenTree* op1 = treeNode->gtGetOp1();
+ var_types targetType = treeNode->TypeGet();
// A void GT_RETFILT is the end of a finally. For non-void filter returns we need to load the result in the return
// register, if it's not already there. The processing is the same as GT_RETURN. For filters, the IL spec says the
@@ -1450,8 +1450,8 @@ void CodeGen::genCodeForCompare(GenTreeOp* tree)
// TODO-XArch-CQ: Check for the case where we can simply transfer the carry bit to a register
// (signed < or >= where targetReg != REG_NA)
- GenTreePtr op1 = tree->gtOp1;
- var_types op1Type = op1->TypeGet();
+ GenTree* op1 = tree->gtOp1;
+ var_types op1Type = op1->TypeGet();
if (varTypeIsFloating(op1Type))
{
@@ -1496,7 +1496,7 @@ void CodeGen::genCodeForBT(GenTreeOp* bt)
// Return Value:
// None
//
-void CodeGen::genCodeForJumpTrue(GenTreePtr tree)
+void CodeGen::genCodeForJumpTrue(GenTree* tree)
{
GenTree* cmp = tree->gtOp.gtOp1;
@@ -1629,7 +1629,7 @@ void CodeGen::genCodeForReturnTrap(GenTreeOp* tree)
* Preconditions: All operands have been evaluated
*
*/
-void CodeGen::genCodeForTreeNode(GenTreePtr treeNode)
+void CodeGen::genCodeForTreeNode(GenTree* treeNode)
{
regNumber targetReg;
#if !defined(_TARGET_64BIT_)
@@ -2091,7 +2091,7 @@ void CodeGen::genCodeForTreeNode(GenTreePtr treeNode)
// The child of store is a multi-reg call node.
// genProduceReg() on treeNode is made by caller of this routine.
//
-void CodeGen::genMultiRegCallStoreToLocal(GenTreePtr treeNode)
+void CodeGen::genMultiRegCallStoreToLocal(GenTree* treeNode)
{
assert(treeNode->OperGet() == GT_STORE_LCL_VAR);
@@ -2274,12 +2274,12 @@ void CodeGen::genMultiRegCallStoreToLocal(GenTreePtr treeNode)
// is defined by convention relative to other items), and is used by the GC to find the
// "base" stack pointer in functions with localloc.
//
-void CodeGen::genLclHeap(GenTreePtr tree)
+void CodeGen::genLclHeap(GenTree* tree)
{
assert(tree->OperGet() == GT_LCLHEAP);
assert(compiler->compLocallocUsed);
- GenTreePtr size = tree->gtOp.gtOp1;
+ GenTree* size = tree->gtOp.gtOp1;
noway_assert((genActualType(size->gtType) == TYP_INT) || (genActualType(size->gtType) == TYP_I_IMPL));
regNumber targetReg = tree->gtRegNum;
@@ -2706,9 +2706,9 @@ void CodeGen::genCodeForStoreBlk(GenTreeBlk* storeBlkNode)
void CodeGen::genCodeForInitBlkRepStos(GenTreeBlk* initBlkNode)
{
// Make sure we got the arguments of the initblk/initobj operation in the right registers.
- unsigned size = initBlkNode->Size();
- GenTreePtr dstAddr = initBlkNode->Addr();
- GenTreePtr initVal = initBlkNode->Data();
+ unsigned size = initBlkNode->Size();
+ GenTree* dstAddr = initBlkNode->Addr();
+ GenTree* initVal = initBlkNode->Data();
if (initVal->OperIsInitVal())
{
initVal = initVal->gtGetOp1();
@@ -2744,9 +2744,9 @@ void CodeGen::genCodeForInitBlkRepStos(GenTreeBlk* initBlkNode)
void CodeGen::genCodeForInitBlkUnroll(GenTreeBlk* initBlkNode)
{
// Make sure we got the arguments of the initblk/initobj operation in the right registers
- unsigned size = initBlkNode->Size();
- GenTreePtr dstAddr = initBlkNode->Addr();
- GenTreePtr initVal = initBlkNode->Data();
+ unsigned size = initBlkNode->Size();
+ GenTree* dstAddr = initBlkNode->Addr();
+ GenTree* initVal = initBlkNode->Data();
if (initVal->OperIsInitVal())
{
initVal = initVal->gtGetOp1();
@@ -2840,9 +2840,9 @@ void CodeGen::genCodeForInitBlk(GenTreeBlk* initBlkNode)
{
#ifdef _TARGET_AMD64_
// Make sure we got the arguments of the initblk operation in the right registers
- unsigned blockSize = initBlkNode->Size();
- GenTreePtr dstAddr = initBlkNode->Addr();
- GenTreePtr initVal = initBlkNode->Data();
+ unsigned blockSize = initBlkNode->Size();
+ GenTree* dstAddr = initBlkNode->Addr();
+ GenTree* initVal = initBlkNode->Data();
if (initVal->OperIsInitVal())
{
initVal = initVal->gtGetOp1();
@@ -2922,10 +2922,10 @@ void CodeGen::genCodeForStoreOffset(instruction ins, emitAttr size, regNumber sr
void CodeGen::genCodeForCpBlkUnroll(GenTreeBlk* cpBlkNode)
{
// Make sure we got the arguments of the cpblk operation in the right registers
- unsigned size = cpBlkNode->Size();
- GenTreePtr dstAddr = cpBlkNode->Addr();
- GenTreePtr source = cpBlkNode->Data();
- GenTreePtr srcAddr = nullptr;
+ unsigned size = cpBlkNode->Size();
+ GenTree* dstAddr = cpBlkNode->Addr();
+ GenTree* source = cpBlkNode->Data();
+ GenTree* srcAddr = nullptr;
assert(size <= CPBLK_UNROLL_LIMIT);
emitter* emit = getEmitter();
@@ -3033,10 +3033,10 @@ void CodeGen::genCodeForCpBlkUnroll(GenTreeBlk* cpBlkNode)
void CodeGen::genCodeForCpBlkRepMovs(GenTreeBlk* cpBlkNode)
{
// Make sure we got the arguments of the cpblk operation in the right registers
- unsigned size = cpBlkNode->Size();
- GenTreePtr dstAddr = cpBlkNode->Addr();
- GenTreePtr source = cpBlkNode->Data();
- GenTreePtr srcAddr = nullptr;
+ unsigned size = cpBlkNode->Size();
+ GenTree* dstAddr = cpBlkNode->Addr();
+ GenTree* source = cpBlkNode->Data();
+ GenTree* srcAddr = nullptr;
#ifdef DEBUG
assert(dstAddr->isUsedFromReg());
@@ -3201,7 +3201,7 @@ unsigned CodeGen::genMove1IfNeeded(unsigned size, regNumber intTmpReg, GenTree*
//
void CodeGen::genStructPutArgUnroll(GenTreePutArgStk* putArgNode)
{
- GenTreePtr src = putArgNode->gtOp.gtOp1;
+ GenTree* src = putArgNode->gtOp.gtOp1;
// We will never call this method for SIMD types, which are stored directly
// in genPutStructArgStk().
noway_assert(src->TypeGet() == TYP_STRUCT);
@@ -3322,7 +3322,7 @@ void CodeGen::genStructPutArgUnroll(GenTreePutArgStk* putArgNode)
//
void CodeGen::genStructPutArgRepMovs(GenTreePutArgStk* putArgNode)
{
- GenTreePtr srcAddr = putArgNode->gtGetOp1();
+ GenTree* srcAddr = putArgNode->gtGetOp1();
assert(srcAddr->TypeGet() == TYP_STRUCT);
assert(putArgNode->getArgSize() > CPBLK_UNROLL_LIMIT);
@@ -3395,11 +3395,11 @@ void CodeGen::genClearStackVec3ArgUpperBits()
void CodeGen::genCodeForCpObj(GenTreeObj* cpObjNode)
{
// Make sure we got the arguments of the cpobj operation in the right registers
- GenTreePtr dstAddr = cpObjNode->Addr();
- GenTreePtr source = cpObjNode->Data();
- GenTreePtr srcAddr = nullptr;
- var_types srcAddrType = TYP_BYREF;
- bool sourceIsLocal = false;
+ GenTree* dstAddr = cpObjNode->Addr();
+ GenTree* source = cpObjNode->Data();
+ GenTree* srcAddr = nullptr;
+ var_types srcAddrType = TYP_BYREF;
+ bool sourceIsLocal = false;
assert(source->isContained());
if (source->gtOper == GT_IND)
@@ -3553,10 +3553,10 @@ void CodeGen::genCodeForCpBlk(GenTreeBlk* cpBlkNode)
{
#ifdef _TARGET_AMD64_
// Make sure we got the arguments of the cpblk operation in the right registers
- unsigned blockSize = cpBlkNode->Size();
- GenTreePtr dstAddr = cpBlkNode->Addr();
- GenTreePtr source = cpBlkNode->Data();
- GenTreePtr srcAddr = nullptr;
+ unsigned blockSize = cpBlkNode->Size();
+ GenTree* dstAddr = cpBlkNode->Addr();
+ GenTree* source = cpBlkNode->Data();
+ GenTree* srcAddr = nullptr;
// Size goes in arg2
if (blockSize != 0)
@@ -3720,9 +3720,9 @@ void CodeGen::genCodeForCmpXchg(GenTreeCmpXchg* tree)
var_types targetType = tree->TypeGet();
regNumber targetReg = tree->gtRegNum;
- GenTreePtr location = tree->gtOpLocation; // arg1
- GenTreePtr value = tree->gtOpValue; // arg2
- GenTreePtr comparand = tree->gtOpComparand; // arg3
+ GenTree* location = tree->gtOpLocation; // arg1
+ GenTree* value = tree->gtOpValue; // arg2
+ GenTree* comparand = tree->gtOpComparand; // arg3
assert(location->gtRegNum != REG_NA && location->gtRegNum != REG_RAX);
assert(value->gtRegNum != REG_NA && value->gtRegNum != REG_RAX);
@@ -3754,7 +3754,7 @@ void CodeGen::genCodeForCmpXchg(GenTreeCmpXchg* tree)
}
// generate code for BoundsCheck nodes
-void CodeGen::genRangeCheck(GenTreePtr oper)
+void CodeGen::genRangeCheck(GenTree* oper)
{
#ifdef FEATURE_SIMD
noway_assert(oper->OperGet() == GT_ARR_BOUNDS_CHECK || oper->OperGet() == GT_SIMD_CHK);
@@ -3764,10 +3764,10 @@ void CodeGen::genRangeCheck(GenTreePtr oper)
GenTreeBoundsChk* bndsChk = oper->AsBoundsChk();
- GenTreePtr arrIndex = bndsChk->gtIndex;
- GenTreePtr arrLen = bndsChk->gtArrLen;
- GenTreePtr arrRef = nullptr;
- int lenOffset = 0;
+ GenTree* arrIndex = bndsChk->gtIndex;
+ GenTree* arrLen = bndsChk->gtArrLen;
+ GenTree* arrRef = nullptr;
+ int lenOffset = 0;
GenTree * src1, *src2;
emitJumpKind jmpKind;
@@ -3912,8 +3912,8 @@ unsigned CodeGen::genOffsetOfMDArrayDimensionSize(var_types elemType, unsigned r
void CodeGen::genCodeForArrIndex(GenTreeArrIndex* arrIndex)
{
- GenTreePtr arrObj = arrIndex->ArrObj();
- GenTreePtr indexNode = arrIndex->IndexExpr();
+ GenTree* arrObj = arrIndex->ArrObj();
+ GenTree* indexNode = arrIndex->IndexExpr();
regNumber arrReg = genConsumeReg(arrObj);
regNumber indexReg = genConsumeReg(indexNode);
@@ -3958,9 +3958,9 @@ void CodeGen::genCodeForArrIndex(GenTreeArrIndex* arrIndex)
void CodeGen::genCodeForArrOffset(GenTreeArrOffs* arrOffset)
{
- GenTreePtr offsetNode = arrOffset->gtOffset;
- GenTreePtr indexNode = arrOffset->gtIndex;
- GenTreePtr arrObj = arrOffset->gtArrObj;
+ GenTree* offsetNode = arrOffset->gtOffset;
+ GenTree* indexNode = arrOffset->gtIndex;
+ GenTree* arrObj = arrOffset->gtArrObj;
regNumber tgtReg = arrOffset->gtRegNum;
assert(tgtReg != REG_NA);
@@ -4124,7 +4124,7 @@ instruction CodeGen::genGetInsForOper(genTreeOps oper, var_types type)
// it's a register-allocated expression. If it is in a register that is
// not RCX, it will be moved to RCX (so RCX better not be in use!).
//
-void CodeGen::genCodeForShift(GenTreePtr tree)
+void CodeGen::genCodeForShift(GenTree* tree)
{
// Only the non-RMW case here.
assert(tree->OperIsShiftOrRotate());
@@ -4136,10 +4136,10 @@ void CodeGen::genCodeForShift(GenTreePtr tree)
var_types targetType = tree->TypeGet();
instruction ins = genGetInsForOper(tree->OperGet(), targetType);
- GenTreePtr operand = tree->gtGetOp1();
- regNumber operandReg = operand->gtRegNum;
+ GenTree* operand = tree->gtGetOp1();
+ regNumber operandReg = operand->gtRegNum;
- GenTreePtr shiftBy = tree->gtGetOp2();
+ GenTree* shiftBy = tree->gtGetOp2();
if (shiftBy->isContainedIntOrIImmed())
{
@@ -4191,7 +4191,7 @@ void CodeGen::genCodeForShift(GenTreePtr tree)
// targetReg if sourceHi is a memory operand). Similarly for GT_RSH_LO, sourceLo could be marked as
// contained memory-op. Even if not a memory-op, we could mark it as reg-optional.
//
-void CodeGen::genCodeForShiftLong(GenTreePtr tree)
+void CodeGen::genCodeForShiftLong(GenTree* tree)
{
// Only the non-RMW case here.
genTreeOps oper = tree->OperGet();
@@ -4213,7 +4213,7 @@ void CodeGen::genCodeForShiftLong(GenTreePtr tree)
var_types targetType = tree->TypeGet();
instruction ins = genGetInsForOper(oper, targetType);
- GenTreePtr shiftBy = tree->gtGetOp2();
+ GenTree* shiftBy = tree->gtGetOp2();
assert(shiftBy->isContainedIntOrIImmed());
@@ -4408,7 +4408,7 @@ void CodeGen::genCodeForStoreLclFld(GenTreeLclFld* tree)
}
#endif // FEATURE_SIMD
- GenTreePtr op1 = tree->gtGetOp1();
+ GenTree* op1 = tree->gtGetOp1();
genConsumeRegs(op1);
getEmitter()->emitInsBinary(ins_Store(targetType), emitTypeSize(tree), tree, op1);
@@ -4429,7 +4429,7 @@ void CodeGen::genCodeForStoreLclVar(GenTreeLclVar* tree)
regNumber targetReg = tree->gtRegNum;
emitter* emit = getEmitter();
- GenTreePtr op1 = tree->gtGetOp1();
+ GenTree* op1 = tree->gtGetOp1();
// var = call, where call returns a multi-reg return value
// case is handled separately.
@@ -4822,7 +4822,7 @@ void CodeGen::genCodeForStoreInd(GenTreeStoreInd* tree)
{
assert(data->isContained() && !data->OperIsLeaf());
- GenTreePtr rmwDst = nullptr;
+ GenTree* rmwDst = nullptr;
dataIsUnary = (GenTree::OperIsUnary(data->OperGet()) != 0);
if (!dataIsUnary)
@@ -5095,11 +5095,11 @@ void CodeGen::genCallInstruction(GenTreeCall* call)
}
// Consume all the arg regs
- for (GenTreePtr list = call->gtCallLateArgs; list; list = list->MoveNext())
+ for (GenTree* list = call->gtCallLateArgs; list; list = list->MoveNext())
{
assert(list->OperIsList());
- GenTreePtr argNode = list->Current();
+ GenTree* argNode = list->Current();
fgArgTabEntry* curArgTabEntry = compiler->gtArgEntryByNode(call, argNode->gtSkipReloadOrCopy());
assert(curArgTabEntry);
@@ -5117,7 +5117,7 @@ void CodeGen::genCallInstruction(GenTreeCall* call)
unsigned iterationNum = 0;
for (; fieldListPtr != nullptr; fieldListPtr = fieldListPtr->Rest(), iterationNum++)
{
- GenTreePtr putArgRegNode = fieldListPtr->gtOp.gtOp1;
+ GenTree* putArgRegNode = fieldListPtr->gtOp.gtOp1;
assert(putArgRegNode->gtOper == GT_PUTARG_REG);
regNumber argReg = REG_NA;
@@ -5173,11 +5173,11 @@ void CodeGen::genCallInstruction(GenTreeCall* call)
#if defined(_TARGET_X86_) || defined(FEATURE_UNIX_AMD64_STRUCT_PASSING)
// The call will pop its arguments.
// for each putarg_stk:
- ssize_t stackArgBytes = 0;
- GenTreePtr args = call->gtCallArgs;
+ ssize_t stackArgBytes = 0;
+ GenTree* args = call->gtCallArgs;
while (args)
{
- GenTreePtr arg = args->gtOp.gtOp1;
+ GenTree* arg = args->gtOp.gtOp1;
if (arg->OperGet() != GT_ARGPLACE && !(arg->gtFlags & GTF_LATE_ARG))
{
if (arg->OperGet() == GT_PUTARG_STK)
@@ -5647,7 +5647,7 @@ void CodeGen::genCallInstruction(GenTreeCall* call)
// The arguments of the caller needs to be transferred to the callee before exiting caller.
// The actual jump to callee is generated as part of caller epilog sequence.
// Therefore the codegen of GT_JMP is to ensure that the callee arguments are correctly setup.
-void CodeGen::genJmpMethod(GenTreePtr jmp)
+void CodeGen::genJmpMethod(GenTree* jmp)
{
assert(jmp->OperGet() == GT_JMP);
assert(compiler->compJmpOpUsed);
@@ -5975,7 +5975,7 @@ void CodeGen::genLeaInstruction(GenTreeAddrMode* lea)
//-------------------------------------------------------------------------------------------
// static
-void CodeGen::genJumpKindsForTree(GenTreePtr cmpTree, emitJumpKind jmpKind[2], bool jmpToTrueLabel[2])
+void CodeGen::genJumpKindsForTree(GenTree* cmpTree, emitJumpKind jmpKind[2], bool jmpToTrueLabel[2])
{
// Except for BEQ (= ordered GT_EQ) both jumps are to the true label.
jmpToTrueLabel[0] = true;
@@ -6127,13 +6127,13 @@ void CodeGen::genJumpKindsForTree(GenTreePtr cmpTree, emitJumpKind jmpKind[2], b
//
// As we can see from the above equalities, the operands of a compare operator need to be
// reversed in case of BLT/CLT, BGT.UN/CGT.UN, BLE/CLE, BGE.UN/CGE.UN.
-void CodeGen::genCompareFloat(GenTreePtr treeNode)
+void CodeGen::genCompareFloat(GenTree* treeNode)
{
assert(treeNode->OperIsCompare());
GenTreeOp* tree = treeNode->AsOp();
- GenTreePtr op1 = tree->gtOp1;
- GenTreePtr op2 = tree->gtOp2;
+ GenTree* op1 = tree->gtOp1;
+ GenTree* op2 = tree->gtOp2;
var_types op1Type = op1->TypeGet();
var_types op2Type = op2->TypeGet();
@@ -6159,9 +6159,9 @@ void CodeGen::genCompareFloat(GenTreePtr treeNode)
if (reverseOps)
{
- GenTreePtr tmp = op1;
- op1 = op2;
- op2 = tmp;
+ GenTree* tmp = op1;
+ op1 = op2;
+ op2 = tmp;
}
ins = ins_FloatCompare(op1Type);
@@ -6185,13 +6185,13 @@ void CodeGen::genCompareFloat(GenTreePtr treeNode)
//
// Return Value:
// None.
-void CodeGen::genCompareInt(GenTreePtr treeNode)
+void CodeGen::genCompareInt(GenTree* treeNode)
{
assert(treeNode->OperIsCompare() || treeNode->OperIs(GT_CMP));
GenTreeOp* tree = treeNode->AsOp();
- GenTreePtr op1 = tree->gtOp1;
- GenTreePtr op2 = tree->gtOp2;
+ GenTree* op1 = tree->gtOp1;
+ GenTree* op2 = tree->gtOp2;
var_types op1Type = op1->TypeGet();
var_types op2Type = op2->TypeGet();
regNumber targetReg = tree->gtRegNum;
@@ -6300,7 +6300,7 @@ void CodeGen::genCompareInt(GenTreePtr treeNode)
// A full 64-bit value of either 1 or 0 is setup in the 'dstReg'
//-------------------------------------------------------------------------------------------
-void CodeGen::genSetRegToCond(regNumber dstReg, GenTreePtr tree)
+void CodeGen::genSetRegToCond(regNumber dstReg, GenTree* tree)
{
noway_assert((genRegMask(dstReg) & RBM_BYTE_REGS) != 0);
@@ -6475,12 +6475,12 @@ void CodeGen::genLongToIntCast(GenTree* cast)
// TODO-XArch-CQ: Allow castOp to be a contained node without an assigned register.
// TODO: refactor to use getCastDescription
//
-void CodeGen::genIntToIntCast(GenTreePtr treeNode)
+void CodeGen::genIntToIntCast(GenTree* treeNode)
{
assert(treeNode->OperGet() == GT_CAST);
- GenTreePtr castOp = treeNode->gtCast.CastOp();
- var_types srcType = genActualType(castOp->TypeGet());
+ GenTree* castOp = treeNode->gtCast.CastOp();
+ var_types srcType = genActualType(castOp->TypeGet());
noway_assert(genTypeSize(srcType) >= 4);
assert(genTypeSize(srcType) <= genTypeSize(TYP_I_IMPL));
@@ -6744,7 +6744,7 @@ void CodeGen::genIntToIntCast(GenTreePtr treeNode)
// The treeNode must have an assigned register.
// The cast is between float and double or vice versa.
//
-void CodeGen::genFloatToFloatCast(GenTreePtr treeNode)
+void CodeGen::genFloatToFloatCast(GenTree* treeNode)
{
// float <--> double conversions are always non-overflow ones
assert(treeNode->OperGet() == GT_CAST);
@@ -6753,7 +6753,7 @@ void CodeGen::genFloatToFloatCast(GenTreePtr treeNode)
regNumber targetReg = treeNode->gtRegNum;
assert(genIsValidFloatReg(targetReg));
- GenTreePtr op1 = treeNode->gtOp.gtOp1;
+ GenTree* op1 = treeNode->gtOp.gtOp1;
#ifdef DEBUG
// If not contained, must be a valid float reg.
if (op1->isUsedFromReg())
@@ -6796,7 +6796,7 @@ void CodeGen::genFloatToFloatCast(GenTreePtr treeNode)
// The treeNode must have an assigned register.
// SrcType= int32/uint32/int64/uint64 and DstType=float/double.
//
-void CodeGen::genIntToFloatCast(GenTreePtr treeNode)
+void CodeGen::genIntToFloatCast(GenTree* treeNode)
{
// int type --> float/double conversions are always non-overflow ones
assert(treeNode->OperGet() == GT_CAST);
@@ -6805,7 +6805,7 @@ void CodeGen::genIntToFloatCast(GenTreePtr treeNode)
regNumber targetReg = treeNode->gtRegNum;
assert(genIsValidFloatReg(targetReg));
- GenTreePtr op1 = treeNode->gtOp.gtOp1;
+ GenTree* op1 = treeNode->gtOp.gtOp1;
#ifdef DEBUG
if (op1->isUsedFromReg())
{
@@ -6929,7 +6929,7 @@ void CodeGen::genIntToFloatCast(GenTreePtr treeNode)
//
// TODO-XArch-CQ: (Low-pri) - generate in-line code when DstType = uint64
//
-void CodeGen::genFloatToIntCast(GenTreePtr treeNode)
+void CodeGen::genFloatToIntCast(GenTree* treeNode)
{
// we don't expect to see overflow detecting float/double --> int type conversions here
// as they should have been converted into helper calls by front-end.
@@ -6939,7 +6939,7 @@ void CodeGen::genFloatToIntCast(GenTreePtr treeNode)
regNumber targetReg = treeNode->gtRegNum;
assert(genIsValidIntReg(targetReg));
- GenTreePtr op1 = treeNode->gtOp.gtOp1;
+ GenTree* op1 = treeNode->gtOp.gtOp1;
#ifdef DEBUG
if (op1->isUsedFromReg())
{
@@ -6994,14 +6994,14 @@ void CodeGen::genFloatToIntCast(GenTreePtr treeNode)
// TODO-XArch-CQ - mark the operand as contained if known to be in
// memory (e.g. field or an array element).
//
-void CodeGen::genCkfinite(GenTreePtr treeNode)
+void CodeGen::genCkfinite(GenTree* treeNode)
{
assert(treeNode->OperGet() == GT_CKFINITE);
- GenTreePtr op1 = treeNode->gtOp.gtOp1;
- var_types targetType = treeNode->TypeGet();
- int expMask = (targetType == TYP_FLOAT) ? 0x7F800000 : 0x7FF00000; // Bit mask to extract exponent.
- regNumber targetReg = treeNode->gtRegNum;
+ GenTree* op1 = treeNode->gtOp.gtOp1;
+ var_types targetType = treeNode->TypeGet();
+ int expMask = (targetType == TYP_FLOAT) ? 0x7F800000 : 0x7FF00000; // Bit mask to extract exponent.
+ regNumber targetReg = treeNode->gtRegNum;
// Extract exponent into a register.
regNumber tmpReg = treeNode->GetSingleTempReg();
@@ -7232,7 +7232,7 @@ int CodeGenInterface::genCallerSPtoInitialSPdelta()
// i) tree oper is one of GT_NEG or GT_INTRINSIC Abs()
// ii) tree type is floating point type.
// iii) caller of this routine needs to call genProduceReg()
-void CodeGen::genSSE2BitwiseOp(GenTreePtr treeNode)
+void CodeGen::genSSE2BitwiseOp(GenTree* treeNode)
{
regNumber targetReg = treeNode->gtRegNum;
var_types targetType = treeNode->TypeGet();
@@ -7314,7 +7314,7 @@ void CodeGen::genSSE2BitwiseOp(GenTreePtr treeNode)
// Move operand into targetReg only if the reg reserved for
// internal purpose is not the same as targetReg.
- GenTreePtr op1 = treeNode->gtOp.gtOp1;
+ GenTree* op1 = treeNode->gtOp.gtOp1;
assert(op1->isUsedFromReg());
regNumber operandReg = genConsumeReg(op1);
if (tmpReg != targetReg)
@@ -7508,7 +7508,7 @@ void CodeGen::genSSE41RoundOp(GenTreeOp* treeNode)
// Return value:
// None
//
-void CodeGen::genIntrinsic(GenTreePtr treeNode)
+void CodeGen::genIntrinsic(GenTree* treeNode)
{
// Right now only Sqrt/Abs are treated as math intrinsics.
switch (treeNode->gtIntrinsic.gtIntrinsicId)
@@ -7516,7 +7516,7 @@ void CodeGen::genIntrinsic(GenTreePtr treeNode)
case CORINFO_INTRINSIC_Sqrt:
{
// Both operand and its result must be of the same floating point type.
- GenTreePtr srcNode = treeNode->gtOp.gtOp1;
+ GenTree* srcNode = treeNode->gtOp.gtOp1;
assert(varTypeIsFloating(srcNode));
assert(srcNode->TypeGet() == treeNode->TypeGet());
@@ -7562,7 +7562,7 @@ void CodeGen::genIntrinsic(GenTreePtr treeNode)
// the first stack passed argument from the caller. This is done by iterating over
// all the lvParam variables and finding the first with lvArgReg equal to REG_STK.
//
-unsigned CodeGen::getBaseVarForPutArgStk(GenTreePtr treeNode)
+unsigned CodeGen::getBaseVarForPutArgStk(GenTree* treeNode)
{
assert(treeNode->OperGet() == GT_PUTARG_STK);
@@ -7988,8 +7988,8 @@ void CodeGen::genPutArgStkFieldList(GenTreePutArgStk* putArgStk)
//
void CodeGen::genPutArgStk(GenTreePutArgStk* putArgStk)
{
- GenTreePtr data = putArgStk->gtOp1;
- var_types targetType = genActualType(data->TypeGet());
+ GenTree* data = putArgStk->gtOp1;
+ var_types targetType = genActualType(data->TypeGet());
#ifdef _TARGET_X86_
@@ -8776,14 +8776,14 @@ void CodeGen::genStoreLongLclVar(GenTree* treeNode)
LclVarDsc* varDsc = &(compiler->lvaTable[lclNum]);
assert(varDsc->TypeGet() == TYP_LONG);
assert(!varDsc->lvPromoted);
- GenTreePtr op1 = treeNode->gtOp.gtOp1;
+ GenTree* op1 = treeNode->gtOp.gtOp1;
noway_assert(op1->OperGet() == GT_LONG || op1->OperGet() == GT_MUL_LONG);
genConsumeRegs(op1);
if (op1->OperGet() == GT_LONG)
{
- GenTreePtr loVal = op1->gtGetOp1();
- GenTreePtr hiVal = op1->gtGetOp2();
+ GenTree* loVal = op1->gtGetOp1();
+ GenTree* hiVal = op1->gtGetOp2();
// NYI: Contained immediates.
NYI_IF((loVal->gtRegNum == REG_NA) || (hiVal->gtRegNum == REG_NA),
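A side note on the constants kept by the genCkfinite hunk above: 0x7F800000 and 0x7FF00000 select the IEEE-754 exponent bits (of a float, and of the high 32 bits of a double, respectively); a value is Inf or NaN exactly when every exponent bit is set. The C++ sketch below mirrors that test for the float case — it is an illustration, not code from the change.

    #include <cstdint>
    #include <cstring>

    // Returns true when 'f' is neither Inf nor NaN, using the same exponent
    // mask that genCkfinite applies in the float case.
    static bool isFiniteFloatSketch(float f)
    {
        std::uint32_t bits;
        std::memcpy(&bits, &f, sizeof(bits)); // reinterpret the float's bit pattern
        return (bits & 0x7F800000u) != 0x7F800000u;
    }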
diff --git a/src/jit/compiler.cpp b/src/jit/compiler.cpp
index fd3264e0c2..c98b61228b 100644
--- a/src/jit/compiler.cpp
+++ b/src/jit/compiler.cpp
@@ -476,7 +476,7 @@ var_types Compiler::getJitGCType(BYTE gcType)
//
// Note that for ARM64 there will always be exactly two pointer-sized fields
-void Compiler::getStructGcPtrsFromOp(GenTreePtr op, BYTE* gcPtrsOut)
+void Compiler::getStructGcPtrsFromOp(GenTree* op, BYTE* gcPtrsOut)
{
assert(op->TypeGet() == TYP_STRUCT);
@@ -1945,7 +1945,7 @@ void Compiler::compInit(ArenaAllocator* pAlloc, InlineInfo* inlineInfo)
#endif // MEASURE_MEM_ALLOC
#ifdef LEGACY_BACKEND
- compQMarks = new (this, CMK_Unknown) JitExpandArrayStack<GenTreePtr>(getAllocator());
+ compQMarks = new (this, CMK_Unknown) JitExpandArrayStack<GenTree*>(getAllocator());
#endif
}
@@ -2305,7 +2305,7 @@ VarName Compiler::compVarName(regNumber reg, bool isFloatReg)
// maybe var is marked dead, but still used (last use)
if (!isFloatReg && codeGen->regSet.rsUsedTree[reg] != NULL)
{
- GenTreePtr nodePtr;
+ GenTree* nodePtr;
if (GenTree::OperIsUnary(codeGen->regSet.rsUsedTree[reg]->OperGet()))
{
@@ -5110,7 +5110,7 @@ void Compiler::ResetOptAnnotations()
{
stmt->gtFlags &= ~GTF_STMT_HAS_CSE;
- for (GenTreePtr tree = stmt->gtStmt.gtStmtList; tree != nullptr; tree = tree->gtNext)
+ for (GenTree* tree = stmt->gtStmt.gtStmtList; tree != nullptr; tree = tree->gtNext)
{
tree->ClearVN();
tree->ClearAssertion();
@@ -7121,9 +7121,9 @@ Compiler::NodeToIntMap* Compiler::FindReachableNodesInNodeTestData()
for (BasicBlock* block = fgFirstBB; block != nullptr; block = block->bbNext)
{
- for (GenTreePtr stmt = block->FirstNonPhiDef(); stmt != nullptr; stmt = stmt->gtNext)
+ for (GenTree* stmt = block->FirstNonPhiDef(); stmt != nullptr; stmt = stmt->gtNext)
{
- for (GenTreePtr tree = stmt->gtStmt.gtStmtList; tree; tree = tree->gtNext)
+ for (GenTree* tree = stmt->gtStmt.gtStmtList; tree; tree = tree->gtNext)
{
TestLabelAndNum tlAndN;
@@ -7135,11 +7135,11 @@ Compiler::NodeToIntMap* Compiler::FindReachableNodesInNodeTestData()
unsigned i = 0;
while (args != nullptr)
{
- GenTreePtr arg = args->Current();
+ GenTree* arg = args->Current();
if (arg->gtFlags & GTF_LATE_ARG)
{
// Find the corresponding late arg.
- GenTreePtr lateArg = call->fgArgInfo->GetLateArg(i);
+ GenTree* lateArg = call->fgArgInfo->GetLateArg(i);
if (GetNodeTestData()->Lookup(lateArg, &tlAndN))
{
reachable->Set(lateArg, 0);
@@ -7160,7 +7160,7 @@ Compiler::NodeToIntMap* Compiler::FindReachableNodesInNodeTestData()
return reachable;
}
-void Compiler::TransferTestDataToNode(GenTreePtr from, GenTreePtr to)
+void Compiler::TransferTestDataToNode(GenTree* from, GenTree* to)
{
TestLabelAndNum tlAndN;
// We can't currently associate multiple annotations with a single node.
@@ -7180,7 +7180,7 @@ void Compiler::TransferTestDataToNode(GenTreePtr from, GenTreePtr to)
}
}
-void Compiler::CopyTestDataToCloneTree(GenTreePtr from, GenTreePtr to)
+void Compiler::CopyTestDataToCloneTree(GenTree* from, GenTree* to)
{
if (m_nodeTestData == nullptr)
{
@@ -7357,12 +7357,12 @@ void Compiler::compJitStats()
void Compiler::compCallArgStats()
{
- GenTreePtr args;
- GenTreePtr argx;
+ GenTree* args;
+ GenTree* argx;
BasicBlock* block;
- GenTreePtr stmt;
- GenTreePtr call;
+ GenTree* stmt;
+ GenTree* call;
unsigned argNum;
@@ -11463,7 +11463,7 @@ HelperCallProperties Compiler::s_helperCallProperties;
// Return Value:
// true - tree kills GC refs on callee save registers
// false - tree doesn't affect GC refs on callee save registers
-bool Compiler::killGCRefs(GenTreePtr tree)
+bool Compiler::killGCRefs(GenTree* tree)
{
if (tree->IsCall())
{
diff --git a/src/jit/compiler.h b/src/jit/compiler.h
index 90ddb6dcf0..f95c89d750 100644
--- a/src/jit/compiler.h
+++ b/src/jit/compiler.h
@@ -177,7 +177,7 @@ struct VarScopeDsc
struct DefLoc
{
BasicBlock* m_blk;
- GenTreePtr m_tree;
+ GenTree* m_tree;
DefLoc() : m_blk(nullptr), m_tree(nullptr)
{
@@ -732,9 +732,9 @@ public:
BYTE* lvGcLayout; // GC layout info for structs
#if ASSERTION_PROP
- BlockSet lvRefBlks; // Set of blocks that contain refs
- GenTreePtr lvDefStmt; // Pointer to the statement with the single definition
- void lvaDisqualifyVar(); // Call to disqualify a local variable from use in optAddCopies
+ BlockSet lvRefBlks; // Set of blocks that contain refs
+ GenTree* lvDefStmt; // Pointer to the statement with the single definition
+ void lvaDisqualifyVar(); // Call to disqualify a local variable from use in optAddCopies
#endif
var_types TypeGet() const
{
@@ -1213,10 +1213,10 @@ struct fgArgTabEntry
}
#endif // defined(FEATURE_UNIX_AMD64_STRUCT_PASSING)
- GenTreePtr node; // Initially points at the Op1 field of 'parent', but if the argument is replaced with an GT_ASG or
+ GenTree* node; // Initially points at the Op1 field of 'parent', but if the argument is replaced with an GT_ASG or
// placeholder
// it will point at the actual argument in the gtCallLateArgs list.
- GenTreePtr parent; // Points at the GT_LIST node in the gtCallArgs for this argument
+ GenTree* parent; // Points at the GT_LIST node in the gtCallArgs for this argument
unsigned argNum; // The original argument number, also specifies the required argument evaluation order from the IL
@@ -1336,12 +1336,12 @@ public:
fgArgInfo(GenTreeCall* newCall, GenTreeCall* oldCall);
fgArgTabEntry* AddRegArg(
- unsigned argNum, GenTreePtr node, GenTreePtr parent, regNumber regNum, unsigned numRegs, unsigned alignment);
+ unsigned argNum, GenTree* node, GenTree* parent, regNumber regNum, unsigned numRegs, unsigned alignment);
#ifdef FEATURE_UNIX_AMD64_STRUCT_PASSING
fgArgTabEntry* AddRegArg(unsigned argNum,
- GenTreePtr node,
- GenTreePtr parent,
+ GenTree* node,
+ GenTree* parent,
regNumber regNum,
unsigned numRegs,
unsigned alignment,
@@ -1350,21 +1350,21 @@ public:
const SYSTEMV_AMD64_CORINFO_STRUCT_REG_PASSING_DESCRIPTOR* const structDescPtr = nullptr);
#endif // FEATURE_UNIX_AMD64_STRUCT_PASSING
- fgArgTabEntry* AddStkArg(unsigned argNum,
- GenTreePtr node,
- GenTreePtr parent,
- unsigned numSlots,
+ fgArgTabEntry* AddStkArg(unsigned argNum,
+ GenTree* node,
+ GenTree* parent,
+ unsigned numSlots,
unsigned alignment FEATURE_UNIX_AMD64_STRUCT_PASSING_ONLY_ARG(const bool isStruct));
void RemorphReset();
fgArgTabEntry* RemorphRegArg(
- unsigned argNum, GenTreePtr node, GenTreePtr parent, regNumber regNum, unsigned numRegs, unsigned alignment);
+ unsigned argNum, GenTree* node, GenTree* parent, regNumber regNum, unsigned numRegs, unsigned alignment);
- void RemorphStkArg(unsigned argNum, GenTreePtr node, GenTreePtr parent, unsigned numSlots, unsigned alignment);
+ void RemorphStkArg(unsigned argNum, GenTree* node, GenTree* parent, unsigned numSlots, unsigned alignment);
void SplitArg(unsigned argNum, unsigned numRegs, unsigned numSlots);
- void EvalToTmp(unsigned argNum, unsigned tmpNum, GenTreePtr newNode);
+ void EvalToTmp(unsigned argNum, unsigned tmpNum, GenTree* newNode);
void ArgsComplete();
@@ -1443,7 +1443,7 @@ public:
#endif // defined(UNIX_X86_ABI)
// Get the late arg for arg at position argIndex. Caller must ensure this position has a late arg.
- GenTreePtr GetLateArg(unsigned argIndex);
+ GenTree* GetLateArg(unsigned argIndex);
void Dump(Compiler* compiler);
};
@@ -1474,7 +1474,7 @@ struct TestLabelAndNum
}
};
-typedef JitHashTable<GenTreePtr, JitPtrKeyFuncs<GenTree>, TestLabelAndNum> NodeToTestDataMap;
+typedef JitHashTable<GenTree*, JitPtrKeyFuncs<GenTree>, TestLabelAndNum> NodeToTestDataMap;
// XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
#endif // DEBUG
@@ -1587,7 +1587,7 @@ public:
#endif
#if FEATURE_MULTIREG_RET
- GenTreePtr impAssignMultiRegTypeToVar(GenTreePtr op, CORINFO_CLASS_HANDLE hClass);
+ GenTree* impAssignMultiRegTypeToVar(GenTree* op, CORINFO_CLASS_HANDLE hClass);
#endif // FEATURE_MULTIREG_RET
#ifdef ARM_SOFTFP
@@ -1603,10 +1603,10 @@ public:
//
bool IsHfa(CORINFO_CLASS_HANDLE hClass);
- bool IsHfa(GenTreePtr tree);
+ bool IsHfa(GenTree* tree);
- var_types GetHfaType(GenTreePtr tree);
- unsigned GetHfaCount(GenTreePtr tree);
+ var_types GetHfaType(GenTree* tree);
+ unsigned GetHfaCount(GenTree* tree);
var_types GetHfaType(CORINFO_CLASS_HANDLE hClass);
unsigned GetHfaCount(CORINFO_CLASS_HANDLE hClass);
@@ -1956,78 +1956,78 @@ public:
*/
// Functions to create nodes
- GenTreeStmt* gtNewStmt(GenTreePtr expr = nullptr, IL_OFFSETX offset = BAD_IL_OFFSET);
+ GenTreeStmt* gtNewStmt(GenTree* expr = nullptr, IL_OFFSETX offset = BAD_IL_OFFSET);
// For unary opers.
- GenTreePtr gtNewOperNode(genTreeOps oper, var_types type, GenTreePtr op1, bool doSimplifications = TRUE);
+ GenTree* gtNewOperNode(genTreeOps oper, var_types type, GenTree* op1, bool doSimplifications = TRUE);
// For binary opers.
- GenTreePtr gtNewOperNode(genTreeOps oper, var_types type, GenTreePtr op1, GenTreePtr op2);
+ GenTree* gtNewOperNode(genTreeOps oper, var_types type, GenTree* op1, GenTree* op2);
- GenTreePtr gtNewQmarkNode(var_types type, GenTreePtr cond, GenTreePtr colon);
+ GenTree* gtNewQmarkNode(var_types type, GenTree* cond, GenTree* colon);
- GenTreePtr gtNewLargeOperNode(genTreeOps oper,
- var_types type = TYP_I_IMPL,
- GenTreePtr op1 = nullptr,
- GenTreePtr op2 = nullptr);
+ GenTree* gtNewLargeOperNode(genTreeOps oper,
+ var_types type = TYP_I_IMPL,
+ GenTree* op1 = nullptr,
+ GenTree* op2 = nullptr);
GenTreeIntCon* gtNewIconNode(ssize_t value, var_types type = TYP_INT);
GenTree* gtNewPhysRegNode(regNumber reg, var_types type);
- GenTreePtr gtNewJmpTableNode();
+ GenTree* gtNewJmpTableNode();
- GenTreePtr gtNewIndOfIconHandleNode(var_types indType, size_t value, unsigned iconFlags, bool isInvariant);
+ GenTree* gtNewIndOfIconHandleNode(var_types indType, size_t value, unsigned iconFlags, bool isInvariant);
- GenTreePtr gtNewIconHandleNode(size_t value, unsigned flags, FieldSeqNode* fields = nullptr);
+ GenTree* gtNewIconHandleNode(size_t value, unsigned flags, FieldSeqNode* fields = nullptr);
unsigned gtTokenToIconFlags(unsigned token);
- GenTreePtr gtNewIconEmbHndNode(void* value, void* pValue, unsigned flags, void* compileTimeHandle);
+ GenTree* gtNewIconEmbHndNode(void* value, void* pValue, unsigned flags, void* compileTimeHandle);
- GenTreePtr gtNewIconEmbScpHndNode(CORINFO_MODULE_HANDLE scpHnd);
- GenTreePtr gtNewIconEmbClsHndNode(CORINFO_CLASS_HANDLE clsHnd);
- GenTreePtr gtNewIconEmbMethHndNode(CORINFO_METHOD_HANDLE methHnd);
- GenTreePtr gtNewIconEmbFldHndNode(CORINFO_FIELD_HANDLE fldHnd);
+ GenTree* gtNewIconEmbScpHndNode(CORINFO_MODULE_HANDLE scpHnd);
+ GenTree* gtNewIconEmbClsHndNode(CORINFO_CLASS_HANDLE clsHnd);
+ GenTree* gtNewIconEmbMethHndNode(CORINFO_METHOD_HANDLE methHnd);
+ GenTree* gtNewIconEmbFldHndNode(CORINFO_FIELD_HANDLE fldHnd);
- GenTreePtr gtNewStringLiteralNode(InfoAccessType iat, void* pValue);
+ GenTree* gtNewStringLiteralNode(InfoAccessType iat, void* pValue);
- GenTreePtr gtNewLconNode(__int64 value);
+ GenTree* gtNewLconNode(__int64 value);
- GenTreePtr gtNewDconNode(double value);
+ GenTree* gtNewDconNode(double value);
- GenTreePtr gtNewSconNode(int CPX, CORINFO_MODULE_HANDLE scpHandle);
+ GenTree* gtNewSconNode(int CPX, CORINFO_MODULE_HANDLE scpHandle);
- GenTreePtr gtNewZeroConNode(var_types type);
+ GenTree* gtNewZeroConNode(var_types type);
- GenTreePtr gtNewOneConNode(var_types type);
+ GenTree* gtNewOneConNode(var_types type);
#ifdef FEATURE_SIMD
- GenTreePtr gtNewSIMDVectorZero(var_types simdType, var_types baseType, unsigned size);
- GenTreePtr gtNewSIMDVectorOne(var_types simdType, var_types baseType, unsigned size);
+ GenTree* gtNewSIMDVectorZero(var_types simdType, var_types baseType, unsigned size);
+ GenTree* gtNewSIMDVectorOne(var_types simdType, var_types baseType, unsigned size);
#endif
GenTreeBlk* gtNewBlkOpNode(
- genTreeOps oper, GenTreePtr dst, GenTreePtr srcOrFillVal, GenTreePtr sizeOrClsTok, bool isVolatile);
+ genTreeOps oper, GenTree* dst, GenTree* srcOrFillVal, GenTree* sizeOrClsTok, bool isVolatile);
- GenTree* gtNewBlkOpNode(GenTreePtr dst, GenTreePtr srcOrFillVal, unsigned size, bool isVolatile, bool isCopyBlock);
+ GenTree* gtNewBlkOpNode(GenTree* dst, GenTree* srcOrFillVal, unsigned size, bool isVolatile, bool isCopyBlock);
- GenTree* gtNewPutArgReg(var_types type, GenTreePtr arg, regNumber argReg);
+ GenTree* gtNewPutArgReg(var_types type, GenTree* arg, regNumber argReg);
- GenTree* gtNewBitCastNode(var_types type, GenTreePtr arg);
+ GenTree* gtNewBitCastNode(var_types type, GenTree* arg);
protected:
- void gtBlockOpInit(GenTreePtr result, GenTreePtr dst, GenTreePtr srcOrFillVal, bool isVolatile);
+ void gtBlockOpInit(GenTree* result, GenTree* dst, GenTree* srcOrFillVal, bool isVolatile);
public:
- GenTree* gtNewObjNode(CORINFO_CLASS_HANDLE structHnd, GenTreePtr addr);
+ GenTree* gtNewObjNode(CORINFO_CLASS_HANDLE structHnd, GenTree* addr);
void gtSetObjGcInfo(GenTreeObj* objNode);
- GenTree* gtNewStructVal(CORINFO_CLASS_HANDLE structHnd, GenTreePtr addr);
- GenTree* gtNewBlockVal(GenTreePtr addr, unsigned size);
+ GenTree* gtNewStructVal(CORINFO_CLASS_HANDLE structHnd, GenTree* addr);
+ GenTree* gtNewBlockVal(GenTree* addr, unsigned size);
- GenTree* gtNewCpObjNode(GenTreePtr dst, GenTreePtr src, CORINFO_CLASS_HANDLE structHnd, bool isVolatile);
+ GenTree* gtNewCpObjNode(GenTree* dst, GenTree* src, CORINFO_CLASS_HANDLE structHnd, bool isVolatile);
- GenTreeArgList* gtNewListNode(GenTreePtr op1, GenTreeArgList* op2);
+ GenTreeArgList* gtNewListNode(GenTree* op1, GenTreeArgList* op2);
GenTreeCall* gtNewCallNode(gtCallTypes callType,
CORINFO_METHOD_HANDLE handle,
@@ -2035,25 +2035,21 @@ public:
GenTreeArgList* args,
IL_OFFSETX ilOffset = BAD_IL_OFFSET);
- GenTreeCall* gtNewIndCallNode(GenTreePtr addr,
+ GenTreeCall* gtNewIndCallNode(GenTree* addr,
var_types type,
GenTreeArgList* args,
IL_OFFSETX ilOffset = BAD_IL_OFFSET);
GenTreeCall* gtNewHelperCallNode(unsigned helper, var_types type, GenTreeArgList* args = nullptr);
- GenTreePtr gtNewLclvNode(unsigned lnum, var_types type, IL_OFFSETX ILoffs = BAD_IL_OFFSET);
+ GenTree* gtNewLclvNode(unsigned lnum, var_types type, IL_OFFSETX ILoffs = BAD_IL_OFFSET);
#ifdef FEATURE_SIMD
GenTreeSIMD* gtNewSIMDNode(
- var_types type, GenTreePtr op1, SIMDIntrinsicID simdIntrinsicID, var_types baseType, unsigned size);
- GenTreeSIMD* gtNewSIMDNode(var_types type,
- GenTreePtr op1,
- GenTreePtr op2,
- SIMDIntrinsicID simdIntrinsicID,
- var_types baseType,
- unsigned size);
- void SetOpLclRelatedToSIMDIntrinsic(GenTreePtr op);
+ var_types type, GenTree* op1, SIMDIntrinsicID simdIntrinsicID, var_types baseType, unsigned size);
+ GenTreeSIMD* gtNewSIMDNode(
+ var_types type, GenTree* op1, GenTree* op2, SIMDIntrinsicID simdIntrinsicID, var_types baseType, unsigned size);
+ void SetOpLclRelatedToSIMDIntrinsic(GenTree* op);
#endif
#ifdef FEATURE_HW_INTRINSICS
@@ -2089,76 +2085,76 @@ public:
CORINFO_CLASS_HANDLE gtGetStructHandleForHWSIMD(var_types simdType, var_types simdBaseType);
#endif // FEATURE_HW_INTRINSICS
- GenTreePtr gtNewLclLNode(unsigned lnum, var_types type, IL_OFFSETX ILoffs = BAD_IL_OFFSET);
+ GenTree* gtNewLclLNode(unsigned lnum, var_types type, IL_OFFSETX ILoffs = BAD_IL_OFFSET);
GenTreeLclFld* gtNewLclFldNode(unsigned lnum, var_types type, unsigned offset);
- GenTreePtr gtNewInlineCandidateReturnExpr(GenTreePtr inlineCandidate, var_types type);
+ GenTree* gtNewInlineCandidateReturnExpr(GenTree* inlineCandidate, var_types type);
- GenTreePtr gtNewCodeRef(BasicBlock* block);
+ GenTree* gtNewCodeRef(BasicBlock* block);
- GenTreePtr gtNewFieldRef(
- var_types typ, CORINFO_FIELD_HANDLE fldHnd, GenTreePtr obj = nullptr, DWORD offset = 0, bool nullcheck = false);
+ GenTree* gtNewFieldRef(
+ var_types typ, CORINFO_FIELD_HANDLE fldHnd, GenTree* obj = nullptr, DWORD offset = 0, bool nullcheck = false);
- GenTreePtr gtNewIndexRef(var_types typ, GenTreePtr arrayOp, GenTreePtr indexOp);
+ GenTree* gtNewIndexRef(var_types typ, GenTree* arrayOp, GenTree* indexOp);
GenTreeArrLen* gtNewArrLen(var_types typ, GenTree* arrayOp, int lenOffset);
GenTree* gtNewIndir(var_types typ, GenTree* addr);
- GenTreeArgList* gtNewArgList(GenTreePtr op);
- GenTreeArgList* gtNewArgList(GenTreePtr op1, GenTreePtr op2);
- GenTreeArgList* gtNewArgList(GenTreePtr op1, GenTreePtr op2, GenTreePtr op3);
- GenTreeArgList* gtNewArgList(GenTreePtr op1, GenTreePtr op2, GenTreePtr op3, GenTreePtr op4);
+ GenTreeArgList* gtNewArgList(GenTree* op);
+ GenTreeArgList* gtNewArgList(GenTree* op1, GenTree* op2);
+ GenTreeArgList* gtNewArgList(GenTree* op1, GenTree* op2, GenTree* op3);
+ GenTreeArgList* gtNewArgList(GenTree* op1, GenTree* op2, GenTree* op3, GenTree* op4);
static fgArgTabEntry* gtArgEntryByArgNum(GenTreeCall* call, unsigned argNum);
- static fgArgTabEntry* gtArgEntryByNode(GenTreeCall* call, GenTreePtr node);
+ static fgArgTabEntry* gtArgEntryByNode(GenTreeCall* call, GenTree* node);
fgArgTabEntry* gtArgEntryByLateArgIndex(GenTreeCall* call, unsigned lateArgInx);
bool gtArgIsThisPtr(fgArgTabEntry* argEntry);
- GenTreePtr gtNewAssignNode(GenTreePtr dst, GenTreePtr src);
+ GenTree* gtNewAssignNode(GenTree* dst, GenTree* src);
- GenTreePtr gtNewTempAssign(unsigned tmp, GenTreePtr val);
+ GenTree* gtNewTempAssign(unsigned tmp, GenTree* val);
- GenTreePtr gtNewRefCOMfield(GenTreePtr objPtr,
- CORINFO_RESOLVED_TOKEN* pResolvedToken,
- CORINFO_ACCESS_FLAGS access,
- CORINFO_FIELD_INFO* pFieldInfo,
- var_types lclTyp,
- CORINFO_CLASS_HANDLE structType,
- GenTreePtr assg);
+ GenTree* gtNewRefCOMfield(GenTree* objPtr,
+ CORINFO_RESOLVED_TOKEN* pResolvedToken,
+ CORINFO_ACCESS_FLAGS access,
+ CORINFO_FIELD_INFO* pFieldInfo,
+ var_types lclTyp,
+ CORINFO_CLASS_HANDLE structType,
+ GenTree* assg);
- GenTreePtr gtNewNothingNode();
+ GenTree* gtNewNothingNode();
- GenTreePtr gtNewArgPlaceHolderNode(var_types type, CORINFO_CLASS_HANDLE clsHnd);
+ GenTree* gtNewArgPlaceHolderNode(var_types type, CORINFO_CLASS_HANDLE clsHnd);
- GenTreePtr gtUnusedValNode(GenTreePtr expr);
+ GenTree* gtUnusedValNode(GenTree* expr);
- GenTreePtr gtNewCastNode(var_types typ, GenTreePtr op1, var_types castType);
+ GenTree* gtNewCastNode(var_types typ, GenTree* op1, var_types castType);
- GenTreePtr gtNewCastNodeL(var_types typ, GenTreePtr op1, var_types castType);
+ GenTree* gtNewCastNodeL(var_types typ, GenTree* op1, var_types castType);
- GenTreePtr gtNewAllocObjNode(unsigned int helper, CORINFO_CLASS_HANDLE clsHnd, var_types type, GenTreePtr op1);
+ GenTree* gtNewAllocObjNode(unsigned int helper, CORINFO_CLASS_HANDLE clsHnd, var_types type, GenTree* op1);
GenTree* gtNewRuntimeLookup(CORINFO_GENERIC_HANDLE hnd, CorInfoGenericHandleType hndTyp, GenTree* lookupTree);
//------------------------------------------------------------------------
// Other GenTree functions
- GenTreePtr gtClone(GenTree* tree, bool complexOK = false);
+ GenTree* gtClone(GenTree* tree, bool complexOK = false);
// If `tree` is a lclVar with lclNum `varNum`, return an IntCns with value `varVal`; otherwise,
// create a copy of `tree`, adding specified flags, replacing uses of lclVar `deepVarNum` with
// IntCnses with value `deepVarVal`.
- GenTreePtr gtCloneExpr(
+ GenTree* gtCloneExpr(
GenTree* tree, unsigned addFlags, unsigned varNum, int varVal, unsigned deepVarNum, int deepVarVal);
// Create a copy of `tree`, optionally adding specified flags, and optionally mapping uses of local
// `varNum` to int constants with value `varVal`.
- GenTreePtr gtCloneExpr(GenTree* tree, unsigned addFlags = 0, unsigned varNum = (unsigned)-1, int varVal = 0)
+ GenTree* gtCloneExpr(GenTree* tree, unsigned addFlags = 0, unsigned varNum = (unsigned)-1, int varVal = 0)
{
return gtCloneExpr(tree, addFlags, varNum, varVal, varNum, varVal);
}
- GenTreePtr gtReplaceTree(GenTreePtr stmt, GenTreePtr tree, GenTreePtr replacementTree);
+ GenTree* gtReplaceTree(GenTree* stmt, GenTree* tree, GenTree* replacementTree);
void gtUpdateSideEffects(GenTree* stmt, GenTree* tree);
@@ -2174,15 +2170,15 @@ public:
// is #of nodes in subtree) of "tree" is greater than "limit".
// (This is somewhat redundant with the "gtCostEx/gtCostSz" fields, but can be used
// before they have been set.)
- bool gtComplexityExceeds(GenTreePtr* tree, unsigned limit);
+ bool gtComplexityExceeds(GenTree** tree, unsigned limit);
bool gtCompareTree(GenTree* op1, GenTree* op2);
- GenTreePtr gtReverseCond(GenTree* tree);
+ GenTree* gtReverseCond(GenTree* tree);
bool gtHasRef(GenTree* tree, ssize_t lclNum, bool defOnly);
- bool gtHasLocalsWithAddrOp(GenTreePtr tree);
+ bool gtHasLocalsWithAddrOp(GenTree* tree);
unsigned gtSetListOrder(GenTree* list, bool regs, bool isListCallArgs);
@@ -2191,7 +2187,7 @@ public:
#ifdef DEBUG
unsigned gtHashValue(GenTree* tree);
- GenTreePtr gtWalkOpEffectiveVal(GenTreePtr op);
+ GenTree* gtWalkOpEffectiveVal(GenTree* op);
#endif
void gtPrepareCost(GenTree* tree);
@@ -2206,28 +2202,28 @@ public:
#if FEATURE_STACK_FP_X87
bool gtFPstLvlRedo;
- void gtComputeFPlvls(GenTreePtr tree);
+ void gtComputeFPlvls(GenTree* tree);
#endif // FEATURE_STACK_FP_X87
void gtSetStmtInfo(GenTree* stmt);
// Returns "true" iff "node" has any of the side effects in "flags".
- bool gtNodeHasSideEffects(GenTreePtr node, unsigned flags);
+ bool gtNodeHasSideEffects(GenTree* node, unsigned flags);
// Returns "true" iff "tree" or its (transitive) children have any of the side effects in "flags".
- bool gtTreeHasSideEffects(GenTreePtr tree, unsigned flags);
+ bool gtTreeHasSideEffects(GenTree* tree, unsigned flags);
// Appends 'expr' in front of 'list'
// 'list' will typically start off as 'nullptr'
// when 'list' is non-null a GT_COMMA node is used to insert 'expr'
- GenTreePtr gtBuildCommaList(GenTreePtr list, GenTreePtr expr);
+ GenTree* gtBuildCommaList(GenTree* list, GenTree* expr);
- void gtExtractSideEffList(GenTreePtr expr,
- GenTreePtr* pList,
- unsigned flags = GTF_SIDE_EFFECT,
- bool ignoreRoot = false);
+ void gtExtractSideEffList(GenTree* expr,
+ GenTree** pList,
+ unsigned flags = GTF_SIDE_EFFECT,
+ bool ignoreRoot = false);
- GenTreePtr gtGetThisArg(GenTreeCall* call);
+ GenTree* gtGetThisArg(GenTreeCall* call);
// Static fields of struct types (and sometimes the types that those are reduced to) are represented by having the
// static field contain an object pointer to the boxed struct. This simplifies the GC implementation...but
@@ -2283,23 +2279,23 @@ public:
//-------------------------------------------------------------------------
// Get the handle, if any.
- CORINFO_CLASS_HANDLE gtGetStructHandleIfPresent(GenTreePtr tree);
+ CORINFO_CLASS_HANDLE gtGetStructHandleIfPresent(GenTree* tree);
// Get the handle, and assert if not found.
- CORINFO_CLASS_HANDLE gtGetStructHandle(GenTreePtr tree);
+ CORINFO_CLASS_HANDLE gtGetStructHandle(GenTree* tree);
// Get the handle for a ref type.
- CORINFO_CLASS_HANDLE gtGetClassHandle(GenTreePtr tree, bool* isExact, bool* isNonNull);
+ CORINFO_CLASS_HANDLE gtGetClassHandle(GenTree* tree, bool* isExact, bool* isNonNull);
//-------------------------------------------------------------------------
// Functions to display the trees
#ifdef DEBUG
- void gtDispNode(GenTreePtr tree, IndentStack* indentStack, __in_z const char* msg, bool isLIR);
+ void gtDispNode(GenTree* tree, IndentStack* indentStack, __in_z const char* msg, bool isLIR);
- void gtDispVN(GenTreePtr tree);
- void gtDispConst(GenTreePtr tree);
- void gtDispLeaf(GenTreePtr tree, IndentStack* indentStack);
- void gtDispNodeName(GenTreePtr tree);
- void gtDispRegVal(GenTreePtr tree);
+ void gtDispVN(GenTree* tree);
+ void gtDispConst(GenTree* tree);
+ void gtDispLeaf(GenTree* tree, IndentStack* indentStack);
+ void gtDispNodeName(GenTree* tree);
+ void gtDispRegVal(GenTree* tree);
enum IndentInfo
{
@@ -2311,12 +2307,12 @@ public:
IIError,
IndentInfoCount
};
- void gtDispChild(GenTreePtr child,
+ void gtDispChild(GenTree* child,
IndentStack* indentStack,
IndentInfo arcType,
__in_opt const char* msg = nullptr,
bool topOnly = false);
- void gtDispTree(GenTreePtr tree,
+ void gtDispTree(GenTree* tree,
IndentStack* indentStack = nullptr,
__in_opt const char* msg = nullptr,
bool topOnly = false,
@@ -2325,9 +2321,9 @@ public:
int gtGetLclVarName(unsigned lclNum, char* buf, unsigned buf_remaining);
char* gtGetLclVarName(unsigned lclNum);
void gtDispLclVar(unsigned varNum, bool padForBiggestDisp = true);
- void gtDispTreeList(GenTreePtr tree, IndentStack* indentStack = nullptr);
- void gtGetArgMsg(GenTreeCall* call, GenTreePtr arg, unsigned argNum, int listCount, char* bufp, unsigned bufLength);
- void gtGetLateArgMsg(GenTreeCall* call, GenTreePtr arg, int argNum, int listCount, char* bufp, unsigned bufLength);
+ void gtDispTreeList(GenTree* tree, IndentStack* indentStack = nullptr);
+ void gtGetArgMsg(GenTreeCall* call, GenTree* arg, unsigned argNum, int listCount, char* bufp, unsigned bufLength);
+ void gtGetLateArgMsg(GenTreeCall* call, GenTree* arg, int argNum, int listCount, char* bufp, unsigned bufLength);
void gtDispArgList(GenTreeCall* call, IndentStack* indentStack);
void gtDispFieldSeq(FieldSeqNode* pfsn);
@@ -2347,8 +2343,8 @@ public:
WALK_ABORT
};
struct fgWalkData;
- typedef fgWalkResult(fgWalkPreFn)(GenTreePtr* pTree, fgWalkData* data);
- typedef fgWalkResult(fgWalkPostFn)(GenTreePtr* pTree, fgWalkData* data);
+ typedef fgWalkResult(fgWalkPreFn)(GenTree** pTree, fgWalkData* data);
+ typedef fgWalkResult(fgWalkPostFn)(GenTree** pTree, fgWalkData* data);
#ifdef DEBUG
static fgWalkPreFn gtAssertColonCond;
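The typedef change above fixes the shape of tree-walk callbacks to take GenTree** directly. Below is a sketch of one such callback under the new signature; the WALK_CONTINUE enumerator and any payload carried through fgWalkData are assumptions made for illustration (only fgWalkResult, fgWalkData, and WALK_ABORT are visible in the hunks above).

    // Hypothetical walker callback matching the fgWalkPreFn typedef above.
    Compiler::fgWalkResult CountLocalsSketch(GenTree** pTree, Compiler::fgWalkData* data)
    {
        GenTree* tree = *pTree;
        if (tree->IsLocal())
        {
            // e.g. bump a counter reachable through 'data' (payload field assumed)
        }
        return Compiler::WALK_CONTINUE; // assumed enumerator; WALK_ABORT is shown above
    }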
@@ -2356,14 +2352,14 @@ public:
static fgWalkPreFn gtMarkColonCond;
static fgWalkPreFn gtClearColonCond;
- GenTreePtr* gtFindLink(GenTreePtr stmt, GenTreePtr node);
- bool gtHasCatchArg(GenTreePtr tree);
- bool gtHasUnmanagedCall(GenTreePtr tree);
+ GenTree** gtFindLink(GenTree* stmt, GenTree* node);
+ bool gtHasCatchArg(GenTree* tree);
+ bool gtHasUnmanagedCall(GenTree* tree);
typedef ArrayStack<GenTree*> GenTreeStack;
static bool gtHasCallOnStack(GenTreeStack* parentStack);
- void gtCheckQuirkAddrExposedLclVar(GenTreePtr argTree, GenTreeStack* parentStack);
+ void gtCheckQuirkAddrExposedLclVar(GenTree* argTree, GenTreeStack* parentStack);
//=========================================================================
// BasicBlock functions
@@ -2663,18 +2659,18 @@ public:
unsigned lvaLclSize(unsigned varNum);
unsigned lvaLclExactSize(unsigned varNum);
- bool lvaLclVarRefs(GenTreePtr tree, GenTreePtr* findPtr, varRefKinds* refsPtr, void* result);
+ bool lvaLclVarRefs(GenTree* tree, GenTree** findPtr, varRefKinds* refsPtr, void* result);
// Call lvaLclVarRefs on "true"; accumulate "*result" into whichever of
// "allVars" and "trkdVars" is indiated by the nullness of "findPtr"; return
// the return result.
bool lvaLclVarRefsAccum(
- GenTreePtr tree, GenTreePtr* findPtr, varRefKinds* refsPtr, ALLVARSET_TP* allVars, VARSET_TP* trkdVars);
+ GenTree* tree, GenTree** findPtr, varRefKinds* refsPtr, ALLVARSET_TP* allVars, VARSET_TP* trkdVars);
// If "findPtr" is non-NULL, assumes "result" is an "ALLVARSET_TP*", and
// (destructively) unions "allVars" into "*result". Otherwise, assumes "result" is a "VARSET_TP*",
// and (destructively) unions "trkdVars" into "*result".
- void lvaLclVarRefsAccumIntoRes(GenTreePtr* findPtr,
+ void lvaLclVarRefsAccumIntoRes(GenTree** findPtr,
void* result,
ALLVARSET_VALARG_TP allVars,
VARSET_VALARG_TP trkdVars);
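The accumulate-into-result convention spelled out above, where the meaning of "result" flips on the nullness of "findPtr", is easy to misread, so here is a tiny stand-alone illustration. VarSet and AccumIntoRes are invented stand-ins for ALLVARSET_TP/VARSET_TP and lvaLclVarRefsAccumIntoRes; this sketches the calling convention only.

    #include <set>

    struct Node; // opaque stand-in for GenTree

    typedef std::set<unsigned> VarSet; // stands in for both ALLVARSET_TP and VARSET_TP

    // When "findPtr" was supplied, the caller's "result" is the all-variables set;
    // otherwise it is the tracked-variables set.
    static void AccumIntoRes(Node** findPtr, void* result, const VarSet& allVars, const VarSet& trkdVars)
    {
        VarSet* dst = static_cast<VarSet*>(result);
        if (findPtr != nullptr)
        {
            dst->insert(allVars.begin(), allVars.end());
        }
        else
        {
            dst->insert(trkdVars.begin(), trkdVars.end());
        }
    }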
@@ -2695,14 +2691,14 @@ public:
void lvaAllocOutgoingArgSpaceVar(); // Set up lvaOutgoingArgSpaceVar
- VARSET_VALRET_TP lvaStmtLclMask(GenTreePtr stmt);
+ VARSET_VALRET_TP lvaStmtLclMask(GenTree* stmt);
- void lvaIncRefCnts(GenTreePtr tree);
- void lvaDecRefCnts(GenTreePtr tree);
+ void lvaIncRefCnts(GenTree* tree);
+ void lvaDecRefCnts(GenTree* tree);
- void lvaDecRefCnts(BasicBlock* basicBlock, GenTreePtr tree);
- void lvaRecursiveDecRefCounts(GenTreePtr tree);
- void lvaRecursiveIncRefCounts(GenTreePtr tree);
+ void lvaDecRefCnts(BasicBlock* basicBlock, GenTree* tree);
+ void lvaRecursiveDecRefCounts(GenTree* tree);
+ void lvaRecursiveIncRefCounts(GenTree* tree);
#ifdef DEBUG
struct lvaStressLclFldArgs
@@ -2757,9 +2753,9 @@ public:
// If the local is TYP_REF, set or update the associated class information.
void lvaSetClass(unsigned varNum, CORINFO_CLASS_HANDLE clsHnd, bool isExact = false);
- void lvaSetClass(unsigned varNum, GenTreePtr tree, CORINFO_CLASS_HANDLE stackHandle = nullptr);
+ void lvaSetClass(unsigned varNum, GenTree* tree, CORINFO_CLASS_HANDLE stackHandle = nullptr);
void lvaUpdateClass(unsigned varNum, CORINFO_CLASS_HANDLE clsHnd, bool isExact = false);
- void lvaUpdateClass(unsigned varNum, GenTreePtr tree, CORINFO_CLASS_HANDLE stackHandle = nullptr);
+ void lvaUpdateClass(unsigned varNum, GenTree* tree, CORINFO_CLASS_HANDLE stackHandle = nullptr);
#define MAX_NumOfFieldsInPromotableStruct 4 // Maximum number of fields in promotable struct
@@ -2872,11 +2868,11 @@ protected:
#if ASSERTION_PROP
BasicBlock* lvaMarkRefsCurBlock;
- GenTreePtr lvaMarkRefsCurStmt;
+ GenTree* lvaMarkRefsCurStmt;
#endif
BasicBlock::weight_t lvaMarkRefsWeight;
- void lvaMarkLclRefs(GenTreePtr tree);
+ void lvaMarkLclRefs(GenTree* tree);
bool IsDominatedByExceptionalEntry(BasicBlock* block);
void SetVolatileHint(LclVarDsc* varDsc);
@@ -2952,7 +2948,7 @@ protected:
void impResolveToken(const BYTE* addr, CORINFO_RESOLVED_TOKEN* pResolvedToken, CorInfoTokenKind kind);
- void impPushOnStack(GenTreePtr tree, typeInfo ti);
+ void impPushOnStack(GenTree* tree, typeInfo ti);
void impPushNullObjRefOnStack();
StackEntry impPopStack();
StackEntry& impStackTop(unsigned n = 0);
@@ -2961,9 +2957,7 @@ protected:
void impSaveStackState(SavedStack* savePtr, bool copy);
void impRestoreStackState(SavedStack* savePtr);
- GenTreePtr impImportLdvirtftn(GenTreePtr thisPtr,
- CORINFO_RESOLVED_TOKEN* pResolvedToken,
- CORINFO_CALL_INFO* pCallInfo);
+ GenTree* impImportLdvirtftn(GenTree* thisPtr, CORINFO_RESOLVED_TOKEN* pResolvedToken, CORINFO_CALL_INFO* pCallInfo);
void impImportAndPushBox(CORINFO_RESOLVED_TOKEN* pResolvedToken);
@@ -2974,7 +2968,7 @@ protected:
void impCheckForPInvokeCall(
GenTreeCall* call, CORINFO_METHOD_HANDLE methHnd, CORINFO_SIG_INFO* sig, unsigned mflags, BasicBlock* block);
GenTreeCall* impImportIndirectCall(CORINFO_SIG_INFO* sig, IL_OFFSETX ilOffset = BAD_IL_OFFSET);
- void impPopArgsForUnmanagedCall(GenTreePtr call, CORINFO_SIG_INFO* sig);
+ void impPopArgsForUnmanagedCall(GenTree* call, CORINFO_SIG_INFO* sig);
void impInsertHelperCall(CORINFO_HELPER_DESC* helperCall);
void impHandleAccessAllowed(CorInfoIsAccessAllowedResult result, CORINFO_HELPER_DESC* helperCall);
@@ -2984,7 +2978,7 @@ protected:
CORINFO_RESOLVED_TOKEN* pResolvedToken,
CORINFO_RESOLVED_TOKEN* pConstrainedResolvedToken, // Is this a "constrained." call on a
// type parameter?
- GenTreePtr newobjThis,
+ GenTree* newobjThis,
int prefixFlags,
CORINFO_CALL_INFO* callInfo,
IL_OFFSET rawILOffset);
@@ -2999,28 +2993,28 @@ protected:
bool impMethodInfo_hasRetBuffArg(CORINFO_METHOD_INFO* methInfo);
- GenTreePtr impFixupCallStructReturn(GenTreeCall* call, CORINFO_CLASS_HANDLE retClsHnd);
+ GenTree* impFixupCallStructReturn(GenTreeCall* call, CORINFO_CLASS_HANDLE retClsHnd);
- GenTreePtr impFixupStructReturnType(GenTreePtr op, CORINFO_CLASS_HANDLE retClsHnd);
+ GenTree* impFixupStructReturnType(GenTree* op, CORINFO_CLASS_HANDLE retClsHnd);
#ifdef DEBUG
var_types impImportJitTestLabelMark(int numArgs);
#endif // DEBUG
- GenTreePtr impInitClass(CORINFO_RESOLVED_TOKEN* pResolvedToken);
+ GenTree* impInitClass(CORINFO_RESOLVED_TOKEN* pResolvedToken);
- GenTreePtr impImportStaticReadOnlyField(void* fldAddr, var_types lclTyp);
+ GenTree* impImportStaticReadOnlyField(void* fldAddr, var_types lclTyp);
- GenTreePtr impImportStaticFieldAccess(CORINFO_RESOLVED_TOKEN* pResolvedToken,
- CORINFO_ACCESS_FLAGS access,
- CORINFO_FIELD_INFO* pFieldInfo,
- var_types lclTyp);
+ GenTree* impImportStaticFieldAccess(CORINFO_RESOLVED_TOKEN* pResolvedToken,
+ CORINFO_ACCESS_FLAGS access,
+ CORINFO_FIELD_INFO* pFieldInfo,
+ var_types lclTyp);
- static void impBashVarAddrsToI(GenTreePtr tree1, GenTreePtr tree2 = nullptr);
+ static void impBashVarAddrsToI(GenTree* tree1, GenTree* tree2 = nullptr);
- GenTreePtr impImplicitIorI4Cast(GenTreePtr tree, var_types dstTyp);
+ GenTree* impImplicitIorI4Cast(GenTree* tree, var_types dstTyp);
- GenTreePtr impImplicitR4orR8Cast(GenTreePtr tree, var_types dstTyp);
+ GenTree* impImplicitR4orR8Cast(GenTree* tree, var_types dstTyp);
void impImportLeave(BasicBlock* block);
void impResetLeaveBlock(BasicBlock* block, unsigned jmpAddr);
@@ -3134,23 +3128,23 @@ protected:
const HWIntrinsicInfo& getHWIntrinsicInfo(NamedIntrinsic);
#endif // _TARGET_ARM64_
#endif // FEATURE_HW_INTRINSICS
- GenTreePtr impArrayAccessIntrinsic(CORINFO_CLASS_HANDLE clsHnd,
- CORINFO_SIG_INFO* sig,
- int memberRef,
- bool readonlyCall,
- CorInfoIntrinsics intrinsicID);
- GenTreePtr impInitializeArrayIntrinsic(CORINFO_SIG_INFO* sig);
+ GenTree* impArrayAccessIntrinsic(CORINFO_CLASS_HANDLE clsHnd,
+ CORINFO_SIG_INFO* sig,
+ int memberRef,
+ bool readonlyCall,
+ CorInfoIntrinsics intrinsicID);
+ GenTree* impInitializeArrayIntrinsic(CORINFO_SIG_INFO* sig);
- GenTreePtr impMethodPointer(CORINFO_RESOLVED_TOKEN* pResolvedToken, CORINFO_CALL_INFO* pCallInfo);
+ GenTree* impMethodPointer(CORINFO_RESOLVED_TOKEN* pResolvedToken, CORINFO_CALL_INFO* pCallInfo);
- GenTreePtr impTransformThis(GenTreePtr thisPtr,
- CORINFO_RESOLVED_TOKEN* pConstrainedResolvedToken,
- CORINFO_THIS_TRANSFORM transform);
+ GenTree* impTransformThis(GenTree* thisPtr,
+ CORINFO_RESOLVED_TOKEN* pConstrainedResolvedToken,
+ CORINFO_THIS_TRANSFORM transform);
//----------------- Manipulating the trees and stmts ----------------------
- GenTreePtr impTreeList; // Trees for the BB being imported
- GenTreePtr impTreeLast; // The last tree for the current BB
+ GenTree* impTreeList; // Trees for the BB being imported
+ GenTree* impTreeLast; // The last tree for the current BB
public:
enum
@@ -3160,83 +3154,80 @@ public:
};
void impBeginTreeList();
- void impEndTreeList(BasicBlock* block, GenTreePtr firstStmt, GenTreePtr lastStmt);
+ void impEndTreeList(BasicBlock* block, GenTree* firstStmt, GenTree* lastStmt);
void impEndTreeList(BasicBlock* block);
- void impAppendStmtCheck(GenTreePtr stmt, unsigned chkLevel);
- void impAppendStmt(GenTreePtr stmt, unsigned chkLevel);
- void impInsertStmtBefore(GenTreePtr stmt, GenTreePtr stmtBefore);
- GenTreePtr impAppendTree(GenTreePtr tree, unsigned chkLevel, IL_OFFSETX offset);
- void impInsertTreeBefore(GenTreePtr tree, IL_OFFSETX offset, GenTreePtr stmtBefore);
+ void impAppendStmtCheck(GenTree* stmt, unsigned chkLevel);
+ void impAppendStmt(GenTree* stmt, unsigned chkLevel);
+ void impInsertStmtBefore(GenTree* stmt, GenTree* stmtBefore);
+ GenTree* impAppendTree(GenTree* tree, unsigned chkLevel, IL_OFFSETX offset);
+ void impInsertTreeBefore(GenTree* tree, IL_OFFSETX offset, GenTree* stmtBefore);
void impAssignTempGen(unsigned tmp,
- GenTreePtr val,
+ GenTree* val,
unsigned curLevel,
- GenTreePtr* pAfterStmt = nullptr,
+ GenTree** pAfterStmt = nullptr,
IL_OFFSETX ilOffset = BAD_IL_OFFSET,
BasicBlock* block = nullptr);
void impAssignTempGen(unsigned tmpNum,
- GenTreePtr val,
+ GenTree* val,
CORINFO_CLASS_HANDLE structHnd,
unsigned curLevel,
- GenTreePtr* pAfterStmt = nullptr,
+ GenTree** pAfterStmt = nullptr,
IL_OFFSETX ilOffset = BAD_IL_OFFSET,
BasicBlock* block = nullptr);
- GenTreePtr impCloneExpr(GenTreePtr tree,
- GenTreePtr* clone,
- CORINFO_CLASS_HANDLE structHnd,
- unsigned curLevel,
- GenTreePtr* pAfterStmt DEBUGARG(const char* reason));
- GenTreePtr impAssignStruct(GenTreePtr dest,
- GenTreePtr src,
- CORINFO_CLASS_HANDLE structHnd,
- unsigned curLevel,
- GenTreePtr* pAfterStmt = nullptr,
- BasicBlock* block = nullptr);
- GenTreePtr impAssignStructPtr(GenTreePtr dest,
- GenTreePtr src,
- CORINFO_CLASS_HANDLE structHnd,
- unsigned curLevel,
- GenTreePtr* pAfterStmt = nullptr,
- BasicBlock* block = nullptr);
-
- GenTreePtr impGetStructAddr(GenTreePtr structVal,
+ GenTree* impCloneExpr(GenTree* tree,
+ GenTree** clone,
+ CORINFO_CLASS_HANDLE structHnd,
+ unsigned curLevel,
+ GenTree** pAfterStmt DEBUGARG(const char* reason));
+ GenTree* impAssignStruct(GenTree* dest,
+ GenTree* src,
+ CORINFO_CLASS_HANDLE structHnd,
+ unsigned curLevel,
+ GenTree** pAfterStmt = nullptr,
+ BasicBlock* block = nullptr);
+ GenTree* impAssignStructPtr(GenTree* dest,
+ GenTree* src,
CORINFO_CLASS_HANDLE structHnd,
unsigned curLevel,
- bool willDeref);
+ GenTree** pAfterStmt = nullptr,
+ BasicBlock* block = nullptr);
+
+ GenTree* impGetStructAddr(GenTree* structVal, CORINFO_CLASS_HANDLE structHnd, unsigned curLevel, bool willDeref);
var_types impNormStructType(CORINFO_CLASS_HANDLE structHnd,
BYTE* gcLayout = nullptr,
unsigned* numGCVars = nullptr,
var_types* simdBaseType = nullptr);
- GenTreePtr impNormStructVal(GenTreePtr structVal,
- CORINFO_CLASS_HANDLE structHnd,
- unsigned curLevel,
- bool forceNormalization = false);
+ GenTree* impNormStructVal(GenTree* structVal,
+ CORINFO_CLASS_HANDLE structHnd,
+ unsigned curLevel,
+ bool forceNormalization = false);
- GenTreePtr impTokenToHandle(CORINFO_RESOLVED_TOKEN* pResolvedToken,
- BOOL* pRuntimeLookup = nullptr,
- BOOL mustRestoreHandle = FALSE,
- BOOL importParent = FALSE);
+ GenTree* impTokenToHandle(CORINFO_RESOLVED_TOKEN* pResolvedToken,
+ BOOL* pRuntimeLookup = nullptr,
+ BOOL mustRestoreHandle = FALSE,
+ BOOL importParent = FALSE);
- GenTreePtr impParentClassTokenToHandle(CORINFO_RESOLVED_TOKEN* pResolvedToken,
- BOOL* pRuntimeLookup = nullptr,
- BOOL mustRestoreHandle = FALSE)
+ GenTree* impParentClassTokenToHandle(CORINFO_RESOLVED_TOKEN* pResolvedToken,
+ BOOL* pRuntimeLookup = nullptr,
+ BOOL mustRestoreHandle = FALSE)
{
return impTokenToHandle(pResolvedToken, pRuntimeLookup, mustRestoreHandle, TRUE);
}
- GenTreePtr impLookupToTree(CORINFO_RESOLVED_TOKEN* pResolvedToken,
- CORINFO_LOOKUP* pLookup,
- unsigned flags,
- void* compileTimeHandle);
+ GenTree* impLookupToTree(CORINFO_RESOLVED_TOKEN* pResolvedToken,
+ CORINFO_LOOKUP* pLookup,
+ unsigned flags,
+ void* compileTimeHandle);
- GenTreePtr getRuntimeContextTree(CORINFO_RUNTIME_LOOKUP_KIND kind);
+ GenTree* getRuntimeContextTree(CORINFO_RUNTIME_LOOKUP_KIND kind);
- GenTreePtr impRuntimeLookupToTree(CORINFO_RESOLVED_TOKEN* pResolvedToken,
- CORINFO_LOOKUP* pLookup,
- void* compileTimeHandle);
+ GenTree* impRuntimeLookupToTree(CORINFO_RESOLVED_TOKEN* pResolvedToken,
+ CORINFO_LOOKUP* pLookup,
+ void* compileTimeHandle);
- GenTreePtr impReadyToRunLookupToTree(CORINFO_CONST_LOOKUP* pLookup, unsigned flags, void* compileTimeHandle);
+ GenTree* impReadyToRunLookupToTree(CORINFO_CONST_LOOKUP* pLookup, unsigned flags, void* compileTimeHandle);
GenTreeCall* impReadyToRunHelperToTree(CORINFO_RESOLVED_TOKEN* pResolvedToken,
CorInfoHelpFunc helper,
@@ -3244,10 +3235,10 @@ public:
GenTreeArgList* arg = nullptr,
CORINFO_LOOKUP_KIND* pGenericLookupKind = nullptr);
- GenTreePtr impCastClassOrIsInstToTree(GenTreePtr op1,
- GenTreePtr op2,
- CORINFO_RESOLVED_TOKEN* pResolvedToken,
- bool isCastClass);
+ GenTree* impCastClassOrIsInstToTree(GenTree* op1,
+ GenTree* op2,
+ CORINFO_RESOLVED_TOKEN* pResolvedToken,
+ bool isCastClass);
GenTree* impOptimizeCastClassOrIsInst(GenTree* op1, CORINFO_RESOLVED_TOKEN* pResolvedToken, bool isCastClass);
@@ -3259,7 +3250,7 @@ public:
bool IsIntrinsicImplementedByUserCall(CorInfoIntrinsics intrinsicId);
bool IsTargetIntrinsic(CorInfoIntrinsics intrinsicId);
bool IsMathIntrinsic(CorInfoIntrinsics intrinsicId);
- bool IsMathIntrinsic(GenTreePtr tree);
+ bool IsMathIntrinsic(GenTree* tree);
private:
//----------------- Importing the method ----------------------------------
@@ -3272,8 +3263,8 @@ private:
bool impNestedStackSpill;
// For displaying instrs with generated native code (-n:B)
- GenTreePtr impLastILoffsStmt; // oldest stmt added for which we did not set gtStmtLastILoffs
- void impNoteLastILoffs();
+ GenTree* impLastILoffsStmt; // oldest stmt added for which we did not set gtStmtLastILoffs
+ void impNoteLastILoffs();
#endif
/* IL offset of the stmt currently being imported. It gets set to
@@ -3290,8 +3281,8 @@ private:
unsigned impInitBlockLineInfo();
- GenTreePtr impCheckForNullPointer(GenTreePtr obj);
- bool impIsThis(GenTreePtr obj);
+ GenTree* impCheckForNullPointer(GenTree* obj);
+ bool impIsThis(GenTree* obj);
bool impIsLDFTN_TOKEN(const BYTE* delegateCreateStart, const BYTE* newobjCodeAddr);
bool impIsDUP_LDVIRTFTN_TOKEN(const BYTE* delegateCreateStart, const BYTE* newobjCodeAddr);
bool impIsAnySTLOC(OPCODE opcode)
@@ -3380,7 +3371,7 @@ private:
// for the block, but instead, just re-uses the block's existing EntryState.
void impReimportBlockPending(BasicBlock* block);
- var_types impGetByRefResultType(genTreeOps oper, bool fUnsigned, GenTreePtr* pOp1, GenTreePtr* pOp2);
+ var_types impGetByRefResultType(genTreeOps oper, bool fUnsigned, GenTree** pOp1, GenTree** pOp2);
void impImportBlock(BasicBlock* block);
@@ -3479,7 +3470,7 @@ private:
bool impReturnInstruction(BasicBlock* block, int prefixFlags, OPCODE& opcode);
#ifdef _TARGET_ARM_
- void impMarkLclDstNotPromotable(unsigned tmpNum, GenTreePtr op, CORINFO_CLASS_HANDLE hClass);
+ void impMarkLclDstNotPromotable(unsigned tmpNum, GenTree* op, CORINFO_CLASS_HANDLE hClass);
#endif
// A free list of linked list nodes used to represent to-do stacks of basic blocks.
@@ -3513,7 +3504,7 @@ private:
static LONG jitNestingLevel;
#endif // DEBUG
- static BOOL impIsAddressInLocal(GenTreePtr tree, GenTreePtr* lclVarTreeOut);
+ static BOOL impIsAddressInLocal(GenTree* tree, GenTree** lclVarTreeOut);
void impMakeDiscretionaryInlineObservations(InlineInfo* pInlineInfo, InlineResult* inlineResult);
@@ -3523,7 +3514,7 @@ private:
bool forceInline,
InlineResult* inlineResult);
- void impCheckCanInline(GenTreePtr call,
+ void impCheckCanInline(GenTree* call,
CORINFO_METHOD_HANDLE fncHandle,
unsigned methAttr,
CORINFO_CONTEXT_HANDLE exactContextHnd,
@@ -3531,7 +3522,7 @@ private:
InlineResult* inlineResult);
void impInlineRecordArgInfo(InlineInfo* pInlineInfo,
- GenTreePtr curArgVal,
+ GenTree* curArgVal,
unsigned argNum,
InlineResult* inlineResult);
@@ -3539,15 +3530,15 @@ private:
unsigned impInlineFetchLocal(unsigned lclNum DEBUGARG(const char* reason));
- GenTreePtr impInlineFetchArg(unsigned lclNum, InlArgInfo* inlArgInfo, InlLclVarInfo* lclTypeInfo);
+ GenTree* impInlineFetchArg(unsigned lclNum, InlArgInfo* inlArgInfo, InlLclVarInfo* lclTypeInfo);
- BOOL impInlineIsThis(GenTreePtr tree, InlArgInfo* inlArgInfo);
+ BOOL impInlineIsThis(GenTree* tree, InlArgInfo* inlArgInfo);
- BOOL impInlineIsGuaranteedThisDerefBeforeAnySideEffects(GenTreePtr additionalTreesToBeEvaluatedBefore,
- GenTreePtr variableBeingDereferenced,
+ BOOL impInlineIsGuaranteedThisDerefBeforeAnySideEffects(GenTree* additionalTreesToBeEvaluatedBefore,
+ GenTree* variableBeingDereferenced,
InlArgInfo* inlArgInfo);
- void impMarkInlineCandidate(GenTreePtr call,
+ void impMarkInlineCandidate(GenTree* call,
CORINFO_CONTEXT_HANDLE exactContextHnd,
bool exactContextNeedsRuntimeLookup,
CORINFO_CALL_INFO* callInfo);
@@ -3827,7 +3818,7 @@ public:
BasicBlock* handler,
BlockToBlockMap& continuationMap);
- GenTreePtr fgGetCritSectOfStaticMethod();
+ GenTree* fgGetCritSectOfStaticMethod();
#if FEATURE_EH_FUNCLETS
@@ -3864,9 +3855,9 @@ public:
#ifdef DEBUG
static fgWalkPreFn fgAssertNoQmark;
- void fgPreExpandQmarkChecks(GenTreePtr expr);
+ void fgPreExpandQmarkChecks(GenTree* expr);
void fgPostExpandQmarkChecks();
- static void fgCheckQmarkAllowedForm(GenTreePtr tree);
+ static void fgCheckQmarkAllowedForm(GenTree* tree);
#endif
IL_OFFSET fgFindBlockILOffset(BasicBlock* block);
@@ -3877,14 +3868,14 @@ public:
BasicBlock* fgSplitBlockAfterNode(BasicBlock* curr, GenTree* node); // for LIR
BasicBlock* fgSplitEdge(BasicBlock* curr, BasicBlock* succ);
- GenTreeStmt* fgNewStmtFromTree(GenTreePtr tree, BasicBlock* block, IL_OFFSETX offs);
- GenTreeStmt* fgNewStmtFromTree(GenTreePtr tree);
- GenTreeStmt* fgNewStmtFromTree(GenTreePtr tree, BasicBlock* block);
- GenTreeStmt* fgNewStmtFromTree(GenTreePtr tree, IL_OFFSETX offs);
+ GenTreeStmt* fgNewStmtFromTree(GenTree* tree, BasicBlock* block, IL_OFFSETX offs);
+ GenTreeStmt* fgNewStmtFromTree(GenTree* tree);
+ GenTreeStmt* fgNewStmtFromTree(GenTree* tree, BasicBlock* block);
+ GenTreeStmt* fgNewStmtFromTree(GenTree* tree, IL_OFFSETX offs);
- GenTreePtr fgGetTopLevelQmark(GenTreePtr expr, GenTreePtr* ppDst = nullptr);
- void fgExpandQmarkForCastInstOf(BasicBlock* block, GenTreePtr stmt);
- void fgExpandQmarkStmt(BasicBlock* block, GenTreePtr expr);
+ GenTree* fgGetTopLevelQmark(GenTree* expr, GenTree** ppDst = nullptr);
+ void fgExpandQmarkForCastInstOf(BasicBlock* block, GenTree* stmt);
+ void fgExpandQmarkStmt(BasicBlock* block, GenTree* expr);
void fgExpandQmarkNodes();
void fgMorph();
@@ -3894,10 +3885,10 @@ public:
void fgSimpleLowering();
#ifdef LEGACY_BACKEND
- bool fgShouldCreateAssignOp(GenTreePtr tree, bool* bReverse);
+ bool fgShouldCreateAssignOp(GenTree* tree, bool* bReverse);
#endif
- GenTreePtr fgInitThisClass();
+ GenTree* fgInitThisClass();
GenTreeCall* fgGetStaticsCCtorHelper(CORINFO_CLASS_HANDLE cls, CorInfoHelpFunc helper);
@@ -3917,7 +3908,7 @@ public:
void fgLocalVarLivenessInit();
#ifdef LEGACY_BACKEND
- GenTreePtr fgLegacyPerStatementLocalVarLiveness(GenTreePtr startNode, GenTreePtr relopNode);
+ GenTree* fgLegacyPerStatementLocalVarLiveness(GenTree* startNode, GenTree* relopNode);
#else
void fgPerNodeLocalVarLiveness(GenTree* node);
#endif
@@ -3938,9 +3929,9 @@ public:
bool fgMarkIntf(VARSET_VALARG_TP varSet1, unsigned varIndex);
- void fgUpdateRefCntForClone(BasicBlock* addedToBlock, GenTreePtr clonedTree);
+ void fgUpdateRefCntForClone(BasicBlock* addedToBlock, GenTree* clonedTree);
- void fgUpdateRefCntForExtract(GenTreePtr wholeTree, GenTreePtr keptTree);
+ void fgUpdateRefCntForExtract(GenTree* wholeTree, GenTree* keptTree);
void fgComputeLifeCall(VARSET_TP& life, GenTreeCall* call);
@@ -3957,8 +3948,8 @@ public:
bool fgComputeLifeLocal(VARSET_TP& life, VARSET_VALARG_TP keepAliveVars, GenTree* lclVarNode, GenTree* node);
void fgComputeLife(VARSET_TP& life,
- GenTreePtr startNode,
- GenTreePtr endNode,
+ GenTree* startNode,
+ GenTree* endNode,
VARSET_VALARG_TP volatileVars,
bool* pStmtInfoDirty DEBUGARG(bool* treeModf));
@@ -3971,15 +3962,15 @@ public:
bool* pStmtInfoDirty DEBUGARG(bool* treeModf));
// For updating liveset during traversal AFTER fgComputeLife has completed
- VARSET_VALRET_TP fgGetVarBits(GenTreePtr tree);
- VARSET_VALRET_TP fgUpdateLiveSet(VARSET_VALARG_TP liveSet, GenTreePtr tree);
+ VARSET_VALRET_TP fgGetVarBits(GenTree* tree);
+ VARSET_VALRET_TP fgUpdateLiveSet(VARSET_VALARG_TP liveSet, GenTree* tree);
// Returns the set of live variables after endTree,
// assuming that liveSet is the set of live variables BEFORE tree.
// Requires that fgComputeLife has completed, and that tree is in the same
// statement as endTree, and that it comes before endTree in execution order
- VARSET_VALRET_TP fgUpdateLiveSet(VARSET_VALARG_TP liveSet, GenTreePtr tree, GenTreePtr endTree)
+ VARSET_VALRET_TP fgUpdateLiveSet(VARSET_VALARG_TP liveSet, GenTree* tree, GenTree* endTree)
{
VARSET_TP newLiveSet(VarSetOps::MakeCopy(this, liveSet));
while (tree != nullptr && tree != endTree->gtNext)
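The inline overload above just copies the incoming set and then walks the gtNext chain from tree up to and including endTree, folding each node into the running live set. Below is a self-contained sketch of that loop; Node, isDef, and isLastUse are invented, and the per-node transfer function is deliberately simplified (the real fgUpdateLiveSet consults the node's variable-lifetime flags and tracked-variable state).

    #include <set>

    struct Node
    {
        Node*    gtNext;
        unsigned lclNum;
        bool     isDef;     // this node defines lclNum
        bool     isLastUse; // this node is the last use of lclNum
    };

    typedef std::set<unsigned> VarSet;

    // Toy per-node transfer: a variable becomes live after its definition and
    // dies after its last use.
    static VarSet UpdateLiveSet(const VarSet& liveSet, Node* tree)
    {
        VarSet newSet = liveSet;
        if (tree->isDef)
        {
            newSet.insert(tree->lclNum);
        }
        else if (tree->isLastUse)
        {
            newSet.erase(tree->lclNum);
        }
        return newSet;
    }

    // Same control flow as the inline overload shown above: advance node by
    // node until just past endTree.
    static VarSet UpdateLiveSet(const VarSet& liveSet, Node* tree, Node* endTree)
    {
        VarSet newLiveSet = liveSet;
        while (tree != nullptr && tree != endTree->gtNext)
        {
            newLiveSet = UpdateLiveSet(newLiveSet, tree);
            tree       = tree->gtNext;
        }
        return newLiveSet;
    }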
@@ -3997,7 +3988,7 @@ public:
// "x", and a def of a new SSA name for "x". The tree only has one local variable for "x", so it has to choose
// whether to treat that as the use or def. It chooses the "use", and thus the old SSA name. This map allows us
// to record/recover the "def" SSA number, given the lcl var node for "x" in such a tree.
- typedef JitHashTable<GenTreePtr, JitPtrKeyFuncs<GenTree>, unsigned> NodeToUnsignedMap;
+ typedef JitHashTable<GenTree*, JitPtrKeyFuncs<GenTree>, unsigned> NodeToUnsignedMap;
NodeToUnsignedMap* m_opAsgnVarDefSsaNums;
NodeToUnsignedMap* GetOpAsgnVarDefSsaNums()
{
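GetOpAsgnVarDefSsaNums, whose body is cut off at the hunk boundary here, appears to be a lazily allocated side table keyed by node pointer, used to record the "def" SSA number described in the comment above. A rough stand-alone approximation follows, with std::unordered_map standing in for JitHashTable; SsaDefSideTable, RecordDefSsaNum, and TryGetDefSsaNum are names invented for the sketch.

    #include <unordered_map>

    struct Node; // opaque stand-in for GenTree

    class SsaDefSideTable
    {
        typedef std::unordered_map<Node*, unsigned> NodeToUnsignedMap;

        NodeToUnsignedMap* m_opAsgnVarDefSsaNums = nullptr;

    public:
        // Allocate the map on first use (never freed in this sketch).
        NodeToUnsignedMap* GetOpAsgnVarDefSsaNums()
        {
            if (m_opAsgnVarDefSsaNums == nullptr)
            {
                m_opAsgnVarDefSsaNums = new NodeToUnsignedMap();
            }
            return m_opAsgnVarDefSsaNums;
        }

        // Record the "def" SSA number for a node whose embedded SSA number is the "use".
        void RecordDefSsaNum(Node* lcl, unsigned defSsaNum)
        {
            (*GetOpAsgnVarDefSsaNums())[lcl] = defSsaNum;
        }

        // Recover it later; returns false if no def number was recorded for this node.
        bool TryGetDefSsaNum(Node* lcl, unsigned* pSsaNum) const
        {
            if (m_opAsgnVarDefSsaNums == nullptr)
            {
                return false;
            }
            auto it = m_opAsgnVarDefSsaNums->find(lcl);
            if (it == m_opAsgnVarDefSsaNums->end())
            {
                return false;
            }
            *pSsaNum = it->second;
            return true;
        }
    };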
@@ -4012,13 +4003,13 @@ public:
// "tree," EXCEPT in the case of GTF_VAR_USEASG, because the tree node's gtVN member is the
// "use" VN. Performs a lookup into the map of (use asg tree -> def VN.) to return the "def's"
// VN.
- inline ValueNum GetUseAsgDefVNOrTreeVN(GenTreePtr tree);
+ inline ValueNum GetUseAsgDefVNOrTreeVN(GenTree* tree);
// Requires that "lcl" has the GTF_VAR_DEF flag set. Returns the SSA number of "lcl".
// Except: assumes that lcl is a def, and if it is
// a def appearing in "lcl op= rhs" (GTF_VAR_USEASG), looks up and returns the SSA number for the "def",
// rather than the "use" SSA number recorded in the tree "lcl".
- inline unsigned GetSsaNumForLocalVarDef(GenTreePtr lcl);
+ inline unsigned GetSsaNumForLocalVarDef(GenTree* lcl);
// Some assignments assign to a local "indirectly": they are part of a comma expression that takes the address
// of the local (or a field thereof), assigns this address to a temp, and uses an indirection of this temp as
@@ -4059,7 +4050,7 @@ public:
{
}
};
- typedef JitHashTable<GenTreePtr, JitPtrKeyFuncs<GenTree>, IndirectAssignmentAnnotation*> NodeToIndirAssignMap;
+ typedef JitHashTable<GenTree*, JitPtrKeyFuncs<GenTree>, IndirectAssignmentAnnotation*> NodeToIndirAssignMap;
NodeToIndirAssignMap* m_indirAssignMap;
NodeToIndirAssignMap* GetIndirAssignMap()
{
@@ -4121,7 +4112,7 @@ public:
// The type tree->TypeGet() will typically match the element type of the array or fldSeq.
// When this type doesn't match or if the fldSeq is 'NotAField' we return a new unique VN
//
- ValueNum fgValueNumberArrIndexVal(GenTreePtr tree,
+ ValueNum fgValueNumberArrIndexVal(GenTree* tree,
CORINFO_CLASS_HANDLE elemTypeEq,
ValueNum arrVN,
ValueNum inxVN,
@@ -4132,7 +4123,7 @@ public:
// by evaluating the array index expression "tree". Returns the value number resulting from
// dereferencing the array in the current GcHeap state. If "tree" is non-null, it must be the
// "GT_IND" that does the dereference, and it is given the returned value number.
- ValueNum fgValueNumberArrIndexVal(GenTreePtr tree, struct VNFuncApp* funcApp, ValueNum addrXvn);
+ ValueNum fgValueNumberArrIndexVal(GenTree* tree, struct VNFuncApp* funcApp, ValueNum addrXvn);
// Compute the value number for a byref-exposed load of the given type via the given pointerVN.
ValueNum fgValueNumberByrefExposedLoad(var_types type, ValueNum pointerVN);
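The two fgValueNumberArrIndexVal overloads above boil down to mapping an (element type, array VN, index VN, field sequence) key to a value number, and handing out a brand-new unique VN when the type doesn't match or the field sequence is 'NotAField'. The memoized toy below captures only that keying idea; ToyValueNumStore, UniqueVN, and VNForArrElem are invented names and bear no relation to the JIT's actual value-numbering interface.

    #include <cstdint>
    #include <map>
    #include <tuple>

    typedef uint32_t ValueNum;

    class ToyValueNumStore
    {
        // (elemType, arrVN, inxVN, fldSeq) -> ValueNum
        typedef std::tuple<int, ValueNum, ValueNum, int> ArrElemKey;

        std::map<ArrElemKey, ValueNum> m_arrElemMap;
        ValueNum                       m_nextVN = 1;

    public:
        // A fresh VN, equal to nothing handed out before.
        ValueNum UniqueVN()
        {
            return m_nextVN++;
        }

        // Identical keys get identical VNs; a type mismatch falls back to a unique VN.
        ValueNum VNForArrElem(bool typeMatches, int elemType, ValueNum arrVN, ValueNum inxVN, int fldSeq)
        {
            if (!typeMatches)
            {
                return UniqueVN();
            }
            ArrElemKey key(elemType, arrVN, inxVN, fldSeq);
            auto       it = m_arrElemMap.find(key);
            if (it != m_arrElemMap.end())
            {
                return it->second;
            }
            ValueNum vn       = UniqueVN();
            m_arrElemMap[key] = vn;
            return vn;
        }
    };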
@@ -4151,42 +4142,42 @@ public:
// Called when an operation (performed by "tree", described by "msg") may cause the GcHeap to be mutated.
// As GcHeap is a subset of ByrefExposed, this will also annotate the ByrefExposed mutation.
- void fgMutateGcHeap(GenTreePtr tree DEBUGARG(const char* msg));
+ void fgMutateGcHeap(GenTree* tree DEBUGARG(const char* msg));
// Called when an operation (performed by "tree", described by "msg") may cause an address-exposed local to be
// mutated.
- void fgMutateAddressExposedLocal(GenTreePtr tree DEBUGARG(const char* msg));
+ void fgMutateAddressExposedLocal(GenTree* tree DEBUGARG(const char* msg));
// For a GC heap store at curTree, record the new curMemoryVN's and update curTree's MemorySsaMap.
// As GcHeap is a subset of ByrefExposed, this will also record the ByrefExposed store.
- void recordGcHeapStore(GenTreePtr curTree, ValueNum gcHeapVN DEBUGARG(const char* msg));
+ void recordGcHeapStore(GenTree* curTree, ValueNum gcHeapVN DEBUGARG(const char* msg));
// For a store to an address-exposed local at curTree, record the new curMemoryVN and update curTree's MemorySsaMap.
- void recordAddressExposedLocalStore(GenTreePtr curTree, ValueNum memoryVN DEBUGARG(const char* msg));
+ void recordAddressExposedLocalStore(GenTree* curTree, ValueNum memoryVN DEBUGARG(const char* msg));
// Tree caused an update in the current memory VN. If "tree" has an associated heap SSA #, record that
// value in that SSA #.
- void fgValueNumberRecordMemorySsa(MemoryKind memoryKind, GenTreePtr tree);
+ void fgValueNumberRecordMemorySsa(MemoryKind memoryKind, GenTree* tree);
// The input 'tree' is a leaf node that is a constant
// Assign the proper value number to the tree
- void fgValueNumberTreeConst(GenTreePtr tree);
+ void fgValueNumberTreeConst(GenTree* tree);
// Assumes that all inputs to "tree" have had value numbers assigned; assigns a VN to tree.
// (With some exceptions: the VN of the lhs of an assignment is assigned as part of the
// assignment.)
// If "evalAsgLhsInd" is true, evaluate a GT_IND node, even if it's labeled as the LHS of
// an assignment.
- void fgValueNumberTree(GenTreePtr tree, bool evalAsgLhsInd = false);
+ void fgValueNumberTree(GenTree* tree, bool evalAsgLhsInd = false);
// Does value-numbering for a block assignment.
- void fgValueNumberBlockAssignment(GenTreePtr tree, bool evalAsgLhsInd);
+ void fgValueNumberBlockAssignment(GenTree* tree, bool evalAsgLhsInd);
// Does value-numbering for a cast tree.
- void fgValueNumberCastTree(GenTreePtr tree);
+ void fgValueNumberCastTree(GenTree* tree);
// Does value-numbering for an intrinsic tree.
- void fgValueNumberIntrinsic(GenTreePtr tree);
+ void fgValueNumberIntrinsic(GenTree* tree);
// Does value-numbering for a call. We interpret some helper calls.
void fgValueNumberCall(GenTreeCall* call);
@@ -4500,9 +4491,9 @@ public:
void fgRemoveEmptyBlocks();
- void fgRemoveStmt(BasicBlock* block, GenTreePtr stmt, bool updateRefCnt = true);
+ void fgRemoveStmt(BasicBlock* block, GenTree* stmt, bool updateRefCnt = true);
- bool fgCheckRemoveStmt(BasicBlock* block, GenTreePtr stmt);
+ bool fgCheckRemoveStmt(BasicBlock* block, GenTree* stmt);
void fgCreateLoopPreHeader(unsigned lnum);
@@ -4598,9 +4589,9 @@ public:
/* Helper code that has been factored out */
inline void fgConvertBBToThrowBB(BasicBlock* block);
- bool fgCastNeeded(GenTreePtr tree, var_types toType);
- GenTreePtr fgDoNormalizeOnStore(GenTreePtr tree);
- GenTreePtr fgMakeTmpArgNode(
+ bool fgCastNeeded(GenTree* tree, var_types toType);
+ GenTree* fgDoNormalizeOnStore(GenTree* tree);
+ GenTree* fgMakeTmpArgNode(
unsigned tmpVarNum FEATURE_UNIX_AMD64_STRUCT_PASSING_ONLY_ARG(const bool passedInRegisters));
// The following check for loops that don't execute calls
@@ -4628,7 +4619,7 @@ public:
void fgTableDispBasicBlock(BasicBlock* block, int ibcColWidth = 0);
void fgDispBasicBlocks(BasicBlock* firstBlock, BasicBlock* lastBlock, bool dumpTrees);
void fgDispBasicBlocks(bool dumpTrees = false);
- void fgDumpStmtTree(GenTreePtr stmt, unsigned bbNum);
+ void fgDumpStmtTree(GenTree* stmt, unsigned bbNum);
void fgDumpBlock(BasicBlock* block);
void fgDumpTrees(BasicBlock* firstBlock, BasicBlock* lastBlock);
@@ -4639,24 +4630,24 @@ public:
void fgDebugCheckBlockLinks();
void fgDebugCheckLinks(bool morphTrees = false);
void fgDebugCheckStmtsList(BasicBlock* block, bool morphTrees);
- void fgDebugCheckNodeLinks(BasicBlock* block, GenTreePtr stmt);
+ void fgDebugCheckNodeLinks(BasicBlock* block, GenTree* stmt);
void fgDebugCheckNodesUniqueness();
- void fgDebugCheckFlags(GenTreePtr tree);
- void fgDebugCheckFlagsHelper(GenTreePtr tree, unsigned treeFlags, unsigned chkFlags);
+ void fgDebugCheckFlags(GenTree* tree);
+ void fgDebugCheckFlagsHelper(GenTree* tree, unsigned treeFlags, unsigned chkFlags);
void fgDebugCheckTryFinallyExits();
#endif
#ifdef LEGACY_BACKEND
- static void fgOrderBlockOps(GenTreePtr tree,
- regMaskTP reg0,
- regMaskTP reg1,
- regMaskTP reg2,
- GenTreePtr* opsPtr, // OUT
- regMaskTP* regsPtr); // OUT
-#endif // LEGACY_BACKEND
-
- static GenTreePtr fgGetFirstNode(GenTreePtr tree);
+ static void fgOrderBlockOps(GenTree* tree,
+ regMaskTP reg0,
+ regMaskTP reg1,
+ regMaskTP reg2,
+ GenTree** opsPtr, // OUT
+ regMaskTP* regsPtr); // OUT
+#endif // LEGACY_BACKEND
+
+ static GenTree* fgGetFirstNode(GenTree* tree);
static bool fgTreeIsInStmt(GenTree* tree, GenTreeStmt* stmt);
void fgTraverseRPO();
@@ -4669,20 +4660,20 @@ public:
fgWalkPostFn* wtpoVisitorFn;
void* pCallbackData; // user-provided data
bool wtprLclsOnly; // whether to only visit lclvar nodes
- GenTreePtr parent; // parent of current node, provided to callback
+ GenTree* parent; // parent of current node, provided to callback
GenTreeStack* parentStack; // stack of parent nodes, if asked for
#ifdef DEBUG
bool printModified; // callback can use this
#endif
};
- fgWalkResult fgWalkTreePre(GenTreePtr* pTree,
+ fgWalkResult fgWalkTreePre(GenTree** pTree,
fgWalkPreFn* visitor,
void* pCallBackData = nullptr,
bool lclVarsOnly = false,
bool computeStack = false);
- fgWalkResult fgWalkTree(GenTreePtr* pTree,
+ fgWalkResult fgWalkTree(GenTree** pTree,
fgWalkPreFn* preVisitor,
fgWalkPostFn* postVisitor,
void* pCallBackData = nullptr);
@@ -4691,7 +4682,7 @@ public:
//----- Postorder
- fgWalkResult fgWalkTreePost(GenTreePtr* pTree,
+ fgWalkResult fgWalkTreePost(GenTree** pTree,
fgWalkPostFn* visitor,
void* pCallBackData = nullptr,
bool computeStack = false);
@@ -4702,9 +4693,9 @@ public:
// returns WALK_SKIP_SUBTREES if GTF_EXCEPT is not set (assumes flags
// properly propagated to parent trees). It returns WALK_CONTINUE
// otherwise.
- static fgWalkResult fgChkThrowCB(GenTreePtr* pTree, Compiler::fgWalkData* data);
- static fgWalkResult fgChkLocAllocCB(GenTreePtr* pTree, Compiler::fgWalkData* data);
- static fgWalkResult fgChkQmarkCB(GenTreePtr* pTree, Compiler::fgWalkData* data);
+ static fgWalkResult fgChkThrowCB(GenTree** pTree, Compiler::fgWalkData* data);
+ static fgWalkResult fgChkLocAllocCB(GenTree** pTree, Compiler::fgWalkData* data);
+ static fgWalkResult fgChkQmarkCB(GenTree** pTree, Compiler::fgWalkData* data);
/**************************************************************************
* PROTECTED
@@ -4792,22 +4783,22 @@ public:
#endif
public:
- GenTreeStmt* fgInsertStmtAtEnd(BasicBlock* block, GenTreePtr node);
+ GenTreeStmt* fgInsertStmtAtEnd(BasicBlock* block, GenTree* node);
public: // Used by linear scan register allocation
- GenTreeStmt* fgInsertStmtNearEnd(BasicBlock* block, GenTreePtr node);
+ GenTreeStmt* fgInsertStmtNearEnd(BasicBlock* block, GenTree* node);
private:
- GenTreePtr fgInsertStmtAtBeg(BasicBlock* block, GenTreePtr stmt);
- GenTreePtr fgInsertStmtAfter(BasicBlock* block, GenTreePtr insertionPoint, GenTreePtr stmt);
+ GenTree* fgInsertStmtAtBeg(BasicBlock* block, GenTree* stmt);
+ GenTree* fgInsertStmtAfter(BasicBlock* block, GenTree* insertionPoint, GenTree* stmt);
public: // Used by linear scan register allocation
- GenTreePtr fgInsertStmtBefore(BasicBlock* block, GenTreePtr insertionPoint, GenTreePtr stmt);
+ GenTree* fgInsertStmtBefore(BasicBlock* block, GenTree* insertionPoint, GenTree* stmt);
private:
- GenTreePtr fgInsertStmtListAfter(BasicBlock* block, GenTreePtr stmtAfter, GenTreePtr stmtList);
+ GenTree* fgInsertStmtListAfter(BasicBlock* block, GenTree* stmtAfter, GenTree* stmtList);
- GenTreePtr fgMorphSplitTree(GenTree** splitPoint, GenTree* stmt, BasicBlock* blk);
+ GenTree* fgMorphSplitTree(GenTree** splitPoint, GenTree* stmt, BasicBlock* blk);
// Create a new temporary variable to hold the result of *ppTree,
// and transform the graph accordingly.
@@ -4816,7 +4807,7 @@ private:
private:
// Recognize a bitwise rotation pattern and convert into a GT_ROL or a GT_ROR node.
- GenTreePtr fgRecognizeAndMorphBitwiseRotation(GenTreePtr tree);
+ GenTree* fgRecognizeAndMorphBitwiseRotation(GenTree* tree);
bool fgOperIsBitwiseRotationRoot(genTreeOps oper);
//-------- Determine the order in which the trees will be evaluated -------
@@ -4827,7 +4818,7 @@ private:
GenTree* fgSetTreeSeq(GenTree* tree, GenTree* prev = nullptr, bool isLIR = false);
void fgSetTreeSeqHelper(GenTree* tree, bool isLIR);
- void fgSetTreeSeqFinish(GenTreePtr tree, bool isLIR);
+ void fgSetTreeSeqFinish(GenTree* tree, bool isLIR);
void fgSetStmtSeq(GenTree* tree);
void fgSetBlockOrder(BasicBlock* block);
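fgSetTreeSeq and fgSetTreeSeqHelper above thread a statement's nodes into evaluation order through their prev/next links (the gtNext chain that fgUpdateLiveSet walks). A minimal post-order threading sketch over an invented two-operand Node follows; the real code additionally handles reversed operand evaluation, calls, and LIR.

    struct Node
    {
        Node* op1;
        Node* op2;
        Node* gtPrev;
        Node* gtNext;
    };

    // Sequences the operands first, then appends 'tree' after 'prev' in the
    // evaluation-order chain; returns the new chain tail.
    static Node* SetTreeSeq(Node* tree, Node* prev)
    {
        if (tree == nullptr)
        {
            return prev;
        }
        prev = SetTreeSeq(tree->op1, prev);
        prev = SetTreeSeq(tree->op2, prev);

        tree->gtPrev = prev;
        tree->gtNext = nullptr;
        if (prev != nullptr)
        {
            prev->gtNext = tree;
        }
        return tree;
    }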
@@ -4840,28 +4831,28 @@ private:
bool compCanEncodePtrArgCntMax();
- void fgSetRngChkTarget(GenTreePtr tree, bool delay = true);
+ void fgSetRngChkTarget(GenTree* tree, bool delay = true);
BasicBlock* fgSetRngChkTargetInner(SpecialCodeKind kind, bool delay, unsigned* stkDepth);
#if REARRANGE_ADDS
- void fgMoveOpsLeft(GenTreePtr tree);
+ void fgMoveOpsLeft(GenTree* tree);
#endif
- bool fgIsCommaThrow(GenTreePtr tree, bool forFolding = false);
+ bool fgIsCommaThrow(GenTree* tree, bool forFolding = false);
- bool fgIsThrow(GenTreePtr tree);
+ bool fgIsThrow(GenTree* tree);
bool fgInDifferentRegions(BasicBlock* blk1, BasicBlock* blk2);
bool fgIsBlockCold(BasicBlock* block);
- GenTreePtr fgMorphCastIntoHelper(GenTreePtr tree, int helper, GenTreePtr oper);
+ GenTree* fgMorphCastIntoHelper(GenTree* tree, int helper, GenTree* oper);
- GenTreePtr fgMorphIntoHelperCall(GenTreePtr tree, int helper, GenTreeArgList* args);
+ GenTree* fgMorphIntoHelperCall(GenTree* tree, int helper, GenTreeArgList* args);
- GenTreePtr fgMorphStackArgForVarArgs(unsigned lclNum, var_types varType, unsigned lclOffs);
+ GenTree* fgMorphStackArgForVarArgs(unsigned lclNum, var_types varType, unsigned lclOffs);
- bool fgMorphRelopToQmark(GenTreePtr tree);
+ bool fgMorphRelopToQmark(GenTree* tree);
// A "MorphAddrContext" carries information from the surrounding context. If we are evaluating a byref address,
// it is useful to know whether the address will be immediately dereferenced, or whether the address value will
@@ -4892,24 +4883,24 @@ private:
static MorphAddrContext s_CopyBlockMAC;
#ifdef FEATURE_SIMD
- GenTreePtr getSIMDStructFromField(GenTreePtr tree,
- var_types* baseTypeOut,
- unsigned* indexOut,
- unsigned* simdSizeOut,
- bool ignoreUsedInSIMDIntrinsic = false);
- GenTreePtr fgMorphFieldAssignToSIMDIntrinsicSet(GenTreePtr tree);
- GenTreePtr fgMorphFieldToSIMDIntrinsicGet(GenTreePtr tree);
- bool fgMorphCombineSIMDFieldAssignments(BasicBlock* block, GenTreePtr stmt);
- void impMarkContiguousSIMDFieldAssignments(GenTreePtr stmt);
+ GenTree* getSIMDStructFromField(GenTree* tree,
+ var_types* baseTypeOut,
+ unsigned* indexOut,
+ unsigned* simdSizeOut,
+ bool ignoreUsedInSIMDIntrinsic = false);
+ GenTree* fgMorphFieldAssignToSIMDIntrinsicSet(GenTree* tree);
+ GenTree* fgMorphFieldToSIMDIntrinsicGet(GenTree* tree);
+ bool fgMorphCombineSIMDFieldAssignments(BasicBlock* block, GenTree* stmt);
+ void impMarkContiguousSIMDFieldAssignments(GenTree* stmt);
// fgPreviousCandidateSIMDFieldAsgStmt is only used for tracking previous simd field assignment
// in function: Compiler::impMarkContiguousSIMDFieldAssignments.
- GenTreePtr fgPreviousCandidateSIMDFieldAsgStmt;
+ GenTree* fgPreviousCandidateSIMDFieldAsgStmt;
#endif // FEATURE_SIMD
- GenTreePtr fgMorphArrayIndex(GenTreePtr tree);
- GenTreePtr fgMorphCast(GenTreePtr tree);
- GenTreePtr fgUnwrapProxy(GenTreePtr objRef);
+ GenTree* fgMorphArrayIndex(GenTree* tree);
+ GenTree* fgMorphCast(GenTree* tree);
+ GenTree* fgUnwrapProxy(GenTree* objRef);
GenTreeCall* fgMorphArgs(GenTreeCall* call);
void fgMakeOutgoingStructArgCopy(GenTreeCall* call,
@@ -4918,64 +4909,64 @@ private:
CORINFO_CLASS_HANDLE copyBlkClass FEATURE_UNIX_AMD64_STRUCT_PASSING_ONLY_ARG(
const SYSTEMV_AMD64_CORINFO_STRUCT_REG_PASSING_DESCRIPTOR* structDescPtr));
- void fgFixupStructReturn(GenTreePtr call);
- GenTreePtr fgMorphLocalVar(GenTreePtr tree, bool forceRemorph);
+ void fgFixupStructReturn(GenTree* call);
+ GenTree* fgMorphLocalVar(GenTree* tree, bool forceRemorph);
public:
- bool fgAddrCouldBeNull(GenTreePtr addr);
+ bool fgAddrCouldBeNull(GenTree* addr);
private:
- GenTreePtr fgMorphField(GenTreePtr tree, MorphAddrContext* mac);
+ GenTree* fgMorphField(GenTree* tree, MorphAddrContext* mac);
bool fgCanFastTailCall(GenTreeCall* call);
bool fgCheckStmtAfterTailCall();
void fgMorphTailCall(GenTreeCall* call);
void fgMorphRecursiveFastTailCallIntoLoop(BasicBlock* block, GenTreeCall* recursiveTailCall);
- GenTreePtr fgAssignRecursiveCallArgToCallerParam(GenTreePtr arg,
- fgArgTabEntry* argTabEntry,
- BasicBlock* block,
- IL_OFFSETX callILOffset,
- GenTreePtr tmpAssignmentInsertionPoint,
- GenTreePtr paramAssignmentInsertionPoint);
+ GenTree* fgAssignRecursiveCallArgToCallerParam(GenTree* arg,
+ fgArgTabEntry* argTabEntry,
+ BasicBlock* block,
+ IL_OFFSETX callILOffset,
+ GenTree* tmpAssignmentInsertionPoint,
+ GenTree* paramAssignmentInsertionPoint);
static int fgEstimateCallStackSize(GenTreeCall* call);
- GenTreePtr fgMorphCall(GenTreeCall* call);
+ GenTree* fgMorphCall(GenTreeCall* call);
void fgMorphCallInline(GenTreeCall* call, InlineResult* result);
void fgMorphCallInlineHelper(GenTreeCall* call, InlineResult* result);
#if DEBUG
void fgNoteNonInlineCandidate(GenTreeStmt* stmt, GenTreeCall* call);
static fgWalkPreFn fgFindNonInlineCandidate;
#endif
- GenTreePtr fgOptimizeDelegateConstructor(GenTreeCall* call,
- CORINFO_CONTEXT_HANDLE* ExactContextHnd,
- CORINFO_RESOLVED_TOKEN* ldftnToken);
- GenTreePtr fgMorphLeaf(GenTreePtr tree);
- void fgAssignSetVarDef(GenTreePtr tree);
- GenTreePtr fgMorphOneAsgBlockOp(GenTreePtr tree);
- GenTreePtr fgMorphInitBlock(GenTreePtr tree);
- GenTreePtr fgMorphBlkToInd(GenTreeBlk* tree, var_types type);
- GenTreePtr fgMorphGetStructAddr(GenTreePtr* pTree, CORINFO_CLASS_HANDLE clsHnd, bool isRValue = false);
- GenTreePtr fgMorphBlkNode(GenTreePtr tree, bool isDest);
- GenTreePtr fgMorphBlockOperand(GenTreePtr tree, var_types asgType, unsigned blockWidth, bool isDest);
+ GenTree* fgOptimizeDelegateConstructor(GenTreeCall* call,
+ CORINFO_CONTEXT_HANDLE* ExactContextHnd,
+ CORINFO_RESOLVED_TOKEN* ldftnToken);
+ GenTree* fgMorphLeaf(GenTree* tree);
+ void fgAssignSetVarDef(GenTree* tree);
+ GenTree* fgMorphOneAsgBlockOp(GenTree* tree);
+ GenTree* fgMorphInitBlock(GenTree* tree);
+ GenTree* fgMorphBlkToInd(GenTreeBlk* tree, var_types type);
+ GenTree* fgMorphGetStructAddr(GenTree** pTree, CORINFO_CLASS_HANDLE clsHnd, bool isRValue = false);
+ GenTree* fgMorphBlkNode(GenTree* tree, bool isDest);
+ GenTree* fgMorphBlockOperand(GenTree* tree, var_types asgType, unsigned blockWidth, bool isDest);
void fgMorphUnsafeBlk(GenTreeObj* obj);
- GenTreePtr fgMorphCopyBlock(GenTreePtr tree);
- GenTreePtr fgMorphForRegisterFP(GenTreePtr tree);
- GenTreePtr fgMorphSmpOp(GenTreePtr tree, MorphAddrContext* mac = nullptr);
- GenTreePtr fgMorphSmpOpPre(GenTreePtr tree);
- GenTreePtr fgMorphModToSubMulDiv(GenTreeOp* tree);
- GenTreePtr fgMorphSmpOpOptional(GenTreeOp* tree);
- GenTreePtr fgMorphRecognizeBoxNullable(GenTree* compare);
+ GenTree* fgMorphCopyBlock(GenTree* tree);
+ GenTree* fgMorphForRegisterFP(GenTree* tree);
+ GenTree* fgMorphSmpOp(GenTree* tree, MorphAddrContext* mac = nullptr);
+ GenTree* fgMorphSmpOpPre(GenTree* tree);
+ GenTree* fgMorphModToSubMulDiv(GenTreeOp* tree);
+ GenTree* fgMorphSmpOpOptional(GenTreeOp* tree);
+ GenTree* fgMorphRecognizeBoxNullable(GenTree* compare);
- GenTreePtr fgMorphToEmulatedFP(GenTreePtr tree);
- GenTreePtr fgMorphConst(GenTreePtr tree);
+ GenTree* fgMorphToEmulatedFP(GenTree* tree);
+ GenTree* fgMorphConst(GenTree* tree);
public:
- GenTreePtr fgMorphTree(GenTreePtr tree, MorphAddrContext* mac = nullptr);
+ GenTree* fgMorphTree(GenTree* tree, MorphAddrContext* mac = nullptr);
private:
#if LOCAL_ASSERTION_PROP
- void fgKillDependentAssertionsSingle(unsigned lclNum DEBUGARG(GenTreePtr tree));
- void fgKillDependentAssertions(unsigned lclNum DEBUGARG(GenTreePtr tree));
+ void fgKillDependentAssertionsSingle(unsigned lclNum DEBUGARG(GenTree* tree));
+ void fgKillDependentAssertions(unsigned lclNum DEBUGARG(GenTree* tree));
#endif
- void fgMorphTreeDone(GenTreePtr tree, GenTreePtr oldTree = nullptr DEBUGARG(int morphNum = 0));
+ void fgMorphTreeDone(GenTree* tree, GenTree* oldTree = nullptr DEBUGARG(int morphNum = 0));
GenTreeStmt* fgMorphStmt;
@@ -5052,13 +5043,13 @@ private:
unsigned fgCheckInlineDepthAndRecursion(InlineInfo* inlineInfo);
void fgInvokeInlineeCompiler(GenTreeCall* call, InlineResult* result);
void fgInsertInlineeBlocks(InlineInfo* pInlineInfo);
- GenTreePtr fgInlinePrependStatements(InlineInfo* inlineInfo);
- void fgInlineAppendStatements(InlineInfo* inlineInfo, BasicBlock* block, GenTreePtr stmt);
+ GenTree* fgInlinePrependStatements(InlineInfo* inlineInfo);
+ void fgInlineAppendStatements(InlineInfo* inlineInfo, BasicBlock* block, GenTree* stmt);
#if FEATURE_MULTIREG_RET
- GenTreePtr fgGetStructAsStructPtr(GenTreePtr tree);
- GenTreePtr fgAssignStructInlineeToVar(GenTreePtr child, CORINFO_CLASS_HANDLE retClsHnd);
- void fgAttachStructInlineeToAsg(GenTreePtr tree, GenTreePtr child, CORINFO_CLASS_HANDLE retClsHnd);
+ GenTree* fgGetStructAsStructPtr(GenTree* tree);
+ GenTree* fgAssignStructInlineeToVar(GenTree* child, CORINFO_CLASS_HANDLE retClsHnd);
+ void fgAttachStructInlineeToAsg(GenTree* tree, GenTree* child, CORINFO_CLASS_HANDLE retClsHnd);
#endif // FEATURE_MULTIREG_RET
static fgWalkPreFn fgUpdateInlineReturnExpressionPlaceHolder;
@@ -5071,8 +5062,8 @@ private:
#endif
void fgPromoteStructs();
- fgWalkResult fgMorphStructField(GenTreePtr tree, fgWalkData* fgWalkPre);
- fgWalkResult fgMorphLocalField(GenTreePtr tree, fgWalkData* fgWalkPre);
+ fgWalkResult fgMorphStructField(GenTree* tree, fgWalkData* fgWalkPre);
+ fgWalkResult fgMorphLocalField(GenTree* tree, fgWalkData* fgWalkPre);
// Identify which parameters are implicit byrefs, and flag their LclVarDscs.
void fgMarkImplicitByRefArgs();
@@ -5082,8 +5073,8 @@ private:
void fgRetypeImplicitByRefArgs();
// Rewrite appearances of implicit byrefs (manifest the implied additional level of indirection).
- bool fgMorphImplicitByRefArgs(GenTreePtr tree);
- GenTreePtr fgMorphImplicitByRefArgs(GenTreePtr tree, bool isAddr);
+ bool fgMorphImplicitByRefArgs(GenTree* tree);
+ GenTree* fgMorphImplicitByRefArgs(GenTree* tree, bool isAddr);
// Clear up annotations for any struct promotion temps created for implicit byrefs.
void fgMarkDemotedImplicitByRefArgs();
@@ -5098,7 +5089,7 @@ private:
// Returns true if the type of tree is of size at least "width", or if "tree" is not a
// local variable.
- bool fgFitsInOrNotLoc(GenTreePtr tree, unsigned width);
+ bool fgFitsInOrNotLoc(GenTree* tree, unsigned width);
// The given local variable, required to be a struct variable, is being assigned via
// a "lclField", to make it masquerade as an integral type in the ABI. Make sure that
@@ -5118,7 +5109,7 @@ private:
TypeProducerKind gtGetTypeProducerKind(GenTree* tree);
bool gtIsTypeHandleToRuntimeTypeHelper(GenTreeCall* call);
- bool gtIsActiveCSE_Candidate(GenTreePtr tree);
+ bool gtIsActiveCSE_Candidate(GenTree* tree);
#ifdef DEBUG
bool fgPrintInlinedMethods;
@@ -5127,10 +5118,10 @@ private:
bool fgIsBigOffset(size_t offset);
// The following are used when morphing special cases of integer div/mod operations and also by codegen
- bool fgIsSignedDivOptimizable(GenTreePtr divisor);
- bool fgIsUnsignedDivOptimizable(GenTreePtr divisor);
- bool fgIsSignedModOptimizable(GenTreePtr divisor);
- bool fgIsUnsignedModOptimizable(GenTreePtr divisor);
+ bool fgIsSignedDivOptimizable(GenTree* divisor);
+ bool fgIsUnsignedDivOptimizable(GenTree* divisor);
+ bool fgIsSignedModOptimizable(GenTree* divisor);
+ bool fgIsUnsignedModOptimizable(GenTree* divisor);
bool fgNeedReturnSpillTemp();
@@ -5148,18 +5139,18 @@ public:
void optInit();
protected:
- LclVarDsc* optIsTrackedLocal(GenTreePtr tree);
+ LclVarDsc* optIsTrackedLocal(GenTree* tree);
public:
- void optRemoveRangeCheck(GenTreePtr tree, GenTreePtr stmt);
- bool optIsRangeCheckRemovable(GenTreePtr tree);
+ void optRemoveRangeCheck(GenTree* tree, GenTree* stmt);
+ bool optIsRangeCheckRemovable(GenTree* tree);
protected:
static fgWalkPreFn optValidRangeCheckIndex;
static fgWalkPreFn optRemoveTreeVisitor; // Helper passed to Compiler::fgWalkAllTreesPre() to decrement the LclVar
// usage counts
- void optRemoveTree(GenTreePtr deadTree, GenTreePtr keepList);
+ void optRemoveTree(GenTree* deadTree, GenTree* keepList);
/**************************************************************************
*
@@ -5227,7 +5218,7 @@ protected:
void optHoistLoopExprsForBlock(BasicBlock* blk, unsigned lnum, LoopHoistContext* hoistCtxt);
// Return true if the tree looks profitable to hoist out of loop 'lnum'.
- bool optIsProfitableToHoistableTree(GenTreePtr tree, unsigned lnum);
+ bool optIsProfitableToHoistableTree(GenTree* tree, unsigned lnum);
// Hoist all proper sub-expressions of "tree" (which occurs in "stmt", which occurs in "blk")
// that are invariant in loop "lnum" (an index into the optLoopTable)
@@ -5236,7 +5227,7 @@ protected:
// Returns "true" iff "tree" is loop-invariant (wrt "lnum").
// Assumes that the value of "*firstBlockAndBeforeSideEffect" indicates that we're in the first block, and before
// any possible globally visible side effects. Assume is called in evaluation order, and updates this.
- bool optHoistLoopExprsForTree(GenTreePtr tree,
+ bool optHoistLoopExprsForTree(GenTree* tree,
unsigned lnum,
LoopHoistContext* hoistCtxt,
bool* firstBlockAndBeforeSideEffect,
@@ -5244,7 +5235,7 @@ protected:
bool* pCctorDependent);
// Performs the hoisting of 'tree' into the PreHeader for loop 'lnum'
- void optHoistCandidate(GenTreePtr tree, unsigned lnum, LoopHoistContext* hoistCtxt);
+ void optHoistCandidate(GenTree* tree, unsigned lnum, LoopHoistContext* hoistCtxt);
// Returns true iff the ValueNum "vn" represents a value that is loop-invariant in "lnum".
// Constants and init values are always loop invariant.
@@ -5255,7 +5246,7 @@ protected:
// "subst". If "tree" is a local SSA var, it is valid if its SSA definition occurs outside of the loop, or
// if it is in the domain of "subst" (meaning that it's definition has been previously hoisted, with a "standin"
// local.) If tree is a constant, it is valid. Otherwise, if it is an operator, it is valid iff its children are.
- bool optTreeIsValidAtLoopHead(GenTreePtr tree, unsigned lnum);
+ bool optTreeIsValidAtLoopHead(GenTree* tree, unsigned lnum);
// If "blk" is the entry block of a natural loop, returns true and sets "*pLnum" to the index of the loop
// in the loop table.
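The comment above defines optTreeIsValidAtLoopHead by a simple recursion: a constant is valid, a local SSA use is valid if its definition lies outside the loop or is already covered by "subst", and an operator is valid iff its children are. The sketch below transcribes that rule over an invented tree representation; loop membership and "subst" are modeled with plain containers rather than the JIT's data structures.

    #include <set>

    enum NodeKind
    {
        NK_CONST,
        NK_LCL_SSA, // an SSA use of a local variable
        NK_OPER
    };

    struct Node
    {
        NodeKind kind;
        unsigned lclNum;      // for NK_LCL_SSA
        unsigned ssaDefBlock; // for NK_LCL_SSA: block containing the SSA definition
        Node*    op1;         // for NK_OPER
        Node*    op2;         // for NK_OPER, may be null
    };

    // 'loopBlocks' stands in for "the blocks of loop lnum"; 'subst' stands in
    // for the set of locals whose definitions have already been hoisted.
    static bool TreeIsValidAtLoopHead(const Node* tree,
                                      const std::set<unsigned>& loopBlocks,
                                      const std::set<unsigned>& subst)
    {
        switch (tree->kind)
        {
            case NK_CONST:
                return true;

            case NK_LCL_SSA:
                // Valid if the def is outside the loop, or was itself hoisted.
                return (loopBlocks.count(tree->ssaDefBlock) == 0) || (subst.count(tree->lclNum) != 0);

            case NK_OPER:
                if (!TreeIsValidAtLoopHead(tree->op1, loopBlocks, subst))
                {
                    return false;
                }
                return (tree->op2 == nullptr) || TreeIsValidAtLoopHead(tree->op2, loopBlocks, subst);
        }
        return false;
    }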
@@ -5275,7 +5266,7 @@ private:
void optComputeLoopSideEffectsOfBlock(BasicBlock* blk);
// Hoist the expression "expr" out of loop "lnum".
- void optPerformHoistExpr(GenTreePtr expr, unsigned lnum);
+ void optPerformHoistExpr(GenTree* expr, unsigned lnum);
public:
void optOptimizeBools();
@@ -5417,7 +5408,7 @@ public:
/* The following values are set only for iterator loops, i.e. loops that have the flag LPFLG_ITER set */
- GenTreePtr lpIterTree; // The "i <op>= const" tree
+ GenTree* lpIterTree; // The "i <op>= const" tree
unsigned lpIterVar(); // iterator variable #
int lpIterConst(); // the constant with which the iterator is incremented
genTreeOps lpIterOper(); // the type of the operation on the iterator (ASG_ADD, ASG_SUB, etc.)
@@ -5433,13 +5424,13 @@ public:
/* The following is for LPFLG_ITER loops only (i.e. the loop condition is "i RELOP const or var") */
- GenTreePtr lpTestTree; // pointer to the node containing the loop test
+ GenTree* lpTestTree; // pointer to the node containing the loop test
genTreeOps lpTestOper(); // the type of the comparison between the iterator and the limit (GT_LE, GT_GE, etc.)
void VERIFY_lpTestTree();
- bool lpIsReversed(); // true if the iterator node is the second operand in the loop condition
- GenTreePtr lpIterator(); // the iterator node in the loop test
- GenTreePtr lpLimit(); // the limit node in the loop test
+ bool lpIsReversed(); // true if the iterator node is the second operand in the loop condition
+ GenTree* lpIterator(); // the iterator node in the loop test
+ GenTree* lpLimit(); // the limit node in the loop test
int lpConstLimit(); // limit constant value of iterator - loop condition is "i RELOP const" : Valid if
// LPFLG_CONST_LIMIT
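As the comments above note, the iterator can appear as either operand of the loop test; lpIsReversed records that it is the second operand, and lpIterator/lpLimit select accordingly. Below is a stand-alone sketch of that selection over an invented relational Node with an isIterator marker; the real accessors also verify the tree's shape and flags.

    struct Node
    {
        bool  isIterator; // marks the operand that references the loop's iterator variable
        Node* op1;
        Node* op2;
    };

    // True when the test has the shape "limit RELOP i", i.e. the iterator is op2.
    static bool LoopTestIsReversed(const Node* testTree)
    {
        return testTree->op2->isIterator;
    }

    static Node* LoopIterator(Node* testTree)
    {
        return LoopTestIsReversed(testTree) ? testTree->op2 : testTree->op1;
    }

    static Node* LoopLimit(Node* testTree)
    {
        return LoopTestIsReversed(testTree) ? testTree->op1 : testTree->op2;
    }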
@@ -5548,17 +5539,13 @@ protected:
void optUpdateLoopsBeforeRemoveBlock(BasicBlock* block, bool skipUnmarkLoop = false);
- bool optIsLoopTestEvalIntoTemp(GenTreePtr test, GenTreePtr* newTest);
- unsigned optIsLoopIncrTree(GenTreePtr incr);
- bool optCheckIterInLoopTest(unsigned loopInd, GenTreePtr test, BasicBlock* from, BasicBlock* to, unsigned iterVar);
- bool optComputeIterInfo(GenTreePtr incr, BasicBlock* from, BasicBlock* to, unsigned* pIterVar);
- bool optPopulateInitInfo(unsigned loopInd, GenTreePtr init, unsigned iterVar);
- bool optExtractInitTestIncr(BasicBlock* head,
- BasicBlock* bottom,
- BasicBlock* exit,
- GenTreePtr* ppInit,
- GenTreePtr* ppTest,
- GenTreePtr* ppIncr);
+ bool optIsLoopTestEvalIntoTemp(GenTree* test, GenTree** newTest);
+ unsigned optIsLoopIncrTree(GenTree* incr);
+ bool optCheckIterInLoopTest(unsigned loopInd, GenTree* test, BasicBlock* from, BasicBlock* to, unsigned iterVar);
+ bool optComputeIterInfo(GenTree* incr, BasicBlock* from, BasicBlock* to, unsigned* pIterVar);
+ bool optPopulateInitInfo(unsigned loopInd, GenTree* init, unsigned iterVar);
+ bool optExtractInitTestIncr(
+ BasicBlock* head, BasicBlock* bottom, BasicBlock* exit, GenTree** ppInit, GenTree** ppTest, GenTree** ppIncr);
void optFindNaturalLoops();
@@ -5635,13 +5622,13 @@ private:
static fgWalkPreFn optIsVarAssgCB;
protected:
- bool optIsVarAssigned(BasicBlock* beg, BasicBlock* end, GenTreePtr skip, unsigned var);
+ bool optIsVarAssigned(BasicBlock* beg, BasicBlock* end, GenTree* skip, unsigned var);
bool optIsVarAssgLoop(unsigned lnum, unsigned var);
int optIsSetAssgLoop(unsigned lnum, ALLVARSET_VALARG_TP vars, varRefKinds inds = VR_NONE);
- bool optNarrowTree(GenTreePtr tree, var_types srct, var_types dstt, ValueNumPair vnpNarrow, bool doit);
+ bool optNarrowTree(GenTree* tree, var_types srct, var_types dstt, ValueNumPair vnpNarrow, bool doit);
/**************************************************************************
* Optimization conditions
@@ -5670,15 +5657,15 @@ protected:
struct treeLst
{
- treeLst* tlNext;
- GenTreePtr tlTree;
+ treeLst* tlNext;
+ GenTree* tlTree;
};
struct treeStmtLst
{
treeStmtLst* tslNext;
- GenTreePtr tslTree; // tree node
- GenTreePtr tslStmt; // statement containing the tree
+ GenTree* tslTree; // tree node
+ GenTree* tslStmt; // statement containing the tree
BasicBlock* tslBlock; // block containing the statement
};
@@ -5699,8 +5686,8 @@ protected:
unsigned csdDefWtCnt; // weighted def count
unsigned csdUseWtCnt; // weighted use count (excluding the implicit uses at defs)
- GenTreePtr csdTree; // treenode containing the 1st occurrence
- GenTreePtr csdStmt; // stmt containing the 1st occurrence
+ GenTree* csdTree; // treenode containing the 1st occurrence
+ GenTree* csdStmt; // stmt containing the 1st occurrence
BasicBlock* csdBlock; // block containing the 1st occurrence
treeStmtLst* csdTreeList; // list of matching tree nodes: head
@@ -5714,18 +5701,18 @@ protected:
CSEdsc** optCSEhash;
CSEdsc** optCSEtab;
- typedef JitHashTable<GenTreePtr, JitPtrKeyFuncs<GenTree>, GenTreePtr> NodeToNodeMap;
+ typedef JitHashTable<GenTree*, JitPtrKeyFuncs<GenTree>, GenTree*> NodeToNodeMap;
NodeToNodeMap* optCseCheckedBoundMap; // Maps bound nodes to ancestor compares that should be
// re-numbered with the bound to improve range check elimination
// Given a compare, look for a cse candidate checked bound feeding it and add a map entry if found.
- void optCseUpdateCheckedBoundMap(GenTreePtr compare);
+ void optCseUpdateCheckedBoundMap(GenTree* compare);
void optCSEstop();
CSEdsc* optCSEfindDsc(unsigned index);
- void optUnmarkCSE(GenTreePtr tree);
+ void optUnmarkCSE(GenTree* tree);
// user defined callback data for the tree walk function optCSE_MaskHelper()
struct optCSE_MaskData
@@ -5740,7 +5727,7 @@ protected:
// This function walks all the nodes of a given tree
// and returns the mask of CSE definitions and uses for the tree
//
- void optCSE_GetMaskData(GenTreePtr tree, optCSE_MaskData* pMaskData);
+ void optCSE_GetMaskData(GenTree* tree, optCSE_MaskData* pMaskData);
// Given a binary tree node return true if it is safe to swap the order of evaluation for op1 and op2.
bool optCSE_canSwap(GenTree* firstNode, GenTree* secondNode);
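optCSE_GetMaskData above walks every node of a tree and accumulates one bit mask of CSE definitions and one of CSE uses; optCSE_canSwap can then intersect the masks of two subtrees to decide whether reordering them is safe. A compact model of that bookkeeping follows; Node, CseRole, ExpSet, and the 64-candidate limit are assumptions of the sketch, not the JIT's actual encoding.

    #include <cstdint>

    typedef uint64_t ExpSet; // stand-in bit set; assumes at most 64 CSE candidates

    enum CseRole
    {
        CSE_NONE,
        CSE_DEF,
        CSE_USE
    };

    struct Node
    {
        CseRole  cseRole;
        unsigned cseIndex; // 1-based CSE candidate number when cseRole != CSE_NONE
        Node*    op1;
        Node*    op2;
    };

    struct CseMaskData
    {
        ExpSet defMask = 0;
        ExpSet useMask = 0;
    };

    // Walk every node of 'tree', OR-ing each CSE def or use into the matching mask.
    static void GetCseMaskData(const Node* tree, CseMaskData* pMaskData)
    {
        if (tree == nullptr)
        {
            return;
        }
        if (tree->cseRole == CSE_DEF)
        {
            pMaskData->defMask |= ExpSet(1) << (tree->cseIndex - 1);
        }
        else if (tree->cseRole == CSE_USE)
        {
            pMaskData->useMask |= ExpSet(1) << (tree->cseIndex - 1);
        }
        GetCseMaskData(tree->op1, pMaskData);
        GetCseMaskData(tree->op2, pMaskData);
    }

    // In the spirit of optCSE_canSwap: reordering is unsafe if one subtree
    // defines a CSE that the other one uses.
    static bool CanSwap(const Node* firstNode, const Node* secondNode)
    {
        CseMaskData m1;
        CseMaskData m2;
        GetCseMaskData(firstNode, &m1);
        GetCseMaskData(secondNode, &m2);
        return ((m1.defMask & m2.useMask) == 0) && ((m2.defMask & m1.useMask) == 0);
    }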
@@ -5772,13 +5759,13 @@ public:
protected:
void optValnumCSE_Init();
- unsigned optValnumCSE_Index(GenTreePtr tree, GenTreePtr stmt);
+ unsigned optValnumCSE_Index(GenTree* tree, GenTree* stmt);
unsigned optValnumCSE_Locate();
void optValnumCSE_InitDataFlow();
void optValnumCSE_DataFlow();
void optValnumCSE_Availablity();
void optValnumCSE_Heuristic();
- void optValnumCSE_UnmarkCSEs(GenTreePtr deadTree, GenTreePtr keepList);
+ void optValnumCSE_UnmarkCSEs(GenTree* deadTree, GenTree* keepList);
#endif // FEATURE_VALNUM_CSE
@@ -5792,7 +5779,7 @@ protected:
unsigned optCSEweight; // The weight of the current block when we are
// scanning for CSE expressions
- bool optIsCSEcandidate(GenTreePtr tree);
+ bool optIsCSEcandidate(GenTree* tree);
// lclNumIsTrueCSE returns true if the LclVar was introduced by the CSE phase of the compiler
//
@@ -5818,7 +5805,7 @@ protected:
struct isVarAssgDsc
{
- GenTreePtr ivaSkip;
+ GenTree* ivaSkip;
#ifdef DEBUG
void* ivaSelf;
#endif
@@ -5833,17 +5820,17 @@ protected:
public:
// VN based copy propagation.
- typedef ArrayStack<GenTreePtr> GenTreePtrStack;
+ typedef ArrayStack<GenTree*> GenTreePtrStack;
typedef JitHashTable<unsigned, JitSmallPrimitiveKeyFuncs<unsigned>, GenTreePtrStack*> LclNumToGenTreePtrStack;
// Kill set to track variables with intervening definitions.
VARSET_TP optCopyPropKillSet;
// Copy propagation functions.
- void optCopyProp(BasicBlock* block, GenTreePtr stmt, GenTreePtr tree, LclNumToGenTreePtrStack* curSsaName);
+ void optCopyProp(BasicBlock* block, GenTree* stmt, GenTree* tree, LclNumToGenTreePtrStack* curSsaName);
void optBlockCopyPropPopStacks(BasicBlock* block, LclNumToGenTreePtrStack* curSsaName);
void optBlockCopyProp(BasicBlock* block, LclNumToGenTreePtrStack* curSsaName);
- bool optIsSsaLocal(GenTreePtr tree);
+ bool optIsSsaLocal(GenTree* tree);
int optCopyProp_LclVarScore(LclVarDsc* lclVarDsc, LclVarDsc* copyVarDsc, bool preferOp2);
void optVnCopyProp();
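The copy-propagation entry points above keep, for each local, a stack of currently live SSA definitions ("curSsaName"): definitions are pushed while a block is processed and popped again by optBlockCopyPropPopStacks when the traversal leaves the block, so the rest of the walk sees the correct state. A skeletal version of that bookkeeping with ordinary containers; LclNumToDefStack, PushDef, TopDef, and PopBlockDefs are invented stand-ins for LclNumToGenTreePtrStack and the real routines.

    #include <unordered_map>
    #include <vector>

    struct Node; // opaque stand-in for the defining GenTree

    typedef std::vector<Node*>                     DefStack;
    typedef std::unordered_map<unsigned, DefStack> LclNumToDefStack;

    // While processing a block: push each definition of a local onto that local's stack.
    static void PushDef(LclNumToDefStack& curSsaName, unsigned lclNum, Node* defNode)
    {
        curSsaName[lclNum].push_back(defNode);
    }

    // The currently reaching definition (if any) for a local, consulted when
    // scoring copy-propagation candidates.
    static Node* TopDef(const LclNumToDefStack& curSsaName, unsigned lclNum)
    {
        auto it = curSsaName.find(lclNum);
        if (it == curSsaName.end() || it->second.empty())
        {
            return nullptr;
        }
        return it->second.back();
    }

    // When leaving the block: pop exactly the defs this block pushed, restoring
    // the state for the rest of the traversal.
    static void PopBlockDefs(LclNumToDefStack& curSsaName, const std::vector<unsigned>& lclsDefinedHere)
    {
        for (unsigned lclNum : lclsDefinedHere)
        {
            curSsaName[lclNum].pop_back();
        }
    }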
@@ -5908,17 +5895,17 @@ public:
OPK_NULLCHECK
};
- bool gtIsVtableRef(GenTreePtr tree);
- GenTreePtr getArrayLengthFromAllocation(GenTreePtr tree);
- GenTreePtr getObjectHandleNodeFromAllocation(GenTreePtr tree);
- GenTreePtr optPropGetValueRec(unsigned lclNum, unsigned ssaNum, optPropKind valueKind, int walkDepth);
- GenTreePtr optPropGetValue(unsigned lclNum, unsigned ssaNum, optPropKind valueKind);
- GenTreePtr optEarlyPropRewriteTree(GenTreePtr tree);
+ bool gtIsVtableRef(GenTree* tree);
+ GenTree* getArrayLengthFromAllocation(GenTree* tree);
+ GenTree* getObjectHandleNodeFromAllocation(GenTree* tree);
+ GenTree* optPropGetValueRec(unsigned lclNum, unsigned ssaNum, optPropKind valueKind, int walkDepth);
+ GenTree* optPropGetValue(unsigned lclNum, unsigned ssaNum, optPropKind valueKind);
+ GenTree* optEarlyPropRewriteTree(GenTree* tree);
bool optDoEarlyPropForBlock(BasicBlock* block);
bool optDoEarlyPropForFunc();
void optEarlyProp();
- void optFoldNullCheck(GenTreePtr tree);
- bool optCanMoveNullCheckPastTree(GenTreePtr tree, bool isInsideTry);
+ void optFoldNullCheck(GenTree* tree);
+ bool optCanMoveNullCheckPastTree(GenTree* tree, bool isInsideTry);
#if ASSERTION_PROP
/**************************************************************************
@@ -6178,13 +6165,13 @@ protected:
static fgWalkPreFn optAddCopiesCallback;
static fgWalkPreFn optVNAssertionPropCurStmtVisitor;
unsigned optAddCopyLclNum;
- GenTreePtr optAddCopyAsgnNode;
+ GenTree* optAddCopyAsgnNode;
bool optLocalAssertionProp; // indicates that we are performing local assertion prop
bool optAssertionPropagated; // set to true if we modified the trees
bool optAssertionPropagatedCurrentStmt;
#ifdef DEBUG
- GenTreePtr optAssertionPropCurrentTree;
+ GenTree* optAssertionPropCurrentTree;
#endif
AssertionIndex* optComplementaryAssertionMap;
JitExpandArray<ASSERT_TP>* optAssertionDep; // table that holds dependent assertions (assertions
@@ -6194,12 +6181,12 @@ protected:
AssertionIndex optMaxAssertionCount;
public:
- void optVnNonNullPropCurStmt(BasicBlock* block, GenTreePtr stmt, GenTreePtr tree);
- fgWalkResult optVNConstantPropCurStmt(BasicBlock* block, GenTreePtr stmt, GenTreePtr tree);
- GenTreePtr optVNConstantPropOnRelOp(GenTreePtr tree);
- GenTreePtr optVNConstantPropOnJTrue(BasicBlock* block, GenTreePtr stmt, GenTreePtr test);
- GenTreePtr optVNConstantPropOnTree(BasicBlock* block, GenTreePtr stmt, GenTreePtr tree);
- GenTreePtr optPrepareTreeForReplacement(GenTreePtr extractTree, GenTreePtr replaceTree);
+ void optVnNonNullPropCurStmt(BasicBlock* block, GenTree* stmt, GenTree* tree);
+ fgWalkResult optVNConstantPropCurStmt(BasicBlock* block, GenTree* stmt, GenTree* tree);
+ GenTree* optVNConstantPropOnRelOp(GenTree* tree);
+ GenTree* optVNConstantPropOnJTrue(BasicBlock* block, GenTree* stmt, GenTree* test);
+ GenTree* optVNConstantPropOnTree(BasicBlock* block, GenTree* stmt, GenTree* tree);
+ GenTree* optPrepareTreeForReplacement(GenTree* extractTree, GenTree* replaceTree);
AssertionIndex GetAssertionCount()
{
@@ -6220,28 +6207,28 @@ public:
#endif
// Assertion prop data flow functions.
- void optAssertionPropMain();
- GenTreePtr optVNAssertionPropCurStmt(BasicBlock* block, GenTreePtr stmt);
- bool optIsTreeKnownIntValue(bool vnBased, GenTreePtr tree, ssize_t* pConstant, unsigned* pIconFlags);
+ void optAssertionPropMain();
+ GenTree* optVNAssertionPropCurStmt(BasicBlock* block, GenTree* stmt);
+ bool optIsTreeKnownIntValue(bool vnBased, GenTree* tree, ssize_t* pConstant, unsigned* pIconFlags);
ASSERT_TP* optInitAssertionDataflowFlags();
ASSERT_TP* optComputeAssertionGen();
// Assertion Gen functions.
- void optAssertionGen(GenTreePtr tree);
- AssertionIndex optAssertionGenPhiDefn(GenTreePtr tree);
- AssertionInfo optCreateJTrueBoundsAssertion(GenTreePtr tree);
- AssertionInfo optAssertionGenJtrue(GenTreePtr tree);
- AssertionIndex optCreateJtrueAssertions(GenTreePtr op1, GenTreePtr op2, Compiler::optAssertionKind assertionKind);
+ void optAssertionGen(GenTree* tree);
+ AssertionIndex optAssertionGenPhiDefn(GenTree* tree);
+ AssertionInfo optCreateJTrueBoundsAssertion(GenTree* tree);
+ AssertionInfo optAssertionGenJtrue(GenTree* tree);
+ AssertionIndex optCreateJtrueAssertions(GenTree* op1, GenTree* op2, Compiler::optAssertionKind assertionKind);
AssertionIndex optFindComplementary(AssertionIndex assertionIndex);
void optMapComplementary(AssertionIndex assertionIndex, AssertionIndex index);
// Assertion creation functions.
- AssertionIndex optCreateAssertion(GenTreePtr op1, GenTreePtr op2, optAssertionKind assertionKind);
- AssertionIndex optCreateAssertion(GenTreePtr op1,
- GenTreePtr op2,
+ AssertionIndex optCreateAssertion(GenTree* op1, GenTree* op2, optAssertionKind assertionKind);
+ AssertionIndex optCreateAssertion(GenTree* op1,
+ GenTree* op2,
optAssertionKind assertionKind,
AssertionDsc* assertion);
- void optCreateComplementaryAssertion(AssertionIndex assertionIndex, GenTreePtr op1, GenTreePtr op2);
+ void optCreateComplementaryAssertion(AssertionIndex assertionIndex, GenTree* op1, GenTree* op2);
bool optAssertionVnInvolvesNan(AssertionDsc* assertion);
AssertionIndex optAddAssertion(AssertionDsc* assertion);
@@ -6252,40 +6239,40 @@ public:
ASSERT_TP optGetVnMappedAssertions(ValueNum vn);
// Used for respective assertion propagations.
- AssertionIndex optAssertionIsSubrange(GenTreePtr tree, var_types toType, ASSERT_VALARG_TP assertions);
- AssertionIndex optAssertionIsSubtype(GenTreePtr tree, GenTreePtr methodTableArg, ASSERT_VALARG_TP assertions);
- AssertionIndex optAssertionIsNonNullInternal(GenTreePtr op, ASSERT_VALARG_TP assertions);
- bool optAssertionIsNonNull(GenTreePtr op,
+ AssertionIndex optAssertionIsSubrange(GenTree* tree, var_types toType, ASSERT_VALARG_TP assertions);
+ AssertionIndex optAssertionIsSubtype(GenTree* tree, GenTree* methodTableArg, ASSERT_VALARG_TP assertions);
+ AssertionIndex optAssertionIsNonNullInternal(GenTree* op, ASSERT_VALARG_TP assertions);
+ bool optAssertionIsNonNull(GenTree* op,
ASSERT_VALARG_TP assertions DEBUGARG(bool* pVnBased) DEBUGARG(AssertionIndex* pIndex));
// Used for Relop propagation.
- AssertionIndex optGlobalAssertionIsEqualOrNotEqual(ASSERT_VALARG_TP assertions, GenTreePtr op1, GenTreePtr op2);
+ AssertionIndex optGlobalAssertionIsEqualOrNotEqual(ASSERT_VALARG_TP assertions, GenTree* op1, GenTree* op2);
AssertionIndex optLocalAssertionIsEqualOrNotEqual(
optOp1Kind op1Kind, unsigned lclNum, optOp2Kind op2Kind, ssize_t cnsVal, ASSERT_VALARG_TP assertions);
// Assertion prop for lcl var functions.
- bool optAssertionProp_LclVarTypeCheck(GenTreePtr tree, LclVarDsc* lclVarDsc, LclVarDsc* copyVarDsc);
- GenTreePtr optCopyAssertionProp(AssertionDsc* curAssertion,
- GenTreePtr tree,
- GenTreePtr stmt DEBUGARG(AssertionIndex index));
- GenTreePtr optConstantAssertionProp(AssertionDsc* curAssertion,
- const GenTreePtr tree,
- const GenTreePtr stmt DEBUGARG(AssertionIndex index));
- GenTreePtr optVnConstantAssertionProp(const GenTreePtr tree, const GenTreePtr stmt);
+ bool optAssertionProp_LclVarTypeCheck(GenTree* tree, LclVarDsc* lclVarDsc, LclVarDsc* copyVarDsc);
+ GenTree* optCopyAssertionProp(AssertionDsc* curAssertion,
+ GenTree* tree,
+ GenTree* stmt DEBUGARG(AssertionIndex index));
+ GenTree* optConstantAssertionProp(AssertionDsc* curAssertion,
+ GenTree* tree,
+ GenTree* stmt DEBUGARG(AssertionIndex index));
+ GenTree* optVnConstantAssertionProp(GenTree* tree, GenTree* stmt);
// Assertion propagation functions.
- GenTreePtr optAssertionProp(ASSERT_VALARG_TP assertions, const GenTreePtr tree, const GenTreePtr stmt);
- GenTreePtr optAssertionProp_LclVar(ASSERT_VALARG_TP assertions, const GenTreePtr tree, const GenTreePtr stmt);
- GenTreePtr optAssertionProp_Ind(ASSERT_VALARG_TP assertions, const GenTreePtr tree, const GenTreePtr stmt);
- GenTreePtr optAssertionProp_Cast(ASSERT_VALARG_TP assertions, const GenTreePtr tree, const GenTreePtr stmt);
- GenTreePtr optAssertionProp_Call(ASSERT_VALARG_TP assertions, GenTreeCall* call, const GenTreePtr stmt);
- GenTreePtr optAssertionProp_RelOp(ASSERT_VALARG_TP assertions, const GenTreePtr tree, const GenTreePtr stmt);
- GenTreePtr optAssertionProp_Comma(ASSERT_VALARG_TP assertions, const GenTreePtr tree, const GenTreePtr stmt);
- GenTreePtr optAssertionProp_BndsChk(ASSERT_VALARG_TP assertions, const GenTreePtr tree, const GenTreePtr stmt);
- GenTreePtr optAssertionPropGlobal_RelOp(ASSERT_VALARG_TP assertions, const GenTreePtr tree, const GenTreePtr stmt);
- GenTreePtr optAssertionPropLocal_RelOp(ASSERT_VALARG_TP assertions, const GenTreePtr tree, const GenTreePtr stmt);
- GenTreePtr optAssertionProp_Update(const GenTreePtr newTree, const GenTreePtr tree, const GenTreePtr stmt);
- GenTreePtr optNonNullAssertionProp_Call(ASSERT_VALARG_TP assertions, GenTreeCall* call, const GenTreePtr stmt);
+ GenTree* optAssertionProp(ASSERT_VALARG_TP assertions, GenTree* tree, GenTree* stmt);
+ GenTree* optAssertionProp_LclVar(ASSERT_VALARG_TP assertions, GenTree* tree, GenTree* stmt);
+ GenTree* optAssertionProp_Ind(ASSERT_VALARG_TP assertions, GenTree* tree, GenTree* stmt);
+ GenTree* optAssertionProp_Cast(ASSERT_VALARG_TP assertions, GenTree* tree, GenTree* stmt);
+ GenTree* optAssertionProp_Call(ASSERT_VALARG_TP assertions, GenTreeCall* call, GenTree* stmt);
+ GenTree* optAssertionProp_RelOp(ASSERT_VALARG_TP assertions, GenTree* tree, GenTree* stmt);
+ GenTree* optAssertionProp_Comma(ASSERT_VALARG_TP assertions, GenTree* tree, GenTree* stmt);
+ GenTree* optAssertionProp_BndsChk(ASSERT_VALARG_TP assertions, GenTree* tree, GenTree* stmt);
+ GenTree* optAssertionPropGlobal_RelOp(ASSERT_VALARG_TP assertions, GenTree* tree, GenTree* stmt);
+ GenTree* optAssertionPropLocal_RelOp(ASSERT_VALARG_TP assertions, GenTree* tree, GenTree* stmt);
+ GenTree* optAssertionProp_Update(GenTree* newTree, GenTree* tree, GenTree* stmt);
+ GenTree* optNonNullAssertionProp_Call(ASSERT_VALARG_TP assertions, GenTreeCall* call, GenTree* stmt);
// Implied assertion functions.
void optImpliedAssertions(AssertionIndex assertionIndex, ASSERT_TP& activeAssertions);
@@ -6310,26 +6297,26 @@ public:
{
LoopCloneContext* context;
unsigned loopNum;
- GenTreePtr stmt;
- LoopCloneVisitorInfo(LoopCloneContext* context, unsigned loopNum, GenTreePtr stmt)
+ GenTree* stmt;
+ LoopCloneVisitorInfo(LoopCloneContext* context, unsigned loopNum, GenTree* stmt)
: context(context), loopNum(loopNum), stmt(nullptr)
{
}
};
bool optIsStackLocalInvariant(unsigned loopNum, unsigned lclNum);
- bool optExtractArrIndex(GenTreePtr tree, ArrIndex* result, unsigned lhsNum);
- bool optReconstructArrIndex(GenTreePtr tree, ArrIndex* result, unsigned lhsNum);
+ bool optExtractArrIndex(GenTree* tree, ArrIndex* result, unsigned lhsNum);
+ bool optReconstructArrIndex(GenTree* tree, ArrIndex* result, unsigned lhsNum);
bool optIdentifyLoopOptInfo(unsigned loopNum, LoopCloneContext* context);
static fgWalkPreFn optCanOptimizeByLoopCloningVisitor;
- fgWalkResult optCanOptimizeByLoopCloning(GenTreePtr tree, LoopCloneVisitorInfo* info);
+ fgWalkResult optCanOptimizeByLoopCloning(GenTree* tree, LoopCloneVisitorInfo* info);
void optObtainLoopCloningOpts(LoopCloneContext* context);
bool optIsLoopClonable(unsigned loopInd);
bool optCanCloneLoops();
#ifdef DEBUG
- void optDebugLogLoopCloning(BasicBlock* block, GenTreePtr insertBefore);
+ void optDebugLogLoopCloning(BasicBlock* block, GenTree* insertBefore);
#endif
void optPerformStaticOptimizations(unsigned loopNum, LoopCloneContext* context DEBUGARG(bool fastPath));
bool optComputeDerefConditions(unsigned loopNum, LoopCloneContext* context);
@@ -6361,14 +6348,14 @@ protected:
unsigned short rcdHashValue; // to make matching faster
unsigned short rcdIndex; // 0..optRngChkCount-1
- GenTreePtr rcdTree; // the array index tree
+ GenTree* rcdTree; // the array index tree
};
unsigned optRngChkCount;
static const size_t optRngChkHashSize;
- ssize_t optGetArrayRefScaleAndIndex(GenTreePtr mul, GenTreePtr* pIndex DEBUGARG(bool bRngChk));
- GenTreePtr optFindLocalInit(BasicBlock* block, GenTreePtr local, VARSET_TP* pKilledInOut, bool* isKilledAfterInit);
+ ssize_t optGetArrayRefScaleAndIndex(GenTree* mul, GenTree** pIndex DEBUGARG(bool bRngChk));
+ GenTree* optFindLocalInit(BasicBlock* block, GenTree* local, VARSET_TP* pKilledInOut, bool* isKilledAfterInit);
bool optReachWithoutCall(BasicBlock* srcBB, BasicBlock* dstBB);
@@ -6485,20 +6472,20 @@ private:
static fgWalkPreFn rpMarkRegIntf;
regMaskTP rpPredictAddressMode(
- GenTreePtr tree, var_types type, regMaskTP lockedRegs, regMaskTP rsvdRegs, GenTreePtr lenCSE);
+ GenTree* tree, var_types type, regMaskTP lockedRegs, regMaskTP rsvdRegs, GenTree* lenCSE);
void rpPredictRefAssign(unsigned lclNum);
- regMaskTP rpPredictBlkAsgRegUse(GenTreePtr tree, rpPredictReg predictReg, regMaskTP lockedRegs, regMaskTP rsvdRegs);
+ regMaskTP rpPredictBlkAsgRegUse(GenTree* tree, rpPredictReg predictReg, regMaskTP lockedRegs, regMaskTP rsvdRegs);
- regMaskTP rpPredictTreeRegUse(GenTreePtr tree, rpPredictReg predictReg, regMaskTP lockedRegs, regMaskTP rsvdRegs);
+ regMaskTP rpPredictTreeRegUse(GenTree* tree, rpPredictReg predictReg, regMaskTP lockedRegs, regMaskTP rsvdRegs);
regMaskTP rpPredictAssignRegVars(regMaskTP regAvail);
void rpPredictRegUse(); // Entry point
- unsigned raPredictTreeRegUse(GenTreePtr tree);
- unsigned raPredictListRegUse(GenTreePtr list);
+ unsigned raPredictTreeRegUse(GenTree* tree);
+ unsigned raPredictListRegUse(GenTree* list);
void raSetRegVarOrder(var_types regType,
regNumber* customVarOrder,
@@ -6524,7 +6511,7 @@ private:
#endif
#endif
- regMaskTP genReturnRegForTree(GenTreePtr tree);
+ regMaskTP genReturnRegForTree(GenTree* tree);
#endif // LEGACY_BACKEND
/* raIsVarargsStackArg is called by raMaskStkVars and by
@@ -6804,7 +6791,7 @@ public:
// Gets the offset of a MDArray's first element
unsigned eeGetMDArrayDataOffset(var_types type, unsigned rank);
- GenTreePtr eeGetPInvokeCookie(CORINFO_SIG_INFO* szMetaSig);
+ GenTree* eeGetPInvokeCookie(CORINFO_SIG_INFO* szMetaSig);
// Returns the page size for the target machine as reported by the EE.
inline size_t eeGetPageSize()
@@ -7029,9 +7016,9 @@ public:
static CorInfoHelpFunc eeGetHelperNum(CORINFO_METHOD_HANDLE method);
static fgWalkPreFn CountSharedStaticHelper;
- static bool IsSharedStaticHelper(GenTreePtr tree);
- static bool IsTreeAlwaysHoistable(GenTreePtr tree);
- static bool IsGcSafePoint(GenTreePtr tree);
+ static bool IsSharedStaticHelper(GenTree* tree);
+ static bool IsTreeAlwaysHoistable(GenTree* tree);
+ static bool IsGcSafePoint(GenTree* tree);
static CORINFO_FIELD_HANDLE eeFindJitDataOffs(unsigned jitDataOffs);
// returns true/false if 'field' is a Jit Data offset
@@ -7126,7 +7113,7 @@ public:
// whose return type is other than TYP_VOID. 2) GT_CALL node is a frequently used
// structure and IL offset is needed only when generating debuggable code. Therefore
// it is desirable to avoid memory size penalty in retail scenarios.
- typedef JitHashTable<GenTreePtr, JitPtrKeyFuncs<GenTree>, IL_OFFSETX> CallSiteILOffsetTable;
+ typedef JitHashTable<GenTree*, JitPtrKeyFuncs<GenTree>, IL_OFFSETX> CallSiteILOffsetTable;
CallSiteILOffsetTable* genCallSite2ILOffsetMap;
unsigned genReturnLocal; // Local number for the return value when applicable.
@@ -7219,25 +7206,25 @@ public:
// LIVENESS
- VARSET_TP compCurLife; // current live variables
- GenTreePtr compCurLifeTree; // node after which compCurLife has been computed
+ VARSET_TP compCurLife; // current live variables
+ GenTree* compCurLifeTree; // node after which compCurLife has been computed
template <bool ForCodeGen>
- void compChangeLife(VARSET_VALARG_TP newLife DEBUGARG(GenTreePtr tree));
+ void compChangeLife(VARSET_VALARG_TP newLife DEBUGARG(GenTree* tree));
- void genChangeLife(VARSET_VALARG_TP newLife DEBUGARG(GenTreePtr tree))
+ void genChangeLife(VARSET_VALARG_TP newLife DEBUGARG(GenTree* tree))
{
compChangeLife</*ForCodeGen*/ true>(newLife DEBUGARG(tree));
}
template <bool ForCodeGen>
- void compUpdateLife(GenTreePtr tree);
+ void compUpdateLife(GenTree* tree);
// Updates "compCurLife" to its state after evaluate of "true". If "pLastUseVars" is
// non-null, sets "*pLastUseVars" to the set of tracked variables for which "tree" was a last
// use. (Can be more than one var in the case of dependently promoted struct vars.)
template <bool ForCodeGen>
- void compUpdateLifeVar(GenTreePtr tree, VARSET_TP* pLastUseVars = nullptr);
+ void compUpdateLifeVar(GenTree* tree, VARSET_TP* pLastUseVars = nullptr);
template <bool ForCodeGen>
inline void compUpdateLife(VARSET_VALARG_TP newLife);
@@ -7260,7 +7247,7 @@ public:
// If "tree" is a indirection (GT_IND, or GT_OBJ) whose arg is an ADDR, whose arg is a LCL_VAR, return that LCL_VAR
// node, else NULL.
- static GenTreePtr fgIsIndirOfAddrOfLocal(GenTreePtr tree);
+ static GenTree* fgIsIndirOfAddrOfLocal(GenTree* tree);
// This is indexed by GT_OBJ nodes that are address of promoted struct variables, which
// have been annotated with the GTF_VAR_DEATH flag. If such a node is *not* mapped in this
@@ -7667,26 +7654,26 @@ private:
// Pops and returns GenTree node from importers type stack.
// Normalizes TYP_STRUCT value in case of GT_CALL, GT_RET_EXPR and arg nodes.
- GenTreePtr impSIMDPopStack(var_types type, bool expectAddr = false);
+ GenTree* impSIMDPopStack(var_types type, bool expectAddr = false);
// Create a GT_SIMD tree for a Get property of SIMD vector with a fixed index.
GenTreeSIMD* impSIMDGetFixed(var_types simdType, var_types baseType, unsigned simdSize, int index);
// Creates a GT_SIMD tree for Select operation
- GenTreePtr impSIMDSelect(CORINFO_CLASS_HANDLE typeHnd,
- var_types baseType,
- unsigned simdVectorSize,
- GenTree* op1,
- GenTree* op2,
- GenTree* op3);
+ GenTree* impSIMDSelect(CORINFO_CLASS_HANDLE typeHnd,
+ var_types baseType,
+ unsigned simdVectorSize,
+ GenTree* op1,
+ GenTree* op2,
+ GenTree* op3);
// Creates a GT_SIMD tree for Min/Max operation
- GenTreePtr impSIMDMinMax(SIMDIntrinsicID intrinsicId,
- CORINFO_CLASS_HANDLE typeHnd,
- var_types baseType,
- unsigned simdVectorSize,
- GenTree* op1,
- GenTree* op2);
+ GenTree* impSIMDMinMax(SIMDIntrinsicID intrinsicId,
+ CORINFO_CLASS_HANDLE typeHnd,
+ var_types baseType,
+ unsigned simdVectorSize,
+ GenTree* op1,
+ GenTree* op2);
// Transforms operands and returns the SIMD intrinsic to be applied on
// transformed operands to obtain given relop result.
@@ -7698,7 +7685,7 @@ private:
GenTree** op2);
// Creates a GT_SIMD tree for Abs intrinsic.
- GenTreePtr impSIMDAbs(CORINFO_CLASS_HANDLE typeHnd, var_types baseType, unsigned simdVectorSize, GenTree* op1);
+ GenTree* impSIMDAbs(CORINFO_CLASS_HANDLE typeHnd, var_types baseType, unsigned simdVectorSize, GenTree* op1);
#if defined(_TARGET_XARCH_) && !defined(LEGACY_BACKEND)
// Transforms operands and returns the SIMD intrinsic to be applied on
@@ -7729,21 +7716,21 @@ private:
CORINFO_CLASS_HANDLE typeHnd, unsigned simdVectorSize, var_types baseType, GenTree** op1, GenTree** op2);
#endif // defined(_TARGET_XARCH_) && !defined(LEGACY_BACKEND)
- void setLclRelatedToSIMDIntrinsic(GenTreePtr tree);
- bool areFieldsContiguous(GenTreePtr op1, GenTreePtr op2);
- bool areArrayElementsContiguous(GenTreePtr op1, GenTreePtr op2);
- bool areArgumentsContiguous(GenTreePtr op1, GenTreePtr op2);
- GenTreePtr createAddressNodeForSIMDInit(GenTreePtr tree, unsigned simdSize);
+ void setLclRelatedToSIMDIntrinsic(GenTree* tree);
+ bool areFieldsContiguous(GenTree* op1, GenTree* op2);
+ bool areArrayElementsContiguous(GenTree* op1, GenTree* op2);
+ bool areArgumentsContiguous(GenTree* op1, GenTree* op2);
+ GenTree* createAddressNodeForSIMDInit(GenTree* tree, unsigned simdSize);
// check methodHnd to see if it is a SIMD method that is expanded as an intrinsic in the JIT.
- GenTreePtr impSIMDIntrinsic(OPCODE opcode,
- GenTreePtr newobjThis,
- CORINFO_CLASS_HANDLE clsHnd,
- CORINFO_METHOD_HANDLE method,
- CORINFO_SIG_INFO* sig,
- int memberRef);
+ GenTree* impSIMDIntrinsic(OPCODE opcode,
+ GenTree* newobjThis,
+ CORINFO_CLASS_HANDLE clsHnd,
+ CORINFO_METHOD_HANDLE method,
+ CORINFO_SIG_INFO* sig,
+ int memberRef);
- GenTreePtr getOp1ForConstructor(OPCODE opcode, GenTreePtr newobjThis, CORINFO_CLASS_HANDLE clsHnd);
+ GenTree* getOp1ForConstructor(OPCODE opcode, GenTree* newobjThis, CORINFO_CLASS_HANDLE clsHnd);
// Whether SIMD vector occupies part of SIMD register.
// SSE2: vector2f/3f are considered sub register SIMD types.
@@ -8029,8 +8016,8 @@ public:
// the importing is completely finished.
#ifdef LEGACY_BACKEND
- JitExpandArrayStack<GenTreePtr>* compQMarks; // The set of QMark nodes created in the current compilation, so
- // we can iterate over these efficiently.
+ JitExpandArrayStack<GenTree*>* compQMarks; // The set of QMark nodes created in the current compilation, so
+ // we can iterate over these efficiently.
#endif
#if CPU_USES_BLOCK_MOVE
@@ -8655,7 +8642,7 @@ public:
#if FEATURE_MULTIREG_ARGS
// Given a GenTree node of TYP_STRUCT that represents a pass by value argument
// return the gcPtr layout for the pointers sized fields
- void getStructGcPtrsFromOp(GenTreePtr op, BYTE* gcPtrsOut);
+ void getStructGcPtrsFromOp(GenTree* op, BYTE* gcPtrsOut);
#endif // FEATURE_MULTIREG_ARGS
// Returns true if the method being compiled returns a value
@@ -8680,7 +8667,7 @@ public:
#endif
BasicBlock* compCurBB; // the current basic block in process
- GenTreePtr compCurStmt; // the current statement in process
+ GenTree* compCurStmt; // the current statement in process
#ifdef DEBUG
unsigned compCurStmtNum; // to give all statements an increasing StmtNum when printing dumps
#endif
@@ -9244,7 +9231,7 @@ public:
// Register allocator
void raInitStackFP();
void raEnregisterVarsPrePassStackFP();
- void raSetRegLclBirthDeath(GenTreePtr tree, VARSET_VALARG_TP lastlife, bool fromLDOBJ);
+ void raSetRegLclBirthDeath(GenTree* tree, VARSET_VALARG_TP lastlife, bool fromLDOBJ);
void raEnregisterVarsPostPassStackFP();
void raGenerateFPRefCounts();
void raEnregisterVarsStackFP();
@@ -9499,7 +9486,7 @@ public:
return compRoot->m_nodeTestData;
}
- typedef JitHashTable<GenTreePtr, JitPtrKeyFuncs<GenTree>, int> NodeToIntMap;
+ typedef JitHashTable<GenTree*, JitPtrKeyFuncs<GenTree>, int> NodeToIntMap;
// Returns the set (i.e., the domain of the result map) of nodes that are keys in m_nodeTestData, and
// currently occur in the AST graph.
@@ -9507,11 +9494,11 @@ public:
// Node "from" is being eliminated, and being replaced by node "to". If "from" had any associated
// test data, associate that data with "to".
- void TransferTestDataToNode(GenTreePtr from, GenTreePtr to);
+ void TransferTestDataToNode(GenTree* from, GenTree* to);
// Requires that "to" is a clone of "from". If any nodes in the "from" tree
// have annotations, attach similar annotations to the corresponding nodes in "to".
- void CopyTestDataToCloneTree(GenTreePtr from, GenTreePtr to);
+ void CopyTestDataToCloneTree(GenTree* from, GenTree* to);
// These are the methods that test that the various conditions implied by the
// test attributes are satisfied.
@@ -9535,7 +9522,7 @@ public:
return compRoot->m_fieldSeqStore;
}
- typedef JitHashTable<GenTreePtr, JitPtrKeyFuncs<GenTree>, FieldSeqNode*> NodeToFieldSeqMap;
+ typedef JitHashTable<GenTree*, JitPtrKeyFuncs<GenTree>, FieldSeqNode*> NodeToFieldSeqMap;
// Some nodes of "TYP_BYREF" or "TYP_I_IMPL" actually represent the address of a field within a struct, but since
// the offset of the field is zero, there's no "GT_ADD" node. We normally attach a field sequence to the constant
@@ -9566,7 +9553,7 @@ public:
// One exception above is that "op1" is a node of type "TYP_REF" where "op1" is a GT_LCL_VAR.
// This happens when System.Object vtable pointer is a regular field at offset 0 in System.Private.CoreLib in
// CoreRT. Such case is handled same as the default case.
- void fgAddFieldSeqForZeroOffset(GenTreePtr op1, FieldSeqNode* fieldSeq);
+ void fgAddFieldSeqForZeroOffset(GenTree* op1, FieldSeqNode* fieldSeq);
typedef JitHashTable<const GenTree*, JitPtrKeyFuncs<GenTree>, ArrayInfo> NodeToArrayInfoMap;
NodeToArrayInfoMap* m_arrayInfoMap;
@@ -9687,9 +9674,9 @@ public:
#endif // defined(FEATURE_UNIX_AMD64_STRUCT_PASSING)
void fgMorphMultiregStructArgs(GenTreeCall* call);
- GenTreePtr fgMorphMultiregStructArg(GenTreePtr arg, fgArgTabEntry* fgEntryPtr);
+ GenTree* fgMorphMultiregStructArg(GenTree* arg, fgArgTabEntry* fgEntryPtr);
- bool killGCRefs(GenTreePtr tree);
+ bool killGCRefs(GenTree* tree);
}; // end of class Compiler
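
For context on the compiler.h hunks above: GenTreePtr appears to have been a plain alias for GenTree*, so every signature change here preserves the type exactly and only spells the pointer out. A minimal sketch of the removed alias and the new spelling (the typedef line itself is an assumption; it is not visible in these hunks):

    class GenTree;
    typedef GenTree* GenTreePtr;   // the alias being removed

    // declarations now name the pointer type directly:
    GenTree* tree = nullptr;
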
diff --git a/src/jit/compiler.hpp b/src/jit/compiler.hpp
index edfdd2ed9d..5a2a2342e8 100644
--- a/src/jit/compiler.hpp
+++ b/src/jit/compiler.hpp
@@ -852,7 +852,7 @@ void* GenTree::operator new(size_t sz, Compiler* comp, genTreeOps oper)
#if SMALL_TREE_NODES
size_t size = GenTree::s_gtNodeSizes[oper];
#else
- size_t size = TREE_NODE_SZ_LARGE;
+ size_t size = TREE_NODE_SZ_LARGE;
#endif
#if MEASURE_NODE_SIZE
@@ -933,7 +933,7 @@ inline GenTree::GenTree(genTreeOps oper, var_types type DEBUGARG(bool largeNode)
/*****************************************************************************/
-inline GenTreeStmt* Compiler::gtNewStmt(GenTreePtr expr, IL_OFFSETX offset)
+inline GenTreeStmt* Compiler::gtNewStmt(GenTree* expr, IL_OFFSETX offset)
{
/* NOTE - GT_STMT is now a small node in retail */
@@ -944,7 +944,7 @@ inline GenTreeStmt* Compiler::gtNewStmt(GenTreePtr expr, IL_OFFSETX offset)
/*****************************************************************************/
-inline GenTreePtr Compiler::gtNewOperNode(genTreeOps oper, var_types type, GenTreePtr op1, bool doSimplifications)
+inline GenTree* Compiler::gtNewOperNode(genTreeOps oper, var_types type, GenTree* op1, bool doSimplifications)
{
assert((GenTree::OperKind(oper) & (GTK_UNOP | GTK_BINOP)) != 0);
assert((GenTree::OperKind(oper) & GTK_EXOP) ==
@@ -980,7 +980,7 @@ inline GenTreePtr Compiler::gtNewOperNode(genTreeOps oper, var_types type, GenTr
}
}
- GenTreePtr node = new (this, oper) GenTreeOp(oper, type, op1, nullptr);
+ GenTree* node = new (this, oper) GenTreeOp(oper, type, op1, nullptr);
//
// the GT_ADDR of a Local Variable implies GTF_ADDR_ONSTACK
@@ -1008,7 +1008,7 @@ inline genTreeOps LargeOpOpcode()
* Use to create nodes which may later be morphed to another (big) operator
*/
-inline GenTreePtr Compiler::gtNewLargeOperNode(genTreeOps oper, var_types type, GenTreePtr op1, GenTreePtr op2)
+inline GenTree* Compiler::gtNewLargeOperNode(genTreeOps oper, var_types type, GenTree* op1, GenTree* op2)
{
assert((GenTree::OperKind(oper) & (GTK_UNOP | GTK_BINOP)) != 0);
assert((GenTree::OperKind(oper) & GTK_EXOP) ==
@@ -1018,9 +1018,9 @@ inline GenTreePtr Compiler::gtNewLargeOperNode(genTreeOps oper, var_types type,
assert(GenTree::s_gtNodeSizes[oper] == TREE_NODE_SZ_SMALL);
- GenTreePtr node = new (this, LargeOpOpcode()) GenTreeOp(oper, type, op1, op2 DEBUGARG(/*largeNode*/ true));
+ GenTree* node = new (this, LargeOpOpcode()) GenTreeOp(oper, type, op1, op2 DEBUGARG(/*largeNode*/ true));
#else
- GenTreePtr node = new (this, oper) GenTreeOp(oper, type, op1, op2);
+ GenTree* node = new (this, oper) GenTreeOp(oper, type, op1, op2);
#endif
return node;
@@ -1032,9 +1032,9 @@ inline GenTreePtr Compiler::gtNewLargeOperNode(genTreeOps oper, var_types type,
* that may need to be fixed up).
*/
-inline GenTreePtr Compiler::gtNewIconHandleNode(size_t value, unsigned flags, FieldSeqNode* fields)
+inline GenTree* Compiler::gtNewIconHandleNode(size_t value, unsigned flags, FieldSeqNode* fields)
{
- GenTreePtr node;
+ GenTree* node;
assert((flags & (GTF_ICON_HDL_MASK | GTF_ICON_FIELD_OFF)) != 0);
// Interpret "fields == NULL" as "not a field."
@@ -1059,7 +1059,7 @@ inline GenTreePtr Compiler::gtNewIconHandleNode(size_t value, unsigned flags, Fi
* These are versions for each specific type of HANDLE
*/
-inline GenTreePtr Compiler::gtNewIconEmbScpHndNode(CORINFO_MODULE_HANDLE scpHnd)
+inline GenTree* Compiler::gtNewIconEmbScpHndNode(CORINFO_MODULE_HANDLE scpHnd)
{
void *embedScpHnd, *pEmbedScpHnd;
@@ -1072,7 +1072,7 @@ inline GenTreePtr Compiler::gtNewIconEmbScpHndNode(CORINFO_MODULE_HANDLE scpHnd)
//-----------------------------------------------------------------------------
-inline GenTreePtr Compiler::gtNewIconEmbClsHndNode(CORINFO_CLASS_HANDLE clsHnd)
+inline GenTree* Compiler::gtNewIconEmbClsHndNode(CORINFO_CLASS_HANDLE clsHnd)
{
void *embedClsHnd, *pEmbedClsHnd;
@@ -1085,7 +1085,7 @@ inline GenTreePtr Compiler::gtNewIconEmbClsHndNode(CORINFO_CLASS_HANDLE clsHnd)
//-----------------------------------------------------------------------------
-inline GenTreePtr Compiler::gtNewIconEmbMethHndNode(CORINFO_METHOD_HANDLE methHnd)
+inline GenTree* Compiler::gtNewIconEmbMethHndNode(CORINFO_METHOD_HANDLE methHnd)
{
void *embedMethHnd, *pEmbedMethHnd;
@@ -1098,7 +1098,7 @@ inline GenTreePtr Compiler::gtNewIconEmbMethHndNode(CORINFO_METHOD_HANDLE methHn
//-----------------------------------------------------------------------------
-inline GenTreePtr Compiler::gtNewIconEmbFldHndNode(CORINFO_FIELD_HANDLE fldHnd)
+inline GenTree* Compiler::gtNewIconEmbFldHndNode(CORINFO_FIELD_HANDLE fldHnd)
{
void *embedFldHnd, *pEmbedFldHnd;
@@ -1150,12 +1150,12 @@ inline GenTreeCall* Compiler::gtNewHelperCallNode(unsigned helper, var_types typ
// Return Value:
// Returns GT_ALLOCOBJ node that will be later morphed into an
// allocation helper call or local variable allocation on the stack.
-inline GenTreePtr Compiler::gtNewAllocObjNode(unsigned int helper,
- CORINFO_CLASS_HANDLE clsHnd,
- var_types type,
- GenTreePtr op1)
+inline GenTree* Compiler::gtNewAllocObjNode(unsigned int helper,
+ CORINFO_CLASS_HANDLE clsHnd,
+ var_types type,
+ GenTree* op1)
{
- GenTreePtr node = new (this, GT_ALLOCOBJ) GenTreeAllocObj(type, helper, clsHnd, op1);
+ GenTree* node = new (this, GT_ALLOCOBJ) GenTreeAllocObj(type, helper, clsHnd, op1);
return node;
}
@@ -1178,9 +1178,9 @@ inline GenTree* Compiler::gtNewRuntimeLookup(CORINFO_GENERIC_HANDLE hnd, CorInfo
/*****************************************************************************/
-inline GenTreePtr Compiler::gtNewCodeRef(BasicBlock* block)
+inline GenTree* Compiler::gtNewCodeRef(BasicBlock* block)
{
- GenTreePtr node = new (this, GT_LABEL) GenTreeLabel(block);
+ GenTree* node = new (this, GT_LABEL) GenTreeLabel(block);
return node;
}
@@ -1189,16 +1189,16 @@ inline GenTreePtr Compiler::gtNewCodeRef(BasicBlock* block)
* A little helper to create a data member reference node.
*/
-inline GenTreePtr Compiler::gtNewFieldRef(
- var_types typ, CORINFO_FIELD_HANDLE fldHnd, GenTreePtr obj, DWORD offset, bool nullcheck)
+inline GenTree* Compiler::gtNewFieldRef(
+ var_types typ, CORINFO_FIELD_HANDLE fldHnd, GenTree* obj, DWORD offset, bool nullcheck)
{
#if SMALL_TREE_NODES
/* 'GT_FIELD' nodes may later get transformed into 'GT_IND' */
assert(GenTree::s_gtNodeSizes[GT_IND] <= GenTree::s_gtNodeSizes[GT_FIELD]);
- GenTreePtr tree = new (this, GT_FIELD) GenTreeField(typ);
+ GenTree* tree = new (this, GT_FIELD) GenTreeField(typ);
#else
- GenTreePtr tree = new (this, GT_FIELD) GenTreeField(typ);
+ GenTree* tree = new (this, GT_FIELD) GenTreeField(typ);
#endif
tree->gtField.gtFldObj = obj;
tree->gtField.gtFldHnd = fldHnd;
@@ -1241,7 +1241,7 @@ inline GenTreePtr Compiler::gtNewFieldRef(
* A little helper to create an array index node.
*/
-inline GenTreePtr Compiler::gtNewIndexRef(var_types typ, GenTreePtr arrayOp, GenTreePtr indexOp)
+inline GenTree* Compiler::gtNewIndexRef(var_types typ, GenTree* arrayOp, GenTree* indexOp)
{
GenTreeIndex* gtIndx = new (this, GT_INDEX) GenTreeIndex(typ, arrayOp, indexOp, genTypeSize(typ));
@@ -1291,7 +1291,7 @@ inline GenTree* Compiler::gtNewIndir(var_types typ, GenTree* addr)
* any code. We currently use a "nop" node of type void for this purpose.
*/
-inline GenTreePtr Compiler::gtNewNothingNode()
+inline GenTree* Compiler::gtNewNothingNode()
{
return new (this, GT_NOP) GenTreeOp(GT_NOP, TYP_VOID);
}
@@ -1321,15 +1321,15 @@ inline void GenTree::gtBashToNOP()
// return new arg placeholder node. Does not do anything but has a type associated
// with it so we can keep track of register arguments in lists associated w/ call nodes
-inline GenTreePtr Compiler::gtNewArgPlaceHolderNode(var_types type, CORINFO_CLASS_HANDLE clsHnd)
+inline GenTree* Compiler::gtNewArgPlaceHolderNode(var_types type, CORINFO_CLASS_HANDLE clsHnd)
{
- GenTreePtr node = new (this, GT_ARGPLACE) GenTreeArgPlace(type, clsHnd);
+ GenTree* node = new (this, GT_ARGPLACE) GenTreeArgPlace(type, clsHnd);
return node;
}
/*****************************************************************************/
-inline GenTreePtr Compiler::gtUnusedValNode(GenTreePtr expr)
+inline GenTree* Compiler::gtUnusedValNode(GenTree* expr)
{
return gtNewOperNode(GT_COMMA, TYP_VOID, expr, gtNewNothingNode());
}
@@ -1344,7 +1344,7 @@ inline GenTreePtr Compiler::gtUnusedValNode(GenTreePtr expr)
inline void Compiler::gtSetStmtInfo(GenTree* stmt)
{
assert(stmt->gtOper == GT_STMT);
- GenTreePtr expr = stmt->gtStmt.gtStmtExpr;
+ GenTree* expr = stmt->gtStmt.gtStmtExpr;
#if FEATURE_STACK_FP_X87
/* We will try to compute the FP stack level at each node */
@@ -1455,13 +1455,13 @@ inline void GenTree::SetOper(genTreeOps oper, ValueNumberUpdate vnUpdate)
}
}
-inline GenTreePtr Compiler::gtNewCastNode(var_types typ, GenTreePtr op1, var_types castType)
+inline GenTree* Compiler::gtNewCastNode(var_types typ, GenTree* op1, var_types castType)
{
- GenTreePtr res = new (this, GT_CAST) GenTreeCast(typ, op1, castType);
+ GenTree* res = new (this, GT_CAST) GenTreeCast(typ, op1, castType);
return res;
}
-inline GenTreePtr Compiler::gtNewCastNodeL(var_types typ, GenTreePtr op1, var_types castType)
+inline GenTree* Compiler::gtNewCastNodeL(var_types typ, GenTree* op1, var_types castType)
{
/* Some casts get transformed into 'GT_CALL' or 'GT_IND' nodes */
@@ -1470,7 +1470,7 @@ inline GenTreePtr Compiler::gtNewCastNodeL(var_types typ, GenTreePtr op1, var_ty
/* Make a big node first and then change it to be GT_CAST */
- GenTreePtr res = new (this, LargeOpOpcode()) GenTreeCast(typ, op1, castType DEBUGARG(/*largeNode*/ true));
+ GenTree* res = new (this, LargeOpOpcode()) GenTreeCast(typ, op1, castType DEBUGARG(/*largeNode*/ true));
return res;
}
@@ -1493,7 +1493,7 @@ inline void GenTree::SetOper(genTreeOps oper, ValueNumberUpdate vnUpdate)
}
}
-inline void GenTree::ReplaceWith(GenTreePtr src)
+inline void GenTree::ReplaceWith(GenTree* src)
{
RecordOperBashing(OperGet(), src->OperGet()); // nop unless NODEBASH_STATS is enabled
*this = *src;
@@ -1502,13 +1502,13 @@ inline void GenTree::ReplaceWith(GenTreePtr src)
#endif
}
-inline GenTreePtr Compiler::gtNewCastNode(var_types typ, GenTreePtr op1, var_types castType)
+inline GenTree* Compiler::gtNewCastNode(var_types typ, GenTree* op1, var_types castType)
{
- GenTreePtr tree = gtNewOperNode(GT_CAST, typ, op1);
+ GenTree* tree = gtNewOperNode(GT_CAST, typ, op1);
tree->gtCast.gtCastType = castType;
}
-inline GenTreePtr Compiler::gtNewCastNodeL(var_types typ, GenTreePtr op1, var_types castType)
+inline GenTree* Compiler::gtNewCastNodeL(var_types typ, GenTree* op1, var_types castType)
{
return gtNewCastNode(typ, op1, castType);
}
@@ -2163,9 +2163,9 @@ inline void LclVarDsc::addPrefReg(regMaskTP regMask, Compiler* comp)
* referenced in a statement.
*/
-inline VARSET_VALRET_TP Compiler::lvaStmtLclMask(GenTreePtr stmt)
+inline VARSET_VALRET_TP Compiler::lvaStmtLclMask(GenTree* stmt)
{
- GenTreePtr tree;
+ GenTree* tree;
unsigned varNum;
LclVarDsc* varDsc;
VARSET_TP lclMask(VarSetOps::MakeEmpty(this));
@@ -2738,7 +2738,7 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
/*****************************************************************************/
-inline bool rpCanAsgOperWithoutReg(GenTreePtr op, bool lclvar)
+inline bool rpCanAsgOperWithoutReg(GenTree* op, bool lclvar)
{
var_types type;
@@ -2799,7 +2799,7 @@ inline bool Compiler::compCanEncodePtrArgCntMax()
*/
inline Compiler::fgWalkResult Compiler::fgWalkTreePre(
- GenTreePtr* pTree, fgWalkPreFn* visitor, void* callBackData, bool lclVarsOnly, bool computeStack)
+ GenTree** pTree, fgWalkPreFn* visitor, void* callBackData, bool lclVarsOnly, bool computeStack)
{
fgWalkData walkData;
@@ -2856,7 +2856,7 @@ inline Compiler::fgWalkResult Compiler::fgWalkTreePre(
* computeStack - true if we want to make stack visible to callback function
*/
-inline Compiler::fgWalkResult Compiler::fgWalkTreePost(GenTreePtr* pTree,
+inline Compiler::fgWalkResult Compiler::fgWalkTreePost(GenTree** pTree,
fgWalkPostFn* visitor,
void* callBackData,
bool computeStack)
@@ -2895,7 +2895,7 @@ inline Compiler::fgWalkResult Compiler::fgWalkTreePost(GenTreePtr* pTree,
* WALK_SKIP_SUBTREES don't walk any subtrees of the node just visited
*/
-inline Compiler::fgWalkResult Compiler::fgWalkTree(GenTreePtr* pTree,
+inline Compiler::fgWalkResult Compiler::fgWalkTree(GenTree** pTree,
fgWalkPreFn* preVisitor,
fgWalkPreFn* postVisitor,
void* callBackData)
@@ -3115,7 +3115,7 @@ inline bool Compiler::fgIsBigOffset(size_t offset)
* if "divisor" is a positive integer constant and a power of 2 other than 1 and INT_MIN
*/
-inline bool Compiler::fgIsSignedDivOptimizable(GenTreePtr divisor)
+inline bool Compiler::fgIsSignedDivOptimizable(GenTree* divisor)
{
if (!opts.MinOpts() && divisor->IsCnsIntOrI())
{
@@ -3142,7 +3142,7 @@ inline bool Compiler::fgIsSignedDivOptimizable(GenTreePtr divisor)
* if "divisor" is an unsigned integer constant and a power of 2 other than 1 and zero.
*/
-inline bool Compiler::fgIsUnsignedDivOptimizable(GenTreePtr divisor)
+inline bool Compiler::fgIsUnsignedDivOptimizable(GenTree* divisor)
{
if (!opts.MinOpts() && divisor->IsCnsIntOrI())
{
@@ -3161,7 +3161,7 @@ inline bool Compiler::fgIsUnsignedDivOptimizable(GenTreePtr divisor)
* if "divisor" is a positive integer constant and a power of 2 other than zero
*/
-inline bool Compiler::fgIsSignedModOptimizable(GenTreePtr divisor)
+inline bool Compiler::fgIsSignedModOptimizable(GenTree* divisor)
{
if (!opts.MinOpts() && divisor->IsCnsIntOrI())
{
@@ -3180,7 +3180,7 @@ inline bool Compiler::fgIsSignedModOptimizable(GenTreePtr divisor)
* if "divisor" is a positive integer constant and a power of 2 other than zero
*/
-inline bool Compiler::fgIsUnsignedModOptimizable(GenTreePtr divisor)
+inline bool Compiler::fgIsUnsignedModOptimizable(GenTree* divisor)
{
if (!opts.MinOpts() && divisor->IsCnsIntOrI())
{
@@ -3553,7 +3553,7 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
*/
template <bool ForCodeGen>
-inline void Compiler::compUpdateLife(GenTreePtr tree)
+inline void Compiler::compUpdateLife(GenTree* tree)
{
// TODO-Cleanup: We shouldn't really be calling this more than once
if (tree == compCurLifeTree)
@@ -3759,8 +3759,8 @@ inline void Compiler::LoopDsc::VERIFY_lpIterTree()
if (lpIterTree->OperGet() == GT_ASG)
{
- GenTreePtr lhs = lpIterTree->gtOp.gtOp1;
- GenTreePtr rhs = lpIterTree->gtOp.gtOp2;
+ GenTree* lhs = lpIterTree->gtOp.gtOp1;
+ GenTree* rhs = lpIterTree->gtOp.gtOp2;
assert(lhs->OperGet() == GT_LCL_VAR);
switch (rhs->gtOper)
@@ -3801,7 +3801,7 @@ inline int Compiler::LoopDsc::lpIterConst()
VERIFY_lpIterTree();
if (lpIterTree->OperGet() == GT_ASG)
{
- GenTreePtr rhs = lpIterTree->gtOp.gtOp2;
+ GenTree* rhs = lpIterTree->gtOp.gtOp2;
return (int)rhs->gtOp.gtOp2->gtIntCon.gtIconVal;
}
else
@@ -3817,7 +3817,7 @@ inline genTreeOps Compiler::LoopDsc::lpIterOper()
VERIFY_lpIterTree();
if (lpIterTree->OperGet() == GT_ASG)
{
- GenTreePtr rhs = lpIterTree->gtOp.gtOp2;
+ GenTree* rhs = lpIterTree->gtOp.gtOp2;
return rhs->OperGet();
}
else
@@ -3850,8 +3850,8 @@ inline void Compiler::LoopDsc::VERIFY_lpTestTree()
genTreeOps oper = lpTestTree->OperGet();
assert(GenTree::OperIsCompare(oper));
- GenTreePtr iterator = nullptr;
- GenTreePtr limit = nullptr;
+ GenTree* iterator = nullptr;
+ GenTree* limit = nullptr;
if ((lpTestTree->gtOp.gtOp2->gtOper == GT_LCL_VAR) && (lpTestTree->gtOp.gtOp2->gtFlags & GTF_VAR_ITERATOR) != 0)
{
iterator = lpTestTree->gtOp.gtOp2;
@@ -3904,7 +3904,7 @@ inline genTreeOps Compiler::LoopDsc::lpTestOper()
//-----------------------------------------------------------------------------
-inline GenTreePtr Compiler::LoopDsc::lpIterator()
+inline GenTree* Compiler::LoopDsc::lpIterator()
{
VERIFY_lpTestTree();
@@ -3913,7 +3913,7 @@ inline GenTreePtr Compiler::LoopDsc::lpIterator()
//-----------------------------------------------------------------------------
-inline GenTreePtr Compiler::LoopDsc::lpLimit()
+inline GenTree* Compiler::LoopDsc::lpLimit()
{
VERIFY_lpTestTree();
@@ -3927,7 +3927,7 @@ inline int Compiler::LoopDsc::lpConstLimit()
VERIFY_lpTestTree();
assert(lpFlags & LPFLG_CONST_LIMIT);
- GenTreePtr limit = lpLimit();
+ GenTree* limit = lpLimit();
assert(limit->OperIsConst());
return (int)limit->gtIntCon.gtIconVal;
}
@@ -3939,7 +3939,7 @@ inline unsigned Compiler::LoopDsc::lpVarLimit()
VERIFY_lpTestTree();
assert(lpFlags & LPFLG_VAR_LIMIT);
- GenTreePtr limit = lpLimit();
+ GenTree* limit = lpLimit();
assert(limit->OperGet() == GT_LCL_VAR);
return limit->gtLclVarCommon.gtLclNum;
}
@@ -3951,7 +3951,7 @@ inline bool Compiler::LoopDsc::lpArrLenLimit(Compiler* comp, ArrIndex* index)
VERIFY_lpTestTree();
assert(lpFlags & LPFLG_ARRLEN_LIMIT);
- GenTreePtr limit = lpLimit();
+ GenTree* limit = lpLimit();
assert(limit->OperGet() == GT_ARR_LENGTH);
// Check if we have a.length or a[i][j].length
@@ -3991,7 +3991,7 @@ inline bool Compiler::optIsVarAssgLoop(unsigned lnum, unsigned var)
* If the tree is a tracked local variable, return its LclVarDsc ptr.
*/
-inline LclVarDsc* Compiler::optIsTrackedLocal(GenTreePtr tree)
+inline LclVarDsc* Compiler::optIsTrackedLocal(GenTree* tree)
{
LclVarDsc* varDsc;
unsigned lclNum;
@@ -4086,7 +4086,7 @@ inline CorInfoHelpFunc Compiler::eeGetHelperNum(CORINFO_METHOD_HANDLE method)
return ((CorInfoHelpFunc)(((size_t)method) >> 2));
}
-inline Compiler::fgWalkResult Compiler::CountSharedStaticHelper(GenTreePtr* pTree, fgWalkData* data)
+inline Compiler::fgWalkResult Compiler::CountSharedStaticHelper(GenTree** pTree, fgWalkData* data)
{
if (Compiler::IsSharedStaticHelper(*pTree))
{
@@ -4100,7 +4100,7 @@ inline Compiler::fgWalkResult Compiler::CountSharedStaticHelper(GenTreePtr* pTre
// TODO-Cleanup: Replace calls to IsSharedStaticHelper with new HelperCallProperties
//
-inline bool Compiler::IsSharedStaticHelper(GenTreePtr tree)
+inline bool Compiler::IsSharedStaticHelper(GenTree* tree)
{
if (tree->gtOper != GT_CALL || tree->gtCall.gtCallType != CT_HELPER)
{
@@ -4143,7 +4143,7 @@ inline bool Compiler::IsSharedStaticHelper(GenTreePtr tree)
return result1;
}
-inline bool Compiler::IsTreeAlwaysHoistable(GenTreePtr tree)
+inline bool Compiler::IsTreeAlwaysHoistable(GenTree* tree)
{
if (IsSharedStaticHelper(tree))
{
@@ -4155,7 +4155,7 @@ inline bool Compiler::IsTreeAlwaysHoistable(GenTreePtr tree)
}
}
-inline bool Compiler::IsGcSafePoint(GenTreePtr tree)
+inline bool Compiler::IsGcSafePoint(GenTree* tree)
{
if (tree->IsCall())
{
@@ -4327,7 +4327,7 @@ inline bool Compiler::compIsProfilerHookNeeded()
* We simply grab a temp and assign 0 to it and use it in place of the NULL.
*/
-inline GenTreePtr Compiler::impCheckForNullPointer(GenTreePtr obj)
+inline GenTree* Compiler::impCheckForNullPointer(GenTree* obj)
{
/* If it is not a GC type, we will be able to fold it.
So don't need to do anything */
@@ -4362,7 +4362,7 @@ inline GenTreePtr Compiler::impCheckForNullPointer(GenTreePtr obj)
* even if we might have created the copy of 'this' pointer in lvaArg0Var.
*/
-inline bool Compiler::impIsThis(GenTreePtr obj)
+inline bool Compiler::impIsThis(GenTree* obj)
{
if (compIsForInlining())
{
@@ -4723,7 +4723,7 @@ bool Compiler::fgExcludeFromSsa(unsigned lclNum)
}
/*****************************************************************************/
-ValueNum Compiler::GetUseAsgDefVNOrTreeVN(GenTreePtr op)
+ValueNum Compiler::GetUseAsgDefVNOrTreeVN(GenTree* op)
{
if (op->gtFlags & GTF_VAR_USEASG)
{
@@ -4738,7 +4738,7 @@ ValueNum Compiler::GetUseAsgDefVNOrTreeVN(GenTreePtr op)
}
/*****************************************************************************/
-unsigned Compiler::GetSsaNumForLocalVarDef(GenTreePtr lcl)
+unsigned Compiler::GetSsaNumForLocalVarDef(GenTree* lcl)
{
// Address-taken variables don't have SSA numbers.
if (fgExcludeFromSsa(lcl->AsLclVarCommon()->gtLclNum))
@@ -5139,7 +5139,7 @@ inline static bool StructHasCustomLayout(DWORD attribs)
* to catch extra references
*/
-inline void DEBUG_DESTROY_NODE(GenTreePtr tree)
+inline void DEBUG_DESTROY_NODE(GenTree* tree)
{
#ifdef DEBUG
// printf("DEBUG_DESTROY_NODE for [0x%08x]\n", tree);
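
The walker entry points in compiler.hpp (fgWalkTreePre, fgWalkTreePost, fgWalkTree) keep taking the address of the root node, presumably so a visitor can replace *pTree in place; only the spelling changes from GenTreePtr* to GenTree**. A hypothetical call site under the new signature — the visitor name and the null callback data are invented for illustration:

    GenTree* expr = stmt->gtStmt.gtStmtExpr;            // root expression of a statement
    comp->fgWalkTreePre(&expr, MyPreVisitor, nullptr);  // assumes the trailing bool parameters default to false
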
diff --git a/src/jit/copyprop.cpp b/src/jit/copyprop.cpp
index 12206671f1..c1689f2567 100644
--- a/src/jit/copyprop.cpp
+++ b/src/jit/copyprop.cpp
@@ -34,9 +34,9 @@ inline static T* allocate_any(jitstd::allocator<void>& alloc, size_t count = 1)
*/
void Compiler::optBlockCopyPropPopStacks(BasicBlock* block, LclNumToGenTreePtrStack* curSsaName)
{
- for (GenTreePtr stmt = block->bbTreeList; stmt; stmt = stmt->gtNext)
+ for (GenTree* stmt = block->bbTreeList; stmt; stmt = stmt->gtNext)
{
- for (GenTreePtr tree = stmt->gtStmt.gtStmtList; tree; tree = tree->gtNext)
+ for (GenTree* tree = stmt->gtStmt.gtStmtList; tree; tree = tree->gtNext)
{
if (!tree->IsLocal())
{
@@ -121,7 +121,7 @@ int Compiler::optCopyProp_LclVarScore(LclVarDsc* lclVarDsc, LclVarDsc* copyVarDs
// tree - The tree to perform copy propagation on
// curSsaName - The map from lclNum to its recently live definitions as a stack
-void Compiler::optCopyProp(BasicBlock* block, GenTreePtr stmt, GenTreePtr tree, LclNumToGenTreePtrStack* curSsaName)
+void Compiler::optCopyProp(BasicBlock* block, GenTree* stmt, GenTree* tree, LclNumToGenTreePtrStack* curSsaName)
{
// TODO-Review: EH successor/predecessor iteration seems broken.
if (block->bbCatchTyp == BBCT_FINALLY || block->bbCatchTyp == BBCT_FAULT)
@@ -158,7 +158,7 @@ void Compiler::optCopyProp(BasicBlock* block, GenTreePtr stmt, GenTreePtr tree,
{
unsigned newLclNum = iter.Get();
- GenTreePtr op = iter.GetValue()->Index(0);
+ GenTree* op = iter.GetValue()->Index(0);
// Nothing to do if same.
if (lclNum == newLclNum)
@@ -280,7 +280,7 @@ void Compiler::optCopyProp(BasicBlock* block, GenTreePtr stmt, GenTreePtr tree,
*
* Helper to check if tree is a local that participates in SSA numbering.
*/
-bool Compiler::optIsSsaLocal(GenTreePtr tree)
+bool Compiler::optIsSsaLocal(GenTree* tree)
{
return tree->IsLocal() && !fgExcludeFromSsa(tree->AsLclVarCommon()->GetLclNum());
}
@@ -301,12 +301,12 @@ void Compiler::optBlockCopyProp(BasicBlock* block, LclNumToGenTreePtrStack* curS
// There are no definitions at the start of the block. So clear it.
compCurLifeTree = nullptr;
VarSetOps::Assign(this, compCurLife, block->bbLiveIn);
- for (GenTreePtr stmt = block->bbTreeList; stmt; stmt = stmt->gtNext)
+ for (GenTree* stmt = block->bbTreeList; stmt; stmt = stmt->gtNext)
{
VarSetOps::ClearD(this, optCopyPropKillSet);
// Walk the tree to find if any local variable can be replaced with current live definitions.
- for (GenTreePtr tree = stmt->gtStmt.gtStmtList; tree; tree = tree->gtNext)
+ for (GenTree* tree = stmt->gtStmt.gtStmtList; tree; tree = tree->gtNext)
{
compUpdateLife</*ForCodeGen*/ false>(tree);
@@ -333,7 +333,7 @@ void Compiler::optBlockCopyProp(BasicBlock* block, LclNumToGenTreePtrStack* curS
}
// This logic must be in sync with SSA renaming process.
- for (GenTreePtr tree = stmt->gtStmt.gtStmtList; tree; tree = tree->gtNext)
+ for (GenTree* tree = stmt->gtStmt.gtStmtList; tree; tree = tree->gtNext)
{
if (!optIsSsaLocal(tree))
{
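
In the copy-prop hunks above, curSsaName still maps a local number to a stack of its recently live SSA definitions — now an ArrayStack<GenTree*> after the GenTreePtrStack typedef change in compiler.h — and Index(0) reads the most recent entry. A rough sketch of that access pattern, with an invented local number and a JitHashTable-style Lookup assumed:

    GenTreePtrStack* defs = nullptr;
    if (curSsaName->Lookup(lclNum, &defs))
    {
        GenTree* liveDef = defs->Index(0);   // most recently pushed definition
    }
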
diff --git a/src/jit/decomposelongs.cpp b/src/jit/decomposelongs.cpp
index 7caae4743c..5c8a7c9735 100644
--- a/src/jit/decomposelongs.cpp
+++ b/src/jit/decomposelongs.cpp
@@ -877,12 +877,12 @@ GenTree* DecomposeLongs::DecomposeInd(LIR::Use& use)
indLow->gtType = TYP_INT;
// Create tree of ind(addr+4)
- GenTreePtr addrBase = indLow->gtGetOp1();
- GenTreePtr addrBaseHigh = new (m_compiler, GT_LCL_VAR)
+ GenTree* addrBase = indLow->gtGetOp1();
+ GenTree* addrBaseHigh = new (m_compiler, GT_LCL_VAR)
GenTreeLclVar(GT_LCL_VAR, addrBase->TypeGet(), addrBase->AsLclVarCommon()->GetLclNum(), BAD_IL_OFFSET);
- GenTreePtr addrHigh =
+ GenTree* addrHigh =
new (m_compiler, GT_LEA) GenTreeAddrMode(TYP_REF, addrBaseHigh, nullptr, 0, genTypeSize(TYP_INT));
- GenTreePtr indHigh = new (m_compiler, GT_IND) GenTreeIndir(GT_IND, TYP_INT, addrHigh, nullptr);
+ GenTree* indHigh = new (m_compiler, GT_IND) GenTreeIndir(GT_IND, TYP_INT, addrHigh, nullptr);
indHigh->gtFlags |= (indLow->gtFlags & (GTF_GLOB_REF | GTF_EXCEPT | GTF_IND_FLAGS));
m_compiler->lvaIncRefCnts(addrBaseHigh);
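
The DecomposeInd hunk splits a 64-bit load into two 32-bit loads: indLow keeps the original address and is retyped to TYP_INT, while indHigh reads the same base local through a new GT_LEA at offset genTypeSize(TYP_INT). Roughly the same effect expressed at the C level, assuming a little-endian target (illustrative only):

    const char* addr = loadAddress;            // address feeding the original GT_IND (assumed)
    int32_t lo = *(const int32_t*)addr;        // indLow: existing IND, retyped to TYP_INT
    int32_t hi = *(const int32_t*)(addr + 4);  // indHigh: new IND through GT_LEA at +4
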
diff --git a/src/jit/earlyprop.cpp b/src/jit/earlyprop.cpp
index 8b3c654401..b0db714a09 100644
--- a/src/jit/earlyprop.cpp
+++ b/src/jit/earlyprop.cpp
@@ -41,7 +41,7 @@ bool Compiler::optDoEarlyPropForBlock(BasicBlock* block)
// Return Value:
// Return true if the tree is a method table reference.
-bool Compiler::gtIsVtableRef(GenTreePtr tree)
+bool Compiler::gtIsVtableRef(GenTree* tree)
{
if (tree->OperGet() == GT_IND)
{
@@ -68,7 +68,7 @@ bool Compiler::gtIsVtableRef(GenTreePtr tree)
// Return Value:
// Return the array length node.
-GenTreePtr Compiler::getArrayLengthFromAllocation(GenTreePtr tree)
+GenTree* Compiler::getArrayLengthFromAllocation(GenTree* tree)
{
assert(tree != nullptr);
@@ -103,7 +103,7 @@ GenTreePtr Compiler::getArrayLengthFromAllocation(GenTreePtr tree)
// Return Value:
// Return the object type handle node.
-GenTreePtr Compiler::getObjectHandleNodeFromAllocation(GenTreePtr tree)
+GenTree* Compiler::getObjectHandleNodeFromAllocation(GenTree* tree)
{
assert(tree != nullptr);
@@ -192,9 +192,9 @@ void Compiler::optEarlyProp()
// Walk the stmt tree in linear order to rewrite any array length reference with a
// constant array length.
bool isRewritten = false;
- for (GenTreePtr tree = stmt->gtStmt.gtStmtList; tree != nullptr; tree = tree->gtNext)
+ for (GenTree* tree = stmt->gtStmt.gtStmtList; tree != nullptr; tree = tree->gtNext)
{
- GenTreePtr rewrittenTree = optEarlyPropRewriteTree(tree);
+ GenTree* rewrittenTree = optEarlyPropRewriteTree(tree);
if (rewrittenTree != nullptr)
{
gtUpdateSideEffects(stmt, rewrittenTree);
@@ -233,9 +233,9 @@ void Compiler::optEarlyProp()
// Return a new tree if the original tree was successfully rewritten.
// The containing tree links are updated.
//
-GenTreePtr Compiler::optEarlyPropRewriteTree(GenTreePtr tree)
+GenTree* Compiler::optEarlyPropRewriteTree(GenTree* tree)
{
- GenTreePtr objectRefPtr = nullptr;
+ GenTree* objectRefPtr = nullptr;
optPropKind propKind = optPropKind::OPK_INVALID;
if (tree->OperGet() == GT_ARR_LENGTH)
@@ -279,9 +279,9 @@ GenTreePtr Compiler::optEarlyPropRewriteTree(GenTreePtr tree)
return nullptr;
}
- unsigned lclNum = objectRefPtr->AsLclVarCommon()->GetLclNum();
- unsigned ssaNum = objectRefPtr->AsLclVarCommon()->GetSsaNum();
- GenTreePtr actualVal = optPropGetValue(lclNum, ssaNum, propKind);
+ unsigned lclNum = objectRefPtr->AsLclVarCommon()->GetLclNum();
+ unsigned ssaNum = objectRefPtr->AsLclVarCommon()->GetSsaNum();
+ GenTree* actualVal = optPropGetValue(lclNum, ssaNum, propKind);
if (actualVal != nullptr)
{
@@ -341,7 +341,7 @@ GenTreePtr Compiler::optEarlyPropRewriteTree(GenTreePtr tree)
}
#endif
- GenTreePtr actualValClone = gtCloneExpr(actualVal);
+ GenTree* actualValClone = gtCloneExpr(actualVal);
if (actualValClone->gtType != tree->gtType)
{
@@ -391,7 +391,7 @@ GenTreePtr Compiler::optEarlyPropRewriteTree(GenTreePtr tree)
// Return Value:
// Return the corresponding value based on valueKind.
-GenTreePtr Compiler::optPropGetValue(unsigned lclNum, unsigned ssaNum, optPropKind valueKind)
+GenTree* Compiler::optPropGetValue(unsigned lclNum, unsigned ssaNum, optPropKind valueKind)
{
return optPropGetValueRec(lclNum, ssaNum, valueKind, 0);
}
@@ -409,15 +409,15 @@ GenTreePtr Compiler::optPropGetValue(unsigned lclNum, unsigned ssaNum, optPropKi
// Return Value:
// Return the corresponding value based on valueKind.
-GenTreePtr Compiler::optPropGetValueRec(unsigned lclNum, unsigned ssaNum, optPropKind valueKind, int walkDepth)
+GenTree* Compiler::optPropGetValueRec(unsigned lclNum, unsigned ssaNum, optPropKind valueKind, int walkDepth)
{
if (ssaNum == SsaConfig::RESERVED_SSA_NUM)
{
return nullptr;
}
- SSAName ssaName(lclNum, ssaNum);
- GenTreePtr value = nullptr;
+ SSAName ssaName(lclNum, ssaNum);
+ GenTree* value = nullptr;
// Bound the recursion with a hard limit.
if (walkDepth > optEarlyPropRecurBound)
@@ -426,7 +426,7 @@ GenTreePtr Compiler::optPropGetValueRec(unsigned lclNum, unsigned ssaNum, optPro
}
// Track along the use-def chain to get the array length
- GenTreePtr treelhs = lvaTable[lclNum].GetPerSsaData(ssaNum)->m_defLoc.m_tree;
+ GenTree* treelhs = lvaTable[lclNum].GetPerSsaData(ssaNum)->m_defLoc.m_tree;
if (treelhs == nullptr)
{
@@ -436,13 +436,13 @@ GenTreePtr Compiler::optPropGetValueRec(unsigned lclNum, unsigned ssaNum, optPro
}
else
{
- GenTreePtr* lhsPtr;
- GenTreePtr treeDefParent = treelhs->gtGetParent(&lhsPtr);
+ GenTree** lhsPtr;
+ GenTree* treeDefParent = treelhs->gtGetParent(&lhsPtr);
if (treeDefParent->OperGet() == GT_ASG)
{
assert(treelhs == treeDefParent->gtGetOp1());
- GenTreePtr treeRhs = treeDefParent->gtGetOp2();
+ GenTree* treeRhs = treeDefParent->gtGetOp2();
if (treeRhs->OperIsScalarLocal() && !fgExcludeFromSsa(treeRhs->AsLclVarCommon()->GetLclNum()))
{
@@ -492,7 +492,7 @@ GenTreePtr Compiler::optPropGetValueRec(unsigned lclNum, unsigned ssaNum, optPro
// tree - The input GT_INDIR tree.
//
-void Compiler::optFoldNullCheck(GenTreePtr tree)
+void Compiler::optFoldNullCheck(GenTree* tree)
{
//
// Check for a pattern like this:
@@ -555,17 +555,17 @@ void Compiler::optFoldNullCheck(GenTreePtr tree)
if (compCurBB == defBlock)
{
- GenTreePtr defTree = defLoc.m_tree;
- GenTreePtr defParent = defTree->gtGetParent(nullptr);
+ GenTree* defTree = defLoc.m_tree;
+ GenTree* defParent = defTree->gtGetParent(nullptr);
if ((defParent->OperGet() == GT_ASG) && (defParent->gtNext == nullptr))
{
- GenTreePtr defRHS = defParent->gtGetOp2();
+ GenTree* defRHS = defParent->gtGetOp2();
if (defRHS->OperGet() == GT_COMMA)
{
if (defRHS->gtGetOp1()->OperGet() == GT_NULLCHECK)
{
- GenTreePtr nullCheckTree = defRHS->gtGetOp1();
+ GenTree* nullCheckTree = defRHS->gtGetOp1();
if (nullCheckTree->gtGetOp1()->OperGet() == GT_LCL_VAR)
{
// We found a candidate for 'y' in the picture
@@ -573,18 +573,18 @@ void Compiler::optFoldNullCheck(GenTreePtr tree)
if (defRHS->gtGetOp2()->OperGet() == GT_ADD)
{
- GenTreePtr additionNode = defRHS->gtGetOp2();
+ GenTree* additionNode = defRHS->gtGetOp2();
if ((additionNode->gtGetOp1()->OperGet() == GT_LCL_VAR) &&
(additionNode->gtGetOp1()->gtLclVarCommon.gtLclNum == nullCheckLclNum))
{
- GenTreePtr offset = additionNode->gtGetOp2();
+ GenTree* offset = additionNode->gtGetOp2();
if (offset->IsCnsIntOrI())
{
if (!fgIsBigOffset(offset->gtIntConCommon.IconValue()))
{
// Walk from the use to the def in reverse execution order to see
// if any nodes have unsafe side effects.
- GenTreePtr currentTree = lclVarNode->gtPrev;
+ GenTree* currentTree = lclVarNode->gtPrev;
bool isInsideTry = compCurBB->hasTryIndex();
bool canRemoveNullCheck = true;
const unsigned maxNodesWalked = 25;
@@ -609,8 +609,8 @@ void Compiler::optFoldNullCheck(GenTreePtr tree)
// Then walk the statement list in reverse execution order
// until we get to the statement containing the null check.
// We only need to check the side effects at the root of each statement.
- GenTreePtr curStmt = compCurStmt->gtPrev;
- currentTree = curStmt->gtStmt.gtStmtExpr;
+ GenTree* curStmt = compCurStmt->gtPrev;
+ currentTree = curStmt->gtStmt.gtStmtExpr;
while (canRemoveNullCheck && (currentTree != defParent))
{
if ((nodesWalked++ > maxNodesWalked) ||
@@ -668,7 +668,7 @@ void Compiler::optFoldNullCheck(GenTreePtr tree)
// True if GT_NULLCHECK can be folded into a node that is after tree is execution order,
// false otherwise.
-bool Compiler::optCanMoveNullCheckPastTree(GenTreePtr tree, bool isInsideTry)
+bool Compiler::optCanMoveNullCheckPastTree(GenTree* tree, bool isInsideTry)
{
bool result = true;
if (isInsideTry)
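
The earlyprop changes keep the same flow: optEarlyPropRewriteTree walks each statement's nodes linearly, and for a GT_ARR_LENGTH (or method-table load) on an SSA local it calls optPropGetValueRec, which follows lvaTable[lclNum].GetPerSsaData(ssaNum)->m_defLoc.m_tree up the use-def chain to the defining assignment; a (typically constant) value found there is cloned with gtCloneExpr and substituted. Sketched on a hypothetical fragment (local and value invented):

    // before:  v = new int[10];   ...   use(GT_ARR_LENGTH(v))
    // after :  v = new int[10];   ...   use(10)               // clone of the propagated value
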
diff --git a/src/jit/ee_il_dll.cpp b/src/jit/ee_il_dll.cpp
index e87c46c89a..81ed6cfb5d 100644
--- a/src/jit/ee_il_dll.cpp
+++ b/src/jit/ee_il_dll.cpp
@@ -504,7 +504,7 @@ unsigned Compiler::eeGetArgSize(CORINFO_ARG_LIST_HANDLE list, CORINFO_SIG_INFO*
/*****************************************************************************/
-GenTreePtr Compiler::eeGetPInvokeCookie(CORINFO_SIG_INFO* szMetaSig)
+GenTree* Compiler::eeGetPInvokeCookie(CORINFO_SIG_INFO* szMetaSig)
{
void *cookie, *pCookie;
cookie = info.compCompHnd->GetCookieForPInvokeCalliSig(szMetaSig, &pCookie);
diff --git a/src/jit/emitxarch.cpp b/src/jit/emitxarch.cpp
index c42f9262dc..ca29f080a3 100644
--- a/src/jit/emitxarch.cpp
+++ b/src/jit/emitxarch.cpp
@@ -3359,8 +3359,8 @@ regNumber emitter::emitInsBinary(instruction ins, emitAttr attr, GenTree* dst, G
//
void emitter::emitInsRMW(instruction ins, emitAttr attr, GenTreeStoreInd* storeInd, GenTree* src)
{
- GenTreePtr addr = storeInd->Addr();
- addr = addr->gtSkipReloadOrCopy();
+ GenTree* addr = storeInd->Addr();
+ addr = addr->gtSkipReloadOrCopy();
assert(addr->OperGet() == GT_LCL_VAR || addr->OperGet() == GT_LCL_VAR_ADDR || addr->OperGet() == GT_LEA ||
addr->OperGet() == GT_CLS_VAR_ADDR || addr->OperGet() == GT_CNS_INT);
@@ -3423,8 +3423,8 @@ void emitter::emitInsRMW(instruction ins, emitAttr attr, GenTreeStoreInd* storeI
//
void emitter::emitInsRMW(instruction ins, emitAttr attr, GenTreeStoreInd* storeInd)
{
- GenTreePtr addr = storeInd->Addr();
- addr = addr->gtSkipReloadOrCopy();
+ GenTree* addr = storeInd->Addr();
+ addr = addr->gtSkipReloadOrCopy();
assert(addr->OperGet() == GT_LCL_VAR || addr->OperGet() == GT_LCL_VAR_ADDR || addr->OperGet() == GT_CLS_VAR_ADDR ||
addr->OperGet() == GT_LEA || addr->OperGet() == GT_CNS_INT);
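
Both emitInsRMW overloads unwrap the store address with gtSkipReloadOrCopy(), presumably because the register allocator can interpose a GT_RELOAD or GT_COPY above the real address tree; the asserts then restrict the address to the shapes the read-modify-write encoder handles. The combined check, gathered from the two hunks into one place:

    GenTree* addr = storeInd->Addr()->gtSkipReloadOrCopy();
    assert(addr->OperGet() == GT_LCL_VAR || addr->OperGet() == GT_LCL_VAR_ADDR ||
           addr->OperGet() == GT_CLS_VAR_ADDR || addr->OperGet() == GT_LEA ||
           addr->OperGet() == GT_CNS_INT);
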
diff --git a/src/jit/emitxarch.h b/src/jit/emitxarch.h
index 74fdc84a0c..cca099cc72 100644
--- a/src/jit/emitxarch.h
+++ b/src/jit/emitxarch.h
@@ -363,7 +363,7 @@ void emitIns(instruction ins);
void emitIns(instruction ins, emitAttr attr);
-void emitInsRMW(instruction inst, emitAttr attr, GenTreeStoreInd* storeInd, GenTreePtr src);
+void emitInsRMW(instruction inst, emitAttr attr, GenTreeStoreInd* storeInd, GenTree* src);
void emitInsRMW(instruction inst, emitAttr attr, GenTreeStoreInd* storeInd);
diff --git a/src/jit/flowgraph.cpp b/src/jit/flowgraph.cpp
index a23d9c6a9d..1b588e6821 100644
--- a/src/jit/flowgraph.cpp
+++ b/src/jit/flowgraph.cpp
@@ -278,7 +278,7 @@ void Compiler::fgInstrumentMethod()
ICorJitInfo::ProfileBuffer* bbProfileBufferStart = bbProfileBuffer;
- GenTreePtr stmt;
+ GenTree* stmt;
if (!SUCCEEDED(res))
{
@@ -288,7 +288,7 @@ void Compiler::fgInstrumentMethod()
// In such cases we still want to add the method entry callback node
GenTreeArgList* args = gtNewArgList(gtNewIconEmbMethHndNode(info.compMethodHnd));
- GenTreePtr call = gtNewHelperCallNode(CORINFO_HELP_BBT_FCN_ENTER, TYP_VOID, args);
+ GenTree* call = gtNewHelperCallNode(CORINFO_HELP_BBT_FCN_ENTER, TYP_VOID, args);
stmt = gtNewStmt(call);
}
@@ -333,7 +333,7 @@ void Compiler::fgInstrumentMethod()
// Add the method entry callback node
- GenTreePtr arg;
+ GenTree* arg;
#ifdef FEATURE_READYTORUN_COMPILER
if (opts.IsReadyToRun())
@@ -357,19 +357,19 @@ void Compiler::fgInstrumentMethod()
}
GenTreeArgList* args = gtNewArgList(arg);
- GenTreePtr call = gtNewHelperCallNode(CORINFO_HELP_BBT_FCN_ENTER, TYP_VOID, args);
+ GenTree* call = gtNewHelperCallNode(CORINFO_HELP_BBT_FCN_ENTER, TYP_VOID, args);
size_t addrOfBlockCount = (size_t)&bbProfileBuffer->ExecutionCount;
// Read Basic-Block count value
- GenTreePtr valueNode = gtNewIndOfIconHandleNode(TYP_INT, addrOfBlockCount, GTF_ICON_BBC_PTR, false);
+ GenTree* valueNode = gtNewIndOfIconHandleNode(TYP_INT, addrOfBlockCount, GTF_ICON_BBC_PTR, false);
// Compare Basic-Block count value against zero
- GenTreePtr relop = gtNewOperNode(GT_NE, TYP_INT, valueNode, gtNewIconNode(0, TYP_INT));
+ GenTree* relop = gtNewOperNode(GT_NE, TYP_INT, valueNode, gtNewIconNode(0, TYP_INT));
relop->gtFlags |= GTF_RELOP_QMARK; // TODO-Cleanup: [Simple] Move this to gtNewQmarkNode
- GenTreePtr colon = new (this, GT_COLON) GenTreeColon(TYP_VOID, gtNewNothingNode(), call);
- GenTreePtr cond = gtNewQmarkNode(TYP_VOID, relop, colon);
- stmt = gtNewStmt(cond);
+ GenTree* colon = new (this, GT_COLON) GenTreeColon(TYP_VOID, gtNewNothingNode(), call);
+ GenTree* cond = gtNewQmarkNode(TYP_VOID, relop, colon);
+ stmt = gtNewStmt(cond);
}
fgEnsureFirstBBisScratch();
@@ -545,18 +545,18 @@ bool Compiler::fgBlockContainsStatementBounded(BasicBlock* block, GenTree* stmt,
// In other cases, if there are any phi assignments and/or an assignment of
// the GT_CATCH_ARG, we insert after those.
-GenTreePtr Compiler::fgInsertStmtAtBeg(BasicBlock* block, GenTreePtr stmt)
+GenTree* Compiler::fgInsertStmtAtBeg(BasicBlock* block, GenTree* stmt)
{
if (stmt->gtOper != GT_STMT)
{
stmt = gtNewStmt(stmt);
}
- GenTreePtr list = block->firstStmt();
+ GenTree* list = block->firstStmt();
if (!stmt->IsPhiDefnStmt())
{
- GenTreePtr insertBeforeStmt = block->FirstNonPhiDefOrCatchArgAsg();
+ GenTree* insertBeforeStmt = block->FirstNonPhiDefOrCatchArgAsg();
if (insertBeforeStmt != nullptr)
{
return fgInsertStmtBefore(block, insertBeforeStmt, stmt);
@@ -577,7 +577,7 @@ GenTreePtr Compiler::fgInsertStmtAtBeg(BasicBlock* block, GenTreePtr stmt)
if (list)
{
- GenTreePtr last;
+ GenTree* last;
/* There is at least one statement already */
@@ -606,9 +606,9 @@ GenTreePtr Compiler::fgInsertStmtAtBeg(BasicBlock* block, GenTreePtr stmt)
* If the block can be a conditional block, use fgInsertStmtNearEnd.
*/
-GenTreeStmt* Compiler::fgInsertStmtAtEnd(BasicBlock* block, GenTreePtr node)
+GenTreeStmt* Compiler::fgInsertStmtAtEnd(BasicBlock* block, GenTree* node)
{
- GenTreePtr list = block->firstStmt();
+ GenTree* list = block->firstStmt();
GenTreeStmt* stmt;
if (node->gtOper != GT_STMT)
@@ -624,7 +624,7 @@ GenTreeStmt* Compiler::fgInsertStmtAtEnd(BasicBlock* block, GenTreePtr node)
if (list)
{
- GenTreePtr last;
+ GenTree* last;
/* There is at least one statement already */
@@ -655,7 +655,7 @@ GenTreeStmt* Compiler::fgInsertStmtAtEnd(BasicBlock* block, GenTreePtr node)
* Returns the (potentially) new GT_STMT node.
*/
-GenTreeStmt* Compiler::fgInsertStmtNearEnd(BasicBlock* block, GenTreePtr node)
+GenTreeStmt* Compiler::fgInsertStmtNearEnd(BasicBlock* block, GenTree* node)
{
GenTreeStmt* stmt;
@@ -677,7 +677,7 @@ GenTreeStmt* Compiler::fgInsertStmtNearEnd(BasicBlock* block, GenTreePtr node)
noway_assert(first);
GenTreeStmt* last = block->lastStmt();
noway_assert(last && last->gtNext == nullptr);
- GenTreePtr after = last->gtPrev;
+ GenTree* after = last->gtPrev;
#if DEBUG
if (block->bbJumpKind == BBJ_COND)
@@ -738,7 +738,7 @@ GenTreeStmt* Compiler::fgInsertStmtNearEnd(BasicBlock* block, GenTreePtr node)
* Note that the gtPrev list of statement nodes is circular, but the gtNext list is not.
*/
-GenTreePtr Compiler::fgInsertStmtAfter(BasicBlock* block, GenTreePtr insertionPoint, GenTreePtr stmt)
+GenTree* Compiler::fgInsertStmtAfter(BasicBlock* block, GenTree* insertionPoint, GenTree* stmt)
{
assert(block->bbTreeList != nullptr);
noway_assert(insertionPoint->gtOper == GT_STMT);
@@ -774,7 +774,7 @@ GenTreePtr Compiler::fgInsertStmtAfter(BasicBlock* block, GenTreePtr insertionPo
// Insert the given tree or statement before GT_STMT node "insertionPoint".
// Returns the newly inserted GT_STMT node.
-GenTreePtr Compiler::fgInsertStmtBefore(BasicBlock* block, GenTreePtr insertionPoint, GenTreePtr stmt)
+GenTree* Compiler::fgInsertStmtBefore(BasicBlock* block, GenTree* insertionPoint, GenTree* stmt)
{
assert(block->bbTreeList != nullptr);
noway_assert(insertionPoint->gtOper == GT_STMT);
@@ -785,8 +785,8 @@ GenTreePtr Compiler::fgInsertStmtBefore(BasicBlock* block, GenTreePtr insertionP
if (insertionPoint == block->bbTreeList)
{
// We're inserting before the first statement in the block.
- GenTreePtr list = block->bbTreeList;
- GenTreePtr last = list->gtPrev;
+ GenTree* list = block->bbTreeList;
+ GenTree* last = list->gtPrev;
stmt->gtNext = list;
stmt->gtPrev = last;
@@ -812,20 +812,20 @@ GenTreePtr Compiler::fgInsertStmtBefore(BasicBlock* block, GenTreePtr insertionP
* Return the last statement of stmtList.
*/
-GenTreePtr Compiler::fgInsertStmtListAfter(BasicBlock* block, // the block where stmtAfter is in.
- GenTreePtr stmtAfter, // the statement where stmtList should be inserted
- // after.
- GenTreePtr stmtList)
+GenTree* Compiler::fgInsertStmtListAfter(BasicBlock* block, // the block where stmtAfter is in.
+ GenTree* stmtAfter, // the statement where stmtList should be inserted
+ // after.
+ GenTree* stmtList)
{
// Currently we can handle when stmtAfter and stmtList are non-NULL. This makes everything easy.
noway_assert(stmtAfter && stmtAfter->gtOper == GT_STMT);
noway_assert(stmtList && stmtList->gtOper == GT_STMT);
- GenTreePtr stmtLast = stmtList->gtPrev; // Last statement in a non-empty list, circular in the gtPrev list.
+ GenTree* stmtLast = stmtList->gtPrev; // Last statement in a non-empty list, circular in the gtPrev list.
noway_assert(stmtLast);
noway_assert(stmtLast->gtNext == nullptr);
- GenTreePtr stmtNext = stmtAfter->gtNext;
+ GenTree* stmtNext = stmtAfter->gtNext;
if (!stmtNext)
{
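The statement-insertion hunks above all preserve the same list shape, spelled out in the comment earlier in this file: gtNext forms an ordinary null-terminated forward list, while gtPrev is circular, so the first statement's gtPrev points at the last one (which is how fgInsertStmtListAfter finds stmtLast in a single step). A standalone sketch of that invariant and of an insert-at-front; the simplified Stmt node and InsertAtFront below are this sketch's own, not the JIT's statement types:

#include <cassert>

// Simplified stand-in for a statement node.
struct Stmt
{
    Stmt* gtNext = nullptr; // forward list, null-terminated
    Stmt* gtPrev = nullptr; // backward list, circular: head->gtPrev == last
};

// Insert 'stmt' at the front of the list rooted at 'head', keeping the
// "circular gtPrev / null-terminated gtNext" invariant intact.
Stmt* InsertAtFront(Stmt* head, Stmt* stmt)
{
    if (head == nullptr)
    {
        stmt->gtNext = nullptr;
        stmt->gtPrev = stmt; // the sole statement is also the last one
        return stmt;
    }

    Stmt* last = head->gtPrev; // last statement, found via the circular back link

    stmt->gtNext = head;
    stmt->gtPrev = last; // the new head keeps pointing back at the last statement
    head->gtPrev = stmt;
    // last->gtNext stays nullptr, so the forward list remains null-terminated.
    return stmt;
}

int main()
{
    Stmt a, b, c;
    Stmt* head = InsertAtFront(nullptr, &c);
    head       = InsertAtFront(head, &b);
    head       = InsertAtFront(head, &a);

    assert(head == &a);
    assert(a.gtNext == &b && b.gtNext == &c && c.gtNext == nullptr);
    assert(head->gtPrev == &c); // circular back link to the last statement
    assert(c.gtPrev == &b && b.gtPrev == &a);
    return 0;
}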
@@ -3549,7 +3549,7 @@ void Compiler::fgInitBlockVarSets()
assert(!compIsForInlining());
for (unsigned i = 0; i < compQMarks->Size(); i++)
{
- GenTreePtr qmark = compQMarks->Get(i);
+ GenTree* qmark = compQMarks->Get(i);
// Perhaps the gtOper of a QMark node was changed to something else since it was created and put on this list.
// So can't hurt to check.
if (qmark->OperGet() == GT_QMARK)
@@ -3986,11 +3986,11 @@ bool Compiler::fgCreateGCPoll(GCPollType pollType, BasicBlock* block)
noway_assert(pAddrOfCaptureThreadGlobal == nullptr);
#endif
- GenTreePtr value; // The value of g_TrapReturningThreads
+ GenTree* value; // The value of g_TrapReturningThreads
if (pAddrOfCaptureThreadGlobal != nullptr)
{
// Use a double indirection
- GenTreePtr addr =
+ GenTree* addr =
gtNewIndOfIconHandleNode(TYP_I_IMPL, (size_t)pAddrOfCaptureThreadGlobal, GTF_ICON_PTR_HDL, true);
value = gtNewOperNode(GT_IND, TYP_INT, addr);
@@ -4007,10 +4007,10 @@ bool Compiler::fgCreateGCPoll(GCPollType pollType, BasicBlock* block)
value->gtFlags |= GTF_IND_VOLATILE;
// Compare for equal to zero
- GenTreePtr trapRelop = gtNewOperNode(GT_EQ, TYP_INT, value, gtNewIconNode(0, TYP_INT));
+ GenTree* trapRelop = gtNewOperNode(GT_EQ, TYP_INT, value, gtNewIconNode(0, TYP_INT));
trapRelop->gtFlags |= GTF_RELOP_JMP_USED | GTF_DONT_CSE;
- GenTreePtr trapCheck = gtNewOperNode(GT_JTRUE, TYP_VOID, trapRelop);
+ GenTree* trapCheck = gtNewOperNode(GT_JTRUE, TYP_VOID, trapRelop);
fgInsertStmtAtEnd(top, trapCheck);
top->bbJumpDest = bottom;
top->bbJumpKind = BBJ_COND;
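The GC-poll hunk above assembles, in IR, a load of g_TrapReturningThreads (through an extra indirection when only the address of its address is available), a compare against zero, and a conditional jump that skips the poll on the common path. A rough C++ rendering of that control flow; the globals and the helper below are this sketch's own stand-ins, not the runtime's definitions:

#include <cstdint>
#include <cstdio>

// Stand-ins for the runtime state the generated code reads.
static int32_t         g_trapReturningThreads      = 0;
static int32_t*        g_addrOfFlag                = &g_trapReturningThreads;
static int32_t** const pAddrOfCaptureThreadGlobal  = &g_addrOfFlag;

static void GCPollHelperStub()
{
    std::puts("poll helper would run here");
}

// Shape of the inserted code: read the flag (two loads when only the address
// of its address is known; the real load is marked volatile), compare it to
// zero, and branch around the poll when no suspension is pending.
static void PollSite()
{
    int32_t value = **pAddrOfCaptureThreadGlobal;
    if (value == 0)
    {
        return; // GT_EQ(value, 0) feeding GT_JTRUE: jump past the poll
    }
    GCPollHelperStub(); // fall-through path: a thread suspension was requested
}

int main()
{
    PollSite();                   // flag clear: nothing happens
    g_trapReturningThreads = 1;
    PollSite();                   // flag set: the poll helper runs
    return 0;
}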
@@ -6837,7 +6837,7 @@ void Compiler::fgImport()
* that unconditionally throws an exception
*/
-bool Compiler::fgIsThrow(GenTreePtr tree)
+bool Compiler::fgIsThrow(GenTree* tree)
{
if ((tree->gtOper != GT_CALL) || (tree->gtCall.gtCallType != CT_HELPER))
{
@@ -6903,7 +6903,7 @@ bool Compiler::fgIsBlockCold(BasicBlock* blk)
* that unconditionally throws an exception
*/
-bool Compiler::fgIsCommaThrow(GenTreePtr tree, bool forFolding /* = false */)
+bool Compiler::fgIsCommaThrow(GenTree* tree, bool forFolding /* = false */)
{
// Instead of always folding comma throws,
// with stress enabled we only fold half the time
@@ -6932,12 +6932,12 @@ bool Compiler::fgIsCommaThrow(GenTreePtr tree, bool forFolding /* = false */)
// whose arg in turn is a LCL_VAR, return that LCL_VAR node, else nullptr.
//
// static
-GenTreePtr Compiler::fgIsIndirOfAddrOfLocal(GenTreePtr tree)
+GenTree* Compiler::fgIsIndirOfAddrOfLocal(GenTree* tree)
{
- GenTreePtr res = nullptr;
+ GenTree* res = nullptr;
if (tree->OperIsIndir())
{
- GenTreePtr addr = tree->AsIndir()->Addr();
+ GenTree* addr = tree->AsIndir()->Addr();
// Post rationalization, we can have Indir(Lea(..)) trees. Therefore, to recognize
// Indir of addr of a local, skip over Lea in Indir(Lea(base, index, scale, offset))
@@ -6949,7 +6949,7 @@ GenTreePtr Compiler::fgIsIndirOfAddrOfLocal(GenTreePtr tree)
// if the index part of the LEA has indir( someAddrOperator ( lclVar ) ) to search for a use but it's
// covered by the fact we're traversing the expression in execution order and we also visit the index.
GenTreeAddrMode* lea = addr->AsAddrMode();
- GenTreePtr base = lea->Base();
+ GenTree* base = lea->Base();
if (base != nullptr)
{
@@ -6964,7 +6964,7 @@ GenTreePtr Compiler::fgIsIndirOfAddrOfLocal(GenTreePtr tree)
if (addr->OperGet() == GT_ADDR)
{
- GenTreePtr lclvar = addr->gtOp.gtOp1;
+ GenTree* lclvar = addr->gtOp.gtOp1;
if (lclvar->OperGet() == GT_LCL_VAR)
{
res = lclvar;
@@ -7027,8 +7027,8 @@ GenTreeCall* Compiler::fgGetStaticsCCtorHelper(CORINFO_CLASS_HANDLE cls, CorInfo
GenTreeArgList* argList = nullptr;
- GenTreePtr opModuleIDArg;
- GenTreePtr opClassIDArg;
+ GenTree* opModuleIDArg;
+ GenTree* opClassIDArg;
// Get the class ID
unsigned clsID;
@@ -7122,7 +7122,7 @@ GenTreeCall* Compiler::fgGetSharedCCtor(CORINFO_CLASS_HANDLE cls)
// Return Value:
// True if address could be null; false otherwise
-bool Compiler::fgAddrCouldBeNull(GenTreePtr addr)
+bool Compiler::fgAddrCouldBeNull(GenTree* addr)
{
addr = addr->gtEffectiveVal();
if ((addr->gtOper == GT_CNS_INT) && addr->IsIconHandle())
@@ -7149,7 +7149,7 @@ bool Compiler::fgAddrCouldBeNull(GenTreePtr addr)
{
if (addr->gtOp.gtOp1->gtOper == GT_CNS_INT)
{
- GenTreePtr cns1Tree = addr->gtOp.gtOp1;
+ GenTree* cns1Tree = addr->gtOp.gtOp1;
if (!cns1Tree->IsIconHandle())
{
// Indirection of some random constant...
@@ -7164,7 +7164,7 @@ bool Compiler::fgAddrCouldBeNull(GenTreePtr addr)
{
if (addr->gtOp.gtOp1->gtOper == GT_CNS_INT)
{
- GenTreePtr cns1Tree = addr->gtOp.gtOp1;
+ GenTree* cns1Tree = addr->gtOp.gtOp1;
if (!cns1Tree->IsIconHandle())
{
if (!fgIsBigOffset(cns1Tree->gtIntCon.gtIconVal))
@@ -7178,7 +7178,7 @@ bool Compiler::fgAddrCouldBeNull(GenTreePtr addr)
// Is Op2 also a constant?
if (addr->gtOp.gtOp2->gtOper == GT_CNS_INT)
{
- GenTreePtr cns2Tree = addr->gtOp.gtOp2;
+ GenTree* cns2Tree = addr->gtOp.gtOp2;
// Is this an addition of a handle and constant
if (!cns2Tree->IsIconHandle())
{
@@ -7197,7 +7197,7 @@ bool Compiler::fgAddrCouldBeNull(GenTreePtr addr)
// What about Op2?
if (addr->gtOp.gtOp2->gtOper == GT_CNS_INT)
{
- GenTreePtr cns2Tree = addr->gtOp.gtOp2;
+ GenTree* cns2Tree = addr->gtOp.gtOp2;
// Is this an addition of a small constant
if (!cns2Tree->IsIconHandle())
{
@@ -7217,26 +7217,26 @@ bool Compiler::fgAddrCouldBeNull(GenTreePtr addr)
* Optimize the call to the delegate constructor.
*/
-GenTreePtr Compiler::fgOptimizeDelegateConstructor(GenTreeCall* call,
- CORINFO_CONTEXT_HANDLE* ExactContextHnd,
- CORINFO_RESOLVED_TOKEN* ldftnToken)
+GenTree* Compiler::fgOptimizeDelegateConstructor(GenTreeCall* call,
+ CORINFO_CONTEXT_HANDLE* ExactContextHnd,
+ CORINFO_RESOLVED_TOKEN* ldftnToken)
{
noway_assert(call->gtCallType == CT_USER_FUNC);
CORINFO_METHOD_HANDLE methHnd = call->gtCallMethHnd;
CORINFO_CLASS_HANDLE clsHnd = info.compCompHnd->getMethodClass(methHnd);
- GenTreePtr targetMethod = call->gtCallArgs->Rest()->Current();
+ GenTree* targetMethod = call->gtCallArgs->Rest()->Current();
noway_assert(targetMethod->TypeGet() == TYP_I_IMPL);
genTreeOps oper = targetMethod->OperGet();
CORINFO_METHOD_HANDLE targetMethodHnd = nullptr;
- GenTreePtr qmarkNode = nullptr;
+ GenTree* qmarkNode = nullptr;
if (oper == GT_FTN_ADDR)
{
targetMethodHnd = targetMethod->gtFptrVal.gtFptrMethod;
}
else if (oper == GT_CALL && targetMethod->gtCall.gtCallMethHnd == eeFindHelper(CORINFO_HELP_VIRTUAL_FUNC_PTR))
{
- GenTreePtr handleNode = targetMethod->gtCall.gtCallArgs->Rest()->Rest()->Current();
+ GenTree* handleNode = targetMethod->gtCall.gtCallArgs->Rest()->Rest()->Current();
if (handleNode->OperGet() == GT_CNS_INT)
{
@@ -7275,7 +7275,7 @@ GenTreePtr Compiler::fgOptimizeDelegateConstructor(GenTreeCall* call,
GenTreeCall* runtimeLookupCall = qmarkNode->gtOp.gtOp2->gtOp.gtOp1->AsCall();
// This could be any of CORINFO_HELP_RUNTIMEHANDLE_(METHOD|CLASS)(_LOG?)
- GenTreePtr tokenNode = runtimeLookupCall->gtCallArgs->gtOp.gtOp2->gtOp.gtOp1;
+ GenTree* tokenNode = runtimeLookupCall->gtCallArgs->gtOp.gtOp2->gtOp.gtOp1;
noway_assert(tokenNode->OperGet() == GT_CNS_INT);
targetMethodHnd = CORINFO_METHOD_HANDLE(tokenNode->gtIntCon.gtCompileTimeHandle);
}
@@ -7287,8 +7287,8 @@ GenTreePtr Compiler::fgOptimizeDelegateConstructor(GenTreeCall* call,
{
if (ldftnToken != nullptr)
{
- GenTreePtr thisPointer = call->gtCallObjp;
- GenTreePtr targetObjPointers = call->gtCallArgs->Current();
+ GenTree* thisPointer = call->gtCallObjp;
+ GenTree* targetObjPointers = call->gtCallArgs->Current();
GenTreeArgList* helperArgs = nullptr;
CORINFO_LOOKUP pLookup;
CORINFO_CONST_LOOKUP entryPoint;
@@ -7304,9 +7304,9 @@ GenTreePtr Compiler::fgOptimizeDelegateConstructor(GenTreeCall* call,
CORINFO_CONST_LOOKUP genericLookup;
info.compCompHnd->getReadyToRunHelper(ldftnToken, &pLookup.lookupKind,
CORINFO_HELP_READYTORUN_GENERIC_HANDLE, &genericLookup);
- GenTreePtr ctxTree = getRuntimeContextTree(pLookup.lookupKind.runtimeLookupKind);
- helperArgs = gtNewArgList(thisPointer, targetObjPointers, ctxTree);
- entryPoint = genericLookup;
+ GenTree* ctxTree = getRuntimeContextTree(pLookup.lookupKind.runtimeLookupKind);
+ helperArgs = gtNewArgList(thisPointer, targetObjPointers, ctxTree);
+ entryPoint = genericLookup;
}
call = gtNewHelperCallNode(CORINFO_HELP_READYTORUN_DELEGATE_CTOR, TYP_VOID, helperArgs);
call->setEntryPoint(entryPoint);
@@ -7315,8 +7315,8 @@ GenTreePtr Compiler::fgOptimizeDelegateConstructor(GenTreeCall* call,
// ReadyToRun has this optimization for non-virtual function pointers only for now.
else if (oper == GT_FTN_ADDR)
{
- GenTreePtr thisPointer = call->gtCallObjp;
- GenTreePtr targetObjPointers = call->gtCallArgs->Current();
+ GenTree* thisPointer = call->gtCallObjp;
+ GenTree* targetObjPointers = call->gtCallArgs->Current();
GenTreeArgList* helperArgs = gtNewArgList(thisPointer, targetObjPointers);
call = gtNewHelperCallNode(CORINFO_HELP_READYTORUN_DELEGATE_CTOR, TYP_VOID, helperArgs);
@@ -7351,18 +7351,18 @@ GenTreePtr Compiler::fgOptimizeDelegateConstructor(GenTreeCall* call,
GenTreeArgList* addArgs = nullptr;
if (ctorData.pArg5)
{
- GenTreePtr arg5 = gtNewIconHandleNode(size_t(ctorData.pArg5), GTF_ICON_FTN_ADDR);
- addArgs = gtNewListNode(arg5, addArgs);
+ GenTree* arg5 = gtNewIconHandleNode(size_t(ctorData.pArg5), GTF_ICON_FTN_ADDR);
+ addArgs = gtNewListNode(arg5, addArgs);
}
if (ctorData.pArg4)
{
- GenTreePtr arg4 = gtNewIconHandleNode(size_t(ctorData.pArg4), GTF_ICON_FTN_ADDR);
- addArgs = gtNewListNode(arg4, addArgs);
+ GenTree* arg4 = gtNewIconHandleNode(size_t(ctorData.pArg4), GTF_ICON_FTN_ADDR);
+ addArgs = gtNewListNode(arg4, addArgs);
}
if (ctorData.pArg3)
{
- GenTreePtr arg3 = gtNewIconHandleNode(size_t(ctorData.pArg3), GTF_ICON_FTN_ADDR);
- addArgs = gtNewListNode(arg3, addArgs);
+ GenTree* arg3 = gtNewIconHandleNode(size_t(ctorData.pArg3), GTF_ICON_FTN_ADDR);
+ addArgs = gtNewListNode(arg3, addArgs);
}
call->gtCallArgs->Rest()->Rest() = addArgs;
}
@@ -7370,7 +7370,7 @@ GenTreePtr Compiler::fgOptimizeDelegateConstructor(GenTreeCall* call,
return call;
}
-bool Compiler::fgCastNeeded(GenTreePtr tree, var_types toType)
+bool Compiler::fgCastNeeded(GenTree* tree, var_types toType)
{
//
// If tree is a relop and we need a 4-byte integer
@@ -7429,7 +7429,7 @@ bool Compiler::fgCastNeeded(GenTreePtr tree, var_types toType)
// If assigning to a local var, add a cast if the target is
// marked as NormalizedOnStore. Returns true if any change was made
-GenTreePtr Compiler::fgDoNormalizeOnStore(GenTreePtr tree)
+GenTree* Compiler::fgDoNormalizeOnStore(GenTree* tree)
{
//
// Only normalize the stores in the global morph phase
@@ -7438,8 +7438,8 @@ GenTreePtr Compiler::fgDoNormalizeOnStore(GenTreePtr tree)
{
noway_assert(tree->OperGet() == GT_ASG);
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtOp.gtOp2;
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtOp.gtOp2;
if (op1->gtOper == GT_LCL_VAR && genActualType(op1->TypeGet()) == TYP_INT)
{
@@ -7659,13 +7659,13 @@ inline void Compiler::fgMarkLoopHead(BasicBlock* block)
}
}
-GenTreePtr Compiler::fgGetCritSectOfStaticMethod()
+GenTree* Compiler::fgGetCritSectOfStaticMethod()
{
noway_assert(!compIsForInlining());
noway_assert(info.compIsStatic); // This method should only be called for static methods.
- GenTreePtr tree = nullptr;
+ GenTree* tree = nullptr;
CORINFO_LOOKUP_KIND kind = info.compCompHnd->getLocationOfThisType(info.compMethodHnd);
@@ -7934,9 +7934,9 @@ void Compiler::fgAddSyncMethodEnterExit()
// Initialize the 'acquired' boolean.
- GenTreePtr zero = gtNewZeroConNode(genActualType(typeMonAcquired));
- GenTreePtr varNode = gtNewLclvNode(lvaMonAcquired, typeMonAcquired);
- GenTreePtr initNode = gtNewAssignNode(varNode, zero);
+ GenTree* zero = gtNewZeroConNode(genActualType(typeMonAcquired));
+ GenTree* varNode = gtNewLclvNode(lvaMonAcquired, typeMonAcquired);
+ GenTree* initNode = gtNewAssignNode(varNode, zero);
fgInsertStmtAtEnd(fgFirstBB, initNode);
@@ -7959,9 +7959,9 @@ void Compiler::fgAddSyncMethodEnterExit()
lvaCopyThis = lvaGrabTemp(true DEBUGARG("Synchronized method monitor acquired boolean"));
lvaTable[lvaCopyThis].lvType = TYP_REF;
- GenTreePtr thisNode = gtNewLclvNode(info.compThisArg, TYP_REF);
- GenTreePtr copyNode = gtNewLclvNode(lvaCopyThis, TYP_REF);
- GenTreePtr initNode = gtNewAssignNode(copyNode, thisNode);
+ GenTree* thisNode = gtNewLclvNode(info.compThisArg, TYP_REF);
+ GenTree* copyNode = gtNewLclvNode(lvaCopyThis, TYP_REF);
+ GenTree* initNode = gtNewAssignNode(copyNode, thisNode);
fgInsertStmtAtEnd(tryBegBB, initNode);
}
@@ -7993,10 +7993,10 @@ GenTree* Compiler::fgCreateMonitorTree(unsigned lvaMonAcquired, unsigned lvaThis
{
// Insert the expression "enter/exitCrit(this, &acquired)" or "enter/exitCrit(handle, &acquired)"
- var_types typeMonAcquired = TYP_UBYTE;
- GenTreePtr varNode = gtNewLclvNode(lvaMonAcquired, typeMonAcquired);
- GenTreePtr varAddrNode = gtNewOperNode(GT_ADDR, TYP_BYREF, varNode);
- GenTreePtr tree;
+ var_types typeMonAcquired = TYP_UBYTE;
+ GenTree* varNode = gtNewLclvNode(lvaMonAcquired, typeMonAcquired);
+ GenTree* varAddrNode = gtNewOperNode(GT_ADDR, TYP_BYREF, varNode);
+ GenTree* tree;
if (info.compIsStatic)
{
@@ -8122,7 +8122,7 @@ void Compiler::fgAddReversePInvokeEnterExit()
varDsc->lvType = TYP_BLK;
varDsc->lvExactSize = eeGetEEInfo()->sizeOfReversePInvokeFrame;
- GenTreePtr tree;
+ GenTree* tree;
// Add enter pinvoke exit callout at the start of prolog
@@ -8393,7 +8393,7 @@ private:
newReturnBB->bbFlags |= (BBF_PROF_WEIGHT | BBF_RUN_RARELY);
newReturnBB->bbWeight = 0;
- GenTreePtr returnExpr;
+ GenTree* returnExpr;
if (returnConst != nullptr)
{
@@ -8449,7 +8449,7 @@ private:
returnLocalDsc.lvKeepType = 1;
#endif
- GenTreePtr retTemp = comp->gtNewLclvNode(returnLocalNum, returnLocalDsc.TypeGet());
+ GenTree* retTemp = comp->gtNewLclvNode(returnLocalNum, returnLocalDsc.TypeGet());
// make sure copy prop ignores this node (make sure it always does a reload from the temp).
retTemp->gtFlags |= GTF_DONT_CSE;
@@ -8637,13 +8637,13 @@ private:
return nullptr;
}
- GenTreePtr lastExpr = lastStmt->gtStmtExpr;
+ GenTree* lastExpr = lastStmt->gtStmtExpr;
if (!lastExpr->OperIs(GT_RETURN))
{
return nullptr;
}
- GenTreePtr retExpr = lastExpr->gtGetOp1();
+ GenTree* retExpr = lastExpr->gtGetOp1();
if ((retExpr == nullptr) || !retExpr->IsIntegralConst())
{
return nullptr;
@@ -8754,7 +8754,7 @@ void Compiler::fgAddInternal()
// Now assign the original input "this" to the temp
- GenTreePtr tree;
+ GenTree* tree;
tree = gtNewLclvNode(lvaArg0Var, thisType);
@@ -8920,8 +8920,8 @@ void Compiler::fgAddInternal()
// Create the callback which will yield the final answer
- GenTreePtr callback = gtNewHelperCallNode(CORINFO_HELP_DBG_IS_JUST_MY_CODE, TYP_VOID);
- callback = new (this, GT_COLON) GenTreeColon(TYP_VOID, gtNewNothingNode(), callback);
+ GenTree* callback = gtNewHelperCallNode(CORINFO_HELP_DBG_IS_JUST_MY_CODE, TYP_VOID);
+ callback = new (this, GT_COLON) GenTreeColon(TYP_VOID, gtNewNothingNode(), callback);
// Stick the conditional call at the start of the method
@@ -8937,7 +8937,7 @@ void Compiler::fgAddInternal()
noway_assert(opts.compNeedSecurityCheck);
noway_assert(lvaSecurityObject != BAD_VAR_NUM);
- GenTreePtr tree;
+ GenTree* tree;
/* Insert the expression "call JIT_Security_Prolog(MethodHnd, &SecurityObject)" */
@@ -8972,7 +8972,7 @@ void Compiler::fgAddInternal()
if (info.compFlags & CORINFO_FLG_SYNCH)
{
- GenTreePtr tree = NULL;
+ GenTree* tree = NULL;
/* Insert the expression "enterCrit(this)" or "enterCrit(handle)" */
@@ -9048,7 +9048,7 @@ void Compiler::fgAddInternal()
if (tiRuntimeCalloutNeeded)
{
- GenTreePtr tree;
+ GenTree* tree;
/* Insert the expression "call verificationRuntimeCheck(MethodHnd)" */
@@ -9092,7 +9092,7 @@ void Compiler::fgAddInternal()
*
* Create a new statement from tree and wire the links up.
*/
-GenTreeStmt* Compiler::fgNewStmtFromTree(GenTreePtr tree, BasicBlock* block, IL_OFFSETX offs)
+GenTreeStmt* Compiler::fgNewStmtFromTree(GenTree* tree, BasicBlock* block, IL_OFFSETX offs)
{
GenTreeStmt* stmt = gtNewStmt(tree, offs);
@@ -9112,17 +9112,17 @@ GenTreeStmt* Compiler::fgNewStmtFromTree(GenTreePtr tree, BasicBlock* block, IL_
return stmt;
}
-GenTreeStmt* Compiler::fgNewStmtFromTree(GenTreePtr tree)
+GenTreeStmt* Compiler::fgNewStmtFromTree(GenTree* tree)
{
return fgNewStmtFromTree(tree, nullptr, BAD_IL_OFFSET);
}
-GenTreeStmt* Compiler::fgNewStmtFromTree(GenTreePtr tree, BasicBlock* block)
+GenTreeStmt* Compiler::fgNewStmtFromTree(GenTree* tree, BasicBlock* block)
{
return fgNewStmtFromTree(tree, block, BAD_IL_OFFSET);
}
-GenTreeStmt* Compiler::fgNewStmtFromTree(GenTreePtr tree, IL_OFFSETX offs)
+GenTreeStmt* Compiler::fgNewStmtFromTree(GenTree* tree, IL_OFFSETX offs)
{
return fgNewStmtFromTree(tree, nullptr, offs);
}
@@ -9510,7 +9510,7 @@ void Compiler::fgSimpleLowering()
#ifdef LEGACY_BACKEND
for (GenTreeStmt* stmt = block->FirstNonPhiDef(); stmt; stmt = stmt->gtNextStmt)
{
- for (GenTreePtr tree = stmt->gtStmtList; tree; tree = tree->gtNext)
+ for (GenTree* tree = stmt->gtStmtList; tree; tree = tree->gtNext)
{
#else
@@ -9525,9 +9525,9 @@ void Compiler::fgSimpleLowering()
case GT_ARR_LENGTH:
{
GenTreeArrLen* arrLen = tree->AsArrLen();
- GenTreePtr arr = arrLen->gtArrLen.ArrRef();
- GenTreePtr add;
- GenTreePtr con;
+ GenTree* arr = arrLen->gtArrLen.ArrRef();
+ GenTree* add;
+ GenTree* con;
/* Create the expression "*(array_addr + ArrLenOffs)" */
@@ -9682,7 +9682,7 @@ void Compiler::fgSimpleLowering()
/*****************************************************************************
*/
-void Compiler::fgUpdateRefCntForClone(BasicBlock* addedToBlock, GenTreePtr clonedTree)
+void Compiler::fgUpdateRefCntForClone(BasicBlock* addedToBlock, GenTree* clonedTree)
{
assert(clonedTree->gtOper != GT_STMT);
@@ -9696,7 +9696,7 @@ void Compiler::fgUpdateRefCntForClone(BasicBlock* addedToBlock, GenTreePtr clone
/*****************************************************************************
*/
-void Compiler::fgUpdateRefCntForExtract(GenTreePtr wholeTree, GenTreePtr keptTree)
+void Compiler::fgUpdateRefCntForExtract(GenTree* wholeTree, GenTree* keptTree)
{
if (lvaLocalVarRefCounted)
{
@@ -9714,7 +9714,7 @@ void Compiler::fgUpdateRefCntForExtract(GenTreePtr wholeTree, GenTreePtr keptTre
}
}
-VARSET_VALRET_TP Compiler::fgGetVarBits(GenTreePtr tree)
+VARSET_VALRET_TP Compiler::fgGetVarBits(GenTree* tree)
{
VARSET_TP varBits(VarSetOps::MakeEmpty(this));
@@ -9891,7 +9891,7 @@ void Compiler::fgRemoveEmptyBlocks()
*/
void Compiler::fgRemoveStmt(BasicBlock* block,
- GenTreePtr node,
+ GenTree* node,
// whether to decrement ref counts for tracked vars in statement
bool updateRefCount)
{
@@ -10017,7 +10017,7 @@ inline bool OperIsControlFlow(genTreeOps oper)
* Returns true if it did remove the statement.
*/
-bool Compiler::fgCheckRemoveStmt(BasicBlock* block, GenTreePtr node)
+bool Compiler::fgCheckRemoveStmt(BasicBlock* block, GenTree* node)
{
if (opts.compDbgCode)
{
@@ -10026,7 +10026,7 @@ bool Compiler::fgCheckRemoveStmt(BasicBlock* block, GenTreePtr node)
GenTreeStmt* stmt = node->AsStmt();
- GenTreePtr tree = stmt->gtStmtExpr;
+ GenTree* tree = stmt->gtStmtExpr;
genTreeOps oper = tree->OperGet();
if (OperIsControlFlow(oper) || GenTree::OperIsHWIntrinsic(oper) || oper == GT_NO_OP)
@@ -10220,15 +10220,15 @@ void Compiler::fgCompactBlocks(BasicBlock* block, BasicBlock* bNext)
}
else
{
- GenTreePtr blkNonPhi1 = block->FirstNonPhiDef();
- GenTreePtr bNextNonPhi1 = bNext->FirstNonPhiDef();
- GenTreePtr blkFirst = block->firstStmt();
- GenTreePtr bNextFirst = bNext->firstStmt();
+ GenTree* blkNonPhi1 = block->FirstNonPhiDef();
+ GenTree* bNextNonPhi1 = bNext->FirstNonPhiDef();
+ GenTree* blkFirst = block->firstStmt();
+ GenTree* bNextFirst = bNext->firstStmt();
// Does the second have any phis?
if (bNextFirst != nullptr && bNextFirst != bNextNonPhi1)
{
- GenTreePtr bNextLast = bNextFirst->gtPrev;
+ GenTree* bNextLast = bNextFirst->gtPrev;
assert(bNextLast->gtNext == nullptr);
// Does "blk" have phis?
@@ -10237,7 +10237,7 @@ void Compiler::fgCompactBlocks(BasicBlock* block, BasicBlock* bNext)
// Yes, has phis.
// Insert after the last phi of "block."
// First, bNextPhis after last phi of block.
- GenTreePtr blkLastPhi;
+ GenTree* blkLastPhi;
if (blkNonPhi1 != nullptr)
{
blkLastPhi = blkNonPhi1->gtPrev;
@@ -10251,7 +10251,7 @@ void Compiler::fgCompactBlocks(BasicBlock* block, BasicBlock* bNext)
bNextFirst->gtPrev = blkLastPhi;
// Now, rest of "block" after last phi of "bNext".
- GenTreePtr bNextLastPhi = nullptr;
+ GenTree* bNextLastPhi = nullptr;
if (bNextNonPhi1 != nullptr)
{
bNextLastPhi = bNextNonPhi1->gtPrev;
@@ -10284,10 +10284,10 @@ void Compiler::fgCompactBlocks(BasicBlock* block, BasicBlock* bNext)
if (blkFirst != nullptr) // If "block" has no statements, fusion will work fine...
{
// First, bNextPhis at start of block.
- GenTreePtr blkLast = blkFirst->gtPrev;
- block->bbTreeList = bNextFirst;
+ GenTree* blkLast = blkFirst->gtPrev;
+ block->bbTreeList = bNextFirst;
// Now, rest of "block" (if it exists) after last phi of "bNext".
- GenTreePtr bNextLastPhi = nullptr;
+ GenTree* bNextLastPhi = nullptr;
if (bNextNonPhi1 != nullptr)
{
// There is a first non phi, so the last phi is before it.
@@ -10312,19 +10312,19 @@ void Compiler::fgCompactBlocks(BasicBlock* block, BasicBlock* bNext)
}
// Now proceed with the updated bbTreeLists.
- GenTreePtr stmtList1 = block->firstStmt();
- GenTreePtr stmtList2 = bNext->firstStmt();
+ GenTree* stmtList1 = block->firstStmt();
+ GenTree* stmtList2 = bNext->firstStmt();
/* the block may have an empty list */
if (stmtList1)
{
- GenTreePtr stmtLast1 = block->lastStmt();
+ GenTree* stmtLast1 = block->lastStmt();
/* The second block may be a GOTO statement or something with an empty bbTreeList */
if (stmtList2)
{
- GenTreePtr stmtLast2 = bNext->lastStmt();
+ GenTree* stmtLast2 = bNext->lastStmt();
/* append list2 to list 1 */
@@ -10644,7 +10644,7 @@ void Compiler::fgUnreachableBlock(BasicBlock* block)
// TODO-Cleanup: I'm not sure why this happens -- if the block is unreachable, why does it have phis?
// Anyway, remove any phis.
- GenTreePtr firstNonPhi = block->FirstNonPhiDef();
+ GenTree* firstNonPhi = block->FirstNonPhiDef();
if (block->bbTreeList != firstNonPhi)
{
if (firstNonPhi != nullptr)
@@ -13902,7 +13902,7 @@ bool Compiler::fgOptimizeEmptyBlock(BasicBlock* block)
}
else
{
- GenTreePtr nopStmt = fgInsertStmtAtEnd(block, nop);
+ GenTree* nopStmt = fgInsertStmtAtEnd(block, nop);
fgSetStmtSeq(nopStmt);
gtSetStmtInfo(nopStmt);
}
@@ -14110,7 +14110,7 @@ bool Compiler::fgOptimizeSwitchBranches(BasicBlock* block)
if (switchTree->gtFlags & GTF_SIDE_EFFECT)
{
/* Extract the side effects from the conditional */
- GenTreePtr sideEffList = nullptr;
+ GenTree* sideEffList = nullptr;
gtExtractSideEffList(switchTree, &sideEffList);
@@ -14524,7 +14524,7 @@ bool Compiler::fgOptimizeBranchToNext(BasicBlock* block, BasicBlock* bNext, Basi
if (cond->gtStmtExpr->gtFlags & GTF_SIDE_EFFECT)
{
/* Extract the side effects from the conditional */
- GenTreePtr sideEffList = nullptr;
+ GenTree* sideEffList = nullptr;
gtExtractSideEffList(cond->gtStmtExpr, &sideEffList);
@@ -14668,7 +14668,7 @@ bool Compiler::fgOptimizeBranch(BasicBlock* bJump)
unsigned estDupCostSz = 0;
for (stmt = bDest->firstStmt(); stmt; stmt = stmt->gtNextStmt)
{
- GenTreePtr expr = stmt->gtStmtExpr;
+ GenTree* expr = stmt->gtStmtExpr;
/* We call gtPrepareCost to measure the cost of duplicating this tree */
gtPrepareCost(expr);
@@ -14817,7 +14817,7 @@ bool Compiler::fgOptimizeBranch(BasicBlock* bJump)
/* Get to the condition node from the statement tree */
- GenTreePtr condTree = stmt->gtStmtExpr;
+ GenTree* condTree = stmt->gtStmtExpr;
noway_assert(condTree->gtOper == GT_JTRUE);
if (condTree->gtOper != GT_JTRUE)
@@ -18482,7 +18482,7 @@ GenTree* Compiler::fgSetTreeSeq(GenTree* tree, GenTree* prevTree, bool isLIR)
* Uses 'global' - fgTreeSeqLst
*/
-void Compiler::fgSetTreeSeqHelper(GenTreePtr tree, bool isLIR)
+void Compiler::fgSetTreeSeqHelper(GenTree* tree, bool isLIR)
{
genTreeOps oper;
unsigned kind;
@@ -18553,8 +18553,8 @@ void Compiler::fgSetTreeSeqHelper(GenTreePtr tree, bool isLIR)
if (kind & GTK_SMPOP)
{
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtGetOp2IfPresent();
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtGetOp2IfPresent();
// Special handling for GT_LIST
if (tree->OperGet() == GT_LIST)
@@ -18562,12 +18562,12 @@ void Compiler::fgSetTreeSeqHelper(GenTreePtr tree, bool isLIR)
// First, handle the list items, which will be linked in forward order.
// As we go, we will link the GT_LIST nodes in reverse order - we will number
// them and update fgTreeSeqList in a subsequent traversal.
- GenTreePtr nextList = tree;
- GenTreePtr list = nullptr;
+ GenTree* nextList = tree;
+ GenTree* list = nullptr;
while (nextList != nullptr && nextList->OperGet() == GT_LIST)
{
- list = nextList;
- GenTreePtr listItem = list->gtOp.gtOp1;
+ list = nextList;
+ GenTree* listItem = list->gtOp.gtOp1;
fgSetTreeSeqHelper(listItem, isLIR);
nextList = list->gtOp.gtOp2;
if (nextList != nullptr)
@@ -18803,7 +18803,7 @@ void Compiler::fgSetTreeSeqHelper(GenTreePtr tree, bool isLIR)
fgSetTreeSeqFinish(tree, isLIR);
}
-void Compiler::fgSetTreeSeqFinish(GenTreePtr tree, bool isLIR)
+void Compiler::fgSetTreeSeqFinish(GenTree* tree, bool isLIR)
{
// If we are sequencing for LIR:
// - Clear the reverse ops flag
@@ -18991,7 +18991,7 @@ void Compiler::fgSetBlockOrder()
/*****************************************************************************/
-void Compiler::fgSetStmtSeq(GenTreePtr tree)
+void Compiler::fgSetStmtSeq(GenTree* tree)
{
GenTree list; // helper node that we use to start the StmtList
// It's located in front of the first node in the list
@@ -19022,8 +19022,8 @@ void Compiler::fgSetStmtSeq(GenTreePtr tree)
goto BAD_LIST;
}
- GenTreePtr temp;
- GenTreePtr last;
+ GenTree* temp;
+ GenTree* last;
for (temp = list.gtNext, last = &list; temp; last = temp, temp = temp->gtNext)
{
if (temp->gtPrev != last)
@@ -19041,7 +19041,7 @@ void Compiler::fgSetStmtSeq(GenTreePtr tree)
gtDispTree(tree->gtStmt.gtStmtExpr);
printf("\n");
- for (GenTreePtr bad = &list; bad; bad = bad->gtNext)
+ for (GenTree* bad = &list; bad; bad = bad->gtNext)
{
printf(" entry at ");
printTreeID(bad);
@@ -19078,7 +19078,7 @@ void Compiler::fgSetStmtSeq(GenTreePtr tree)
void Compiler::fgSetBlockOrder(BasicBlock* block)
{
- GenTreePtr tree;
+ GenTree* tree;
tree = block->bbTreeList;
if (!tree)
@@ -19126,7 +19126,7 @@ void Compiler::fgSetBlockOrder(BasicBlock* block)
// reg0 - The register for the destination
// reg1 - The register for the source
// reg2 - The register for the size
-// opsPtr - An array of 3 GenTreePtr's, an out argument for the operands, in order
+// opsPtr - An array of 3 GenTree*'s, an out argument for the operands, in order
// regsPtr - An array of three regMaskTP - an out argument for the registers, in order
//
// Return Value:
@@ -19158,18 +19158,18 @@ void Compiler::fgSetBlockOrder(BasicBlock* block)
// that is set, the size is evaluated first, and then the src and dst are evaluated
// according to the GTF_REVERSE_OPS flag on the assignment.
-void Compiler::fgOrderBlockOps(GenTreePtr tree,
- regMaskTP reg0,
- regMaskTP reg1,
- regMaskTP reg2,
- GenTreePtr* opsPtr, // OUT
- regMaskTP* regsPtr) // OUT
+void Compiler::fgOrderBlockOps(GenTree* tree,
+ regMaskTP reg0,
+ regMaskTP reg1,
+ regMaskTP reg2,
+ GenTree** opsPtr, // OUT
+ regMaskTP* regsPtr) // OUT
{
assert(tree->OperIsBlkOp());
GenTreeBlk* destBlk = tree->gtOp.gtOp1->AsBlk();
- GenTreePtr destAddr = destBlk->Addr();
- GenTreePtr srcPtrOrVal = tree->gtOp.gtOp2;
+ GenTree* destAddr = destBlk->Addr();
+ GenTree* srcPtrOrVal = tree->gtOp.gtOp2;
if (tree->OperIsCopyBlkOp())
{
assert(srcPtrOrVal->OperIsIndir());
@@ -19179,7 +19179,7 @@ void Compiler::fgOrderBlockOps(GenTreePtr tree,
assert(destAddr != nullptr);
assert(srcPtrOrVal != nullptr);
- GenTreePtr ops[3] = {
+ GenTree* ops[3] = {
destAddr, // Dest address
srcPtrOrVal, // Val / Src address
nullptr // Size of block
@@ -19241,9 +19241,9 @@ void Compiler::fgOrderBlockOps(GenTreePtr tree,
// TODO-Cleanup: Add a debug-only method that verifies this.
/* static */
-GenTreePtr Compiler::fgGetFirstNode(GenTreePtr tree)
+GenTree* Compiler::fgGetFirstNode(GenTree* tree)
{
- GenTreePtr child = tree;
+ GenTree* child = tree;
while (child->NumChildren() > 0)
{
if (child->OperIsBinary() && child->IsReverseOp())
@@ -19295,7 +19295,7 @@ unsigned Compiler::fgGetCodeEstimate(BasicBlock* block)
break;
}
- GenTreePtr tree = block->FirstNonPhiDef();
+ GenTree* tree = block->FirstNonPhiDef();
if (tree)
{
do
@@ -20559,7 +20559,7 @@ void Compiler::fgDispBasicBlocks(bool dumpTrees)
/*****************************************************************************/
// Increment the stmtNum and dump the tree using gtDispTree
//
-void Compiler::fgDumpStmtTree(GenTreePtr stmt, unsigned bbNum)
+void Compiler::fgDumpStmtTree(GenTree* stmt, unsigned bbNum)
{
compCurStmtNum++; // Increment the current stmtNum
@@ -20634,10 +20634,10 @@ void Compiler::fgDumpTrees(BasicBlock* firstBlock, BasicBlock* lastBlock)
*/
/* static */
-Compiler::fgWalkResult Compiler::fgStress64RsltMulCB(GenTreePtr* pTree, fgWalkData* data)
+Compiler::fgWalkResult Compiler::fgStress64RsltMulCB(GenTree** pTree, fgWalkData* data)
{
- GenTreePtr tree = *pTree;
- Compiler* pComp = data->compiler;
+ GenTree* tree = *pTree;
+ Compiler* pComp = data->compiler;
if (tree->gtOper != GT_MUL || tree->gtType != TYP_INT || (tree->gtOverflow()))
{
@@ -21111,7 +21111,7 @@ void Compiler::fgDebugCheckBBlist(bool checkBBNum /* = false */, bool checkBBRef
*
****************************************************************************/
-void Compiler::fgDebugCheckFlags(GenTreePtr tree)
+void Compiler::fgDebugCheckFlags(GenTree* tree)
{
noway_assert(tree->gtOper != GT_STMT);
@@ -21152,8 +21152,8 @@ void Compiler::fgDebugCheckFlags(GenTreePtr tree)
else if (kind & GTK_SMPOP)
{
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtGetOp2IfPresent();
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtGetOp2IfPresent();
// During GS work, we make shadow copies for params.
// In gsParamsToShadows(), we create a shadow var of TYP_INT for every small type param.
@@ -21259,7 +21259,7 @@ void Compiler::fgDebugCheckFlags(GenTreePtr tree)
was set and thus GTF_ASG cannot be considered here. */
/* For a GT_ASG(GT_IND(x), y) we are interested in the side effects of x */
- GenTreePtr op1p;
+ GenTree* op1p;
if (GenTree::OperIsAssignment(oper) && (op1->gtOper == GT_IND))
{
op1p = op1->gtOp.gtOp1;
@@ -21303,8 +21303,8 @@ void Compiler::fgDebugCheckFlags(GenTreePtr tree)
{
case GT_CALL:
- GenTreePtr args;
- GenTreePtr argx;
+ GenTree* args;
+ GenTree* argx;
GenTreeCall* call;
call = tree->AsCall();
@@ -21377,8 +21377,8 @@ void Compiler::fgDebugCheckFlags(GenTreePtr tree)
case GT_ARR_ELEM:
- GenTreePtr arrObj;
- unsigned dim;
+ GenTree* arrObj;
+ unsigned dim;
arrObj = tree->gtArrElem.gtArrObj;
fgDebugCheckFlags(arrObj);
@@ -21469,7 +21469,7 @@ void Compiler::fgDebugCheckFlags(GenTreePtr tree)
// Note:
// Checking that all bits that are set in treeFlags are also set in chkFlags is currently disabled.
-void Compiler::fgDebugCheckFlagsHelper(GenTreePtr tree, unsigned treeFlags, unsigned chkFlags)
+void Compiler::fgDebugCheckFlagsHelper(GenTree* tree, unsigned treeFlags, unsigned chkFlags)
{
if (chkFlags & ~treeFlags)
{
@@ -21532,7 +21532,7 @@ void Compiler::fgDebugCheckNodeLinks(BasicBlock* block, GenTree* node)
// terminates.
assert(stmt->gtStmtList->gtPrev == nullptr);
- for (GenTreePtr tree = stmt->gtStmtList; tree != nullptr; tree = tree->gtNext)
+ for (GenTree* tree = stmt->gtStmtList; tree != nullptr; tree = tree->gtNext)
{
if (tree->gtPrev)
{
@@ -21554,7 +21554,7 @@ void Compiler::fgDebugCheckNodeLinks(BasicBlock* block, GenTree* node)
/* Cross-check gtPrev,gtNext with gtOp for simple trees */
- GenTreePtr expectedPrevTree = nullptr;
+ GenTree* expectedPrevTree = nullptr;
if (tree->OperIsLeaf())
{
@@ -21835,7 +21835,7 @@ void Compiler::fgDebugCheckNodesUniqueness()
{
if (block->IsLIR())
{
- for (GenTreePtr i : LIR::AsRange(block))
+ for (GenTree* i : LIR::AsRange(block))
{
walker.CheckTreeId(i->gtTreeID);
}
@@ -21844,7 +21844,7 @@ void Compiler::fgDebugCheckNodesUniqueness()
{
for (GenTreeStmt* stmt = block->firstStmt(); stmt != nullptr; stmt = stmt->gtNextStmt)
{
- GenTreePtr root = stmt->gtStmtExpr;
+ GenTree* root = stmt->gtStmtExpr;
fgWalkTreePre(&root, UniquenessCheckWalker::MarkTreeId, &walker);
}
}
@@ -21946,7 +21946,7 @@ void Compiler::fgInline()
compCurBB = block;
GenTreeStmt* stmt;
- GenTreePtr expr;
+ GenTree* expr;
for (stmt = block->firstStmt(); stmt != nullptr; stmt = stmt->gtNextStmt)
{
@@ -22048,9 +22048,9 @@ void Compiler::fgInline()
// Note:
// Invokes fgNoteNonInlineCandidate on the nodes it finds.
-Compiler::fgWalkResult Compiler::fgFindNonInlineCandidate(GenTreePtr* pTree, fgWalkData* data)
+Compiler::fgWalkResult Compiler::fgFindNonInlineCandidate(GenTree** pTree, fgWalkData* data)
{
- GenTreePtr tree = *pTree;
+ GenTree* tree = *pTree;
if (tree->gtOper == GT_CALL)
{
Compiler* compiler = data->compiler;
@@ -22111,7 +22111,7 @@ void Compiler::fgNoteNonInlineCandidate(GenTreeStmt* stmt, GenTreeCall* call)
* type or __without replacing__ and just returning a subtree or by __modifying__
* a subtree.
*/
-GenTreePtr Compiler::fgGetStructAsStructPtr(GenTreePtr tree)
+GenTree* Compiler::fgGetStructAsStructPtr(GenTree* tree)
{
noway_assert((tree->gtOper == GT_LCL_VAR) || (tree->gtOper == GT_FIELD) || (tree->gtOper == GT_IND) ||
(tree->gtOper == GT_BLK) || (tree->gtOper == GT_OBJ) || tree->OperIsSIMD() ||
@@ -22144,7 +22144,7 @@ GenTreePtr Compiler::fgGetStructAsStructPtr(GenTreePtr tree)
* Assign the inlinee to a tmp, if it is a call, just assign it to a lclVar, else we can
* use a copyblock to do the assignment.
*/
-GenTreePtr Compiler::fgAssignStructInlineeToVar(GenTreePtr child, CORINFO_CLASS_HANDLE retClsHnd)
+GenTree* Compiler::fgAssignStructInlineeToVar(GenTree* child, CORINFO_CLASS_HANDLE retClsHnd)
{
assert(child->gtOper != GT_RET_EXPR && child->gtOper != GT_MKREFANY);
@@ -22152,21 +22152,21 @@ GenTreePtr Compiler::fgAssignStructInlineeToVar(GenTreePtr child, CORINFO_CLASS_
lvaSetStruct(tmpNum, retClsHnd, false);
var_types structType = lvaTable[tmpNum].lvType;
- GenTreePtr dst = gtNewLclvNode(tmpNum, structType);
+ GenTree* dst = gtNewLclvNode(tmpNum, structType);
// If we have a call, we'd like it to be: V00 = call(), but first check if
// we have a ", , , call()" -- this is very defensive as we may never get
// an inlinee that is made of commas. If the inlinee is not a call, then
// we use a copy block to do the assignment.
- GenTreePtr src = child;
- GenTreePtr lastComma = nullptr;
+ GenTree* src = child;
+ GenTree* lastComma = nullptr;
while (src->gtOper == GT_COMMA)
{
lastComma = src;
src = src->gtOp.gtOp2;
}
- GenTreePtr newInlinee = nullptr;
+ GenTree* newInlinee = nullptr;
if (src->gtOper == GT_CALL)
{
// If inlinee was just a call, new inlinee is v05 = call()
@@ -22189,13 +22189,13 @@ GenTreePtr Compiler::fgAssignStructInlineeToVar(GenTreePtr child, CORINFO_CLASS_
else
{
// Inlinee is not a call, so just create a copy block to the tmp.
- src = child;
- GenTreePtr dstAddr = fgGetStructAsStructPtr(dst);
- GenTreePtr srcAddr = fgGetStructAsStructPtr(src);
- newInlinee = gtNewCpObjNode(dstAddr, srcAddr, retClsHnd, false);
+ src = child;
+ GenTree* dstAddr = fgGetStructAsStructPtr(dst);
+ GenTree* srcAddr = fgGetStructAsStructPtr(src);
+ newInlinee = gtNewCpObjNode(dstAddr, srcAddr, retClsHnd, false);
}
- GenTreePtr production = gtNewLclvNode(tmpNum, structType);
+ GenTree* production = gtNewLclvNode(tmpNum, structType);
return gtNewOperNode(GT_COMMA, structType, newInlinee, production);
}
@@ -22210,7 +22210,7 @@ GenTreePtr Compiler::fgAssignStructInlineeToVar(GenTreePtr child, CORINFO_CLASS_
* a lclVar/call. So it is not worthwhile to do pattern matching optimizations like addr(ldobj(op1))
* can just be op1.
*/
-void Compiler::fgAttachStructInlineeToAsg(GenTreePtr tree, GenTreePtr child, CORINFO_CLASS_HANDLE retClsHnd)
+void Compiler::fgAttachStructInlineeToAsg(GenTree* tree, GenTree* child, CORINFO_CLASS_HANDLE retClsHnd)
{
// We are okay to have:
// 1. V02 = call();
@@ -22229,8 +22229,8 @@ void Compiler::fgAttachStructInlineeToAsg(GenTreePtr tree, GenTreePtr child, COR
return;
}
- GenTreePtr dstAddr = fgGetStructAsStructPtr(tree->gtOp.gtOp1);
- GenTreePtr srcAddr = fgGetStructAsStructPtr(
+ GenTree* dstAddr = fgGetStructAsStructPtr(tree->gtOp.gtOp1);
+ GenTree* srcAddr = fgGetStructAsStructPtr(
(child->gtOper == GT_CALL)
? fgAssignStructInlineeToVar(child, retClsHnd) // Assign to a variable if it is a call.
: child); // Just get the address, if not a call.
@@ -22280,9 +22280,9 @@ void Compiler::fgAttachStructInlineeToAsg(GenTreePtr tree, GenTreePtr child, COR
// where structs can be returned in multiple registers, ensure the
// call has a suitable parent.
-Compiler::fgWalkResult Compiler::fgUpdateInlineReturnExpressionPlaceHolder(GenTreePtr* pTree, fgWalkData* data)
+Compiler::fgWalkResult Compiler::fgUpdateInlineReturnExpressionPlaceHolder(GenTree** pTree, fgWalkData* data)
{
- GenTreePtr tree = *pTree;
+ GenTree* tree = *pTree;
Compiler* comp = data->compiler;
CORINFO_CLASS_HANDLE retClsHnd = NO_CLASS_HANDLE;
@@ -22299,7 +22299,7 @@ Compiler::fgWalkResult Compiler::fgUpdateInlineReturnExpressionPlaceHolder(GenTr
do
{
// Obtained the expanded inline candidate
- GenTreePtr inlineCandidate = tree->gtRetExpr.gtInlineCandidate;
+ GenTree* inlineCandidate = tree->gtRetExpr.gtInlineCandidate;
#ifdef DEBUG
if (comp->verbose)
@@ -22334,7 +22334,7 @@ Compiler::fgWalkResult Compiler::fgUpdateInlineReturnExpressionPlaceHolder(GenTr
//
// May miss cases where there are intermediaries between call
// and this, eg commas.
- GenTreePtr parentTree = data->parent;
+ GenTree* parentTree = data->parent;
if ((parentTree != nullptr) && (parentTree->gtOper == GT_CALL))
{
@@ -22375,7 +22375,7 @@ Compiler::fgWalkResult Compiler::fgUpdateInlineReturnExpressionPlaceHolder(GenTr
//
if (comp->IsMultiRegReturnedType(retClsHnd))
{
- GenTreePtr parent = data->parent;
+ GenTree* parent = data->parent;
// See assert below, we only look one level above for an asg parent.
if (parent->gtOper == GT_ASG)
{
@@ -22401,7 +22401,7 @@ Compiler::fgWalkResult Compiler::fgUpdateInlineReturnExpressionPlaceHolder(GenTr
//
if ((tree->gtOper == GT_ASG) && (tree->gtOp.gtOp2->gtOper == GT_COMMA))
{
- GenTreePtr comma;
+ GenTree* comma;
for (comma = tree->gtOp.gtOp2; comma->gtOper == GT_COMMA; comma = comma->gtOp.gtOp2)
{
// empty
@@ -22424,9 +22424,9 @@ Compiler::fgWalkResult Compiler::fgUpdateInlineReturnExpressionPlaceHolder(GenTr
*/
/* static */
-Compiler::fgWalkResult Compiler::fgDebugCheckInlineCandidates(GenTreePtr* pTree, fgWalkData* data)
+Compiler::fgWalkResult Compiler::fgDebugCheckInlineCandidates(GenTree** pTree, fgWalkData* data)
{
- GenTreePtr tree = *pTree;
+ GenTree* tree = *pTree;
if (tree->gtOper == GT_CALL)
{
assert((tree->gtFlags & GTF_CALL_INLINE_CANDIDATE) == 0);
@@ -22697,7 +22697,7 @@ void Compiler::fgInsertInlineeBlocks(InlineInfo* pInlineInfo)
#ifdef DEBUG
- GenTreePtr currentDumpStmt = nullptr;
+ GenTree* currentDumpStmt = nullptr;
if (verbose)
{
@@ -22720,7 +22720,7 @@ void Compiler::fgInsertInlineeBlocks(InlineInfo* pInlineInfo)
}
// Prepend statements
- GenTreePtr stmtAfter = fgInlinePrependStatements(pInlineInfo);
+ GenTree* stmtAfter = fgInlinePrependStatements(pInlineInfo);
#ifdef DEBUG
if (verbose)
@@ -22808,10 +22808,10 @@ void Compiler::fgInsertInlineeBlocks(InlineInfo* pInlineInfo)
//
// Split statements between topBlock and bottomBlock
//
- GenTreePtr topBlock_Begin;
- GenTreePtr topBlock_End;
- GenTreePtr bottomBlock_Begin;
- GenTreePtr bottomBlock_End;
+ GenTree* topBlock_Begin;
+ GenTree* topBlock_End;
+ GenTree* bottomBlock_Begin;
+ GenTree* bottomBlock_End;
topBlock_Begin = nullptr;
topBlock_End = nullptr;
@@ -23050,7 +23050,7 @@ _Done:
// and are given the same inline context as the call; any calls
// added here will appear to have been part of the immediate caller.
-GenTreePtr Compiler::fgInlinePrependStatements(InlineInfo* inlineInfo)
+GenTree* Compiler::fgInlinePrependStatements(InlineInfo* inlineInfo)
{
BasicBlock* block = inlineInfo->iciBlock;
GenTreeStmt* callStmt = inlineInfo->iciStmt;
@@ -23076,7 +23076,7 @@ GenTreePtr Compiler::fgInlinePrependStatements(InlineInfo* inlineInfo)
InlArgInfo* inlArgInfo = inlineInfo->inlArgInfo;
InlLclVarInfo* lclVarInfo = inlineInfo->lclVarInfo;
- GenTreePtr tree;
+ GenTree* tree;
// Create the null check statement (without appending it to the statement list yet) for the 'this' pointer if
// necessary.
@@ -23086,7 +23086,7 @@ GenTreePtr Compiler::fgInlinePrependStatements(InlineInfo* inlineInfo)
// Note: Here we no longer do the optimization that was done by thisDereferencedFirst in the old inliner.
// However, the assertionProp logic will remove any unnecessary null checks that we may have added
//
- GenTreePtr nullcheck = nullptr;
+ GenTree* nullcheck = nullptr;
if (call->gtFlags & GTF_CALL_NULLCHECK && !inlineInfo->thisDereferencedFirst)
{
@@ -23131,7 +23131,7 @@ GenTreePtr Compiler::fgInlinePrependStatements(InlineInfo* inlineInfo)
once) but the optimization cannot be applied.
*/
- GenTreePtr argSingleUseNode = argInfo.argBashTmpNode;
+ GenTree* argSingleUseNode = argInfo.argBashTmpNode;
if ((argSingleUseNode != nullptr) && !(argSingleUseNode->gtFlags & GTF_VAR_CLONED) && argIsSingleDef)
{
@@ -23430,7 +23430,7 @@ GenTreePtr Compiler::fgInlinePrependStatements(InlineInfo* inlineInfo)
// we skip nulling the locals, since it can interfere
// with tail calls introduced by the local.
-void Compiler::fgInlineAppendStatements(InlineInfo* inlineInfo, BasicBlock* block, GenTreePtr stmtAfter)
+void Compiler::fgInlineAppendStatements(InlineInfo* inlineInfo, BasicBlock* block, GenTree* stmtAfter)
{
// If this inlinee was passed a runtime lookup generic context and
// ignores it, we can decrement the "generic context was used" ref
@@ -23480,7 +23480,7 @@ void Compiler::fgInlineAppendStatements(InlineInfo* inlineInfo, BasicBlock* bloc
JITDUMP("fgInlineAppendStatements: nulling out gc ref inlinee locals.\n");
- GenTreePtr callStmt = inlineInfo->iciStmt;
+ GenTree* callStmt = inlineInfo->iciStmt;
IL_OFFSETX callILOffset = callStmt->gtStmt.gtStmtILoffsx;
CORINFO_METHOD_INFO* InlineeMethodInfo = InlineeCompiler->info.compMethodInfo;
const unsigned lclCnt = InlineeMethodInfo->locals.numArgs;
@@ -23523,7 +23523,7 @@ void Compiler::fgInlineAppendStatements(InlineInfo* inlineInfo, BasicBlock* bloc
// Does the local we're about to null out appear in the return
// expression? If so we somehow messed up and didn't properly
// spill the return value. See impInlineFetchLocal.
- GenTreePtr retExpr = inlineInfo->retExpr;
+ GenTree* retExpr = inlineInfo->retExpr;
if (retExpr != nullptr)
{
const bool interferesWithReturn = gtHasRef(inlineInfo->retExpr, tmpNum, false);
@@ -23531,8 +23531,8 @@ void Compiler::fgInlineAppendStatements(InlineInfo* inlineInfo, BasicBlock* bloc
}
// Assign null to the local.
- GenTreePtr nullExpr = gtNewTempAssign(tmpNum, gtNewZeroConNode(lclTyp));
- GenTreePtr nullStmt = gtNewStmt(nullExpr, callILOffset);
+ GenTree* nullExpr = gtNewTempAssign(tmpNum, gtNewZeroConNode(lclTyp));
+ GenTree* nullStmt = gtNewStmt(nullExpr, callILOffset);
if (stmtAfter == nullptr)
{
@@ -23557,9 +23557,9 @@ void Compiler::fgInlineAppendStatements(InlineInfo* inlineInfo, BasicBlock* bloc
/*****************************************************************************/
/*static*/
-Compiler::fgWalkResult Compiler::fgChkThrowCB(GenTreePtr* pTree, fgWalkData* data)
+Compiler::fgWalkResult Compiler::fgChkThrowCB(GenTree** pTree, fgWalkData* data)
{
- GenTreePtr tree = *pTree;
+ GenTree* tree = *pTree;
// If this tree doesn't have the EXCEPT flag set, then there is no
// way any of the child nodes could throw, so we can stop recursing.
@@ -23603,9 +23603,9 @@ Compiler::fgWalkResult Compiler::fgChkThrowCB(GenTreePtr* pTree, fgWalkData* dat
/*****************************************************************************/
/*static*/
-Compiler::fgWalkResult Compiler::fgChkLocAllocCB(GenTreePtr* pTree, fgWalkData* data)
+Compiler::fgWalkResult Compiler::fgChkLocAllocCB(GenTree** pTree, fgWalkData* data)
{
- GenTreePtr tree = *pTree;
+ GenTree* tree = *pTree;
if (tree->gtOper == GT_LCLHEAP)
{
@@ -23617,9 +23617,9 @@ Compiler::fgWalkResult Compiler::fgChkLocAllocCB(GenTreePtr* pTree, fgWalkData*
/*****************************************************************************/
/*static*/
-Compiler::fgWalkResult Compiler::fgChkQmarkCB(GenTreePtr* pTree, fgWalkData* data)
+Compiler::fgWalkResult Compiler::fgChkQmarkCB(GenTree** pTree, fgWalkData* data)
{
- GenTreePtr tree = *pTree;
+ GenTree* tree = *pTree;
if (tree->gtOper == GT_QMARK)
{
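Every walker callback in this file now takes GenTree** rather than the old GenTreePtr*. The extra level of indirection is presumably there so a callback can overwrite the visited node in the slot that holds it, rather than only inspect it. A minimal standalone sketch of that double-pointer visitor pattern on a toy tree; Node, WalkTreePre, and ReplaceZeroCB below are this sketch's own names, not JIT APIs:

#include <cstdio>

// Toy binary node; not the JIT's GenTree.
struct Node
{
    int   value;
    Node* left;
    Node* right;
};

enum class WalkResult { Continue, Abort };

using WalkCallback = WalkResult (*)(Node** slot, void* context);

// Pre-order walk. The callback receives the slot that holds the node, so it
// can overwrite the node in place -- the point of the extra indirection.
WalkResult WalkTreePre(Node** slot, WalkCallback callback, void* context)
{
    if (*slot == nullptr)
    {
        return WalkResult::Continue;
    }
    if (callback(slot, context) == WalkResult::Abort)
    {
        return WalkResult::Abort;
    }
    if (WalkTreePre(&(*slot)->left, callback, context) == WalkResult::Abort)
    {
        return WalkResult::Abort;
    }
    return WalkTreePre(&(*slot)->right, callback, context);
}

// Example callback: replace every node holding 0 with a designated node,
// the way a placeholder gets swapped for its expansion.
WalkResult ReplaceZeroCB(Node** slot, void* context)
{
    if ((*slot)->value == 0)
    {
        *slot = static_cast<Node*>(context); // edit the parent's slot directly
    }
    return WalkResult::Continue;
}

int main()
{
    Node leaf0{0, nullptr, nullptr};
    Node leaf7{7, nullptr, nullptr};
    Node root{1, &leaf0, &leaf7};
    Node expansion{42, nullptr, nullptr};

    Node* rootSlot = &root;
    WalkTreePre(&rootSlot, ReplaceZeroCB, &expansion);

    std::printf("left child is now %d\n", root.left->value); // prints 42
    return 0;
}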
@@ -23732,7 +23732,7 @@ void Compiler::fgRemoveEmptyFinally()
for (GenTreeStmt* stmt = firstBlock->firstStmt(); stmt != nullptr; stmt = stmt->gtNextStmt)
{
- GenTreePtr stmtExpr = stmt->gtStmtExpr;
+ GenTree* stmtExpr = stmt->gtStmtExpr;
if (stmtExpr->gtOper != GT_RETFILT)
{
@@ -24155,7 +24155,7 @@ void Compiler::fgRemoveEmptyTry()
if (block->bbJumpKind == BBJ_EHFINALLYRET)
{
GenTreeStmt* finallyRet = block->lastStmt();
- GenTreePtr finallyRetExpr = finallyRet->gtStmtExpr;
+ GenTree* finallyRetExpr = finallyRet->gtStmtExpr;
assert(finallyRetExpr->gtOper == GT_RETFILT);
fgRemoveStmt(block, finallyRet);
block->bbJumpKind = BBJ_ALWAYS;
@@ -24170,7 +24170,7 @@ void Compiler::fgRemoveEmptyTry()
// since we're removing the enclosing handler.
for (GenTreeStmt* stmt = block->firstStmt(); stmt != nullptr; stmt = stmt->gtNextStmt)
{
- GenTreePtr expr = stmt->gtStmtExpr;
+ GenTree* expr = stmt->gtStmtExpr;
if (expr->gtOper == GT_END_LFIN)
{
const unsigned nestLevel = expr->gtVal.gtVal1;
@@ -24665,7 +24665,7 @@ void Compiler::fgCloneFinally()
if (block->bbJumpKind == BBJ_EHFINALLYRET)
{
GenTreeStmt* finallyRet = newBlock->lastStmt();
- GenTreePtr finallyRetExpr = finallyRet->gtStmtExpr;
+ GenTree* finallyRetExpr = finallyRet->gtStmtExpr;
assert(finallyRetExpr->gtOper == GT_RETFILT);
fgRemoveStmt(newBlock, finallyRet);
newBlock->bbJumpKind = BBJ_ALWAYS;
@@ -25008,7 +25008,7 @@ void Compiler::fgCleanupContinuation(BasicBlock* continuation)
bool foundEndLFin = false;
for (GenTreeStmt* stmt = continuation->firstStmt(); stmt != nullptr; stmt = stmt->gtNextStmt)
{
- GenTreePtr expr = stmt->gtStmtExpr;
+ GenTree* expr = stmt->gtStmtExpr;
if (expr->gtOper == GT_END_LFIN)
{
assert(!foundEndLFin);
@@ -25478,7 +25478,7 @@ private:
//
bool ContainsFatCalli(GenTreeStmt* stmt)
{
- GenTreePtr fatPointerCandidate = stmt->gtStmtExpr;
+ GenTree* fatPointerCandidate = stmt->gtStmtExpr;
if (fatPointerCandidate->OperIsAssignment())
{
fatPointerCandidate = fatPointerCandidate->gtGetOp2();
@@ -25529,7 +25529,7 @@ private:
// call tree node pointer.
GenTreeCall* GetCall(GenTreeStmt* callStmt)
{
- GenTreePtr tree = callStmt->gtStmtExpr;
+ GenTree* tree = callStmt->gtStmtExpr;
GenTreeCall* call = nullptr;
if (doesReturnValue)
{
@@ -25567,14 +25567,14 @@ private:
//
void CreateCheck()
{
- checkBlock = CreateAndInsertBasicBlock(BBJ_COND, currBlock);
- GenTreePtr fatPointerMask = new (compiler, GT_CNS_INT) GenTreeIntCon(TYP_I_IMPL, FAT_POINTER_MASK);
- GenTreePtr fptrAddressCopy = compiler->gtCloneExpr(fptrAddress);
- GenTreePtr fatPointerAnd = compiler->gtNewOperNode(GT_AND, TYP_I_IMPL, fptrAddressCopy, fatPointerMask);
- GenTreePtr zero = new (compiler, GT_CNS_INT) GenTreeIntCon(TYP_I_IMPL, 0);
- GenTreePtr fatPointerCmp = compiler->gtNewOperNode(GT_NE, TYP_INT, fatPointerAnd, zero);
- GenTreePtr jmpTree = compiler->gtNewOperNode(GT_JTRUE, TYP_VOID, fatPointerCmp);
- GenTreePtr jmpStmt = compiler->fgNewStmtFromTree(jmpTree, stmt->gtStmt.gtStmtILoffsx);
+ checkBlock = CreateAndInsertBasicBlock(BBJ_COND, currBlock);
+ GenTree* fatPointerMask = new (compiler, GT_CNS_INT) GenTreeIntCon(TYP_I_IMPL, FAT_POINTER_MASK);
+ GenTree* fptrAddressCopy = compiler->gtCloneExpr(fptrAddress);
+ GenTree* fatPointerAnd = compiler->gtNewOperNode(GT_AND, TYP_I_IMPL, fptrAddressCopy, fatPointerMask);
+ GenTree* zero = new (compiler, GT_CNS_INT) GenTreeIntCon(TYP_I_IMPL, 0);
+ GenTree* fatPointerCmp = compiler->gtNewOperNode(GT_NE, TYP_INT, fatPointerAnd, zero);
+ GenTree* jmpTree = compiler->gtNewOperNode(GT_JTRUE, TYP_VOID, fatPointerCmp);
+ GenTree* jmpStmt = compiler->fgNewStmtFromTree(jmpTree, stmt->gtStmt.gtStmtILoffsx);
compiler->fgInsertStmtAtEnd(checkBlock, jmpStmt);
}
@@ -25583,8 +25583,8 @@ private:
//
void CreateThen()
{
- thenBlock = CreateAndInsertBasicBlock(BBJ_ALWAYS, checkBlock);
- GenTreePtr nonFatCallStmt = compiler->gtCloneExpr(stmt)->AsStmt();
+ thenBlock = CreateAndInsertBasicBlock(BBJ_ALWAYS, checkBlock);
+ GenTree* nonFatCallStmt = compiler->gtCloneExpr(stmt)->AsStmt();
compiler->fgInsertStmtAtEnd(thenBlock, nonFatCallStmt);
}
@@ -25595,9 +25595,9 @@ private:
{
elseBlock = CreateAndInsertBasicBlock(BBJ_NONE, thenBlock);
- GenTreePtr fixedFptrAddress = GetFixedFptrAddress();
- GenTreePtr actualCallAddress = compiler->gtNewOperNode(GT_IND, pointerType, fixedFptrAddress);
- GenTreePtr hiddenArgument = GetHiddenArgument(fixedFptrAddress);
+ GenTree* fixedFptrAddress = GetFixedFptrAddress();
+ GenTree* actualCallAddress = compiler->gtNewOperNode(GT_IND, pointerType, fixedFptrAddress);
+ GenTree* hiddenArgument = GetHiddenArgument(fixedFptrAddress);
GenTreeStmt* fatStmt = CreateFatCallStmt(actualCallAddress, hiddenArgument);
compiler->fgInsertStmtAtEnd(elseBlock, fatStmt);
@@ -25629,10 +25629,10 @@ private:
//
// Return Value:
// address without fat pointer bit set.
- GenTreePtr GetFixedFptrAddress()
+ GenTree* GetFixedFptrAddress()
{
- GenTreePtr fptrAddressCopy = compiler->gtCloneExpr(fptrAddress);
- GenTreePtr fatPointerMask = new (compiler, GT_CNS_INT) GenTreeIntCon(TYP_I_IMPL, FAT_POINTER_MASK);
+ GenTree* fptrAddressCopy = compiler->gtCloneExpr(fptrAddress);
+ GenTree* fatPointerMask = new (compiler, GT_CNS_INT) GenTreeIntCon(TYP_I_IMPL, FAT_POINTER_MASK);
return compiler->gtNewOperNode(GT_SUB, pointerType, fptrAddressCopy, fatPointerMask);
}
@@ -25644,13 +25644,13 @@ private:
//
// Return Value:
// generic context hidden argument.
- GenTreePtr GetHiddenArgument(GenTreePtr fixedFptrAddress)
+ GenTree* GetHiddenArgument(GenTree* fixedFptrAddress)
{
- GenTreePtr fixedFptrAddressCopy = compiler->gtCloneExpr(fixedFptrAddress);
- GenTreePtr wordSize = new (compiler, GT_CNS_INT) GenTreeIntCon(TYP_I_IMPL, genTypeSize(TYP_I_IMPL));
- GenTreePtr hiddenArgumentPtrPtr =
+ GenTree* fixedFptrAddressCopy = compiler->gtCloneExpr(fixedFptrAddress);
+ GenTree* wordSize = new (compiler, GT_CNS_INT) GenTreeIntCon(TYP_I_IMPL, genTypeSize(TYP_I_IMPL));
+ GenTree* hiddenArgumentPtrPtr =
compiler->gtNewOperNode(GT_ADD, pointerType, fixedFptrAddressCopy, wordSize);
- GenTreePtr hiddenArgumentPtr = compiler->gtNewOperNode(GT_IND, pointerType, hiddenArgumentPtrPtr);
+ GenTree* hiddenArgumentPtr = compiler->gtNewOperNode(GT_IND, pointerType, hiddenArgumentPtrPtr);
return compiler->gtNewOperNode(GT_IND, fixedFptrAddressCopy->TypeGet(), hiddenArgumentPtr);
}
@@ -25663,10 +25663,10 @@ private:
//
// Return Value:
// created call node.
- GenTreeStmt* CreateFatCallStmt(GenTreePtr actualCallAddress, GenTreePtr hiddenArgument)
+ GenTreeStmt* CreateFatCallStmt(GenTree* actualCallAddress, GenTree* hiddenArgument)
{
GenTreeStmt* fatStmt = compiler->gtCloneExpr(stmt)->AsStmt();
- GenTreePtr fatTree = fatStmt->gtStmtExpr;
+ GenTree* fatTree = fatStmt->gtStmtExpr;
GenTreeCall* fatCall = GetCall(fatStmt);
fatCall->gtCallAddr = actualCallAddress;
AddHiddenArgument(fatCall, hiddenArgument);
@@ -25680,14 +25680,14 @@ private:
// fatCall - fat call node
// hiddenArgument - generic context hidden argument
//
- void AddHiddenArgument(GenTreeCall* fatCall, GenTreePtr hiddenArgument)
+ void AddHiddenArgument(GenTreeCall* fatCall, GenTree* hiddenArgument)
{
GenTreeArgList* oldArgs = fatCall->gtCallArgs;
GenTreeArgList* newArgs;
#if USER_ARGS_COME_LAST
if (fatCall->HasRetBufArg())
{
- GenTreePtr retBuffer = oldArgs->Current();
+ GenTree* retBuffer = oldArgs->Current();
GenTreeArgList* rest = oldArgs->Rest();
newArgs = compiler->gtNewListNode(hiddenArgument, rest);
newArgs = compiler->gtNewListNode(retBuffer, newArgs);
@@ -25710,7 +25710,7 @@ private:
// argList - fat call node
// hiddenArgument - generic context hidden argument
//
- void AddArgumentToTail(GenTreeArgList* argList, GenTreePtr hiddenArgument)
+ void AddArgumentToTail(GenTreeArgList* argList, GenTree* hiddenArgument)
{
GenTreeArgList* iterator = argList;
while (iterator->Rest() != nullptr)
@@ -25757,7 +25757,7 @@ private:
BasicBlock* elseBlock;
GenTreeStmt* stmt;
GenTreeCall* origCall;
- GenTreePtr fptrAddress;
+ GenTree* fptrAddress;
var_types pointerType;
bool doesReturnValue;
@@ -25773,9 +25773,9 @@ private:
//------------------------------------------------------------------------
// fgDebugCheckFatPointerCandidates: callback to make sure there are no more GTF_CALL_M_FAT_POINTER_CHECK calls.
//
-Compiler::fgWalkResult Compiler::fgDebugCheckFatPointerCandidates(GenTreePtr* pTree, fgWalkData* data)
+Compiler::fgWalkResult Compiler::fgDebugCheckFatPointerCandidates(GenTree** pTree, fgWalkData* data)
{
- GenTreePtr tree = *pTree;
+ GenTree* tree = *pTree;
if (tree->IsCall())
{
assert(!tree->AsCall()->IsFatPointerCandidate());
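The hunks above only rename pointer types inside the fat-pointer transformation, but the helpers they touch (CreateCheck, CreateThen, CreateElse, GetFixedFptrAddress, GetHiddenArgument) together build one recognizable shape. The stand-alone C++ sketch below shows that shape in source form rather than as JIT IR; the mask value and the descriptor layout (code address first, then a pointer to the hidden generic-context argument) are assumptions made for illustration only, not taken from the patch.

#include <cstdint>

using Fn = int (*)(int);

constexpr std::uintptr_t kFatPointerMask = 0x2; // assumed stand-in for FAT_POINTER_MASK

int CallThroughMaybeFatPointer(std::uintptr_t fptrAddress, int arg)
{
    // CreateCheck: branch on (address & FAT_POINTER_MASK) != 0.
    if ((fptrAddress & kFatPointerMask) != 0)
    {
        // CreateElse: strip the tag bit (GetFixedFptrAddress), then load the real
        // call target and the hidden argument from the descriptor it points to.
        std::uintptr_t fixed  = fptrAddress - kFatPointerMask;
        Fn             target = *reinterpret_cast<Fn*>(fixed);
        void*          hidden = **reinterpret_cast<void***>(fixed + sizeof(void*)); // GetHiddenArgument: two loads
        (void)hidden; // in the transformed IR this is appended to the call's argument list
        return target(arg);
    }
    // CreateThen: an ordinary code address, call it as-is.
    return reinterpret_cast<Fn>(fptrAddress)(arg);
}

In the hunks themselves the same split is expressed as three basic blocks (the BBJ_COND check block, the BBJ_ALWAYS then block, and the BBJ_NONE else block) rather than an if/else.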
diff --git a/src/jit/gcinfo.cpp b/src/jit/gcinfo.cpp
index f330a86042..a91bef2629 100644
--- a/src/jit/gcinfo.cpp
+++ b/src/jit/gcinfo.cpp
@@ -229,7 +229,7 @@ void GCInfo::gcMarkRegPtrVal(regNumber reg, var_types type)
/*****************************************************************************/
-GCInfo::WriteBarrierForm GCInfo::gcIsWriteBarrierCandidate(GenTreePtr tgt, GenTreePtr assignVal)
+GCInfo::WriteBarrierForm GCInfo::gcIsWriteBarrierCandidate(GenTree* tgt, GenTree* assignVal)
{
#if FEATURE_WRITE_BARRIER
@@ -297,7 +297,7 @@ GCInfo::WriteBarrierForm GCInfo::gcIsWriteBarrierCandidate(GenTreePtr tgt, GenTr
return WBF_NoBarrier;
}
-bool GCInfo::gcIsWriteBarrierAsgNode(GenTreePtr op)
+bool GCInfo::gcIsWriteBarrierAsgNode(GenTree* op)
{
if (op->gtOper == GT_ASG)
{
@@ -322,7 +322,7 @@ bool GCInfo::gcIsWriteBarrierAsgNode(GenTreePtr op)
*/
#ifdef LEGACY_BACKEND
-void GCInfo::gcMarkRegPtrVal(GenTreePtr tree)
+void GCInfo::gcMarkRegPtrVal(GenTree* tree)
{
if (varTypeIsGC(tree->TypeGet()))
{
@@ -672,7 +672,7 @@ void GCInfo::gcRegPtrSetInit()
#endif // JIT32_GCENCODER
-GCInfo::WriteBarrierForm GCInfo::gcWriteBarrierFormFromTargetAddress(GenTreePtr tgtAddr)
+GCInfo::WriteBarrierForm GCInfo::gcWriteBarrierFormFromTargetAddress(GenTree* tgtAddr)
{
GCInfo::WriteBarrierForm result = GCInfo::WBF_BarrierUnknown; // Default case, we have no information.
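Every gentree.cpp hunk that follows applies the same two-part rewrite: the GenTreePtr alias is replaced by the underlying GenTree* type, and declarations that grouped several pointers on one line (as in the gtSetEvalOrder and gtFoldExprSpecial hunks further down) are split so each pointer is declared separately. A minimal, hypothetical stub contrasting the two shapes; the types and functions here are stand-ins, not CoreCLR code.

struct GenTree { int gtOper; };
typedef GenTree* GenTreePtr; // stand-in for the alias being removed

// Old shape: alias type, several pointers in one declaration.
static GenTree* PickOld(GenTreePtr tree)
{
    GenTreePtr op1, op2;
    op1 = tree;
    op2 = (tree->gtOper == 0) ? tree : op1;
    return op2;
}

// New shape: plain GenTree*, one pointer per declaration.
static GenTree* PickNew(GenTree* tree)
{
    GenTree* op1 = tree;
    GenTree* op2 = (tree->gtOper == 0) ? tree : op1;
    return op2;
}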
diff --git a/src/jit/gentree.cpp b/src/jit/gentree.cpp
index ef7da6d3a3..eb13bf9ff2 100644
--- a/src/jit/gentree.cpp
+++ b/src/jit/gentree.cpp
@@ -454,8 +454,8 @@ void GenTree::ReplaceWith(GenTree* src, Compiler* comp)
RecordOperBashing(OperGet(), src->OperGet()); // nop unless NODEBASH_STATS is enabled
- GenTreePtr prev = gtPrev;
- GenTreePtr next = gtNext;
+ GenTree* prev = gtPrev;
+ GenTree* next = gtNext;
// The VTable pointer is copied intentionally here
memcpy((void*)this, (void*)src, src->GetNodeSize());
this->gtPrev = prev;
@@ -648,7 +648,7 @@ void Compiler::fgWalkAllTreesPre(fgWalkPreFn* visitor, void* pCallBackData)
for (block = fgFirstBB; block; block = block->bbNext)
{
- GenTreePtr tree;
+ GenTree* tree;
for (tree = block->bbTreeList; tree; tree = tree->gtNext)
{
@@ -667,7 +667,7 @@ void Compiler::fgWalkAllTreesPre(fgWalkPreFn* visitor, void* pCallBackData)
//
// Return Value:
// None
-void GenTree::CopyReg(GenTreePtr from)
+void GenTree::CopyReg(GenTree* from)
{
// To do the copy, use _gtRegPair, which must be bigger than _gtRegNum. Note that the values
// might be undefined (so gtRegTag == GT_REGTAG_NONE).
@@ -1204,7 +1204,7 @@ unsigned GenTreePutArgStk::getArgSize()
* Returns non-zero if the two trees are identical.
*/
-bool GenTree::Compare(GenTreePtr op1, GenTreePtr op2, bool swapOK)
+bool GenTree::Compare(GenTree* op1, GenTree* op2, bool swapOK)
{
genTreeOps oper;
unsigned kind;
@@ -1348,7 +1348,7 @@ AGAIN:
{
if (IsExOp(kind))
{
- // ExOp operators extend unary operator with extra, non-GenTreePtr members. In many cases,
+ // ExOp operators extend unary operator with extra, non-GenTree* members. In many cases,
// these should be included in the comparison.
switch (oper)
{
@@ -1386,7 +1386,7 @@ AGAIN:
{
if (IsExOp(kind))
{
- // ExOp operators extend unary operator with extra, non-GenTreePtr members. In many cases,
+ // ExOp operators extend unary operator with extra, non-GenTree* members. In many cases,
// these should be included in the hash code.
switch (oper)
{
@@ -1617,7 +1617,7 @@ AGAIN:
* Returns non-zero if the given tree contains a use of a local #lclNum.
*/
-bool Compiler::gtHasRef(GenTreePtr tree, ssize_t lclNum, bool defOnly)
+bool Compiler::gtHasRef(GenTree* tree, ssize_t lclNum, bool defOnly)
{
genTreeOps oper;
unsigned kind;
@@ -1865,10 +1865,10 @@ struct AddrTakenDsc
};
/* static */
-Compiler::fgWalkResult Compiler::gtHasLocalsWithAddrOpCB(GenTreePtr* pTree, fgWalkData* data)
+Compiler::fgWalkResult Compiler::gtHasLocalsWithAddrOpCB(GenTree** pTree, fgWalkData* data)
{
- GenTreePtr tree = *pTree;
- Compiler* comp = data->compiler;
+ GenTree* tree = *pTree;
+ Compiler* comp = data->compiler;
if (tree->gtOper == GT_LCL_VAR)
{
@@ -1891,7 +1891,7 @@ Compiler::fgWalkResult Compiler::gtHasLocalsWithAddrOpCB(GenTreePtr* pTree, fgWa
* flag(s) set.
*/
-bool Compiler::gtHasLocalsWithAddrOp(GenTreePtr tree)
+bool Compiler::gtHasLocalsWithAddrOp(GenTree* tree)
{
AddrTakenDsc desc;
@@ -1932,7 +1932,7 @@ unsigned Compiler::gtHashValue(GenTree* tree)
unsigned hash = 0;
- GenTreePtr temp;
+ GenTree* temp;
AGAIN:
assert(tree);
@@ -2012,7 +2012,7 @@ AGAIN:
/* Is it a 'simple' unary/binary operator? */
- GenTreePtr op1;
+ GenTree* op1;
if (kind & GTK_UNOP)
{
@@ -2021,7 +2021,7 @@ AGAIN:
if (GenTree::IsExOp(kind))
{
- // ExOp operators extend operators with extra, non-GenTreePtr members. In many cases,
+ // ExOp operators extend operators with extra, non-GenTree* members. In many cases,
// these should be included in the hash code.
switch (oper)
{
@@ -2074,7 +2074,7 @@ AGAIN:
{
if (GenTree::IsExOp(kind))
{
- // ExOp operators extend operators with extra, non-GenTreePtr members. In many cases,
+ // ExOp operators extend operators with extra, non-GenTree* members. In many cases,
// these should be included in the hash code.
switch (oper)
{
@@ -2126,8 +2126,8 @@ AGAIN:
}
}
- op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtOp.gtOp2;
+ op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtOp.gtOp2;
/* Is there a second sub-operand? */
@@ -2288,7 +2288,7 @@ DONE:
* If we encounter an expression that is equal to *findPtr we set *findPtr
* to NULL.
*/
-bool Compiler::lvaLclVarRefs(GenTreePtr tree, GenTreePtr* findPtr, varRefKinds* refsPtr, void* result)
+bool Compiler::lvaLclVarRefs(GenTree* tree, GenTree** findPtr, varRefKinds* refsPtr, void* result)
{
genTreeOps oper;
unsigned kind;
@@ -2558,7 +2558,7 @@ AGAIN:
}
bool Compiler::lvaLclVarRefsAccum(
- GenTreePtr tree, GenTreePtr* findPtr, varRefKinds* refsPtr, ALLVARSET_TP* allVars, VARSET_TP* trkdVars)
+ GenTree* tree, GenTree** findPtr, varRefKinds* refsPtr, ALLVARSET_TP* allVars, VARSET_TP* trkdVars)
{
if (findPtr)
{
@@ -2583,7 +2583,7 @@ bool Compiler::lvaLclVarRefsAccum(
return true;
}
-void Compiler::lvaLclVarRefsAccumIntoRes(GenTreePtr* findPtr,
+void Compiler::lvaLclVarRefsAccumIntoRes(GenTree** findPtr,
void* result,
ALLVARSET_VALARG_TP allVars,
VARSET_VALARG_TP trkdVars)
@@ -2685,7 +2685,7 @@ genTreeOps GenTree::SwapRelop(genTreeOps relop)
* Reverse the meaning of the given test condition.
*/
-GenTreePtr Compiler::gtReverseCond(GenTree* tree)
+GenTree* Compiler::gtReverseCond(GenTree* tree)
{
if (tree->OperIsCompare())
{
@@ -2733,8 +2733,8 @@ bool GenTree::gtIsValid64RsltMul()
return false;
}
- GenTreePtr op1 = gtOp.gtOp1;
- GenTreePtr op2 = gtOp.gtOp2;
+ GenTree* op1 = gtOp.gtOp1;
+ GenTree* op2 = gtOp.gtOp2;
if (TypeGet() != TYP_LONG || op1->TypeGet() != TYP_LONG || op2->TypeGet() != TYP_LONG)
{
@@ -2811,7 +2811,7 @@ unsigned Compiler::gtSetListOrder(GenTree* list, bool isListCallArgs, bool callA
list = listNodes.Pop();
assert(list && list->OperIsAnyList());
- GenTreePtr next = list->gtOp.gtOp2;
+ GenTree* next = list->gtOp.gtOp2;
unsigned level = 0;
unsigned ftreg = 0;
@@ -2836,8 +2836,8 @@ unsigned Compiler::gtSetListOrder(GenTree* list, bool isListCallArgs, bool callA
costSz += next->gtCostSz;
}
- GenTreePtr op1 = list->gtOp.gtOp1;
- unsigned lvl = gtSetEvalOrder(op1);
+ GenTree* op1 = list->gtOp.gtOp1;
+ unsigned lvl = gtSetEvalOrder(op1);
#if FEATURE_STACK_FP_X87
// restore the FP level
@@ -2956,8 +2956,8 @@ unsigned Compiler::gtSetListOrder(GenTree* list, bool isListCallArgs, bool callA
//
void Compiler::gtWalkOp(GenTree** op1WB, GenTree** op2WB, GenTree* base, bool constOnly)
{
- GenTreePtr op1 = *op1WB;
- GenTreePtr op2 = *op2WB;
+ GenTree* op1 = *op1WB;
+ GenTree* op2 = *op2WB;
op1 = op1->gtEffectiveVal();
@@ -2977,7 +2977,7 @@ void Compiler::gtWalkOp(GenTree** op1WB, GenTree** op2WB, GenTree* base, bool co
// (Why? Also, presumably op2 is not a GT_NOP in this case?)
if (op1->gtOper == GT_NOP)
{
- GenTreePtr tmp;
+ GenTree* tmp;
tmp = op1;
op1 = op2;
@@ -3019,7 +3019,7 @@ void Compiler::gtWalkOp(GenTree** op1WB, GenTree** op2WB, GenTree* base, bool co
* Here, we are planning to generate the address mode [edx+4*eax], where eax = idx and edx = the GT_COMMA expression.
* To check adr equivalence with op2, we need to walk down the GT_ADD tree just like gtWalkOp() does.
*/
-GenTreePtr Compiler::gtWalkOpEffectiveVal(GenTreePtr op)
+GenTree* Compiler::gtWalkOpEffectiveVal(GenTree* op)
{
for (;;)
{
@@ -3429,8 +3429,8 @@ unsigned Compiler::gtSetEvalOrder(GenTree* tree)
int lvlb; // preference for op2
unsigned lvl2; // scratch variable
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtGetOp2IfPresent();
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtGetOp2IfPresent();
costEx = 0;
costSz = 0;
@@ -3645,9 +3645,9 @@ unsigned Compiler::gtSetEvalOrder(GenTree* tree)
// If we have a GT_ADDR of an GT_IND we can just copy the costs from indOp1
if (op1->OperGet() == GT_IND)
{
- GenTreePtr indOp1 = op1->gtOp.gtOp1;
- costEx = indOp1->gtCostEx;
- costSz = indOp1->gtCostSz;
+ GenTree* indOp1 = op1->gtOp.gtOp1;
+ costEx = indOp1->gtCostEx;
+ costSz = indOp1->gtCostSz;
}
break;
@@ -3720,13 +3720,13 @@ unsigned Compiler::gtSetEvalOrder(GenTree* tree)
#if SCALED_ADDR_MODES
unsigned mul;
#endif
- unsigned cns;
- GenTreePtr base;
- GenTreePtr idx;
+ unsigned cns;
+ GenTree* base;
+ GenTree* idx;
// See if we can form a complex addressing mode.
- GenTreePtr addr = op1->gtEffectiveVal();
+ GenTree* addr = op1->gtEffectiveVal();
bool doAddrMode = true;
// See if we can form a complex addressing mode.
@@ -3825,7 +3825,7 @@ unsigned Compiler::gtSetEvalOrder(GenTree* tree)
//
addrmodeCount--;
- GenTreePtr tmp = addr;
+ GenTree* tmp = addr;
while (addrmodeCount > 0)
{
// decrement the gtCosts for the interior GT_ADD or GT_LSH node by the remaining
@@ -3835,8 +3835,8 @@ unsigned Compiler::gtSetEvalOrder(GenTree* tree)
addrmodeCount--;
if (addrmodeCount > 0)
{
- GenTreePtr tmpOp1 = tmp->gtOp.gtOp1;
- GenTreePtr tmpOp2 = tmp->gtGetOp2();
+ GenTree* tmpOp1 = tmp->gtOp.gtOp1;
+ GenTree* tmpOp2 = tmp->gtGetOp2();
assert(tmpOp2 != nullptr);
if ((tmpOp1 != base) && (tmpOp1->OperGet() == GT_ADD))
@@ -3947,7 +3947,7 @@ unsigned Compiler::gtSetEvalOrder(GenTree* tree)
assert(base != NULL);
#endif
- INDEBUG(GenTreePtr op1Save = addr);
+ INDEBUG(GenTree* op1Save = addr);
// Walk 'addr' identifying non-overflow ADDs that will be part of the address mode.
// Note that we will be modifying 'op1' and 'op2' so that eventually they should
@@ -4004,7 +4004,7 @@ unsigned Compiler::gtSetEvalOrder(GenTree* tree)
op2->gtFlags |= GTF_ADDRMODE_NO_CSE;
// We may have eliminated multiple shifts and multiplies in the addressing mode,
// so navigate down through them to get to "idx".
- GenTreePtr op2op1 = op2->gtOp.gtOp1;
+ GenTree* op2op1 = op2->gtOp.gtOp1;
while ((op2op1->gtOper == GT_LSH || op2op1->gtOper == GT_MUL) && op2op1 != idx)
{
op2op1->gtFlags |= GTF_ADDRMODE_NO_CSE;
@@ -4355,7 +4355,7 @@ unsigned Compiler::gtSetEvalOrder(GenTree* tree)
bool bReverseInAssignment = false;
if (GenTree::OperIsAssignment(oper))
{
- GenTreePtr op1Val = op1;
+ GenTree* op1Val = op1;
if (tree->gtOper == GT_ASG)
{
@@ -4518,8 +4518,9 @@ unsigned Compiler::gtSetEvalOrder(GenTree* tree)
}
/* We try to swap operands if the second one is more expensive */
- bool tryToSwap;
- GenTreePtr opA, opB;
+ bool tryToSwap;
+ GenTree* opA;
+ GenTree* opB;
if (tree->gtFlags & GTF_REVERSE_OPS)
{
@@ -4697,7 +4698,7 @@ unsigned Compiler::gtSetEvalOrder(GenTree* tree)
if (tree->gtCall.gtCallObjp)
{
- GenTreePtr thisVal = tree->gtCall.gtCallObjp;
+ GenTree* thisVal = tree->gtCall.gtCallObjp;
lvl2 = gtSetEvalOrder(thisVal);
if (level < lvl2)
@@ -4761,7 +4762,7 @@ unsigned Compiler::gtSetEvalOrder(GenTree* tree)
assert(tree->gtCall.gtCallCookie == nullptr || tree->gtCall.gtCallCookie->gtOper == GT_CNS_INT ||
tree->gtCall.gtCallCookie->gtOper == GT_IND);
- GenTreePtr indirect = tree->gtCall.gtCallAddr;
+ GenTree* indirect = tree->gtCall.gtCallAddr;
lvl2 = gtSetEvalOrder(indirect);
if (level < lvl2)
@@ -5044,7 +5045,7 @@ DONE:
#if FEATURE_STACK_FP_X87
/*****************************************************************************/
-void Compiler::gtComputeFPlvls(GenTreePtr tree)
+void Compiler::gtComputeFPlvls(GenTree* tree)
{
genTreeOps oper;
unsigned kind;
@@ -5072,8 +5073,8 @@ void Compiler::gtComputeFPlvls(GenTreePtr tree)
if (kind & GTK_SMPOP)
{
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtGetOp2IfPresent();
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtGetOp2IfPresent();
/* Check for some special cases */
@@ -5363,7 +5364,7 @@ unsigned GenTree::GetScaledIndex()
* "*addr" to the other argument.
*/
-bool GenTree::IsAddWithI32Const(GenTreePtr* addr, int* offset)
+bool GenTree::IsAddWithI32Const(GenTree** addr, int* offset)
{
if (OperGet() == GT_ADD)
{
@@ -5407,7 +5408,7 @@ bool GenTree::IsAddWithI32Const(GenTreePtr* addr, int* offset)
// later gets converted to a GT_FIELD_LIST with two GT_LCL_FLDs in Lower/LowerXArch.
//
-GenTreePtr* GenTree::gtGetChildPointer(GenTreePtr parent) const
+GenTree** GenTree::gtGetChildPointer(GenTree* parent) const
{
switch (parent->OperGet())
@@ -5529,11 +5530,11 @@ GenTreePtr* GenTree::gtGetChildPointer(GenTreePtr parent) const
}
if (this == call->gtCallArgs)
{
- return reinterpret_cast<GenTreePtr*>(&(call->gtCallArgs));
+ return reinterpret_cast<GenTree**>(&(call->gtCallArgs));
}
if (this == call->gtCallLateArgs)
{
- return reinterpret_cast<GenTreePtr*>(&(call->gtCallLateArgs));
+ return reinterpret_cast<GenTree**>(&(call->gtCallLateArgs));
}
if (this == call->gtControlExpr)
{
@@ -5928,7 +5929,7 @@ void GenTree::ReplaceOperand(GenTree** useEdge, GenTree* replacement)
//
// Arguments:
-// parentChildPointer - A pointer to a GenTreePtr* (yes, that's three
+// parentChildPointer - A pointer to a GenTree** (yes, that's three
// levels, i.e. GenTree ***), which if non-null,
// will be set to point to the field in the parent
// that points to this node.
@@ -5941,11 +5942,11 @@ void GenTree::ReplaceOperand(GenTree** useEdge, GenTree* replacement)
// To enable the child to be replaced, it accepts an argument, parentChildPointer that, if non-null,
// will be set to point to the child pointer in the parent that points to this node.
-GenTreePtr GenTree::gtGetParent(GenTreePtr** parentChildPtrPtr) const
+GenTree* GenTree::gtGetParent(GenTree*** parentChildPtrPtr) const
{
// Find the parent node; it must be after this node in the execution order.
- GenTreePtr* parentChildPtr = nullptr;
- GenTreePtr parent;
+ GenTree** parentChildPtr = nullptr;
+ GenTree* parent;
for (parent = gtNext; parent != nullptr; parent = parent->gtNext)
{
parentChildPtr = gtGetChildPointer(parent);
@@ -5984,7 +5985,7 @@ bool GenTree::OperRequiresAsgFlag()
bool GenTree::OperMayThrow(Compiler* comp)
{
- GenTreePtr op;
+ GenTree* op;
switch (gtOper)
{
@@ -6193,7 +6194,7 @@ void GenTree::SetVtableForOper(genTreeOps oper)
}
#endif // DEBUGGABLE_GENTREE
-GenTreePtr Compiler::gtNewOperNode(genTreeOps oper, var_types type, GenTreePtr op1, GenTreePtr op2)
+GenTree* Compiler::gtNewOperNode(genTreeOps oper, var_types type, GenTree* op1, GenTree* op2)
{
assert(op1 != nullptr);
assert(op2 != nullptr);
@@ -6202,12 +6203,12 @@ GenTreePtr Compiler::gtNewOperNode(genTreeOps oper, var_types type, GenTreePtr o
// should call the appropriate constructor for the extended type.
assert(!GenTree::IsExOp(GenTree::OperKind(oper)));
- GenTreePtr node = new (this, oper) GenTreeOp(oper, type, op1, op2);
+ GenTree* node = new (this, oper) GenTreeOp(oper, type, op1, op2);
return node;
}
-GenTreePtr Compiler::gtNewQmarkNode(var_types type, GenTreePtr cond, GenTreePtr colon)
+GenTree* Compiler::gtNewQmarkNode(var_types type, GenTree* cond, GenTree* colon)
{
compQmarkUsed = true;
GenTree* result = new (this, GT_QMARK) GenTreeQmark(type, cond, colon, this);
@@ -6220,7 +6221,7 @@ GenTreePtr Compiler::gtNewQmarkNode(var_types type, GenTreePtr cond, GenTreePtr
return result;
}
-GenTreeQmark::GenTreeQmark(var_types type, GenTreePtr cond, GenTreePtr colonOp, Compiler* comp)
+GenTreeQmark::GenTreeQmark(var_types type, GenTree* cond, GenTree* colonOp, Compiler* comp)
: GenTreeOp(GT_QMARK, type, cond, colonOp)
#ifdef LEGACY_BACKEND
, gtThenLiveSet(VarSetOps::UninitVal())
@@ -6250,9 +6251,9 @@ GenTree* Compiler::gtNewPhysRegNode(regNumber reg, var_types type)
}
#ifndef LEGACY_BACKEND
-GenTreePtr Compiler::gtNewJmpTableNode()
+GenTree* Compiler::gtNewJmpTableNode()
{
- GenTreePtr node = new (this, GT_JMPTABLE) GenTreeJumpTable(TYP_INT);
+ GenTree* node = new (this, GT_JMPTABLE) GenTreeJumpTable(TYP_INT);
node->gtJumpTable.gtJumpTableAddr = 0;
return node;
}
@@ -6310,10 +6311,10 @@ unsigned Compiler::gtTokenToIconFlags(unsigned token)
// If the indType is GT_REF we also mark the indNode as GTF_GLOB_REF
//
-GenTreePtr Compiler::gtNewIndOfIconHandleNode(var_types indType, size_t addr, unsigned iconFlags, bool isInvariant)
+GenTree* Compiler::gtNewIndOfIconHandleNode(var_types indType, size_t addr, unsigned iconFlags, bool isInvariant)
{
- GenTreePtr addrNode = gtNewIconHandleNode(addr, iconFlags);
- GenTreePtr indNode = gtNewOperNode(GT_IND, indType, addrNode);
+ GenTree* addrNode = gtNewIconHandleNode(addr, iconFlags);
+ GenTree* indNode = gtNewOperNode(GT_IND, indType, addrNode);
// This indirection won't cause an exception.
//
@@ -6345,10 +6346,10 @@ GenTreePtr Compiler::gtNewIndOfIconHandleNode(var_types indType, size_t addr, un
* If the handle needs to be accessed via an indirection, pValue points to it.
*/
-GenTreePtr Compiler::gtNewIconEmbHndNode(void* value, void* pValue, unsigned iconFlags, void* compileTimeHandle)
+GenTree* Compiler::gtNewIconEmbHndNode(void* value, void* pValue, unsigned iconFlags, void* compileTimeHandle)
{
- GenTreePtr iconNode;
- GenTreePtr handleNode;
+ GenTree* iconNode;
+ GenTree* handleNode;
if (value != nullptr)
{
@@ -6390,9 +6391,9 @@ GenTreePtr Compiler::gtNewIconEmbHndNode(void* value, void* pValue, unsigned ico
}
/*****************************************************************************/
-GenTreePtr Compiler::gtNewStringLiteralNode(InfoAccessType iat, void* pValue)
+GenTree* Compiler::gtNewStringLiteralNode(InfoAccessType iat, void* pValue)
{
- GenTreePtr tree = nullptr;
+ GenTree* tree = nullptr;
switch (iat)
{
@@ -6428,25 +6429,25 @@ GenTreePtr Compiler::gtNewStringLiteralNode(InfoAccessType iat, void* pValue)
/*****************************************************************************/
-GenTreePtr Compiler::gtNewLconNode(__int64 value)
+GenTree* Compiler::gtNewLconNode(__int64 value)
{
#ifdef _TARGET_64BIT_
- GenTreePtr node = new (this, GT_CNS_INT) GenTreeIntCon(TYP_LONG, value);
+ GenTree* node = new (this, GT_CNS_INT) GenTreeIntCon(TYP_LONG, value);
#else
- GenTreePtr node = new (this, GT_CNS_LNG) GenTreeLngCon(value);
+ GenTree* node = new (this, GT_CNS_LNG) GenTreeLngCon(value);
#endif
return node;
}
-GenTreePtr Compiler::gtNewDconNode(double value)
+GenTree* Compiler::gtNewDconNode(double value)
{
- GenTreePtr node = new (this, GT_CNS_DBL) GenTreeDblCon(value);
+ GenTree* node = new (this, GT_CNS_DBL) GenTreeDblCon(value);
return node;
}
-GenTreePtr Compiler::gtNewSconNode(int CPX, CORINFO_MODULE_HANDLE scpHandle)
+GenTree* Compiler::gtNewSconNode(int CPX, CORINFO_MODULE_HANDLE scpHandle)
{
#if SMALL_TREE_NODES
@@ -6455,17 +6456,17 @@ GenTreePtr Compiler::gtNewSconNode(int CPX, CORINFO_MODULE_HANDLE scpHandle)
assert(GenTree::s_gtNodeSizes[GT_CALL] > GenTree::s_gtNodeSizes[GT_CNS_STR]);
- GenTreePtr node = new (this, GT_CALL) GenTreeStrCon(CPX, scpHandle DEBUGARG(/*largeNode*/ true));
+ GenTree* node = new (this, GT_CALL) GenTreeStrCon(CPX, scpHandle DEBUGARG(/*largeNode*/ true));
#else
- GenTreePtr node = new (this, GT_CNS_STR) GenTreeStrCon(CPX, scpHandle DEBUGARG(/*largeNode*/ true));
+ GenTree* node = new (this, GT_CNS_STR) GenTreeStrCon(CPX, scpHandle DEBUGARG(/*largeNode*/ true));
#endif
return node;
}
-GenTreePtr Compiler::gtNewZeroConNode(var_types type)
+GenTree* Compiler::gtNewZeroConNode(var_types type)
{
- GenTreePtr zero;
+ GenTree* zero;
switch (type)
{
case TYP_INT:
@@ -6501,7 +6502,7 @@ GenTreePtr Compiler::gtNewZeroConNode(var_types type)
return zero;
}
-GenTreePtr Compiler::gtNewOneConNode(var_types type)
+GenTree* Compiler::gtNewOneConNode(var_types type)
{
switch (type)
{
@@ -6515,8 +6516,8 @@ GenTreePtr Compiler::gtNewOneConNode(var_types type)
case TYP_FLOAT:
{
- GenTreePtr one = gtNewDconNode(1.0);
- one->gtType = type;
+ GenTree* one = gtNewDconNode(1.0);
+ one->gtType = type;
return one;
}
@@ -6537,7 +6538,7 @@ GenTreePtr Compiler::gtNewOneConNode(var_types type)
// simdType - simd vector type
// baseType - element type of vector
// size - size of vector in bytes
-GenTreePtr Compiler::gtNewSIMDVectorZero(var_types simdType, var_types baseType, unsigned size)
+GenTree* Compiler::gtNewSIMDVectorZero(var_types simdType, var_types baseType, unsigned size)
{
baseType = genActualType(baseType);
GenTree* initVal = gtNewZeroConNode(baseType);
@@ -6552,7 +6553,7 @@ GenTreePtr Compiler::gtNewSIMDVectorZero(var_types simdType, var_types baseType,
// simdType - simd vector type
// baseType - element type of vector
// size - size of vector in bytes
-GenTreePtr Compiler::gtNewSIMDVectorOne(var_types simdType, var_types baseType, unsigned size)
+GenTree* Compiler::gtNewSIMDVectorOne(var_types simdType, var_types baseType, unsigned size)
{
GenTree* initVal;
if (varTypeIsSmallInt(baseType))
@@ -6580,7 +6581,7 @@ GenTreePtr Compiler::gtNewSIMDVectorOne(var_types simdType, var_types baseType,
}
#endif // FEATURE_SIMD
-GenTreeCall* Compiler::gtNewIndCallNode(GenTreePtr addr, var_types type, GenTreeArgList* args, IL_OFFSETX ilOffset)
+GenTreeCall* Compiler::gtNewIndCallNode(GenTree* addr, var_types type, GenTreeArgList* args, IL_OFFSETX ilOffset)
{
return gtNewCallNode(CT_INDIRECT, (CORINFO_METHOD_HANDLE)addr, type, args, ilOffset);
}
@@ -6683,7 +6684,7 @@ GenTreeCall* Compiler::gtNewCallNode(
return node;
}
-GenTreePtr Compiler::gtNewLclvNode(unsigned lnum, var_types type, IL_OFFSETX ILoffs)
+GenTree* Compiler::gtNewLclvNode(unsigned lnum, var_types type, IL_OFFSETX ILoffs)
{
// We need to ensure that all struct values are normalized.
// It might be nice to assert this in general, but we have assignments of int to long.
@@ -6695,7 +6696,7 @@ GenTreePtr Compiler::gtNewLclvNode(unsigned lnum, var_types type, IL_OFFSETX ILo
assert(type == lvaTable[lnum].lvType ||
(lvaIsImplicitByRefLocal(lnum) && fgGlobalMorph && (lvaTable[lnum].lvType == TYP_BYREF)));
}
- GenTreePtr node = new (this, GT_LCL_VAR) GenTreeLclVar(type, lnum, ILoffs);
+ GenTree* node = new (this, GT_LCL_VAR) GenTreeLclVar(type, lnum, ILoffs);
/* Cannot have this assert because the inliner uses this function
* to add temporaries */
@@ -6705,7 +6706,7 @@ GenTreePtr Compiler::gtNewLclvNode(unsigned lnum, var_types type, IL_OFFSETX ILo
return node;
}
-GenTreePtr Compiler::gtNewLclLNode(unsigned lnum, var_types type, IL_OFFSETX ILoffs)
+GenTree* Compiler::gtNewLclLNode(unsigned lnum, var_types type, IL_OFFSETX ILoffs)
{
// We need to ensure that all struct values are normalized.
// It might be nice to assert this in general, but we have assignments of int to long.
@@ -6722,9 +6723,9 @@ GenTreePtr Compiler::gtNewLclLNode(unsigned lnum, var_types type, IL_OFFSETX ILo
// assert(GenTree::s_gtNodeSizes[GT_CALL] > GenTree::s_gtNodeSizes[GT_LCL_VAR]);
- GenTreePtr node = new (this, GT_CALL) GenTreeLclVar(type, lnum, ILoffs DEBUGARG(/*largeNode*/ true));
+ GenTree* node = new (this, GT_CALL) GenTreeLclVar(type, lnum, ILoffs DEBUGARG(/*largeNode*/ true));
#else
- GenTreePtr node = new (this, GT_LCL_VAR) GenTreeLclVar(type, lnum, ILoffs DEBUGARG(/*largeNode*/ true));
+ GenTree* node = new (this, GT_LCL_VAR) GenTreeLclVar(type, lnum, ILoffs DEBUGARG(/*largeNode*/ true));
#endif
return node;
@@ -6743,12 +6744,12 @@ GenTreeLclFld* Compiler::gtNewLclFldNode(unsigned lnum, var_types type, unsigned
return node;
}
-GenTreePtr Compiler::gtNewInlineCandidateReturnExpr(GenTreePtr inlineCandidate, var_types type)
+GenTree* Compiler::gtNewInlineCandidateReturnExpr(GenTree* inlineCandidate, var_types type)
{
assert(GenTree::s_gtNodeSizes[GT_RET_EXPR] == TREE_NODE_SZ_LARGE);
- GenTreePtr node = new (this, GT_RET_EXPR) GenTreeRetExpr(type);
+ GenTree* node = new (this, GT_RET_EXPR) GenTreeRetExpr(type);
node->gtRetExpr.gtInlineCandidate = inlineCandidate;
@@ -6765,7 +6766,7 @@ GenTreePtr Compiler::gtNewInlineCandidateReturnExpr(GenTreePtr inlineCandidate,
return node;
}
-GenTreeArgList* Compiler::gtNewListNode(GenTreePtr op1, GenTreeArgList* op2)
+GenTreeArgList* Compiler::gtNewListNode(GenTree* op1, GenTreeArgList* op2)
{
assert((op1 != nullptr) && (op1->OperGet() != GT_LIST));
@@ -6777,7 +6778,7 @@ GenTreeArgList* Compiler::gtNewListNode(GenTreePtr op1, GenTreeArgList* op2)
* Create a list out of one value.
*/
-GenTreeArgList* Compiler::gtNewArgList(GenTreePtr arg)
+GenTreeArgList* Compiler::gtNewArgList(GenTree* arg)
{
return new (this, GT_LIST) GenTreeArgList(arg);
}
@@ -6787,7 +6788,7 @@ GenTreeArgList* Compiler::gtNewArgList(GenTreePtr arg)
* Create a list out of the two values.
*/
-GenTreeArgList* Compiler::gtNewArgList(GenTreePtr arg1, GenTreePtr arg2)
+GenTreeArgList* Compiler::gtNewArgList(GenTree* arg1, GenTree* arg2)
{
return new (this, GT_LIST) GenTreeArgList(arg1, gtNewArgList(arg2));
}
@@ -6797,7 +6798,7 @@ GenTreeArgList* Compiler::gtNewArgList(GenTreePtr arg1, GenTreePtr arg2)
* Create a list out of the three values.
*/
-GenTreeArgList* Compiler::gtNewArgList(GenTreePtr arg1, GenTreePtr arg2, GenTreePtr arg3)
+GenTreeArgList* Compiler::gtNewArgList(GenTree* arg1, GenTree* arg2, GenTree* arg3)
{
return new (this, GT_LIST) GenTreeArgList(arg1, gtNewArgList(arg2, arg3));
}
@@ -6807,7 +6808,7 @@ GenTreeArgList* Compiler::gtNewArgList(GenTreePtr arg1, GenTreePtr arg2, GenTree
* Create a list out of the three values.
*/
-GenTreeArgList* Compiler::gtNewArgList(GenTreePtr arg1, GenTreePtr arg2, GenTreePtr arg3, GenTreePtr arg4)
+GenTreeArgList* Compiler::gtNewArgList(GenTree* arg1, GenTree* arg2, GenTree* arg3, GenTree* arg4)
{
return new (this, GT_LIST) GenTreeArgList(arg1, gtNewArgList(arg2, arg3, arg4));
}
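The gtNewArgList overloads above are purely recursive: each one prepends its first argument to the list built from the remaining arguments. A tiny stand-alone analogue of that construction, using a stub node type and hypothetical names rather than the real GT_LIST nodes:

struct ArgList
{
    int      payload; // stands in for the GenTree* held by each list node
    ArgList* rest;
};

static ArgList* NewArgList(int a)                      { return new ArgList{a, nullptr}; }
static ArgList* NewArgList(int a, int b)               { return new ArgList{a, NewArgList(b)}; }
static ArgList* NewArgList(int a, int b, int c)        { return new ArgList{a, NewArgList(b, c)}; }
static ArgList* NewArgList(int a, int b, int c, int d) { return new ArgList{a, NewArgList(b, c, d)}; }

// NewArgList(1, 2, 3, 4) yields the chain 1 -> 2 -> 3 -> 4 -> nullptr, mirroring the way
// the four-argument overload above nests the three-argument one, and so on down to one.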
@@ -6845,7 +6846,7 @@ fgArgTabEntry* Compiler::gtArgEntryByArgNum(GenTreeCall* call, unsigned argNum)
* that has the matching node and return the fgArgTableEntryPtr
*/
-fgArgTabEntry* Compiler::gtArgEntryByNode(GenTreeCall* call, GenTreePtr node)
+fgArgTabEntry* Compiler::gtArgEntryByNode(GenTreeCall* call, GenTree* node)
{
fgArgInfo* argInfo = call->fgArgInfo;
noway_assert(argInfo != nullptr);
@@ -6922,7 +6923,7 @@ bool Compiler::gtArgIsThisPtr(fgArgTabEntry* argEntry)
* Create a node that will assign 'src' to 'dst'.
*/
-GenTreePtr Compiler::gtNewAssignNode(GenTreePtr dst, GenTreePtr src)
+GenTree* Compiler::gtNewAssignNode(GenTree* dst, GenTree* src)
{
/* Mark the target as being assigned */
@@ -6939,7 +6940,7 @@ GenTreePtr Compiler::gtNewAssignNode(GenTreePtr dst, GenTreePtr src)
/* Create the assignment node */
- GenTreePtr asg = gtNewOperNode(GT_ASG, dst->TypeGet(), dst, src);
+ GenTree* asg = gtNewOperNode(GT_ASG, dst->TypeGet(), dst, src);
/* Mark the expression as containing an assignment */
@@ -7049,7 +7050,7 @@ void Compiler::gtSetObjGcInfo(GenTreeObj* objNode)
// Return Value:
// A block, object or local node that represents the struct value pointed to by 'addr'.
-GenTree* Compiler::gtNewStructVal(CORINFO_CLASS_HANDLE structHnd, GenTreePtr addr)
+GenTree* Compiler::gtNewStructVal(CORINFO_CLASS_HANDLE structHnd, GenTree* addr)
{
if (addr->gtOper == GT_ADDR)
{
@@ -7078,7 +7079,7 @@ GenTree* Compiler::gtNewStructVal(CORINFO_CLASS_HANDLE structHnd, GenTreePtr add
// Return Value:
// A block, object or local node that represents the block value pointed to by 'addr'.
-GenTree* Compiler::gtNewBlockVal(GenTreePtr addr, unsigned size)
+GenTree* Compiler::gtNewBlockVal(GenTree* addr, unsigned size)
{
// By default we treat this as an opaque struct type with known size.
var_types blkType = TYP_STRUCT;
@@ -7120,14 +7121,11 @@ GenTree* Compiler::gtNewBlockVal(GenTreePtr addr, unsigned size)
// if FEATURE_SIMD is enabled and the source has a SIMD type.
// isVolatile - Is this marked as volatile memory?
-GenTree* Compiler::gtNewCpObjNode(GenTreePtr dstAddr,
- GenTreePtr srcAddr,
- CORINFO_CLASS_HANDLE structHnd,
- bool isVolatile)
+GenTree* Compiler::gtNewCpObjNode(GenTree* dstAddr, GenTree* srcAddr, CORINFO_CLASS_HANDLE structHnd, bool isVolatile)
{
- GenTreePtr lhs = gtNewStructVal(structHnd, dstAddr);
- GenTree* src = nullptr;
- unsigned size;
+ GenTree* lhs = gtNewStructVal(structHnd, dstAddr);
+ GenTree* src = nullptr;
+ unsigned size;
if (lhs->OperIsBlk())
{
@@ -7219,7 +7217,7 @@ void GenTreeIntCon::FixupInitBlkValue(var_types asgType)
// This procedure centralizes all the logic to both enforce proper structure and
// to properly construct any InitBlk/CpBlk node.
-void Compiler::gtBlockOpInit(GenTreePtr result, GenTreePtr dst, GenTreePtr srcOrFillVal, bool isVolatile)
+void Compiler::gtBlockOpInit(GenTree* result, GenTree* dst, GenTree* srcOrFillVal, bool isVolatile)
{
if (!result->OperIsBlkOp())
{
@@ -7274,8 +7272,8 @@ void Compiler::gtBlockOpInit(GenTreePtr result, GenTreePtr dst, GenTreePtr srcOr
*/
if (result->OperIsCopyBlkOp())
{
- GenTreePtr currSrc = srcOrFillVal;
- GenTreePtr currDst = dst;
+ GenTree* currSrc = srcOrFillVal;
+ GenTree* currDst = dst;
if (currSrc->OperIsBlk() && (currSrc->AsBlk()->Addr()->OperGet() == GT_ADDR))
{
@@ -7360,8 +7358,7 @@ void Compiler::gtBlockOpInit(GenTreePtr result, GenTreePtr dst, GenTreePtr srcOr
// If size is zero, the dst must be a GT_OBJ with the class handle.
// 'dst' must be a block node or lclVar.
//
-GenTree* Compiler::gtNewBlkOpNode(
- GenTreePtr dst, GenTreePtr srcOrFillVal, unsigned size, bool isVolatile, bool isCopyBlock)
+GenTree* Compiler::gtNewBlkOpNode(GenTree* dst, GenTree* srcOrFillVal, unsigned size, bool isVolatile, bool isCopyBlock)
{
assert(dst->OperIsBlk() || dst->OperIsLocal());
if (isCopyBlock)
@@ -7404,16 +7401,16 @@ GenTree* Compiler::gtNewBlkOpNode(
// Notes:
// The node is generated as GenTreeMultiRegOp on RyuJIT/armel, GenTreeOp on all the other archs.
//
-GenTreePtr Compiler::gtNewPutArgReg(var_types type, GenTreePtr arg, regNumber argReg)
+GenTree* Compiler::gtNewPutArgReg(var_types type, GenTree* arg, regNumber argReg)
{
assert(arg != nullptr);
- GenTreePtr node = nullptr;
+ GenTree* node = nullptr;
#if !defined(LEGACY_BACKEND) && defined(_TARGET_ARM_)
// A PUTARG_REG could be a MultiRegOp on arm since we could move a double register to two int registers.
node = new (this, GT_PUTARG_REG) GenTreeMultiRegOp(GT_PUTARG_REG, type, arg, nullptr);
#else
- node = gtNewOperNode(GT_PUTARG_REG, type, arg);
+ node = gtNewOperNode(GT_PUTARG_REG, type, arg);
#endif
node->gtRegNum = argReg;
@@ -7434,16 +7431,16 @@ GenTreePtr Compiler::gtNewPutArgReg(var_types type, GenTreePtr arg, regNumber ar
// Notes:
// The node is generated as GenTreeMultiRegOp on RyuJIT/armel, as GenTreeOp on all the other archs.
//
-GenTreePtr Compiler::gtNewBitCastNode(var_types type, GenTreePtr arg)
+GenTree* Compiler::gtNewBitCastNode(var_types type, GenTree* arg)
{
assert(arg != nullptr);
- GenTreePtr node = nullptr;
+ GenTree* node = nullptr;
#if !defined(LEGACY_BACKEND) && defined(_TARGET_ARM_)
// A BITCAST could be a MultiRegOp on arm since we could move a double register to two int registers.
node = new (this, GT_BITCAST) GenTreeMultiRegOp(GT_BITCAST, type, arg, nullptr);
#else
- node = gtNewOperNode(GT_BITCAST, type, arg);
+ node = gtNewOperNode(GT_BITCAST, type, arg);
#endif
return node;
@@ -7461,9 +7458,9 @@ GenTreePtr Compiler::gtNewBitCastNode(var_types type, GenTreePtr arg)
* complete job if you can't handle this function failing.
*/
-GenTreePtr Compiler::gtClone(GenTree* tree, bool complexOK)
+GenTree* Compiler::gtClone(GenTree* tree, bool complexOK)
{
- GenTreePtr copy;
+ GenTree* copy;
switch (tree->gtOper)
{
@@ -7522,7 +7519,7 @@ GenTreePtr Compiler::gtClone(GenTree* tree, bool complexOK)
if (tree->gtOper == GT_FIELD)
{
- GenTreePtr objp;
+ GenTree* objp;
// copied from line 9850
@@ -7541,8 +7538,8 @@ GenTreePtr Compiler::gtClone(GenTree* tree, bool complexOK)
}
else if (tree->OperIs(GT_ADD, GT_SUB))
{
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtOp.gtOp2;
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtOp.gtOp2;
if (op1->OperIsLeaf() && op2->OperIsLeaf())
{
@@ -7566,7 +7563,7 @@ GenTreePtr Compiler::gtClone(GenTree* tree, bool complexOK)
}
else if (tree->gtOper == GT_ADDR)
{
- GenTreePtr op1 = gtClone(tree->gtOp.gtOp1);
+ GenTree* op1 = gtClone(tree->gtOp.gtOp1);
if (op1 == nullptr)
{
return nullptr;
@@ -7611,7 +7608,7 @@ GenTreePtr Compiler::gtClone(GenTree* tree, bool complexOK)
// the explicit `deepVarNum` and `deepVarVal` parameters; those are used in
// recursive invocations to avoid replacing defs.
-GenTreePtr Compiler::gtCloneExpr(
+GenTree* Compiler::gtCloneExpr(
GenTree* tree, unsigned addFlags, unsigned varNum, int varVal, unsigned deepVarNum, int deepVarVal)
{
if (tree == nullptr)
@@ -8164,7 +8161,7 @@ GenTreePtr Compiler::gtCloneExpr(
case GT_ARR_ELEM:
{
- GenTreePtr inds[GT_ARR_MAX_RANK];
+ GenTree* inds[GT_ARR_MAX_RANK];
for (unsigned dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
{
inds[dim] = gtCloneExpr(tree->gtArrElem.gtArrInds[dim], addFlags, deepVarNum, deepVarVal);
@@ -8306,15 +8303,15 @@ DONE:
// but this method will sequence 'replacementTree', and insert it into the
// proper place in the statement sequence.
-GenTreePtr Compiler::gtReplaceTree(GenTreePtr stmt, GenTreePtr tree, GenTreePtr replacementTree)
+GenTree* Compiler::gtReplaceTree(GenTree* stmt, GenTree* tree, GenTree* replacementTree)
{
assert(fgStmtListThreaded);
assert(tree != nullptr);
assert(stmt != nullptr);
assert(replacementTree != nullptr);
- GenTreePtr* treePtr = nullptr;
- GenTreePtr treeParent = tree->gtGetParent(&treePtr);
+ GenTree** treePtr = nullptr;
+ GenTree* treeParent = tree->gtGetParent(&treePtr);
assert(treeParent != nullptr || tree == stmt->gtStmt.gtStmtExpr);
@@ -8349,10 +8346,10 @@ GenTreePtr Compiler::gtReplaceTree(GenTreePtr stmt, GenTreePtr tree, GenTreePtr
assert(useEdge == treePtr);
#endif // DEBUG
- GenTreePtr treeFirstNode = fgGetFirstNode(tree);
- GenTreePtr treeLastNode = tree;
- GenTreePtr treePrevNode = treeFirstNode->gtPrev;
- GenTreePtr treeNextNode = treeLastNode->gtNext;
+ GenTree* treeFirstNode = fgGetFirstNode(tree);
+ GenTree* treeLastNode = tree;
+ GenTree* treePrevNode = treeFirstNode->gtPrev;
+ GenTree* treeNextNode = treeLastNode->gtNext;
treeParent->ReplaceOperand(treePtr, replacementTree);
@@ -8613,7 +8610,7 @@ bool Compiler::gtCompareTree(GenTree* op1, GenTree* op2)
return false;
}
-GenTreePtr Compiler::gtGetThisArg(GenTreeCall* call)
+GenTree* Compiler::gtGetThisArg(GenTreeCall* call)
{
if (call->gtCallObjp != nullptr)
{
@@ -8630,12 +8627,12 @@ GenTreePtr Compiler::gtGetThisArg(GenTreeCall* call)
regNumber thisReg = REG_ARG_0;
unsigned argNum = 0;
fgArgTabEntry* thisArgTabEntry = gtArgEntryByArgNum(call, argNum);
- GenTreePtr result = thisArgTabEntry->node;
+ GenTree* result = thisArgTabEntry->node;
#if !FEATURE_FIXED_OUT_ARGS
- GenTreePtr lateArgs = call->gtCallLateArgs;
- regList list = call->regArgList;
- int index = 0;
+ GenTree* lateArgs = call->gtCallLateArgs;
+ regList list = call->regArgList;
+ int index = 0;
while (lateArgs != NULL)
{
assert(lateArgs->gtOper == GT_LIST);
@@ -8950,7 +8947,7 @@ unsigned GenTree::NumChildren()
}
}
-GenTreePtr GenTree::GetChild(unsigned childNum)
+GenTree* GenTree::GetChild(unsigned childNum)
{
assert(childNum < NumChildren()); // Precondition.
assert(NumChildren() <= MAX_CHILDREN);
@@ -10012,7 +10009,7 @@ void Compiler::gtDispVN(GenTree* tree)
// 'indentStack' may be null, in which case no indentation or arcs are printed
// 'msg' may be null
-void Compiler::gtDispNode(GenTreePtr tree, IndentStack* indentStack, __in __in_z __in_opt const char* msg, bool isLIR)
+void Compiler::gtDispNode(GenTree* tree, IndentStack* indentStack, __in __in_z __in_opt const char* msg, bool isLIR)
{
bool printPointer = true; // always true..
bool printFlags = true; // always true..
@@ -11284,7 +11281,7 @@ void Compiler::gtDispLeaf(GenTree* tree, IndentStack* indentStack)
// 'msg' has a default value of null
// 'topOnly' is an optional argument that defaults to false
-void Compiler::gtDispChild(GenTreePtr child,
+void Compiler::gtDispChild(GenTree* child,
IndentStack* indentStack,
IndentInfo arcType,
__in_opt const char* msg, /* = nullptr */
@@ -11309,7 +11306,7 @@ extern const char* getHWIntrinsicName(NamedIntrinsic intrinsic);
/*****************************************************************************/
-void Compiler::gtDispTree(GenTreePtr tree,
+void Compiler::gtDispTree(GenTree* tree,
IndentStack* indentStack, /* = nullptr */
__in __in_z __in_opt const char* msg, /* = nullptr */
bool topOnly, /* = false */
@@ -11797,7 +11794,7 @@ void Compiler::gtDispTree(GenTreePtr tree,
for (GenTreeArgList* lateArgs = call->gtCallLateArgs; lateArgs;
(lateArgIndex++, lateArgs = lateArgs->Rest()))
{
- GenTreePtr argx;
+ GenTree* argx;
argx = lateArgs->Current();
@@ -11920,7 +11917,7 @@ void Compiler::gtDispTree(GenTreePtr tree,
// 'arg' must be an argument to 'call' (else gtArgEntryByNode will assert)
void Compiler::gtGetArgMsg(
- GenTreeCall* call, GenTreePtr arg, unsigned argNum, int listCount, char* bufp, unsigned bufLength)
+ GenTreeCall* call, GenTree* arg, unsigned argNum, int listCount, char* bufp, unsigned bufLength)
{
if (call->gtCallLateArgs != nullptr)
{
@@ -12038,7 +12035,7 @@ void Compiler::gtGetArgMsg(
// 'arg' must be an argument to 'call' (else gtArgEntryByNode will assert)
void Compiler::gtGetLateArgMsg(
- GenTreeCall* call, GenTreePtr argx, int lateArgIndex, int listCount, char* bufp, unsigned bufLength)
+ GenTreeCall* call, GenTree* argx, int lateArgIndex, int listCount, char* bufp, unsigned bufLength)
{
assert(!argx->IsArgPlaceHolderNode()); // No place holders nodes are in gtCallLateArgs;
@@ -12219,7 +12216,7 @@ void Compiler::gtDispArgList(GenTreeCall* call, IndentStack* indentStack)
// Assumptions:
// 'tree' must be a GT_LIST node
-void Compiler::gtDispTreeList(GenTreePtr tree, IndentStack* indentStack /* = nullptr */)
+void Compiler::gtDispTreeList(GenTree* tree, IndentStack* indentStack /* = nullptr */)
{
for (/*--*/; tree != nullptr; tree = tree->gtNext)
{
@@ -12441,7 +12438,7 @@ void Compiler::gtDispLIRNode(GenTree* node, const char* prefixMsg /* = nullptr *
* and call the methods to perform the folding
*/
-GenTreePtr Compiler::gtFoldExpr(GenTreePtr tree)
+GenTree* Compiler::gtFoldExpr(GenTree* tree)
{
unsigned kind = tree->OperKind();
@@ -12460,7 +12457,7 @@ GenTreePtr Compiler::gtFoldExpr(GenTreePtr tree)
return tree;
}
- GenTreePtr op1 = tree->gtOp.gtOp1;
+ GenTree* op1 = tree->gtOp.gtOp1;
/* Filter out non-foldable trees that can have constant children */
@@ -12488,7 +12485,7 @@ GenTreePtr Compiler::gtFoldExpr(GenTreePtr tree)
// Don't take out conditionals for debugging
!((opts.compDbgCode || opts.MinOpts()) && tree->OperIsCompare()))
{
- GenTreePtr op2 = tree->gtOp.gtOp2;
+ GenTree* op2 = tree->gtOp.gtOp2;
// The atomic operations are exempted here because they are never computable statically;
// one of their arguments is an address.
@@ -12515,14 +12512,14 @@ GenTreePtr Compiler::gtFoldExpr(GenTreePtr tree)
{
assert(tree->OperGet() == GT_QMARK);
- GenTreePtr colon_op1 = op2->gtOp.gtOp1;
- GenTreePtr colon_op2 = op2->gtOp.gtOp2;
+ GenTree* colon_op1 = op2->gtOp.gtOp1;
+ GenTree* colon_op2 = op2->gtOp.gtOp2;
if (gtCompareTree(colon_op1, colon_op2))
{
// Both sides of the GT_COLON are the same tree
- GenTreePtr sideEffList = nullptr;
+ GenTree* sideEffList = nullptr;
gtExtractSideEffList(op1, &sideEffList);
fgUpdateRefCntForExtract(op1, sideEffList); // Decrement refcounts for op1, Keeping any side-effects
@@ -12683,10 +12680,10 @@ GenTree* Compiler::gtFoldTypeEqualityCall(CorInfoIntrinsics methodID, GenTree* o
*
*/
-GenTreePtr Compiler::gtFoldExprCompare(GenTreePtr tree)
+GenTree* Compiler::gtFoldExprCompare(GenTree* tree)
{
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtOp.gtOp2;
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtOp.gtOp2;
assert(tree->OperIsCompare());
@@ -12706,7 +12703,7 @@ GenTreePtr Compiler::gtFoldExprCompare(GenTreePtr tree)
return tree; /* return unfolded tree */
}
- GenTreePtr cons;
+ GenTree* cons;
switch (tree->gtOper)
{
@@ -13034,14 +13031,15 @@ GenTree* Compiler::gtFoldTypeCompare(GenTree* tree)
* multiply with 1, etc
*/
-GenTreePtr Compiler::gtFoldExprSpecial(GenTreePtr tree)
+GenTree* Compiler::gtFoldExprSpecial(GenTree* tree)
{
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtOp.gtOp2;
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtOp.gtOp2;
genTreeOps oper = tree->OperGet();
- GenTreePtr op, cons;
- ssize_t val;
+ GenTree* op;
+ GenTree* cons;
+ ssize_t val;
assert(tree->OperKind() & GTK_BINOP);
@@ -13820,7 +13818,7 @@ GenTree* Compiler::gtOptimizeEnumHasFlag(GenTree* thisOp, GenTree* flagOp)
#pragma warning(push)
#pragma warning(disable : 21000) // Suppress PREFast warning about overly large function
#endif
-GenTreePtr Compiler::gtFoldExprConst(GenTreePtr tree)
+GenTree* Compiler::gtFoldExprConst(GenTree* tree)
{
unsigned kind = tree->OperKind();
@@ -13833,8 +13831,8 @@ GenTreePtr Compiler::gtFoldExprConst(GenTreePtr tree)
assert(kind & (GTK_UNOP | GTK_BINOP));
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtGetOp2IfPresent();
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtGetOp2IfPresent();
if (!opts.OptEnabled(CLFLG_CONSTANTFOLD))
{
@@ -15238,7 +15236,7 @@ DONE:
// May set compFloatingPointUsed.
//
-GenTreePtr Compiler::gtNewTempAssign(unsigned tmp, GenTreePtr val)
+GenTree* Compiler::gtNewTempAssign(unsigned tmp, GenTree* val)
{
// Self-assignment is a nop.
if (val->OperGet() == GT_LCL_VAR && val->gtLclVarCommon.gtLclNum == tmp)
@@ -15313,8 +15311,8 @@ GenTreePtr Compiler::gtNewTempAssign(unsigned tmp, GenTreePtr val)
/* Create the assignment node */
- GenTreePtr asg;
- GenTreePtr dest = gtNewLclvNode(tmp, dstTyp);
+ GenTree* asg;
+ GenTree* dest = gtNewLclvNode(tmp, dstTyp);
dest->gtFlags |= GTF_VAR_DEF;
// With first-class structs, we should be propagating the class handle on all non-primitive
@@ -15325,7 +15323,7 @@ GenTreePtr Compiler::gtNewTempAssign(unsigned tmp, GenTreePtr val)
if (varTypeIsStruct(valTyp) && ((structHnd != NO_CLASS_HANDLE) || (varTypeIsSIMD(valTyp))))
{
// The GT_OBJ may be be a child of a GT_COMMA.
- GenTreePtr valx = val->gtEffectiveVal(/*commaOnly*/ true);
+ GenTree* valx = val->gtEffectiveVal(/*commaOnly*/ true);
if (valx->gtOper == GT_OBJ)
{
@@ -15357,13 +15355,13 @@ GenTreePtr Compiler::gtNewTempAssign(unsigned tmp, GenTreePtr val)
* an assignment and 'assg' is the new value).
*/
-GenTreePtr Compiler::gtNewRefCOMfield(GenTreePtr objPtr,
- CORINFO_RESOLVED_TOKEN* pResolvedToken,
- CORINFO_ACCESS_FLAGS access,
- CORINFO_FIELD_INFO* pFieldInfo,
- var_types lclTyp,
- CORINFO_CLASS_HANDLE structType,
- GenTreePtr assg)
+GenTree* Compiler::gtNewRefCOMfield(GenTree* objPtr,
+ CORINFO_RESOLVED_TOKEN* pResolvedToken,
+ CORINFO_ACCESS_FLAGS access,
+ CORINFO_FIELD_INFO* pFieldInfo,
+ var_types lclTyp,
+ CORINFO_CLASS_HANDLE structType,
+ GenTree* assg)
{
assert(pFieldInfo->fieldAccessor == CORINFO_FIELD_INSTANCE_HELPER ||
pFieldInfo->fieldAccessor == CORINFO_FIELD_INSTANCE_ADDR_HELPER ||
@@ -15415,7 +15413,7 @@ GenTreePtr Compiler::gtNewRefCOMfield(GenTreePtr objPtr,
args = gtNewListNode(gtNewIconEmbClsHndNode(pFieldInfo->structType), args);
}
- GenTreePtr fieldHnd = impTokenToHandle(pResolvedToken);
+ GenTree* fieldHnd = impTokenToHandle(pResolvedToken);
if (fieldHnd == nullptr)
{ // compDonotInline()
return nullptr;
@@ -15432,7 +15430,7 @@ GenTreePtr Compiler::gtNewRefCOMfield(GenTreePtr objPtr,
args = gtNewListNode(objPtr, args);
}
- GenTreePtr tree = gtNewHelperCallNode(pFieldInfo->helper, genActualType(helperType), args);
+ GenTree* tree = gtNewHelperCallNode(pFieldInfo->helper, genActualType(helperType), args);
if (pFieldInfo->fieldAccessor == CORINFO_FIELD_INSTANCE_HELPER)
{
@@ -15497,7 +15495,7 @@ GenTreePtr Compiler::gtNewRefCOMfield(GenTreePtr objPtr,
* assignments too.
*/
-bool Compiler::gtNodeHasSideEffects(GenTreePtr tree, unsigned flags)
+bool Compiler::gtNodeHasSideEffects(GenTree* tree, unsigned flags)
{
if (flags & GTF_ASG)
{
@@ -15566,7 +15564,7 @@ bool Compiler::gtNodeHasSideEffects(GenTreePtr tree, unsigned flags)
* Returns true if the expr tree has any side effects.
*/
-bool Compiler::gtTreeHasSideEffects(GenTreePtr tree, unsigned flags /* = GTF_SIDE_EFFECT*/)
+bool Compiler::gtTreeHasSideEffects(GenTree* tree, unsigned flags /* = GTF_SIDE_EFFECT*/)
{
// These are the side effect flags that we care about for this tree
unsigned sideEffectFlags = tree->gtFlags & flags;
@@ -15616,7 +15614,7 @@ bool Compiler::gtTreeHasSideEffects(GenTreePtr tree, unsigned flags /* = GTF_SID
return true;
}
-GenTreePtr Compiler::gtBuildCommaList(GenTreePtr list, GenTreePtr expr)
+GenTree* Compiler::gtBuildCommaList(GenTree* list, GenTree* expr)
{
// 'list' starts off as null,
// and when it is null we haven't started the list yet.
@@ -15624,7 +15622,7 @@ GenTreePtr Compiler::gtBuildCommaList(GenTreePtr list, GenTreePtr expr)
if (list != nullptr)
{
// Create a GT_COMMA that appends 'expr' in front of the remaining set of expressions in (*list)
- GenTreePtr result = gtNewOperNode(GT_COMMA, TYP_VOID, expr, list);
+ GenTree* result = gtNewOperNode(GT_COMMA, TYP_VOID, expr, list);
// Set the flags in the comma node
result->gtFlags |= (list->gtFlags & GTF_ALL_EFFECT);
@@ -15675,10 +15673,10 @@ GenTreePtr Compiler::gtBuildCommaList(GenTreePtr list, GenTreePtr expr)
* level tree node as having side-effect.
*/
-void Compiler::gtExtractSideEffList(GenTreePtr expr,
- GenTreePtr* pList,
- unsigned flags /* = GTF_SIDE_EFFECT*/,
- bool ignoreRoot /* = false */)
+void Compiler::gtExtractSideEffList(GenTree* expr,
+ GenTree** pList,
+ unsigned flags /* = GTF_SIDE_EFFECT*/,
+ bool ignoreRoot /* = false */)
{
assert(expr);
assert(expr->gtOper != GT_STMT);
@@ -15726,8 +15724,8 @@ void Compiler::gtExtractSideEffList(GenTreePtr expr,
if (kind & GTK_SMPOP)
{
- GenTreePtr op1 = expr->gtOp.gtOp1;
- GenTreePtr op2 = expr->gtGetOp2IfPresent();
+ GenTree* op1 = expr->gtOp.gtOp1;
+ GenTree* op2 = expr->gtGetOp2IfPresent();
if (flags & GTF_EXCEPT)
{
@@ -15792,7 +15790,7 @@ void Compiler::gtExtractSideEffList(GenTreePtr expr,
// We can remove this Helper call, but there still could be
// side-effects in the arguments that we may need to keep
//
- GenTreePtr args;
+ GenTree* args;
for (args = expr->gtCall.gtCallArgs; args; args = args->gtOp.gtOp2)
{
assert(args->OperIsList());
@@ -15834,10 +15832,10 @@ void Compiler::gtExtractSideEffList(GenTreePtr expr,
#ifdef DEBUG
-void dispNodeList(GenTreePtr list, bool verbose)
+void dispNodeList(GenTree* list, bool verbose)
{
- GenTreePtr last = nullptr;
- GenTreePtr next;
+ GenTree* last = nullptr;
+ GenTree* next;
if (!list)
{
@@ -15873,7 +15871,7 @@ void dispNodeList(GenTreePtr list, bool verbose)
*/
/* static */
-Compiler::fgWalkResult Compiler::gtAssertColonCond(GenTreePtr* pTree, fgWalkData* data)
+Compiler::fgWalkResult Compiler::gtAssertColonCond(GenTree** pTree, fgWalkData* data)
{
assert(data->pCallbackData == nullptr);
@@ -15889,7 +15887,7 @@ Compiler::fgWalkResult Compiler::gtAssertColonCond(GenTreePtr* pTree, fgWalkData
*/
/* static */
-Compiler::fgWalkResult Compiler::gtMarkColonCond(GenTreePtr* pTree, fgWalkData* data)
+Compiler::fgWalkResult Compiler::gtMarkColonCond(GenTree** pTree, fgWalkData* data)
{
assert(data->pCallbackData == nullptr);
@@ -15906,9 +15904,9 @@ Compiler::fgWalkResult Compiler::gtMarkColonCond(GenTreePtr* pTree, fgWalkData*
*/
/* static */
-Compiler::fgWalkResult Compiler::gtClearColonCond(GenTreePtr* pTree, fgWalkData* data)
+Compiler::fgWalkResult Compiler::gtClearColonCond(GenTree** pTree, fgWalkData* data)
{
- GenTreePtr tree = *pTree;
+ GenTree* tree = *pTree;
assert(data->pCallbackData == nullptr);
@@ -15924,15 +15922,15 @@ Compiler::fgWalkResult Compiler::gtClearColonCond(GenTreePtr* pTree, fgWalkData*
struct FindLinkData
{
- GenTreePtr nodeToFind;
- GenTreePtr* result;
+ GenTree* nodeToFind;
+ GenTree** result;
};
/*****************************************************************************
*
* Callback used by the tree walker to implement fgFindLink()
*/
-static Compiler::fgWalkResult gtFindLinkCB(GenTreePtr* pTree, Compiler::fgWalkData* cbData)
+static Compiler::fgWalkResult gtFindLinkCB(GenTree** pTree, Compiler::fgWalkData* cbData)
{
FindLinkData* data = (FindLinkData*)cbData->pCallbackData;
if (*pTree == data->nodeToFind)
@@ -15944,7 +15942,7 @@ static Compiler::fgWalkResult gtFindLinkCB(GenTreePtr* pTree, Compiler::fgWalkDa
return Compiler::WALK_CONTINUE;
}
-GenTreePtr* Compiler::gtFindLink(GenTreePtr stmt, GenTreePtr node)
+GenTree** Compiler::gtFindLink(GenTree* stmt, GenTree* node)
{
assert(stmt->gtOper == GT_STMT);
@@ -15968,13 +15966,13 @@ GenTreePtr* Compiler::gtFindLink(GenTreePtr stmt, GenTreePtr node)
* Callback that checks if a tree node has oper type GT_CATCH_ARG
*/
-static Compiler::fgWalkResult gtFindCatchArg(GenTreePtr* pTree, Compiler::fgWalkData* /* data */)
+static Compiler::fgWalkResult gtFindCatchArg(GenTree** pTree, Compiler::fgWalkData* /* data */)
{
return ((*pTree)->OperGet() == GT_CATCH_ARG) ? Compiler::WALK_ABORT : Compiler::WALK_CONTINUE;
}
/*****************************************************************************/
-bool Compiler::gtHasCatchArg(GenTreePtr tree)
+bool Compiler::gtHasCatchArg(GenTree* tree)
{
if (((tree->gtFlags & GTF_ORDER_SIDEEFF) != 0) && (fgWalkTreePre(&tree, gtFindCatchArg) == WALK_ABORT))
{
@@ -16033,7 +16031,7 @@ bool Compiler::gtHasCatchArg(GenTreePtr tree)
// of parent nodes. Both of these generally requires that
// we are performing a recursive tree walk using struct fgWalkData
//------------------------------------------------------------------------
-void Compiler::gtCheckQuirkAddrExposedLclVar(GenTreePtr tree, GenTreeStack* parentStack)
+void Compiler::gtCheckQuirkAddrExposedLclVar(GenTree* tree, GenTreeStack* parentStack)
{
#ifdef _TARGET_64BIT_
// We only need to Quirk for _TARGET_64BIT_
@@ -16155,7 +16153,7 @@ bool Compiler::gtIsTypeHandleToRuntimeTypeHelper(GenTreeCall* call)
call->gtCallMethHnd == eeFindHelper(CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPE_MAYBENULL);
}
-bool Compiler::gtIsActiveCSE_Candidate(GenTreePtr tree)
+bool Compiler::gtIsActiveCSE_Candidate(GenTree* tree)
{
return (optValnumCSE_phase && IS_CSE_INDEX(tree->gtCSEnum));
}
@@ -16171,7 +16169,7 @@ struct ComplexityStruct
}
};
-static Compiler::fgWalkResult ComplexityExceedsWalker(GenTreePtr* pTree, Compiler::fgWalkData* data)
+static Compiler::fgWalkResult ComplexityExceedsWalker(GenTree** pTree, Compiler::fgWalkData* data)
{
ComplexityStruct* pComplexity = (ComplexityStruct*)data->pCallbackData;
if (++pComplexity->m_numNodes > pComplexity->m_nodeLimit)
@@ -16184,7 +16182,7 @@ static Compiler::fgWalkResult ComplexityExceedsWalker(GenTreePtr* pTree, Compile
}
}
-bool Compiler::gtComplexityExceeds(GenTreePtr* tree, unsigned limit)
+bool Compiler::gtComplexityExceeds(GenTree** tree, unsigned limit)
{
ComplexityStruct complexity(limit);
if (fgWalkTreePre(tree, &ComplexityExceedsWalker, &complexity) == WALK_ABORT)
@@ -16216,7 +16214,7 @@ bool GenTree::IsPhiDefnStmt()
{
return false;
}
- GenTreePtr asg = gtStmt.gtStmtExpr;
+ GenTree* asg = gtStmt.gtStmtExpr;
return asg->IsPhiDefn();
}
@@ -16259,7 +16257,7 @@ bool GenTree::DefinesLocal(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bo
}
else if (gtOp.gtOp1->OperGet() == GT_IND)
{
- GenTreePtr indArg = gtOp.gtOp1->gtOp.gtOp1;
+ GenTree* indArg = gtOp.gtOp1->gtOp.gtOp1;
return indArg->DefinesLocalAddr(comp, genTypeSize(gtOp.gtOp1->TypeGet()), pLclVarTree, pIsEntire);
}
else if (gtOp.gtOp1->OperIsBlk())
@@ -16273,13 +16271,13 @@ bool GenTree::DefinesLocal(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bo
}
if (blkNode != nullptr)
{
- GenTreePtr destAddr = blkNode->Addr();
- unsigned width = blkNode->gtBlkSize;
+ GenTree* destAddr = blkNode->Addr();
+ unsigned width = blkNode->gtBlkSize;
// Do we care about whether this assigns the entire variable?
if (pIsEntire != nullptr && width == 0)
{
assert(blkNode->gtOper == GT_DYN_BLK);
- GenTreePtr blockWidth = blkNode->AsDynBlk()->gtDynamicSize;
+ GenTree* blockWidth = blkNode->AsDynBlk()->gtDynamicSize;
if (blockWidth->IsCnsIntOrI())
{
if (blockWidth->IsIconHandle())
@@ -16315,7 +16313,7 @@ bool GenTree::DefinesLocalAddr(Compiler* comp, unsigned width, GenTreeLclVarComm
{
if (OperGet() == GT_ADDR || OperGet() == GT_LCL_VAR_ADDR)
{
- GenTreePtr addrArg = this;
+ GenTree* addrArg = this;
if (OperGet() == GT_ADDR)
{
addrArg = gtOp.gtOp1;
@@ -16386,7 +16384,7 @@ bool GenTree::DefinesLocalAddr(Compiler* comp, unsigned width, GenTreeLclVarComm
CLANG_FORMAT_COMMENT_ANCHOR;
#ifdef DEBUG
- GenTreePtr index = gtOp.gtOp2;
+ GenTree* index = gtOp.gtOp2;
if (index != nullptr)
{
assert(!index->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire));
@@ -16394,7 +16392,7 @@ bool GenTree::DefinesLocalAddr(Compiler* comp, unsigned width, GenTreeLclVarComm
#endif // DEBUG
// base
- GenTreePtr base = gtOp.gtOp1;
+ GenTree* base = gtOp.gtOp1;
if (base != nullptr)
{
// Lea could have an Indir as its base.
@@ -16475,7 +16473,7 @@ bool GenTree::IsLocalAddrExpr(Compiler* comp, GenTreeLclVarCommon** pLclVarTree,
if (OperGet() == GT_ADDR)
{
assert(!comp->compRationalIRForm);
- GenTreePtr addrArg = gtOp.gtOp1;
+ GenTree* addrArg = gtOp.gtOp1;
if (addrArg->IsLocal()) // Note that this covers "GT_LCL_FLD."
{
*pLclVarTree = addrArg->AsLclVarCommon();
@@ -16685,9 +16683,9 @@ bool GenTreeIndir::HasIndex()
return Index() != nullptr;
}
-GenTreePtr GenTreeIndir::Base()
+GenTree* GenTreeIndir::Base()
{
- GenTreePtr addr = Addr();
+ GenTree* addr = Addr();
if (isIndirAddrMode())
{
@@ -16871,10 +16869,10 @@ bool GenTreeIntConCommon::AddrNeedsReloc(Compiler* comp)
}
#endif //_TARGET_X86_
-bool GenTree::IsFieldAddr(Compiler* comp, GenTreePtr* pObj, GenTreePtr* pStatic, FieldSeqNode** pFldSeq)
+bool GenTree::IsFieldAddr(Compiler* comp, GenTree** pObj, GenTree** pStatic, FieldSeqNode** pFldSeq)
{
FieldSeqNode* newFldSeq = nullptr;
- GenTreePtr baseAddr = nullptr;
+ GenTree* baseAddr = nullptr;
bool mustBeStatic = false;
FieldSeqNode* statStructFldSeq = nullptr;
@@ -16883,7 +16881,7 @@ bool GenTree::IsFieldAddr(Compiler* comp, GenTreePtr* pObj, GenTreePtr* pStatic,
// Recognize struct static field patterns...
if (OperGet() == GT_IND)
{
- GenTreePtr addr = gtOp.gtOp1;
+ GenTree* addr = gtOp.gtOp1;
GenTreeIntCon* icon = nullptr;
if (addr->OperGet() == GT_CNS_INT)
{
@@ -17171,7 +17169,7 @@ CORINFO_CLASS_HANDLE Compiler::gtGetStructHandle(GenTree* tree)
// isNonNull set true if tree value is known not to be null,
// otherwise a null value is possible.
-CORINFO_CLASS_HANDLE Compiler::gtGetClassHandle(GenTreePtr tree, bool* isExact, bool* isNonNull)
+CORINFO_CLASS_HANDLE Compiler::gtGetClassHandle(GenTree* tree, bool* isExact, bool* isNonNull)
{
// Set default values for our out params.
*isNonNull = false;
@@ -17194,7 +17192,7 @@ CORINFO_CLASS_HANDLE Compiler::gtGetClassHandle(GenTreePtr tree, bool* isExact,
}
// Tunnel through commas.
- GenTreePtr obj = tree->gtEffectiveVal(false);
+ GenTree* obj = tree->gtEffectiveVal(false);
const genTreeOps objOp = obj->OperGet();
switch (objOp)
@@ -17238,8 +17236,8 @@ CORINFO_CLASS_HANDLE Compiler::gtGetClassHandle(GenTreePtr tree, bool* isExact,
{
// If we see a RET_EXPR, recurse through to examine the
// return value expression.
- GenTreePtr retExpr = tree->gtRetExpr.gtInlineCandidate;
- objClass = gtGetClassHandle(retExpr, isExact, isNonNull);
+ GenTree* retExpr = tree->gtRetExpr.gtInlineCandidate;
+ objClass = gtGetClassHandle(retExpr, isExact, isNonNull);
break;
}
@@ -17342,7 +17340,7 @@ CORINFO_CLASS_HANDLE Compiler::gtGetClassHandle(GenTreePtr tree, bool* isExact,
GenTreeIndir* indir = obj->AsIndir();
if (indir->HasBase() && !indir->HasIndex())
{
- GenTreePtr base = indir->Base();
+ GenTree* base = indir->Base();
GenTreeLclVarCommon* lcl = base->IsLocalAddrExpr();
if ((lcl != nullptr) && (base->OperGet() != GT_ADD))
@@ -17380,7 +17378,7 @@ CORINFO_CLASS_HANDLE Compiler::gtGetClassHandle(GenTreePtr tree, bool* isExact,
}
void GenTree::ParseArrayAddress(
- Compiler* comp, ArrayInfo* arrayInfo, GenTreePtr* pArr, ValueNum* pInxVN, FieldSeqNode** pFldSeq)
+ Compiler* comp, ArrayInfo* arrayInfo, GenTree** pArr, ValueNum* pInxVN, FieldSeqNode** pFldSeq)
{
*pArr = nullptr;
ValueNum inxVN = ValueNumStore::NoVN;
@@ -17519,7 +17517,7 @@ void GenTree::ParseArrayAddress(
}
void GenTree::ParseArrayAddressWork(
- Compiler* comp, ssize_t inputMul, GenTreePtr* pArr, ValueNum* pInxVN, ssize_t* pOffset, FieldSeqNode** pFldSeq)
+ Compiler* comp, ssize_t inputMul, GenTree** pArr, ValueNum* pInxVN, ssize_t* pOffset, FieldSeqNode** pFldSeq)
{
if (TypeGet() == TYP_REF)
{
@@ -17549,8 +17547,8 @@ void GenTree::ParseArrayAddressWork(
case GT_MUL:
{
// If one op is a constant, continue parsing down.
- ssize_t subMul = 0;
- GenTreePtr nonConst = nullptr;
+ ssize_t subMul = 0;
+ GenTree* nonConst = nullptr;
if (gtOp.gtOp1->IsCnsIntOrI())
{
// If the other arg is an int constant, and is a "not-a-field", choose
@@ -17635,7 +17633,7 @@ bool GenTree::ParseArrayElemForm(Compiler* comp, ArrayInfo* arrayInfo, FieldSeqN
}
// Otherwise...
- GenTreePtr addr = AsIndir()->Addr();
+ GenTree* addr = AsIndir()->Addr();
return addr->ParseArrayElemAddrForm(comp, arrayInfo, pFldSeq);
}
else
@@ -17650,8 +17648,8 @@ bool GenTree::ParseArrayElemAddrForm(Compiler* comp, ArrayInfo* arrayInfo, Field
{
case GT_ADD:
{
- GenTreePtr arrAddr = nullptr;
- GenTreePtr offset = nullptr;
+ GenTree* arrAddr = nullptr;
+ GenTree* offset = nullptr;
if (gtOp.gtOp1->TypeGet() == TYP_BYREF)
{
arrAddr = gtOp.gtOp1;
@@ -17675,7 +17673,7 @@ bool GenTree::ParseArrayElemAddrForm(Compiler* comp, ArrayInfo* arrayInfo, Field
case GT_ADDR:
{
- GenTreePtr addrArg = gtOp.gtOp1;
+ GenTree* addrArg = gtOp.gtOp1;
if (addrArg->OperGet() != GT_IND)
{
return false;
@@ -17876,7 +17874,7 @@ bool FieldSeqNode::IsPseudoField()
#ifdef FEATURE_SIMD
GenTreeSIMD* Compiler::gtNewSIMDNode(
- var_types type, GenTreePtr op1, SIMDIntrinsicID simdIntrinsicID, var_types baseType, unsigned size)
+ var_types type, GenTree* op1, SIMDIntrinsicID simdIntrinsicID, var_types baseType, unsigned size)
{
assert(op1 != nullptr);
SetOpLclRelatedToSIMDIntrinsic(op1);
@@ -17885,7 +17883,7 @@ GenTreeSIMD* Compiler::gtNewSIMDNode(
}
GenTreeSIMD* Compiler::gtNewSIMDNode(
- var_types type, GenTreePtr op1, GenTreePtr op2, SIMDIntrinsicID simdIntrinsicID, var_types baseType, unsigned size)
+ var_types type, GenTree* op1, GenTree* op2, SIMDIntrinsicID simdIntrinsicID, var_types baseType, unsigned size)
{
assert(op1 != nullptr);
SetOpLclRelatedToSIMDIntrinsic(op1);
@@ -17901,7 +17899,7 @@ GenTreeSIMD* Compiler::gtNewSIMDNode(
// Arguments:
// op - The tree, to be an operand of a new GT_SIMD node, to check.
//
-void Compiler::SetOpLclRelatedToSIMDIntrinsic(GenTreePtr op)
+void Compiler::SetOpLclRelatedToSIMDIntrinsic(GenTree* op)
{
if (op != nullptr)
{
diff --git a/src/jit/gentree.h b/src/jit/gentree.h
index 6fee2ad0f9..5f534db8f1 100644
--- a/src/jit/gentree.h
+++ b/src/jit/gentree.h
@@ -313,8 +313,6 @@ class GenTreeOperandIterator;
/*****************************************************************************/
-typedef struct GenTree* GenTreePtr;
-
// Forward declarations of the subtypes
#define GTSTRUCT_0(fn, en) struct GenTree##fn;
#define GTSTRUCT_1(fn, en) struct GenTree##fn;
@@ -677,7 +675,7 @@ public:
#endif
// Copy the _gtRegNum/_gtRegPair/gtRegTag fields
- void CopyReg(GenTreePtr from);
+ void CopyReg(GenTree* from);
bool gtHasReg() const;
int GetRegisterDstCount() const;
@@ -704,7 +702,7 @@ public:
regMaskSmall gtUsedRegs; // set of used (trashed) registers
#endif // LEGACY_BACKEND
- void SetVNsFromNode(GenTreePtr tree)
+ void SetVNsFromNode(GenTree* tree)
{
gtVNPair = tree->gtVNPair;
}
@@ -1035,8 +1033,8 @@ public:
// clang-format on
- GenTreePtr gtNext;
- GenTreePtr gtPrev;
+ GenTree* gtNext;
+ GenTree* gtPrev;
#ifdef DEBUG
unsigned gtTreeID;
@@ -1760,11 +1758,11 @@ public:
return OperIsAnyList(gtOper);
}
- inline GenTreePtr MoveNext();
+ inline GenTree* MoveNext();
- inline GenTreePtr Current();
+ inline GenTree* Current();
- inline GenTreePtr* pCurrent();
+ inline GenTree** pCurrent();
inline GenTree* gtGetOp1() const;
@@ -1778,7 +1776,7 @@ public:
// Given a tree node, if this is a child of that node, return the pointer to the child node so that it
// can be modified; otherwise, return null.
- GenTreePtr* gtGetChildPointer(GenTreePtr parent) const;
+ GenTree** gtGetChildPointer(GenTree* parent) const;
// Given a tree node, if this node uses that node, return the use as an out parameter and return true.
// Otherwise, return false.
@@ -1791,11 +1789,11 @@ private:
public:
// Get the parent of this node, and optionally capture the pointer to the child so that it can be modified.
- GenTreePtr gtGetParent(GenTreePtr** parentChildPtrPtr) const;
+ GenTree* gtGetParent(GenTree*** parentChildPtrPtr) const;
void ReplaceOperand(GenTree** useEdge, GenTree* replacement);
- inline GenTreePtr gtEffectiveVal(bool commaOnly = false);
+ inline GenTree* gtEffectiveVal(bool commaOnly = false);
// Tunnel through any GT_RET_EXPRs
inline GenTree* gtRetExprVal();
@@ -1826,7 +1824,7 @@ public:
// Returns true if "addr" is a GT_ADD node, at least one of whose arguments is an integer
// (<= 32 bit) constant. If it returns true, it sets "*offset" to (one of the) constant value(s), and
// "*addr" to the other argument.
- bool IsAddWithI32Const(GenTreePtr* addr, int* offset);
+ bool IsAddWithI32Const(GenTree** addr, int* offset);
public:
#if SMALL_TREE_NODES
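
The comment above documents IsAddWithI32Const, which pattern-matches an add whose operand is a 32-bit constant and hands back the other operand plus the offset. A rough, self-contained sketch of that splitting logic under a toy node shape (isAdd/isIntCon/conValue are illustrative fields, not the JIT's representation):

#include <climits>
#include <cstdint>

// Toy node shape: an add node with two non-null operands, where a leaf may carry an
// integer constant.
struct ANode
{
    bool    isAdd;
    bool    isIntCon;
    int64_t conValue;
    ANode*  op1;
    ANode*  op2;
};

// Mirrors the documented contract: on success, *addr receives the non-constant operand
// and *offset receives the (32-bit-sized) constant value.
static bool isAddWithI32Const(ANode* tree, ANode** addr, int* offset)
{
    if (!tree->isAdd)
    {
        return false;
    }
    ANode* con   = nullptr;
    ANode* other = nullptr;
    if (tree->op1->isIntCon)
    {
        con   = tree->op1;
        other = tree->op2;
    }
    else if (tree->op2->isIntCon)
    {
        con   = tree->op2;
        other = tree->op1;
    }
    if ((con == nullptr) || (con->conValue < INT_MIN) || (con->conValue > INT_MAX))
    {
        return false; // no constant operand, or it does not fit in 32 bits
    }
    *addr   = other;
    *offset = static_cast<int>(con->conValue);
    return true;
}
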
@@ -1853,7 +1851,7 @@ public:
//---------------------------------------------------------------------
- static bool Compare(GenTreePtr op1, GenTreePtr op2, bool swapOK = false);
+ static bool Compare(GenTree* op1, GenTree* op2, bool swapOK = false);
//---------------------------------------------------------------------
@@ -1955,7 +1953,7 @@ public:
// -- the field sequence must also be checked.
// If it is a field address, the field sequence will be a sequence of length >= 1,
// starting with an instance or static field, and optionally continuing with struct fields.
- bool IsFieldAddr(Compiler* comp, GenTreePtr* pObj, GenTreePtr* pStatic, FieldSeqNode** pFldSeq);
+ bool IsFieldAddr(Compiler* comp, GenTree** pObj, GenTree** pStatic, FieldSeqNode** pFldSeq);
// Requires "this" to be the address of an array (the child of a GT_IND labeled with GTF_IND_ARR_INDEX).
// Sets "pArr" to the node representing the array (either an array object pointer, or perhaps a byref to the some
@@ -1964,11 +1962,11 @@ public:
// Sets "*inxVN" to the value number inferred for the array index.
// Sets "*pFldSeq" to the sequence, if any, of struct fields used to index into the array element.
void ParseArrayAddress(
- Compiler* comp, struct ArrayInfo* arrayInfo, GenTreePtr* pArr, ValueNum* pInxVN, FieldSeqNode** pFldSeq);
+ Compiler* comp, struct ArrayInfo* arrayInfo, GenTree** pArr, ValueNum* pInxVN, FieldSeqNode** pFldSeq);
// Helper method for the above.
void ParseArrayAddressWork(
- Compiler* comp, ssize_t inputMul, GenTreePtr* pArr, ValueNum* pInxVN, ssize_t* pOffset, FieldSeqNode** pFldSeq);
+ Compiler* comp, ssize_t inputMul, GenTree** pArr, ValueNum* pInxVN, ssize_t* pOffset, FieldSeqNode** pFldSeq);
// Requires "this" to be a GT_IND. Requires the outermost caller to set "*pFldSeq" to nullptr.
// Returns true if it is an array index expression, or access to a (sequence of) struct field(s)
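
ParseArrayAddress and ParseArrayAddressWork, described above, symbolically decompose an address of the shape arrayRef + firstElemOffset + index * elemSize (plus any struct-field offsets) back into its pieces. As a plain-arithmetic illustration of what is being inverted, assuming a single-dimension array with a fixed header before the first element:

#include <cassert>
#include <cstddef>

// Forward computation the parser is inverting: the address of element 'index'
// in a single-dimension array with a fixed header before the first element.
static size_t elementAddress(size_t arrRef, size_t firstElemOffset, size_t elemSize, size_t index)
{
    return arrRef + firstElemOffset + index * elemSize;
}

// The inverse direction, performed symbolically on the tree by ParseArrayAddress:
// strip the base and header offset, then divide the remaining byte offset by the
// element size to recover the index (and any leftover intra-element offset).
static void recoverIndex(size_t addr, size_t arrRef, size_t firstElemOffset, size_t elemSize,
                         size_t* index, size_t* intraElemOffset)
{
    size_t byteOffset = addr - arrRef - firstElemOffset;
    *index            = byteOffset / elemSize;
    *intraElemOffset  = byteOffset % elemSize;
}

int main()
{
    size_t addr = elementAddress(0x1000, 16, 8, 5); // element 5 of an 8-byte-element array
    size_t index;
    size_t rest;
    recoverIndex(addr, 0x1000, 16, 8, &index, &rest);
    assert(index == 5 && rest == 0);
}
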
@@ -2192,7 +2190,7 @@ public:
unsigned NumChildren();
// Requires "childNum < NumChildren()". Returns the "n"th child of "this."
- GenTreePtr GetChild(unsigned childNum);
+ GenTree* GetChild(unsigned childNum);
// Returns an iterator that will produce the use edge to each operand of this node. Differs
// from the sequence of nodes produced by a loop over `GetChild` in its handling of call, phi,
@@ -2472,7 +2470,7 @@ public:
// like gtUnOp.gtOp1 instead of gtOp.gtOp1.
struct GenTreeUnOp : public GenTree
{
- GenTreePtr gtOp1;
+ GenTree* gtOp1;
protected:
GenTreeUnOp(genTreeOps oper, var_types type DEBUGARG(bool largeNode = false))
@@ -2480,7 +2478,7 @@ protected:
{
}
- GenTreeUnOp(genTreeOps oper, var_types type, GenTreePtr op1 DEBUGARG(bool largeNode = false))
+ GenTreeUnOp(genTreeOps oper, var_types type, GenTree* op1 DEBUGARG(bool largeNode = false))
: GenTree(oper, type DEBUGARG(largeNode)), gtOp1(op1)
{
assert(op1 != nullptr || NullOp1Legal());
@@ -2499,9 +2497,9 @@ protected:
struct GenTreeOp : public GenTreeUnOp
{
- GenTreePtr gtOp2;
+ GenTree* gtOp2;
- GenTreeOp(genTreeOps oper, var_types type, GenTreePtr op1, GenTreePtr op2 DEBUGARG(bool largeNode = false))
+ GenTreeOp(genTreeOps oper, var_types type, GenTree* op1, GenTree* op2 DEBUGARG(bool largeNode = false))
: GenTreeUnOp(oper, type, op1 DEBUGARG(largeNode)), gtOp2(op2)
{
// comparisons are always integral types
@@ -2989,13 +2987,13 @@ public:
struct GenTreeCast : public GenTreeOp
{
- GenTreePtr& CastOp()
+ GenTree*& CastOp()
{
return gtOp1;
}
var_types gtCastType;
- GenTreeCast(var_types type, GenTreePtr op, var_types castType DEBUGARG(bool largeNode = false))
+ GenTreeCast(var_types type, GenTree* op, var_types castType DEBUGARG(bool largeNode = false))
: GenTreeOp(GT_CAST, type, op, nullptr DEBUGARG(largeNode)), gtCastType(castType)
{
}
@@ -3013,20 +3011,20 @@ struct GenTreeBox : public GenTreeUnOp
// An expanded helper call to implement the "box" if we don't get
// rid of it any other way. Must be in same position as op1.
- GenTreePtr& BoxOp()
+ GenTree*& BoxOp()
{
return gtOp1;
}
// This is the statement that contains the assignment tree when the node is an inlined GT_BOX on a value
// type
- GenTreePtr gtAsgStmtWhenInlinedBoxValue;
+ GenTree* gtAsgStmtWhenInlinedBoxValue;
// And this is the statement that copies from the value being boxed to the box payload
- GenTreePtr gtCopyStmtWhenInlinedBoxValue;
+ GenTree* gtCopyStmtWhenInlinedBoxValue;
- GenTreeBox(var_types type,
- GenTreePtr boxOp,
- GenTreePtr asgStmtWhenInlinedBoxValue,
- GenTreePtr copyStmtWhenInlinedBoxValue)
+ GenTreeBox(var_types type,
+ GenTree* boxOp,
+ GenTree* asgStmtWhenInlinedBoxValue,
+ GenTree* copyStmtWhenInlinedBoxValue)
: GenTreeUnOp(GT_BOX, type, boxOp)
, gtAsgStmtWhenInlinedBoxValue(asgStmtWhenInlinedBoxValue)
, gtCopyStmtWhenInlinedBoxValue(copyStmtWhenInlinedBoxValue)
@@ -3043,7 +3041,7 @@ struct GenTreeBox : public GenTreeUnOp
struct GenTreeField : public GenTree
{
- GenTreePtr gtFldObj;
+ GenTree* gtFldObj;
CORINFO_FIELD_HANDLE gtFldHnd;
DWORD gtFldOffset;
bool gtFldMayOverlap;
@@ -3074,7 +3072,7 @@ struct GenTreeField : public GenTree
// method names for the arguments.
struct GenTreeArgList : public GenTreeOp
{
- GenTreePtr& Current()
+ GenTree*& Current()
{
return gtOp1;
}
@@ -3090,15 +3088,15 @@ struct GenTreeArgList : public GenTreeOp
}
#endif
- GenTreeArgList(GenTreePtr arg) : GenTreeArgList(arg, nullptr)
+ GenTreeArgList(GenTree* arg) : GenTreeArgList(arg, nullptr)
{
}
- GenTreeArgList(GenTreePtr arg, GenTreeArgList* rest) : GenTreeArgList(GT_LIST, arg, rest)
+ GenTreeArgList(GenTree* arg, GenTreeArgList* rest) : GenTreeArgList(GT_LIST, arg, rest)
{
}
- GenTreeArgList(genTreeOps oper, GenTreePtr arg, GenTreeArgList* rest) : GenTreeOp(oper, TYP_VOID, arg, rest)
+ GenTreeArgList(genTreeOps oper, GenTree* arg, GenTreeArgList* rest) : GenTreeOp(oper, TYP_VOID, arg, rest)
{
assert(OperIsAnyList(oper));
assert((arg != nullptr) && arg->IsValidCallArgument());
@@ -3140,7 +3138,7 @@ struct GenTreeFieldList : public GenTreeArgList
return *reinterpret_cast<GenTreeFieldList**>(&gtOp2);
}
- GenTreeFieldList(GenTreePtr arg, unsigned fieldOffset, var_types fieldType, GenTreeFieldList* prevList)
+ GenTreeFieldList(GenTree* arg, unsigned fieldOffset, var_types fieldType, GenTreeFieldList* prevList)
: GenTreeArgList(GT_FIELD_LIST, arg, nullptr)
{
// While GT_FIELD_LIST can be in a GT_LIST, GT_FIELD_LISTs cannot be nested or have GT_LISTs.
@@ -3171,11 +3169,11 @@ struct GenTreeFieldList : public GenTreeArgList
// TODO-Cleanup: If we could get these accessors used everywhere, then we could switch them.
struct GenTreeColon : public GenTreeOp
{
- GenTreePtr& ThenNode()
+ GenTree*& ThenNode()
{
return gtOp2;
}
- GenTreePtr& ElseNode()
+ GenTree*& ElseNode()
{
return gtOp1;
}
@@ -3186,7 +3184,7 @@ struct GenTreeColon : public GenTreeOp
}
#endif
- GenTreeColon(var_types typ, GenTreePtr thenNode, GenTreePtr elseNode) : GenTreeOp(GT_COLON, typ, elseNode, thenNode)
+ GenTreeColon(var_types typ, GenTree* thenNode, GenTree* elseNode) : GenTreeOp(GT_COLON, typ, elseNode, thenNode)
{
}
};
@@ -3341,7 +3339,7 @@ class fgArgInfo;
struct GenTreeCall final : public GenTree
{
- GenTreePtr gtCallObjp; // The instance argument ('this' pointer)
+ GenTree* gtCallObjp; // The instance argument ('this' pointer)
GenTreeArgList* gtCallArgs; // The list of arguments in original evaluation order
GenTreeArgList* gtCallLateArgs; // On x86: The register arguments in an optimal order
// On ARM/x64: - also includes any outgoing arg space arguments
@@ -3880,7 +3878,7 @@ struct GenTreeCall final : public GenTree
union {
// only used for CALLI unmanaged calls (CT_INDIRECT)
- GenTreePtr gtCallCookie;
+ GenTree* gtCallCookie;
// gtInlineCandidateInfo is only used when inlining methods
InlineCandidateInfo* gtInlineCandidateInfo;
void* gtStubCallStubAddr; // GTF_CALL_VIRT_STUB - these are never inlined
@@ -3893,7 +3891,7 @@ struct GenTreeCall final : public GenTree
union {
CORINFO_METHOD_HANDLE gtCallMethHnd; // CT_USER_FUNC
- GenTreePtr gtCallAddr; // CT_INDIRECT
+ GenTree* gtCallAddr; // CT_INDIRECT
};
#ifdef FEATURE_READYTORUN_COMPILER
@@ -3939,11 +3937,11 @@ struct GenTreeCall final : public GenTree
struct GenTreeCmpXchg : public GenTree
{
- GenTreePtr gtOpLocation;
- GenTreePtr gtOpValue;
- GenTreePtr gtOpComparand;
+ GenTree* gtOpLocation;
+ GenTree* gtOpValue;
+ GenTree* gtOpComparand;
- GenTreeCmpXchg(var_types type, GenTreePtr loc, GenTreePtr val, GenTreePtr comparand)
+ GenTreeCmpXchg(var_types type, GenTree* loc, GenTree* val, GenTree* comparand)
: GenTree(GT_CMPXCHG, type), gtOpLocation(loc), gtOpValue(val), gtOpComparand(comparand)
{
// There's no reason to do a compare-exchange on a local location, so we'll assume that all of these
@@ -3971,7 +3969,7 @@ struct GenTreeMultiRegOp : public GenTreeOp
static const unsigned PACKED_GTF_SPILLED = 2;
unsigned char gtSpillFlags;
- GenTreeMultiRegOp(genTreeOps oper, var_types type, GenTreePtr op1, GenTreePtr op2)
+ GenTreeMultiRegOp(genTreeOps oper, var_types type, GenTree* op1, GenTree* op2)
: GenTreeOp(oper, type, op1, op2), gtOtherReg(REG_NA)
{
ClearOtherRegFlags();
@@ -4143,7 +4141,7 @@ struct GenTreeQmark : public GenTreeOp
// The "Compiler*" argument is not a DEBUGARG here because we use it to keep track of the set of
// (possible) QMark nodes.
- GenTreeQmark(var_types type, GenTreePtr cond, GenTreePtr colonOp, class Compiler* comp);
+ GenTreeQmark(var_types type, GenTree* cond, GenTree* colonOp, class Compiler* comp);
#if DEBUGGABLE_GENTREE
GenTreeQmark() : GenTreeOp(GT_QMARK, TYP_INT, nullptr, nullptr)
@@ -4164,16 +4162,13 @@ struct GenTreeIntrinsic : public GenTreeOp
CORINFO_CONST_LOOKUP gtEntryPoint;
#endif
- GenTreeIntrinsic(var_types type, GenTreePtr op1, CorInfoIntrinsics intrinsicId, CORINFO_METHOD_HANDLE methodHandle)
+ GenTreeIntrinsic(var_types type, GenTree* op1, CorInfoIntrinsics intrinsicId, CORINFO_METHOD_HANDLE methodHandle)
: GenTreeOp(GT_INTRINSIC, type, op1, nullptr), gtIntrinsicId(intrinsicId), gtMethodHandle(methodHandle)
{
}
- GenTreeIntrinsic(var_types type,
- GenTreePtr op1,
- GenTreePtr op2,
- CorInfoIntrinsics intrinsicId,
- CORINFO_METHOD_HANDLE methodHandle)
+ GenTreeIntrinsic(
+ var_types type, GenTree* op1, GenTree* op2, CorInfoIntrinsics intrinsicId, CORINFO_METHOD_HANDLE methodHandle)
: GenTreeOp(GT_INTRINSIC, type, op1, op2), gtIntrinsicId(intrinsicId), gtMethodHandle(methodHandle)
{
}
@@ -4190,8 +4185,7 @@ struct GenTreeJitIntrinsic : public GenTreeOp
var_types gtSIMDBaseType; // SIMD vector base type
unsigned gtSIMDSize; // SIMD vector size in bytes, use 0 for scalar intrinsics
- GenTreeJitIntrinsic(
- genTreeOps oper, var_types type, GenTreePtr op1, GenTreePtr op2, var_types baseType, unsigned size)
+ GenTreeJitIntrinsic(genTreeOps oper, var_types type, GenTree* op1, GenTree* op2, var_types baseType, unsigned size)
: GenTreeOp(oper, type, op1, op2), gtSIMDBaseType(baseType), gtSIMDSize(size)
{
}
@@ -4215,17 +4209,13 @@ struct GenTreeSIMD : public GenTreeJitIntrinsic
{
SIMDIntrinsicID gtSIMDIntrinsicID; // operation Id
- GenTreeSIMD(var_types type, GenTreePtr op1, SIMDIntrinsicID simdIntrinsicID, var_types baseType, unsigned size)
+ GenTreeSIMD(var_types type, GenTree* op1, SIMDIntrinsicID simdIntrinsicID, var_types baseType, unsigned size)
: GenTreeJitIntrinsic(GT_SIMD, type, op1, nullptr, baseType, size), gtSIMDIntrinsicID(simdIntrinsicID)
{
}
- GenTreeSIMD(var_types type,
- GenTreePtr op1,
- GenTreePtr op2,
- SIMDIntrinsicID simdIntrinsicID,
- var_types baseType,
- unsigned size)
+ GenTreeSIMD(
+ var_types type, GenTree* op1, GenTree* op2, SIMDIntrinsicID simdIntrinsicID, var_types baseType, unsigned size)
: GenTreeJitIntrinsic(GT_SIMD, type, op1, op2, baseType, size), gtSIMDIntrinsicID(simdIntrinsicID)
{
}
@@ -4282,11 +4272,11 @@ inline bool GenTree::OperIsSimdHWIntrinsic() const
struct GenTreeIndex : public GenTreeOp
{
- GenTreePtr& Arr()
+ GenTree*& Arr()
{
return gtOp1;
}
- GenTreePtr& Index()
+ GenTree*& Index()
{
return gtOp2;
}
@@ -4294,7 +4284,7 @@ struct GenTreeIndex : public GenTreeOp
unsigned gtIndElemSize; // size of elements in the array
CORINFO_CLASS_HANDLE gtStructElemClass; // If the element type is a struct, this is the struct type.
- GenTreeIndex(var_types type, GenTreePtr arr, GenTreePtr ind, unsigned indElemSize)
+ GenTreeIndex(var_types type, GenTree* arr, GenTree* ind, unsigned indElemSize)
: GenTreeOp(GT_INDEX, type, arr, ind)
, gtIndElemSize(indElemSize)
, gtStructElemClass(nullptr) // We always initialize this after construction.
@@ -4392,7 +4382,7 @@ struct GenTreeIndexAddr : public GenTreeOp
struct GenTreeArrLen : public GenTreeUnOp
{
- GenTreePtr& ArrRef()
+ GenTree*& ArrRef()
{
return gtOp1;
} // the array address node
@@ -4405,7 +4395,7 @@ public:
return gtArrLenOffset;
}
- GenTreeArrLen(var_types type, GenTreePtr arrRef, int lenOffset)
+ GenTreeArrLen(var_types type, GenTree* arrRef, int lenOffset)
: GenTreeUnOp(GT_ARR_LENGTH, type, arrRef), gtArrLenOffset(lenOffset)
{
}
@@ -4426,10 +4416,10 @@ public:
struct GenTreeBoundsChk : public GenTree
{
- GenTreePtr gtIndex; // The index expression.
- GenTreePtr gtArrLen; // An expression for the length of the array being indexed.
+ GenTree* gtIndex; // The index expression.
+ GenTree* gtArrLen; // An expression for the length of the array being indexed.
- GenTreePtr gtIndRngFailBB; // Label to jump to for array-index-out-of-range
+ GenTree* gtIndRngFailBB; // Label to jump to for array-index-out-of-range
SpecialCodeKind gtThrowKind; // Kind of throw block to branch to on failure
/* Only out-of-ranges at same stack depth can jump to the same label (finding return address is easier)
@@ -4437,7 +4427,7 @@ struct GenTreeBoundsChk : public GenTree
optimizer has a chance of eliminating some of the rng checks */
unsigned gtStkDepth;
- GenTreeBoundsChk(genTreeOps oper, var_types type, GenTreePtr index, GenTreePtr arrLen, SpecialCodeKind kind)
+ GenTreeBoundsChk(genTreeOps oper, var_types type, GenTree* index, GenTree* arrLen, SpecialCodeKind kind)
: GenTree(oper, type)
, gtIndex(index)
, gtArrLen(arrLen)
@@ -4456,7 +4446,7 @@ struct GenTreeBoundsChk : public GenTree
#endif
// If the gtArrLen is really an array length, returns array reference, else "NULL".
- GenTreePtr GetArray()
+ GenTree* GetArray()
{
if (gtArrLen->OperGet() == GT_ARR_LENGTH)
{
@@ -4474,10 +4464,10 @@ struct GenTreeBoundsChk : public GenTree
struct GenTreeArrElem : public GenTree
{
- GenTreePtr gtArrObj;
+ GenTree* gtArrObj;
#define GT_ARR_MAX_RANK 3
- GenTreePtr gtArrInds[GT_ARR_MAX_RANK]; // Indices
+ GenTree* gtArrInds[GT_ARR_MAX_RANK]; // Indices
unsigned char gtArrRank; // Rank of the array
unsigned char gtArrElemSize; // !!! Caution, this is an "unsigned char", it is used only
@@ -4488,12 +4478,8 @@ struct GenTreeArrElem : public GenTree
var_types gtArrElemType; // The array element type
// Requires that "inds" is a pointer to an array of "rank" GenTreePtrs for the indices.
- GenTreeArrElem(var_types type,
- GenTreePtr arr,
- unsigned char rank,
- unsigned char elemSize,
- var_types elemType,
- GenTreePtr* inds)
+ GenTreeArrElem(
+ var_types type, GenTree* arr, unsigned char rank, unsigned char elemSize, var_types elemType, GenTree** inds)
: GenTree(GT_ARR_ELEM, type), gtArrObj(arr), gtArrRank(rank), gtArrElemSize(elemSize), gtArrElemType(elemType)
{
for (unsigned char i = 0; i < rank; i++)
@@ -4539,12 +4525,12 @@ struct GenTreeArrElem : public GenTree
struct GenTreeArrIndex : public GenTreeOp
{
// The array object - may be any expression producing an Array reference, but is likely to be a lclVar.
- GenTreePtr& ArrObj()
+ GenTree*& ArrObj()
{
return gtOp1;
}
// The index expression - may be any integral expression.
- GenTreePtr& IndexExpr()
+ GenTree*& IndexExpr()
{
return gtOp2;
}
@@ -4553,8 +4539,8 @@ struct GenTreeArrIndex : public GenTreeOp
var_types gtArrElemType; // The array element type
GenTreeArrIndex(var_types type,
- GenTreePtr arrObj,
- GenTreePtr indexExpr,
+ GenTree* arrObj,
+ GenTree* indexExpr,
unsigned char currDim,
unsigned char arrRank,
var_types elemType)
@@ -4605,21 +4591,21 @@ protected:
//
struct GenTreeArrOffs : public GenTree
{
- GenTreePtr gtOffset; // The accumulated offset for lower dimensions - must be TYP_I_IMPL, and
+ GenTree* gtOffset; // The accumulated offset for lower dimensions - must be TYP_I_IMPL, and
// will either be a CSE temp, the constant 0, or another GenTreeArrOffs node.
- GenTreePtr gtIndex; // The effective index for the current dimension - must be non-negative
+ GenTree* gtIndex; // The effective index for the current dimension - must be non-negative
// and can be any expression (though it is likely to be either a GenTreeArrIndex
// node, a lclVar, or a constant).
- GenTreePtr gtArrObj; // The array object - may be any expression producing an Array reference,
+ GenTree* gtArrObj; // The array object - may be any expression producing an Array reference,
// but is likely to be a lclVar.
unsigned char gtCurrDim; // The current dimension
unsigned char gtArrRank; // Rank of the array
var_types gtArrElemType; // The array element type
GenTreeArrOffs(var_types type,
- GenTreePtr offset,
- GenTreePtr index,
- GenTreePtr arrObj,
+ GenTree* offset,
+ GenTree* index,
+ GenTree* arrObj,
unsigned char currDim,
unsigned char rank,
var_types elemType)
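
The comment block above says gtOffset accumulates the offsets of the lower dimensions, chaining one GT_ARR_OFFS per dimension. Concretely that is the row-major recurrence offset = offset * length[dim] + index[dim]; a small standalone sketch of the accumulation (plain arrays stand in for the per-dimension nodes):

#include <cstddef>
#include <cstdio>

// Row-major accumulation that a GT_ARR_OFFS chain expresses one dimension at a time:
// after processing dimension d, 'offset' is the flat index of the element within the
// sub-array spanned by dimensions 0..d.
static size_t accumulateOffset(const size_t* lengths, const size_t* indices, size_t rank)
{
    size_t offset = 0;
    for (size_t dim = 0; dim < rank; dim++)
    {
        offset = offset * lengths[dim] + indices[dim];
    }
    return offset;
}

int main()
{
    size_t lengths[] = {4, 5, 6};
    size_t indices[] = {2, 3, 1};
    // (2 * 5 + 3) * 6 + 1 = 79
    std::printf("flat index = %zu\n", accumulateOffset(lengths, indices, 3));
}
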
@@ -4667,7 +4653,7 @@ struct GenTreeAddrMode : public GenTreeOp
{
return gtOp1 != nullptr;
}
- GenTreePtr& Base()
+ GenTree*& Base()
{
return gtOp1;
}
@@ -4677,7 +4663,7 @@ struct GenTreeAddrMode : public GenTreeOp
{
return gtOp2 != nullptr;
}
- GenTreePtr& Index()
+ GenTree*& Index()
{
return gtOp2;
}
@@ -4700,7 +4686,7 @@ private:
unsigned gtOffset; // The offset to add
public:
- GenTreeAddrMode(var_types type, GenTreePtr base, GenTreePtr index, unsigned scale, unsigned offset)
+ GenTreeAddrMode(var_types type, GenTree* base, GenTree* index, unsigned scale, unsigned offset)
: GenTreeOp(GT_LEA, type, base, index)
{
assert(base != nullptr || index != nullptr);
@@ -4723,7 +4709,7 @@ struct GenTreeIndir : public GenTreeOp
// The address for the indirection.
// Since GenTreeDynBlk derives from this, but is an "EXOP" (i.e. it has extra fields),
// we can't access Op1 and Op2 in the normal manner if we may have a DynBlk.
- GenTreePtr& Addr()
+ GenTree*& Addr()
{
return gtOp1;
}
@@ -4805,7 +4791,7 @@ public:
bool gtBlkOpGcUnsafe;
- GenTreeBlk(genTreeOps oper, var_types type, GenTreePtr addr, unsigned size)
+ GenTreeBlk(genTreeOps oper, var_types type, GenTree* addr, unsigned size)
: GenTreeIndir(oper, type, addr, nullptr)
, gtBlkSize(size)
, gtBlkOpKind(BlkOpKindInvalid)
@@ -4815,7 +4801,7 @@ public:
gtFlags |= (addr->gtFlags & GTF_ALL_EFFECT);
}
- GenTreeBlk(genTreeOps oper, var_types type, GenTreePtr addr, GenTreePtr data, unsigned size)
+ GenTreeBlk(genTreeOps oper, var_types type, GenTree* addr, GenTree* data, unsigned size)
: GenTreeIndir(oper, type, addr, data), gtBlkSize(size), gtBlkOpKind(BlkOpKindInvalid), gtBlkOpGcUnsafe(false)
{
assert(OperIsBlk(oper));
@@ -4900,7 +4886,7 @@ struct GenTreeObj : public GenTreeBlk
}
}
- GenTreeObj(var_types type, GenTreePtr addr, CORINFO_CLASS_HANDLE cls, unsigned size)
+ GenTreeObj(var_types type, GenTree* addr, CORINFO_CLASS_HANDLE cls, unsigned size)
: GenTreeBlk(GT_OBJ, type, addr, size), gtClass(cls)
{
// By default, an OBJ is assumed to be a global reference.
@@ -4909,7 +4895,7 @@ struct GenTreeObj : public GenTreeBlk
_gtGcPtrCount = UINT32_MAX;
}
- GenTreeObj(var_types type, GenTreePtr addr, GenTreePtr data, CORINFO_CLASS_HANDLE cls, unsigned size)
+ GenTreeObj(var_types type, GenTree* addr, GenTree* data, CORINFO_CLASS_HANDLE cls, unsigned size)
: GenTreeBlk(GT_STORE_OBJ, type, addr, data, size), gtClass(cls)
{
// By default, an OBJ is assumed to be a global reference.
@@ -4933,10 +4919,10 @@ struct GenTreeObj : public GenTreeBlk
struct GenTreeDynBlk : public GenTreeBlk
{
public:
- GenTreePtr gtDynamicSize;
- bool gtEvalSizeFirst;
+ GenTree* gtDynamicSize;
+ bool gtEvalSizeFirst;
- GenTreeDynBlk(GenTreePtr addr, GenTreePtr dynamicSize)
+ GenTreeDynBlk(GenTree* addr, GenTree* dynamicSize)
: GenTreeBlk(GT_DYN_BLK, TYP_STRUCT, addr, 0), gtDynamicSize(dynamicSize), gtEvalSizeFirst(false)
{
// Conservatively the 'addr' could be null or point into the global heap.
@@ -5023,7 +5009,7 @@ struct GenTreeStoreInd : public GenTreeIndir
#endif
}
- GenTreePtr& Data()
+ GenTree*& Data()
{
return gtOp2;
}
@@ -5068,8 +5054,8 @@ class InlineContext;
struct GenTreeStmt : public GenTree
{
- GenTreePtr gtStmtExpr; // root of the expression tree
- GenTreePtr gtStmtList; // first node (for forward walks)
+ GenTree* gtStmtExpr; // root of the expression tree
+ GenTree* gtStmtList; // first node (for forward walks)
InlineContext* gtInlineContext; // The inline context for this statement.
IL_OFFSETX gtStmtILoffsx; // instr offset (if available)
@@ -5105,7 +5091,7 @@ struct GenTreeStmt : public GenTree
}
}
- GenTreeStmt(GenTreePtr expr, IL_OFFSETX offset)
+ GenTreeStmt(GenTree* expr, IL_OFFSETX offset)
: GenTree(GT_STMT, TYP_VOID)
, gtStmtExpr(expr)
, gtStmtList(nullptr)
@@ -5218,7 +5204,7 @@ struct GenTreePutArgStk : public GenTreeUnOp
GenTreePutArgStk(genTreeOps oper,
var_types type,
- GenTreePtr op1,
+ GenTree* op1,
unsigned slotNum
PUT_STRUCT_ARG_STK_ONLY_ARG(unsigned numSlots),
bool putInIncomingArgArea = false,
@@ -5359,7 +5345,7 @@ struct GenTreePutArgSplit : public GenTreePutArgStk
{
unsigned gtNumRegs;
- GenTreePutArgSplit(GenTreePtr op1,
+ GenTreePutArgSplit(GenTree* op1,
unsigned slotNum PUT_STRUCT_ARG_STK_ONLY_ARG(unsigned numSlots),
unsigned numRegs,
bool putIncomingArgArea = false,
@@ -5688,7 +5674,7 @@ struct GenTreeAllocObj final : public GenTreeUnOp
unsigned int gtNewHelper; // Value returned by ICorJitInfo::getNewHelper
CORINFO_CLASS_HANDLE gtAllocObjClsHnd;
- GenTreeAllocObj(var_types type, unsigned int helper, CORINFO_CLASS_HANDLE clsHnd, GenTreePtr op)
+ GenTreeAllocObj(var_types type, unsigned int helper, CORINFO_CLASS_HANDLE clsHnd, GenTree* op)
: GenTreeUnOp(GT_ALLOCOBJ, type, op DEBUGARG(/*largeNode*/ TRUE))
, // This node in most cases will be changed to a call node
gtNewHelper(helper)
@@ -5924,7 +5910,7 @@ inline bool GenTree::IsSIMDEqualityOrInequality() const
return false;
}
-inline GenTreePtr GenTree::MoveNext()
+inline GenTree* GenTree::MoveNext()
{
assert(OperIsAnyList());
return gtOp.gtOp2;
@@ -6004,13 +5990,13 @@ inline bool GenTree::IsValidCallArgument()
}
#endif // DEBUG
-inline GenTreePtr GenTree::Current()
+inline GenTree* GenTree::Current()
{
assert(OperIsAnyList());
return gtOp.gtOp1;
}
-inline GenTreePtr* GenTree::pCurrent()
+inline GenTree** GenTree::pCurrent()
{
assert(OperIsAnyList());
return &(gtOp.gtOp1);
@@ -6102,7 +6088,7 @@ inline GenTree* GenTree::gtGetOp2IfPresent() const
return op2;
}
-inline GenTreePtr GenTree::gtEffectiveVal(bool commaOnly)
+inline GenTree* GenTree::gtEffectiveVal(bool commaOnly)
{
GenTree* effectiveVal = this;
for (;;)
diff --git a/src/jit/gschecks.cpp b/src/jit/gschecks.cpp
index e4f1c25e0f..8b0da76e31 100644
--- a/src/jit/gschecks.cpp
+++ b/src/jit/gschecks.cpp
@@ -97,12 +97,12 @@ struct MarkPtrsInfo
* or indirection node. It starts a new tree walk for its subtrees when the state
* changes.
*/
-Compiler::fgWalkResult Compiler::gsMarkPtrsAndAssignGroups(GenTreePtr* pTree, fgWalkData* data)
+Compiler::fgWalkResult Compiler::gsMarkPtrsAndAssignGroups(GenTree** pTree, fgWalkData* data)
{
struct MarkPtrsInfo* pState = (MarkPtrsInfo*)data->pCallbackData;
struct MarkPtrsInfo newState = *pState;
Compiler* comp = data->compiler;
- GenTreePtr tree = *pTree;
+ GenTree* tree = *pTree;
ShadowParamVarInfo* shadowVarInfo = pState->comp->gsShadowVarInfo;
assert(shadowVarInfo);
bool fIsBlk = false;
@@ -454,13 +454,13 @@ void Compiler::gsParamsToShadows()
var_types type = lvaTable[shadowVar].TypeGet();
- GenTreePtr src = gtNewLclvNode(lclNum, varDsc->TypeGet());
- GenTreePtr dst = gtNewLclvNode(shadowVar, type);
+ GenTree* src = gtNewLclvNode(lclNum, varDsc->TypeGet());
+ GenTree* dst = gtNewLclvNode(shadowVar, type);
src->gtFlags |= GTF_DONT_CSE;
dst->gtFlags |= GTF_DONT_CSE;
- GenTreePtr opAssign = nullptr;
+ GenTree* opAssign = nullptr;
if (type == TYP_STRUCT)
{
CORINFO_CLASS_HANDLE clsHnd = varDsc->lvVerTypeInfo.GetClassHandle();
@@ -512,13 +512,13 @@ void Compiler::gsParamsToShadows()
continue;
}
- GenTreePtr src = gtNewLclvNode(shadowVar, lvaTable[shadowVar].TypeGet());
- GenTreePtr dst = gtNewLclvNode(lclNum, varDsc->TypeGet());
+ GenTree* src = gtNewLclvNode(shadowVar, lvaTable[shadowVar].TypeGet());
+ GenTree* dst = gtNewLclvNode(lclNum, varDsc->TypeGet());
src->gtFlags |= GTF_DONT_CSE;
dst->gtFlags |= GTF_DONT_CSE;
- GenTreePtr opAssign = nullptr;
+ GenTree* opAssign = nullptr;
if (varDsc->TypeGet() == TYP_STRUCT)
{
CORINFO_CLASS_HANDLE clsHnd = varDsc->lvVerTypeInfo.GetClassHandle();
@@ -543,11 +543,11 @@ void Compiler::gsParamsToShadows()
* Replace all vulnerable param uses with their shadow copies.
*/
-Compiler::fgWalkResult Compiler::gsReplaceShadowParams(GenTreePtr* pTree, fgWalkData* data)
+Compiler::fgWalkResult Compiler::gsReplaceShadowParams(GenTree** pTree, fgWalkData* data)
{
- Compiler* comp = data->compiler;
- GenTreePtr tree = *pTree;
- GenTreePtr asg = nullptr;
+ Compiler* comp = data->compiler;
+ GenTree* tree = *pTree;
+ GenTree* asg = nullptr;
if (tree->gtOper == GT_ASG)
{
diff --git a/src/jit/importer.cpp b/src/jit/importer.cpp
index 4c3ea55cf6..e330ff0df6 100644
--- a/src/jit/importer.cpp
+++ b/src/jit/importer.cpp
@@ -76,7 +76,7 @@ void Compiler::impInit()
* Pushes the given tree on the stack.
*/
-void Compiler::impPushOnStack(GenTreePtr tree, typeInfo ti)
+void Compiler::impPushOnStack(GenTree* tree, typeInfo ti)
{
/* Check for overflow. If inlining, we may be using a bigger stack */
@@ -331,7 +331,7 @@ unsigned Compiler::impStackHeight()
*/
#ifdef DEBUG // only used in asserts
-static bool impValidSpilledStackEntry(GenTreePtr tree)
+static bool impValidSpilledStackEntry(GenTree* tree)
{
if (tree->gtOper == GT_LCL_VAR)
{
@@ -372,7 +372,7 @@ void Compiler::impSaveStackState(SavedStack* savePtr, bool copy)
for (unsigned level = 0; level < verCurrentState.esStackDepth; level++, table++)
{
table->seTypeInfo = verCurrentState.esStack[level].seTypeInfo;
- GenTreePtr tree = verCurrentState.esStack[level].val;
+ GenTree* tree = verCurrentState.esStack[level].val;
assert(impValidSpilledStackEntry(tree));
@@ -428,7 +428,7 @@ inline void Compiler::impBeginTreeList()
* directly only for handling CEE_LEAVEs out of finally-protected try's.
*/
-inline void Compiler::impEndTreeList(BasicBlock* block, GenTreePtr firstStmt, GenTreePtr lastStmt)
+inline void Compiler::impEndTreeList(BasicBlock* block, GenTree* firstStmt, GenTree* lastStmt)
{
assert(firstStmt->gtOper == GT_STMT);
assert(lastStmt->gtOper == GT_STMT);
@@ -456,7 +456,7 @@ inline void Compiler::impEndTreeList(BasicBlock* block)
{
assert(impTreeList->gtOper == GT_BEG_STMTS);
- GenTreePtr firstTree = impTreeList->gtNext;
+ GenTree* firstTree = impTreeList->gtNext;
if (!firstTree)
{
@@ -491,7 +491,7 @@ inline void Compiler::impEndTreeList(BasicBlock* block)
* that this has only limited value as we can only check [0..chkLevel).
*/
-inline void Compiler::impAppendStmtCheck(GenTreePtr stmt, unsigned chkLevel)
+inline void Compiler::impAppendStmtCheck(GenTree* stmt, unsigned chkLevel)
{
#ifndef DEBUG
return;
@@ -508,7 +508,7 @@ inline void Compiler::impAppendStmtCheck(GenTreePtr stmt, unsigned chkLevel)
return;
}
- GenTreePtr tree = stmt->gtStmt.gtStmtExpr;
+ GenTree* tree = stmt->gtStmt.gtStmtExpr;
// Calls can only be appended if there are no GTF_GLOB_EFFECT on the stack
@@ -557,7 +557,7 @@ inline void Compiler::impAppendStmtCheck(GenTreePtr stmt, unsigned chkLevel)
* interference with stmt and spill if needed.
*/
-inline void Compiler::impAppendStmt(GenTreePtr stmt, unsigned chkLevel)
+inline void Compiler::impAppendStmt(GenTree* stmt, unsigned chkLevel)
{
assert(stmt->gtOper == GT_STMT);
noway_assert(impTreeLast != nullptr);
@@ -565,8 +565,8 @@ inline void Compiler::impAppendStmt(GenTreePtr stmt, unsigned chkLevel)
/* If the statement being appended has any side-effects, check the stack
to see if anything needs to be spilled to preserve correct ordering. */
- GenTreePtr expr = stmt->gtStmt.gtStmtExpr;
- unsigned flags = expr->gtFlags & GTF_GLOB_EFFECT;
+ GenTree* expr = stmt->gtStmt.gtStmtExpr;
+ unsigned flags = expr->gtFlags & GTF_GLOB_EFFECT;
// Assignments to (unaliased) locals don't count as a side-effect as
// we handle them specially using impSpillLclRefs(). Temp locals should
@@ -667,16 +667,16 @@ inline void Compiler::impAppendStmt(GenTreePtr stmt, unsigned chkLevel)
* Insert the given GT_STMT "stmt" before GT_STMT "stmtBefore"
*/
-inline void Compiler::impInsertStmtBefore(GenTreePtr stmt, GenTreePtr stmtBefore)
+inline void Compiler::impInsertStmtBefore(GenTree* stmt, GenTree* stmtBefore)
{
assert(stmt->gtOper == GT_STMT);
assert(stmtBefore->gtOper == GT_STMT);
- GenTreePtr stmtPrev = stmtBefore->gtPrev;
- stmt->gtPrev = stmtPrev;
- stmt->gtNext = stmtBefore;
- stmtPrev->gtNext = stmt;
- stmtBefore->gtPrev = stmt;
+ GenTree* stmtPrev = stmtBefore->gtPrev;
+ stmt->gtPrev = stmtPrev;
+ stmt->gtNext = stmtBefore;
+ stmtPrev->gtNext = stmt;
+ stmtBefore->gtPrev = stmt;
}
/*****************************************************************************
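
The rewritten impInsertStmtBefore above is a plain doubly-linked-list insertion on the gtPrev/gtNext statement links. The same four pointer updates in standalone form, with a toy Stmt type and the same precondition the JIT relies on here (stmtBefore has a non-null predecessor):

struct Stmt
{
    Stmt* prev;
    Stmt* next;
};

// Link 'stmt' immediately before 'stmtBefore'. Mirrors the four assignments in
// impInsertStmtBefore; callers must guarantee stmtBefore->prev != nullptr.
static void insertBefore(Stmt* stmt, Stmt* stmtBefore)
{
    Stmt* stmtPrev   = stmtBefore->prev;
    stmt->prev       = stmtPrev;
    stmt->next       = stmtBefore;
    stmtPrev->next   = stmt;
    stmtBefore->prev = stmt;
}
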
@@ -685,13 +685,13 @@ inline void Compiler::impInsertStmtBefore(GenTreePtr stmt, GenTreePtr stmtBefore
* Return the newly created statement.
*/
-GenTreePtr Compiler::impAppendTree(GenTreePtr tree, unsigned chkLevel, IL_OFFSETX offset)
+GenTree* Compiler::impAppendTree(GenTree* tree, unsigned chkLevel, IL_OFFSETX offset)
{
assert(tree);
/* Allocate an 'expression statement' node */
- GenTreePtr expr = gtNewStmt(tree, offset);
+ GenTree* expr = gtNewStmt(tree, offset);
/* Append the statement to the current block's stmt list */
@@ -705,13 +705,13 @@ GenTreePtr Compiler::impAppendTree(GenTreePtr tree, unsigned chkLevel, IL_OFFSET
* Insert the given expression tree before GT_STMT "stmtBefore"
*/
-void Compiler::impInsertTreeBefore(GenTreePtr tree, IL_OFFSETX offset, GenTreePtr stmtBefore)
+void Compiler::impInsertTreeBefore(GenTree* tree, IL_OFFSETX offset, GenTree* stmtBefore)
{
assert(stmtBefore->gtOper == GT_STMT);
/* Allocate an 'expression statement' node */
- GenTreePtr expr = gtNewStmt(tree, offset);
+ GenTree* expr = gtNewStmt(tree, offset);
/* Append the statement to the current block's stmt list */
@@ -725,21 +725,21 @@ void Compiler::impInsertTreeBefore(GenTreePtr tree, IL_OFFSETX offset, GenTreePt
*/
void Compiler::impAssignTempGen(unsigned tmp,
- GenTreePtr val,
+ GenTree* val,
unsigned curLevel,
- GenTreePtr* pAfterStmt, /* = NULL */
+ GenTree** pAfterStmt, /* = NULL */
IL_OFFSETX ilOffset, /* = BAD_IL_OFFSET */
BasicBlock* block /* = NULL */
)
{
- GenTreePtr asg = gtNewTempAssign(tmp, val);
+ GenTree* asg = gtNewTempAssign(tmp, val);
if (!asg->IsNothingNode())
{
if (pAfterStmt)
{
- GenTreePtr asgStmt = gtNewStmt(asg, ilOffset);
- *pAfterStmt = fgInsertStmtAfter(block, *pAfterStmt, asgStmt);
+ GenTree* asgStmt = gtNewStmt(asg, ilOffset);
+ *pAfterStmt = fgInsertStmtAfter(block, *pAfterStmt, asgStmt);
}
else
{
@@ -753,15 +753,15 @@ void Compiler::impAssignTempGen(unsigned tmp,
*/
void Compiler::impAssignTempGen(unsigned tmpNum,
- GenTreePtr val,
+ GenTree* val,
CORINFO_CLASS_HANDLE structType,
unsigned curLevel,
- GenTreePtr* pAfterStmt, /* = NULL */
+ GenTree** pAfterStmt, /* = NULL */
IL_OFFSETX ilOffset, /* = BAD_IL_OFFSET */
BasicBlock* block /* = NULL */
)
{
- GenTreePtr asg;
+ GenTree* asg;
if (varTypeIsStruct(val))
{
@@ -785,8 +785,8 @@ void Compiler::impAssignTempGen(unsigned tmpNum,
val->gtType = lvaTable[tmpNum].lvType;
- GenTreePtr dst = gtNewLclvNode(tmpNum, val->gtType);
- asg = impAssignStruct(dst, val, structType, curLevel, pAfterStmt, block);
+ GenTree* dst = gtNewLclvNode(tmpNum, val->gtType);
+ asg = impAssignStruct(dst, val, structType, curLevel, pAfterStmt, block);
}
else
{
@@ -797,8 +797,8 @@ void Compiler::impAssignTempGen(unsigned tmpNum,
{
if (pAfterStmt)
{
- GenTreePtr asgStmt = gtNewStmt(asg, ilOffset);
- *pAfterStmt = fgInsertStmtAfter(block, *pAfterStmt, asgStmt);
+ GenTree* asgStmt = gtNewStmt(asg, ilOffset);
+ *pAfterStmt = fgInsertStmtAfter(block, *pAfterStmt, asgStmt);
}
else
{
@@ -845,7 +845,7 @@ GenTreeArgList* Compiler::impPopList(unsigned count, CORINFO_SIG_INFO* sig, GenT
{
StackEntry se = impPopStack();
typeInfo ti = se.seTypeInfo;
- GenTreePtr temp = se.val;
+ GenTree* temp = se.val;
if (varTypeIsStruct(temp))
{
@@ -1022,13 +1022,13 @@ GenTreeArgList* Compiler::impPopRevList(unsigned count, CORINFO_SIG_INFO* sig, u
curLevel is the stack level for which a spill may be done.
*/
-GenTreePtr Compiler::impAssignStruct(GenTreePtr dest,
- GenTreePtr src,
- CORINFO_CLASS_HANDLE structHnd,
- unsigned curLevel,
- GenTreePtr* pAfterStmt, /* = NULL */
- BasicBlock* block /* = NULL */
- )
+GenTree* Compiler::impAssignStruct(GenTree* dest,
+ GenTree* src,
+ CORINFO_CLASS_HANDLE structHnd,
+ unsigned curLevel,
+ GenTree** pAfterStmt, /* = NULL */
+ BasicBlock* block /* = NULL */
+ )
{
assert(varTypeIsStruct(dest));
@@ -1062,7 +1062,7 @@ GenTreePtr Compiler::impAssignStruct(GenTreePtr dest,
// TODO-1stClassStructs: Avoid creating an address if it is not needed,
// or re-creating a Blk node if it is.
- GenTreePtr destAddr;
+ GenTree* destAddr;
if (dest->gtOper == GT_IND || dest->OperIsBlk())
{
@@ -1078,17 +1078,17 @@ GenTreePtr Compiler::impAssignStruct(GenTreePtr dest,
/*****************************************************************************/
-GenTreePtr Compiler::impAssignStructPtr(GenTreePtr destAddr,
- GenTreePtr src,
- CORINFO_CLASS_HANDLE structHnd,
- unsigned curLevel,
- GenTreePtr* pAfterStmt, /* = NULL */
- BasicBlock* block /* = NULL */
- )
+GenTree* Compiler::impAssignStructPtr(GenTree* destAddr,
+ GenTree* src,
+ CORINFO_CLASS_HANDLE structHnd,
+ unsigned curLevel,
+ GenTree** pAfterStmt, /* = NULL */
+ BasicBlock* block /* = NULL */
+ )
{
- var_types destType;
- GenTreePtr dest = nullptr;
- unsigned destFlags = 0;
+ var_types destType;
+ GenTree* dest = nullptr;
+ unsigned destFlags = 0;
#if defined(FEATURE_UNIX_AMD64_STRUCT_PASSING)
assert(varTypeIsStruct(src) || (src->gtOper == GT_ADDR && src->TypeGet() == TYP_BYREF));
@@ -1161,7 +1161,7 @@ GenTreePtr Compiler::impAssignStructPtr(GenTreePtr destAddr,
// If it is a multi-reg struct return, don't change the oper to GT_LCL_FLD.
// That is, the IR will be of the form lclVar = call for multi-reg return
//
- GenTreePtr lcl = destAddr->gtOp.gtOp1;
+ GenTree* lcl = destAddr->gtOp.gtOp1;
if (src->AsCall()->HasMultiRegRetVal())
{
// Mark the struct LclVar as used in a MultiReg return context
@@ -1268,21 +1268,21 @@ GenTreePtr Compiler::impAssignStructPtr(GenTreePtr destAddr,
// Since we are assigning the result of a GT_MKREFANY,
// "destAddr" must point to a refany.
- GenTreePtr destAddrClone;
+ GenTree* destAddrClone;
destAddr =
impCloneExpr(destAddr, &destAddrClone, structHnd, curLevel, pAfterStmt DEBUGARG("MKREFANY assignment"));
assert(offsetof(CORINFO_RefAny, dataPtr) == 0);
assert(destAddr->gtType == TYP_I_IMPL || destAddr->gtType == TYP_BYREF);
GetZeroOffsetFieldMap()->Set(destAddr, GetFieldSeqStore()->CreateSingleton(GetRefanyDataField()));
- GenTreePtr ptrSlot = gtNewOperNode(GT_IND, TYP_I_IMPL, destAddr);
+ GenTree* ptrSlot = gtNewOperNode(GT_IND, TYP_I_IMPL, destAddr);
GenTreeIntCon* typeFieldOffset = gtNewIconNode(offsetof(CORINFO_RefAny, type), TYP_I_IMPL);
typeFieldOffset->gtFieldSeq = GetFieldSeqStore()->CreateSingleton(GetRefanyTypeField());
- GenTreePtr typeSlot =
+ GenTree* typeSlot =
gtNewOperNode(GT_IND, TYP_I_IMPL, gtNewOperNode(GT_ADD, destAddr->gtType, destAddrClone, typeFieldOffset));
// append the assign of the pointer value
- GenTreePtr asg = gtNewAssignNode(ptrSlot, src->gtOp.gtOp1);
+ GenTree* asg = gtNewAssignNode(ptrSlot, src->gtOp.gtOp1);
if (pAfterStmt)
{
*pAfterStmt = fgInsertStmtAfter(block, *pAfterStmt, gtNewStmt(asg, impCurStmtOffs));
@@ -1378,10 +1378,10 @@ GenTreePtr Compiler::impAssignStructPtr(GenTreePtr destAddr,
willDeref - does the caller guarantee to dereference the pointer.
*/
-GenTreePtr Compiler::impGetStructAddr(GenTreePtr structVal,
- CORINFO_CLASS_HANDLE structHnd,
- unsigned curLevel,
- bool willDeref)
+GenTree* Compiler::impGetStructAddr(GenTree* structVal,
+ CORINFO_CLASS_HANDLE structHnd,
+ unsigned curLevel,
+ bool willDeref)
{
assert(varTypeIsStruct(structVal) || eeIsValueClass(structHnd));
@@ -1402,18 +1402,18 @@ GenTreePtr Compiler::impGetStructAddr(GenTreePtr structVal,
// The 'return value' is now the temp itself
- type = genActualType(lvaTable[tmpNum].TypeGet());
- GenTreePtr temp = gtNewLclvNode(tmpNum, type);
- temp = gtNewOperNode(GT_ADDR, TYP_BYREF, temp);
+ type = genActualType(lvaTable[tmpNum].TypeGet());
+ GenTree* temp = gtNewLclvNode(tmpNum, type);
+ temp = gtNewOperNode(GT_ADDR, TYP_BYREF, temp);
return temp;
}
else if (oper == GT_COMMA)
{
assert(structVal->gtOp.gtOp2->gtType == type); // Second thing is the struct
- GenTreePtr oldTreeLast = impTreeLast;
- structVal->gtOp.gtOp2 = impGetStructAddr(structVal->gtOp.gtOp2, structHnd, curLevel, willDeref);
- structVal->gtType = TYP_BYREF;
+ GenTree* oldTreeLast = impTreeLast;
+ structVal->gtOp.gtOp2 = impGetStructAddr(structVal->gtOp.gtOp2, structHnd, curLevel, willDeref);
+ structVal->gtType = TYP_BYREF;
if (oldTreeLast != impTreeLast)
{
@@ -1532,10 +1532,10 @@ var_types Compiler::impNormStructType(CORINFO_CLASS_HANDLE structHnd,
// Given TYP_STRUCT value 'structVal', make sure it is 'canonical', that is
// it is either an OBJ or a MKREFANY node, or a node (e.g. GT_INDEX) that will be morphed.
//
-GenTreePtr Compiler::impNormStructVal(GenTreePtr structVal,
- CORINFO_CLASS_HANDLE structHnd,
- unsigned curLevel,
- bool forceNormalization /*=false*/)
+GenTree* Compiler::impNormStructVal(GenTree* structVal,
+ CORINFO_CLASS_HANDLE structHnd,
+ unsigned curLevel,
+ bool forceNormalization /*=false*/)
{
assert(forceNormalization || varTypeIsStruct(structVal));
assert(structHnd != NO_CLASS_HANDLE);
@@ -1739,10 +1739,10 @@ GenTreePtr Compiler::impNormStructVal(GenTreePtr structVal,
// and the token refers to formal type parameters whose instantiation is not known
// at compile-time.
//
-GenTreePtr Compiler::impTokenToHandle(CORINFO_RESOLVED_TOKEN* pResolvedToken,
- BOOL* pRuntimeLookup /* = NULL */,
- BOOL mustRestoreHandle /* = FALSE */,
- BOOL importParent /* = FALSE */)
+GenTree* Compiler::impTokenToHandle(CORINFO_RESOLVED_TOKEN* pResolvedToken,
+ BOOL* pRuntimeLookup /* = NULL */,
+ BOOL mustRestoreHandle /* = FALSE */,
+ BOOL importParent /* = FALSE */)
{
assert(!fgGlobalMorph);
@@ -1789,10 +1789,10 @@ GenTreePtr Compiler::impTokenToHandle(CORINFO_RESOLVED_TOKEN* pResolvedToken,
return result;
}
-GenTreePtr Compiler::impLookupToTree(CORINFO_RESOLVED_TOKEN* pResolvedToken,
- CORINFO_LOOKUP* pLookup,
- unsigned handleFlags,
- void* compileTimeHandle)
+GenTree* Compiler::impLookupToTree(CORINFO_RESOLVED_TOKEN* pResolvedToken,
+ CORINFO_LOOKUP* pLookup,
+ unsigned handleFlags,
+ void* compileTimeHandle)
{
if (!pLookup->lookupKind.needsRuntimeLookup)
{
@@ -1830,9 +1830,9 @@ GenTreePtr Compiler::impLookupToTree(CORINFO_RESOLVED_TOKEN* pResolvedToken,
}
#ifdef FEATURE_READYTORUN_COMPILER
-GenTreePtr Compiler::impReadyToRunLookupToTree(CORINFO_CONST_LOOKUP* pLookup,
- unsigned handleFlags,
- void* compileTimeHandle)
+GenTree* Compiler::impReadyToRunLookupToTree(CORINFO_CONST_LOOKUP* pLookup,
+ unsigned handleFlags,
+ void* compileTimeHandle)
{
CORINFO_GENERIC_HANDLE handle = nullptr;
void* pIndirection = nullptr;
@@ -1870,9 +1870,9 @@ GenTreeCall* Compiler::impReadyToRunHelperToTree(
}
#endif
-GenTreePtr Compiler::impMethodPointer(CORINFO_RESOLVED_TOKEN* pResolvedToken, CORINFO_CALL_INFO* pCallInfo)
+GenTree* Compiler::impMethodPointer(CORINFO_RESOLVED_TOKEN* pResolvedToken, CORINFO_CALL_INFO* pCallInfo)
{
- GenTreePtr op1 = nullptr;
+ GenTree* op1 = nullptr;
switch (pCallInfo->kind)
{
@@ -1923,9 +1923,9 @@ GenTreePtr Compiler::impMethodPointer(CORINFO_RESOLVED_TOKEN* pResolvedToken, CO
// Notes:
// Reports about generic context using.
-GenTreePtr Compiler::getRuntimeContextTree(CORINFO_RUNTIME_LOOKUP_KIND kind)
+GenTree* Compiler::getRuntimeContextTree(CORINFO_RUNTIME_LOOKUP_KIND kind)
{
- GenTreePtr ctxTree = nullptr;
+ GenTree* ctxTree = nullptr;
// Collectible types require that for shared generic code, if we use the generic context parameter
// that we report it. (This is a conservative approach, we could detect some cases particularly when the
@@ -1969,16 +1969,16 @@ GenTreePtr Compiler::getRuntimeContextTree(CORINFO_RUNTIME_LOOKUP_KIND kind)
to lookup the handle.
*/
-GenTreePtr Compiler::impRuntimeLookupToTree(CORINFO_RESOLVED_TOKEN* pResolvedToken,
- CORINFO_LOOKUP* pLookup,
- void* compileTimeHandle)
+GenTree* Compiler::impRuntimeLookupToTree(CORINFO_RESOLVED_TOKEN* pResolvedToken,
+ CORINFO_LOOKUP* pLookup,
+ void* compileTimeHandle)
{
// This method can only be called from the importer instance of the Compiler.
// In other words, it cannot be called by the instance of the Compiler for the inlinee.
assert(!compIsForInlining());
- GenTreePtr ctxTree = getRuntimeContextTree(pLookup->lookupKind.runtimeLookupKind);
+ GenTree* ctxTree = getRuntimeContextTree(pLookup->lookupKind.runtimeLookupKind);
CORINFO_RUNTIME_LOOKUP* pRuntimeLookup = &pLookup->runtimeLookup;
// It's available only via the run-time helper function
@@ -1999,7 +1999,7 @@ GenTreePtr Compiler::impRuntimeLookupToTree(CORINFO_RESOLVED_TOKEN* pResolvedTok
}
// Slot pointer
- GenTreePtr slotPtrTree = ctxTree;
+ GenTree* slotPtrTree = ctxTree;
if (pRuntimeLookup->testForNull)
{
@@ -2007,7 +2007,7 @@ GenTreePtr Compiler::impRuntimeLookupToTree(CORINFO_RESOLVED_TOKEN* pResolvedTok
nullptr DEBUGARG("impRuntimeLookup slot"));
}
- GenTreePtr indOffTree = nullptr;
+ GenTree* indOffTree = nullptr;
// Apply repeated indirections
for (WORD i = 0; i < pRuntimeLookup->indirections; i++)
@@ -2087,27 +2087,27 @@ GenTreePtr Compiler::impRuntimeLookupToTree(CORINFO_RESOLVED_TOKEN* pResolvedTok
impSpillSideEffects(true, CHECK_SPILL_ALL DEBUGARG("bubbling QMark1"));
// Extract the handle
- GenTreePtr handle = gtNewOperNode(GT_IND, TYP_I_IMPL, slotPtrTree);
+ GenTree* handle = gtNewOperNode(GT_IND, TYP_I_IMPL, slotPtrTree);
handle->gtFlags |= GTF_IND_NONFAULTING;
- GenTreePtr handleCopy = impCloneExpr(handle, &handle, NO_CLASS_HANDLE, (unsigned)CHECK_SPILL_ALL,
- nullptr DEBUGARG("impRuntimeLookup typehandle"));
+ GenTree* handleCopy = impCloneExpr(handle, &handle, NO_CLASS_HANDLE, (unsigned)CHECK_SPILL_ALL,
+ nullptr DEBUGARG("impRuntimeLookup typehandle"));
// Call to helper
GenTree* argNode = gtNewIconEmbHndNode(pRuntimeLookup->signature, nullptr, GTF_ICON_TOKEN_HDL, compileTimeHandle);
GenTreeArgList* helperArgs = gtNewArgList(ctxTree, argNode);
- GenTreePtr helperCall = gtNewHelperCallNode(pRuntimeLookup->helper, TYP_I_IMPL, helperArgs);
+ GenTree* helperCall = gtNewHelperCallNode(pRuntimeLookup->helper, TYP_I_IMPL, helperArgs);
// Check for null and possibly call helper
- GenTreePtr relop = gtNewOperNode(GT_NE, TYP_INT, handle, gtNewIconNode(0, TYP_I_IMPL));
+ GenTree* relop = gtNewOperNode(GT_NE, TYP_INT, handle, gtNewIconNode(0, TYP_I_IMPL));
relop->gtFlags |= GTF_RELOP_QMARK;
- GenTreePtr colon = new (this, GT_COLON) GenTreeColon(TYP_I_IMPL,
- gtNewNothingNode(), // do nothing if nonnull
- helperCall);
+ GenTree* colon = new (this, GT_COLON) GenTreeColon(TYP_I_IMPL,
+ gtNewNothingNode(), // do nothing if nonnull
+ helperCall);
- GenTreePtr qmark = gtNewQmarkNode(TYP_I_IMPL, relop, colon);
+ GenTree* qmark = gtNewQmarkNode(TYP_I_IMPL, relop, colon);
unsigned tmp;
if (handleCopy->IsLocal())
@@ -2177,7 +2177,7 @@ bool Compiler::impSpillStackEntry(unsigned level,
guard.Init(&impNestedStackSpill, bAssertOnRecursion);
#endif
- GenTreePtr tree = verCurrentState.esStack[level].val;
+ GenTree* tree = verCurrentState.esStack[level].val;
/* Allocate a temp if we haven't been asked to use a particular one */
@@ -2241,8 +2241,8 @@ bool Compiler::impSpillStackEntry(unsigned level,
}
// The tree type may be modified by impAssignTempGen, so use the type of the lclVar.
- var_types type = genActualType(lvaTable[tnum].TypeGet());
- GenTreePtr temp = gtNewLclvNode(tnum, type);
+ var_types type = genActualType(lvaTable[tnum].TypeGet());
+ GenTree* temp = gtNewLclvNode(tnum, type);
verCurrentState.esStack[level].val = temp;
return true;
@@ -2259,7 +2259,7 @@ void Compiler::impSpillStackEnsure(bool spillLeaves)
for (unsigned level = 0; level < verCurrentState.esStackDepth; level++)
{
- GenTreePtr tree = verCurrentState.esStack[level].val;
+ GenTree* tree = verCurrentState.esStack[level].val;
if (!spillLeaves && tree->OperIsLeaf())
{
@@ -2327,9 +2327,9 @@ inline void Compiler::impSpillSideEffects(bool spillGlobEffects, unsigned chkLev
for (unsigned i = 0; i < chkLevel; i++)
{
- GenTreePtr tree = verCurrentState.esStack[i].val;
+ GenTree* tree = verCurrentState.esStack[i].val;
- GenTreePtr lclVarTree;
+ GenTree* lclVarTree;
if ((tree->gtFlags & spillFlags) != 0 ||
(spillGlobEffects && // Only consider the following when spillGlobEffects == TRUE
@@ -2359,7 +2359,7 @@ inline void Compiler::impSpillSpecialSideEff()
for (unsigned level = 0; level < verCurrentState.esStackDepth; level++)
{
- GenTreePtr tree = verCurrentState.esStack[level].val;
+ GenTree* tree = verCurrentState.esStack[level].val;
// Make sure if we have an exception object in the sub tree we spill ourselves.
if (gtHasCatchArg(tree))
{
@@ -2377,7 +2377,7 @@ void Compiler::impSpillValueClasses()
{
for (unsigned level = 0; level < verCurrentState.esStackDepth; level++)
{
- GenTreePtr tree = verCurrentState.esStack[level].val;
+ GenTree* tree = verCurrentState.esStack[level].val;
if (fgWalkTreePre(&tree, impFindValueClasses) == WALK_ABORT)
{
@@ -2395,7 +2395,7 @@ void Compiler::impSpillValueClasses()
* Callback that checks if a tree node is TYP_STRUCT
*/
-Compiler::fgWalkResult Compiler::impFindValueClasses(GenTreePtr* pTree, fgWalkData* data)
+Compiler::fgWalkResult Compiler::impFindValueClasses(GenTree** pTree, fgWalkData* data)
{
fgWalkResult walkResult = WALK_CONTINUE;
@@ -2425,7 +2425,7 @@ void Compiler::impSpillLclRefs(ssize_t lclNum)
for (unsigned level = 0; level < verCurrentState.esStackDepth; level++)
{
- GenTreePtr tree = verCurrentState.esStack[level].val;
+ GenTree* tree = verCurrentState.esStack[level].val;
/* If the tree may throw an exception, and the block has a handler,
then we need to spill assignments to the local if the local is
@@ -2462,7 +2462,7 @@ BasicBlock* Compiler::impPushCatchArgOnStack(BasicBlock* hndBlk, CORINFO_CLASS_H
if ((hndBlk->bbFlags & (BBF_IMPORTED | BBF_INTERNAL | BBF_DONT_REMOVE | BBF_HAS_LABEL | BBF_JMP_TARGET)) ==
(BBF_IMPORTED | BBF_INTERNAL | BBF_DONT_REMOVE | BBF_HAS_LABEL | BBF_JMP_TARGET))
{
- GenTreePtr tree = hndBlk->bbTreeList;
+ GenTree* tree = hndBlk->bbTreeList;
if (tree != nullptr && tree->gtOper == GT_STMT)
{
@@ -2485,7 +2485,7 @@ BasicBlock* Compiler::impPushCatchArgOnStack(BasicBlock* hndBlk, CORINFO_CLASS_H
}
/* Push the exception address value on the stack */
- GenTreePtr arg = new (this, GT_CATCH_ARG) GenTree(GT_CATCH_ARG, TYP_REF);
+ GenTree* arg = new (this, GT_CATCH_ARG) GenTree(GT_CATCH_ARG, TYP_REF);
/* Mark the node as having a side-effect - i.e. cannot be
* moved around since it is tied to a fixed location (EAX) */
@@ -2547,15 +2547,15 @@ BasicBlock* Compiler::impPushCatchArgOnStack(BasicBlock* hndBlk, CORINFO_CLASS_H
* If the tree has side-effects, it will be spilled to a temp.
*/
-GenTreePtr Compiler::impCloneExpr(GenTreePtr tree,
- GenTreePtr* pClone,
- CORINFO_CLASS_HANDLE structHnd,
- unsigned curLevel,
- GenTreePtr* pAfterStmt DEBUGARG(const char* reason))
+GenTree* Compiler::impCloneExpr(GenTree* tree,
+ GenTree** pClone,
+ CORINFO_CLASS_HANDLE structHnd,
+ unsigned curLevel,
+ GenTree** pAfterStmt DEBUGARG(const char* reason))
{
if (!(tree->gtFlags & GTF_GLOB_EFFECT))
{
- GenTreePtr clone = gtClone(tree, true);
+ GenTree* clone = gtClone(tree, true);
if (clone)
{
@@ -2589,7 +2589,7 @@ inline void Compiler::impCurStmtOffsSet(IL_OFFSET offs)
{
if (compIsForInlining())
{
- GenTreePtr callStmt = impInlineInfo->iciStmt;
+ GenTree* callStmt = impInlineInfo->iciStmt;
assert(callStmt->gtOper == GT_STMT);
impCurStmtOffs = callStmt->gtStmt.gtStmtILoffsx;
}
@@ -2861,7 +2861,7 @@ CORINFO_CLASS_HANDLE Compiler::impGetObjectClass()
*/
/* static */
-void Compiler::impBashVarAddrsToI(GenTreePtr tree1, GenTreePtr tree2)
+void Compiler::impBashVarAddrsToI(GenTree* tree1, GenTree* tree2)
{
if (tree1->IsVarAddr())
{
@@ -2882,7 +2882,7 @@ void Compiler::impBashVarAddrsToI(GenTreePtr tree1, GenTreePtr tree2)
* We also allow an implicit conversion of an ldnull into a TYP_I_IMPL(0)
*/
-GenTreePtr Compiler::impImplicitIorI4Cast(GenTreePtr tree, var_types dstTyp)
+GenTree* Compiler::impImplicitIorI4Cast(GenTree* tree, var_types dstTyp)
{
var_types currType = genActualType(tree->gtType);
var_types wantedType = genActualType(dstTyp);
@@ -2920,7 +2920,7 @@ GenTreePtr Compiler::impImplicitIorI4Cast(GenTreePtr tree, var_types dstTyp)
* that exist in the IL are turned into explicit casts here.
*/
-GenTreePtr Compiler::impImplicitR4orR8Cast(GenTreePtr tree, var_types dstTyp)
+GenTree* Compiler::impImplicitR4orR8Cast(GenTree* tree, var_types dstTyp)
{
#ifndef LEGACY_BACKEND
if (varTypeIsFloating(tree) && varTypeIsFloating(dstTyp) && (dstTyp != tree->gtType))
@@ -2954,12 +2954,12 @@ GenTreePtr Compiler::impImplicitR4orR8Cast(GenTreePtr tree, var_types dstTyp)
// The function recognizes all kinds of arrays thus enabling a small runtime
// such as CoreRT to skip providing an implementation for InitializeArray.
-GenTreePtr Compiler::impInitializeArrayIntrinsic(CORINFO_SIG_INFO* sig)
+GenTree* Compiler::impInitializeArrayIntrinsic(CORINFO_SIG_INFO* sig)
{
assert(sig->numArgs == 2);
- GenTreePtr fieldTokenNode = impStackTop(0).val;
- GenTreePtr arrayLocalNode = impStackTop(1).val;
+ GenTree* fieldTokenNode = impStackTop(0).val;
+ GenTree* arrayLocalNode = impStackTop(1).val;
//
// Verify that the field token is known and valid. Note that it's also
@@ -3014,7 +3014,7 @@ GenTreePtr Compiler::impInitializeArrayIntrinsic(CORINFO_SIG_INFO* sig)
// We start by looking at the last statement, making sure it's an assignment, and
// that the target of the assignment is the array passed to InitializeArray.
//
- GenTreePtr arrayAssignment = impTreeLast->gtStmt.gtStmtExpr;
+ GenTree* arrayAssignment = impTreeLast->gtStmt.gtStmtExpr;
if ((arrayAssignment->gtOper != GT_ASG) || (arrayAssignment->gtOp.gtOp1->gtOper != GT_LCL_VAR) ||
(arrayLocalNode->gtOper != GT_LCL_VAR) ||
(arrayAssignment->gtOp.gtOp1->gtLclVarCommon.gtLclNum != arrayLocalNode->gtLclVarCommon.gtLclNum))
@@ -3026,7 +3026,7 @@ GenTreePtr Compiler::impInitializeArrayIntrinsic(CORINFO_SIG_INFO* sig)
// Make sure that the object being assigned is a helper call.
//
- GenTreePtr newArrayCall = arrayAssignment->gtOp.gtOp2;
+ GenTree* newArrayCall = arrayAssignment->gtOp.gtOp2;
if ((newArrayCall->gtOper != GT_CALL) || (newArrayCall->gtCall.gtCallType != CT_HELPER))
{
return nullptr;
@@ -3216,7 +3216,7 @@ GenTreePtr Compiler::impInitializeArrayIntrinsic(CORINFO_SIG_INFO* sig)
// the number of elements.
//
- GenTreePtr arrayLengthNode;
+ GenTree* arrayLengthNode;
GenTreeArgList* args = newArrayCall->gtCall.gtCallArgs;
#ifdef FEATURE_READYTORUN_COMPILER
@@ -3302,9 +3302,9 @@ GenTreePtr Compiler::impInitializeArrayIntrinsic(CORINFO_SIG_INFO* sig)
dataOffset = eeGetArrayDataOffset(elementType);
}
- GenTreePtr dst = gtNewOperNode(GT_ADD, TYP_BYREF, arrayLocalNode, gtNewIconNode(dataOffset, TYP_I_IMPL));
- GenTreePtr blk = gtNewBlockVal(dst, blkSize);
- GenTreePtr src = gtNewIndOfIconHandleNode(TYP_STRUCT, (size_t)initData, GTF_ICON_STATIC_HDL, false);
+ GenTree* dst = gtNewOperNode(GT_ADD, TYP_BYREF, arrayLocalNode, gtNewIconNode(dataOffset, TYP_I_IMPL));
+ GenTree* blk = gtNewBlockVal(dst, blkSize);
+ GenTree* src = gtNewIndOfIconHandleNode(TYP_STRUCT, (size_t)initData, GTF_ICON_STATIC_HDL, false);
return gtNewBlkOpNode(blk, // dst
src, // src
@@ -3424,7 +3424,7 @@ GenTree* Compiler::impIntrinsic(GenTree* newobjThis,
assert(intrinsicID != CORINFO_INTRINSIC_StubHelpers_GetStubContextAddr);
#endif
- GenTreePtr retNode = nullptr;
+ GenTree* retNode = nullptr;
// Under debug and minopts, only expand what is required.
if (!mustExpand && (opts.compDbgCode || opts.MinOpts()))
@@ -3439,7 +3439,8 @@ GenTree* Compiler::impIntrinsic(GenTree* newobjThis,
switch (intrinsicID)
{
- GenTreePtr op1, op2;
+ GenTree* op1;
+ GenTree* op2;
case CORINFO_INTRINSIC_Sin:
case CORINFO_INTRINSIC_Cbrt:
@@ -3531,13 +3532,13 @@ GenTree* Compiler::impIntrinsic(GenTree* newobjThis,
{
assert(callType != TYP_STRUCT);
assert(sig->numArgs == 3);
- GenTreePtr op3;
+ GenTree* op3;
op3 = impPopStack().val; // comparand
op2 = impPopStack().val; // value
op1 = impPopStack().val; // location
- GenTreePtr node = new (this, GT_CMPXCHG) GenTreeCmpXchg(genActualType(callType), op1, op2, op3);
+ GenTree* node = new (this, GT_CMPXCHG) GenTreeCmpXchg(genActualType(callType), op1, op2, op3);
node->gtCmpXchg.gtOpLocation->gtFlags |= GTF_DONT_CSE;
retNode = node;
@@ -3724,11 +3725,11 @@ GenTree* Compiler::impIntrinsic(GenTree* newobjThis,
// Remove call to constructor and directly assign the byref passed
// to the call to the first slot of the ByReference struct.
op1 = impPopStack().val;
- GenTreePtr thisptr = newobjThis;
+ GenTree* thisptr = newobjThis;
CORINFO_FIELD_HANDLE fldHnd = info.compCompHnd->getFieldInClass(clsHnd, 0);
- GenTreePtr field = gtNewFieldRef(TYP_BYREF, fldHnd, thisptr, 0, false);
- GenTreePtr assign = gtNewAssignNode(field, op1);
- GenTreePtr byReferenceStruct = gtCloneExpr(thisptr->gtGetOp1());
+ GenTree* field = gtNewFieldRef(TYP_BYREF, fldHnd, thisptr, 0, false);
+ GenTree* assign = gtNewAssignNode(field, op1);
+ GenTree* byReferenceStruct = gtCloneExpr(thisptr->gtGetOp1());
assert(byReferenceStruct != nullptr);
impPushOnStack(byReferenceStruct, typeInfo(TI_STRUCT, clsHnd));
retNode = assign;
@@ -3739,7 +3740,7 @@ GenTree* Compiler::impIntrinsic(GenTree* newobjThis,
{
op1 = impPopStack().val;
CORINFO_FIELD_HANDLE fldHnd = info.compCompHnd->getFieldInClass(clsHnd, 0);
- GenTreePtr field = gtNewFieldRef(TYP_BYREF, fldHnd, op1, 0, false);
+ GenTree* field = gtNewFieldRef(TYP_BYREF, fldHnd, op1, 0, false);
retNode = field;
break;
}
@@ -3767,10 +3768,10 @@ GenTree* Compiler::impIntrinsic(GenTree* newobjThis,
JITDUMP("\nimpIntrinsic: Expanding %sSpan<T>.get_Item, T=%s, sizeof(T)=%u\n", isReadOnly ? "ReadOnly" : "",
info.compCompHnd->getClassName(spanElemHnd), elemSize);
- GenTreePtr index = impPopStack().val;
- GenTreePtr ptrToSpan = impPopStack().val;
- GenTreePtr indexClone = nullptr;
- GenTreePtr ptrToSpanClone = nullptr;
+ GenTree* index = impPopStack().val;
+ GenTree* ptrToSpan = impPopStack().val;
+ GenTree* indexClone = nullptr;
+ GenTree* ptrToSpanClone = nullptr;
#if defined(DEBUG)
if (verbose)
@@ -3791,18 +3792,18 @@ GenTree* Compiler::impIntrinsic(GenTree* newobjThis,
// Bounds check
CORINFO_FIELD_HANDLE lengthHnd = info.compCompHnd->getFieldInClass(clsHnd, 1);
const unsigned lengthOffset = info.compCompHnd->getFieldOffset(lengthHnd);
- GenTreePtr length = gtNewFieldRef(TYP_INT, lengthHnd, ptrToSpan, lengthOffset, false);
- GenTreePtr boundsCheck = new (this, GT_ARR_BOUNDS_CHECK)
+ GenTree* length = gtNewFieldRef(TYP_INT, lengthHnd, ptrToSpan, lengthOffset, false);
+ GenTree* boundsCheck = new (this, GT_ARR_BOUNDS_CHECK)
GenTreeBoundsChk(GT_ARR_BOUNDS_CHECK, TYP_VOID, index, length, SCK_RNGCHK_FAIL);
// Element access
- GenTreePtr indexIntPtr = impImplicitIorI4Cast(indexClone, TYP_I_IMPL);
- GenTreePtr sizeofNode = gtNewIconNode(elemSize);
- GenTreePtr mulNode = gtNewOperNode(GT_MUL, TYP_I_IMPL, indexIntPtr, sizeofNode);
+ GenTree* indexIntPtr = impImplicitIorI4Cast(indexClone, TYP_I_IMPL);
+ GenTree* sizeofNode = gtNewIconNode(elemSize);
+ GenTree* mulNode = gtNewOperNode(GT_MUL, TYP_I_IMPL, indexIntPtr, sizeofNode);
CORINFO_FIELD_HANDLE ptrHnd = info.compCompHnd->getFieldInClass(clsHnd, 0);
const unsigned ptrOffset = info.compCompHnd->getFieldOffset(ptrHnd);
- GenTreePtr data = gtNewFieldRef(TYP_BYREF, ptrHnd, ptrToSpanClone, ptrOffset, false);
- GenTreePtr result = gtNewOperNode(GT_ADD, TYP_BYREF, data, mulNode);
+ GenTree* data = gtNewFieldRef(TYP_BYREF, ptrHnd, ptrToSpanClone, ptrOffset, false);
+ GenTree* result = gtNewOperNode(GT_ADD, TYP_BYREF, data, mulNode);
// Prepare result
var_types resultType = JITtype2varType(sig->retType);
@@ -3824,8 +3825,8 @@ GenTree* Compiler::impIntrinsic(GenTree* newobjThis,
CORINFO_GENERICHANDLE_RESULT embedInfo;
info.compCompHnd->expandRawHandleIntrinsic(&resolvedToken, &embedInfo);
- GenTreePtr rawHandle = impLookupToTree(&resolvedToken, &embedInfo.lookup, gtTokenToIconFlags(memberRef),
- embedInfo.compileTimeHandle);
+ GenTree* rawHandle = impLookupToTree(&resolvedToken, &embedInfo.lookup, gtTokenToIconFlags(memberRef),
+ embedInfo.compileTimeHandle);
if (rawHandle == nullptr)
{
return nullptr;
@@ -3836,10 +3837,10 @@ GenTree* Compiler::impIntrinsic(GenTree* newobjThis,
unsigned rawHandleSlot = lvaGrabTemp(true DEBUGARG("rawHandle"));
impAssignTempGen(rawHandleSlot, rawHandle, clsHnd, (unsigned)CHECK_SPILL_NONE);
- GenTreePtr lclVar = gtNewLclvNode(rawHandleSlot, TYP_I_IMPL);
- GenTreePtr lclVarAddr = gtNewOperNode(GT_ADDR, TYP_I_IMPL, lclVar);
- var_types resultType = JITtype2varType(sig->retType);
- retNode = gtNewOperNode(GT_IND, resultType, lclVarAddr);
+ GenTree* lclVar = gtNewLclvNode(rawHandleSlot, TYP_I_IMPL);
+ GenTree* lclVarAddr = gtNewOperNode(GT_ADDR, TYP_I_IMPL, lclVar);
+ var_types resultType = JITtype2varType(sig->retType);
+ retNode = gtNewOperNode(GT_IND, resultType, lclVarAddr);
break;
}
@@ -4136,7 +4137,7 @@ NamedIntrinsic Compiler::lookupNamedIntrinsic(CORINFO_METHOD_HANDLE method)
/*****************************************************************************/
-GenTreePtr Compiler::impArrayAccessIntrinsic(
+GenTree* Compiler::impArrayAccessIntrinsic(
CORINFO_CLASS_HANDLE clsHnd, CORINFO_SIG_INFO* sig, int memberRef, bool readonlyCall, CorInfoIntrinsics intrinsicID)
{
/* If we are generating SMALL_CODE, we don't want to use intrinsics for
@@ -4223,7 +4224,7 @@ GenTreePtr Compiler::impArrayAccessIntrinsic(
return nullptr;
}
- GenTreePtr val = nullptr;
+ GenTree* val = nullptr;
if (intrinsicID == CORINFO_INTRINSIC_Array_Set)
{
@@ -4242,16 +4243,16 @@ GenTreePtr Compiler::impArrayAccessIntrinsic(
noway_assert((unsigned char)GT_ARR_MAX_RANK == GT_ARR_MAX_RANK);
- GenTreePtr inds[GT_ARR_MAX_RANK];
+ GenTree* inds[GT_ARR_MAX_RANK];
for (unsigned k = rank; k > 0; k--)
{
inds[k - 1] = impPopStack().val;
}
- GenTreePtr arr = impPopStack().val;
+ GenTree* arr = impPopStack().val;
assert(arr->gtType == TYP_REF);
- GenTreePtr arrElem =
+ GenTree* arrElem =
new (this, GT_ARR_ELEM) GenTreeArrElem(TYP_BYREF, arr, static_cast<unsigned char>(rank),
static_cast<unsigned char>(arrayElemSize), elemType, &inds[0]);
@@ -4396,7 +4397,7 @@ void Compiler::verConvertBBToThrowVerificationException(BasicBlock* block DEBUGA
}
assert(verCurrentState.esStackDepth == 0);
- GenTreePtr op1 =
+ GenTree* op1 =
gtNewHelperCallNode(CORINFO_HELP_VERIFICATION, TYP_VOID, gtNewArgList(gtNewIconNode(block->bbCodeOffs)));
// verCurrentState.esStackDepth = 0;
impAppendTree(op1, (unsigned)CHECK_SPILL_NONE, impCurStmtOffs);
@@ -5465,9 +5466,9 @@ BOOL Compiler::verIsCallToInitThisPtr(CORINFO_CLASS_HANDLE context, CORINFO_CLAS
return ((target == context) || (target == info.compCompHnd->getParentType(context)));
}
-GenTreePtr Compiler::impImportLdvirtftn(GenTreePtr thisPtr,
- CORINFO_RESOLVED_TOKEN* pResolvedToken,
- CORINFO_CALL_INFO* pCallInfo)
+GenTree* Compiler::impImportLdvirtftn(GenTree* thisPtr,
+ CORINFO_RESOLVED_TOKEN* pResolvedToken,
+ CORINFO_CALL_INFO* pCallInfo)
{
if ((pCallInfo->methodFlags & CORINFO_FLG_EnC) && !(pCallInfo->classFlags & CORINFO_FLG_INTERFACE))
{
@@ -5477,7 +5478,7 @@ GenTreePtr Compiler::impImportLdvirtftn(GenTreePtr thisPtr,
// CoreRT generic virtual method
if ((pCallInfo->sig.sigInst.methInstCount != 0) && IsTargetAbi(CORINFO_CORERT_ABI))
{
- GenTreePtr runtimeMethodHandle = nullptr;
+ GenTree* runtimeMethodHandle = nullptr;
if (pCallInfo->exactContextNeedsRuntimeLookup)
{
runtimeMethodHandle =
@@ -5507,7 +5508,7 @@ GenTreePtr Compiler::impImportLdvirtftn(GenTreePtr thisPtr,
// We need a runtime lookup. CoreRT has a ReadyToRun helper for that too.
if (IsTargetAbi(CORINFO_CORERT_ABI))
{
- GenTreePtr ctxTree = getRuntimeContextTree(pCallInfo->codePointerLookup.lookupKind.runtimeLookupKind);
+ GenTree* ctxTree = getRuntimeContextTree(pCallInfo->codePointerLookup.lookupKind.runtimeLookupKind);
return impReadyToRunHelperToTree(pResolvedToken, CORINFO_HELP_READYTORUN_GENERIC_HANDLE, TYP_I_IMPL,
gtNewArgList(ctxTree), &pCallInfo->codePointerLookup.lookupKind);
@@ -5516,13 +5517,13 @@ GenTreePtr Compiler::impImportLdvirtftn(GenTreePtr thisPtr,
#endif
// Get the exact descriptor for the static callsite
- GenTreePtr exactTypeDesc = impParentClassTokenToHandle(pResolvedToken);
+ GenTree* exactTypeDesc = impParentClassTokenToHandle(pResolvedToken);
if (exactTypeDesc == nullptr)
{ // compDonotInline()
return nullptr;
}
- GenTreePtr exactMethodDesc = impTokenToHandle(pResolvedToken);
+ GenTree* exactMethodDesc = impTokenToHandle(pResolvedToken);
if (exactMethodDesc == nullptr)
{ // compDonotInline()
return nullptr;
@@ -5581,11 +5582,11 @@ void Compiler::impImportAndPushBox(CORINFO_RESOLVED_TOKEN* pResolvedToken)
impSpillSpecialSideEff();
// Get the expression to box from the stack.
- GenTreePtr op1 = nullptr;
- GenTreePtr op2 = nullptr;
+ GenTree* op1 = nullptr;
+ GenTree* op2 = nullptr;
StackEntry se = impPopStack();
CORINFO_CLASS_HANDLE operCls = se.seTypeInfo.GetClassHandle();
- GenTreePtr exprToBox = se.val;
+ GenTree* exprToBox = se.val;
// Look at what helper we should use.
CorInfoHelpFunc boxHelper = info.compCompHnd->getBoxHelper(pResolvedToken->hClass);
@@ -5682,9 +5683,9 @@ void Compiler::impImportAndPushBox(CORINFO_RESOLVED_TOKEN* pResolvedToken)
compCurBB->bbFlags |= BBF_HAS_NEWOBJ;
optMethodFlags |= OMF_HAS_NEWOBJ;
- GenTreePtr asg = gtNewTempAssign(impBoxTemp, op1);
+ GenTree* asg = gtNewTempAssign(impBoxTemp, op1);
- GenTreePtr asgStmt = impAppendTree(asg, (unsigned)CHECK_SPILL_NONE, impCurStmtOffs);
+ GenTree* asgStmt = impAppendTree(asg, (unsigned)CHECK_SPILL_NONE, impCurStmtOffs);
op1 = gtNewLclvNode(impBoxTemp, TYP_REF);
op2 = gtNewIconNode(TARGET_POINTER_SIZE, TYP_I_IMPL);
@@ -5725,7 +5726,7 @@ void Compiler::impImportAndPushBox(CORINFO_RESOLVED_TOKEN* pResolvedToken)
impSpillSideEffects(true, (unsigned)CHECK_SPILL_ALL DEBUGARG("impImportAndPushBox"));
// Set up this copy as a second assignment.
- GenTreePtr copyStmt = impAppendTree(op1, (unsigned)CHECK_SPILL_NONE, impCurStmtOffs);
+ GenTree* copyStmt = impAppendTree(op1, (unsigned)CHECK_SPILL_NONE, impCurStmtOffs);
op1 = gtNewLclvNode(impBoxTemp, TYP_REF);
@@ -5786,7 +5787,7 @@ void Compiler::impImportAndPushBox(CORINFO_RESOLVED_TOKEN* pResolvedToken)
void Compiler::impImportNewObjArray(CORINFO_RESOLVED_TOKEN* pResolvedToken, CORINFO_CALL_INFO* pCallInfo)
{
- GenTreePtr classHandle = impParentClassTokenToHandle(pResolvedToken);
+ GenTree* classHandle = impParentClassTokenToHandle(pResolvedToken);
if (classHandle == nullptr)
{ // compDonotInline()
return;
@@ -5794,7 +5795,7 @@ void Compiler::impImportNewObjArray(CORINFO_RESOLVED_TOKEN* pResolvedToken, CORI
assert(pCallInfo->sig.numArgs);
- GenTreePtr node;
+ GenTree* node;
GenTreeArgList* args;
//
@@ -5849,11 +5850,11 @@ void Compiler::impImportNewObjArray(CORINFO_RESOLVED_TOKEN* pResolvedToken, CORI
// into lvaNewObjArrayArgs temp.
for (int i = pCallInfo->sig.numArgs - 1; i >= 0; i--)
{
- GenTreePtr arg = impImplicitIorI4Cast(impPopStack().val, TYP_INT);
+ GenTree* arg = impImplicitIorI4Cast(impPopStack().val, TYP_INT);
- GenTreePtr dest = gtNewLclvNode(lvaNewObjArrayArgs, TYP_BLK);
- dest = gtNewOperNode(GT_ADDR, TYP_I_IMPL, dest);
- dest = gtNewOperNode(GT_ADD, TYP_I_IMPL, dest,
+ GenTree* dest = gtNewLclvNode(lvaNewObjArrayArgs, TYP_BLK);
+ dest = gtNewOperNode(GT_ADDR, TYP_I_IMPL, dest);
+ dest = gtNewOperNode(GT_ADD, TYP_I_IMPL, dest,
new (this, GT_CNS_INT) GenTreeIntCon(TYP_I_IMPL, sizeof(INT32) * i));
dest = gtNewOperNode(GT_IND, TYP_INT, dest);
@@ -5909,15 +5910,15 @@ void Compiler::impImportNewObjArray(CORINFO_RESOLVED_TOKEN* pResolvedToken, CORI
impPushOnStack(node, typeInfo(TI_REF, pResolvedToken->hClass));
}
-GenTreePtr Compiler::impTransformThis(GenTreePtr thisPtr,
- CORINFO_RESOLVED_TOKEN* pConstrainedResolvedToken,
- CORINFO_THIS_TRANSFORM transform)
+GenTree* Compiler::impTransformThis(GenTree* thisPtr,
+ CORINFO_RESOLVED_TOKEN* pConstrainedResolvedToken,
+ CORINFO_THIS_TRANSFORM transform)
{
switch (transform)
{
case CORINFO_DEREF_THIS:
{
- GenTreePtr obj = thisPtr;
+ GenTree* obj = thisPtr;
// This does an LDIND on the obj, which should be a byref pointing to a ref
impBashVarAddrsToI(obj);
@@ -5940,7 +5941,7 @@ GenTreePtr Compiler::impTransformThis(GenTreePtr thisPtr,
// method from System.Object or System.ValueType. The EE does not provide us with
// "unboxed" versions of these methods.
- GenTreePtr obj = thisPtr;
+ GenTree* obj = thisPtr;
assert(obj->TypeGet() == TYP_BYREF || obj->TypeGet() == TYP_I_IMPL);
obj = gtNewObjNode(pConstrainedResolvedToken->hClass, obj);
@@ -6207,7 +6208,7 @@ GenTreeCall* Compiler::impImportIndirectCall(CORINFO_SIG_INFO* sig, IL_OFFSETX i
/* Get the function pointer */
- GenTreePtr fptr = impPopStack().val;
+ GenTree* fptr = impPopStack().val;
// The function pointer is typically sized to match the target pointer size
// However, stubgen IL optimization can change LDC.I8 to LDC.I4
@@ -6236,7 +6237,7 @@ GenTreeCall* Compiler::impImportIndirectCall(CORINFO_SIG_INFO* sig, IL_OFFSETX i
/*****************************************************************************/
-void Compiler::impPopArgsForUnmanagedCall(GenTreePtr call, CORINFO_SIG_INFO* sig)
+void Compiler::impPopArgsForUnmanagedCall(GenTree* call, CORINFO_SIG_INFO* sig)
{
assert(call->gtFlags & GTF_CALL_UNMANAGED);
@@ -6297,11 +6298,11 @@ void Compiler::impPopArgsForUnmanagedCall(GenTreePtr call, CORINFO_SIG_INFO* sig
/* The argument list is now "clean" - no out-of-order side effects
* Pop the argument list in reverse order */
- GenTreePtr args = call->gtCall.gtCallArgs = impPopRevList(sig->numArgs, sig, sig->numArgs - argsToReverse);
+ GenTree* args = call->gtCall.gtCallArgs = impPopRevList(sig->numArgs, sig, sig->numArgs - argsToReverse);
if (call->gtCall.gtCallMoreFlags & GTF_CALL_M_UNMGD_THISCALL)
{
- GenTreePtr thisPtr = args->Current();
+ GenTree* thisPtr = args->Current();
impBashVarAddrsToI(thisPtr);
assert(thisPtr->TypeGet() == TYP_I_IMPL || thisPtr->TypeGet() == TYP_BYREF);
}
@@ -6324,7 +6325,7 @@ void Compiler::impPopArgsForUnmanagedCall(GenTreePtr call, CORINFO_SIG_INFO* sig
// initialization. Otherwise, nullptr.
//
-GenTreePtr Compiler::impInitClass(CORINFO_RESOLVED_TOKEN* pResolvedToken)
+GenTree* Compiler::impInitClass(CORINFO_RESOLVED_TOKEN* pResolvedToken)
{
CorInfoInitClassResult initClassResult =
info.compCompHnd->initClass(pResolvedToken->hField, info.compMethodHnd, impTokenLookupContextHandle);
@@ -6335,7 +6336,7 @@ GenTreePtr Compiler::impInitClass(CORINFO_RESOLVED_TOKEN* pResolvedToken)
}
BOOL runtimeLookup;
- GenTreePtr node = impParentClassTokenToHandle(pResolvedToken, &runtimeLookup);
+ GenTree* node = impParentClassTokenToHandle(pResolvedToken, &runtimeLookup);
if (node == nullptr)
{
@@ -6356,9 +6357,9 @@ GenTreePtr Compiler::impInitClass(CORINFO_RESOLVED_TOKEN* pResolvedToken)
return node;
}
-GenTreePtr Compiler::impImportStaticReadOnlyField(void* fldAddr, var_types lclTyp)
+GenTree* Compiler::impImportStaticReadOnlyField(void* fldAddr, var_types lclTyp)
{
- GenTreePtr op1 = nullptr;
+ GenTree* op1 = nullptr;
switch (lclTyp)
{
@@ -6422,12 +6423,12 @@ GenTreePtr Compiler::impImportStaticReadOnlyField(void* fldAddr, var_types lclTy
return op1;
}
-GenTreePtr Compiler::impImportStaticFieldAccess(CORINFO_RESOLVED_TOKEN* pResolvedToken,
- CORINFO_ACCESS_FLAGS access,
- CORINFO_FIELD_INFO* pFieldInfo,
- var_types lclTyp)
+GenTree* Compiler::impImportStaticFieldAccess(CORINFO_RESOLVED_TOKEN* pResolvedToken,
+ CORINFO_ACCESS_FLAGS access,
+ CORINFO_FIELD_INFO* pFieldInfo,
+ var_types lclTyp)
{
- GenTreePtr op1;
+ GenTree* op1;
switch (pFieldInfo->fieldAccessor)
{
@@ -6503,7 +6504,7 @@ GenTreePtr Compiler::impImportStaticFieldAccess(CORINFO_RESOLVED_TOKEN* pResolve
CORINFO_LOOKUP_KIND kind = info.compCompHnd->getLocationOfThisType(info.compMethodHnd);
assert(kind.needsRuntimeLookup);
- GenTreePtr ctxTree = getRuntimeContextTree(kind.runtimeLookupKind);
+ GenTree* ctxTree = getRuntimeContextTree(kind.runtimeLookupKind);
GenTreeArgList* args = gtNewArgList(ctxTree);
unsigned callFlags = 0;
@@ -6656,7 +6657,7 @@ void Compiler::impInsertHelperCall(CORINFO_HELPER_DESC* helperInfo)
for (unsigned i = helperInfo->numArgs; i > 0; --i)
{
const CORINFO_HELPER_ARG& helperArg = helperInfo->args[i - 1];
- GenTreePtr currentArg = nullptr;
+ GenTree* currentArg = nullptr;
switch (helperArg.argType)
{
case CORINFO_HELPER_ARG_TYPE_Field:
@@ -6688,7 +6689,7 @@ void Compiler::impInsertHelperCall(CORINFO_HELPER_DESC* helperInfo)
* Mark as CSE'able, and hoistable. Consider marking hoistable unless you're in the inlinee.
* Also, consider sticking this in the first basic block.
*/
- GenTreePtr callout = gtNewHelperCallNode(helperInfo->helperNum, TYP_VOID, args);
+ GenTree* callout = gtNewHelperCallNode(helperInfo->helperNum, TYP_VOID, args);
impAppendTree(callout, (unsigned)CHECK_SPILL_NONE, impCurStmtOffs);
}
@@ -6928,7 +6929,7 @@ bool Compiler::impIsImplicitTailCallCandidate(
var_types Compiler::impImportCall(OPCODE opcode,
CORINFO_RESOLVED_TOKEN* pResolvedToken,
CORINFO_RESOLVED_TOKEN* pConstrainedResolvedToken,
- GenTreePtr newobjThis,
+ GenTree* newobjThis,
int prefixFlags,
CORINFO_CALL_INFO* callInfo,
IL_OFFSET rawILOffset)
@@ -6943,7 +6944,7 @@ var_types Compiler::impImportCall(OPCODE opcode,
unsigned clsFlags = 0;
unsigned mflags = 0;
unsigned argFlags = 0;
- GenTreePtr call = nullptr;
+ GenTree* call = nullptr;
GenTreeArgList* args = nullptr;
CORINFO_THIS_TRANSFORM constraintCallThisTransform = CORINFO_NO_THIS_TRANSFORM;
CORINFO_CONTEXT_HANDLE exactContextHnd = nullptr;
@@ -7258,7 +7259,7 @@ var_types Compiler::impImportCall(OPCODE opcode,
return TYP_UNDEF;
}
- GenTreePtr stubAddr = impRuntimeLookupToTree(pResolvedToken, &callInfo->stubLookup, methHnd);
+ GenTree* stubAddr = impRuntimeLookupToTree(pResolvedToken, &callInfo->stubLookup, methHnd);
assert(!compDonotInline());
// This is the rough code to set up an indirect stub call
@@ -7341,16 +7342,16 @@ var_types Compiler::impImportCall(OPCODE opcode,
args = impPopList(sig->numArgs, sig);
- GenTreePtr thisPtr = impPopStack().val;
- thisPtr = impTransformThis(thisPtr, pConstrainedResolvedToken, callInfo->thisTransform);
+ GenTree* thisPtr = impPopStack().val;
+ thisPtr = impTransformThis(thisPtr, pConstrainedResolvedToken, callInfo->thisTransform);
assert(thisPtr != nullptr);
// Clone the (possibly transformed) "this" pointer
- GenTreePtr thisPtrCopy;
+ GenTree* thisPtrCopy;
thisPtr = impCloneExpr(thisPtr, &thisPtrCopy, NO_CLASS_HANDLE, (unsigned)CHECK_SPILL_ALL,
nullptr DEBUGARG("LDVIRTFTN this pointer"));
- GenTreePtr fptr = impImportLdvirtftn(thisPtr, pResolvedToken, callInfo);
+ GenTree* fptr = impImportLdvirtftn(thisPtr, pResolvedToken, callInfo);
assert(fptr != nullptr);
thisPtr = nullptr; // can't reuse it
@@ -7421,7 +7422,7 @@ var_types Compiler::impImportCall(OPCODE opcode,
assert((sig->callConv & CORINFO_CALLCONV_MASK) != CORINFO_CALLCONV_VARARG);
assert((sig->callConv & CORINFO_CALLCONV_MASK) != CORINFO_CALLCONV_NATIVEVARARG);
- GenTreePtr fptr =
+ GenTree* fptr =
impLookupToTree(pResolvedToken, &callInfo->codePointerLookup, GTF_ICON_FTN_ADDR, callInfo->hMethod);
if (compDonotInline())
@@ -7682,12 +7683,12 @@ var_types Compiler::impImportCall(OPCODE opcode,
return TYP_UNDEF;
}
- GenTreePtr cookie = eeGetPInvokeCookie(sig);
+ GenTree* cookie = eeGetPInvokeCookie(sig);
// This cookie is required to be either a simple GT_CNS_INT or
// an indirection of a GT_CNS_INT
//
- GenTreePtr cookieConst = cookie;
+ GenTree* cookieConst = cookie;
if (cookie->gtOper == GT_IND)
{
cookieConst = cookie->gtOp.gtOp1;
@@ -7729,7 +7730,7 @@ var_types Compiler::impImportCall(OPCODE opcode,
varCookie = info.compCompHnd->getVarArgsHandle(sig, &pVarCookie);
assert((!varCookie) != (!pVarCookie));
- GenTreePtr cookie = gtNewIconEmbHndNode(varCookie, pVarCookie, GTF_ICON_VARG_HDL, sig);
+ GenTree* cookie = gtNewIconEmbHndNode(varCookie, pVarCookie, GTF_ICON_VARG_HDL, sig);
assert(extraArg == nullptr);
extraArg = gtNewArgList(cookie);
@@ -7762,8 +7763,8 @@ var_types Compiler::impImportCall(OPCODE opcode,
assert(opcode != CEE_CALLI);
- GenTreePtr instParam;
- BOOL runtimeLookup;
+ GenTree* instParam;
+ BOOL runtimeLookup;
// Instantiated generic method
if (((SIZE_T)exactContextHnd & CORINFO_CONTEXTFLAGS_MASK) == CORINFO_CONTEXTFLAGS_METHOD)
@@ -7910,7 +7911,7 @@ var_types Compiler::impImportCall(OPCODE opcode,
if (!(mflags & CORINFO_FLG_STATIC) && !((opcode == CEE_NEWOBJ) && (newobjThis == nullptr)))
{
- GenTreePtr obj;
+ GenTree* obj;
if (opcode == CEE_NEWOBJ)
{
@@ -8084,7 +8085,7 @@ DONE:
// True virtual or indirect calls shouldn't pass in a callee handle.
CORINFO_METHOD_HANDLE exactCalleeHnd =
((call->gtCall.gtCallType != CT_USER_FUNC) || call->gtCall.IsVirtual()) ? nullptr : methHnd;
- GenTreePtr thisArg = call->gtCall.gtCallObjp;
+ GenTree* thisArg = call->gtCall.gtCallObjp;
if (info.compCompHnd->canTailCall(info.compMethodHnd, methHnd, exactCalleeHnd, explicitTailCall))
{
@@ -8187,7 +8188,7 @@ DONE:
if (compIsForInlining() && opcode == CEE_CALLVIRT)
{
- GenTreePtr callObj = call->gtCall.gtCallObjp;
+ GenTree* callObj = call->gtCall.gtCallObjp;
assert(callObj != nullptr);
if ((call->gtCall.IsVirtual() || (call->gtFlags & GTF_CALL_NULLCHECK)) &&
@@ -8394,7 +8395,7 @@ var_types Compiler::impImportJitTestLabelMark(int numArgs)
tlAndN.m_num = 0;
StackEntry se = impPopStack();
assert(se.seTypeInfo.GetType() == TI_INT);
- GenTreePtr val = se.val;
+ GenTree* val = se.val;
assert(val->IsCnsIntOrI());
tlAndN.m_tl = (TestLabel)val->AsIntConCommon()->IconValue();
}
@@ -8402,7 +8403,7 @@ var_types Compiler::impImportJitTestLabelMark(int numArgs)
{
StackEntry se = impPopStack();
assert(se.seTypeInfo.GetType() == TI_INT);
- GenTreePtr val = se.val;
+ GenTree* val = se.val;
assert(val->IsCnsIntOrI());
tlAndN.m_num = val->AsIntConCommon()->IconValue();
se = impPopStack();
@@ -8417,7 +8418,7 @@ var_types Compiler::impImportJitTestLabelMark(int numArgs)
}
StackEntry expSe = impPopStack();
- GenTreePtr node = expSe.val;
+ GenTree* node = expSe.val;
// There are a small number of special cases, where we actually put the annotation on a subnode.
if (tlAndN.m_tl == TL_LoopHoist && tlAndN.m_num >= 100)
@@ -8426,7 +8427,7 @@ var_types Compiler::impImportJitTestLabelMark(int numArgs)
// a GT_IND of a static field address, which should be the sum of a (hoistable) helper call and possibly some
// offset within the static field block whose address is returned by the helper call.
// The annotation is saying that this address calculation, but not the entire access, should be hoisted.
- GenTreePtr helperCall = nullptr;
+ GenTree* helperCall = nullptr;
assert(node->OperGet() == GT_IND);
tlAndN.m_num -= 100;
GetNodeTestData()->Set(node->gtOp.gtOp1, tlAndN);
@@ -8454,7 +8455,7 @@ var_types Compiler::impImportJitTestLabelMark(int numArgs)
// Return Value:
// Returns new GenTree node after fixing struct return of call node
//
-GenTreePtr Compiler::impFixupCallStructReturn(GenTreeCall* call, CORINFO_CLASS_HANDLE retClsHnd)
+GenTree* Compiler::impFixupCallStructReturn(GenTreeCall* call, CORINFO_CLASS_HANDLE retClsHnd)
{
if (!varTypeIsStruct(call))
{
@@ -8569,7 +8570,7 @@ GenTreePtr Compiler::impFixupCallStructReturn(GenTreeCall* call, CORINFO_CLASS_H
Note that this method is only called for !_TARGET_X86_
*/
-GenTreePtr Compiler::impFixupStructReturnType(GenTreePtr op, CORINFO_CLASS_HANDLE retClsHnd)
+GenTree* Compiler::impFixupStructReturnType(GenTree* op, CORINFO_CLASS_HANDLE retClsHnd)
{
assert(varTypeIsStruct(info.compRetType));
assert(info.compRetBuffArg == BAD_VAR_NUM);
@@ -8691,7 +8692,7 @@ REDO_RETURN_NODE:
}
else if (op->gtOper == GT_OBJ)
{
- GenTreePtr op1 = op->AsObj()->Addr();
+ GenTree* op1 = op->AsObj()->Addr();
// We will fold away OBJ/ADDR
// except for OBJ/ADDR/INDEX
@@ -8808,8 +8809,8 @@ void Compiler::impImportLeave(BasicBlock* block)
BasicBlock* step = DUMMY_INIT(NULL);
unsigned encFinallies = 0; // Number of enclosing finallies.
- GenTreePtr endCatches = NULL;
- GenTreePtr endLFin = NULL; // The statement tree to indicate the end of locally-invoked finally.
+ GenTree* endCatches = NULL;
+ GenTree* endLFin = NULL; // The statement tree to indicate the end of locally-invoked finally.
unsigned XTnum;
EHblkDsc* HBtab;
@@ -8834,7 +8835,7 @@ void Compiler::impImportLeave(BasicBlock* block)
BADCODE("leave out of fault/finally block");
// Create the call to CORINFO_HELP_ENDCATCH
- GenTreePtr endCatch = gtNewHelperCallNode(CORINFO_HELP_ENDCATCH, TYP_VOID);
+ GenTree* endCatch = gtNewHelperCallNode(CORINFO_HELP_ENDCATCH, TYP_VOID);
// Make a list of all the currently pending endCatches
if (endCatches)
@@ -8907,7 +8908,7 @@ void Compiler::impImportLeave(BasicBlock* block)
}
#endif
- GenTreePtr lastStmt;
+ GenTree* lastStmt;
if (endCatches)
{
@@ -8998,7 +8999,7 @@ void Compiler::impImportLeave(BasicBlock* block)
}
#endif
- GenTreePtr lastStmt;
+ GenTree* lastStmt;
if (endCatches)
{
@@ -9644,10 +9645,11 @@ const static controlFlow_t controlFlow[] = {
* Determine the result type of an arithmetic operation
* On 64-bit, inserts upcasts when native int is mixed with int32
*/
-var_types Compiler::impGetByRefResultType(genTreeOps oper, bool fUnsigned, GenTreePtr* pOp1, GenTreePtr* pOp2)
+var_types Compiler::impGetByRefResultType(genTreeOps oper, bool fUnsigned, GenTree** pOp1, GenTree** pOp2)
{
- var_types type = TYP_UNDEF;
- GenTreePtr op1 = *pOp1, op2 = *pOp2;
+ var_types type = TYP_UNDEF;
+ GenTree* op1 = *pOp1;
+ GenTree* op2 = *pOp2;
// Arithmetic operations are generally only allowed with
// primitive types, but certain operations are allowed
@@ -9915,10 +9917,10 @@ GenTree* Compiler::impOptimizeCastClassOrIsInst(GenTree* op1, CORINFO_RESOLVED_T
// Notes:
// May expand into a series of runtime checks or a helper call.
-GenTreePtr Compiler::impCastClassOrIsInstToTree(GenTreePtr op1,
- GenTreePtr op2,
- CORINFO_RESOLVED_TOKEN* pResolvedToken,
- bool isCastClass)
+GenTree* Compiler::impCastClassOrIsInstToTree(GenTree* op1,
+ GenTree* op2,
+ CORINFO_RESOLVED_TOKEN* pResolvedToken,
+ bool isCastClass)
{
assert(op1->TypeGet() == TYP_REF);
@@ -9990,8 +9992,8 @@ GenTreePtr Compiler::impCastClassOrIsInstToTree(GenTreePtr op1,
impSpillSideEffects(true, CHECK_SPILL_ALL DEBUGARG("bubbling QMark2"));
- GenTreePtr temp;
- GenTreePtr condMT;
+ GenTree* temp;
+ GenTree* condMT;
//
// expand the methodtable match:
//
@@ -10010,7 +10012,7 @@ GenTreePtr Compiler::impCastClassOrIsInstToTree(GenTreePtr op1,
// thus we can use gtClone(op1) from now on
//
- GenTreePtr op2Var = op2;
+ GenTree* op2Var = op2;
if (isCastClass)
{
op2Var = fgInsertCommaFormTemp(&op2);
@@ -10020,7 +10022,7 @@ GenTreePtr Compiler::impCastClassOrIsInstToTree(GenTreePtr op1,
temp->gtFlags |= GTF_EXCEPT;
condMT = gtNewOperNode(GT_NE, TYP_INT, temp, op2);
- GenTreePtr condNull;
+ GenTree* condNull;
//
// expand the null check:
//
@@ -10034,8 +10036,8 @@ GenTreePtr Compiler::impCastClassOrIsInstToTree(GenTreePtr op1,
//
// expand the true and false trees for the condMT
//
- GenTreePtr condFalse = gtClone(op1);
- GenTreePtr condTrue;
+ GenTree* condFalse = gtClone(op1);
+ GenTree* condTrue;
if (isCastClass)
{
//
@@ -10053,7 +10055,7 @@ GenTreePtr Compiler::impCastClassOrIsInstToTree(GenTreePtr op1,
#define USE_QMARK_TREES
#ifdef USE_QMARK_TREES
- GenTreePtr qmarkMT;
+ GenTree* qmarkMT;
//
// Generate first QMARK - COLON tree
//
@@ -10067,7 +10069,7 @@ GenTreePtr Compiler::impCastClassOrIsInstToTree(GenTreePtr op1,
qmarkMT = gtNewQmarkNode(TYP_REF, condMT, temp);
condMT->gtFlags |= GTF_RELOP_QMARK;
- GenTreePtr qmarkNull;
+ GenTree* qmarkNull;
//
// Generate second QMARK - COLON tree
//
@@ -10133,7 +10135,9 @@ void Compiler::impImportBlockCode(BasicBlock* block)
unsigned nxtStmtIndex = impInitBlockLineInfo();
IL_OFFSET nxtStmtOffs;
- GenTreePtr arrayNodeFrom, arrayNodeTo, arrayNodeToIndex;
+ GenTree* arrayNodeFrom;
+ GenTree* arrayNodeTo;
+ GenTree* arrayNodeToIndex;
CorInfoHelpFunc helper;
CorInfoIsAccessAllowedResult accessAllowedResult;
CORINFO_HELPER_DESC calloutHelper;
@@ -10251,7 +10255,7 @@ void Compiler::impImportBlockCode(BasicBlock* block)
if (impCurStmtOffs != BAD_IL_OFFSET && opts.compDbgCode)
{
- GenTreePtr placeHolder = new (this, GT_NO_OP) GenTree(GT_NO_OP, TYP_VOID);
+ GenTree* placeHolder = new (this, GT_NO_OP) GenTree(GT_NO_OP, TYP_VOID);
impAppendTree(placeHolder, (unsigned)CHECK_SPILL_NONE, impCurStmtOffs);
assert(impCurStmtOffs == BAD_IL_OFFSET);
@@ -10339,10 +10343,10 @@ void Compiler::impImportBlockCode(BasicBlock* block)
CORINFO_CLASS_HANDLE stelemClsHnd = DUMMY_INIT(NULL);
var_types lclTyp, ovflType = TYP_UNKNOWN;
- GenTreePtr op1 = DUMMY_INIT(NULL);
- GenTreePtr op2 = DUMMY_INIT(NULL);
+ GenTree* op1 = DUMMY_INIT(NULL);
+ GenTree* op2 = DUMMY_INIT(NULL);
GenTreeArgList* args = nullptr; // What good do these "DUMMY_INIT"s do?
- GenTreePtr newObjThisPtr = DUMMY_INIT(NULL);
+ GenTree* newObjThisPtr = DUMMY_INIT(NULL);
bool uns = DUMMY_INIT(false);
bool isLocal = false;
@@ -10396,7 +10400,7 @@ void Compiler::impImportBlockCode(BasicBlock* block)
unsigned lclNum;
var_types type;
- GenTreePtr op3;
+ GenTree* op3;
genTreeOps oper;
unsigned size;
@@ -10495,7 +10499,7 @@ void Compiler::impImportBlockCode(BasicBlock* block)
cval.dblVal = getR4LittleEndian(codeAddr);
JITDUMP(" %#.17g", cval.dblVal);
{
- GenTreePtr cnsOp = gtNewDconNode(cval.dblVal);
+ GenTree* cnsOp = gtNewDconNode(cval.dblVal);
#if !FEATURE_X87_DOUBLES
// X87 stack doesn't differentiate between float/double
// so R4 is treated as R8, but everybody else does
@@ -13015,7 +13019,7 @@ void Compiler::impImportBlockCode(BasicBlock* block)
goto DO_LDFTN;
}
- GenTreePtr fptr = impImportLdvirtftn(op1, &resolvedToken, &callInfo);
+ GenTree* fptr = impImportLdvirtftn(op1, &resolvedToken, &callInfo);
if (compDonotInline())
{
return;
@@ -13532,7 +13536,7 @@ void Compiler::impImportBlockCode(BasicBlock* block)
int aflags = isLoadAddress ? CORINFO_ACCESS_ADDRESS : CORINFO_ACCESS_GET;
- GenTreePtr obj = nullptr;
+ GenTree* obj = nullptr;
typeInfo* tiObj = nullptr;
CORINFO_CLASS_HANDLE objType = nullptr; // used for fields
@@ -13885,7 +13889,7 @@ void Compiler::impImportBlockCode(BasicBlock* block)
if (fieldInfo.fieldFlags & CORINFO_FLG_FIELD_INITCLASS)
{
- GenTreePtr helperNode = impInitClass(&resolvedToken);
+ GenTree* helperNode = impInitClass(&resolvedToken);
if (compDonotInline())
{
return;
@@ -13917,10 +13921,10 @@ void Compiler::impImportBlockCode(BasicBlock* block)
JITDUMP(" %08X", resolvedToken.token);
- int aflags = CORINFO_ACCESS_SET;
- GenTreePtr obj = nullptr;
- typeInfo* tiObj = nullptr;
- typeInfo tiVal;
+ int aflags = CORINFO_ACCESS_SET;
+ GenTree* obj = nullptr;
+ typeInfo* tiObj = nullptr;
+ typeInfo tiVal;
/* Pull the value from the stack */
StackEntry se = impPopStack();
@@ -14194,7 +14198,7 @@ void Compiler::impImportBlockCode(BasicBlock* block)
if (fieldInfo.fieldFlags & CORINFO_FLG_FIELD_INITCLASS)
{
- GenTreePtr helperNode = impInitClass(&resolvedToken);
+ GenTree* helperNode = impInitClass(&resolvedToken);
if (compDonotInline())
{
return;
@@ -14719,12 +14723,12 @@ void Compiler::impImportBlockCode(BasicBlock* block)
// ((*clone == typeToken) ? nop : helper(clone, typeToken));
// push(clone + TARGET_POINTER_SIZE)
//
- GenTreePtr cloneOperand;
+ GenTree* cloneOperand;
op1 = impCloneExpr(op1, &cloneOperand, NO_CLASS_HANDLE, (unsigned)CHECK_SPILL_ALL,
nullptr DEBUGARG("inline UNBOX clone1"));
op1 = gtNewOperNode(GT_IND, TYP_I_IMPL, op1);
- GenTreePtr condBox = gtNewOperNode(GT_EQ, TYP_INT, op1, op2);
+ GenTree* condBox = gtNewOperNode(GT_EQ, TYP_INT, op1, op2);
op1 = impCloneExpr(cloneOperand, &cloneOperand, NO_CLASS_HANDLE, (unsigned)CHECK_SPILL_ALL,
nullptr DEBUGARG("inline UNBOX clone2"));
@@ -15676,7 +15680,7 @@ void Compiler::impLoadLoc(unsigned ilLclNum, IL_OFFSET offset)
* (transfer, not a cast.)
*
*/
-void Compiler::impMarkLclDstNotPromotable(unsigned tmpNum, GenTreePtr src, CORINFO_CLASS_HANDLE hClass)
+void Compiler::impMarkLclDstNotPromotable(unsigned tmpNum, GenTree* src, CORINFO_CLASS_HANDLE hClass)
{
if (src->gtOper == GT_CALL && src->gtCall.IsVarargs() && IsHfa(hClass))
{
@@ -15699,11 +15703,11 @@ void Compiler::impMarkLclDstNotPromotable(unsigned tmpNum, GenTreePtr src, CORIN
#endif // _TARGET_ARM_
#if FEATURE_MULTIREG_RET
-GenTreePtr Compiler::impAssignMultiRegTypeToVar(GenTreePtr op, CORINFO_CLASS_HANDLE hClass)
+GenTree* Compiler::impAssignMultiRegTypeToVar(GenTree* op, CORINFO_CLASS_HANDLE hClass)
{
unsigned tmpNum = lvaGrabTemp(true DEBUGARG("Return value temp for multireg return."));
impAssignTempGen(tmpNum, op, hClass, (unsigned)CHECK_SPILL_ALL);
- GenTreePtr ret = gtNewLclvNode(tmpNum, op->gtType);
+ GenTree* ret = gtNewLclvNode(tmpNum, op->gtType);
// TODO-1stClassStructs: Handle constant propagation and CSE-ing of multireg returns.
ret->gtFlags |= GTF_DONT_CSE;
@@ -16048,7 +16052,7 @@ bool Compiler::impReturnInstruction(BasicBlock* block, int prefixFlags, OPCODE&
#endif // defined(_TARGET_ARM64_)
{
assert(iciCall->HasRetBufArg());
- GenTreePtr dest = gtCloneExpr(iciCall->gtCallArgs->gtOp.gtOp1);
+ GenTree* dest = gtCloneExpr(iciCall->gtCallArgs->gtOp.gtOp1);
// spill temp only exists if there are multiple return points
if (fgNeedReturnSpillTemp())
{
@@ -16082,7 +16086,7 @@ bool Compiler::impReturnInstruction(BasicBlock* block, int prefixFlags, OPCODE&
else if (info.compRetBuffArg != BAD_VAR_NUM)
{
// Assign value to return buff (first param)
- GenTreePtr retBuffAddr = gtNewLclvNode(info.compRetBuffArg, TYP_BYREF, impCurStmtOffs);
+ GenTree* retBuffAddr = gtNewLclvNode(info.compRetBuffArg, TYP_BYREF, impCurStmtOffs);
op2 = impAssignStructPtr(retBuffAddr, op2, retClsHnd, (unsigned)CHECK_SPILL_ALL);
impAppendTree(op2, (unsigned)CHECK_SPILL_NONE, impCurStmtOffs);
@@ -16470,7 +16474,7 @@ SPILLSTACK:
// on the stack, its lifetime is hard to determine, simply
// don't reuse such temps.
- GenTreePtr addStmt = nullptr;
+ GenTree* addStmt = nullptr;
/* Do the successors of 'block' have any other predecessors ?
We do not want to do some of the optimizations related to multiRef
@@ -16590,7 +16594,7 @@ SPILLSTACK:
JITDUMP("\nSpilling stack entries into temps\n");
for (level = 0, tempNum = baseTmp; level < verCurrentState.esStackDepth; level++, tempNum++)
{
- GenTreePtr tree = verCurrentState.esStack[level].val;
+ GenTree* tree = verCurrentState.esStack[level].val;
/* VC generates code where it pushes a byref from one branch, and an int (ldc.i4 0) from
the other. This should merge to a byref in unverifiable code.
@@ -16704,11 +16708,11 @@ SPILLSTACK:
if (addStmt && !newTemps && gtHasRef(addStmt->gtStmt.gtStmtExpr, tempNum, false))
{
- GenTreePtr addTree = addStmt->gtStmt.gtStmtExpr;
+ GenTree* addTree = addStmt->gtStmt.gtStmtExpr;
if (addTree->gtOper == GT_JTRUE)
{
- GenTreePtr relOp = addTree->gtOp.gtOp1;
+ GenTree* relOp = addTree->gtOp.gtOp1;
assert(relOp->OperIsCompare());
var_types type = genActualType(relOp->gtOp.gtOp1->TypeGet());
@@ -17179,7 +17183,7 @@ void Compiler::impRetypeEntryStateTemps(BasicBlock* blk)
EntryState* es = blk->bbEntryState;
for (unsigned level = 0; level < es->esStackDepth; level++)
{
- GenTreePtr tree = es->esStack[level].val;
+ GenTree* tree = es->esStack[level].val;
if ((tree->gtOper == GT_LCL_VAR) || (tree->gtOper == GT_LCL_FLD))
{
unsigned lclNum = tree->gtLclVarCommon.gtLclNum;
@@ -17264,7 +17268,7 @@ void Compiler::verInitBBEntryState(BasicBlock* block, EntryState* srcState)
memcpy(block->bbEntryState->esStack, srcState->esStack, stackSize);
for (unsigned level = 0; level < srcState->esStackDepth; level++)
{
- GenTreePtr tree = srcState->esStack[level].val;
+ GenTree* tree = srcState->esStack[level].val;
block->bbEntryState->esStack[level].val = gtCloneExpr(tree);
}
}
@@ -17572,14 +17576,14 @@ bool Compiler::impIsValueType(typeInfo* pTypeInfo)
*/
-BOOL Compiler::impIsAddressInLocal(GenTreePtr tree, GenTreePtr* lclVarTreeOut)
+BOOL Compiler::impIsAddressInLocal(GenTree* tree, GenTree** lclVarTreeOut)
{
if (tree->gtOper != GT_ADDR)
{
return FALSE;
}
- GenTreePtr op = tree->gtOp.gtOp1;
+ GenTree* op = tree->gtOp.gtOp1;
while (op->gtOper == GT_FIELD)
{
op = op->gtField.gtFldObj;
@@ -17665,7 +17669,7 @@ void Compiler::impMakeDiscretionaryInlineObservations(InlineInfo* pInlineInfo, I
// Check if the callee has the same 'this' as the root.
if (pInlineInfo != nullptr)
{
- GenTreePtr thisArg = pInlineInfo->iciCall->gtCall.gtCallObjp;
+ GenTree* thisArg = pInlineInfo->iciCall->gtCall.gtCallObjp;
assert(thisArg);
bool isSameThis = impIsThis(thisArg);
inlineResult->NoteBool(InlineObservation::CALLSITE_IS_SAME_THIS, isSameThis);
@@ -17827,7 +17831,7 @@ void Compiler::impCanInlineIL(CORINFO_METHOD_HANDLE fncHandle,
/*****************************************************************************
*/
-void Compiler::impCheckCanInline(GenTreePtr call,
+void Compiler::impCheckCanInline(GenTree* call,
CORINFO_METHOD_HANDLE fncHandle,
unsigned methAttr,
CORINFO_CONTEXT_HANDLE exactContextHnd,
@@ -17840,7 +17844,7 @@ void Compiler::impCheckCanInline(GenTreePtr call,
struct Param
{
Compiler* pThis;
- GenTreePtr call;
+ GenTree* call;
CORINFO_METHOD_HANDLE fncHandle;
unsigned methAttr;
CORINFO_CONTEXT_HANDLE exactContextHnd;
@@ -17939,7 +17943,7 @@ void Compiler::impCheckCanInline(GenTreePtr call,
if (dwRestrictions & INLINE_SAME_THIS)
{
- GenTreePtr thisArg = pParam->call->gtCall.gtCallObjp;
+ GenTree* thisArg = pParam->call->gtCall.gtCallObjp;
assert(thisArg);
if (!pParam->pThis->impIsThis(thisArg))
@@ -18030,7 +18034,7 @@ void Compiler::impInlineRecordArgInfo(InlineInfo* pInlineInfo,
inlCurArgInfo->argNode = curArgVal;
- GenTreePtr lclVarTree;
+ GenTree* lclVarTree;
if (impIsAddressInLocal(curArgVal, &lclVarTree) && varTypeIsStruct(lclVarTree))
{
inlCurArgInfo->argIsByRefToStructLocal = true;
@@ -18164,7 +18168,7 @@ void Compiler::impInlineInitVars(InlineInfo* pInlineInfo)
{
assert(!compIsForInlining());
- GenTreePtr call = pInlineInfo->iciCall;
+ GenTree* call = pInlineInfo->iciCall;
CORINFO_METHOD_INFO* methInfo = &pInlineInfo->inlineCandidateInfo->methInfo;
unsigned clsAttr = pInlineInfo->inlineCandidateInfo->clsAttr;
InlArgInfo* inlArgInfo = pInlineInfo->inlArgInfo;
@@ -18179,9 +18183,9 @@ void Compiler::impInlineInitVars(InlineInfo* pInlineInfo)
/* Get hold of the 'this' pointer and the argument list proper */
- GenTreePtr thisArg = call->gtCall.gtCallObjp;
- GenTreePtr argList = call->gtCall.gtCallArgs;
- unsigned argCnt = 0; // Count of the arguments
+ GenTree* thisArg = call->gtCall.gtCallObjp;
+ GenTree* argList = call->gtCall.gtCallArgs;
+ unsigned argCnt = 0; // Count of the arguments
assert((methInfo->args.hasThis()) == (thisArg != nullptr));
@@ -18209,7 +18213,7 @@ void Compiler::impInlineInitVars(InlineInfo* pInlineInfo)
unsigned typeCtxtArg = methInfo->args.totalILArgs();
#endif // USER_ARGS_COME_LAST
- for (GenTreePtr argTmp = argList; argTmp; argTmp = argTmp->gtOp.gtOp2)
+ for (GenTree* argTmp = argList; argTmp; argTmp = argTmp->gtOp.gtOp2)
{
if (argTmp == argList && hasRetBuffArg)
{
@@ -18354,7 +18358,7 @@ void Compiler::impInlineInitVars(InlineInfo* pInlineInfo)
/* Does the tree type match the signature type? */
- GenTreePtr inlArgNode = inlArgInfo[i].argNode;
+ GenTree* inlArgNode = inlArgInfo[i].argNode;
if (sigType != inlArgNode->gtType)
{
@@ -18645,7 +18649,7 @@ unsigned Compiler::impInlineFetchLocal(unsigned lclNum DEBUGARG(const char* reas
// This method will side effect inlArgInfo. It should only be called
// for actual uses of the argument in the inlinee.
-GenTreePtr Compiler::impInlineFetchArg(unsigned lclNum, InlArgInfo* inlArgInfo, InlLclVarInfo* lclVarInfo)
+GenTree* Compiler::impInlineFetchArg(unsigned lclNum, InlArgInfo* inlArgInfo, InlLclVarInfo* lclVarInfo)
{
// Cache the relevant arg and lcl info for this argument.
// We will modify argInfo but not lclVarInfo.
@@ -18653,7 +18657,7 @@ GenTreePtr Compiler::impInlineFetchArg(unsigned lclNum, InlArgInfo* inlArgInfo,
const InlLclVarInfo& lclInfo = lclVarInfo[lclNum];
const bool argCanBeModified = argInfo.argHasLdargaOp || argInfo.argHasStargOp;
const var_types lclTyp = lclInfo.lclTypeInfo;
- GenTreePtr op1 = nullptr;
+ GenTree* op1 = nullptr;
if (argInfo.argIsInvariant && !argCanBeModified)
{
@@ -18832,7 +18836,7 @@ GenTreePtr Compiler::impInlineFetchArg(unsigned lclNum, InlArgInfo* inlArgInfo,
worry about it.
*/
-BOOL Compiler::impInlineIsThis(GenTreePtr tree, InlArgInfo* inlArgInfo)
+BOOL Compiler::impInlineIsThis(GenTree* tree, InlArgInfo* inlArgInfo)
{
assert(compIsForInlining());
return (tree->gtOper == GT_LCL_VAR && tree->gtLclVarCommon.gtLclNum == inlArgInfo[0].argTmpNum);
@@ -18849,8 +18853,8 @@ BOOL Compiler::impInlineIsThis(GenTreePtr tree, InlArgInfo* inlArgInfo)
// is the set of pending trees that have not yet been added to the statement list,
// and which have been removed from verCurrentState.esStack[]
-BOOL Compiler::impInlineIsGuaranteedThisDerefBeforeAnySideEffects(GenTreePtr additionalTreesToBeEvaluatedBefore,
- GenTreePtr variableBeingDereferenced,
+BOOL Compiler::impInlineIsGuaranteedThisDerefBeforeAnySideEffects(GenTree* additionalTreesToBeEvaluatedBefore,
+ GenTree* variableBeingDereferenced,
InlArgInfo* inlArgInfo)
{
assert(compIsForInlining());
@@ -18858,8 +18862,8 @@ BOOL Compiler::impInlineIsGuaranteedThisDerefBeforeAnySideEffects(GenTreePtr ad
BasicBlock* block = compCurBB;
- GenTreePtr stmt;
- GenTreePtr expr;
+ GenTree* stmt;
+ GenTree* expr;
if (block != fgFirstBB)
{
@@ -18918,7 +18922,7 @@ BOOL Compiler::impInlineIsGuaranteedThisDerefBeforeAnySideEffects(GenTreePtr ad
// method may be marked as "noinline" to short-circuit any
// future assessments of calls to this method.
-void Compiler::impMarkInlineCandidate(GenTreePtr callNode,
+void Compiler::impMarkInlineCandidate(GenTree* callNode,
CORINFO_CONTEXT_HANDLE exactContextHnd,
bool exactContextNeedsRuntimeLookup,
CORINFO_CALL_INFO* callInfo)
@@ -19244,7 +19248,7 @@ bool Compiler::IsMathIntrinsic(CorInfoIntrinsics intrinsicId)
}
}
-bool Compiler::IsMathIntrinsic(GenTreePtr tree)
+bool Compiler::IsMathIntrinsic(GenTree* tree)
{
return (tree->OperGet() == GT_INTRINSIC) && IsMathIntrinsic(tree->gtIntrinsic.gtIntrinsicId);
}
@@ -19771,12 +19775,12 @@ public:
void StoreRetExprResultsInArgs(GenTreeCall* call)
{
- GenTreePtr args = call->gtCallArgs;
+ GenTree* args = call->gtCallArgs;
if (args != nullptr)
{
comp->fgWalkTreePre(&args, SpillRetExprVisitor, this);
}
- GenTreePtr thisArg = call->gtCallObjp;
+ GenTree* thisArg = call->gtCallObjp;
if (thisArg != nullptr)
{
comp->fgWalkTreePre(&thisArg, SpillRetExprVisitor, this);
@@ -19787,7 +19791,7 @@ private:
static Compiler::fgWalkResult SpillRetExprVisitor(GenTree** pTree, Compiler::fgWalkData* fgWalkPre)
{
assert((pTree != nullptr) && (*pTree != nullptr));
- GenTreePtr tree = *pTree;
+ GenTree* tree = *pTree;
if ((tree->gtFlags & GTF_CALL) == 0)
{
// Trees with ret_expr are marked as GTF_CALL.
@@ -19801,9 +19805,9 @@ private:
return Compiler::WALK_CONTINUE;
}
- void StoreRetExprAsLocalVar(GenTreePtr* pRetExpr)
+ void StoreRetExprAsLocalVar(GenTree** pRetExpr)
{
- GenTreePtr retExpr = *pRetExpr;
+ GenTree* retExpr = *pRetExpr;
assert(retExpr->OperGet() == GT_RET_EXPR);
JITDUMP("Store return expression %u as a local var.\n", retExpr->gtTreeID);
unsigned tmp = comp->lvaGrabTemp(true DEBUGARG("spilling ret_expr"));
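
A minimal standalone sketch of the rewrite applied throughout the importer.cpp hunks above, assuming GenTreePtr was a plain alias for GenTree* (the one-for-one replacement suggests as much). The names DummyTree, DummyTreePtr, and UseBoth below are hypothetical and exist only for illustration; they are not part of the JIT sources.

#include <type_traits>

struct DummyTree; // stand-in for a JIT tree node type

typedef DummyTree* DummyTreePtr; // the kind of alias being removed

// The alias and the raw pointer denote the same type, so deleting the alias
// is a purely textual change.
static_assert(std::is_same<DummyTreePtr, DummyTree*>::value,
              "alias and raw pointer are the same type");

void UseBoth(DummyTree* tree)
{
    // Old style: alias type, several declarators on one line:
    //     DummyTreePtr op1 = tree, op2 = nullptr;
    // New style, as in the hunks above: explicit pointer type, one
    // pointer declaration per line.
    DummyTree* op1 = tree;
    DummyTree* op2 = nullptr;
    (void)op1;
    (void)op2;
}

Because the alias and the raw pointer name the same type, overload resolution and generated code are unaffected; only the spelling and the declaration style change.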
diff --git a/src/jit/inline.h b/src/jit/inline.h
index b5a26138f8..eba9b8d52c 100644
--- a/src/jit/inline.h
+++ b/src/jit/inline.h
@@ -520,21 +520,21 @@ struct InlineCandidateInfo
struct InlArgInfo
{
- GenTreePtr argNode; // caller node for this argument
- GenTreePtr argBashTmpNode; // tmp node created, if it may be replaced with actual arg
- unsigned argTmpNum; // the argument tmp number
- unsigned argIsUsed : 1; // is this arg used at all?
- unsigned argIsInvariant : 1; // the argument is a constant or a local variable address
- unsigned argIsLclVar : 1; // the argument is a local variable
- unsigned argIsThis : 1; // the argument is the 'this' pointer
- unsigned argHasSideEff : 1; // the argument has side effects
- unsigned argHasGlobRef : 1; // the argument has a global ref
- unsigned argHasCallerLocalRef : 1; // the argument value depends on an aliased caller local
- unsigned argHasTmp : 1; // the argument will be evaluated to a temp
- unsigned argHasLdargaOp : 1; // Is there LDARGA(s) operation on this argument?
- unsigned argHasStargOp : 1; // Is there STARG(s) operation on this argument?
- unsigned argIsByRefToStructLocal : 1; // Is this arg an address of a struct local or a normed struct local or a
- // field in them?
+ GenTree* argNode; // caller node for this argument
+ GenTree* argBashTmpNode; // tmp node created, if it may be replaced with actual arg
+ unsigned argTmpNum; // the argument tmp number
+ unsigned argIsUsed : 1; // is this arg used at all?
+ unsigned argIsInvariant : 1; // the argument is a constant or a local variable address
+ unsigned argIsLclVar : 1; // the argument is a local variable
+ unsigned argIsThis : 1; // the argument is the 'this' pointer
+ unsigned argHasSideEff : 1; // the argument has side effects
+ unsigned argHasGlobRef : 1; // the argument has a global ref
+ unsigned argHasCallerLocalRef : 1; // the argument value depends on an aliased caller local
+ unsigned argHasTmp : 1; // the argument will be evaluated to a temp
+ unsigned argHasLdargaOp : 1; // Is there LDARGA(s) operation on this argument?
+ unsigned argHasStargOp : 1; // Is there STARG(s) operation on this argument?
+ unsigned argIsByRefToStructLocal : 1; // Is this arg an address of a struct local or a normed struct local or a
+ // field in them?
};
// InlLclVarInfo describes inline candidate argument and local variable properties.
@@ -562,7 +562,7 @@ struct InlineInfo
InlineResult* inlineResult;
- GenTreePtr retExpr; // The return expression of the inlined candidate.
+ GenTree* retExpr; // The return expression of the inlined candidate.
CORINFO_CLASS_HANDLE retExprClassHnd;
bool retExprClassHndIsExact;
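
For the struct members changed in inline.h above, a similar sketch (the types Node, NodePtr, WithAlias, and WithRawPointer are hypothetical) shows that replacing an alias member with the explicit pointer leaves the struct's size and layout unchanged:

struct Node; // hypothetical stand-in for a tree node type
typedef Node* NodePtr;

struct WithAlias
{
    NodePtr  argNode;        // member declared through the alias
    unsigned argIsUsed : 1;
};

struct WithRawPointer
{
    Node*    argNode;        // member declared with the explicit pointer
    unsigned argIsUsed : 1;
};

// Same member types, same size, same layout: the rewrite is purely textual.
static_assert(sizeof(WithAlias) == sizeof(WithRawPointer),
              "replacing the alias with the raw pointer does not change layout");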
diff --git a/src/jit/instr.cpp b/src/jit/instr.cpp
index a3e354222a..1500dece90 100644
--- a/src/jit/instr.cpp
+++ b/src/jit/instr.cpp
@@ -536,7 +536,7 @@ void CodeGenInterface::inst_FN(instruction ins, unsigned stk)
* Display a stack frame reference.
*/
-void CodeGen::inst_set_SV_var(GenTreePtr tree)
+void CodeGen::inst_set_SV_var(GenTree* tree)
{
#ifdef DEBUG
assert(tree && (tree->gtOper == GT_LCL_VAR || tree->gtOper == GT_LCL_VAR_ADDR || tree->gtOper == GT_STORE_LCL_VAR));
@@ -632,7 +632,7 @@ void CodeGen::inst_RV_IV(
* On return, *baseReg, *indScale, *indReg, and *cns are set.
*/
-void CodeGen::instGetAddrMode(GenTreePtr addr, regNumber* baseReg, unsigned* indScale, regNumber* indReg, unsigned* cns)
+void CodeGen::instGetAddrMode(GenTree* addr, regNumber* baseReg, unsigned* indScale, regNumber* indReg, unsigned* cns)
{
if (addr->gtOper == GT_ARR_ELEM)
{
@@ -655,9 +655,9 @@ void CodeGen::instGetAddrMode(GenTreePtr addr, regNumber* baseReg, unsigned* ind
else if (addr->gtOper == GT_LEA)
{
GenTreeAddrMode* lea = addr->AsAddrMode();
- GenTreePtr base = lea->Base();
+ GenTree* base = lea->Base();
assert(!base || (base->InReg()));
- GenTreePtr index = lea->Index();
+ GenTree* index = lea->Index();
assert(!index || (index->InReg()));
*baseReg = base ? base->gtRegNum : REG_NA;
@@ -670,9 +670,9 @@ void CodeGen::instGetAddrMode(GenTreePtr addr, regNumber* baseReg, unsigned* ind
{
/* Figure out what complex address mode to use */
- GenTreePtr rv1 = NULL;
- GenTreePtr rv2 = NULL;
- bool rev = false;
+ GenTree* rv1 = NULL;
+ GenTree* rv2 = NULL;
+ bool rev = false;
INDEBUG(bool yes =)
genCreateAddrMode(addr, -1, true, RBM_NONE, &rev, &rv1, &rv2,
@@ -759,7 +759,7 @@ void CodeGen::sched_AM(instruction ins,
emitAttr size,
regNumber ireg,
bool rdst,
- GenTreePtr addr,
+ GenTree* addr,
unsigned offs,
bool cons,
int imm,
@@ -1078,7 +1078,7 @@ void CodeGen::sched_AM(instruction ins,
emitAttr size,
regNumber ireg,
bool rdst,
- GenTreePtr addr,
+ GenTree* addr,
unsigned offs,
bool cons,
int imm,
@@ -1243,7 +1243,7 @@ void CodeGen::instEmit_indCall(GenTreeCall* call,
MULTIREG_HAS_SECOND_GC_RET_ONLY_ARG(emitAttr secondRetSize))
// clang-format on
{
- GenTreePtr addr;
+ GenTree* addr;
emitter::EmitCallType emitCallType;
@@ -1380,8 +1380,8 @@ void CodeGen::instEmit_indCall(GenTreeCall* call,
{
bool rev = false;
- GenTreePtr rv1 = nullptr;
- GenTreePtr rv2 = nullptr;
+ GenTree* rv1 = nullptr;
+ GenTree* rv2 = nullptr;
/* Figure out what complex address mode to use */
@@ -1435,7 +1435,7 @@ void CodeGen::instEmit_indCall(GenTreeCall* call,
* Emit an "op [r/m]" instruction (the r/m operand given by a tree).
*/
-void CodeGen::instEmit_RM(instruction ins, GenTreePtr tree, GenTreePtr addr, unsigned offs)
+void CodeGen::instEmit_RM(instruction ins, GenTree* tree, GenTree* addr, unsigned offs)
{
emitAttr size;
@@ -1452,7 +1452,7 @@ void CodeGen::instEmit_RM(instruction ins, GenTreePtr tree, GenTreePtr addr, uns
* Emit an "op [r/m], reg" instruction (the r/m operand given by a tree).
*/
-void CodeGen::instEmit_RM_RV(instruction ins, emitAttr size, GenTreePtr tree, regNumber reg, unsigned offs)
+void CodeGen::instEmit_RM_RV(instruction ins, emitAttr size, GenTree* tree, regNumber reg, unsigned offs)
{
#ifdef _TARGET_XARCH_
assert(instIsFP(ins) == 0);
@@ -1467,7 +1467,7 @@ void CodeGen::instEmit_RM_RV(instruction ins, emitAttr size, GenTreePtr tree, re
* been made addressable).
*/
-void CodeGen::inst_TT(instruction ins, GenTreePtr tree, unsigned offs, int shfv, emitAttr size)
+void CodeGen::inst_TT(instruction ins, GenTree* tree, unsigned offs, int shfv, emitAttr size)
{
bool sizeInferred = false;
@@ -1632,7 +1632,7 @@ AGAIN:
#ifndef LEGACY_BACKEND
assert(!"inst_TT not supported for GT_IND, GT_NULLCHECK or GT_ARR_ELEM in !LEGACY_BACKEND");
#else // LEGACY_BACKEND
- GenTreePtr addr = tree->OperIsIndir() ? tree->gtOp.gtOp1 : tree;
+ GenTree* addr = tree->OperIsIndir() ? tree->gtOp.gtOp1 : tree;
if (shfv)
sched_AM(ins, size, REG_NA, false, addr, offs, true, shfv);
else
@@ -1669,7 +1669,7 @@ AGAIN:
* been made addressable) and another that is a register.
*/
-void CodeGen::inst_TT_RV(instruction ins, GenTreePtr tree, regNumber reg, unsigned offs, emitAttr size, insFlags flags)
+void CodeGen::inst_TT_RV(instruction ins, GenTree* tree, regNumber reg, unsigned offs, emitAttr size, insFlags flags)
{
assert(reg != REG_STK);
@@ -1854,7 +1854,7 @@ AGAIN:
#ifndef LEGACY_BACKEND
assert(!"inst_TT_RV not supported for GT_IND, GT_NULLCHECK or GT_ARR_ELEM in RyuJIT Backend");
#else // LEGACY_BACKEND
- GenTreePtr addr = tree->OperIsIndir() ? tree->gtOp.gtOp1 : tree;
+ GenTree* addr = tree->OperIsIndir() ? tree->gtOp.gtOp1 : tree;
sched_AM(ins, size, reg, false, addr, offs, false, 0, flags);
#endif // LEGACY_BACKEND
}
@@ -1910,7 +1910,7 @@ regNumber CodeGen::genGetZeroRegister()
* Generate an instruction that has one operand given by a tree (which has
* been made addressable) and another that is an integer constant.
*/
-void CodeGen::inst_TT_IV(instruction ins, GenTreePtr tree, ssize_t val, unsigned offs, emitAttr size, insFlags flags)
+void CodeGen::inst_TT_IV(instruction ins, GenTree* tree, ssize_t val, unsigned offs, emitAttr size, insFlags flags)
{
bool sizeInferred = false;
@@ -2226,7 +2226,7 @@ AGAIN:
case GT_NULLCHECK:
case GT_ARR_ELEM:
{
- GenTreePtr addr = tree->OperIsIndir() ? tree->gtOp.gtOp1 : tree;
+ GenTree* addr = tree->OperIsIndir() ? tree->gtOp.gtOp1 : tree;
sched_AM(ins, size, REG_NA, false, addr, offs, true, ival, flags);
}
return;
@@ -2248,7 +2248,7 @@ AGAIN:
*/
void CodeGen::inst_RV_AT(
- instruction ins, emitAttr size, var_types type, regNumber reg, GenTreePtr tree, unsigned offs, insFlags flags)
+ instruction ins, emitAttr size, var_types type, regNumber reg, GenTree* tree, unsigned offs, insFlags flags)
{
#ifdef _TARGET_XARCH_
#ifdef DEBUG
@@ -2269,7 +2269,7 @@ void CodeGen::inst_RV_AT(
if (EA_SIZE(size) > EA_PTRSIZE && !instIsFP(ins))
EA_SET_SIZE(size, EA_PTRSIZE);
- GenTreePtr addr = tree;
+ GenTree* addr = tree;
sched_AM(ins, size, reg, true, addr, offs, false, 0, flags);
}
@@ -2279,7 +2279,7 @@ void CodeGen::inst_RV_AT(
* (which has been made addressable) and an integer constant.
*/
-void CodeGen::inst_AT_IV(instruction ins, emitAttr size, GenTreePtr baseTree, int icon, unsigned offs)
+void CodeGen::inst_AT_IV(instruction ins, emitAttr size, GenTree* baseTree, int icon, unsigned offs)
{
sched_AM(ins, size, REG_NA, false, baseTree, offs, true, icon);
}
@@ -2293,7 +2293,7 @@ void CodeGen::inst_AT_IV(instruction ins, emitAttr size, GenTreePtr baseTree, in
void CodeGen::inst_RV_TT(instruction ins,
regNumber reg,
- GenTreePtr tree,
+ GenTree* tree,
unsigned offs,
emitAttr size,
insFlags flags /* = INS_FLAGS_DONT_CARE */)
@@ -2588,7 +2588,7 @@ AGAIN:
#ifndef LEGACY_BACKEND
assert(!"inst_RV_TT not supported for GT_IND, GT_NULLCHECK, GT_ARR_ELEM or GT_LEA in !LEGACY_BACKEND");
#else // LEGACY_BACKEND
- GenTreePtr addr = tree->OperIsIndir() ? tree->gtOp.gtOp1 : tree;
+ GenTree* addr = tree->OperIsIndir() ? tree->gtOp.gtOp1 : tree;
inst_RV_AT(ins, size, tree->TypeGet(), reg, addr, offs, flags);
#endif // LEGACY_BACKEND
}
@@ -2640,7 +2640,7 @@ AGAIN:
* which is reg=[tree]*icon
*/
#ifdef LEGACY_BACKEND
-void CodeGen::inst_RV_TT_IV(instruction ins, regNumber reg, GenTreePtr tree, int val)
+void CodeGen::inst_RV_TT_IV(instruction ins, regNumber reg, GenTree* tree, int val)
{
assert(tree->gtType <= TYP_I_IMPL);
@@ -2706,7 +2706,7 @@ void CodeGen::inst_RV_SH(
* Generate a "shift [r/m], icon" instruction.
*/
-void CodeGen::inst_TT_SH(instruction ins, GenTreePtr tree, unsigned val, unsigned offs)
+void CodeGen::inst_TT_SH(instruction ins, GenTree* tree, unsigned val, unsigned offs)
{
#ifdef _TARGET_XARCH_
if (val == 0)
@@ -2736,7 +2736,7 @@ void CodeGen::inst_TT_SH(instruction ins, GenTreePtr tree, unsigned val, unsigne
* Generate a "shift [addr], cl" instruction.
*/
-void CodeGen::inst_TT_CL(instruction ins, GenTreePtr tree, unsigned offs)
+void CodeGen::inst_TT_CL(instruction ins, GenTree* tree, unsigned offs)
{
inst_TT(ins, tree, offs, 0, emitTypeSize(tree->TypeGet()));
}
@@ -2821,7 +2821,7 @@ void CodeGen::inst_SA_IV(instruction ins, unsigned ofs, int val, var_types type)
* or short (e.g. something like "movzx eax, byte ptr [edx]").
*/
-void CodeGen::inst_RV_ST(instruction ins, emitAttr size, regNumber reg, GenTreePtr tree)
+void CodeGen::inst_RV_ST(instruction ins, emitAttr size, regNumber reg, GenTree* tree)
{
assert(size == EA_1BYTE || size == EA_2BYTE);
@@ -2887,7 +2887,7 @@ void CodeGen::inst_RV_ST(instruction ins, regNumber reg, TempDsc* tmp, unsigned
#endif // !_TARGET_ARM_
}
-void CodeGen::inst_mov_RV_ST(regNumber reg, GenTreePtr tree)
+void CodeGen::inst_mov_RV_ST(regNumber reg, GenTree* tree)
{
/* Figure out the size of the value being loaded */
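The instr.cpp hunks above are a purely mechanical rename: every GenTreePtr in these emit-helper signatures becomes an explicit GenTree*. A minimal standalone sketch of the pattern, assuming the alias was declared roughly as "typedef GenTree* GenTreePtr;" (the declaration itself is removed elsewhere in this patch); the GenTree below is a stub, not the JIT's type:

// Illustrative stub; the JIT's real GenTree is far larger.
struct GenTree
{
    int gtOper;
};

// Before the patch (assuming the alias was declared roughly as below):
//     typedef GenTree* GenTreePtr;
//     void inst_TT(instruction ins, GenTreePtr tree, unsigned offs);

// After the patch the pointer type is spelled out directly:
void inst_TT(int ins, GenTree* tree, unsigned offs)
{
    (void)ins;
    (void)tree;
    (void)offs;
}

int main()
{
    GenTree node{};
    inst_TT(0, &node, 0); // call sites do not change; only the declarations do
    return 0;
}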
diff --git a/src/jit/jitgcinfo.h b/src/jit/jitgcinfo.h
index 57f107321c..2c79783d3a 100644
--- a/src/jit/jitgcinfo.h
+++ b/src/jit/jitgcinfo.h
@@ -72,7 +72,7 @@ typedef JitHashTable<RegSlotIdKey, RegSlotIdKey, GcSlotId> RegSlotMap;
typedef JitHashTable<StackSlotIdKey, StackSlotIdKey, GcSlotId> StackSlotMap;
#endif
-typedef JitHashTable<GenTreePtr, JitPtrKeyFuncs<GenTree>, VARSET_TP*> NodeToVarsetPtrMap;
+typedef JitHashTable<GenTree*, JitPtrKeyFuncs<GenTree>, VARSET_TP*> NodeToVarsetPtrMap;
class GCInfo
{
@@ -91,7 +91,7 @@ public:
void gcMarkRegSetByref(regMaskTP regMask DEBUGARG(bool forceOutput = false));
void gcMarkRegSetNpt(regMaskTP regMask DEBUGARG(bool forceOutput = false));
void gcMarkRegPtrVal(regNumber reg, var_types type);
- void gcMarkRegPtrVal(GenTreePtr tree);
+ void gcMarkRegPtrVal(GenTree* tree);
#ifdef DEBUG
void gcDspGCrefSetChanges(regMaskTP gcRegGCrefSetNew DEBUGARG(bool forceOutput = false));
@@ -320,12 +320,12 @@ public:
// might accidentally be violated in the future.)
};
- WriteBarrierForm gcIsWriteBarrierCandidate(GenTreePtr tgt, GenTreePtr assignVal);
- bool gcIsWriteBarrierAsgNode(GenTreePtr op);
+ WriteBarrierForm gcIsWriteBarrierCandidate(GenTree* tgt, GenTree* assignVal);
+ bool gcIsWriteBarrierAsgNode(GenTree* op);
// Returns a WriteBarrierForm decision based on the form of "tgtAddr", which is assumed to be the
// argument of a GT_IND LHS.
- WriteBarrierForm gcWriteBarrierFormFromTargetAddress(GenTreePtr tgtAddr);
+ WriteBarrierForm gcWriteBarrierFormFromTargetAddress(GenTree* tgtAddr);
//-------------------------------------------------------------------------
//
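The jitgcinfo.h change only re-spells the key type of NodeToVarsetPtrMap; keys are still raw node pointers, so hashing and lookup behavior is unchanged. A rough standalone analogue using std::unordered_map in place of the JIT-internal JitHashTable, with stub GenTree/VarSet types:

#include <cstdio>
#include <unordered_map>

struct GenTree { int gtOper; };
struct VarSet  { unsigned bits; };

// Keys are raw node pointers; only the spelling of the key type changed in the patch.
using NodeToVarSetMap = std::unordered_map<GenTree*, VarSet*>;

int main()
{
    GenTree node{};
    VarSet  live{0x5};
    NodeToVarSetMap map;
    map[&node] = &live;                    // hash/compare on the pointer value itself
    std::printf("%u\n", map[&node]->bits); // prints 5
    return 0;
}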
diff --git a/src/jit/lclvars.cpp b/src/jit/lclvars.cpp
index bc2470c71f..bf9a1f9a18 100644
--- a/src/jit/lclvars.cpp
+++ b/src/jit/lclvars.cpp
@@ -2395,7 +2395,7 @@ void Compiler::lvaSetClass(unsigned varNum, CORINFO_CLASS_HANDLE clsHnd, bool is
// tree kinds can track ref types, the stack type is used as a
// fallback.
-void Compiler::lvaSetClass(unsigned varNum, GenTreePtr tree, CORINFO_CLASS_HANDLE stackHnd)
+void Compiler::lvaSetClass(unsigned varNum, GenTree* tree, CORINFO_CLASS_HANDLE stackHnd)
{
bool isExact = false;
bool isNonNull = false;
@@ -2520,7 +2520,7 @@ void Compiler::lvaUpdateClass(unsigned varNum, CORINFO_CLASS_HANDLE clsHnd, bool
// tree kinds can track ref types, the stack type is used as a
// fallback.
-void Compiler::lvaUpdateClass(unsigned varNum, GenTreePtr tree, CORINFO_CLASS_HANDLE stackHnd)
+void Compiler::lvaUpdateClass(unsigned varNum, GenTree* tree, CORINFO_CLASS_HANDLE stackHnd)
{
bool isExact = false;
bool isNonNull = false;
@@ -2724,7 +2724,7 @@ BasicBlock::weight_t BasicBlock::getBBWeight(Compiler* comp)
}
// Decrement the ref counts for all locals contained in the tree and its children.
-void Compiler::lvaRecursiveDecRefCounts(GenTreePtr tree)
+void Compiler::lvaRecursiveDecRefCounts(GenTree* tree)
{
assert(lvaLocalVarRefCounted);
@@ -2765,13 +2765,13 @@ Compiler::fgWalkResult DecLclVarRefCountsVisitor::WalkTree(Compiler* compiler, G
* Helper passed to the tree walker to decrement the refCnts for
* all local variables in an expression
*/
-void Compiler::lvaDecRefCnts(GenTreePtr tree)
+void Compiler::lvaDecRefCnts(GenTree* tree)
{
assert(compCurBB != nullptr);
lvaDecRefCnts(compCurBB, tree);
}
-void Compiler::lvaDecRefCnts(BasicBlock* block, GenTreePtr tree)
+void Compiler::lvaDecRefCnts(BasicBlock* block, GenTree* tree)
{
assert(block != nullptr);
assert(tree != nullptr);
@@ -2819,7 +2819,7 @@ void Compiler::lvaDecRefCnts(BasicBlock* block, GenTreePtr tree)
}
// Increment the ref counts for all locals contained in the tree and its children.
-void Compiler::lvaRecursiveIncRefCounts(GenTreePtr tree)
+void Compiler::lvaRecursiveIncRefCounts(GenTree* tree)
{
assert(lvaLocalVarRefCounted);
@@ -2860,7 +2860,7 @@ Compiler::fgWalkResult IncLclVarRefCountsVisitor::WalkTree(Compiler* compiler, G
* Helper passed to the tree walker to increment the refCnts for
* all local variables in an expression
*/
-void Compiler::lvaIncRefCnts(GenTreePtr tree)
+void Compiler::lvaIncRefCnts(GenTree* tree)
{
unsigned lclNum;
LclVarDsc* varDsc;
@@ -3591,7 +3591,7 @@ var_types LclVarDsc::lvaArgType()
* This is called by lvaMarkLclRefsCallback() to do variable ref marking
*/
-void Compiler::lvaMarkLclRefs(GenTreePtr tree)
+void Compiler::lvaMarkLclRefs(GenTree* tree)
{
/* Is this a call to unmanaged code ? */
if (tree->gtOper == GT_CALL && tree->gtFlags & GTF_CALL_UNMANAGED)
@@ -3616,8 +3616,8 @@ void Compiler::lvaMarkLclRefs(GenTreePtr tree)
if (tree->OperIsAssignment())
{
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtOp.gtOp2;
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtOp.gtOp2;
/* Set target register for RHS local if assignment is of a "small" type */
@@ -3712,7 +3712,7 @@ void Compiler::lvaMarkLclRefs(GenTreePtr tree)
{
if (tree->gtType == TYP_INT)
{
- GenTreePtr op2 = tree->gtOp.gtOp2;
+ GenTree* op2 = tree->gtOp.gtOp2;
if (op2->gtOper == GT_LCL_VAR)
{
@@ -3909,7 +3909,7 @@ void Compiler::lvaMarkLocalVars(BasicBlock* block)
#endif
MarkLocalVarsVisitor visitor(this);
- for (GenTreePtr tree = block->FirstNonPhiDef(); tree; tree = tree->gtNext)
+ for (GenTree* tree = block->FirstNonPhiDef(); tree; tree = tree->gtNext)
{
assert(tree->gtOper == GT_STMT);
@@ -7221,11 +7221,11 @@ static unsigned LCL_FLD_PADDING(unsigned lclNum)
In the first pass we will mark the locals where we CAN't apply the stress mode.
In the second pass we will do the appropriate morphing wherever we've not determined we can't do it.
*/
-Compiler::fgWalkResult Compiler::lvaStressLclFldCB(GenTreePtr* pTree, fgWalkData* data)
+Compiler::fgWalkResult Compiler::lvaStressLclFldCB(GenTree** pTree, fgWalkData* data)
{
- GenTreePtr tree = *pTree;
+ GenTree* tree = *pTree;
genTreeOps oper = tree->OperGet();
- GenTreePtr lcl;
+ GenTree* lcl;
switch (oper)
{
@@ -7339,8 +7339,8 @@ Compiler::fgWalkResult Compiler::lvaStressLclFldCB(GenTreePtr* pTree, fgWalkData
/* Change addr(lclVar) to addr(lclVar)+padding */
noway_assert(oper == GT_ADDR);
- GenTreePtr paddingTree = pComp->gtNewIconNode(padding);
- GenTreePtr newAddr = pComp->gtNewOperNode(GT_ADD, tree->gtType, tree, paddingTree);
+ GenTree* paddingTree = pComp->gtNewIconNode(padding);
+ GenTree* newAddr = pComp->gtNewOperNode(GT_ADD, tree->gtType, tree, paddingTree);
*pTree = newAddr;
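The lvaStressLclFldCB hunk above shows the "edit through a GenTree**" idiom: the walker receives the address of the parent's child link, wraps the original ADDR(lclVar) in a new ADD node, and writes the new subtree back through the double pointer. A toy sketch of that splice with stub node types (not the JIT's GenTree):

#include <cstdio>

struct GenTree
{
    const char* oper;
    GenTree*    op1  = nullptr;
    GenTree*    op2  = nullptr;
    int         icon = 0;
};

static void AddPadding(GenTree** pTree, int padding)
{
    GenTree* tree        = *pTree;                                  // original ADDR(lclVar)
    GenTree* paddingTree = new GenTree{"CNS_INT", nullptr, nullptr, padding};
    GenTree* newAddr     = new GenTree{"ADD", tree, paddingTree};
    *pTree = newAddr;                                               // splice the new subtree in place
}

int main()
{
    GenTree  lcl{"LCL_VAR"};
    GenTree  addr{"ADDR", &lcl};
    GenTree* slot = &addr;            // stands in for the parent's child pointer
    AddPadding(&slot, 8);
    std::printf("%s(%s, %d)\n", slot->oper, slot->op1->oper, slot->op2->icon); // ADD(ADDR, 8)
    return 0;
}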
diff --git a/src/jit/lir.cpp b/src/jit/lir.cpp
index 300017e43c..a0a265d5ae 100644
--- a/src/jit/lir.cpp
+++ b/src/jit/lir.cpp
@@ -1434,7 +1434,7 @@ public:
//
CheckLclVarSemanticsHelper(Compiler* compiler,
const LIR::Range* range,
- SmallHashTable<GenTreePtr, bool, 32U>& unusedDefs)
+ SmallHashTable<GenTree*, bool, 32U>& unusedDefs)
: compiler(compiler), range(range), unusedDefs(unusedDefs), unusedLclVarReads(compiler)
{
}
@@ -1445,7 +1445,7 @@ public:
// 'true' if the Local variables semantics for the specified range is legal.
bool Check()
{
- for (GenTreePtr node : *range)
+ for (GenTree* node : *range)
{
if (!node->isContained()) // a contained node reads operands in the parent.
{
@@ -1473,9 +1473,9 @@ private:
//
// Arguments:
// node - the node to use operands from.
- void UseNodeOperands(GenTreePtr node)
+ void UseNodeOperands(GenTree* node)
{
- for (GenTreePtr operand : node->Operands())
+ for (GenTree* operand : node->Operands())
{
if (!operand->IsLIR())
{
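The lir.cpp hunks iterate nodes with range-based for loops over GenTree*. A compressed sketch of why that works: the range type (approximating LIR::Range here with a stub linked list) exposes iterators that yield the node pointer itself:

#include <cstdio>

struct GenTree
{
    const char* oper;
    GenTree*    gtNext = nullptr;
};

class Range
{
    GenTree* first;

public:
    explicit Range(GenTree* f) : first(f) {}

    struct iterator
    {
        GenTree* node;
        GenTree* operator*() const { return node; }
        iterator& operator++() { node = node->gtNext; return *this; }
        bool operator!=(const iterator& other) const { return node != other.node; }
    };

    iterator begin() const { return {first}; }
    iterator end() const { return {nullptr}; }
};

int main()
{
    GenTree b{"STOREIND"};
    GenTree a{"LCL_VAR", &b};
    for (GenTree* node : Range(&a)) // mirrors "for (GenTree* node : *range)"
    {
        std::printf("%s\n", node->oper);
    }
    return 0;
}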
diff --git a/src/jit/liveness.cpp b/src/jit/liveness.cpp
index dd48c5b738..ba479dccfb 100644
--- a/src/jit/liveness.cpp
+++ b/src/jit/liveness.cpp
@@ -294,7 +294,7 @@ void Compiler::fgPerNodeLocalVarLiveness(GenTree* tree)
{
GenTreeLclVarCommon* dummyLclVarTree = nullptr;
bool dummyIsEntire = false;
- GenTreePtr addrArg = tree->gtOp.gtOp1->gtEffectiveVal(/*commaOnly*/ true);
+ GenTree* addrArg = tree->gtOp.gtOp1->gtEffectiveVal(/*commaOnly*/ true);
if (!addrArg->DefinesLocalAddr(this, /*width doesn't matter*/ 0, &dummyLclVarTree, &dummyIsEntire))
{
fgCurMemoryUse |= memoryKindSet(GcHeap, ByrefExposed);
@@ -1516,11 +1516,11 @@ bool Compiler::fgMarkIntf(VARSET_VALARG_TP varSet)
* For updating liveset during traversal AFTER fgComputeLife has completed
*/
-VARSET_VALRET_TP Compiler::fgUpdateLiveSet(VARSET_VALARG_TP liveSet, GenTreePtr tree)
+VARSET_VALRET_TP Compiler::fgUpdateLiveSet(VARSET_VALARG_TP liveSet, GenTree* tree)
{
VARSET_TP newLiveSet(VarSetOps::MakeCopy(this, liveSet));
assert(fgLocalVarLivenessDone == true);
- GenTreePtr lclVarTree = tree; // After the tests below, "lclVarTree" will be the local variable.
+ GenTree* lclVarTree = tree; // After the tests below, "lclVarTree" will be the local variable.
if (tree->gtOper == GT_LCL_VAR || tree->gtOper == GT_LCL_FLD || tree->gtOper == GT_REG_VAR ||
(lclVarTree = fgIsIndirOfAddrOfLocal(tree)) != nullptr)
{
@@ -1929,12 +1929,12 @@ bool Compiler::fgComputeLifeLocal(VARSET_TP& life, VARSET_VALARG_TP keepAliveVar
#ifndef LEGACY_BACKEND
void Compiler::fgComputeLife(VARSET_TP& life,
- GenTreePtr startNode,
- GenTreePtr endNode,
+ GenTree* startNode,
+ GenTree* endNode,
VARSET_VALARG_TP volatileVars,
bool* pStmtInfoDirty DEBUGARG(bool* treeModf))
{
- GenTreePtr tree;
+ GenTree* tree;
// Don't kill vars in scope
VARSET_TP keepAliveVars(VarSetOps::Union(this, volatileVars, compCurBB->bbScope));
@@ -2301,22 +2301,22 @@ void Compiler::fgComputeLifeLIR(VARSET_TP& life, BasicBlock* block, VARSET_VALAR
#endif
void Compiler::fgComputeLife(VARSET_TP& life,
- GenTreePtr startNode,
- GenTreePtr endNode,
+ GenTree* startNode,
+ GenTree* endNode,
VARSET_VALARG_TP volatileVars,
bool* pStmtInfoDirty DEBUGARG(bool* treeModf))
{
- GenTreePtr tree;
- unsigned lclNum;
+ GenTree* tree;
+ unsigned lclNum;
- GenTreePtr gtQMark = NULL; // current GT_QMARK node (walking the trees backwards)
- GenTreePtr nextColonExit = 0; // gtQMark->gtOp.gtOp2 while walking the 'else' branch.
- // gtQMark->gtOp.gtOp1 while walking the 'then' branch
+ GenTree* gtQMark = NULL; // current GT_QMARK node (walking the trees backwards)
+ GenTree* nextColonExit = 0; // gtQMark->gtOp.gtOp2 while walking the 'else' branch.
+ // gtQMark->gtOp.gtOp1 while walking the 'then' branch
// TBD: This used to be an initialization to VARSET_NOT_ACCEPTABLE. Try to figure out what's going on here.
- VARSET_TP entryLiveSet(VarSetOps::MakeFull(this)); // liveness when we see gtQMark
- VARSET_TP gtColonLiveSet(VarSetOps::MakeFull(this)); // liveness when we see gtColon
- GenTreePtr gtColon = NULL;
+ VARSET_TP entryLiveSet(VarSetOps::MakeFull(this)); // liveness when we see gtQMark
+ VARSET_TP gtColonLiveSet(VarSetOps::MakeFull(this)); // liveness when we see gtColon
+ GenTree* gtColon = NULL;
VARSET_TP keepAliveVars(VarSetOps::Union(this, volatileVars, compCurBB->bbScope)); /* Dont kill vars in scope */
@@ -2346,8 +2346,8 @@ void Compiler::fgComputeLife(VARSET_TP& life,
noway_assert(tree->gtFlags & GTF_RELOP_QMARK);
noway_assert(gtQMark->gtOp.gtOp2->gtOper == GT_COLON);
- GenTreePtr thenNode = gtColon->AsColon()->ThenNode();
- GenTreePtr elseNode = gtColon->AsColon()->ElseNode();
+ GenTree* thenNode = gtColon->AsColon()->ThenNode();
+ GenTree* elseNode = gtColon->AsColon()->ElseNode();
noway_assert(thenNode && elseNode);
@@ -2386,7 +2386,7 @@ void Compiler::fgComputeLife(VARSET_TP& life,
if (tree->gtFlags & GTF_SIDE_EFFECT)
{
- GenTreePtr sideEffList = NULL;
+ GenTree* sideEffList = NULL;
gtExtractSideEffList(tree, &sideEffList);
@@ -2463,7 +2463,7 @@ void Compiler::fgComputeLife(VARSET_TP& life,
// so swap the two branches and reverse the condition. If one is
// non-empty, we want it to be the 'else'
- GenTreePtr tmp = thenNode;
+ GenTree* tmp = thenNode;
gtColon->AsColon()->ThenNode() = thenNode = elseNode;
gtColon->AsColon()->ElseNode() = elseNode = tmp;
@@ -2511,7 +2511,7 @@ void Compiler::fgComputeLife(VARSET_TP& life,
// so the variable(s) should stay live until the end of the LDOBJ.
// Note that for promoted structs lvTracked is false.
- GenTreePtr lclVarTree = nullptr;
+ GenTree* lclVarTree = nullptr;
if (tree->gtOper == GT_OBJ)
{
// fgIsIndirOfAddrOfLocal returns nullptr if the tree is
@@ -2849,7 +2849,7 @@ bool Compiler::fgRemoveDeadStore(GenTree** pTree,
EXTRACT_SIDE_EFFECTS:
/* Extract the side effects */
- GenTreePtr sideEffList = nullptr;
+ GenTree* sideEffList = nullptr;
#ifdef DEBUG
if (verbose)
{
@@ -2948,7 +2948,7 @@ bool Compiler::fgRemoveDeadStore(GenTree** pTree,
{
/* :-( we have side effects */
- GenTreePtr sideEffList = nullptr;
+ GenTree* sideEffList = nullptr;
#ifdef DEBUG
if (verbose)
{
@@ -3240,7 +3240,7 @@ void Compiler::fgInterBlockLocalVarLiveness()
{
/* Get the first statement in the block */
- GenTreePtr firstStmt = block->FirstNonPhiDef();
+ GenTree* firstStmt = block->FirstNonPhiDef();
if (!firstStmt)
{
@@ -3249,7 +3249,7 @@ void Compiler::fgInterBlockLocalVarLiveness()
/* Walk all the statements of the block backwards - Get the LAST stmt */
- GenTreePtr nextStmt = block->bbTreeList->gtPrev;
+ GenTree* nextStmt = block->bbTreeList->gtPrev;
do
{
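Many of the +/- pairs in the liveness.cpp hunks change nothing but whitespace: GenTree* is two characters shorter than GenTreePtr, so clang-format re-aligns the neighbouring declarations and trailing comments. A small compilable illustration of that alignment-only fallout (GenTree is just a forward-declared stub here):

struct GenTree; // stub forward declaration; the real type lives in gentree.h

void example()
{
    // Before: GenTreePtr tree;        After: GenTree* tree;
    //         unsigned   lclNum;             unsigned lclNum;
    GenTree* tree   = nullptr;
    unsigned lclNum = 0;
    (void)tree;   // unused in this sketch
    (void)lclNum;
}

int main()
{
    example();
    return 0;
}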
diff --git a/src/jit/loopcloning.cpp b/src/jit/loopcloning.cpp
index ccd12ee2be..5eea37c51e 100644
--- a/src/jit/loopcloning.cpp
+++ b/src/jit/loopcloning.cpp
@@ -27,14 +27,14 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
// This tree produces GT_INDEX node, the caller is supposed to morph it appropriately
// so it can be codegen'ed.
//
-GenTreePtr LC_Array::ToGenTree(Compiler* comp)
+GenTree* LC_Array::ToGenTree(Compiler* comp)
{
// If jagged array
if (type == Jagged)
{
// Create an a[i][j][k].length type node.
- GenTreePtr arr = comp->gtNewLclvNode(arrIndex->arrLcl, comp->lvaTable[arrIndex->arrLcl].lvType);
- int rank = GetDimRank();
+ GenTree* arr = comp->gtNewLclvNode(arrIndex->arrLcl, comp->lvaTable[arrIndex->arrLcl].lvType);
+ int rank = GetDimRank();
for (int i = 0; i < rank; ++i)
{
arr = comp->gtNewIndexRef(TYP_REF, arr, comp->gtNewLclvNode(arrIndex->indLcls[i],
@@ -43,7 +43,7 @@ GenTreePtr LC_Array::ToGenTree(Compiler* comp)
// If asked for arrlen invoke arr length operator.
if (oper == ArrLen)
{
- GenTreePtr arrLen = comp->gtNewArrLen(TYP_INT, arr, offsetof(CORINFO_Array, length));
+ GenTree* arrLen = comp->gtNewArrLen(TYP_INT, arr, offsetof(CORINFO_Array, length));
return arrLen;
}
else
@@ -70,7 +70,7 @@ GenTreePtr LC_Array::ToGenTree(Compiler* comp)
// Returns the gen tree representation for either a constant or a variable or an arrLen operation
// defined by the "type" member
//
-GenTreePtr LC_Ident::ToGenTree(Compiler* comp)
+GenTree* LC_Ident::ToGenTree(Compiler* comp)
{
// Convert to GenTree nodes.
switch (type)
@@ -101,7 +101,7 @@ GenTreePtr LC_Ident::ToGenTree(Compiler* comp)
// Returns the gen tree representation for either a constant or a variable or an arrLen operation
// defined by the "type" member
//
-GenTreePtr LC_Expr::ToGenTree(Compiler* comp)
+GenTree* LC_Expr::ToGenTree(Compiler* comp)
{
// Convert to GenTree nodes.
switch (type)
@@ -124,7 +124,7 @@ GenTreePtr LC_Expr::ToGenTree(Compiler* comp)
// Return Values:
// Returns the gen tree representation for the conditional operator on lhs and rhs trees
//
-GenTreePtr LC_Condition::ToGenTree(Compiler* comp)
+GenTree* LC_Condition::ToGenTree(Compiler* comp)
{
GenTree* op1Tree = op1.ToGenTree(comp);
GenTree* op2Tree = op2.ToGenTree(comp);
@@ -676,7 +676,7 @@ void LoopCloneContext::CondToStmtInBlock(Compiler* comp
noway_assert(conds.Size() > 0);
// Get the first condition.
- GenTreePtr cond = conds[0].ToGenTree(comp);
+ GenTree* cond = conds[0].ToGenTree(comp);
for (unsigned i = 1; i < conds.Size(); ++i)
{
// Append all conditions using AND operator.
@@ -687,7 +687,7 @@ void LoopCloneContext::CondToStmtInBlock(Compiler* comp
cond = comp->gtNewOperNode(reverse ? GT_NE : GT_EQ, TYP_INT, cond, comp->gtNewIconNode(0));
// Add jmpTrue "cond == 0" to slow path.
- GenTreePtr stmt = comp->fgNewStmtFromTree(comp->gtNewOperNode(GT_JTRUE, TYP_VOID, cond));
+ GenTree* stmt = comp->fgNewStmtFromTree(comp->gtNewOperNode(GT_JTRUE, TYP_VOID, cond));
// Add stmt to the block.
comp->fgInsertStmtAtEnd(block, stmt);
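CondToStmtInBlock, shown above, folds all cloning conditions together with AND and then branches to the slow path on the reversed test "cond == 0". A standalone sketch of that combining step using plain bools instead of GenTree conditions:

#include <cstdio>
#include <vector>

static bool CombineConditions(const std::vector<bool>& conds)
{
    bool combined = conds[0];
    for (size_t i = 1; i < conds.size(); ++i)
    {
        combined = combined && conds[i]; // mirrors appending with GT_AND
    }
    return combined;
}

int main()
{
    std::vector<bool> conds = {true, true, false};
    bool combined     = CombineConditions(conds);
    bool takeSlowPath = (combined == 0); // the reversed "cond == 0" jump condition
    std::printf("take slow path: %d\n", takeSlowPath); // prints 1
    return 0;
}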
diff --git a/src/jit/loopcloning.h b/src/jit/loopcloning.h
index aefb6f4eb0..b5986510b3 100644
--- a/src/jit/loopcloning.h
+++ b/src/jit/loopcloning.h
@@ -208,13 +208,13 @@ struct LcMdArrayOptInfo : public LcOptInfo
*/
struct LcJaggedArrayOptInfo : public LcOptInfo
{
- unsigned dim; // "dim" represents upto what level of the rank this optimization applies to.
- // For example, a[i][j][k] could be the jagged array but if "dim" is 2,
- // then this node is treated as though it were a[i][j]
- ArrIndex arrIndex; // ArrIndex representation of the array.
- GenTreePtr stmt; // "stmt" where the optimization opportunity occurs.
+ unsigned dim; // "dim" represents upto what level of the rank this optimization applies to.
+ // For example, a[i][j][k] could be the jagged array but if "dim" is 2,
+ // then this node is treated as though it were a[i][j]
+ ArrIndex arrIndex; // ArrIndex representation of the array.
+ GenTree* stmt; // "stmt" where the optimization opportunity occurs.
- LcJaggedArrayOptInfo(ArrIndex& arrIndex, unsigned dim, GenTreePtr stmt)
+ LcJaggedArrayOptInfo(ArrIndex& arrIndex, unsigned dim, GenTree* stmt)
: LcOptInfo(this, LcJaggedArray), dim(dim), arrIndex(arrIndex), stmt(stmt)
{
}
@@ -308,7 +308,7 @@ struct LC_Array
}
// Get a tree representation for this symbolic a.length
- GenTreePtr ToGenTree(Compiler* comp);
+ GenTree* ToGenTree(Compiler* comp);
};
/**
@@ -387,7 +387,7 @@ struct LC_Ident
}
// Convert this symbolic representation into a tree node.
- GenTreePtr ToGenTree(Compiler* comp);
+ GenTree* ToGenTree(Compiler* comp);
};
/**
@@ -438,7 +438,7 @@ struct LC_Expr
}
// Convert LC_Expr into a tree node.
- GenTreePtr ToGenTree(Compiler* comp);
+ GenTree* ToGenTree(Compiler* comp);
};
/**
@@ -477,7 +477,7 @@ struct LC_Condition
}
// Convert this conditional operation into a GenTree.
- GenTreePtr ToGenTree(Compiler* comp);
+ GenTree* ToGenTree(Compiler* comp);
};
/**
diff --git a/src/jit/lower.cpp b/src/jit/lower.cpp
index b328c27d86..21da58279e 100644
--- a/src/jit/lower.cpp
+++ b/src/jit/lower.cpp
@@ -39,7 +39,7 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
// Notes:
// If 'childNode' it has any existing sources, they will now be sources for the parent.
//
-void Lowering::MakeSrcContained(GenTreePtr parentNode, GenTreePtr childNode)
+void Lowering::MakeSrcContained(GenTree* parentNode, GenTree* childNode)
{
assert(!parentNode->OperIsLeaf());
assert(childNode->canBeContained());
@@ -478,7 +478,7 @@ GenTree* Lowering::LowerSwitch(GenTree* node)
// 2. and a statement with GT_SWITCH(temp)
assert(node->gtOper == GT_SWITCH);
- GenTreePtr temp = node->gtOp.gtOp1;
+ GenTree* temp = node->gtOp.gtOp1;
assert(temp->gtOper == GT_LCL_VAR);
unsigned tempLclNum = temp->gtLclVarCommon.gtLclNum;
LclVarDsc* tempVarDsc = comp->lvaTable + tempLclNum;
@@ -511,8 +511,8 @@ GenTree* Lowering::LowerSwitch(GenTree* node)
// the default case. As stated above, this conditional is being shared between
// both GT_SWITCH lowering code paths.
// This condition is of the form: if (temp > jumpTableLength - 2){ goto jumpTable[jumpTableLength - 1]; }
- GenTreePtr gtDefaultCaseCond = comp->gtNewOperNode(GT_GT, TYP_INT, comp->gtNewLclvNode(tempLclNum, tempLclType),
- comp->gtNewIconNode(jumpCnt - 2, genActualType(tempLclType)));
+ GenTree* gtDefaultCaseCond = comp->gtNewOperNode(GT_GT, TYP_INT, comp->gtNewLclvNode(tempLclNum, tempLclType),
+ comp->gtNewIconNode(jumpCnt - 2, genActualType(tempLclType)));
// Make sure we perform an unsigned comparison, just in case the switch index in 'temp'
// is now less than zero (that would also hit the default case).
@@ -521,8 +521,8 @@ GenTree* Lowering::LowerSwitch(GenTree* node)
/* Increment the lvRefCnt and lvRefCntWtd for temp */
tempVarDsc->incRefCnts(blockWeight, comp);
- GenTreePtr gtDefaultCaseJump = comp->gtNewOperNode(GT_JTRUE, TYP_VOID, gtDefaultCaseCond);
- gtDefaultCaseJump->gtFlags = node->gtFlags;
+ GenTree* gtDefaultCaseJump = comp->gtNewOperNode(GT_JTRUE, TYP_VOID, gtDefaultCaseCond);
+ gtDefaultCaseJump->gtFlags = node->gtFlags;
LIR::Range condRange = LIR::SeqTree(comp, gtDefaultCaseJump);
switchBBRange.InsertAtEnd(std::move(condRange));
@@ -692,13 +692,12 @@ GenTree* Lowering::LowerSwitch(GenTree* node)
// |____GT_EQ
// |____ (switchIndex) (The temp variable)
// |____ (ICon) (The actual case constant)
- GenTreePtr gtCaseCond =
- comp->gtNewOperNode(GT_EQ, TYP_INT, comp->gtNewLclvNode(tempLclNum, tempLclType),
- comp->gtNewIconNode(i, tempLclType));
+ GenTree* gtCaseCond = comp->gtNewOperNode(GT_EQ, TYP_INT, comp->gtNewLclvNode(tempLclNum, tempLclType),
+ comp->gtNewIconNode(i, tempLclType));
/* Increment the lvRefCnt and lvRefCntWtd for temp */
tempVarDsc->incRefCnts(blockWeight, comp);
- GenTreePtr gtCaseBranch = comp->gtNewOperNode(GT_JTRUE, TYP_VOID, gtCaseCond);
+ GenTree* gtCaseBranch = comp->gtNewOperNode(GT_JTRUE, TYP_VOID, gtCaseCond);
LIR::Range caseRange = LIR::SeqTree(comp, gtCaseBranch);
currentBBRange->InsertAtEnd(std::move(caseRange));
}
@@ -986,14 +985,14 @@ void Lowering::ReplaceArgWithPutArgOrBitcast(GenTree** argSlot, GenTree* putArgO
// layout object, so the codegen of the GT_PUTARG_STK could use this for optimizing copying to the stack by value.
// (using block copy primitives for non GC pointers and a single TARGET_POINTER_SIZE copy with recording GC info.)
//
-GenTreePtr Lowering::NewPutArg(GenTreeCall* call, GenTreePtr arg, fgArgTabEntry* info, var_types type)
+GenTree* Lowering::NewPutArg(GenTreeCall* call, GenTree* arg, fgArgTabEntry* info, var_types type)
{
assert(call != nullptr);
assert(arg != nullptr);
assert(info != nullptr);
- GenTreePtr putArg = nullptr;
- bool updateArgTable = true;
+ GenTree* putArg = nullptr;
+ bool updateArgTable = true;
bool isOnStack = true;
#ifdef FEATURE_UNIX_AMD64_STRUCT_PASSING
@@ -1179,7 +1178,7 @@ GenTreePtr Lowering::NewPutArg(GenTreeCall* call, GenTreePtr arg, fgArgTabEntry*
for (unsigned ctr = 0; fieldListPtr != nullptr; fieldListPtr = fieldListPtr->Rest(), ctr++)
{
// Create a new GT_PUTARG_REG node with op1 the original GT_LCL_FLD.
- GenTreePtr newOper = comp->gtNewPutArgReg(
+ GenTree* newOper = comp->gtNewPutArgReg(
comp->GetTypeFromClassificationAndSizes(info->structDesc.eightByteClassifications[ctr],
info->structDesc.eightByteSizes[ctr]),
fieldListPtr->gtOp.gtOp1, (ctr == 0) ? info->regNum : info->otherRegNum);
@@ -1215,11 +1214,11 @@ GenTreePtr Lowering::NewPutArg(GenTreeCall* call, GenTreePtr arg, fgArgTabEntry*
regNumber argReg = info->regNum;
for (unsigned ctr = 0; fieldListPtr != nullptr; fieldListPtr = fieldListPtr->Rest(), ctr++)
{
- GenTreePtr curOp = fieldListPtr->gtOp.gtOp1;
- var_types curTyp = curOp->TypeGet();
+ GenTree* curOp = fieldListPtr->gtOp.gtOp1;
+ var_types curTyp = curOp->TypeGet();
// Create a new GT_PUTARG_REG node with op1
- GenTreePtr newOper = comp->gtNewPutArgReg(curTyp, curOp, argReg);
+ GenTree* newOper = comp->gtNewPutArgReg(curTyp, curOp, argReg);
// Splice in the new GT_PUTARG_REG node in the GT_FIELD_LIST
ReplaceArgWithPutArgOrBitcast(&fieldListPtr->gtOp.gtOp1, newOper);
@@ -1374,9 +1373,9 @@ GenTreePtr Lowering::NewPutArg(GenTreeCall* call, GenTreePtr arg, fgArgTabEntry*
// Return Value:
// None.
//
-void Lowering::LowerArg(GenTreeCall* call, GenTreePtr* ppArg)
+void Lowering::LowerArg(GenTreeCall* call, GenTree** ppArg)
{
- GenTreePtr arg = *ppArg;
+ GenTree* arg = *ppArg;
JITDUMP("lowering arg : ");
DISPNODE(arg);
@@ -1452,13 +1451,13 @@ void Lowering::LowerArg(GenTreeCall* call, GenTreePtr* ppArg)
noway_assert(arg->OperGet() == GT_LONG);
assert(info->numRegs == 2);
- GenTreePtr argLo = arg->gtGetOp1();
- GenTreePtr argHi = arg->gtGetOp2();
+ GenTree* argLo = arg->gtGetOp1();
+ GenTree* argHi = arg->gtGetOp2();
GenTreeFieldList* fieldList = new (comp, GT_FIELD_LIST) GenTreeFieldList(argLo, 0, TYP_INT, nullptr);
// Only the first fieldList node (GTF_FIELD_LIST_HEAD) is in the instruction sequence.
(void)new (comp, GT_FIELD_LIST) GenTreeFieldList(argHi, 4, TYP_INT, fieldList);
- GenTreePtr putArg = NewPutArg(call, fieldList, info, type);
+ GenTree* putArg = NewPutArg(call, fieldList, info, type);
BlockRange().InsertBefore(arg, putArg);
BlockRange().Remove(arg);
@@ -1472,13 +1471,13 @@ void Lowering::LowerArg(GenTreeCall* call, GenTreePtr* ppArg)
// Although the hi argument needs to be pushed first, that will be handled by the general case,
// in which the fields will be reversed.
assert(info->numSlots == 2);
- GenTreePtr argLo = arg->gtGetOp1();
- GenTreePtr argHi = arg->gtGetOp2();
+ GenTree* argLo = arg->gtGetOp1();
+ GenTree* argHi = arg->gtGetOp2();
GenTreeFieldList* fieldList = new (comp, GT_FIELD_LIST) GenTreeFieldList(argLo, 0, TYP_INT, nullptr);
// Only the first fieldList node (GTF_FIELD_LIST_HEAD) is in the instruction sequence.
(void)new (comp, GT_FIELD_LIST) GenTreeFieldList(argHi, 4, TYP_INT, fieldList);
- GenTreePtr putArg = NewPutArg(call, fieldList, info, type);
- putArg->gtRegNum = info->regNum;
+ GenTree* putArg = NewPutArg(call, fieldList, info, type);
+ putArg->gtRegNum = info->regNum;
// We can't call ReplaceArgWithPutArgOrBitcast here because it presumes that we are keeping the original
// arg.
@@ -1504,7 +1503,7 @@ void Lowering::LowerArg(GenTreeCall* call, GenTreePtr* ppArg)
}
#endif // _TARGET_ARMARCH_
- GenTreePtr putArg = NewPutArg(call, arg, info, type);
+ GenTree* putArg = NewPutArg(call, arg, info, type);
// In the case of register passable struct (in one or two registers)
// the NewPutArg returns a new node (GT_PUTARG_REG or a GT_FIELD_LIST with two GT_PUTARG_REGs.)
@@ -1840,17 +1839,17 @@ void Lowering::CheckVSQuirkStackPaddingNeeded(GenTreeCall* call)
// Confine this to IL stub calls which aren't marked as unmanaged.
if (call->IsPInvoke() && !call->IsUnmanaged())
{
- bool paddingNeeded = false;
- GenTreePtr firstPutArgReg = nullptr;
+ bool paddingNeeded = false;
+ GenTree* firstPutArgReg = nullptr;
for (GenTreeArgList* args = call->gtCallLateArgs; args; args = args->Rest())
{
- GenTreePtr tmp = args->Current();
+ GenTree* tmp = args->Current();
if (tmp->OperGet() == GT_PUTARG_REG)
{
if (firstPutArgReg == nullptr)
{
firstPutArgReg = tmp;
- GenTreePtr op1 = firstPutArgReg->gtOp.gtOp1;
+ GenTree* op1 = firstPutArgReg->gtOp.gtOp1;
if (op1->OperGet() == GT_LCL_VAR_ADDR)
{
@@ -1948,7 +1947,7 @@ void Lowering::InsertProfTailCallHook(GenTreeCall* call, GenTree* insertionPoint
if (insertionPoint == nullptr)
{
- GenTreePtr tmp = nullptr;
+ GenTree* tmp = nullptr;
for (GenTreeArgList* args = call->gtCallArgs; args; args = args->Rest())
{
tmp = args->Current();
@@ -1985,7 +1984,7 @@ void Lowering::InsertProfTailCallHook(GenTreeCall* call, GenTree* insertionPoint
#endif // !defined(_TARGET_X86_)
assert(insertionPoint != nullptr);
- GenTreePtr profHookNode = new (comp, GT_PROF_HOOK) GenTree(GT_PROF_HOOK, TYP_VOID);
+ GenTree* profHookNode = new (comp, GT_PROF_HOOK) GenTree(GT_PROF_HOOK, TYP_VOID);
BlockRange().InsertBefore(insertionPoint, profHookNode);
}
@@ -2037,13 +2036,13 @@ void Lowering::LowerFastTailCall(GenTreeCall* call)
// of call is setup. Note that once a stack arg is setup, it cannot have nested
// calls subsequently in execution order to setup other args, because the nested
// call could over-write the stack arg that is setup earlier.
- GenTreePtr firstPutArgStk = nullptr;
+ GenTree* firstPutArgStk = nullptr;
GenTreeArgList* args;
ArrayStack<GenTree*> putargs(comp);
for (args = call->gtCallArgs; args; args = args->Rest())
{
- GenTreePtr tmp = args->Current();
+ GenTree* tmp = args->Current();
if (tmp->OperGet() == GT_PUTARG_STK)
{
putargs.Push(tmp);
@@ -2052,7 +2051,7 @@ void Lowering::LowerFastTailCall(GenTreeCall* call)
for (args = call->gtCallLateArgs; args; args = args->Rest())
{
- GenTreePtr tmp = args->Current();
+ GenTree* tmp = args->Current();
if (tmp->OperGet() == GT_PUTARG_STK)
{
putargs.Push(tmp);
@@ -2095,7 +2094,7 @@ void Lowering::LowerFastTailCall(GenTreeCall* call)
for (int i = 0; i < putargs.Height(); i++)
{
- GenTreePtr putArgStkNode = putargs.Bottom(i);
+ GenTree* putArgStkNode = putargs.Bottom(i);
assert(putArgStkNode->OperGet() == GT_PUTARG_STK);
@@ -2127,7 +2126,7 @@ void Lowering::LowerFastTailCall(GenTreeCall* call)
// Start searching in execution order list till we encounter call node
unsigned tmpLclNum = BAD_VAR_NUM;
var_types tmpType = TYP_UNDEF;
- for (GenTreePtr treeNode = putArgStkNode->gtNext; treeNode != call; treeNode = treeNode->gtNext)
+ for (GenTree* treeNode = putArgStkNode->gtNext; treeNode != call; treeNode = treeNode->gtNext)
{
if (treeNode->OperIsLocal() || treeNode->OperIsLocalAddr())
{
@@ -2178,7 +2177,7 @@ void Lowering::LowerFastTailCall(GenTreeCall* call)
// Insert GT_START_NONGC node before the first GT_PUTARG_STK node.
// Note that if there are no args to be setup on stack, no need to
// insert GT_START_NONGC node.
- GenTreePtr startNonGCNode = nullptr;
+ GenTree* startNonGCNode = nullptr;
if (firstPutArgStk != nullptr)
{
startNonGCNode = new (comp, GT_START_NONGC) GenTree(GT_START_NONGC, TYP_VOID);
@@ -2197,7 +2196,7 @@ void Lowering::LowerFastTailCall(GenTreeCall* call)
if ((comp->fgBBcount == 1) && !(comp->compCurBB->bbFlags & BBF_GC_SAFE_POINT))
{
assert(comp->fgFirstBB == comp->compCurBB);
- GenTreePtr noOp = new (comp, GT_NO_OP) GenTree(GT_NO_OP, TYP_VOID);
+ GenTree* noOp = new (comp, GT_NO_OP) GenTree(GT_NO_OP, TYP_VOID);
BlockRange().InsertBefore(startNonGCNode, noOp);
}
}
@@ -3571,7 +3570,7 @@ void Lowering::InsertPInvokeMethodProlog()
// Return Value:
// Code tree to perform the action.
//
-void Lowering::InsertPInvokeMethodEpilog(BasicBlock* returnBB DEBUGARG(GenTreePtr lastExpr))
+void Lowering::InsertPInvokeMethodEpilog(BasicBlock* returnBB DEBUGARG(GenTree* lastExpr))
{
assert(returnBB != nullptr);
assert(comp->info.compCallUnmanaged);
@@ -4390,12 +4389,12 @@ bool Lowering::AreSourcesPossiblyModifiedLocals(GenTree* addr, GenTree* base, Ge
//
GenTree* Lowering::TryCreateAddrMode(LIR::Use&& use, bool isIndir)
{
- GenTree* addr = use.Def();
- GenTreePtr base = nullptr;
- GenTreePtr index = nullptr;
- unsigned scale = 0;
- unsigned offset = 0;
- bool rev = false;
+ GenTree* addr = use.Def();
+ GenTree* base = nullptr;
+ GenTree* index = nullptr;
+ unsigned scale = 0;
+ unsigned offset = 0;
+ bool rev = false;
// TODO-1stClassStructs: This logic is here to preserve prior behavior. Note that previously
// block ops were not considered for addressing modes, but an add under it may have been.
@@ -4457,7 +4456,7 @@ GenTree* Lowering::TryCreateAddrMode(LIR::Use&& use, bool isIndir)
return addr;
}
- GenTreePtr arrLength = nullptr;
+ GenTree* arrLength = nullptr;
JITDUMP("Addressing mode:\n");
JITDUMP(" Base\n ");
@@ -5023,7 +5022,7 @@ GenTree* Lowering::LowerConstIntDivOrMod(GenTree* node)
// Returns:
// The next node to lower.
//
-GenTree* Lowering::LowerSignedDivOrMod(GenTreePtr node)
+GenTree* Lowering::LowerSignedDivOrMod(GenTree* node)
{
assert((node->OperGet() == GT_DIV) || (node->OperGet() == GT_MOD));
GenTree* next = node->gtNext;
@@ -5223,7 +5222,7 @@ GenTree* Lowering::LowerArrElem(GenTree* node)
GenTree* indexNode = arrElem->gtArrElem.gtArrInds[dim];
// Use the original arrObjNode on the 0th ArrIndex node, and clone it for subsequent ones.
- GenTreePtr idxArrObjNode;
+ GenTree* idxArrObjNode;
if (dim == 0)
{
idxArrObjNode = arrObjNode;
@@ -5259,23 +5258,23 @@ GenTree* Lowering::LowerArrElem(GenTree* node)
unsigned scale = arrElem->gtArrElem.gtArrElemSize;
unsigned offset = comp->eeGetMDArrayDataOffset(arrElem->gtArrElem.gtArrElemType, arrElem->gtArrElem.gtArrRank);
- GenTreePtr leaIndexNode = prevArrOffs;
+ GenTree* leaIndexNode = prevArrOffs;
if (!jitIsScaleIndexMul(scale))
{
// We do the address arithmetic in TYP_I_IMPL, though note that the lower bounds and lengths in memory are
// TYP_INT
- GenTreePtr scaleNode = new (comp, GT_CNS_INT) GenTreeIntCon(TYP_I_IMPL, scale);
- GenTreePtr mulNode = new (comp, GT_MUL) GenTreeOp(GT_MUL, TYP_I_IMPL, leaIndexNode, scaleNode);
+ GenTree* scaleNode = new (comp, GT_CNS_INT) GenTreeIntCon(TYP_I_IMPL, scale);
+ GenTree* mulNode = new (comp, GT_MUL) GenTreeOp(GT_MUL, TYP_I_IMPL, leaIndexNode, scaleNode);
BlockRange().InsertBefore(insertionPoint, scaleNode, mulNode);
leaIndexNode = mulNode;
scale = 1;
}
- GenTreePtr leaBase = comp->gtClone(arrObjNode);
+ GenTree* leaBase = comp->gtClone(arrObjNode);
varDsc->incRefCnts(blockWeight, comp);
BlockRange().InsertBefore(insertionPoint, leaBase);
- GenTreePtr leaNode = new (comp, GT_LEA) GenTreeAddrMode(arrElem->TypeGet(), leaBase, leaIndexNode, scale, offset);
+ GenTree* leaNode = new (comp, GT_LEA) GenTreeAddrMode(arrElem->TypeGet(), leaBase, leaIndexNode, scale, offset);
BlockRange().InsertBefore(insertionPoint, leaNode);
@@ -5542,7 +5541,7 @@ void Lowering::LowerBlock(BasicBlock* block)
* TODO-CQ: Perform tree recognition by using the Value Numbering Package, that way we can recognize
* arbitrary complex trees and support much more addressing patterns.
*/
-bool Lowering::IndirsAreEquivalent(GenTreePtr candidate, GenTreePtr storeInd)
+bool Lowering::IndirsAreEquivalent(GenTree* candidate, GenTree* storeInd)
{
assert(candidate->OperGet() == GT_IND);
assert(storeInd->OperGet() == GT_STOREIND);
@@ -5557,8 +5556,8 @@ bool Lowering::IndirsAreEquivalent(GenTreePtr candidate, GenTreePtr storeInd)
return false;
}
- GenTreePtr pTreeA = candidate->gtGetOp1();
- GenTreePtr pTreeB = storeInd->gtGetOp1();
+ GenTree* pTreeA = candidate->gtGetOp1();
+ GenTree* pTreeB = storeInd->gtGetOp1();
// This method will be called by codegen (as well as during lowering).
// After register allocation, the sources may have been spilled and reloaded
@@ -5600,7 +5599,7 @@ bool Lowering::IndirsAreEquivalent(GenTreePtr candidate, GenTreePtr storeInd)
/** Test whether the two given nodes are the same leaves.
* Right now, only constant integers and local variables are supported
*/
-bool Lowering::NodesAreEquivalentLeaves(GenTreePtr tree1, GenTreePtr tree2)
+bool Lowering::NodesAreEquivalentLeaves(GenTree* tree1, GenTree* tree2)
{
if (tree1 == nullptr && tree2 == nullptr)
{
@@ -5649,12 +5648,12 @@ bool Lowering::NodesAreEquivalentLeaves(GenTreePtr tree1, GenTreePtr tree2)
/**
* Get common information required to handle a cast instruction
*/
-void Lowering::getCastDescription(GenTreePtr treeNode, CastInfo* castInfo)
+void Lowering::getCastDescription(GenTree* treeNode, CastInfo* castInfo)
{
// Initialize castInfo
memset(castInfo, 0, sizeof(*castInfo));
- GenTreePtr castOp = treeNode->gtCast.CastOp();
+ GenTree* castOp = treeNode->gtCast.CastOp();
var_types dstType = treeNode->CastToType();
var_types srcType = genActualType(castOp->TypeGet());
@@ -5971,7 +5970,7 @@ void Lowering::ContainCheckArrOffset(GenTreeArrOffs* node)
void Lowering::ContainCheckLclHeap(GenTreeOp* node)
{
assert(node->OperIs(GT_LCLHEAP));
- GenTreePtr size = node->gtOp.gtOp1;
+ GenTree* size = node->gtOp.gtOp1;
if (size->IsCnsIntOrI())
{
MakeSrcContained(node, size);
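The LowerSwitch hunks above build the default-case guard "if (temp > jumpTableLength - 2) goto default" and force the comparison to be unsigned so a negative switch index also lands in the default case with a single test. A small sketch of that bounds check:

#include <cstdio>

static bool GoesToDefault(int switchIndex, unsigned jumpCnt)
{
    // jumpCnt includes the default target, so valid case indices are 0 .. jumpCnt - 2.
    return (unsigned)switchIndex > jumpCnt - 2;
}

int main()
{
    unsigned jumpCnt = 5;                            // 4 cases + default
    std::printf("%d\n", GoesToDefault(3, jumpCnt));  // 0: in range, use the jump table
    std::printf("%d\n", GoesToDefault(7, jumpCnt));  // 1: too large, default
    std::printf("%d\n", GoesToDefault(-1, jumpCnt)); // 1: negative wraps to a huge value, default
    return 0;
}

Folding the "index too large" and "index negative" checks into one unsigned compare keeps the lowered switch prologue to a single conditional branch.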
diff --git a/src/jit/lower.h b/src/jit/lower.h
index 74e71dfab8..2f7d3bd9ef 100644
--- a/src/jit/lower.h
+++ b/src/jit/lower.h
@@ -45,7 +45,7 @@ public:
bool signCheckOnly; // For converting between unsigned/signed int
};
- static void getCastDescription(GenTreePtr treeNode, CastInfo* castInfo);
+ static void getCastDescription(GenTree* treeNode, CastInfo* castInfo);
// This variant of LowerRange is called from outside of the main Lowering pass,
// so it creates its own instance of Lowering to do so.
@@ -158,9 +158,9 @@ private:
GenTree* LowerVirtualVtableCall(GenTreeCall* call);
GenTree* LowerVirtualStubCall(GenTreeCall* call);
void LowerArgsForCall(GenTreeCall* call);
- void ReplaceArgWithPutArgOrBitcast(GenTreePtr* ppChild, GenTreePtr newNode);
- GenTree* NewPutArg(GenTreeCall* call, GenTreePtr arg, fgArgTabEntry* info, var_types type);
- void LowerArg(GenTreeCall* call, GenTreePtr* ppTree);
+ void ReplaceArgWithPutArgOrBitcast(GenTree** ppChild, GenTree* newNode);
+ GenTree* NewPutArg(GenTreeCall* call, GenTree* arg, fgArgTabEntry* info, var_types type);
+ void LowerArg(GenTreeCall* call, GenTree** ppTree);
#ifdef _TARGET_ARMARCH_
GenTree* LowerFloatArg(GenTree** pArg, fgArgTabEntry* info);
GenTree* LowerFloatArgReg(GenTree* arg, regNumber regNum);
@@ -169,7 +169,7 @@ private:
void InsertPInvokeCallProlog(GenTreeCall* call);
void InsertPInvokeCallEpilog(GenTreeCall* call);
void InsertPInvokeMethodProlog();
- void InsertPInvokeMethodEpilog(BasicBlock* returnBB DEBUGARG(GenTreePtr lastExpr));
+ void InsertPInvokeMethodEpilog(BasicBlock* returnBB DEBUGARG(GenTree* lastExpr));
GenTree* SetGCState(int cns);
GenTree* CreateReturnTrapSeq();
enum FrameLinkAction
@@ -296,8 +296,8 @@ private:
#if !CPU_LOAD_STORE_ARCH
bool IsRMWIndirCandidate(GenTree* operand, GenTree* storeInd);
- bool IsBinOpInRMWStoreInd(GenTreePtr tree);
- bool IsRMWMemOpRootedAtStoreInd(GenTreePtr storeIndTree, GenTreePtr* indirCandidate, GenTreePtr* indirOpSource);
+ bool IsBinOpInRMWStoreInd(GenTree* tree);
+ bool IsRMWMemOpRootedAtStoreInd(GenTree* storeIndTree, GenTree** indirCandidate, GenTree** indirOpSource);
bool LowerRMWMemOp(GenTreeIndir* storeInd);
#endif
@@ -314,13 +314,13 @@ private:
#endif // FEATURE_HW_INTRINSICS
// Utility functions
- void MorphBlkIntoHelperCall(GenTreePtr pTree, GenTreePtr treeStmt);
+ void MorphBlkIntoHelperCall(GenTree* pTree, GenTree* treeStmt);
public:
- static bool IndirsAreEquivalent(GenTreePtr pTreeA, GenTreePtr pTreeB);
+ static bool IndirsAreEquivalent(GenTree* pTreeA, GenTree* pTreeB);
private:
- static bool NodesAreEquivalentLeaves(GenTreePtr candidate, GenTreePtr storeInd);
+ static bool NodesAreEquivalentLeaves(GenTree* candidate, GenTree* storeInd);
bool AreSourcesPossiblyModifiedLocals(GenTree* addr, GenTree* base, GenTree* index);
@@ -336,7 +336,7 @@ private:
}
// Makes 'childNode' contained in the 'parentNode'
- void MakeSrcContained(GenTreePtr parentNode, GenTreePtr childNode);
+ void MakeSrcContained(GenTree* parentNode, GenTree* childNode);
// Checks and makes 'childNode' contained in the 'parentNode'
bool CheckImmedAndMakeContained(GenTree* parentNode, GenTree* childNode);
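InsertPInvokeMethodEpilog's signature above uses DEBUGARG(...) so that the lastExpr parameter exists only in DEBUG builds. A self-contained approximation of the idiom (the macro's real definition lives in the JIT's headers; the types and the InsertEpilog name below are stand-ins):

#include <cstdio>

#ifdef DEBUG
#define DEBUGARG(x) , x
#else
#define DEBUGARG(x)
#endif

struct BasicBlock { int bbNum; };
struct GenTree    { int gtOper; };

static void InsertEpilog(BasicBlock* returnBB DEBUGARG(GenTree* lastExpr))
{
    std::printf("epilog for BB%d\n", returnBB->bbNum);
#ifdef DEBUG
    (void)lastExpr; // available for extra checks/dumps in debug builds only
#endif
}

int main()
{
    BasicBlock bb{1};
    GenTree    last{0};
    (void)last;
    InsertEpilog(&bb DEBUGARG(&last)); // the extra argument vanishes in release builds
    return 0;
}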
diff --git a/src/jit/lowerarmarch.cpp b/src/jit/lowerarmarch.cpp
index 757ac52db8..642bbb2801 100644
--- a/src/jit/lowerarmarch.cpp
+++ b/src/jit/lowerarmarch.cpp
@@ -258,8 +258,8 @@ void Lowering::LowerBlockStore(GenTreeBlk* blkNode)
Compiler* compiler = comp;
// Sources are dest address and initVal or source.
- GenTreePtr srcAddrOrFill = nullptr;
- bool isInitBlk = blkNode->OperIsInitBlkOp();
+ GenTree* srcAddrOrFill = nullptr;
+ bool isInitBlk = blkNode->OperIsInitBlkOp();
if (!isInitBlk)
{
@@ -276,7 +276,7 @@ void Lowering::LowerBlockStore(GenTreeBlk* blkNode)
if (isInitBlk)
{
- GenTreePtr initVal = source;
+ GenTree* initVal = source;
if (initVal->OperIsInitVal())
{
initVal->SetContained();
@@ -415,10 +415,10 @@ void Lowering::LowerCast(GenTree* tree)
DISPNODE(tree);
JITDUMP("\n");
- GenTreePtr op1 = tree->gtOp.gtOp1;
- var_types dstType = tree->CastToType();
- var_types srcType = genActualType(op1->TypeGet());
- var_types tmpType = TYP_UNDEF;
+ GenTree* op1 = tree->gtOp.gtOp1;
+ var_types dstType = tree->CastToType();
+ var_types srcType = genActualType(op1->TypeGet());
+ var_types tmpType = TYP_UNDEF;
if (varTypeIsFloating(srcType))
{
@@ -436,7 +436,7 @@ void Lowering::LowerCast(GenTree* tree)
if (tmpType != TYP_UNDEF)
{
- GenTreePtr tmp = comp->gtNewCastNode(tmpType, op1, tmpType);
+ GenTree* tmp = comp->gtNewCastNode(tmpType, op1, tmpType);
tmp->gtFlags |= (tree->gtFlags & (GTF_UNSIGNED | GTF_OVERFLOW | GTF_EXCEPT));
tree->gtFlags &= ~GTF_UNSIGNED;
@@ -457,14 +457,14 @@ void Lowering::LowerCast(GenTree* tree)
// Return Value:
// None.
//
-void Lowering::LowerRotate(GenTreePtr tree)
+void Lowering::LowerRotate(GenTree* tree)
{
if (tree->OperGet() == GT_ROL)
{
// There is no ROL instruction on ARM. Convert ROL into ROR.
- GenTreePtr rotatedValue = tree->gtOp.gtOp1;
- unsigned rotatedValueBitSize = genTypeSize(rotatedValue->gtType) * 8;
- GenTreePtr rotateLeftIndexNode = tree->gtOp.gtOp2;
+ GenTree* rotatedValue = tree->gtOp.gtOp1;
+ unsigned rotatedValueBitSize = genTypeSize(rotatedValue->gtType) * 8;
+ GenTree* rotateLeftIndexNode = tree->gtOp.gtOp2;
if (rotateLeftIndexNode->IsCnsIntOrI())
{
@@ -474,8 +474,7 @@ void Lowering::LowerRotate(GenTreePtr tree)
}
else
{
- GenTreePtr tmp =
- comp->gtNewOperNode(GT_NEG, genActualType(rotateLeftIndexNode->gtType), rotateLeftIndexNode);
+ GenTree* tmp = comp->gtNewOperNode(GT_NEG, genActualType(rotateLeftIndexNode->gtType), rotateLeftIndexNode);
BlockRange().InsertAfter(rotateLeftIndexNode, tmp);
tree->gtOp.gtOp2 = tmp;
}
@@ -659,10 +658,10 @@ void Lowering::ContainCheckMul(GenTreeOp* node)
//
void Lowering::ContainCheckShiftRotate(GenTreeOp* node)
{
- GenTreePtr shiftBy = node->gtOp2;
+ GenTree* shiftBy = node->gtOp2;
#ifdef _TARGET_ARM_
- GenTreePtr source = node->gtOp1;
+ GenTree* source = node->gtOp1;
if (node->OperIs(GT_LSH_HI, GT_RSH_LO))
{
assert(source->OperGet() == GT_LONG);
@@ -725,9 +724,9 @@ void Lowering::ContainCheckStoreLoc(GenTreeLclVarCommon* storeLoc)
void Lowering::ContainCheckCast(GenTreeCast* node)
{
#ifdef _TARGET_ARM_
- GenTreePtr castOp = node->CastOp();
- var_types castToType = node->CastToType();
- var_types srcType = castOp->TypeGet();
+ GenTree* castOp = node->CastOp();
+ var_types castToType = node->CastToType();
+ var_types srcType = castOp->TypeGet();
if (varTypeIsLong(castOp))
{
@@ -757,7 +756,7 @@ void Lowering::ContainCheckCompare(GenTreeOp* cmp)
void Lowering::ContainCheckBoundsChk(GenTreeBoundsChk* node)
{
assert(node->OperIsBoundsCheck());
- GenTreePtr other;
+ GenTree* other;
if (!CheckImmedAndMakeContained(node, node->gtIndex))
{
CheckImmedAndMakeContained(node, node->gtArrLen);
diff --git a/src/jit/lowerxarch.cpp b/src/jit/lowerxarch.cpp
index a046704c91..70782f19fc 100644
--- a/src/jit/lowerxarch.cpp
+++ b/src/jit/lowerxarch.cpp
@@ -142,12 +142,12 @@ void Lowering::LowerStoreIndir(GenTreeIndir* node)
//
void Lowering::LowerBlockStore(GenTreeBlk* blkNode)
{
- GenTree* dstAddr = blkNode->Addr();
- unsigned size = blkNode->gtBlkSize;
- GenTree* source = blkNode->Data();
- Compiler* compiler = comp;
- GenTreePtr srcAddrOrFill = nullptr;
- bool isInitBlk = blkNode->OperIsInitBlkOp();
+ GenTree* dstAddr = blkNode->Addr();
+ unsigned size = blkNode->gtBlkSize;
+ GenTree* source = blkNode->Data();
+ Compiler* compiler = comp;
+ GenTree* srcAddrOrFill = nullptr;
+ bool isInitBlk = blkNode->OperIsInitBlkOp();
if (!isInitBlk)
{
@@ -556,7 +556,7 @@ void Lowering::LowerPutArgStk(GenTreePutArgStk* putArgStk)
}
#endif // _TARGET_X86_
- GenTreePtr src = putArgStk->gtOp1;
+ GenTree* src = putArgStk->gtOp1;
#ifdef FEATURE_PUT_STRUCT_ARG_STK
if (src->TypeGet() != TYP_STRUCT)
@@ -590,8 +590,8 @@ void Lowering::LowerPutArgStk(GenTreePutArgStk* putArgStk)
}
#ifdef FEATURE_PUT_STRUCT_ARG_STK
- GenTreePtr dst = putArgStk;
- GenTreePtr srcAddr = nullptr;
+ GenTree* dst = putArgStk;
+ GenTree* srcAddr = nullptr;
bool haveLocalAddr = false;
if ((src->OperGet() == GT_OBJ) || (src->OperGet() == GT_IND))
@@ -698,10 +698,10 @@ void Lowering::LowerCast(GenTree* tree)
{
assert(tree->OperGet() == GT_CAST);
- GenTreePtr castOp = tree->gtCast.CastOp();
- var_types castToType = tree->CastToType();
- var_types srcType = castOp->TypeGet();
- var_types tmpType = TYP_UNDEF;
+ GenTree* castOp = tree->gtCast.CastOp();
+ var_types castToType = tree->CastToType();
+ var_types srcType = castOp->TypeGet();
+ var_types tmpType = TYP_UNDEF;
// force the srcType to unsigned if GT_UNSIGNED flag is set
if (tree->gtFlags & GTF_UNSIGNED)
@@ -748,7 +748,7 @@ void Lowering::LowerCast(GenTree* tree)
if (tmpType != TYP_UNDEF)
{
- GenTreePtr tmp = comp->gtNewCastNode(tmpType, castOp, tmpType);
+ GenTree* tmp = comp->gtNewCastNode(tmpType, castOp, tmpType);
tmp->gtFlags |= (tree->gtFlags & (GTF_UNSIGNED | GTF_OVERFLOW | GTF_EXCEPT));
tree->gtFlags &= ~GTF_UNSIGNED;
@@ -973,7 +973,7 @@ bool Lowering::IsRMWIndirCandidate(GenTree* operand, GenTree* storeInd)
// Return Value
// True if 'tree' is part of a RMW memory operation pattern
//
-bool Lowering::IsBinOpInRMWStoreInd(GenTreePtr tree)
+bool Lowering::IsBinOpInRMWStoreInd(GenTree* tree)
{
// Must be a non floating-point type binary operator since SSE2 doesn't support RMW memory ops
assert(!varTypeIsFloating(tree));
@@ -995,8 +995,8 @@ bool Lowering::IsBinOpInRMWStoreInd(GenTreePtr tree)
// Since it is not relatively cheap to recognize RMW memory op pattern, we
// cache the result in GT_STOREIND node so that while lowering GT_STOREIND
// we can use the result.
- GenTreePtr indirCandidate = nullptr;
- GenTreePtr indirOpSource = nullptr;
+ GenTree* indirCandidate = nullptr;
+ GenTree* indirOpSource = nullptr;
return IsRMWMemOpRootedAtStoreInd(use.User(), &indirCandidate, &indirOpSource);
}
@@ -1046,7 +1046,7 @@ bool Lowering::IsBinOpInRMWStoreInd(GenTreePtr tree)
// Otherwise, returns false with indirCandidate and indirOpSource set to null.
// Also updates flags of GT_STOREIND tree with its RMW status.
//
-bool Lowering::IsRMWMemOpRootedAtStoreInd(GenTreePtr tree, GenTreePtr* outIndirCandidate, GenTreePtr* outIndirOpSource)
+bool Lowering::IsRMWMemOpRootedAtStoreInd(GenTree* tree, GenTree** outIndirCandidate, GenTree** outIndirOpSource)
{
assert(!varTypeIsFloating(tree));
assert(outIndirCandidate != nullptr);
@@ -1062,8 +1062,8 @@ bool Lowering::IsRMWMemOpRootedAtStoreInd(GenTreePtr tree, GenTreePtr* outIndirC
return false;
}
- GenTreePtr indirDst = storeInd->gtGetOp1();
- GenTreePtr indirSrc = storeInd->gtGetOp2();
+ GenTree* indirDst = storeInd->gtGetOp1();
+ GenTree* indirSrc = storeInd->gtGetOp2();
genTreeOps oper = indirSrc->OperGet();
// Early out if it is already known to be a RMW memory op
@@ -1452,7 +1452,7 @@ void Lowering::ContainCheckCallOperands(GenTreeCall* call)
//
void Lowering::ContainCheckIndir(GenTreeIndir* node)
{
- GenTreePtr addr = node->Addr();
+ GenTree* addr = node->Addr();
// If this is the rhs of a block copy it will be handled when we handle the store.
if (node->TypeGet() == TYP_STRUCT)
@@ -1573,14 +1573,14 @@ void Lowering::ContainCheckMul(GenTreeOp* node)
return;
}
- bool isUnsignedMultiply = ((node->gtFlags & GTF_UNSIGNED) != 0);
- bool requiresOverflowCheck = node->gtOverflowEx();
- bool useLeaEncoding = false;
- GenTreePtr memOp = nullptr;
+ bool isUnsignedMultiply = ((node->gtFlags & GTF_UNSIGNED) != 0);
+ bool requiresOverflowCheck = node->gtOverflowEx();
+ bool useLeaEncoding = false;
+ GenTree* memOp = nullptr;
bool hasImpliedFirstOperand = false;
GenTreeIntConCommon* imm = nullptr;
- GenTreePtr other = nullptr;
+ GenTree* other = nullptr;
// Multiply should never be using small types
assert(!varTypeIsSmall(node->TypeGet()));
@@ -1691,7 +1691,7 @@ void Lowering::ContainCheckMul(GenTreeOp* node)
void Lowering::ContainCheckShiftRotate(GenTreeOp* node)
{
#ifdef _TARGET_X86_
- GenTreePtr source = node->gtOp1;
+ GenTree* source = node->gtOp1;
if (node->OperIs(GT_LSH_HI, GT_RSH_LO))
{
assert(source->OperGet() == GT_LONG);
@@ -1758,9 +1758,9 @@ void Lowering::ContainCheckStoreLoc(GenTreeLclVarCommon* storeLoc)
//
void Lowering::ContainCheckCast(GenTreeCast* node)
{
- GenTreePtr castOp = node->CastOp();
- var_types castToType = node->CastToType();
- var_types srcType = castOp->TypeGet();
+ GenTree* castOp = node->CastOp();
+ var_types castToType = node->CastToType();
+ var_types srcType = castOp->TypeGet();
// force the srcType to unsigned if GT_UNSIGNED flag is set
if (node->gtFlags & GTF_UNSIGNED)
@@ -1813,10 +1813,10 @@ void Lowering::ContainCheckCompare(GenTreeOp* cmp)
{
assert(cmp->OperIsCompare() || cmp->OperIs(GT_CMP));
- GenTreePtr op1 = cmp->gtOp.gtOp1;
- GenTreePtr op2 = cmp->gtOp.gtOp2;
- var_types op1Type = op1->TypeGet();
- var_types op2Type = op2->TypeGet();
+ GenTree* op1 = cmp->gtOp.gtOp1;
+ GenTree* op2 = cmp->gtOp.gtOp2;
+ var_types op1Type = op1->TypeGet();
+ var_types op2Type = op2->TypeGet();
// If either of op1 or op2 is floating point values, then we need to use
// ucomiss or ucomisd to compare, both of which support the following form:
@@ -1842,7 +1842,7 @@ void Lowering::ContainCheckCompare(GenTreeOp* cmp)
reverseOps = cmp->OperIs(GT_LT, GT_LE);
}
- GenTreePtr otherOp;
+ GenTree* otherOp;
if (reverseOps)
{
otherOp = op1;
@@ -1939,8 +1939,8 @@ bool Lowering::LowerRMWMemOp(GenTreeIndir* storeInd)
// indirCandidate = memory read i.e. a gtInd of an addr mode
// indirOpSource = source operand used in binary/unary op (i.e. source operand of indirSrc node)
- GenTreePtr indirCandidate = nullptr;
- GenTreePtr indirOpSource = nullptr;
+ GenTree* indirCandidate = nullptr;
+ GenTree* indirOpSource = nullptr;
if (!IsRMWMemOpRootedAtStoreInd(storeInd, &indirCandidate, &indirOpSource))
{
@@ -1950,8 +1950,8 @@ bool Lowering::LowerRMWMemOp(GenTreeIndir* storeInd)
return false;
}
- GenTreePtr indirDst = storeInd->gtGetOp1();
- GenTreePtr indirSrc = storeInd->gtGetOp2();
+ GenTree* indirDst = storeInd->gtGetOp1();
+ GenTree* indirSrc = storeInd->gtGetOp2();
genTreeOps oper = indirSrc->OperGet();
// At this point we have successfully detected a RMW memory op of one of the following forms
@@ -1997,7 +1997,7 @@ bool Lowering::LowerRMWMemOp(GenTreeIndir* storeInd)
indirSrc->SetContained();
indirCandidate->SetContained();
- GenTreePtr indirCandidateChild = indirCandidate->gtGetOp1();
+ GenTree* indirCandidateChild = indirCandidate->gtGetOp1();
indirCandidateChild->SetContained();
if (indirCandidateChild->OperGet() == GT_LEA)
@@ -2068,9 +2068,9 @@ void Lowering::ContainCheckBinary(GenTreeOp* node)
// In case of memory-op, we can encode it directly provided its type matches with 'tree' type.
// This is because during codegen, type of 'tree' is used to determine emit Type size. If the types
// do not match, they get normalized (i.e. sign/zero extended) on load into a register.
- bool directlyEncodable = false;
- bool binOpInRMW = false;
- GenTreePtr operand = nullptr;
+ bool directlyEncodable = false;
+ bool binOpInRMW = false;
+ GenTree* operand = nullptr;
if (IsContainableImmed(node, op2))
{
@@ -2126,7 +2126,7 @@ void Lowering::ContainCheckBinary(GenTreeOp* node)
void Lowering::ContainCheckBoundsChk(GenTreeBoundsChk* node)
{
assert(node->OperIsBoundsCheck());
- GenTreePtr other;
+ GenTree* other;
if (CheckImmedAndMakeContained(node, node->gtIndex))
{
other = node->gtArrLen;
diff --git a/src/jit/lsra.cpp b/src/jit/lsra.cpp
index 483d8dd1a2..641a9a1d11 100644
--- a/src/jit/lsra.cpp
+++ b/src/jit/lsra.cpp
@@ -127,7 +127,7 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
// Return Value:
// None
//
-void lsraAssignRegToTree(GenTreePtr tree, regNumber reg, unsigned regIdx)
+void lsraAssignRegToTree(GenTree* tree, regNumber reg, unsigned regIdx)
{
if (regIdx == 0)
{
@@ -170,8 +170,8 @@ void lsraAssignRegToTree(GenTreePtr tree, regNumber reg, unsigned regIdx)
// Weight of ref position.
unsigned LinearScan::getWeight(RefPosition* refPos)
{
- unsigned weight;
- GenTreePtr treeNode = refPos->treeNode;
+ unsigned weight;
+ GenTree* treeNode = refPos->treeNode;
if (treeNode != nullptr)
{
@@ -2695,7 +2695,7 @@ void LinearScan::checkLastUses(BasicBlock* block)
LsraLocation loc = currentRefPosition->nodeLocation;
// We should always have a tree node for a localVar, except for the "special" RefPositions.
- GenTreePtr tree = currentRefPosition->treeNode;
+ GenTree* tree = currentRefPosition->treeNode;
assert(tree != nullptr || currentRefPosition->refType == RefTypeExpUse ||
currentRefPosition->refType == RefTypeDummyDef);
@@ -8467,7 +8467,7 @@ void LinearScan::updatePreviousInterval(RegRecord* reg, Interval* interval, Regi
// NICE: Consider tracking whether an Interval is always in the same location (register/stack)
// in which case it will require no resolution.
//
-void LinearScan::resolveLocalRef(BasicBlock* block, GenTreePtr treeNode, RefPosition* currentRefPosition)
+void LinearScan::resolveLocalRef(BasicBlock* block, GenTree* treeNode, RefPosition* currentRefPosition)
{
assert((block == nullptr) == (treeNode == nullptr));
assert(enregisterLocalVars);
@@ -8762,7 +8762,7 @@ void LinearScan::writeRegisters(RefPosition* currentRefPosition, GenTree* tree)
// and the unspilling code automatically reuses the same register, and does the reload when it notices that flag
// when considering a node's operands.
//
-void LinearScan::insertCopyOrReload(BasicBlock* block, GenTreePtr tree, unsigned multiRegIdx, RefPosition* refPosition)
+void LinearScan::insertCopyOrReload(BasicBlock* block, GenTree* tree, unsigned multiRegIdx, RefPosition* refPosition)
{
LIR::Range& blockRange = LIR::AsRange(block);
@@ -8840,7 +8840,7 @@ void LinearScan::insertCopyOrReload(BasicBlock* block, GenTreePtr tree, unsigned
// It will be a call or some node that turns into a call.
// refPosition - The RefTypeUpperVectorSaveDef RefPosition.
//
-void LinearScan::insertUpperVectorSaveAndReload(GenTreePtr tree, RefPosition* refPosition, BasicBlock* block)
+void LinearScan::insertUpperVectorSaveAndReload(GenTree* tree, RefPosition* refPosition, BasicBlock* block)
{
Interval* lclVarInterval = refPosition->getInterval()->relatedInterval;
assert(lclVarInterval->isLocalVar == true);
@@ -8861,8 +8861,8 @@ void LinearScan::insertUpperVectorSaveAndReload(GenTreePtr tree, RefPosition* re
// First, insert the save before the call.
- GenTreePtr saveLcl = compiler->gtNewLclvNode(lclVarInterval->varNum, varDsc->lvType);
- saveLcl->gtRegNum = lclVarReg;
+ GenTree* saveLcl = compiler->gtNewLclvNode(lclVarInterval->varNum, varDsc->lvType);
+ saveLcl->gtRegNum = lclVarReg;
SetLsraAdded(saveLcl);
GenTreeSIMD* simdNode =
@@ -8879,8 +8879,8 @@ void LinearScan::insertUpperVectorSaveAndReload(GenTreePtr tree, RefPosition* re
// Now insert the restore after the call.
- GenTreePtr restoreLcl = compiler->gtNewLclvNode(lclVarInterval->varNum, varDsc->lvType);
- restoreLcl->gtRegNum = lclVarReg;
+ GenTree* restoreLcl = compiler->gtNewLclvNode(lclVarInterval->varNum, varDsc->lvType);
+ restoreLcl->gtRegNum = lclVarReg;
SetLsraAdded(restoreLcl);
simdNode = new (compiler, GT_SIMD) GenTreeSIMD(varDsc->lvType, restoreLcl, nullptr, SIMDIntrinsicUpperRestore,
@@ -9016,7 +9016,7 @@ void LinearScan::updateMaxSpill(RefPosition* refPosition)
else
#endif // !FEATURE_PARTIAL_SIMD_CALLEE_SAVE
{
- GenTreePtr treeNode = refPosition->treeNode;
+ GenTree* treeNode = refPosition->treeNode;
if (treeNode == nullptr)
{
assert(RefTypeIsUse(refType));
@@ -9175,7 +9175,7 @@ void LinearScan::resolveRegisters()
}
BasicBlock* insertionBlock = compiler->fgFirstBB;
- GenTreePtr insertionPoint = LIR::AsRange(insertionBlock).FirstNonPhiNode();
+ GenTree* insertionPoint = LIR::AsRange(insertionBlock).FirstNonPhiNode();
// write back assignments
for (block = startBlockSequence(); block != nullptr; block = moveToNextBlock())
@@ -9628,7 +9628,7 @@ void LinearScan::resolveRegisters()
// If fromReg or toReg is REG_STK, then move from/to memory, respectively.
void LinearScan::insertMove(
- BasicBlock* block, GenTreePtr insertionPoint, unsigned lclNum, regNumber fromReg, regNumber toReg)
+ BasicBlock* block, GenTree* insertionPoint, unsigned lclNum, regNumber fromReg, regNumber toReg)
{
LclVarDsc* varDsc = compiler->lvaTable + lclNum;
// the lclVar must be a register candidate
@@ -9641,7 +9641,7 @@ void LinearScan::insertMove(
// This var can't be marked lvRegister now
varDsc->lvRegNum = REG_STK;
- GenTreePtr src = compiler->gtNewLclvNode(lclNum, varDsc->TypeGet());
+ GenTree* src = compiler->gtNewLclvNode(lclNum, varDsc->TypeGet());
SetLsraAdded(src);
// There are three cases we need to handle:
@@ -9712,7 +9712,7 @@ void LinearScan::insertMove(
}
void LinearScan::insertSwap(
- BasicBlock* block, GenTreePtr insertionPoint, unsigned lclNum1, regNumber reg1, unsigned lclNum2, regNumber reg2)
+ BasicBlock* block, GenTree* insertionPoint, unsigned lclNum1, regNumber reg1, unsigned lclNum2, regNumber reg2)
{
#ifdef DEBUG
if (VERBOSE)
@@ -9731,16 +9731,16 @@ void LinearScan::insertSwap(
LclVarDsc* varDsc2 = compiler->lvaTable + lclNum2;
assert(reg1 != REG_STK && reg1 != REG_NA && reg2 != REG_STK && reg2 != REG_NA);
- GenTreePtr lcl1 = compiler->gtNewLclvNode(lclNum1, varDsc1->TypeGet());
- lcl1->gtRegNum = reg1;
+ GenTree* lcl1 = compiler->gtNewLclvNode(lclNum1, varDsc1->TypeGet());
+ lcl1->gtRegNum = reg1;
SetLsraAdded(lcl1);
- GenTreePtr lcl2 = compiler->gtNewLclvNode(lclNum2, varDsc2->TypeGet());
- lcl2->gtRegNum = reg2;
+ GenTree* lcl2 = compiler->gtNewLclvNode(lclNum2, varDsc2->TypeGet());
+ lcl2->gtRegNum = reg2;
SetLsraAdded(lcl2);
- GenTreePtr swap = compiler->gtNewOperNode(GT_SWAP, TYP_VOID, lcl1, lcl2);
- swap->gtRegNum = REG_NA;
+ GenTree* swap = compiler->gtNewOperNode(GT_SWAP, TYP_VOID, lcl1, lcl2);
+ swap->gtRegNum = REG_NA;
SetLsraAdded(swap);
lcl1->gtNext = lcl2;
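
insertMove and insertSwap above materialize resolution moves for a local, with REG_STK standing for "the local's home stack slot". A rough, self-contained model of the three move shapes the comments mention (register-to-register, spill, and reload); the classification helper is an illustrative abstraction, not the JIT's API:

    #include <cstdio>

    // REG_STK as a sentinel "the location is the stack slot" value, mirroring the
    // convention used by insertMove/addResolution above (illustrative only).
    constexpr int REG_STK = -1;

    enum class MoveKind { RegToReg, Spill, Reload };

    // Classify a resolution move for a local from 'fromReg' to 'toReg'.
    MoveKind classifyMove(int fromReg, int toReg)
    {
        if (fromReg == REG_STK)
            return MoveKind::Reload;     // stack -> register: load from the local's home slot
        if (toReg == REG_STK)
            return MoveKind::Spill;      // register -> stack: store to the local's home slot
        return MoveKind::RegToReg;       // register -> register: plain move
    }

    int main()
    {
        std::printf("%d %d %d\n",
                    (int)classifyMove(3, 5),         // RegToReg
                    (int)classifyMove(3, REG_STK),   // Spill
                    (int)classifyMove(REG_STK, 5));  // Reload
        return 0;
    }
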
@@ -9881,7 +9881,7 @@ regNumber LinearScan::getTempRegForResolution(BasicBlock* fromBlock, BasicBlock*
// It inserts at least one move and updates incoming parameter 'location'.
//
void LinearScan::addResolutionForDouble(BasicBlock* block,
- GenTreePtr insertionPoint,
+ GenTree* insertionPoint,
Interval** sourceIntervals,
regNumberSmall* location,
regNumber toReg,
@@ -9951,7 +9951,7 @@ void LinearScan::addResolutionForDouble(BasicBlock* block,
// in which case fromReg will be REG_STK, and we insert at the top.
void LinearScan::addResolution(
- BasicBlock* block, GenTreePtr insertionPoint, Interval* interval, regNumber toReg, regNumber fromReg)
+ BasicBlock* block, GenTree* insertionPoint, Interval* interval, regNumber toReg, regNumber fromReg)
{
#ifdef DEBUG
const char* insertionPointString = "top";
@@ -10573,7 +10573,7 @@ void LinearScan::resolveEdge(BasicBlock* fromBlock,
memset(&stackToRegIntervals, 0, sizeof(stackToRegIntervals));
// Get the starting insertion point for the "to" resolution
- GenTreePtr insertionPoint = nullptr;
+ GenTree* insertionPoint = nullptr;
if (resolveType == ResolveSplit || resolveType == ResolveCritical)
{
insertionPoint = LIR::AsRange(block).FirstNonPhiNode();
@@ -11480,7 +11480,7 @@ void LinearScan::lsraDumpIntervals(const char* msg)
// Dumps a tree node as a destination or source operand, with the style
// of dump dependent on the mode
-void LinearScan::lsraGetOperandString(GenTreePtr tree,
+void LinearScan::lsraGetOperandString(GenTree* tree,
LsraTupleDumpMode mode,
char* operandString,
unsigned operandStringLength)
@@ -11518,7 +11518,7 @@ void LinearScan::lsraGetOperandString(GenTreePtr tree,
break;
}
}
-void LinearScan::lsraDispNode(GenTreePtr tree, LsraTupleDumpMode mode, bool hasDest)
+void LinearScan::lsraDispNode(GenTree* tree, LsraTupleDumpMode mode, bool hasDest)
{
Compiler* compiler = JitTls::GetCompiler();
const unsigned operandStringLength = 16;
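
Every hunk above makes the same mechanical substitution: the GenTreePtr alias is spelled out as GenTree*, and each local pointer gets its own declaration line. A minimal, self-contained sketch of why the one-pointer-per-line rule exists (the GenTree struct here is a stand-in, not the JIT's real node type):

    #include <cstdio>

    struct GenTree { int gtOper; };            // stand-in node type, for illustration only

    int main()
    {
        // With a pointer typedef, every declared name is a pointer:
        typedef GenTree* GenTreePtr;
        GenTreePtr a = nullptr, b = nullptr;   // a and b are both GenTree*

        // Without the typedef, '*' binds to the declarator, not the type, so a
        // combined declaration silently gives 'd' a non-pointer type:
        GenTree *c = nullptr, d{};             // c is GenTree*, d is a GenTree object

        // Declaring each pointer on its own line removes the ambiguity:
        GenTree* e = nullptr;
        GenTree* f = nullptr;

        std::printf("%zu %zu\n", sizeof(c), sizeof(d));   // pointer size vs. node size
        (void)a; (void)b; (void)e; (void)f;
        return 0;
    }
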
diff --git a/src/jit/lsra.h b/src/jit/lsra.h
index 17c6c93a9c..a4981417cf 100644
--- a/src/jit/lsra.h
+++ b/src/jit/lsra.h
@@ -684,7 +684,7 @@ public:
#ifdef _TARGET_ARM_
void addResolutionForDouble(BasicBlock* block,
- GenTreePtr insertionPoint,
+ GenTree* insertionPoint,
Interval** sourceIntervals,
regNumberSmall* location,
regNumber toReg,
diff --git a/src/jit/lsraarm.cpp b/src/jit/lsraarm.cpp
index 081415e9f2..6e0cf22d40 100644
--- a/src/jit/lsraarm.cpp
+++ b/src/jit/lsraarm.cpp
@@ -129,7 +129,7 @@ void LinearScan::TreeNodeInfoInitLclHeap(GenTree* tree, TreeNodeInfo* info)
hasPspSym = false;
#endif
- GenTreePtr size = tree->gtOp.gtOp1;
+ GenTree* size = tree->gtOp.gtOp1;
if (size->IsCnsIntOrI())
{
assert(size->isContained());
@@ -287,10 +287,10 @@ void LinearScan::TreeNodeInfoInit(GenTree* tree, TreeNodeInfo* info)
// and that allow the source operand to be either a reg or memop. Given the
// fact that casts from small int to float/double are done as two-level casts,
// the source operand is always guaranteed to be of size 4 or 8 bytes.
- var_types castToType = tree->CastToType();
- GenTreePtr castOp = tree->gtCast.CastOp();
- var_types castOpType = castOp->TypeGet();
- info->srcCount = GetOperandInfo(castOp);
+ var_types castToType = tree->CastToType();
+ GenTree* castOp = tree->gtCast.CastOp();
+ var_types castOpType = castOp->TypeGet();
+ info->srcCount = GetOperandInfo(castOp);
if (tree->gtFlags & GTF_UNSIGNED)
{
castOpType = genUnsignedType(castOpType);
@@ -657,7 +657,7 @@ void LinearScan::TreeNodeInfoInit(GenTree* tree, TreeNodeInfo* info)
case GT_ADDR:
{
// For a GT_ADDR, the child node should not be evaluated into a register
- GenTreePtr child = tree->gtOp.gtOp1;
+ GenTree* child = tree->gtOp.gtOp1;
assert(!isCandidateLocalRef(child));
assert(child->isContained());
assert(info->dstCount == 1);
diff --git a/src/jit/lsraarm64.cpp b/src/jit/lsraarm64.cpp
index e549976f6e..2bfcfa2651 100644
--- a/src/jit/lsraarm64.cpp
+++ b/src/jit/lsraarm64.cpp
@@ -305,9 +305,9 @@ void LinearScan::TreeNodeInfoInit(GenTree* tree, TreeNodeInfo* info)
// and that allow the source operand to be either a reg or memop. Given the
// fact that casts from small int to float/double are done as two-level casts,
// the source operand is always guaranteed to be of size 4 or 8 bytes.
- var_types castToType = tree->CastToType();
- GenTreePtr castOp = tree->gtCast.CastOp();
- var_types castOpType = castOp->TypeGet();
+ var_types castToType = tree->CastToType();
+ GenTree* castOp = tree->gtCast.CastOp();
+ var_types castOpType = castOp->TypeGet();
if (tree->gtFlags & GTF_UNSIGNED)
{
castOpType = genUnsignedType(castOpType);
@@ -443,7 +443,7 @@ void LinearScan::TreeNodeInfoInit(GenTree* tree, TreeNodeInfo* info)
case GT_ADDR:
{
// For a GT_ADDR, the child node should not be evaluated into a register
- GenTreePtr child = tree->gtOp.gtOp1;
+ GenTree* child = tree->gtOp.gtOp1;
assert(!isCandidateLocalRef(child));
assert(child->isContained());
assert(info->dstCount == 1);
@@ -493,7 +493,7 @@ void LinearScan::TreeNodeInfoInit(GenTree* tree, TreeNodeInfo* info)
hasPspSym = false;
#endif
- GenTreePtr size = tree->gtOp.gtOp1;
+ GenTree* size = tree->gtOp.gtOp1;
if (size->IsCnsIntOrI())
{
assert(size->isContained());
diff --git a/src/jit/lsraarmarch.cpp b/src/jit/lsraarmarch.cpp
index 4de53df012..189a37697a 100644
--- a/src/jit/lsraarmarch.cpp
+++ b/src/jit/lsraarmarch.cpp
@@ -106,7 +106,7 @@ void LinearScan::TreeNodeInfoInitStoreLoc(GenTreeLclVarCommon* storeLoc, TreeNod
// Return Value:
// None.
//
-void LinearScan::TreeNodeInfoInitCmp(GenTreePtr tree, TreeNodeInfo* info)
+void LinearScan::TreeNodeInfoInitCmp(GenTree* tree, TreeNodeInfo* info)
{
info->srcCount = appendBinaryLocationInfoToList(tree->AsOp());
@@ -116,9 +116,9 @@ void LinearScan::TreeNodeInfoInitCmp(GenTreePtr tree, TreeNodeInfo* info)
void LinearScan::TreeNodeInfoInitGCWriteBarrier(GenTree* tree, TreeNodeInfo* info)
{
- GenTreePtr dst = tree;
- GenTreePtr addr = tree->gtOp.gtOp1;
- GenTreePtr src = tree->gtOp.gtOp2;
+ GenTree* dst = tree;
+ GenTree* addr = tree->gtOp.gtOp1;
+ GenTree* src = tree->gtOp.gtOp2;
LocationInfoListNode* addrInfo = getLocationInfo(addr);
LocationInfoListNode* srcInfo = getLocationInfo(src);
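
TreeNodeInfoInitGCWriteBarrier reserves the registers needed when a store to the GC heap must go through a write barrier. As background, a much-simplified model of what a card-marking barrier does (sizes, table layout and helper names are invented; the real runtime helpers are considerably more involved):

    #include <cstdint>
    #include <cstdio>

    // Toy model: one card byte covers a 512-byte region of the "heap".
    constexpr size_t kCardShift = 9;
    static void*   g_heap[1 << 14];                              // toy GC heap of pointer slots
    static uint8_t g_cardTable[(sizeof(g_heap) >> kCardShift) + 1];

    static bool inHeap(void** slot)
    {
        return slot >= g_heap && slot < g_heap + (1 << 14);
    }

    // Store 'src' into '*dst' and, if the destination is in the GC heap, dirty
    // the card covering it so the collector rescans that region.
    void writeBarrierStore(void** dst, void* src)
    {
        *dst = src;
        if (inHeap(dst))
        {
            size_t offset = reinterpret_cast<uintptr_t>(dst) - reinterpret_cast<uintptr_t>(g_heap);
            g_cardTable[offset >> kCardShift] = 1;
        }
    }

    int main()
    {
        int dummy = 0;
        writeBarrierStore(&g_heap[100], &dummy);
        std::printf("%d\n", (int)g_cardTable[(100 * sizeof(void*)) >> kCardShift]);   // 1
        return 0;
    }
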
@@ -249,8 +249,8 @@ void LinearScan::TreeNodeInfoInitIndir(GenTreeIndir* indirTree, TreeNodeInfo* in
//
int LinearScan::TreeNodeInfoInitShiftRotate(GenTree* tree, TreeNodeInfo* info)
{
- GenTreePtr source = tree->gtOp.gtOp1;
- GenTreePtr shiftBy = tree->gtOp.gtOp2;
+ GenTree* source = tree->gtOp.gtOp1;
+ GenTree* shiftBy = tree->gtOp.gtOp2;
assert(info->dstCount == 1);
if (!shiftBy->isContained())
{
@@ -471,11 +471,11 @@ void LinearScan::TreeNodeInfoInitCall(GenTreeCall* call, TreeNodeInfo* info)
// Each register argument corresponds to one source.
bool callHasFloatRegArgs = false;
- for (GenTreePtr list = call->gtCallLateArgs; list; list = list->MoveNext())
+ for (GenTree* list = call->gtCallLateArgs; list; list = list->MoveNext())
{
assert(list->OperIsList());
- GenTreePtr argNode = list->Current();
+ GenTree* argNode = list->Current();
#ifdef DEBUG
// During TreeNodeInfoInit, we only use the ArgTabEntry for validation,
@@ -578,10 +578,10 @@ void LinearScan::TreeNodeInfoInitCall(GenTreeCall* call, TreeNodeInfo* info)
// because the code generator doesn't actually consider it live,
// so it can't be spilled.
- GenTreePtr args = call->gtCallArgs;
+ GenTree* args = call->gtCallArgs;
while (args)
{
- GenTreePtr arg = args->gtOp.gtOp1;
+ GenTree* arg = args->gtOp.gtOp1;
// Skip arguments that have been moved to the Late Arg list
if (!(args->gtFlags & GTF_LATE_ARG))
@@ -651,7 +651,7 @@ void LinearScan::TreeNodeInfoInitPutArgStk(GenTreePutArgStk* argNode, TreeNodeIn
{
assert(argNode->gtOper == GT_PUTARG_STK);
- GenTreePtr putArgChild = argNode->gtOp.gtOp1;
+ GenTree* putArgChild = argNode->gtOp.gtOp1;
info->srcCount = 0;
info->dstCount = 0;
@@ -684,7 +684,7 @@ void LinearScan::TreeNodeInfoInitPutArgStk(GenTreePutArgStk* argNode, TreeNodeIn
if (putArgChild->OperGet() == GT_OBJ)
{
assert(putArgChild->isContained());
- GenTreePtr objChild = putArgChild->gtOp.gtOp1;
+ GenTree* objChild = putArgChild->gtOp.gtOp1;
if (objChild->OperGet() == GT_LCL_VAR_ADDR)
{
// We will generate all of the code for the GT_PUTARG_STK, the GT_OBJ and the GT_LCL_VAR_ADDR
@@ -732,7 +732,7 @@ void LinearScan::TreeNodeInfoInitPutArgSplit(GenTreePutArgSplit* argNode, TreeNo
{
assert(argNode->gtOper == GT_PUTARG_SPLIT);
- GenTreePtr putArgChild = argNode->gtOp.gtOp1;
+ GenTree* putArgChild = argNode->gtOp.gtOp1;
// Registers for split argument corresponds to source
info->dstCount = argNode->gtNumRegs;
@@ -760,7 +760,7 @@ void LinearScan::TreeNodeInfoInitPutArgSplit(GenTreePutArgSplit* argNode, TreeNo
for (GenTreeFieldList* fieldListPtr = putArgChild->AsFieldList(); fieldListPtr != nullptr;
fieldListPtr = fieldListPtr->Rest())
{
- GenTreePtr node = fieldListPtr->gtGetOp1();
+ GenTree* node = fieldListPtr->gtGetOp1();
assert(!node->isContained());
LocationInfoListNode* nodeInfo = getLocationInfo(node);
unsigned currentRegCount = nodeInfo->info.dstCount;
@@ -789,7 +789,7 @@ void LinearScan::TreeNodeInfoInitPutArgSplit(GenTreePutArgSplit* argNode, TreeNo
regMaskTP internalMask = RBM_ALLINT & ~argMask;
info->setInternalCandidates(this, internalMask);
- GenTreePtr objChild = putArgChild->gtOp.gtOp1;
+ GenTree* objChild = putArgChild->gtOp.gtOp1;
if (objChild->OperGet() == GT_LCL_VAR_ADDR)
{
// We will generate all of the code for the GT_PUTARG_SPLIT, the GT_OBJ and the GT_LCL_VAR_ADDR
@@ -833,8 +833,8 @@ void LinearScan::TreeNodeInfoInitBlockStore(GenTreeBlk* blkNode, TreeNodeInfo* i
dstAddrInfo = getLocationInfo(dstAddr);
}
assert(info->dstCount == 0);
- GenTreePtr srcAddrOrFill = nullptr;
- bool isInitBlk = blkNode->OperIsInitBlkOp();
+ GenTree* srcAddrOrFill = nullptr;
+ bool isInitBlk = blkNode->OperIsInitBlkOp();
regMaskTP dstAddrRegMask = RBM_NONE;
regMaskTP sourceRegMask = RBM_NONE;
@@ -845,7 +845,7 @@ void LinearScan::TreeNodeInfoInitBlockStore(GenTreeBlk* blkNode, TreeNodeInfo* i
if (isInitBlk)
{
- GenTreePtr initVal = source;
+ GenTree* initVal = source;
if (initVal->OperIsInitVal())
{
assert(initVal->isContained());
diff --git a/src/jit/lsraxarch.cpp b/src/jit/lsraxarch.cpp
index 18240a4f5e..b376788a7a 100644
--- a/src/jit/lsraxarch.cpp
+++ b/src/jit/lsraxarch.cpp
@@ -479,7 +479,7 @@ void LinearScan::TreeNodeInfoInit(GenTree* tree, TreeNodeInfo* info)
case GT_ADDR:
{
// For a GT_ADDR, the child node should not be evaluated into a register
- GenTreePtr child = tree->gtOp.gtOp1;
+ GenTree* child = tree->gtOp.gtOp1;
assert(!isCandidateLocalRef(child));
assert(child->isContained());
assert(info->dstCount == 1);
@@ -862,7 +862,7 @@ void LinearScan::TreeNodeInfoInitCheckByteable(GenTree* tree, TreeNodeInfo* info
// Notes:
// This is used to determine whether to preference the source to the destination register.
//
-bool LinearScan::isRMWRegOper(GenTreePtr tree)
+bool LinearScan::isRMWRegOper(GenTree* tree)
{
// TODO-XArch-CQ: Make this more accurate.
// For now, We assume that most binary operators are of the RMW form.
@@ -1021,8 +1021,8 @@ int LinearScan::TreeNodeInfoInitShiftRotate(GenTree* tree, TreeNodeInfo* info)
// of bits moved gets stored in CL in case
// the number of bits to shift is not a constant.
int srcCount = 0;
- GenTreePtr shiftBy = tree->gtOp.gtOp2;
- GenTreePtr source = tree->gtOp.gtOp1;
+ GenTree* shiftBy = tree->gtOp.gtOp2;
+ GenTree* source = tree->gtOp.gtOp1;
LocationInfoListNode* shiftByInfo = nullptr;
// x64 can encode 8 bits of shift and it will use 5 or 6. (the others are masked off)
// We will allow whatever can be encoded - hope you know what you are doing.
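
The comment above alludes to x86/x64 shift semantics: a variable shift count lives in CL and the hardware masks it to 5 bits for 32-bit operands or 6 bits for 64-bit operands. A small sketch of that masking behaviour in plain C++ (the masks are the architectural ones; the helper names are made up):

    #include <cstdint>
    #include <cstdio>

    // Model of a variable-count SHL on x86/x64: the count is masked to the
    // operand width, matching the comment that "the others are masked off".
    uint32_t shl32(uint32_t value, uint8_t count) { return value << (count & 31); }
    uint64_t shl64(uint64_t value, uint8_t count) { return value << (count & 63); }

    int main()
    {
        // A count of 33 behaves like a count of 1 for a 32-bit shift...
        std::printf("%u\n", shl32(1u, 33));                           // prints 2
        // ...but is honoured as 33 for a 64-bit shift.
        std::printf("%llu\n", (unsigned long long)shl64(1ull, 33));   // prints 8589934592
        return 0;
    }
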
@@ -1062,7 +1062,7 @@ int LinearScan::TreeNodeInfoInitShiftRotate(GenTree* tree, TreeNodeInfo* info)
{
assert((source->OperGet() == GT_LONG) && source->isContained());
- GenTreePtr sourceLo = source->gtOp.gtOp1;
+ GenTree* sourceLo = source->gtOp.gtOp1;
LocationInfoListNode* sourceLoInfo = useList.Begin();
LocationInfoListNode* sourceHiInfo = useList.GetSecond(INDEBUG(source->gtGetOp2()));
@@ -1257,7 +1257,7 @@ void LinearScan::TreeNodeInfoInitCall(GenTreeCall* call, TreeNodeInfo* info)
bool isVarArgs = call->IsVarargs();
// First, count reg args
- for (GenTreePtr list = call->gtCallLateArgs; list; list = list->MoveNext())
+ for (GenTree* list = call->gtCallLateArgs; list; list = list->MoveNext())
{
assert(list->OperIsList());
@@ -1270,7 +1270,7 @@ void LinearScan::TreeNodeInfoInitCall(GenTreeCall* call, TreeNodeInfo* info)
// - a put arg
//
// Note that this property is statically checked by LinearScan::CheckBlock.
- GenTreePtr argNode = list->Current();
+ GenTree* argNode = list->Current();
// Each register argument corresponds to one source.
if (argNode->OperIsPutArgReg())
@@ -1348,10 +1348,10 @@ void LinearScan::TreeNodeInfoInitCall(GenTreeCall* call, TreeNodeInfo* info)
// because the code generator doesn't actually consider it live,
// so it can't be spilled.
- GenTreePtr args = call->gtCallArgs;
+ GenTree* args = call->gtCallArgs;
while (args)
{
- GenTreePtr arg = args->gtOp.gtOp1;
+ GenTree* arg = args->gtOp.gtOp1;
if (!(arg->gtFlags & GTF_LATE_ARG) && !arg)
{
if (arg->IsValue() && !arg->isContained())
@@ -1443,8 +1443,8 @@ void LinearScan::TreeNodeInfoInitBlockStore(GenTreeBlk* blkNode, TreeNodeInfo* i
}
assert(info->dstCount == 0);
info->setInternalCandidates(this, RBM_NONE);
- GenTreePtr srcAddrOrFill = nullptr;
- bool isInitBlk = blkNode->OperIsInitBlkOp();
+ GenTree* srcAddrOrFill = nullptr;
+ bool isInitBlk = blkNode->OperIsInitBlkOp();
regMaskTP dstAddrRegMask = RBM_NONE;
regMaskTP sourceRegMask = RBM_NONE;
@@ -1757,8 +1757,8 @@ void LinearScan::TreeNodeInfoInitPutArgStk(GenTreePutArgStk* putArgStk, TreeNode
#endif // _TARGET_X86_
}
- GenTreePtr src = putArgStk->gtOp1;
- var_types type = src->TypeGet();
+ GenTree* src = putArgStk->gtOp1;
+ var_types type = src->TypeGet();
#if defined(FEATURE_SIMD) && defined(_TARGET_X86_)
// For PutArgStk of a TYP_SIMD12, we need an extra register.
@@ -1778,8 +1778,8 @@ void LinearScan::TreeNodeInfoInitPutArgStk(GenTreePutArgStk* putArgStk, TreeNode
return;
}
- GenTreePtr dst = putArgStk;
- GenTreePtr srcAddr = nullptr;
+ GenTree* dst = putArgStk;
+ GenTree* srcAddr = nullptr;
info->srcCount = GetOperandInfo(src);
@@ -1867,7 +1867,7 @@ void LinearScan::TreeNodeInfoInitLclHeap(GenTree* tree, TreeNodeInfo* info)
// Note: Here we don't need internal register to be different from targetReg.
// Rather, require it to be different from operand's reg.
- GenTreePtr size = tree->gtOp.gtOp1;
+ GenTree* size = tree->gtOp.gtOp1;
if (size->IsCnsIntOrI())
{
assert(size->isContained());
@@ -2618,9 +2618,9 @@ void LinearScan::TreeNodeInfoInitCast(GenTree* tree, TreeNodeInfo* info)
// and that allow the source operand to be either a reg or memop. Given the
// fact that casts from small int to float/double are done as two-level casts,
// the source operand is always guaranteed to be of size 4 or 8 bytes.
- var_types castToType = tree->CastToType();
- GenTreePtr castOp = tree->gtCast.CastOp();
- var_types castOpType = castOp->TypeGet();
+ var_types castToType = tree->CastToType();
+ GenTree* castOp = tree->gtCast.CastOp();
+ var_types castOpType = castOp->TypeGet();
info->srcCount = GetOperandInfo(castOp);
assert(info->dstCount == 1);
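
The recurring comment about two-level casts means a small-int source is first widened to a 4-byte int and only then converted to float/double, so the floating-point conversion never sees a 1- or 2-byte operand. A sketch of the equivalent source-level shape (illustrative; the JIT expresses this with an intermediate cast node):

    #include <cstdio>

    // Two-level cast: small int -> int -> double. The inner widening is what
    // guarantees the floating-point conversion sees a 4-byte operand.
    double castShortToDouble(short value)
    {
        int widened = static_cast<int>(value);   // level 1: small int -> int
        return static_cast<double>(widened);     // level 2: int -> double
    }

    int main()
    {
        std::printf("%f\n", castShortToDouble(-7));   // prints -7.000000
        return 0;
    }
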
@@ -2656,8 +2656,8 @@ void LinearScan::TreeNodeInfoInitGCWriteBarrier(GenTree* tree, TreeNodeInfo* inf
assert(tree->OperGet() == GT_STOREIND);
GenTreeStoreInd* dst = tree->AsStoreInd();
- GenTreePtr addr = dst->Addr();
- GenTreePtr src = dst->Data();
+ GenTree* addr = dst->Addr();
+ GenTree* src = dst->Data();
LocationInfoListNode* addrInfo = getLocationInfo(addr);
LocationInfoListNode* srcInfo = getLocationInfo(src);
@@ -2825,7 +2825,7 @@ void LinearScan::TreeNodeInfoInitIndir(GenTreeIndir* indirTree, TreeNodeInfo* in
// Return Value:
// None.
//
-void LinearScan::TreeNodeInfoInitCmp(GenTreePtr tree, TreeNodeInfo* info)
+void LinearScan::TreeNodeInfoInitCmp(GenTree* tree, TreeNodeInfo* info)
{
assert(tree->OperIsCompare() || tree->OperIs(GT_CMP));
@@ -2841,10 +2841,10 @@ void LinearScan::TreeNodeInfoInitCmp(GenTreePtr tree, TreeNodeInfo* info)
info->setDstCandidates(this, RBM_BYTE_REGS);
#endif // _TARGET_X86_
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtOp.gtOp2;
- var_types op1Type = op1->TypeGet();
- var_types op2Type = op2->TypeGet();
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtOp.gtOp2;
+ var_types op1Type = op1->TypeGet();
+ var_types op2Type = op2->TypeGet();
info->srcCount = appendBinaryLocationInfoToList(tree->AsOp());
}
@@ -2858,7 +2858,7 @@ void LinearScan::TreeNodeInfoInitCmp(GenTreePtr tree, TreeNodeInfo* info)
// Return Value:
// None.
//
-void LinearScan::TreeNodeInfoInitMul(GenTreePtr tree, TreeNodeInfo* info)
+void LinearScan::TreeNodeInfoInitMul(GenTree* tree, TreeNodeInfo* info)
{
#if defined(_TARGET_X86_)
assert(tree->OperIs(GT_MUL, GT_MULHI, GT_MUL_LONG));
diff --git a/src/jit/morph.cpp b/src/jit/morph.cpp
index febf198603..8590d35205 100644
--- a/src/jit/morph.cpp
+++ b/src/jit/morph.cpp
@@ -23,14 +23,14 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
//
// Tries to fold constants and also adds an edge for overflow exception
// returns the morphed tree
-GenTreePtr Compiler::fgMorphCastIntoHelper(GenTreePtr tree, int helper, GenTreePtr oper)
+GenTree* Compiler::fgMorphCastIntoHelper(GenTree* tree, int helper, GenTree* oper)
{
GenTree* result;
/* If the operand is a constant, we'll try to fold it */
if (oper->OperIsConst())
{
- GenTreePtr oldTree = tree;
+ GenTree* oldTree = tree;
tree = gtFoldExprConst(tree); // This may not fold the constant (NaN ...)
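
fgMorphCastIntoHelper first tries to fold the cast when its operand is a constant and only falls back to the helper call when folding does not apply (NaN and similar cases). A small sketch of that try-fold-then-fallback shape, with foldCastToInt as a made-up stand-in for gtFoldExprConst:

    #include <cmath>
    #include <cstdio>
    #include <optional>

    // Attempt to fold a double->int cast at compile time. Returns nothing for
    // values that cannot be folded safely (NaN, out of range), mirroring the
    // "this may not fold the constant" comment above.
    std::optional<int> foldCastToInt(double value)
    {
        if (std::isnan(value) || value < -2147483648.0 || value > 2147483647.0)
            return std::nullopt;           // leave it to the runtime helper
        return static_cast<int>(value);
    }

    int main()
    {
        auto folded = foldCastToInt(41.9);
        std::printf("%d\n", folded ? *folded : -1);                   // 41
        std::printf("%d\n", (int)foldCastToInt(NAN).has_value());     // 0: falls back to the helper
        return 0;
    }
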
@@ -58,7 +58,7 @@ GenTreePtr Compiler::fgMorphCastIntoHelper(GenTreePtr tree, int helper, GenTreeP
* the given argument list.
*/
-GenTreePtr Compiler::fgMorphIntoHelperCall(GenTreePtr tree, int helper, GenTreeArgList* args)
+GenTree* Compiler::fgMorphIntoHelperCall(GenTree* tree, int helper, GenTreeArgList* args)
{
// The helper call ought to be semantically equivalent to the original node, so preserve its VN.
tree->ChangeOper(GT_CALL, GenTree::PRESERVE_VN);
@@ -125,7 +125,7 @@ GenTreePtr Compiler::fgMorphIntoHelperCall(GenTreePtr tree, int helper, GenTreeA
* Determine if a relop must be morphed to a qmark to manifest a boolean value.
* This is done when code generation can't create straight-line code to do it.
*/
-bool Compiler::fgMorphRelopToQmark(GenTreePtr tree)
+bool Compiler::fgMorphRelopToQmark(GenTree* tree)
{
#ifndef LEGACY_BACKEND
return false;
@@ -143,14 +143,14 @@ bool Compiler::fgMorphRelopToQmark(GenTreePtr tree)
#pragma warning(push)
#pragma warning(disable : 21000) // Suppress PREFast warning about overly large function
#endif
-GenTreePtr Compiler::fgMorphCast(GenTreePtr tree)
+GenTree* Compiler::fgMorphCast(GenTree* tree)
{
noway_assert(tree->gtOper == GT_CAST);
noway_assert(genTypeSize(TYP_I_IMPL) == TARGET_POINTER_SIZE);
/* The first sub-operand is the thing being cast */
- GenTreePtr oper = tree->gtCast.CastOp();
+ GenTree* oper = tree->gtCast.CastOp();
if (fgGlobalMorph && (oper->gtOper == GT_ADDR))
{
@@ -408,11 +408,11 @@ GenTreePtr Compiler::fgMorphCast(GenTreePtr tree)
// the gc problem and we allow casts to bytes, longs, etc...
unsigned lclNum = lvaGrabTemp(true DEBUGARG("Cast away GC"));
oper->gtType = TYP_I_IMPL;
- GenTreePtr asg = gtNewTempAssign(lclNum, oper);
+ GenTree* asg = gtNewTempAssign(lclNum, oper);
oper->gtType = srcType;
// do the real cast
- GenTreePtr cast = gtNewCastNode(tree->TypeGet(), gtNewLclvNode(lclNum, TYP_I_IMPL), dstType);
+ GenTree* cast = gtNewCastNode(tree->TypeGet(), gtNewLclvNode(lclNum, TYP_I_IMPL), dstType);
// Generate the comma tree
oper = gtNewOperNode(GT_COMMA, tree->TypeGet(), asg, cast);
@@ -437,7 +437,7 @@ GenTreePtr Compiler::fgMorphCast(GenTreePtr tree)
// allow the cast's operand to be transformed.
if (tree->gtOverflow() && (oper->OperGet() == GT_AND))
{
- GenTreePtr andOp2 = oper->gtOp.gtOp2;
+ GenTree* andOp2 = oper->gtOp.gtOp2;
// Special case to the special case: AND with a casted int.
if ((andOp2->OperGet() == GT_CAST) && (andOp2->gtCast.CastOp()->OperGet() == GT_CNS_INT))
@@ -709,7 +709,7 @@ OPTIMIZECAST:
case GT_CNS_DBL:
case GT_CNS_STR:
{
- GenTreePtr oldTree = tree;
+ GenTree* oldTree = tree;
tree = gtFoldExprConst(tree); // This may not fold the constant (NaN ...)
@@ -769,7 +769,7 @@ OPTIMIZECAST:
// neither oper or commaOp2 can be CSE candidates
if (fgIsCommaThrow(oper) && !gtIsActiveCSE_Candidate(oper)) // oper can not be a CSE candidate
{
- GenTreePtr commaOp2 = oper->gtOp.gtOp2;
+ GenTree* commaOp2 = oper->gtOp.gtOp2;
if (!gtIsActiveCSE_Candidate(commaOp2)) // commaOp2 can not be a CSE candidate
{
@@ -844,12 +844,12 @@ REMOVE_CAST:
* Perform an unwrap operation on a Proxy object
*/
-GenTreePtr Compiler::fgUnwrapProxy(GenTreePtr objRef)
+GenTree* Compiler::fgUnwrapProxy(GenTree* objRef)
{
assert(info.compIsContextful && info.compUnwrapContextful && impIsThis(objRef));
CORINFO_EE_INFO* pInfo = eeGetEEInfo();
- GenTreePtr addTree;
+ GenTree* addTree;
// Perform the unwrap:
//
@@ -881,7 +881,7 @@ GenTreePtr Compiler::fgUnwrapProxy(GenTreePtr objRef)
* due to graph altering modifications such as copy / constant propagation
*/
-unsigned UpdateGT_LISTFlags(GenTreePtr tree)
+unsigned UpdateGT_LISTFlags(GenTree* tree)
{
assert(tree->gtOper == GT_LIST);
@@ -1051,8 +1051,8 @@ fgArgInfo::fgArgInfo(GenTreeCall* newCall, GenTreeCall* oldCall)
oldArgs = &oldArgObjp;
}
- GenTreePtr newCurr;
- GenTreePtr oldCurr;
+ GenTree* newCurr;
+ GenTree* oldCurr;
GenTreeArgList* newParent = nullptr;
GenTreeArgList* oldParent = nullptr;
fgArgTabEntry** oldArgTable = oldArgInfo->argTable;
@@ -1095,7 +1095,7 @@ fgArgInfo::fgArgInfo(GenTreeCall* newCall, GenTreeCall* oldCall)
//
*newArgTabEntry = *oldArgTabEntry;
- // Then update all GenTreePtr fields in the newArgTabEntry
+ // Then update all GenTree* fields in the newArgTabEntry
//
newArgTabEntry->parent = newParent;
@@ -1162,7 +1162,7 @@ fgArgInfo::fgArgInfo(GenTreeCall* newCall, GenTreeCall* oldCall)
newArgTabEntry = argTable[inx];
assert(newArgTabEntry != nullptr);
- // update the "node" GenTreePtr fields in the newArgTabEntry
+ // update the "node" GenTree* fields in the newArgTabEntry
//
assert(newArgTabEntry->node == nullptr); // We previously assigned NULL to this field
@@ -1189,7 +1189,7 @@ void fgArgInfo::AddArg(fgArgTabEntry* curArgTabEntry)
}
fgArgTabEntry* fgArgInfo::AddRegArg(
- unsigned argNum, GenTreePtr node, GenTreePtr parent, regNumber regNum, unsigned numRegs, unsigned alignment)
+ unsigned argNum, GenTree* node, GenTree* parent, regNumber regNum, unsigned numRegs, unsigned alignment)
{
fgArgTabEntry* curArgTabEntry = new (compiler, CMK_fgArgInfo) fgArgTabEntry;
@@ -1219,8 +1219,8 @@ fgArgTabEntry* fgArgInfo::AddRegArg(
#if defined(FEATURE_UNIX_AMD64_STRUCT_PASSING)
fgArgTabEntry* fgArgInfo::AddRegArg(unsigned argNum,
- GenTreePtr node,
- GenTreePtr parent,
+ GenTree* node,
+ GenTree* parent,
regNumber regNum,
unsigned numRegs,
unsigned alignment,
@@ -1248,10 +1248,10 @@ fgArgTabEntry* fgArgInfo::AddRegArg(unsigned
}
#endif // defined(FEATURE_UNIX_AMD64_STRUCT_PASSING)
-fgArgTabEntry* fgArgInfo::AddStkArg(unsigned argNum,
- GenTreePtr node,
- GenTreePtr parent,
- unsigned numSlots,
+fgArgTabEntry* fgArgInfo::AddStkArg(unsigned argNum,
+ GenTree* node,
+ GenTree* parent,
+ unsigned numSlots,
unsigned alignment FEATURE_UNIX_AMD64_STRUCT_PASSING_ONLY_ARG(const bool isStruct))
{
fgArgTabEntry* curArgTabEntry = new (compiler, CMK_fgArgInfo) fgArgTabEntry;
@@ -1299,7 +1299,7 @@ void fgArgInfo::RemorphReset()
}
fgArgTabEntry* fgArgInfo::RemorphRegArg(
- unsigned argNum, GenTreePtr node, GenTreePtr parent, regNumber regNum, unsigned numRegs, unsigned alignment)
+ unsigned argNum, GenTree* node, GenTree* parent, regNumber regNum, unsigned numRegs, unsigned alignment)
{
fgArgTabEntry* curArgTabEntry = nullptr;
unsigned regArgInx = 0;
@@ -1313,8 +1313,8 @@ fgArgTabEntry* fgArgInfo::RemorphRegArg(
break;
}
- bool isRegArg;
- GenTreePtr argx;
+ bool isRegArg;
+ GenTree* argx;
if (curArgTabEntry->parent != nullptr)
{
assert(curArgTabEntry->parent->OperIsList());
@@ -1345,8 +1345,8 @@ fgArgTabEntry* fgArgInfo::RemorphRegArg(
if (curArgTabEntry->node != node)
{
- GenTreePtr argx = nullptr;
- unsigned regIndex = 0;
+ GenTree* argx = nullptr;
+ unsigned regIndex = 0;
/* process the register argument list */
for (GenTreeArgList* list = callTree->gtCall.gtCallLateArgs; list; (regIndex++, list = list->Rest()))
@@ -1369,13 +1369,12 @@ fgArgTabEntry* fgArgInfo::RemorphRegArg(
return curArgTabEntry;
}
-void fgArgInfo::RemorphStkArg(
- unsigned argNum, GenTreePtr node, GenTreePtr parent, unsigned numSlots, unsigned alignment)
+void fgArgInfo::RemorphStkArg(unsigned argNum, GenTree* node, GenTree* parent, unsigned numSlots, unsigned alignment)
{
fgArgTabEntry* curArgTabEntry = nullptr;
bool isRegArg = false;
unsigned regArgInx = 0;
- GenTreePtr argx;
+ GenTree* argx;
unsigned inx;
for (inx = 0; inx < argCount; inx++)
@@ -1419,8 +1418,8 @@ void fgArgInfo::RemorphStkArg(
{
if (isRegArg)
{
- GenTreePtr argx = nullptr;
- unsigned regIndex = 0;
+ GenTree* argx = nullptr;
+ unsigned regIndex = 0;
/* process the register argument list */
for (GenTreeArgList *list = callTree->gtCall.gtCallLateArgs; list; list = list->Rest(), regIndex++)
@@ -1486,7 +1485,7 @@ void fgArgInfo::SplitArg(unsigned argNum, unsigned numRegs, unsigned numSlots)
nextSlotNum += numSlots;
}
-void fgArgInfo::EvalToTmp(unsigned argNum, unsigned tmpNum, GenTreePtr newNode)
+void fgArgInfo::EvalToTmp(unsigned argNum, unsigned tmpNum, GenTree* newNode)
{
fgArgTabEntry* curArgTabEntry = nullptr;
assert(argNum < argCount);
@@ -1514,7 +1513,7 @@ void fgArgInfo::ArgsComplete()
{
fgArgTabEntry* curArgTabEntry = argTable[curInx];
assert(curArgTabEntry != nullptr);
- GenTreePtr argx = curArgTabEntry->node;
+ GenTree* argx = curArgTabEntry->node;
if (curArgTabEntry->regNum == REG_STK)
{
@@ -1787,7 +1786,7 @@ void fgArgInfo::ArgsComplete()
{
fgArgTabEntry* curArgTabEntry = argTable[curInx];
assert(curArgTabEntry != nullptr);
- GenTreePtr argx = curArgTabEntry->node;
+ GenTree* argx = curArgTabEntry->node;
// Examine the register args that are currently not marked needTmp
//
@@ -1897,7 +1896,7 @@ void fgArgInfo::SortArgs()
//
if (!curArgTabEntry->processed)
{
- GenTreePtr argx = curArgTabEntry->node;
+ GenTree* argx = curArgTabEntry->node;
// put constants at the end of the table
//
@@ -1934,7 +1933,7 @@ void fgArgInfo::SortArgs()
//
if (!curArgTabEntry->processed)
{
- GenTreePtr argx = curArgTabEntry->node;
+ GenTree* argx = curArgTabEntry->node;
// put calls at the beginning of the table
//
@@ -2010,7 +2009,7 @@ void fgArgInfo::SortArgs()
//
if (!curArgTabEntry->processed)
{
- GenTreePtr argx = curArgTabEntry->node;
+ GenTree* argx = curArgTabEntry->node;
if ((argx->gtOper == GT_LCL_VAR) || (argx->gtOper == GT_LCL_FLD))
{
@@ -2054,7 +2053,7 @@ void fgArgInfo::SortArgs()
//
if (!curArgTabEntry->processed)
{
- GenTreePtr argx = curArgTabEntry->node;
+ GenTree* argx = curArgTabEntry->node;
// We should have already handled these kinds of args
assert(argx->gtOper != GT_LCL_VAR);
@@ -2162,7 +2161,7 @@ void fgArgInfo::Dump(Compiler* compiler)
// Return Value:
// the newly created temp var tree.
-GenTreePtr Compiler::fgMakeTmpArgNode(
+GenTree* Compiler::fgMakeTmpArgNode(
unsigned tmpVarNum FEATURE_UNIX_AMD64_STRUCT_PASSING_ONLY_ARG(const bool passedInRegisters))
{
LclVarDsc* varDsc = &lvaTable[tmpVarNum];
@@ -2170,8 +2169,8 @@ GenTreePtr Compiler::fgMakeTmpArgNode(
var_types type = varDsc->TypeGet();
// Create a copy of the temp to go into the late argument list
- GenTreePtr arg = gtNewLclvNode(tmpVarNum, type);
- GenTreePtr addrNode = nullptr;
+ GenTree* arg = gtNewLclvNode(tmpVarNum, type);
+ GenTree* addrNode = nullptr;
if (varTypeIsStruct(type))
{
@@ -2284,9 +2283,9 @@ void fgArgInfo::EvalArgsToTemps()
{
fgArgTabEntry* curArgTabEntry = argTable[curInx];
- GenTreePtr argx = curArgTabEntry->node;
- GenTreePtr setupArg = nullptr;
- GenTreePtr defArg;
+ GenTree* argx = curArgTabEntry->node;
+ GenTree* setupArg = nullptr;
+ GenTree* defArg;
#if !FEATURE_FIXED_OUT_ARGS
// Only ever set for FEATURE_FIXED_OUT_ARGS
@@ -2339,7 +2338,7 @@ void fgArgInfo::EvalArgsToTemps()
// not have any side-effects and can be delayed. So instead
// of using a temp for the whole struct, we can just use a temp
// for the operand that has a side-effect
- GenTreePtr operand;
+ GenTree* operand;
if ((argx->gtOp.gtOp2->gtFlags & GTF_ALL_EFFECT) == 0)
{
operand = argx->gtOp.gtOp1;
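
The comment above describes hoisting only the side-effecting half of a two-operand argument into a temp instead of spilling the whole expression. A sketch of that idea at the source level (the helpers and the temp are illustrative stand-ins for the GTF_ALL_EFFECT check and the grabbed lclVar):

    #include <cstdio>

    static int g_counter = 0;
    int sideEffectingLoad() { return ++g_counter; }   // has a side effect, must be evaluated now
    int pureValue() { return 42; }                     // no side effects, can be re-evaluated later

    int main()
    {
        // Conceptually: argx = op1 + op2 where only op1 has a side effect.
        // Rather than storing the whole sum to a temp, evaluate just op1 into
        // a temp now and defer the side-effect-free half.
        int tmp = sideEffectingLoad();   // temp for the side-effecting operand only
        int arg = tmp + pureValue();     // the rest is rebuilt when the argument is used
        std::printf("%d\n", arg);        // 43
        return 0;
    }
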
@@ -2491,7 +2490,7 @@ void fgArgInfo::EvalArgsToTemps()
if (varTypeIsStruct(defArg))
{
// Need a temp to walk any GT_COMMA nodes when searching for the clsHnd
- GenTreePtr defArgTmp = defArg;
+ GenTree* defArgTmp = defArg;
// The GT_OBJ may be a child of a GT_COMMA.
while (defArgTmp->gtOper == GT_COMMA)
@@ -2545,7 +2544,7 @@ void fgArgInfo::EvalArgsToTemps()
{
if (curArgTabEntry->parent)
{
- GenTreePtr parent = curArgTabEntry->parent;
+ GenTree* parent = curArgTabEntry->parent;
/* a normal argument from the list */
noway_assert(parent->OperIsList());
noway_assert(parent->gtOp.gtOp1 == argx);
@@ -2607,7 +2606,7 @@ void fgArgInfo::EvalArgsToTemps()
// Get the late arg for arg at position argIndex.
// argIndex - 0-based position to get late arg for.
// Caller must ensure this position has a late arg.
-GenTreePtr fgArgInfo::GetLateArg(unsigned argIndex)
+GenTree* fgArgInfo::GetLateArg(unsigned argIndex)
{
for (unsigned j = 0; j < this->ArgCount(); j++)
{
@@ -2782,8 +2781,8 @@ GenTree* Compiler::fgInsertCommaFormTemp(GenTree** ppTree, CORINFO_CLASS_HANDLE
#endif
GenTreeCall* Compiler::fgMorphArgs(GenTreeCall* call)
{
- GenTreePtr args;
- GenTreePtr argx;
+ GenTree* args;
+ GenTree* argx;
unsigned flagsSummary = 0;
unsigned genPtrArgCntSav = fgPtrArgCntCur;
@@ -3303,7 +3302,7 @@ GenTreeCall* Compiler::fgMorphArgs(GenTreeCall* call)
bool hasMultiregStructArgs = false;
for (args = call->gtCallArgs; args; args = args->gtOp.gtOp2, argIndex++)
{
- GenTreePtr* parentArgx = &args->gtOp.gtOp1;
+ GenTree** parentArgx = &args->gtOp.gtOp1;
#if FEATURE_MULTIREG_ARGS
if (!hasStructArgument)
@@ -3315,8 +3314,8 @@ GenTreeCall* Compiler::fgMorphArgs(GenTreeCall* call)
#ifndef LEGACY_BACKEND
// Record the index of any nonStandard arg that we may be processing here, as we are
// about to call fgMorphTree on it and fgMorphTree may replace it with a new tree.
- GenTreePtr orig_argx = *parentArgx;
- int nonStandard_index = nonStandardArgs.Find(orig_argx);
+ GenTree* orig_argx = *parentArgx;
+ int nonStandard_index = nonStandardArgs.Find(orig_argx);
#endif // !LEGACY_BACKEND
argx = fgMorphTree(*parentArgx);
@@ -3388,7 +3387,7 @@ GenTreeCall* Compiler::fgMorphArgs(GenTreeCall* call)
passUsingIntRegs = passUsingFloatRegs ? false : (intArgRegNum < MAX_REG_ARG);
}
- GenTreePtr curArg = argx;
+ GenTree* curArg = argx;
// If late args have already been computed, use the node in the argument table.
if (argEntry != NULL && argEntry->isTmp)
{
@@ -3628,8 +3627,8 @@ GenTreeCall* Compiler::fgMorphArgs(GenTreeCall* call)
}
else // We must have a GT_OBJ with a struct type, but the GT_OBJ may be a child of a GT_COMMA
{
- GenTreePtr argObj = argx;
- GenTreePtr* parentOfArgObj = parentArgx;
+ GenTree* argObj = argx;
+ GenTree** parentOfArgObj = parentArgx;
assert(args->OperIsList());
assert(argx == args->Current());
@@ -3722,8 +3721,8 @@ GenTreeCall* Compiler::fgMorphArgs(GenTreeCall* call)
#else // FEATURE_UNIX_AMD64_STRUCT_PASSING
if (!structDesc.passedInRegisters)
{
- GenTreePtr lclVar = fgIsIndirOfAddrOfLocal(argObj);
- bool needCpyBlk = false;
+ GenTree* lclVar = fgIsIndirOfAddrOfLocal(argObj);
+ bool needCpyBlk = false;
if (lclVar != nullptr)
{
// If the struct is promoted to registers, it has to be materialized
@@ -3793,7 +3792,7 @@ GenTreeCall* Compiler::fgMorphArgs(GenTreeCall* call)
#ifdef _TARGET_ARM_
// If we're passing a promoted struct local var,
// we may need to skip some registers due to alignment; record those.
- GenTreePtr lclVar = fgIsIndirOfAddrOfLocal(argObj);
+ GenTree* lclVar = fgIsIndirOfAddrOfLocal(argObj);
if (lclVar != NULL)
{
LclVarDsc* varDsc = &lvaTable[lclVar->gtLclVarCommon.gtLclNum];
@@ -3851,7 +3850,7 @@ GenTreeCall* Compiler::fgMorphArgs(GenTreeCall* call)
// Now see if we can fold *(&X) into X
if (argObj->gtOp.gtOp1->gtOper == GT_ADDR)
{
- GenTreePtr temp = argObj->gtOp.gtOp1->gtOp.gtOp1;
+ GenTree* temp = argObj->gtOp.gtOp1->gtOp.gtOp1;
// Keep the DONT_CSE flag in sync
// (as the addr always marks it for its op1)
@@ -4393,9 +4392,9 @@ GenTreeCall* Compiler::fgMorphArgs(GenTreeCall* call)
destTypeSlot->gtFieldSeq = GetFieldSeqStore()->CreateSingleton(GetRefanyTypeField());
destTypeSlot->gtFlags |= GTF_VAR_DEF;
- GenTreePtr asgPtrSlot = gtNewAssignNode(destPtrSlot, argx->gtOp.gtOp1);
- GenTreePtr asgTypeSlot = gtNewAssignNode(destTypeSlot, argx->gtOp.gtOp2);
- GenTreePtr asg = gtNewOperNode(GT_COMMA, TYP_VOID, asgPtrSlot, asgTypeSlot);
+ GenTree* asgPtrSlot = gtNewAssignNode(destPtrSlot, argx->gtOp.gtOp1);
+ GenTree* asgTypeSlot = gtNewAssignNode(destTypeSlot, argx->gtOp.gtOp2);
+ GenTree* asg = gtNewOperNode(GT_COMMA, TYP_VOID, asgPtrSlot, asgTypeSlot);
// Change the expression to "(tmp=val)"
args->gtOp.gtOp1 = asg;
@@ -4629,9 +4628,9 @@ GenTreeCall* Compiler::fgMorphArgs(GenTreeCall* call)
//
void Compiler::fgMorphSystemVStructArgs(GenTreeCall* call, bool hasStructArgument)
{
- unsigned flagsSummary = 0;
- GenTreePtr args;
- GenTreePtr argx;
+ unsigned flagsSummary = 0;
+ GenTree* args;
+ GenTree* argx;
if (hasStructArgument)
{
@@ -4647,17 +4646,17 @@ void Compiler::fgMorphSystemVStructArgs(GenTreeCall* call, bool hasStructArgumen
bool isLateArg = (args->gtOp.gtOp1->gtFlags & GTF_LATE_ARG) != 0;
fgArgTabEntry* fgEntryPtr = gtArgEntryByNode(call, args->gtOp.gtOp1);
assert(fgEntryPtr != nullptr);
- GenTreePtr argx = fgEntryPtr->node;
- GenTreePtr lateList = nullptr;
- GenTreePtr lateNode = nullptr;
+ GenTree* argx = fgEntryPtr->node;
+ GenTree* lateList = nullptr;
+ GenTree* lateNode = nullptr;
if (isLateArg)
{
- for (GenTreePtr list = call->gtCallLateArgs; list; list = list->MoveNext())
+ for (GenTree* list = call->gtCallLateArgs; list; list = list->MoveNext())
{
assert(list->OperIsList());
- GenTreePtr argNode = list->Current();
+ GenTree* argNode = list->Current();
if (argx == argNode)
{
lateList = list;
@@ -4667,8 +4666,8 @@ void Compiler::fgMorphSystemVStructArgs(GenTreeCall* call, bool hasStructArgumen
}
assert(lateList != nullptr && lateNode != nullptr);
}
- GenTreePtr arg = argx;
- bool argListCreated = false;
+ GenTree* arg = argx;
+ bool argListCreated = false;
var_types type = arg->TypeGet();
@@ -4766,16 +4765,16 @@ void Compiler::fgMorphSystemVStructArgs(GenTreeCall* call, bool hasStructArgumen
bool isLateArg = (args->gtOp.gtOp1->gtFlags & GTF_LATE_ARG) != 0;
fgArgTabEntry* fgEntryPtr = gtArgEntryByNode(call, args->gtOp.gtOp1);
assert(fgEntryPtr != nullptr);
- GenTreePtr argx = fgEntryPtr->node;
- GenTreePtr lateList = nullptr;
- GenTreePtr lateNode = nullptr;
+ GenTree* argx = fgEntryPtr->node;
+ GenTree* lateList = nullptr;
+ GenTree* lateNode = nullptr;
if (isLateArg)
{
- for (GenTreePtr list = call->gtCallLateArgs; list; list = list->MoveNext())
+ for (GenTree* list = call->gtCallLateArgs; list; list = list->MoveNext())
{
assert(list->OperIsList());
- GenTreePtr argNode = list->Current();
+ GenTree* argNode = list->Current();
if (argx == argNode)
{
lateList = list;
@@ -4838,7 +4837,7 @@ void Compiler::fgMorphMultiregStructArgs(GenTreeCall* call)
#endif // !UNIX_AMD64_ABI
#endif
- for (GenTreePtr args = call->gtCallArgs; args != nullptr; args = args->gtOp.gtOp2)
+ for (GenTree* args = call->gtCallArgs; args != nullptr; args = args->gtOp.gtOp2)
{
// For late arguments the arg tree that is overridden is in the gtCallLateArgs list.
// For such late args the gtCallArgList contains the setup arg node (evaluating the arg.)
@@ -4848,17 +4847,17 @@ void Compiler::fgMorphMultiregStructArgs(GenTreeCall* call)
bool isLateArg = (args->gtOp.gtOp1->gtFlags & GTF_LATE_ARG) != 0;
fgArgTabEntry* fgEntryPtr = gtArgEntryByNode(call, args->gtOp.gtOp1);
assert(fgEntryPtr != nullptr);
- GenTreePtr argx = fgEntryPtr->node;
- GenTreePtr lateList = nullptr;
- GenTreePtr lateNode = nullptr;
+ GenTree* argx = fgEntryPtr->node;
+ GenTree* lateList = nullptr;
+ GenTree* lateNode = nullptr;
if (isLateArg)
{
- for (GenTreePtr list = call->gtCallLateArgs; list; list = list->MoveNext())
+ for (GenTree* list = call->gtCallLateArgs; list; list = list->MoveNext())
{
assert(list->OperIsList());
- GenTreePtr argNode = list->Current();
+ GenTree* argNode = list->Current();
if (argx == argNode)
{
lateList = list;
@@ -4869,7 +4868,7 @@ void Compiler::fgMorphMultiregStructArgs(GenTreeCall* call)
assert(lateList != nullptr && lateNode != nullptr);
}
- GenTreePtr arg = argx;
+ GenTree* arg = argx;
if (varTypeIsStruct(arg->TypeGet()))
{
@@ -4928,7 +4927,7 @@ void Compiler::fgMorphMultiregStructArgs(GenTreeCall* call)
// indirections.
// Currently the implementation handles ARM64/ARM and will NYI for other architectures.
//
-GenTreePtr Compiler::fgMorphMultiregStructArg(GenTreePtr arg, fgArgTabEntry* fgEntryPtr)
+GenTree* Compiler::fgMorphMultiregStructArg(GenTree* arg, fgArgTabEntry* fgEntryPtr)
{
assert(varTypeIsStruct(arg->TypeGet()));
@@ -4977,7 +4976,7 @@ GenTreePtr Compiler::fgMorphMultiregStructArg(GenTreePtr arg, fgArgTabEntry* fgE
// Examine 'arg' and setup argValue objClass and structSize
//
CORINFO_CLASS_HANDLE objClass = NO_CLASS_HANDLE;
- GenTreePtr argValue = arg; // normally argValue will be arg, but see right below
+ GenTree* argValue = arg; // normally argValue will be arg, but see right below
unsigned structSize = 0;
if (arg->OperGet() == GT_OBJ)
@@ -5200,8 +5199,8 @@ GenTreePtr Compiler::fgMorphMultiregStructArg(GenTreePtr arg, fgArgTabEntry* fgE
{
// We can use the struct promoted field as the two arguments
- GenTreePtr loLclVar = gtNewLclvNode(loVarNum, loType, loVarNum);
- GenTreePtr hiLclVar = gtNewLclvNode(hiVarNum, hiType, hiVarNum);
+ GenTree* loLclVar = gtNewLclvNode(loVarNum, loType, loVarNum);
+ GenTree* hiLclVar = gtNewLclvNode(hiVarNum, hiType, hiVarNum);
// Create a new tree for 'arg'
// replace the existing LDOBJ(ADDR(LCLVAR))
@@ -5270,7 +5269,7 @@ GenTreePtr Compiler::fgMorphMultiregStructArg(GenTreePtr arg, fgArgTabEntry* fgE
// We can use the struct promoted field as arguments
for (unsigned inx = 0; inx < elemCount; inx++)
{
- GenTreePtr lclVar = gtNewLclvNode(varNums[inx], varType[inx], varNums[inx]);
+ GenTree* lclVar = gtNewLclvNode(varNums[inx], varType[inx], varNums[inx]);
// Create a new tree for 'arg'
// replace the existing LDOBJ(ADDR(LCLVAR))
listEntry = new (this, GT_FIELD_LIST) GenTreeFieldList(lclVar, offset, varType[inx], listEntry);
@@ -5354,8 +5353,8 @@ GenTreePtr Compiler::fgMorphMultiregStructArg(GenTreePtr arg, fgArgTabEntry* fgE
GenTreeFieldList* listEntry = nullptr;
for (unsigned inx = 0; inx < elemCount; inx++)
{
- elemSize = genTypeSize(type[inx]);
- GenTreePtr nextLclFld = gtNewLclFldNode(varNum, type[inx], offset);
+ elemSize = genTypeSize(type[inx]);
+ GenTree* nextLclFld = gtNewLclFldNode(varNum, type[inx], offset);
listEntry = new (this, GT_FIELD_LIST) GenTreeFieldList(nextLclFld, offset, type[inx], listEntry);
if (newArg == nullptr)
{
@@ -5369,7 +5368,7 @@ GenTreePtr Compiler::fgMorphMultiregStructArg(GenTreePtr arg, fgArgTabEntry* fgE
else if (argValue->OperGet() == GT_OBJ)
{
GenTreeObj* argObj = argValue->AsObj();
- GenTreePtr baseAddr = argObj->gtOp1;
+ GenTree* baseAddr = argObj->gtOp1;
var_types addrType = baseAddr->TypeGet();
if (baseAddr->OperGet() == GT_ADDR)
@@ -5394,11 +5393,11 @@ GenTreePtr Compiler::fgMorphMultiregStructArg(GenTreePtr arg, fgArgTabEntry* fgE
GenTreeFieldList* listEntry = nullptr;
for (unsigned inx = 0; inx < elemCount; inx++)
{
- elemSize = genTypeSize(type[inx]);
- GenTreePtr curAddr = baseAddr;
+ elemSize = genTypeSize(type[inx]);
+ GenTree* curAddr = baseAddr;
if (offset != 0)
{
- GenTreePtr baseAddrDup = gtCloneExpr(baseAddr);
+ GenTree* baseAddrDup = gtCloneExpr(baseAddr);
noway_assert(baseAddrDup != nullptr);
curAddr = gtNewOperNode(GT_ADD, addrType, baseAddrDup, gtNewIconNode(offset, TYP_I_IMPL));
}
@@ -5406,7 +5405,7 @@ GenTreePtr Compiler::fgMorphMultiregStructArg(GenTreePtr arg, fgArgTabEntry* fgE
{
curAddr = baseAddr;
}
- GenTreePtr curItem = gtNewIndir(type[inx], curAddr);
+ GenTree* curItem = gtNewIndir(type[inx], curAddr);
// For safety all GT_IND should have at least GT_GLOB_REF set.
curItem->gtFlags |= GTF_GLOB_REF;
@@ -5563,7 +5562,7 @@ void Compiler::fgMakeOutgoingStructArgCopy(
}
// Create a reference to the temp
- GenTreePtr dest = gtNewLclvNode(tmp, lvaTable[tmp].lvType);
+ GenTree* dest = gtNewLclvNode(tmp, lvaTable[tmp].lvType);
dest->gtFlags |= (GTF_DONT_CSE | GTF_VAR_DEF); // This is a def of the local, "entire" by construction.
// TODO-Cleanup: This probably shouldn't be done here because arg morphing is done prior
@@ -5581,21 +5580,21 @@ void Compiler::fgMakeOutgoingStructArgCopy(
}
// Copy the valuetype to the temp
- unsigned size = info.compCompHnd->getClassSize(copyBlkClass);
- GenTreePtr copyBlk = gtNewBlkOpNode(dest, argx, size, false /* not volatile */, true /* copyBlock */);
- copyBlk = fgMorphCopyBlock(copyBlk);
+ unsigned size = info.compCompHnd->getClassSize(copyBlkClass);
+ GenTree* copyBlk = gtNewBlkOpNode(dest, argx, size, false /* not volatile */, true /* copyBlock */);
+ copyBlk = fgMorphCopyBlock(copyBlk);
#if FEATURE_FIXED_OUT_ARGS
// Do the copy early, and evaluate the temp later (see EvalArgsToTemps)
// When on Unix create LCL_FLD for structs passed in more than one registers. See fgMakeTmpArgNode
- GenTreePtr arg = copyBlk;
+ GenTree* arg = copyBlk;
#else // FEATURE_FIXED_OUT_ARGS
// Structs are always on the stack, and thus never need temps
// so we have to put the copy and temp all into one expression
- GenTreePtr arg = fgMakeTmpArgNode(tmp FEATURE_UNIX_AMD64_STRUCT_PASSING_ONLY_ARG(structDescPtr->passedInRegisters));
+ GenTree* arg = fgMakeTmpArgNode(tmp FEATURE_UNIX_AMD64_STRUCT_PASSING_ONLY_ARG(structDescPtr->passedInRegisters));
// Change the expression to "(tmp=val),tmp"
arg = gtNewOperNode(GT_COMMA, arg->TypeGet(), copyBlk, arg);
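
When there is no fixed out-arg area, the copy of the struct and the use of the temp are fused into one comma expression, "(tmp = val), tmp", so the copy happens immediately before the temp is read. The comma operator in C++ gives the same evaluate-then-yield shape (a simplified illustration, not the JIT's node construction):

    #include <cstdio>
    #include <cstring>

    struct Payload { char bytes[16]; };

    Payload makeValue()
    {
        Payload p;
        std::memset(p.bytes, 7, sizeof(p.bytes));
        return p;
    }

    int main()
    {
        Payload tmp;
        // "(tmp = val), tmp": copy the value into the temp, then the whole
        // expression evaluates to the temp itself.
        Payload arg = (tmp = makeValue(), tmp);
        std::printf("%d\n", arg.bytes[0]);   // 7
        return 0;
    }
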
@@ -5656,7 +5655,7 @@ void Compiler::fgAddSkippedRegsInPromotedStructArg(LclVarDsc* varDsc,
// change the gtType to the precomputed native return type
// requires that callNode currently has a struct type
//
-void Compiler::fgFixupStructReturn(GenTreePtr callNode)
+void Compiler::fgFixupStructReturn(GenTree* callNode)
{
assert(varTypeIsStruct(callNode));
@@ -5735,10 +5734,10 @@ void Compiler::fgFixupStructReturn(GenTreePtr callNode)
#if REARRANGE_ADDS
-void Compiler::fgMoveOpsLeft(GenTreePtr tree)
+void Compiler::fgMoveOpsLeft(GenTree* tree)
{
- GenTreePtr op1;
- GenTreePtr op2;
+ GenTree* op1;
+ GenTree* op2;
genTreeOps oper;
do
@@ -5790,8 +5789,8 @@ void Compiler::fgMoveOpsLeft(GenTreePtr tree)
noway_assert(!tree->gtOverflowEx() && !op2->gtOverflowEx());
- GenTreePtr ad1 = op2->gtOp.gtOp1;
- GenTreePtr ad2 = op2->gtOp.gtOp2;
+ GenTree* ad1 = op2->gtOp.gtOp1;
+ GenTree* ad2 = op2->gtOp.gtOp2;
// Compiler::optOptimizeBools() can create GT_OR of two GC pointers yielding a GT_INT
// We cannot reorder such GT_OR trees
@@ -5804,7 +5803,7 @@ void Compiler::fgMoveOpsLeft(GenTreePtr tree)
/* Change "(x op (y op z))" to "(x op y) op z" */
/* ie. "(op1 op (ad1 op ad2))" to "(op1 op ad1) op ad2" */
- GenTreePtr new_op1 = op2;
+ GenTree* new_op1 = op2;
new_op1->gtOp.gtOp1 = op1;
new_op1->gtOp.gtOp2 = ad1;
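
fgMoveOpsLeft reassociates "(x op (y op z))" into "((x op y) op z)" for associative operators so that chains lean to the left. A tiny tree sketch of that rotation (the Node struct is illustrative, not the JIT's GenTree):

    #include <cstdio>

    struct Node
    {
        char  op;      // '+' for interior nodes, 0 for leaves
        int   value;   // leaf value
        Node* left;
        Node* right;
    };

    // Rotate "(x + (y + z))" into "((x + y) + z)". Reuses the existing nodes,
    // like the in-place rewrite above, and assumes both operators match and
    // are associative.
    Node* moveOpsLeft(Node* tree)
    {
        Node* op1 = tree->left;
        Node* op2 = tree->right;   // op2 is "(y + z)"
        Node* ad1 = op2->left;
        Node* ad2 = op2->right;

        op2->left   = op1;         // op2 becomes "(x + y)"
        op2->right  = ad1;
        tree->left  = op2;         // tree becomes "((x + y) + z)"
        tree->right = ad2;
        return tree;
    }

    int eval(Node* n) { return n->op ? eval(n->left) + eval(n->right) : n->value; }

    int main()
    {
        Node x{0, 1, nullptr, nullptr}, y{0, 2, nullptr, nullptr}, z{0, 3, nullptr, nullptr};
        Node inner{'+', 0, &y, &z};
        Node root{'+', 0, &x, &inner};
        std::printf("%d\n", eval(moveOpsLeft(&root)));   // still 6
        return 0;
    }
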
@@ -5975,7 +5974,7 @@ BasicBlock* Compiler::fgSetRngChkTargetInner(SpecialCodeKind kind, bool delay, u
const int MAX_ARR_COMPLEXITY = 4;
const int MAX_INDEX_COMPLEXITY = 4;
-GenTreePtr Compiler::fgMorphArrayIndex(GenTreePtr tree)
+GenTree* Compiler::fgMorphArrayIndex(GenTree* tree)
{
noway_assert(tree->gtOper == GT_INDEX);
GenTreeIndex* asIndex = tree->AsIndex();
@@ -6075,21 +6074,21 @@ GenTreePtr Compiler::fgMorphArrayIndex(GenTreePtr tree)
}
#endif // LEGACY_BACKEND
- GenTreePtr arrRef = asIndex->Arr();
- GenTreePtr index = asIndex->Index();
+ GenTree* arrRef = asIndex->Arr();
+ GenTree* index = asIndex->Index();
bool chkd = ((tree->gtFlags & GTF_INX_RNGCHK) != 0); // if false, range checking will be disabled
bool nCSE = ((tree->gtFlags & GTF_DONT_CSE) != 0);
- GenTreePtr arrRefDefn = nullptr; // non-NULL if we need to allocate a temp for the arrRef expression
- GenTreePtr indexDefn = nullptr; // non-NULL if we need to allocate a temp for the index expression
- GenTreePtr bndsChk = nullptr;
+ GenTree* arrRefDefn = nullptr; // non-NULL if we need to allocate a temp for the arrRef expression
+ GenTree* indexDefn = nullptr; // non-NULL if we need to allocate a temp for the index expression
+ GenTree* bndsChk = nullptr;
// If we're doing range checking, introduce a GT_ARR_BOUNDS_CHECK node for the address.
if (chkd)
{
- GenTreePtr arrRef2 = nullptr; // The second copy will be used in array address expression
- GenTreePtr index2 = nullptr;
+ GenTree* arrRef2 = nullptr; // The second copy will be used in array address expression
+ GenTree* index2 = nullptr;
// If the arrRef expression involves an assignment, a call or reads from global memory,
// then we *must* allocate a temporary in which to "localize" those values,
@@ -6176,7 +6175,7 @@ GenTreePtr Compiler::fgMorphArrayIndex(GenTreePtr tree)
// Create the "addr" which is "*(arrRef + ((index * elemSize) + elemOffs))"
- GenTreePtr addr;
+ GenTree* addr;
#ifdef _TARGET_64BIT_
// Widen 'index' on 64-bit targets
@@ -6196,7 +6195,7 @@ GenTreePtr Compiler::fgMorphArrayIndex(GenTreePtr tree)
/* Scale the index value if necessary */
if (elemSize > 1)
{
- GenTreePtr size = gtNewIconNode(elemSize, TYP_I_IMPL);
+ GenTree* size = gtNewIconNode(elemSize, TYP_I_IMPL);
// Fix 392756 WP7 Crossgen
//
@@ -6220,7 +6219,7 @@ GenTreePtr Compiler::fgMorphArrayIndex(GenTreePtr tree)
/* Add the first element's offset */
- GenTreePtr cns = gtNewIconNode(elemOffs, TYP_I_IMPL);
+ GenTree* cns = gtNewIconNode(elemOffs, TYP_I_IMPL);
addr = gtNewOperNode(GT_ADD, TYP_BYREF, addr, cns);
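
The hunks above build the element address as *(arrRef + ((index * elemSize) + elemOffs)), widening the index on 64-bit targets and scaling it by the element size before adding the first element's offset. A small sketch of the same arithmetic in plain C++ (elemOffs stands for the offset of the first element past the array header; the constants are illustrative):

    #include <cstdint>
    #include <cstdio>

    // Compute the address of arr[index] the way the morpher describes:
    // arrRef + ((index * elemSize) + elemOffs).
    uint8_t* elementAddress(uint8_t* arrRef, int32_t index, size_t elemSize, size_t elemOffs)
    {
        int64_t widenedIndex = static_cast<int64_t>(index);   // widen 'index' on 64-bit targets
        return arrRef + (static_cast<int64_t>(elemSize) * widenedIndex + static_cast<int64_t>(elemOffs));
    }

    int main()
    {
        uint8_t fakeObject[64] = {};
        // Assume a 16-byte header before the first 4-byte element (illustrative layout).
        uint8_t* elem2 = elementAddress(fakeObject, 2, 4, 16);
        std::printf("%td\n", elem2 - fakeObject);   // 24
        return 0;
    }
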
@@ -6260,7 +6259,7 @@ GenTreePtr Compiler::fgMorphArrayIndex(GenTreePtr tree)
// Remember this 'indTree' that we just created, as we still need to attach the fieldSeq information to it.
- GenTreePtr indTree = tree;
+ GenTree* indTree = tree;
// Did we create a bndsChk tree?
if (bndsChk)
@@ -6299,7 +6298,7 @@ GenTreePtr Compiler::fgMorphArrayIndex(GenTreePtr tree)
// or it could be left unchanged. If it is unchanged then we should not return,
// instead we should proceed to attaching fieldSeq info, etc...
//
- GenTreePtr arrElem = tree->gtEffectiveVal();
+ GenTree* arrElem = tree->gtEffectiveVal();
if (fgIsCommaThrow(tree))
{
@@ -6316,7 +6315,7 @@ GenTreePtr Compiler::fgMorphArrayIndex(GenTreePtr tree)
assert(addr->TypeGet() == TYP_BYREF);
- GenTreePtr cnsOff = nullptr;
+ GenTree* cnsOff = nullptr;
if (addr->OperGet() == GT_ADD)
{
if (addr->gtOp.gtOp2->gtOper == GT_CNS_INT)
@@ -6328,7 +6327,7 @@ GenTreePtr Compiler::fgMorphArrayIndex(GenTreePtr tree)
while ((addr->OperGet() == GT_ADD) || (addr->OperGet() == GT_SUB))
{
assert(addr->TypeGet() == TYP_BYREF);
- GenTreePtr index = addr->gtOp.gtOp2;
+ GenTree* index = addr->gtOp.gtOp2;
// Label any constant array index contributions with #ConstantIndex and any LclVars with GTF_VAR_ARR_INDEX
index->LabelIndex(this);
@@ -6381,7 +6380,7 @@ GenTreePtr Compiler::fgMorphArrayIndex(GenTreePtr tree)
* so we don't need this code.
*
*/
-GenTreePtr Compiler::fgMorphStackArgForVarArgs(unsigned lclNum, var_types varType, unsigned lclOffs)
+GenTree* Compiler::fgMorphStackArgForVarArgs(unsigned lclNum, var_types varType, unsigned lclOffs)
{
/* For the fixed stack arguments of a varargs function, we need to go
through the varargs cookies to access them, except for the
@@ -6392,13 +6391,13 @@ GenTreePtr Compiler::fgMorphStackArgForVarArgs(unsigned lclNum, var_types varTyp
if (varDsc->lvIsParam && !varDsc->lvIsRegArg && lclNum != lvaVarargsHandleArg)
{
// Create a node representing the local pointing to the base of the args
- GenTreePtr ptrArg =
+ GenTree* ptrArg =
gtNewOperNode(GT_SUB, TYP_I_IMPL, gtNewLclvNode(lvaVarargsBaseOfStkArgs, TYP_I_IMPL),
gtNewIconNode(varDsc->lvStkOffs - codeGen->intRegState.rsCalleeRegArgCount * REGSIZE_BYTES +
lclOffs));
// Access the argument through the local
- GenTreePtr tree;
+ GenTree* tree;
if (varTypeIsStruct(varType))
{
tree = gtNewBlockVal(ptrArg, varDsc->lvExactSize);
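
For x86 varargs methods, the fixed stack arguments are addressed relative to the varargs base-of-stack-args local, subtracting the argument's offset adjusted for the callee register-arg area, exactly as the GT_SUB above encodes. A sketch of that address computation with made-up values (REGSIZE_BYTES is 4 on x86; the offsets are illustrative):

    #include <cstdio>

    constexpr int REGSIZE_BYTES = 4;   // x86

    // Matches the GT_SUB built above:
    //   base - (stkOffs - calleeRegArgCount * REGSIZE_BYTES + lclOffs)
    int stackArgAddress(int varargsBase, int stkOffs, int calleeRegArgCount, int lclOffs)
    {
        return varargsBase - (stkOffs - calleeRegArgCount * REGSIZE_BYTES + lclOffs);
    }

    int main()
    {
        // Hypothetical: base at 100, arg homed at stack offset 16, 2 register args, field offset 4.
        std::printf("%d\n", stackArgAddress(100, 16, 2, 4));   // 88
        return 0;
    }
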
@@ -6426,7 +6425,7 @@ GenTreePtr Compiler::fgMorphStackArgForVarArgs(unsigned lclNum, var_types varTyp
* Transform the given GT_LCL_VAR tree for code generation.
*/
-GenTreePtr Compiler::fgMorphLocalVar(GenTreePtr tree, bool forceRemorph)
+GenTree* Compiler::fgMorphLocalVar(GenTree* tree, bool forceRemorph)
{
assert(tree->gtOper == GT_LCL_VAR);
@@ -6442,7 +6441,7 @@ GenTreePtr Compiler::fgMorphLocalVar(GenTreePtr tree, bool forceRemorph)
#ifdef _TARGET_X86_
if (info.compIsVarArgs)
{
- GenTreePtr newTree = fgMorphStackArgForVarArgs(lclNum, varType, 0);
+ GenTree* newTree = fgMorphStackArgForVarArgs(lclNum, varType, 0);
if (newTree != nullptr)
{
if (newTree->OperIsBlk() && ((tree->gtFlags & GTF_VAR_DEF) == 0))
@@ -6520,13 +6519,13 @@ unsigned Compiler::fgGetBigOffsetMorphingTemp(var_types type)
* Transform the given GT_FIELD tree for code generation.
*/
-GenTreePtr Compiler::fgMorphField(GenTreePtr tree, MorphAddrContext* mac)
+GenTree* Compiler::fgMorphField(GenTree* tree, MorphAddrContext* mac)
{
assert(tree->gtOper == GT_FIELD);
CORINFO_FIELD_HANDLE symHnd = tree->gtField.gtFldHnd;
unsigned fldOffset = tree->gtField.gtFldOffset;
- GenTreePtr objRef = tree->gtField.gtFldObj;
+ GenTree* objRef = tree->gtField.gtFldObj;
bool fieldMayOverlap = false;
bool objIsLocal = false;
@@ -6552,7 +6551,7 @@ GenTreePtr Compiler::fgMorphField(GenTreePtr tree, MorphAddrContext* mac)
// if this field belongs to simd struct, translate it to simd instrinsic.
if (mac == nullptr)
{
- GenTreePtr newTree = fgMorphFieldToSIMDIntrinsicGet(tree);
+ GenTree* newTree = fgMorphFieldToSIMDIntrinsicGet(tree);
if (newTree != tree)
{
newTree = fgMorphSmpOp(newTree);
@@ -6573,7 +6572,7 @@ GenTreePtr Compiler::fgMorphField(GenTreePtr tree, MorphAddrContext* mac)
if (objRef)
{
- GenTreePtr addr;
+ GenTree* addr;
objIsLocal = objRef->IsLocal();
if (tree->gtFlags & GTF_IND_TLS_REF)
@@ -6662,7 +6661,7 @@ GenTreePtr Compiler::fgMorphField(GenTreePtr tree, MorphAddrContext* mac)
var_types objRefType = objRef->TypeGet();
- GenTreePtr comma = nullptr;
+ GenTree* comma = nullptr;
// NULL mac means we encounter the GT_FIELD first. This denotes a dereference of the field,
// and thus is equivalent to a MACK_Ind with zero offset.
@@ -6726,8 +6725,8 @@ GenTreePtr Compiler::fgMorphField(GenTreePtr tree, MorphAddrContext* mac)
//
// Create the "comma" subtree
//
- GenTreePtr asg = nullptr;
- GenTreePtr nullchk;
+ GenTree* asg = nullptr;
+ GenTree* nullchk;
unsigned lclNum;
@@ -6745,8 +6744,8 @@ GenTreePtr Compiler::fgMorphField(GenTreePtr tree, MorphAddrContext* mac)
// Create the "nullchk" node.
// Make it TYP_BYTE so we only dereference it for 1 byte.
- GenTreePtr lclVar = gtNewLclvNode(lclNum, objRefType);
- nullchk = new (this, GT_NULLCHECK) GenTreeIndir(GT_NULLCHECK, TYP_BYTE, lclVar, nullptr);
+ GenTree* lclVar = gtNewLclvNode(lclNum, objRefType);
+ nullchk = new (this, GT_NULLCHECK) GenTreeIndir(GT_NULLCHECK, TYP_BYTE, lclVar, nullptr);
nullchk->gtFlags |= GTF_DONT_CSE; // Don't try to create a CSE for these TYP_BYTE indirections
@@ -6826,7 +6825,7 @@ GenTreePtr Compiler::fgMorphField(GenTreePtr tree, MorphAddrContext* mac)
//
// Create "comma2" node and link it to "tree".
//
- GenTreePtr comma2;
+ GenTree* comma2;
comma2 = gtNewOperNode(GT_COMMA,
addr->TypeGet(), // The type of "comma2" node is the same as the type of "addr" node.
comma, addr);
@@ -6882,7 +6881,7 @@ GenTreePtr Compiler::fgMorphField(GenTreePtr tree, MorphAddrContext* mac)
// then pIdAddr will be NULL and
// IdValue will be the actual TLS DLL index ID
//
- GenTreePtr dllRef = nullptr;
+ GenTree* dllRef = nullptr;
if (pIdAddr == nullptr)
{
if (IdValue != 0)
@@ -6902,7 +6901,7 @@ GenTreePtr Compiler::fgMorphField(GenTreePtr tree, MorphAddrContext* mac)
// Mark this ICON as a TLS_HDL, codegen will use FS:[cns]
- GenTreePtr tlsRef = gtNewIconHandleNode(WIN32_TLS_SLOTS, GTF_ICON_TLS_HDL);
+ GenTree* tlsRef = gtNewIconHandleNode(WIN32_TLS_SLOTS, GTF_ICON_TLS_HDL);
// Translate GTF_FLD_INITCLASS to GTF_ICON_INITCLASS
if ((tree->gtFlags & GTF_FLD_INITCLASS) != 0)
@@ -6926,7 +6925,7 @@ GenTreePtr Compiler::fgMorphField(GenTreePtr tree, MorphAddrContext* mac)
{
FieldSeqNode* fieldSeq =
fieldMayOverlap ? FieldSeqStore::NotAField() : GetFieldSeqStore()->CreateSingleton(symHnd);
- GenTreePtr fldOffsetNode = new (this, GT_CNS_INT) GenTreeIntCon(TYP_INT, fldOffset, fieldSeq);
+ GenTree* fldOffsetNode = new (this, GT_CNS_INT) GenTreeIntCon(TYP_INT, fldOffset, fieldSeq);
/* Add the TLS static field offset to the address */
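
For TLS-based statics the morpher builds the address in stages: take the per-thread TLS slot array (FS:[WIN32_TLS_SLOTS] on x86), optionally index it by the module's TLS DLL index (dllRef), then add the static field's offset within the module's TLS block. A pointer-arithmetic sketch of that chain with invented values (the real indices and offsets come from the EE):

    #include <cstdint>
    #include <cstdio>

    // Toy model: each "module" owns a TLS block, and a per-thread slot array
    // points at those blocks.
    static uint8_t  moduleTlsBlock[64];
    static uint8_t* tlsSlots[4] = {nullptr, moduleTlsBlock, nullptr, nullptr};

    uint8_t* tlsStaticFieldAddress(uint8_t** slots, unsigned dllIndex, unsigned fieldOffset)
    {
        uint8_t* tlsBase = slots[dllIndex];   // dllRef: index the slot array by the TLS DLL index
        return tlsBase + fieldOffset;         // then add the static field's offset
    }

    int main()
    {
        uint8_t* field = tlsStaticFieldAddress(tlsSlots, 1, 16);
        std::printf("%td\n", field - moduleTlsBlock);   // 16
        return 0;
    }
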
@@ -6960,8 +6959,8 @@ GenTreePtr Compiler::fgMorphField(GenTreePtr tree, MorphAddrContext* mac)
// The address is not directly addressable, so force it into a
// constant, so we handle it properly
- GenTreePtr addr = gtNewIconHandleNode((size_t)fldAddr, GTF_ICON_STATIC_HDL);
- addr->gtType = TYP_I_IMPL;
+ GenTree* addr = gtNewIconHandleNode((size_t)fldAddr, GTF_ICON_STATIC_HDL);
+ addr->gtType = TYP_I_IMPL;
FieldSeqNode* fieldSeq =
fieldMayOverlap ? FieldSeqStore::NotAField() : GetFieldSeqStore()->CreateSingleton(symHnd);
addr->gtIntCon.gtFieldSeq = fieldSeq;
@@ -7000,7 +6999,7 @@ GenTreePtr Compiler::fgMorphField(GenTreePtr tree, MorphAddrContext* mac)
}
else
{
- GenTreePtr addr = gtNewIconHandleNode((size_t)pFldAddr, GTF_ICON_STATIC_HDL);
+ GenTree* addr = gtNewIconHandleNode((size_t)pFldAddr, GTF_ICON_STATIC_HDL);
// Translate GTF_FLD_INITCLASS to GTF_ICON_INITCLASS
if ((tree->gtFlags & GTF_FLD_INITCLASS) != 0)
@@ -7015,8 +7014,8 @@ GenTreePtr Compiler::fgMorphField(GenTreePtr tree, MorphAddrContext* mac)
// a GC type and the handle to it is a TYP_BYREF in the GC heap
// because handles to statics now go into the large object heap
- var_types handleTyp = (var_types)(varTypeIsGC(tree->TypeGet()) ? TYP_BYREF : TYP_I_IMPL);
- GenTreePtr op1 = gtNewOperNode(GT_IND, handleTyp, addr);
+ var_types handleTyp = (var_types)(varTypeIsGC(tree->TypeGet()) ? TYP_BYREF : TYP_I_IMPL);
+ GenTree* op1 = gtNewOperNode(GT_IND, handleTyp, addr);
op1->gtFlags |= GTF_IND_INVARIANT;
tree->SetOper(GT_IND);
@@ -7031,7 +7030,7 @@ GenTreePtr Compiler::fgMorphField(GenTreePtr tree, MorphAddrContext* mac)
// that the logic above does its own checking to determine whether a nullcheck is needed.
tree->gtFlags &= ~GTF_IND_ARR_LEN;
- GenTreePtr res = fgMorphSmpOp(tree);
+ GenTree* res = fgMorphSmpOp(tree);
// If we have a struct type, this node would previously have been under a GT_ADDR,
// and therefore would have been marked GTF_DONT_CSE.
@@ -7043,7 +7042,7 @@ GenTreePtr Compiler::fgMorphField(GenTreePtr tree, MorphAddrContext* mac)
if (fldOffset == 0 && res->OperGet() == GT_IND)
{
- GenTreePtr addr = res->gtOp.gtOp1;
+ GenTree* addr = res->gtOp.gtOp1;
// Since we don't make a constant zero to attach the field sequence to, associate it with the "addr" node.
FieldSeqNode* fieldSeq =
fieldMayOverlap ? FieldSeqStore::NotAField() : GetFieldSeqStore()->CreateSingleton(symHnd);
@@ -7433,11 +7432,11 @@ bool Compiler::fgCanFastTailCall(GenTreeCall* callee)
bool hasHfaArg = false;
size_t nCalleeArgs = calleeArgRegCount; // Keep track of how many args we have.
size_t calleeStackSize = 0;
- for (GenTreePtr args = callee->gtCallArgs; (args != nullptr); args = args->gtOp.gtOp2)
+ for (GenTree* args = callee->gtCallArgs; (args != nullptr); args = args->gtOp.gtOp2)
{
++nCalleeArgs;
assert(args->OperIsList());
- GenTreePtr argx = args->gtOp.gtOp1;
+ GenTree* argx = args->gtOp.gtOp1;
if (varTypeIsStruct(argx))
{
@@ -7707,10 +7706,10 @@ void Compiler::fgMorphTailCall(GenTreeCall* call)
assert(call->IsVirtual() || (call->gtCallType != CT_INDIRECT) || (call->gtCallCookie == NULL));
// First move the this pointer (if any) onto the regular arg list
- GenTreePtr thisPtr = NULL;
+ GenTree* thisPtr = NULL;
if (call->gtCallObjp)
{
- GenTreePtr objp = call->gtCallObjp;
+ GenTree* objp = call->gtCallObjp;
call->gtCallObjp = NULL;
if ((call->gtFlags & GTF_CALL_NULLCHECK) || call->IsVirtualVtable())
@@ -7720,23 +7719,23 @@ void Compiler::fgMorphTailCall(GenTreeCall* call)
if (thisPtr == NULL)
{
// Too complex, so use a temp
- unsigned lclNum = lvaGrabTemp(true DEBUGARG("tail call thisptr"));
- GenTreePtr asg = gtNewTempAssign(lclNum, objp);
+ unsigned lclNum = lvaGrabTemp(true DEBUGARG("tail call thisptr"));
+ GenTree* asg = gtNewTempAssign(lclNum, objp);
if (!call->IsVirtualVtable())
{
// Add an indirection to get the nullcheck
- GenTreePtr tmp = gtNewLclvNode(lclNum, vt);
- GenTreePtr ind = gtNewOperNode(GT_IND, TYP_INT, tmp);
- asg = gtNewOperNode(GT_COMMA, TYP_VOID, asg, ind);
+ GenTree* tmp = gtNewLclvNode(lclNum, vt);
+ GenTree* ind = gtNewOperNode(GT_IND, TYP_INT, tmp);
+ asg = gtNewOperNode(GT_COMMA, TYP_VOID, asg, ind);
}
objp = gtNewOperNode(GT_COMMA, vt, asg, gtNewLclvNode(lclNum, vt));
thisPtr = gtNewLclvNode(lclNum, vt);
}
else if (!call->IsVirtualVtable())
{
- GenTreePtr ind = gtNewOperNode(GT_IND, TYP_INT, thisPtr);
- objp = gtNewOperNode(GT_COMMA, vt, ind, objp);
- thisPtr = gtClone(thisPtr, true);
+ GenTree* ind = gtNewOperNode(GT_IND, TYP_INT, thisPtr);
+ objp = gtNewOperNode(GT_COMMA, vt, ind, objp);
+ thisPtr = gtClone(thisPtr, true);
}
call->gtFlags &= ~GTF_CALL_NULLCHECK;
@@ -7751,7 +7750,7 @@ void Compiler::fgMorphTailCall(GenTreeCall* call)
{
flags = CORINFO_TAILCALL_STUB_DISPATCH_ARG;
- GenTreePtr arg;
+ GenTree* arg;
if (call->gtCallType == CT_INDIRECT)
{
arg = gtClone(call->gtCallAddr, true);
@@ -7781,8 +7780,8 @@ void Compiler::fgMorphTailCall(GenTreeCall* call)
noway_assert(thisPtr != NULL);
- GenTreePtr add = gtNewOperNode(GT_ADD, TYP_I_IMPL, thisPtr, gtNewIconNode(VPTR_OFFS, TYP_I_IMPL));
- GenTreePtr vtbl = gtNewOperNode(GT_IND, TYP_I_IMPL, add);
+ GenTree* add = gtNewOperNode(GT_ADD, TYP_I_IMPL, thisPtr, gtNewIconNode(VPTR_OFFS, TYP_I_IMPL));
+ GenTree* vtbl = gtNewOperNode(GT_IND, TYP_I_IMPL, add);
vtbl->gtFlags |= GTF_EXCEPT;
unsigned vtabOffsOfIndirection;
@@ -7797,7 +7796,7 @@ void Compiler::fgMorphTailCall(GenTreeCall* call)
{
add = gtNewOperNode(GT_ADD, TYP_I_IMPL, vtbl, gtNewIconNode(vtabOffsOfIndirection, TYP_I_IMPL));
- GenTreePtr indOffTree = nullptr;
+ GenTree* indOffTree = nullptr;
if (isRelative)
{
@@ -7830,10 +7829,10 @@ void Compiler::fgMorphTailCall(GenTreeCall* call)
// Now inject a placeholder for the real call target that codegen will generate
#ifdef LEGACY_BACKEND
- GenTreePtr arg = new (this, GT_NOP) GenTreeOp(GT_NOP, TYP_I_IMPL);
+ GenTree* arg = new (this, GT_NOP) GenTreeOp(GT_NOP, TYP_I_IMPL);
codeGen->genMarkTreeInReg(arg, REG_TAILCALL_ADDR);
#else // !LEGACY_BACKEND
- GenTreePtr arg = gtNewIconNode(0, TYP_I_IMPL);
+ GenTree* arg = gtNewIconNode(0, TYP_I_IMPL);
#endif // !LEGACY_BACKEND
call->gtCallArgs = gtNewListNode(arg, call->gtCallArgs);
@@ -7932,21 +7931,21 @@ void Compiler::fgMorphTailCall(GenTreeCall* call)
if (call->gtCallObjp)
{
- GenTreePtr thisPtr = nullptr;
- GenTreePtr objp = call->gtCallObjp;
- call->gtCallObjp = nullptr;
+ GenTree* thisPtr = nullptr;
+ GenTree* objp = call->gtCallObjp;
+ call->gtCallObjp = nullptr;
#ifdef _TARGET_X86_
if ((call->IsDelegateInvoke() || call->IsVirtualVtable()) && !objp->IsLocal())
{
// tmp = "this"
- unsigned lclNum = lvaGrabTemp(true DEBUGARG("tail call thisptr"));
- GenTreePtr asg = gtNewTempAssign(lclNum, objp);
+ unsigned lclNum = lvaGrabTemp(true DEBUGARG("tail call thisptr"));
+ GenTree* asg = gtNewTempAssign(lclNum, objp);
// COMMA(tmp = "this", tmp)
- var_types vt = objp->TypeGet();
- GenTreePtr tmp = gtNewLclvNode(lclNum, vt);
- thisPtr = gtNewOperNode(GT_COMMA, vt, asg, tmp);
+ var_types vt = objp->TypeGet();
+ GenTree* tmp = gtNewLclvNode(lclNum, vt);
+ thisPtr = gtNewOperNode(GT_COMMA, vt, asg, tmp);
objp = thisPtr;
}
@@ -7973,13 +7972,13 @@ void Compiler::fgMorphTailCall(GenTreeCall* call)
// create a temp if either "this" has side effects or "this" is too complex to clone.
// tmp = "this"
- unsigned lclNum = lvaGrabTemp(true DEBUGARG("tail call thisptr"));
- GenTreePtr asg = gtNewTempAssign(lclNum, objp);
+ unsigned lclNum = lvaGrabTemp(true DEBUGARG("tail call thisptr"));
+ GenTree* asg = gtNewTempAssign(lclNum, objp);
// COMMA(tmp = "this", deref(tmp))
- GenTreePtr tmp = gtNewLclvNode(lclNum, vt);
- GenTreePtr ind = gtNewOperNode(GT_IND, TYP_INT, tmp);
- asg = gtNewOperNode(GT_COMMA, TYP_VOID, asg, ind);
+ GenTree* tmp = gtNewLclvNode(lclNum, vt);
+ GenTree* ind = gtNewOperNode(GT_IND, TYP_INT, tmp);
+ asg = gtNewOperNode(GT_COMMA, TYP_VOID, asg, ind);
// COMMA(COMMA(tmp = "this", deref(tmp)), tmp)
thisPtr = gtNewOperNode(GT_COMMA, vt, asg, gtNewLclvNode(lclNum, vt));
@@ -7987,8 +7986,8 @@ void Compiler::fgMorphTailCall(GenTreeCall* call)
else
{
// thisPtr = COMMA(deref("this"), "this")
- GenTreePtr ind = gtNewOperNode(GT_IND, TYP_INT, thisPtr);
- thisPtr = gtNewOperNode(GT_COMMA, vt, ind, gtClone(objp, true));
+ GenTree* ind = gtNewOperNode(GT_IND, TYP_INT, thisPtr);
+ thisPtr = gtNewOperNode(GT_COMMA, vt, ind, gtClone(objp, true));
}
call->gtFlags &= ~GTF_CALL_NULLCHECK;
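The comments in these hunks describe the trees being built to null-check "this" before the tail call: COMMA(tmp = "this", deref(tmp)) and COMMA(deref("this"), "this"). As a rough source-level analogy (illustrative C++ only, not the JIT's tree representation), the comma evaluates the int-sized indirection purely for its faulting side effect and then yields the object pointer:

#include <cassert>

struct Object { int field; };

// Sketch of COMMA(COMMA(tmp = obj, *(int*)tmp), tmp): dereference once to
// force the null check, then hand back the original pointer.
static Object* NullCheckedThis(Object* obj)
{
    Object* tmp = obj;                     // tmp = "this"
    *reinterpret_cast<volatile int*>(tmp); // deref(tmp): faults if obj is null
    return tmp;                            // the outer comma yields tmp
}

int main()
{
    Object o{42};
    assert(NullCheckedThis(&o) == &o);
    return 0;
}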
@@ -8013,7 +8012,7 @@ void Compiler::fgMorphTailCall(GenTreeCall* call)
CorInfoHelperTailCallSpecialHandling flags = CorInfoHelperTailCallSpecialHandling(0);
if (call->IsVirtualStub())
{
- GenTreePtr stubAddrArg;
+ GenTree* stubAddrArg;
flags = CORINFO_TAILCALL_STUB_DISPATCH_ARG;
@@ -8035,7 +8034,7 @@ void Compiler::fgMorphTailCall(GenTreeCall* call)
}
// Now inject a placeholder for the real call target that Lower phase will generate.
- GenTreePtr arg = gtNewIconNode(0, TYP_I_IMPL);
+ GenTree* arg = gtNewIconNode(0, TYP_I_IMPL);
call->gtCallArgs = gtNewListNode(arg, call->gtCallArgs);
// Inject the pointer for the copy routine to be used for struct copying
@@ -8105,19 +8104,19 @@ void Compiler::fgMorphTailCall(GenTreeCall* call)
void Compiler::fgMorphRecursiveFastTailCallIntoLoop(BasicBlock* block, GenTreeCall* recursiveTailCall)
{
assert(recursiveTailCall->IsTailCallConvertibleToLoop());
- GenTreePtr last = block->lastStmt();
+ GenTree* last = block->lastStmt();
assert(recursiveTailCall == last->gtStmt.gtStmtExpr);
// Transform recursive tail call into a loop.
- GenTreePtr earlyArgInsertionPoint = last;
+ GenTree* earlyArgInsertionPoint = last;
IL_OFFSETX callILOffset = last->gtStmt.gtStmtILoffsx;
// Hoist arg setup statement for the 'this' argument.
- GenTreePtr thisArg = recursiveTailCall->gtCallObjp;
+ GenTree* thisArg = recursiveTailCall->gtCallObjp;
if (thisArg && !thisArg->IsNothingNode() && !thisArg->IsArgPlaceHolderNode())
{
- GenTreePtr thisArgStmt = gtNewStmt(thisArg, callILOffset);
+ GenTree* thisArgStmt = gtNewStmt(thisArg, callILOffset);
fgInsertStmtBefore(block, earlyArgInsertionPoint, thisArgStmt);
}
@@ -8160,8 +8159,8 @@ void Compiler::fgMorphRecursiveFastTailCallIntoLoop(BasicBlock* block, GenTreeCa
// [000057] - A---------- \--* = int
// [000056] D------N---- \--* lclVar int V01 arg1
- GenTreePtr tmpAssignmentInsertionPoint = last;
- GenTreePtr paramAssignmentInsertionPoint = last;
+ GenTree* tmpAssignmentInsertionPoint = last;
+ GenTree* paramAssignmentInsertionPoint = last;
// Process early args. They may contain both setup statements for late args and actual args.
// Early args don't include 'this' arg. We need to account for that so that the call to gtArgEntryByArgNum
@@ -8170,20 +8169,20 @@ void Compiler::fgMorphRecursiveFastTailCallIntoLoop(BasicBlock* block, GenTreeCa
for (GenTreeArgList* earlyArgs = recursiveTailCall->gtCallArgs; earlyArgs != nullptr;
(earlyArgIndex++, earlyArgs = earlyArgs->Rest()))
{
- GenTreePtr earlyArg = earlyArgs->Current();
+ GenTree* earlyArg = earlyArgs->Current();
if (!earlyArg->IsNothingNode() && !earlyArg->IsArgPlaceHolderNode())
{
if ((earlyArg->gtFlags & GTF_LATE_ARG) != 0)
{
// This is a setup node so we need to hoist it.
- GenTreePtr earlyArgStmt = gtNewStmt(earlyArg, callILOffset);
+ GenTree* earlyArgStmt = gtNewStmt(earlyArg, callILOffset);
fgInsertStmtBefore(block, earlyArgInsertionPoint, earlyArgStmt);
}
else
{
// This is an actual argument that needs to be assigned to the corresponding caller parameter.
fgArgTabEntry* curArgTabEntry = gtArgEntryByArgNum(recursiveTailCall, earlyArgIndex);
- GenTreePtr paramAssignStmt =
+ GenTree* paramAssignStmt =
fgAssignRecursiveCallArgToCallerParam(earlyArg, curArgTabEntry, block, callILOffset,
tmpAssignmentInsertionPoint, paramAssignmentInsertionPoint);
if ((tmpAssignmentInsertionPoint == last) && (paramAssignStmt != nullptr))
@@ -8201,9 +8200,9 @@ void Compiler::fgMorphRecursiveFastTailCallIntoLoop(BasicBlock* block, GenTreeCa
(lateArgIndex++, lateArgs = lateArgs->Rest()))
{
// A late argument is an actual argument that needs to be assigned to the corresponding caller's parameter.
- GenTreePtr lateArg = lateArgs->Current();
+ GenTree* lateArg = lateArgs->Current();
fgArgTabEntry* curArgTabEntry = gtArgEntryByLateArgIndex(recursiveTailCall, lateArgIndex);
- GenTreePtr paramAssignStmt =
+ GenTree* paramAssignStmt =
fgAssignRecursiveCallArgToCallerParam(lateArg, curArgTabEntry, block, callILOffset,
tmpAssignmentInsertionPoint, paramAssignmentInsertionPoint);
@@ -8219,10 +8218,10 @@ void Compiler::fgMorphRecursiveFastTailCallIntoLoop(BasicBlock* block, GenTreeCa
// block won't be in the loop (it's assumed to have no predecessors), we need to update the special local here.
if (!info.compIsStatic && (lvaArg0Var != info.compThisArg))
{
- var_types thisType = lvaTable[info.compThisArg].TypeGet();
- GenTreePtr arg0 = gtNewLclvNode(lvaArg0Var, thisType);
- GenTreePtr arg0Assignment = gtNewAssignNode(arg0, gtNewLclvNode(info.compThisArg, thisType));
- GenTreePtr arg0AssignmentStmt = gtNewStmt(arg0Assignment, callILOffset);
+ var_types thisType = lvaTable[info.compThisArg].TypeGet();
+ GenTree* arg0 = gtNewLclvNode(lvaArg0Var, thisType);
+ GenTree* arg0Assignment = gtNewAssignNode(arg0, gtNewLclvNode(info.compThisArg, thisType));
+ GenTree* arg0AssignmentStmt = gtNewStmt(arg0Assignment, callILOffset);
fgInsertStmtBefore(block, paramAssignmentInsertionPoint, arg0AssignmentStmt);
}
@@ -8243,8 +8242,8 @@ void Compiler::fgMorphRecursiveFastTailCallIntoLoop(BasicBlock* block, GenTreeCa
bool structWithGCFields = ((lclType == TYP_STRUCT) && (varDsc->lvStructGcCount > 0));
if (isUserLocal || structWithGCFields)
{
- GenTreePtr lcl = gtNewLclvNode(varNum, lclType);
- GenTreePtr init = nullptr;
+ GenTree* lcl = gtNewLclvNode(varNum, lclType);
+ GenTree* init = nullptr;
if (lclType == TYP_STRUCT)
{
const bool isVolatile = false;
@@ -8254,10 +8253,10 @@ void Compiler::fgMorphRecursiveFastTailCallIntoLoop(BasicBlock* block, GenTreeCa
}
else
{
- GenTreePtr zero = gtNewZeroConNode(genActualType(lclType));
- init = gtNewAssignNode(lcl, zero);
+ GenTree* zero = gtNewZeroConNode(genActualType(lclType));
+ init = gtNewAssignNode(lcl, zero);
}
- GenTreePtr initStmt = gtNewStmt(init, callILOffset);
+ GenTree* initStmt = gtNewStmt(init, callILOffset);
fgInsertStmtBefore(block, last, initStmt);
}
}
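fgMorphRecursiveFastTailCallIntoLoop, whose argument handling is retyped above, rewrites a recursive tail call as assignments to the caller's own parameters followed by a jump back to the start of the method; locals the prolog would normally zero-init are re-zeroed because the prolog is not re-executed for the looped "call". A sketch of the source-level effect on a simple accumulator method (names are illustrative, not from the CoreCLR sources):

#include <cassert>

// Recursive form: the call is in tail position ...
static int SumTo(int n, int acc)
{
    return (n == 0) ? acc : SumTo(n - 1, acc + n); // tail call
}

// ... and the morphed IR behaves like this loop: the new argument values are
// stored into the parameters and control jumps back to the method entry.
static int SumToLoop(int n, int acc)
{
    while (true)
    {
        if (n == 0)
            return acc;
        acc = acc + n;
        n   = n - 1;
    }
}

int main()
{
    assert(SumTo(10, 0) == SumToLoop(10, 0)); // both compute 55
    return 0;
}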
@@ -8294,19 +8293,19 @@ void Compiler::fgMorphRecursiveFastTailCallIntoLoop(BasicBlock* block, GenTreeCa
// Return Value:
// parameter assignment statement if one was inserted; nullptr otherwise.
-GenTreePtr Compiler::fgAssignRecursiveCallArgToCallerParam(GenTreePtr arg,
- fgArgTabEntry* argTabEntry,
- BasicBlock* block,
- IL_OFFSETX callILOffset,
- GenTreePtr tmpAssignmentInsertionPoint,
- GenTreePtr paramAssignmentInsertionPoint)
+GenTree* Compiler::fgAssignRecursiveCallArgToCallerParam(GenTree* arg,
+ fgArgTabEntry* argTabEntry,
+ BasicBlock* block,
+ IL_OFFSETX callILOffset,
+ GenTree* tmpAssignmentInsertionPoint,
+ GenTree* paramAssignmentInsertionPoint)
{
// Call arguments should be assigned to temps first and then the temps should be assigned to parameters because
// some argument trees may reference parameters directly.
- GenTreePtr argInTemp = nullptr;
- unsigned originalArgNum = argTabEntry->argNum;
- bool needToAssignParameter = true;
+ GenTree* argInTemp = nullptr;
+ unsigned originalArgNum = argTabEntry->argNum;
+ bool needToAssignParameter = true;
// TODO-CQ: enable calls with struct arguments passed in registers.
noway_assert(!varTypeIsStruct(arg->TypeGet()));
@@ -8337,7 +8336,7 @@ GenTreePtr Compiler::fgAssignRecursiveCallArgToCallerParam(GenTreePtr arg,
// any caller parameters. Some common cases are handled above but we may be able to eliminate
// more temp assignments.
- GenTreePtr paramAssignStmt = nullptr;
+ GenTree* paramAssignStmt = nullptr;
if (needToAssignParameter)
{
if (argInTemp == nullptr)
@@ -8345,11 +8344,11 @@ GenTreePtr Compiler::fgAssignRecursiveCallArgToCallerParam(GenTreePtr arg,
// The argument is not assigned to a temp. We need to create a new temp and insert an assignment.
// TODO: we can avoid a temp assignment if we can prove that the argument tree
// doesn't involve any caller parameters.
- unsigned tmpNum = lvaGrabTemp(true DEBUGARG("arg temp"));
- GenTreePtr tempSrc = arg;
- GenTreePtr tempDest = gtNewLclvNode(tmpNum, tempSrc->gtType);
- GenTreePtr tmpAssignNode = gtNewAssignNode(tempDest, tempSrc);
- GenTreePtr tmpAssignStmt = gtNewStmt(tmpAssignNode, callILOffset);
+ unsigned tmpNum = lvaGrabTemp(true DEBUGARG("arg temp"));
+ GenTree* tempSrc = arg;
+ GenTree* tempDest = gtNewLclvNode(tmpNum, tempSrc->gtType);
+ GenTree* tmpAssignNode = gtNewAssignNode(tempDest, tempSrc);
+ GenTree* tmpAssignStmt = gtNewStmt(tmpAssignNode, callILOffset);
fgInsertStmtBefore(block, tmpAssignmentInsertionPoint, tmpAssignStmt);
argInTemp = gtNewLclvNode(tmpNum, tempSrc->gtType);
}
@@ -8357,9 +8356,9 @@ GenTreePtr Compiler::fgAssignRecursiveCallArgToCallerParam(GenTreePtr arg,
// Now assign the temp to the parameter.
LclVarDsc* paramDsc = lvaTable + originalArgNum;
assert(paramDsc->lvIsParam);
- GenTreePtr paramDest = gtNewLclvNode(originalArgNum, paramDsc->lvType);
- GenTreePtr paramAssignNode = gtNewAssignNode(paramDest, argInTemp);
- paramAssignStmt = gtNewStmt(paramAssignNode, callILOffset);
+ GenTree* paramDest = gtNewLclvNode(originalArgNum, paramDsc->lvType);
+ GenTree* paramAssignNode = gtNewAssignNode(paramDest, argInTemp);
+ paramAssignStmt = gtNewStmt(paramAssignNode, callILOffset);
fgInsertStmtBefore(block, paramAssignmentInsertionPoint, paramAssignStmt);
}
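The comment at the top of fgAssignRecursiveCallArgToCallerParam gives the reason for the temps: an argument tree may still read a parameter that an earlier parameter assignment has already overwritten. A small sketch of that hazard, continuing the loop-conversion analogy above (illustrative C++ only):

#include <cassert>

// Recursive tail call whose arguments read both parameters.
static int Swapped(int a, int b)
{
    return (b == 0) ? a : Swapped(b - 1, a);
}

// Naive loop conversion: assigning the parameters directly clobbers 'a'
// before the second argument reads it, so the result changes.
static int SwappedWrong(int a, int b)
{
    while (b != 0)
    {
        a = b - 1;
        b = a; // reads the already-overwritten 'a'
    }
    return a;
}

// Temp-first conversion, mirroring fgAssignRecursiveCallArgToCallerParam:
// evaluate every argument into a temp, then copy the temps to the parameters.
static int SwappedLoop(int a, int b)
{
    while (b != 0)
    {
        int tmpA = b - 1;
        int tmpB = a;
        a = tmpA;
        b = tmpB;
    }
    return a;
}

int main()
{
    assert(Swapped(5, 3) == SwappedLoop(5, 3));  // both return 2
    assert(SwappedWrong(5, 3) != Swapped(5, 3)); // the naive version returns 0
    return 0;
}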
@@ -8371,7 +8370,7 @@ GenTreePtr Compiler::fgAssignRecursiveCallArgToCallerParam(GenTreePtr arg,
* Transform the given GT_CALL tree for code generation.
*/
-GenTreePtr Compiler::fgMorphCall(GenTreeCall* call)
+GenTree* Compiler::fgMorphCall(GenTreeCall* call)
{
if (varTypeIsStruct(call))
{
@@ -8530,7 +8529,7 @@ GenTreePtr Compiler::fgMorphCall(GenTreeCall* call)
if (info.compRetBuffArg != BAD_VAR_NUM)
{
noway_assert(callType == TYP_VOID);
- GenTreePtr retValBuf = call->gtCallArgs->gtOp.gtOp1;
+ GenTree* retValBuf = call->gtCallArgs->gtOp.gtOp1;
if (retValBuf->gtOper != GT_LCL_VAR || retValBuf->gtLclVarCommon.gtLclNum != info.compRetBuffArg)
{
szFailReason = "Need to copy return buffer";
@@ -8722,7 +8721,7 @@ GenTreePtr Compiler::fgMorphCall(GenTreeCall* call)
}
#endif
- GenTreePtr stmtExpr = fgMorphStmt->gtStmtExpr;
+ GenTree* stmtExpr = fgMorphStmt->gtStmtExpr;
#ifdef DEBUG
// Tail call needs to be in one of the following IR forms
@@ -8740,7 +8739,7 @@ GenTreePtr Compiler::fgMorphCall(GenTreeCall* call)
else
{
assert(stmtOper == GT_RETURN || stmtOper == GT_ASG || stmtOper == GT_COMMA);
- GenTreePtr treeWithCall;
+ GenTree* treeWithCall;
if (stmtOper == GT_RETURN)
{
treeWithCall = stmtExpr->gtGetOp1();
@@ -8800,7 +8799,7 @@ GenTreePtr Compiler::fgMorphCall(GenTreeCall* call)
{
// We didn't insert a poll block, so we need to morph the call now
// (Normally it will get morphed when we get to the split poll block)
- GenTreePtr temp = fgMorphCall(call);
+ GenTree* temp = fgMorphCall(call);
noway_assert(temp == call);
}
@@ -8869,9 +8868,9 @@ NO_TAIL_CALL:
// This is call to CORINFO_HELP_VIRTUAL_FUNC_PTR with ignored result.
// Transform it into a null check.
- GenTreePtr thisPtr = call->gtCallArgs->gtOp.gtOp1;
+ GenTree* thisPtr = call->gtCallArgs->gtOp.gtOp1;
- GenTreePtr nullCheck = gtNewOperNode(GT_IND, TYP_I_IMPL, thisPtr);
+ GenTree* nullCheck = gtNewOperNode(GT_IND, TYP_I_IMPL, thisPtr);
nullCheck->gtFlags |= GTF_EXCEPT;
return fgMorphTree(nullCheck);
@@ -8926,8 +8925,8 @@ NO_TAIL_CALL:
}
// Make sure that return buffers containing GC pointers that aren't too large are pointers into the stack.
- GenTreePtr origDest = nullptr; // Will only become non-null if we do the transformation (and thus require
- // copy-back).
+ GenTree* origDest = nullptr; // Will only become non-null if we do the transformation (and thus require
+ // copy-back).
unsigned retValTmpNum = BAD_VAR_NUM;
CORINFO_CLASS_HANDLE structHnd = nullptr;
if (call->HasRetBufArg() &&
@@ -8941,7 +8940,7 @@ NO_TAIL_CALL:
// if we're passing the caller's ret buff arg to the callee, since the caller's caller
// will maintain the same invariant.)
- GenTreePtr dest = call->gtCallArgs->gtOp.gtOp1;
+ GenTree* dest = call->gtCallArgs->gtOp.gtOp1;
assert(dest->OperGet() != GT_ARGPLACE); // If it was, we'd be in a remorph, which we've already excluded above.
if (dest->gtType == TYP_BYREF && !(dest->OperGet() == GT_ADDR && dest->gtOp.gtOp1->OperGet() == GT_LCL_VAR))
{
@@ -9050,14 +9049,14 @@ NO_TAIL_CALL:
noway_assert(origDest == nullptr);
noway_assert(call->gtCallLateArgs->gtOp.gtOp1 != nullptr);
- GenTreePtr innerCall = call->gtCallLateArgs->gtOp.gtOp1;
+ GenTree* innerCall = call->gtCallLateArgs->gtOp.gtOp1;
if (innerCall->gtOper == GT_CALL && (innerCall->gtCall.gtCallMoreFlags & GTF_CALL_M_SPECIAL_INTRINSIC) &&
info.compCompHnd->getIntrinsicID(innerCall->gtCall.gtCallMethHnd) ==
CORINFO_INTRINSIC_GetCurrentManagedThread)
{
// substitute expression with call to helper
- GenTreePtr newCall = gtNewHelperCallNode(CORINFO_HELP_GETCURRENTMANAGEDTHREADID, TYP_INT);
+ GenTree* newCall = gtNewHelperCallNode(CORINFO_HELP_GETCURRENTMANAGEDTHREADID, TYP_INT);
JITDUMP("get_ManagedThreadId(get_CurrentThread) folding performed\n");
return fgMorphTree(newCall);
}
@@ -9065,7 +9064,7 @@ NO_TAIL_CALL:
if (origDest != nullptr)
{
- GenTreePtr retValVarAddr = gtNewOperNode(GT_ADDR, TYP_BYREF, gtNewLclvNode(retValTmpNum, TYP_STRUCT));
+ GenTree* retValVarAddr = gtNewOperNode(GT_ADDR, TYP_BYREF, gtNewLclvNode(retValTmpNum, TYP_STRUCT));
// If the origDest expression was an assignment to a variable, it might be to an otherwise-unused
// var, which would allow the whole assignment to be optimized away to a NOP. So in that case, make the
// origDest into a comma that uses the var. Note that the var doesn't have to be a temp for this to
@@ -9074,14 +9073,14 @@ NO_TAIL_CALL:
{
if (origDest->gtOp.gtOp1->OperGet() == GT_LCL_VAR)
{
- GenTreePtr var = origDest->gtOp.gtOp1;
- origDest = gtNewOperNode(GT_COMMA, var->TypeGet(), origDest,
+ GenTree* var = origDest->gtOp.gtOp1;
+ origDest = gtNewOperNode(GT_COMMA, var->TypeGet(), origDest,
gtNewLclvNode(var->gtLclVar.gtLclNum, var->TypeGet()));
}
}
- GenTreePtr copyBlk = gtNewCpObjNode(origDest, retValVarAddr, structHnd, false);
- copyBlk = fgMorphTree(copyBlk);
- GenTree* result = gtNewOperNode(GT_COMMA, TYP_VOID, call, copyBlk);
+ GenTree* copyBlk = gtNewCpObjNode(origDest, retValVarAddr, structHnd, false);
+ copyBlk = fgMorphTree(copyBlk);
+ GenTree* result = gtNewOperNode(GT_COMMA, TYP_VOID, call, copyBlk);
#ifdef DEBUG
result->gtDebugFlags |= GTF_DEBUG_NODE_MORPHED;
#endif
@@ -9122,7 +9121,7 @@ NO_TAIL_CALL:
* Transform the given GTK_CONST tree for code generation.
*/
-GenTreePtr Compiler::fgMorphConst(GenTreePtr tree)
+GenTree* Compiler::fgMorphConst(GenTree* tree)
{
assert(tree->OperKind() & GTK_CONST);
@@ -9179,7 +9178,7 @@ GenTreePtr Compiler::fgMorphConst(GenTreePtr tree)
* Transform the given GTK_LEAF tree for code generation.
*/
-GenTreePtr Compiler::fgMorphLeaf(GenTreePtr tree)
+GenTree* Compiler::fgMorphLeaf(GenTree* tree)
{
assert(tree->OperKind() & GTK_LEAF);
@@ -9193,7 +9192,7 @@ GenTreePtr Compiler::fgMorphLeaf(GenTreePtr tree)
{
if (info.compIsVarArgs)
{
- GenTreePtr newTree =
+ GenTree* newTree =
fgMorphStackArgForVarArgs(tree->gtLclFld.gtLclNum, tree->gtType, tree->gtLclFld.gtLclOffs);
if (newTree != nullptr)
{
@@ -9253,7 +9252,7 @@ GenTreePtr Compiler::fgMorphLeaf(GenTreePtr tree)
return tree;
}
-void Compiler::fgAssignSetVarDef(GenTreePtr tree)
+void Compiler::fgAssignSetVarDef(GenTree* tree)
{
GenTreeLclVarCommon* lclVarCmnTree;
bool isEntire = false;
@@ -9289,18 +9288,18 @@ void Compiler::fgAssignSetVarDef(GenTreePtr tree)
// If successful, this method always returns the incoming tree, modifying only
// its arguments.
-GenTreePtr Compiler::fgMorphOneAsgBlockOp(GenTreePtr tree)
+GenTree* Compiler::fgMorphOneAsgBlockOp(GenTree* tree)
{
// This must be a block assignment.
noway_assert(tree->OperIsBlkOp());
var_types asgType = tree->TypeGet();
- GenTreePtr asg = tree;
- GenTreePtr dest = asg->gtGetOp1();
- GenTreePtr src = asg->gtGetOp2();
+ GenTree* asg = tree;
+ GenTree* dest = asg->gtGetOp1();
+ GenTree* src = asg->gtGetOp2();
unsigned destVarNum = BAD_VAR_NUM;
LclVarDsc* destVarDsc = nullptr;
- GenTreePtr lclVarTree = nullptr;
+ GenTree* lclVarTree = nullptr;
bool isCopyBlock = asg->OperIsCopyBlkOp();
bool isInitBlock = !isCopyBlock;
@@ -9480,7 +9479,7 @@ GenTreePtr Compiler::fgMorphOneAsgBlockOp(GenTreePtr tree)
// Check to ensure we don't have a reducible *(& ... )
if (dest->OperIsIndir() && dest->AsIndir()->Addr()->OperGet() == GT_ADDR)
{
- GenTreePtr addrOp = dest->AsIndir()->Addr()->gtGetOp1();
+ GenTree* addrOp = dest->AsIndir()->Addr()->gtGetOp1();
// Ignore reinterpret casts between int/gc
if ((addrOp->TypeGet() == asgType) || (varTypeIsIntegralOrI(addrOp) && (genTypeSize(asgType) == size)))
{
@@ -9665,7 +9664,7 @@ GenTreePtr Compiler::fgMorphOneAsgBlockOp(GenTreePtr tree)
// if the Dest() is a struct that has a "CustomLayout" and "ContainsHoles" then we
// can not use a field by field assignment and must leave the original GT_INITBLK unmodified.
-GenTreePtr Compiler::fgMorphInitBlock(GenTreePtr tree)
+GenTree* Compiler::fgMorphInitBlock(GenTree* tree)
{
// We must have the GT_ASG form of InitBlkOp.
noway_assert((tree->OperGet() == GT_ASG) && tree->OperIsInitBlkOp());
@@ -9691,7 +9690,7 @@ GenTreePtr Compiler::fgMorphInitBlock(GenTreePtr tree)
}
JITDUMP("\nfgMorphInitBlock:");
- GenTreePtr oneAsgTree = fgMorphOneAsgBlockOp(tree);
+ GenTree* oneAsgTree = fgMorphOneAsgBlockOp(tree);
if (oneAsgTree)
{
JITDUMP(" using oneAsgTree.\n");
@@ -9849,10 +9848,10 @@ GenTreePtr Compiler::fgMorphInitBlock(GenTreePtr tree)
tree = nullptr;
INDEBUG(morphed = true);
- GenTreePtr dest;
- GenTreePtr srcCopy;
- unsigned fieldLclNum;
- unsigned fieldCnt = destLclVar->lvFieldCnt;
+ GenTree* dest;
+ GenTree* srcCopy;
+ unsigned fieldLclNum;
+ unsigned fieldCnt = destLclVar->lvFieldCnt;
for (unsigned i = 0; i < fieldCnt; ++i)
{
@@ -9946,7 +9945,7 @@ GenTreePtr Compiler::fgMorphInitBlock(GenTreePtr tree)
// This doesn't really warrant a separate method, but is here to abstract
// the fact that these nodes can be modified in-place.
-GenTreePtr Compiler::fgMorphBlkToInd(GenTreeBlk* tree, var_types type)
+GenTree* Compiler::fgMorphBlkToInd(GenTreeBlk* tree, var_types type)
{
tree->SetOper(GT_IND);
tree->gtType = type;
@@ -9967,7 +9966,7 @@ GenTreePtr Compiler::fgMorphBlkToInd(GenTreeBlk* tree, var_types type)
// If this is a source, it will morph it to an GT_IND before taking its address,
// since it may not be remorphed (and we don't want blk nodes as rvalues).
-GenTreePtr Compiler::fgMorphGetStructAddr(GenTreePtr* pTree, CORINFO_CLASS_HANDLE clsHnd, bool isRValue)
+GenTree* Compiler::fgMorphGetStructAddr(GenTree** pTree, CORINFO_CLASS_HANDLE clsHnd, bool isRValue)
{
GenTree* addr;
GenTree* tree = *pTree;
@@ -10034,7 +10033,7 @@ GenTreePtr Compiler::fgMorphGetStructAddr(GenTreePtr* pTree, CORINFO_CLASS_HANDL
// Returns the possibly-morphed node. The caller is responsible for updating
// the parent of this node.
-GenTree* Compiler::fgMorphBlkNode(GenTreePtr tree, bool isDest)
+GenTree* Compiler::fgMorphBlkNode(GenTree* tree, bool isDest)
{
GenTree* handleTree = nullptr;
GenTree* addr = nullptr;
@@ -10363,7 +10362,7 @@ void Compiler::fgMorphUnsafeBlk(GenTreeObj* dest)
// if the Source() or Dest() is a struct that has a "CustomLayout" and "ContainsHoles" then we
// can not use a field by field assignment and must leave the original block copy unmodified.
-GenTreePtr Compiler::fgMorphCopyBlock(GenTreePtr tree)
+GenTree* Compiler::fgMorphCopyBlock(GenTree* tree)
{
noway_assert(tree->OperIsCopyBlkOp());
@@ -10401,8 +10400,8 @@ GenTreePtr Compiler::fgMorphCopyBlock(GenTreePtr tree)
asg->gtOp.gtOp2 = rhs;
- GenTreePtr oldTree = tree;
- GenTreePtr oneAsgTree = fgMorphOneAsgBlockOp(tree);
+ GenTree* oldTree = tree;
+ GenTree* oneAsgTree = fgMorphOneAsgBlockOp(tree);
if (oneAsgTree)
{
@@ -10419,8 +10418,8 @@ GenTreePtr Compiler::fgMorphCopyBlock(GenTreePtr tree)
LclVarDsc* destLclVar = nullptr;
FieldSeqNode* destFldSeq = nullptr;
bool destDoFldAsg = false;
- GenTreePtr destAddr = nullptr;
- GenTreePtr srcAddr = nullptr;
+ GenTree* destAddr = nullptr;
+ GenTree* srcAddr = nullptr;
bool destOnStack = false;
bool hasGCPtrs = false;
@@ -10828,10 +10827,10 @@ GenTreePtr Compiler::fgMorphCopyBlock(GenTreePtr tree)
//
tree = nullptr;
- GenTreePtr src;
- GenTreePtr addrSpill = nullptr;
- unsigned addrSpillTemp = BAD_VAR_NUM;
- bool addrSpillIsStackDest = false; // true if 'addrSpill' represents the address in our local stack frame
+ GenTree* src;
+ GenTree* addrSpill = nullptr;
+ unsigned addrSpillTemp = BAD_VAR_NUM;
+ bool addrSpillIsStackDest = false; // true if 'addrSpill' represents the address in our local stack frame
unsigned fieldCnt = DUMMY_INIT(0);
@@ -10962,7 +10961,7 @@ GenTreePtr Compiler::fgMorphCopyBlock(GenTreePtr tree)
//
if (addrSpill->OperGet() == GT_ADDR)
{
- GenTreePtr addrOp = addrSpill->gtOp.gtOp1;
+ GenTree* addrOp = addrSpill->gtOp.gtOp1;
if (addrOp->IsLocal())
{
unsigned lclVarNum = addrOp->gtLclVarCommon.gtLclNum;
@@ -11036,7 +11035,7 @@ GenTreePtr Compiler::fgMorphCopyBlock(GenTreePtr tree)
}
}
- GenTreePtr fieldOffsetNode = gtNewIconNode(lvaTable[fieldLclNum].lvFldOffset, TYP_I_IMPL);
+ GenTree* fieldOffsetNode = gtNewIconNode(lvaTable[fieldLclNum].lvFldOffset, TYP_I_IMPL);
// Have to set the field sequence -- which means we need the field handle.
CORINFO_CLASS_HANDLE classHnd = lvaTable[srcLclNum].lvVerTypeInfo.GetClassHandle();
CORINFO_FIELD_HANDLE fieldHnd =
@@ -11169,8 +11168,8 @@ GenTree* Compiler::fgMorphForRegisterFP(GenTree* tree)
{
if (varTypeIsFloating(tree))
{
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtGetOp2();
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtGetOp2();
if (op1->TypeGet() != tree->TypeGet())
{
@@ -11184,11 +11183,11 @@ GenTree* Compiler::fgMorphForRegisterFP(GenTree* tree)
}
else if (tree->OperIsCompare())
{
- GenTreePtr op1 = tree->gtOp.gtOp1;
+ GenTree* op1 = tree->gtOp.gtOp1;
if (varTypeIsFloating(op1))
{
- GenTreePtr op2 = tree->gtGetOp2();
+ GenTree* op2 = tree->gtGetOp2();
assert(varTypeIsFloating(op2));
if (op1->TypeGet() != op2->TypeGet())
@@ -11310,7 +11309,7 @@ GenTree* Compiler::fgMorphRecognizeBoxNullable(GenTree* compare)
//--------------------------------------------------------------------------------------------------------------
// getSIMDStructFromField:
-// Checking whether the field belongs to a simd struct or not. If it is, return the GenTreePtr for
+// Checking whether the field belongs to a simd struct or not. If it is, return the GenTree* for
// the struct node, also base type, field index and simd size. If it is not, just return nullptr.
// Usually if the tree node is from a simd lclvar which is not used in any SIMD intrinsic, then we
// should return nullptr, since in this case we should treat SIMD struct as a regular struct.
@@ -11331,23 +11330,23 @@ GenTree* Compiler::fgMorphRecognizeBoxNullable(GenTree* compare)
// the UsedInSIMDIntrinsic check.
//
// return value:
-// A GenTreePtr which points to the simd lclvar tree that the field belongs to. If the tree is not the simd
+// A GenTree* which points to the simd lclvar tree that the field belongs to. If the tree is not the simd
// intrinsic related field, return nullptr.
//
-GenTreePtr Compiler::getSIMDStructFromField(GenTreePtr tree,
- var_types* pBaseTypeOut,
- unsigned* indexOut,
- unsigned* simdSizeOut,
- bool ignoreUsedInSIMDIntrinsic /*false*/)
+GenTree* Compiler::getSIMDStructFromField(GenTree* tree,
+ var_types* pBaseTypeOut,
+ unsigned* indexOut,
+ unsigned* simdSizeOut,
+ bool ignoreUsedInSIMDIntrinsic /*false*/)
{
- GenTreePtr ret = nullptr;
+ GenTree* ret = nullptr;
if (tree->OperGet() == GT_FIELD)
{
- GenTreePtr objRef = tree->gtField.gtFldObj;
+ GenTree* objRef = tree->gtField.gtFldObj;
if (objRef != nullptr)
{
- GenTreePtr obj = nullptr;
+ GenTree* obj = nullptr;
if (objRef->gtOper == GT_ADDR)
{
obj = objRef->gtOp.gtOp1;
@@ -11394,19 +11393,19 @@ GenTreePtr Compiler::getSIMDStructFromField(GenTreePtr tree,
* operation to the SIMD intrinsic SIMDIntrinsicGetItem, and return the new tree.
* Otherwise, return the old tree.
* Argument:
-* tree - GenTreePtr. If this pointer points to simd struct which is used for simd
+* tree - GenTree*. If this pointer points to simd struct which is used for simd
* intrinsic, we will morph it as simd intrinsic SIMDIntrinsicGetItem.
* Return:
-* A GenTreePtr which points to the new tree. If the tree is not for simd intrinsic,
+* A GenTree* which points to the new tree. If the tree is not for simd intrinsic,
* return nullptr.
*/
-GenTreePtr Compiler::fgMorphFieldToSIMDIntrinsicGet(GenTreePtr tree)
+GenTree* Compiler::fgMorphFieldToSIMDIntrinsicGet(GenTree* tree)
{
- unsigned index = 0;
- var_types baseType = TYP_UNKNOWN;
- unsigned simdSize = 0;
- GenTreePtr simdStructNode = getSIMDStructFromField(tree, &baseType, &index, &simdSize);
+ unsigned index = 0;
+ var_types baseType = TYP_UNKNOWN;
+ unsigned simdSize = 0;
+ GenTree* simdStructNode = getSIMDStructFromField(tree, &baseType, &index, &simdSize);
if (simdStructNode != nullptr)
{
assert(simdSize >= ((index + 1) * genTypeSize(baseType)));
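getSIMDStructFromField and fgMorphFieldToSIMDIntrinsicGet turn a field read on a SIMD-typed local into an element read (SIMDIntrinsicGetItem) whose index is the field offset divided by the size of the base type, which is what the assert above checks against simdSize. A rough sketch of the equivalence, using a hypothetical three-float vector rather than the JIT's own data structures:

#include <cassert>
#include <cstring>

struct Vector3 { float X, Y, Z; }; // stand-in for a SIMD struct

// What SIMDIntrinsicGetItem computes: the index-th lane of the vector.
static float GetItem(const Vector3& v, unsigned index)
{
    float lanes[3];
    std::memcpy(lanes, &v, sizeof(lanes));
    return lanes[index];
}

int main()
{
    Vector3 v{1.0f, 2.0f, 3.0f};
    // The field access v.Y morphs into GetItem(v, 1):
    // index = field offset / sizeof(base type) = 4 / 4 = 1.
    assert(GetItem(v, 1) == v.Y);
    return 0;
}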
@@ -11424,23 +11423,23 @@ GenTreePtr Compiler::fgMorphFieldToSIMDIntrinsicGet(GenTreePtr tree)
* SIMDIntrinsicSet*, and return a new tree. If it is not such an assignment,
* then return the old tree.
* Argument:
-* tree - GenTreePtr. If this pointer points to simd struct which is used for simd
+* tree - GenTree*. If this pointer points to simd struct which is used for simd
* intrinsic, we will morph it as simd intrinsic set.
* Return:
-* A GenTreePtr which points to the new tree. If the tree is not for simd intrinsic,
+* A GenTree* which points to the new tree. If the tree is not for simd intrinsic,
* return nullptr.
*/
-GenTreePtr Compiler::fgMorphFieldAssignToSIMDIntrinsicSet(GenTreePtr tree)
+GenTree* Compiler::fgMorphFieldAssignToSIMDIntrinsicSet(GenTree* tree)
{
assert(tree->OperGet() == GT_ASG);
- GenTreePtr op1 = tree->gtGetOp1();
- GenTreePtr op2 = tree->gtGetOp2();
+ GenTree* op1 = tree->gtGetOp1();
+ GenTree* op2 = tree->gtGetOp2();
- unsigned index = 0;
- var_types baseType = TYP_UNKNOWN;
- unsigned simdSize = 0;
- GenTreePtr simdOp1Struct = getSIMDStructFromField(op1, &baseType, &index, &simdSize);
+ unsigned index = 0;
+ var_types baseType = TYP_UNKNOWN;
+ unsigned simdSize = 0;
+ GenTree* simdOp1Struct = getSIMDStructFromField(op1, &baseType, &index, &simdSize);
if (simdOp1Struct != nullptr)
{
// Generate the simd set intrinsic
@@ -11465,11 +11464,11 @@ GenTreePtr Compiler::fgMorphFieldAssignToSIMDIntrinsicSet(GenTreePtr tree)
noway_assert(!"There is no set intrinsic for index bigger than 3");
}
- GenTreePtr target = gtClone(simdOp1Struct);
+ GenTree* target = gtClone(simdOp1Struct);
assert(target != nullptr);
- GenTreePtr simdTree = gtNewSIMDNode(target->gtType, simdOp1Struct, op2, simdIntrinsicID, baseType, simdSize);
- tree->gtOp.gtOp1 = target;
- tree->gtOp.gtOp2 = simdTree;
+ GenTree* simdTree = gtNewSIMDNode(target->gtType, simdOp1Struct, op2, simdIntrinsicID, baseType, simdSize);
+ tree->gtOp.gtOp1 = target;
+ tree->gtOp.gtOp2 = simdTree;
#ifdef DEBUG
tree->gtDebugFlags |= GTF_DEBUG_NODE_MORPHED;
#endif
@@ -11489,7 +11488,7 @@ GenTreePtr Compiler::fgMorphFieldAssignToSIMDIntrinsicSet(GenTreePtr tree)
#pragma warning(push)
#pragma warning(disable : 21000) // Suppress PREFast warning about overly large function
#endif
-GenTreePtr Compiler::fgMorphSmpOp(GenTreePtr tree, MorphAddrContext* mac)
+GenTree* Compiler::fgMorphSmpOp(GenTree* tree, MorphAddrContext* mac)
{
ALLOCA_CHECK();
assert(tree->OperKind() & GTK_SMPOP);
@@ -11518,8 +11517,8 @@ GenTreePtr Compiler::fgMorphSmpOp(GenTreePtr tree, MorphAddrContext* mac)
genTreeOps oper = tree->OperGet();
var_types typ = tree->TypeGet();
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtGetOp2IfPresent();
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtGetOp2IfPresent();
/*-------------------------------------------------------------------------
* First do any PRE-ORDER processing
@@ -11541,10 +11540,10 @@ GenTreePtr Compiler::fgMorphSmpOp(GenTreePtr tree, MorphAddrContext* mac)
// We should check whether op2 should be assigned to a SIMD field or not.
// If it is, we should translate the tree to a simd intrinsic.
assert(!fgGlobalMorph || ((tree->gtDebugFlags & GTF_DEBUG_NODE_MORPHED) == 0));
- GenTreePtr newTree = fgMorphFieldAssignToSIMDIntrinsicSet(tree);
- typ = tree->TypeGet();
- op1 = tree->gtGetOp1();
- op2 = tree->gtGetOp2();
+ GenTree* newTree = fgMorphFieldAssignToSIMDIntrinsicSet(tree);
+ typ = tree->TypeGet();
+ op1 = tree->gtGetOp1();
+ op2 = tree->gtGetOp2();
#ifdef DEBUG
assert((tree == newTree) && (tree->OperGet() == oper));
if ((tree->gtDebugFlags & GTF_DEBUG_NODE_MORPHED) != 0)
@@ -11614,7 +11613,7 @@ GenTreePtr Compiler::fgMorphSmpOp(GenTreePtr tree, MorphAddrContext* mac)
}
else
{
- GenTreePtr effOp1 = op1->gtEffectiveVal();
+ GenTree* effOp1 = op1->gtEffectiveVal();
noway_assert((effOp1->gtOper == GT_CNS_INT) &&
(effOp1->IsIntegralConst(0) || effOp1->IsIntegralConst(1)));
}
@@ -11653,7 +11652,7 @@ GenTreePtr Compiler::fgMorphSmpOp(GenTreePtr tree, MorphAddrContext* mac)
if ((op1->gtFlags & GTF_UNSIGNED) != (op2->gtFlags & GTF_UNSIGNED))
{
// We see if we can force an int constant to change its signedness
- GenTreePtr constOp;
+ GenTree* constOp;
if (op1->gtCast.CastOp()->gtOper == GT_CNS_INT)
constOp = op1;
else if (op2->gtCast.CastOp()->gtOper == GT_CNS_INT)
@@ -11888,7 +11887,7 @@ GenTreePtr Compiler::fgMorphSmpOp(GenTreePtr tree, MorphAddrContext* mac)
{
if (op2->IsIntegralConst(1))
{
- GenTreePtr zeroNode = gtNewZeroConNode(typ);
+ GenTree* zeroNode = gtNewZeroConNode(typ);
#ifdef DEBUG
zeroNode->gtDebugFlags |= GTF_DEBUG_NODE_MORPHED;
#endif
@@ -11999,7 +11998,7 @@ GenTreePtr Compiler::fgMorphSmpOp(GenTreePtr tree, MorphAddrContext* mac)
noway_assert(tree->OperIsBinary());
- GenTreePtr oldTree = tree;
+ GenTree* oldTree = tree;
tree = gtFoldExpr(tree);
@@ -12163,7 +12162,7 @@ GenTreePtr Compiler::fgMorphSmpOp(GenTreePtr tree, MorphAddrContext* mac)
if (tree->gtOper == GT_ADD && subMac1 != nullptr)
{
assert(subMac1->m_kind == MACK_Ind || subMac1->m_kind == MACK_Addr); // Can't be a CopyBlock.
- GenTreePtr otherOp = tree->gtOp.gtOp2;
+ GenTree* otherOp = tree->gtOp.gtOp2;
// Is the other operator a constant?
if (otherOp->IsCnsIntOrI())
{
@@ -12280,7 +12279,7 @@ GenTreePtr Compiler::fgMorphSmpOp(GenTreePtr tree, MorphAddrContext* mac)
case GT_ADD:
if (mac != nullptr && mac->m_kind == MACK_Ind)
{
- GenTreePtr otherOp = tree->gtOp.gtOp1;
+ GenTree* otherOp = tree->gtOp.gtOp1;
// Is the other operator a constant?
if (otherOp->IsCnsIntOrI())
{
@@ -12454,10 +12453,10 @@ DONE_MORPHING_CHILDREN:
}
}
- GenTreePtr oldTree = tree;
+ GenTree* oldTree = tree;
- GenTreePtr qmarkOp1 = nullptr;
- GenTreePtr qmarkOp2 = nullptr;
+ GenTree* qmarkOp1 = nullptr;
+ GenTree* qmarkOp2 = nullptr;
if ((tree->OperGet() == GT_QMARK) && (tree->gtOp.gtOp2->OperGet() == GT_COLON))
{
@@ -12520,10 +12519,11 @@ DONE_MORPHING_CHILDREN:
* Perform the required oper-specific postorder morphing
*/
- GenTreePtr temp;
- GenTreePtr cns1, cns2;
+ GenTree* temp;
+ GenTree* cns1;
+ GenTree* cns2;
size_t ival1, ival2;
- GenTreePtr lclVarTree;
+ GenTree* lclVarTree;
FieldSeqNode* fieldSeq = nullptr;
switch (oper)
@@ -12689,10 +12689,10 @@ DONE_MORPHING_CHILDREN:
//
//
//
- GenTreePtr comma = op1;
- GenTreePtr relop = comma->gtOp.gtOp2;
+ GenTree* comma = op1;
+ GenTree* relop = comma->gtOp.gtOp2;
- GenTreePtr relop_op1 = relop->gtOp.gtOp1;
+ GenTree* relop_op1 = relop->gtOp.gtOp1;
bool reverse = ((ival2 == 0) == (oper == GT_EQ));
@@ -12732,8 +12732,8 @@ DONE_MORPHING_CHILDREN:
// / \
//
- GenTreePtr asg = op1->gtOp.gtOp1;
- GenTreePtr lcl = op1->gtOp.gtOp2;
+ GenTree* asg = op1->gtOp.gtOp1;
+ GenTree* lcl = op1->gtOp.gtOp2;
/* Make sure that the left side of the comma is the assignment of the LCL_VAR */
if (asg->gtOper != GT_ASG)
@@ -12853,8 +12853,8 @@ DONE_MORPHING_CHILDREN:
if (op1->gtOper == GT_AND)
{
- GenTreePtr andOp = op1;
- GenTreePtr rshiftOp = andOp->gtOp.gtOp1;
+ GenTree* andOp = op1;
+ GenTree* rshiftOp = andOp->gtOp.gtOp1;
if ((rshiftOp->gtOper != GT_RSZ) && (rshiftOp->gtOper != GT_RSH))
{
@@ -12967,7 +12967,7 @@ DONE_MORPHING_CHILDREN:
/* Is the result of the mask effectively an INT ? */
- GenTreePtr andMask;
+ GenTree* andMask;
andMask = op1->gtOp.gtOp2;
if (andMask->gtOper != GT_CNS_NATIVELONG)
{
@@ -13136,8 +13136,8 @@ DONE_MORPHING_CHILDREN:
/* Get hold of the two branches */
noway_assert(op2->OperGet() == GT_COLON);
- GenTreePtr thenNode = op2->AsColon()->ThenNode();
- GenTreePtr elseNode = op2->AsColon()->ElseNode();
+ GenTree* thenNode = op2->AsColon()->ThenNode();
+ GenTree* elseNode = op2->AsColon()->ElseNode();
/* Try to hoist assignments out of qmark colon constructs.
i.e. replace (cond?(x=a):(x=b)) with (x=(cond?a:b)). */
@@ -13148,8 +13148,8 @@ DONE_MORPHING_CHILDREN:
{
noway_assert(thenNode->TypeGet() == elseNode->TypeGet());
- GenTreePtr asg = thenNode;
- GenTreePtr colon = op2;
+ GenTree* asg = thenNode;
+ GenTree* colon = op2;
colon->gtOp.gtOp1 = thenNode->gtOp.gtOp2;
colon->gtOp.gtOp2 = elseNode->gtOp.gtOp2;
tree->gtType = colon->gtType = asg->gtOp.gtOp2->gtType;
@@ -13200,7 +13200,7 @@ DONE_MORPHING_CHILDREN:
}
else
{
- GenTreePtr tmp = elseNode;
+ GenTree* tmp = elseNode;
op2->AsColon()->ElseNode() = elseNode = thenNode;
op2->AsColon()->ThenNode() = thenNode = tmp;
@@ -13512,7 +13512,7 @@ DONE_MORPHING_CHILDREN:
op2->gtIntCon.gtFieldSeq->IsConstantIndexFieldSeq())
{
assert(op2->gtIntCon.gtFieldSeq->m_next == nullptr);
- GenTreePtr otherOp = op1;
+ GenTree* otherOp = op1;
if (otherOp->OperGet() == GT_NEG)
{
otherOp = otherOp->gtOp.gtOp1;
@@ -13548,7 +13548,7 @@ DONE_MORPHING_CHILDREN:
fgMorphTreeDone(op1);
}
- GenTreePtr factorIcon = gtNewIconNode(factor, TYP_I_IMPL);
+ GenTree* factorIcon = gtNewIconNode(factor, TYP_I_IMPL);
if (op2IsConstIndex)
{
factorIcon->AsIntCon()->gtFieldSeq =
@@ -13890,12 +13890,12 @@ DONE_MORPHING_CHILDREN:
// TBD: this transformation is currently necessary for correctness -- it might
// be good to analyze the failures that result if we don't do this, and fix them
// in other ways. Ideally, this should be optional.
- GenTreePtr commaNode = op1;
- unsigned treeFlags = tree->gtFlags;
- commaNode->gtType = typ;
- commaNode->gtFlags = (treeFlags & ~GTF_REVERSE_OPS); // Bashing the GT_COMMA flags here is
- // dangerous, clear the GTF_REVERSE_OPS at
- // least.
+ GenTree* commaNode = op1;
+ unsigned treeFlags = tree->gtFlags;
+ commaNode->gtType = typ;
+ commaNode->gtFlags = (treeFlags & ~GTF_REVERSE_OPS); // Bashing the GT_COMMA flags here is
+ // dangerous, clear the GTF_REVERSE_OPS at
+ // least.
#ifdef DEBUG
commaNode->gtDebugFlags |= GTF_DEBUG_NODE_MORPHED;
#endif
@@ -13961,7 +13961,7 @@ DONE_MORPHING_CHILDREN:
}
// Perform the transform ADDR(IND(...)) == (...).
- GenTreePtr addr = op1->gtOp.gtOp1;
+ GenTree* addr = op1->gtOp.gtOp1;
noway_assert(varTypeIsGC(addr->gtType) || addr->gtType == TYP_I_IMPL);
@@ -13980,7 +13980,7 @@ DONE_MORPHING_CHILDREN:
}
// Perform the transform ADDR(OBJ(...)) == (...).
- GenTreePtr addr = op1->AsObj()->Addr();
+ GenTree* addr = op1->AsObj()->Addr();
noway_assert(varTypeIsGC(addr->gtType) || addr->gtType == TYP_I_IMPL);
@@ -13991,7 +13991,7 @@ DONE_MORPHING_CHILDREN:
}
else if (op1->gtOper == GT_CAST)
{
- GenTreePtr casting = op1->gtCast.CastOp();
+ GenTree* casting = op1->gtCast.CastOp();
if (casting->gtOper == GT_LCL_VAR || casting->gtOper == GT_CLS_VAR)
{
DEBUG_DESTROY_NODE(op1);
@@ -14002,7 +14002,7 @@ DONE_MORPHING_CHILDREN:
{
// Perform the transform ADDR(COMMA(x, ..., z)) == COMMA(x, ..., ADDR(z)).
// (Be sure to mark "z" as an l-value...)
- GenTreePtr commaNode = op1;
+ GenTree* commaNode = op1;
while (commaNode->gtOp.gtOp2->gtOper == GT_COMMA)
{
commaNode = commaNode->gtOp.gtOp2;
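The transform named in the comment, ADDR(COMMA(x, ..., z)) == COMMA(x, ..., ADDR(z)), matches how the comma operator already behaves at the C++ source level: the left operands still run for their side effects, and the address taken is the address of the last operand. A tiny illustration (not JIT code):

#include <cassert>

static int g = 0;
static int z = 0;
static int SideEffect() { return ++g; }

int main()
{
    // The comma expression yields its right operand as an lvalue, so taking
    // the address of the whole expression is the same as evaluating the left
    // side and then taking the address of the right side.
    int* p = &(SideEffect(), z);
    assert(p == &z);
    assert(g == 1); // the left operand was still evaluated
    return 0;
}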
@@ -14093,7 +14093,7 @@ DONE_MORPHING_CHILDREN:
// Extract the side effects from the left side of the comma. Since they don't "go" anywhere, this
// is all we need.
- GenTreePtr op1SideEffects = nullptr;
+ GenTree* op1SideEffects = nullptr;
// The addition of "GTF_MAKE_CSE" below prevents us from throwing away (for example)
// hoisted expressions in loops.
gtExtractSideEffList(op1, &op1SideEffects, (GTF_SIDE_EFFECT | GTF_MAKE_CSE));
@@ -14135,7 +14135,7 @@ DONE_MORPHING_CHILDREN:
{
if (fgIsCommaThrow(op1, true))
{
- GenTreePtr throwNode = op1->gtOp.gtOp1;
+ GenTree* throwNode = op1->gtOp.gtOp1;
noway_assert(throwNode->gtType == TYP_VOID);
return throwNode;
@@ -14179,7 +14179,7 @@ DONE_MORPHING_CHILDREN:
fgRemoveRestOfBlock = true;
}
- GenTreePtr throwNode = op1->gtOp.gtOp1;
+ GenTree* throwNode = op1->gtOp.gtOp1;
noway_assert(throwNode->gtType == TYP_VOID);
if (oper == GT_COMMA)
@@ -14212,7 +14212,7 @@ DONE_MORPHING_CHILDREN:
}
else
{
- GenTreePtr commaOp2 = op1->gtOp.gtOp2;
+ GenTree* commaOp2 = op1->gtOp.gtOp2;
// need type of oper to be same as tree
if (typ == TYP_LONG)
@@ -14282,7 +14282,7 @@ DONE_MORPHING_CHILDREN:
{
noway_assert(GenTree::OperIsShiftOrRotate(oper));
- GenTreePtr commaOp2 = op2->gtOp.gtOp2;
+ GenTree* commaOp2 = op2->gtOp.gtOp2;
commaOp2->ChangeOperConst(GT_CNS_NATIVELONG);
commaOp2->gtIntConCommon.SetLngValue(0);
@@ -14296,7 +14296,7 @@ DONE_MORPHING_CHILDREN:
{
// An example case is comparison (say GT_GT) of two longs or floating point values.
- GenTreePtr commaOp2 = op2->gtOp.gtOp2;
+ GenTree* commaOp2 = op2->gtOp.gtOp2;
commaOp2->ChangeOperConst(GT_CNS_INT);
commaOp2->gtIntCon.gtIconVal = 0;
@@ -14308,7 +14308,7 @@ DONE_MORPHING_CHILDREN:
{
noway_assert(tree->OperGet() == GT_ADD);
- GenTreePtr commaOp2 = op2->gtOp.gtOp2;
+ GenTree* commaOp2 = op2->gtOp.gtOp2;
commaOp2->ChangeOperConst(GT_CNS_INT);
commaOp2->gtIntCon.gtIconVal = 0;
@@ -14390,7 +14390,7 @@ GenTree* Compiler::fgMorphSmpOpOptional(GenTreeOp* tree)
if (fgGlobalMorph && (oper == GT_ADD) && !tree->gtOverflow() && (op1->gtOper == GT_ADD) && !op1->gtOverflow() &&
varTypeIsIntegralOrI(typ))
{
- GenTreePtr ad2 = op1->gtOp.gtOp2;
+ GenTree* ad2 = op1->gtOp.gtOp2;
if (op2->OperIsConst() == 0 && ad2->OperIsConst() != 0)
{
@@ -14797,7 +14797,7 @@ GenTree* Compiler::fgMorphSmpOpOptional(GenTreeOp* tree)
if (op2->gtOper == GT_CNS_INT && op1->gtOper == GT_ADD)
{
- GenTreePtr add = op1->gtOp.gtOp2;
+ GenTree* add = op1->gtOp.gtOp2;
if (add->IsCnsIntOrI() && (op2->GetScaleIndexMul() != 0))
{
@@ -14849,7 +14849,7 @@ GenTree* Compiler::fgMorphSmpOpOptional(GenTreeOp* tree)
if (!optValnumCSE_phase && op2->IsCnsIntOrI() && op1->gtOper == GT_ADD && !op1->gtOverflow())
{
- GenTreePtr cns = op1->gtOp.gtOp2;
+ GenTree* cns = op1->gtOp.gtOp2;
if (cns->IsCnsIntOrI() && (op2->GetScaleIndexShf() != 0))
{
@@ -15034,7 +15034,7 @@ bool Compiler::fgOperIsBitwiseRotationRoot(genTreeOps oper)
// Assumption:
// The input is a GT_OR or a GT_XOR tree.
-GenTreePtr Compiler::fgRecognizeAndMorphBitwiseRotation(GenTreePtr tree)
+GenTree* Compiler::fgRecognizeAndMorphBitwiseRotation(GenTree* tree)
{
#ifndef LEGACY_BACKEND
//
@@ -15087,10 +15087,10 @@ GenTreePtr Compiler::fgRecognizeAndMorphBitwiseRotation(GenTreePtr tree)
assert(fgOperIsBitwiseRotationRoot(oper));
// Check if we have an LSH on one side of the OR and an RSZ on the other side.
- GenTreePtr op1 = tree->gtGetOp1();
- GenTreePtr op2 = tree->gtGetOp2();
- GenTreePtr leftShiftTree = nullptr;
- GenTreePtr rightShiftTree = nullptr;
+ GenTree* op1 = tree->gtGetOp1();
+ GenTree* op2 = tree->gtGetOp2();
+ GenTree* leftShiftTree = nullptr;
+ GenTree* rightShiftTree = nullptr;
if ((op1->OperGet() == GT_LSH) && (op2->OperGet() == GT_RSZ))
{
leftShiftTree = op1;
@@ -15110,12 +15110,12 @@ GenTreePtr Compiler::fgRecognizeAndMorphBitwiseRotation(GenTreePtr tree)
// We already checked that there are no side effects above.
if (GenTree::Compare(leftShiftTree->gtGetOp1(), rightShiftTree->gtGetOp1()))
{
- GenTreePtr rotatedValue = leftShiftTree->gtGetOp1();
- var_types rotatedValueActualType = genActualType(rotatedValue->gtType);
- ssize_t rotatedValueBitSize = genTypeSize(rotatedValueActualType) * 8;
+ GenTree* rotatedValue = leftShiftTree->gtGetOp1();
+ var_types rotatedValueActualType = genActualType(rotatedValue->gtType);
+ ssize_t rotatedValueBitSize = genTypeSize(rotatedValueActualType) * 8;
noway_assert((rotatedValueBitSize == 32) || (rotatedValueBitSize == 64));
- GenTreePtr leftShiftIndex = leftShiftTree->gtGetOp2();
- GenTreePtr rightShiftIndex = rightShiftTree->gtGetOp2();
+ GenTree* leftShiftIndex = leftShiftTree->gtGetOp2();
+ GenTree* rightShiftIndex = rightShiftTree->gtGetOp2();
// The shift index may be masked. At least (rotatedValueBitSize - 1) lower bits
// shouldn't be masked for the transformation to be valid. If additional
@@ -15161,10 +15161,10 @@ GenTreePtr Compiler::fgRecognizeAndMorphBitwiseRotation(GenTreePtr tree)
return tree;
}
- GenTreePtr shiftIndexWithAdd = nullptr;
- GenTreePtr shiftIndexWithoutAdd = nullptr;
+ GenTree* shiftIndexWithAdd = nullptr;
+ GenTree* shiftIndexWithoutAdd = nullptr;
genTreeOps rotateOp = GT_NONE;
- GenTreePtr rotateIndex = nullptr;
+ GenTree* rotateIndex = nullptr;
if (leftShiftIndex->OperGet() == GT_ADD)
{
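fgRecognizeAndMorphBitwiseRotation looks for an OR (or XOR) of a left shift and an unsigned right shift of the same value whose shift counts add up to the value's bit size, possibly with the count masked, and rewrites the whole expression as a single rotate node (GT_ROL/GT_ROR). The source-level shape it targets looks like this (sketch only):

#include <cassert>
#include <cstdint>

// The pattern the morpher recognizes: (x << c) | (x >> (32 - c)), where the
// right-shift count may also appear masked, e.g. (32 - c) & 31.  Both shifts
// operate on the same value and the counts sum to its bit size, so the whole
// expression is a left rotate.
static uint32_t RotateLeft32(uint32_t x, uint32_t c)
{
    c &= 31; // keep the count in range so both shifts are well defined
    return (x << c) | (x >> ((32 - c) & 31));
}

int main()
{
    assert(RotateLeft32(0x80000001u, 1) == 0x00000003u);
    assert(RotateLeft32(0x12345678u, 0) == 0x12345678u);
    return 0;
}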
@@ -15263,13 +15263,13 @@ GenTreePtr Compiler::fgRecognizeAndMorphBitwiseRotation(GenTreePtr tree)
}
#if !CPU_HAS_FP_SUPPORT
-GenTreePtr Compiler::fgMorphToEmulatedFP(GenTreePtr tree)
+GenTree* Compiler::fgMorphToEmulatedFP(GenTree* tree)
{
genTreeOps oper = tree->OperGet();
var_types typ = tree->TypeGet();
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtGetOp2IfPresent();
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtGetOp2IfPresent();
/*
We have to use helper calls for all FP operations:
@@ -15281,9 +15281,9 @@ GenTreePtr Compiler::fgMorphToEmulatedFP(GenTreePtr tree)
if (varTypeIsFloating(typ) || (op1 && varTypeIsFloating(op1->TypeGet())))
{
- int helper;
- GenTreePtr args;
- size_t argc = genTypeStSz(typ);
+ int helper;
+ GenTree* args;
+ size_t argc = genTypeStSz(typ);
/* Not all FP operations need helper calls */
@@ -15476,7 +15476,7 @@ GenTreePtr Compiler::fgMorphToEmulatedFP(GenTreePtr tree)
* Transform the given tree for code generation and return an equivalent tree.
*/
-GenTreePtr Compiler::fgMorphTree(GenTreePtr tree, MorphAddrContext* mac)
+GenTree* Compiler::fgMorphTree(GenTree* tree, MorphAddrContext* mac)
{
assert(tree);
assert(tree->gtOper != GT_STMT);
@@ -15529,7 +15529,7 @@ GenTreePtr Compiler::fgMorphTree(GenTreePtr tree, MorphAddrContext* mac)
if (compStressCompile(STRESS_GENERIC_CHECK, 0))
{
- GenTreePtr copy;
+ GenTree* copy;
#ifdef SMALL_TREE_NODES
if (GenTree::s_gtNodeSizes[tree->gtOper] == TREE_NODE_SZ_SMALL)
@@ -15570,7 +15570,7 @@ GenTreePtr Compiler::fgMorphTree(GenTreePtr tree, MorphAddrContext* mac)
if (optAssertionCount > 0)
{
- GenTreePtr newTree = tree;
+ GenTree* newTree = tree;
while (newTree != nullptr)
{
tree = newTree;
@@ -15586,7 +15586,7 @@ GenTreePtr Compiler::fgMorphTree(GenTreePtr tree, MorphAddrContext* mac)
/* Save the original un-morphed tree for fgMorphTreeDone */
- GenTreePtr oldTree = tree;
+ GenTree* oldTree = tree;
/* Figure out what kind of a node we have */
@@ -15754,7 +15754,7 @@ DONE:
// lclNum - The varNum of the lclVar for which we're killing assertions.
// tree - (DEBUG only) the tree responsible for killing its assertions.
//
-void Compiler::fgKillDependentAssertionsSingle(unsigned lclNum DEBUGARG(GenTreePtr tree))
+void Compiler::fgKillDependentAssertionsSingle(unsigned lclNum DEBUGARG(GenTree* tree))
{
/* All dependent assertions are killed here */
@@ -15804,7 +15804,7 @@ void Compiler::fgKillDependentAssertionsSingle(unsigned lclNum DEBUGARG(GenTreeP
// respectively.
// Calls fgKillDependentAssertionsSingle to kill the assertions for a single lclVar.
//
-void Compiler::fgKillDependentAssertions(unsigned lclNum DEBUGARG(GenTreePtr tree))
+void Compiler::fgKillDependentAssertions(unsigned lclNum DEBUGARG(GenTree* tree))
{
LclVarDsc* varDsc = &lvaTable[lclNum];
@@ -15847,8 +15847,8 @@ void Compiler::fgKillDependentAssertions(unsigned lclNum DEBUGARG(GenTreePtr tre
*
*/
-void Compiler::fgMorphTreeDone(GenTreePtr tree,
- GenTreePtr oldTree /* == NULL */
+void Compiler::fgMorphTreeDone(GenTree* tree,
+ GenTree* oldTree /* == NULL */
DEBUGARG(int morphNum))
{
#ifdef DEBUG
@@ -15939,7 +15939,7 @@ bool Compiler::fgFoldConditional(BasicBlock* block)
{
noway_assert(block->bbTreeList && block->bbTreeList->gtPrev);
- GenTreePtr stmt = block->bbTreeList->gtPrev;
+ GenTree* stmt = block->bbTreeList->gtPrev;
noway_assert(stmt->gtNext == nullptr);
@@ -15971,7 +15971,7 @@ bool Compiler::fgFoldConditional(BasicBlock* block)
/* Did we fold the conditional */
noway_assert(stmt->gtStmt.gtStmtExpr->gtOp.gtOp1);
- GenTreePtr cond;
+ GenTree* cond;
cond = stmt->gtStmt.gtStmtExpr->gtOp.gtOp1;
if (cond->OperKind() & GTK_CONST)
@@ -16147,7 +16147,7 @@ bool Compiler::fgFoldConditional(BasicBlock* block)
{
noway_assert(block->bbTreeList && block->bbTreeList->gtPrev);
- GenTreePtr stmt = block->bbTreeList->gtPrev;
+ GenTree* stmt = block->bbTreeList->gtPrev;
noway_assert(stmt->gtNext == nullptr);
@@ -16186,7 +16186,7 @@ bool Compiler::fgFoldConditional(BasicBlock* block)
/* Did we fold the conditional */
noway_assert(stmt->gtStmt.gtStmtExpr->gtOp.gtOp1);
- GenTreePtr cond;
+ GenTree* cond;
cond = stmt->gtStmt.gtStmtExpr->gtOp.gtOp1;
if (cond->OperKind() & GTK_CONST)
@@ -16434,7 +16434,7 @@ void Compiler::fgMorphStmts(BasicBlock* block, bool* lnot, bool* loadw)
fgCurrentlyInUseArgTemps = hashBv::Create(this);
GenTreeStmt* stmt = block->firstStmt();
- GenTreePtr prev = nullptr;
+ GenTree* prev = nullptr;
for (; stmt != nullptr; prev = stmt->gtStmtExpr, stmt = stmt->gtNextStmt)
{
assert(stmt->gtOper == GT_STMT);
@@ -16451,9 +16451,9 @@ void Compiler::fgMorphStmts(BasicBlock* block, bool* lnot, bool* loadw)
}
#endif
- fgMorphStmt = stmt;
- compCurStmt = stmt;
- GenTreePtr tree = stmt->gtStmtExpr;
+ fgMorphStmt = stmt;
+ compCurStmt = stmt;
+ GenTree* tree = stmt->gtStmtExpr;
#ifdef DEBUG
compCurStmtNum++;
@@ -16473,7 +16473,7 @@ void Compiler::fgMorphStmts(BasicBlock* block, bool* lnot, bool* loadw)
/* Morph this statement tree */
- GenTreePtr morph = fgMorphTree(tree);
+ GenTree* morph = fgMorphTree(tree);
// mark any outgoing arg temps as free so we can reuse them in the next statement.
@@ -16622,16 +16622,16 @@ void Compiler::fgMorphStmts(BasicBlock* block, bool* lnot, bool* loadw)
{
if ((block->bbJumpKind == BBJ_COND) || (block->bbJumpKind == BBJ_SWITCH))
{
- GenTreePtr first = block->bbTreeList;
+ GenTree* first = block->bbTreeList;
noway_assert(first);
- GenTreePtr last = first->gtPrev;
+ GenTree* last = first->gtPrev;
noway_assert(last && last->gtNext == nullptr);
- GenTreePtr lastStmt = last->gtStmt.gtStmtExpr;
+ GenTree* lastStmt = last->gtStmt.gtStmtExpr;
if (((block->bbJumpKind == BBJ_COND) && (lastStmt->gtOper == GT_JTRUE)) ||
((block->bbJumpKind == BBJ_SWITCH) && (lastStmt->gtOper == GT_SWITCH)))
{
- GenTreePtr op1 = lastStmt->gtOp.gtOp1;
+ GenTree* op1 = lastStmt->gtOp.gtOp1;
if (op1->OperKind() & GTK_RELOP)
{
@@ -16648,7 +16648,7 @@ void Compiler::fgMorphStmts(BasicBlock* block, bool* lnot, bool* loadw)
}
#if FEATURE_FASTTAILCALL
- GenTreePtr recursiveTailCall = nullptr;
+ GenTree* recursiveTailCall = nullptr;
if (block->endsWithTailCallConvertibleToLoop(this, &recursiveTailCall))
{
fgMorphRecursiveFastTailCallIntoLoop(block, recursiveTailCall->AsCall());
@@ -16759,18 +16759,18 @@ void Compiler::fgMorphBlocks()
if (mult && (opts.compFlags & CLFLG_TREETRANS) && !opts.compDbgCode && !opts.MinOpts())
{
- for (GenTreePtr tree = block->bbTreeList; tree; tree = tree->gtNext)
+ for (GenTree* tree = block->bbTreeList; tree; tree = tree->gtNext)
{
assert(tree->gtOper == GT_STMT);
- GenTreePtr last = tree->gtStmt.gtStmtExpr;
+ GenTree* last = tree->gtStmt.gtStmtExpr;
if (last->gtOper == GT_ASG_ADD || last->gtOper == GT_ASG_SUB)
{
- GenTreePtr temp;
- GenTreePtr next;
+ GenTree* temp;
+ GenTree* next;
- GenTreePtr dst1 = last->gtOp.gtOp1;
- GenTreePtr src1 = last->gtOp.gtOp2;
+ GenTree* dst1 = last->gtOp.gtOp1;
+ GenTree* src1 = last->gtOp.gtOp2;
if (!last->IsCnsIntOrI())
{
@@ -16788,8 +16788,8 @@ void Compiler::fgMorphBlocks()
for (;;)
{
- GenTreePtr dst2;
- GenTreePtr src2;
+ GenTree* dst2;
+ GenTree* src2;
/* Look at the next statement */
@@ -16935,8 +16935,8 @@ void Compiler::fgMorphBlocks()
//
// TODO: Need to characterize the last top level stmt of a block ending with BBJ_RETURN.
- GenTreePtr last = (block->bbTreeList != nullptr) ? block->bbTreeList->gtPrev : nullptr;
- GenTreePtr ret = (last != nullptr) ? last->gtStmt.gtStmtExpr : nullptr;
+ GenTree* last = (block->bbTreeList != nullptr) ? block->bbTreeList->gtPrev : nullptr;
+ GenTree* ret = (last != nullptr) ? last->gtStmt.gtStmtExpr : nullptr;
if ((ret != nullptr) && (ret->OperGet() == GT_RETURN) && ((ret->gtFlags & GTF_RET_MERGED) != 0))
{
@@ -16977,7 +16977,7 @@ void Compiler::fgMorphBlocks()
noway_assert(ret->OperGet() == GT_RETURN);
noway_assert(ret->gtGetOp1() != nullptr);
- GenTreePtr tree = gtNewTempAssign(genReturnLocal, ret->gtGetOp1());
+ GenTree* tree = gtNewTempAssign(genReturnLocal, ret->gtGetOp1());
last->gtStmt.gtStmtExpr = (tree->OperIsCopyBlkOp()) ? fgMorphCopyBlock(tree) : tree;
@@ -17197,7 +17197,7 @@ void Compiler::fgSetOptions()
/*****************************************************************************/
-GenTreePtr Compiler::fgInitThisClass()
+GenTree* Compiler::fgInitThisClass()
{
noway_assert(!compIsForInlining());
@@ -17225,7 +17225,7 @@ GenTreePtr Compiler::fgInitThisClass()
}
// We need a runtime lookup.
- GenTreePtr ctxTree = getRuntimeContextTree(kind.runtimeLookupKind);
+ GenTree* ctxTree = getRuntimeContextTree(kind.runtimeLookupKind);
// CORINFO_HELP_READYTORUN_GENERIC_STATIC_BASE with a zeroed out resolvedToken means "get the static
// base of the class that owns the method being compiled". If we're in this method, it means we're not
@@ -17246,24 +17246,24 @@ GenTreePtr Compiler::fgInitThisClass()
// This code takes a this pointer; but we need to pass the static method desc to get the right point in
// the hierarchy
{
- GenTreePtr vtTree = gtNewLclvNode(info.compThisArg, TYP_REF);
+ GenTree* vtTree = gtNewLclvNode(info.compThisArg, TYP_REF);
// Vtable pointer of this object
vtTree = gtNewOperNode(GT_IND, TYP_I_IMPL, vtTree);
vtTree->gtFlags |= GTF_EXCEPT; // Null-pointer exception
- GenTreePtr methodHnd = gtNewIconEmbMethHndNode(info.compMethodHnd);
+ GenTree* methodHnd = gtNewIconEmbMethHndNode(info.compMethodHnd);
return gtNewHelperCallNode(CORINFO_HELP_INITINSTCLASS, TYP_VOID, gtNewArgList(vtTree, methodHnd));
}
case CORINFO_LOOKUP_CLASSPARAM:
{
- GenTreePtr vtTree = gtNewLclvNode(info.compTypeCtxtArg, TYP_I_IMPL);
+ GenTree* vtTree = gtNewLclvNode(info.compTypeCtxtArg, TYP_I_IMPL);
return gtNewHelperCallNode(CORINFO_HELP_INITCLASS, TYP_VOID, gtNewArgList(vtTree));
}
case CORINFO_LOOKUP_METHODPARAM:
{
- GenTreePtr methHndTree = gtNewLclvNode(info.compTypeCtxtArg, TYP_I_IMPL);
+ GenTree* methHndTree = gtNewLclvNode(info.compTypeCtxtArg, TYP_I_IMPL);
return gtNewHelperCallNode(CORINFO_HELP_INITINSTCLASS, TYP_VOID,
gtNewArgList(gtNewIconNode(0), methHndTree));
}
@@ -17280,7 +17280,7 @@ GenTreePtr Compiler::fgInitThisClass()
* Tree walk callback to make sure no GT_QMARK nodes are present in the tree,
* except for the allowed ? 1 : 0; pattern.
*/
-Compiler::fgWalkResult Compiler::fgAssertNoQmark(GenTreePtr* tree, fgWalkData* data)
+Compiler::fgWalkResult Compiler::fgAssertNoQmark(GenTree** tree, fgWalkData* data)
{
if ((*tree)->OperGet() == GT_QMARK)
{
@@ -17295,7 +17295,7 @@ void Compiler::fgCheckQmarkAllowedForm(GenTree* tree)
#ifndef LEGACY_BACKEND
assert(!"Qmarks beyond morph disallowed.");
#else // LEGACY_BACKEND
- GenTreePtr colon = tree->gtOp.gtOp2;
+ GenTree* colon = tree->gtOp.gtOp2;
assert(colon->gtOp.gtOp1->IsIntegralConst(0));
assert(colon->gtOp.gtOp2->IsIntegralConst(1));
@@ -17315,9 +17315,9 @@ void Compiler::fgCheckQmarkAllowedForm(GenTree* tree)
* of either op1 of colon or op2 of colon but not a child of any other
* operator.
*/
-void Compiler::fgPreExpandQmarkChecks(GenTreePtr expr)
+void Compiler::fgPreExpandQmarkChecks(GenTree* expr)
{
- GenTreePtr topQmark = fgGetTopLevelQmark(expr);
+ GenTree* topQmark = fgGetTopLevelQmark(expr);
// If the top level Qmark is null, then scan the tree to make sure
// there are no qmarks within it.
@@ -17344,14 +17344,14 @@ void Compiler::fgPreExpandQmarkChecks(GenTreePtr expr)
* GT_LCL_VAR, then return the lcl node in ppDst.
*
*/
-GenTreePtr Compiler::fgGetTopLevelQmark(GenTreePtr expr, GenTreePtr* ppDst /* = NULL */)
+GenTree* Compiler::fgGetTopLevelQmark(GenTree* expr, GenTree** ppDst /* = NULL */)
{
if (ppDst != nullptr)
{
*ppDst = nullptr;
}
- GenTreePtr topQmark = nullptr;
+ GenTree* topQmark = nullptr;
if (expr->gtOper == GT_QMARK)
{
topQmark = expr;
@@ -17397,7 +17397,7 @@ GenTreePtr Compiler::fgGetTopLevelQmark(GenTreePtr expr, GenTreePtr* ppDst /* =
* tmp has the result.
*
*/
-void Compiler::fgExpandQmarkForCastInstOf(BasicBlock* block, GenTreePtr stmt)
+void Compiler::fgExpandQmarkForCastInstOf(BasicBlock* block, GenTree* stmt)
{
#ifdef DEBUG
if (verbose)
@@ -17407,24 +17407,24 @@ void Compiler::fgExpandQmarkForCastInstOf(BasicBlock* block, GenTreePtr stmt)
}
#endif // DEBUG
- GenTreePtr expr = stmt->gtStmt.gtStmtExpr;
+ GenTree* expr = stmt->gtStmt.gtStmtExpr;
- GenTreePtr dst = nullptr;
- GenTreePtr qmark = fgGetTopLevelQmark(expr, &dst);
+ GenTree* dst = nullptr;
+ GenTree* qmark = fgGetTopLevelQmark(expr, &dst);
noway_assert(dst != nullptr);
assert(qmark->gtFlags & GTF_QMARK_CAST_INSTOF);
// Get cond, true, false exprs for the qmark.
- GenTreePtr condExpr = qmark->gtGetOp1();
- GenTreePtr trueExpr = qmark->gtGetOp2()->AsColon()->ThenNode();
- GenTreePtr falseExpr = qmark->gtGetOp2()->AsColon()->ElseNode();
+ GenTree* condExpr = qmark->gtGetOp1();
+ GenTree* trueExpr = qmark->gtGetOp2()->AsColon()->ThenNode();
+ GenTree* falseExpr = qmark->gtGetOp2()->AsColon()->ElseNode();
// Get cond, true, false exprs for the nested qmark.
- GenTreePtr nestedQmark = falseExpr;
- GenTreePtr cond2Expr;
- GenTreePtr true2Expr;
- GenTreePtr false2Expr;
+ GenTree* nestedQmark = falseExpr;
+ GenTree* cond2Expr;
+ GenTree* true2Expr;
+ GenTree* false2Expr;
if (nestedQmark->gtOper == GT_QMARK)
{
@@ -17507,8 +17507,8 @@ void Compiler::fgExpandQmarkForCastInstOf(BasicBlock* block, GenTreePtr stmt)
helperBlock->inheritWeightPercentage(cond2Block, 50);
// Append cond1 as JTRUE to cond1Block
- GenTreePtr jmpTree = gtNewOperNode(GT_JTRUE, TYP_VOID, condExpr);
- GenTreePtr jmpStmt = fgNewStmtFromTree(jmpTree, stmt->gtStmt.gtStmtILoffsx);
+ GenTree* jmpTree = gtNewOperNode(GT_JTRUE, TYP_VOID, condExpr);
+ GenTree* jmpStmt = fgNewStmtFromTree(jmpTree, stmt->gtStmt.gtStmtILoffsx);
fgInsertStmtAtEnd(cond1Block, jmpStmt);
// Append cond2 as JTRUE to cond2Block
@@ -17517,14 +17517,14 @@ void Compiler::fgExpandQmarkForCastInstOf(BasicBlock* block, GenTreePtr stmt)
fgInsertStmtAtEnd(cond2Block, jmpStmt);
// AsgBlock should get tmp = op1 assignment.
- trueExpr = gtNewTempAssign(dst->AsLclVarCommon()->GetLclNum(), trueExpr);
- GenTreePtr trueStmt = fgNewStmtFromTree(trueExpr, stmt->gtStmt.gtStmtILoffsx);
+ trueExpr = gtNewTempAssign(dst->AsLclVarCommon()->GetLclNum(), trueExpr);
+ GenTree* trueStmt = fgNewStmtFromTree(trueExpr, stmt->gtStmt.gtStmtILoffsx);
fgInsertStmtAtEnd(asgBlock, trueStmt);
// Since we are adding helper in the JTRUE false path, reverse the cond2 and add the helper.
gtReverseCond(cond2Expr);
- GenTreePtr helperExpr = gtNewTempAssign(dst->AsLclVarCommon()->GetLclNum(), true2Expr);
- GenTreePtr helperStmt = fgNewStmtFromTree(helperExpr, stmt->gtStmt.gtStmtILoffsx);
+ GenTree* helperExpr = gtNewTempAssign(dst->AsLclVarCommon()->GetLclNum(), true2Expr);
+ GenTree* helperStmt = fgNewStmtFromTree(helperExpr, stmt->gtStmt.gtStmtILoffsx);
fgInsertStmtAtEnd(helperBlock, helperStmt);
// Finally remove the nested qmark stmt.
@@ -17589,13 +17589,13 @@ void Compiler::fgExpandQmarkForCastInstOf(BasicBlock* block, GenTreePtr stmt)
* If the qmark assigns to a variable, then create tmps for "then"
* and "else" results and assign the temp to the variable as a writeback step.
*/
-void Compiler::fgExpandQmarkStmt(BasicBlock* block, GenTreePtr stmt)
+void Compiler::fgExpandQmarkStmt(BasicBlock* block, GenTree* stmt)
{
- GenTreePtr expr = stmt->gtStmt.gtStmtExpr;
+ GenTree* expr = stmt->gtStmt.gtStmtExpr;
// Retrieve the Qmark node to be expanded.
- GenTreePtr dst = nullptr;
- GenTreePtr qmark = fgGetTopLevelQmark(expr, &dst);
+ GenTree* dst = nullptr;
+ GenTree* qmark = fgGetTopLevelQmark(expr, &dst);
if (qmark == nullptr)
{
return;
@@ -17616,9 +17616,9 @@ void Compiler::fgExpandQmarkStmt(BasicBlock* block, GenTreePtr stmt)
#endif // DEBUG
// Retrieve the operands.
- GenTreePtr condExpr = qmark->gtGetOp1();
- GenTreePtr trueExpr = qmark->gtGetOp2()->AsColon()->ThenNode();
- GenTreePtr falseExpr = qmark->gtGetOp2()->AsColon()->ElseNode();
+ GenTree* condExpr = qmark->gtGetOp1();
+ GenTree* trueExpr = qmark->gtGetOp2()->AsColon()->ThenNode();
+ GenTree* falseExpr = qmark->gtGetOp2()->AsColon()->ElseNode();
assert(condExpr->gtFlags & GTF_RELOP_QMARK);
condExpr->gtFlags &= ~GTF_RELOP_QMARK;
@@ -17722,8 +17722,8 @@ void Compiler::fgExpandQmarkStmt(BasicBlock* block, GenTreePtr stmt)
elseBlock->inheritWeightPercentage(condBlock, 50);
}
- GenTreePtr jmpTree = gtNewOperNode(GT_JTRUE, TYP_VOID, qmark->gtGetOp1());
- GenTreePtr jmpStmt = fgNewStmtFromTree(jmpTree, stmt->gtStmt.gtStmtILoffsx);
+ GenTree* jmpTree = gtNewOperNode(GT_JTRUE, TYP_VOID, qmark->gtGetOp1());
+ GenTree* jmpStmt = fgNewStmtFromTree(jmpTree, stmt->gtStmt.gtStmtILoffsx);
fgInsertStmtAtEnd(condBlock, jmpStmt);
// Remove the original qmark statement.
@@ -17749,7 +17749,7 @@ void Compiler::fgExpandQmarkStmt(BasicBlock* block, GenTreePtr stmt)
{
trueExpr = gtNewTempAssign(lclNum, trueExpr);
}
- GenTreePtr trueStmt = fgNewStmtFromTree(trueExpr, stmt->gtStmt.gtStmtILoffsx);
+ GenTree* trueStmt = fgNewStmtFromTree(trueExpr, stmt->gtStmt.gtStmtILoffsx);
fgInsertStmtAtEnd(thenBlock, trueStmt);
}
@@ -17760,7 +17760,7 @@ void Compiler::fgExpandQmarkStmt(BasicBlock* block, GenTreePtr stmt)
{
falseExpr = gtNewTempAssign(lclNum, falseExpr);
}
- GenTreePtr falseStmt = fgNewStmtFromTree(falseExpr, stmt->gtStmt.gtStmtILoffsx);
+ GenTree* falseStmt = fgNewStmtFromTree(falseExpr, stmt->gtStmt.gtStmtILoffsx);
fgInsertStmtAtEnd(elseBlock, falseStmt);
}
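In rough source-level terms, the qmark expansion performed by fgExpandQmarkStmt above amounts to the following sketch (hypothetical function and variable names, simplified branch polarity; this is not the JIT's IR):

// Illustrative analogue of expanding "dst = cond ? trueExpr : falseExpr".
int ExpandQmark(bool cond, int trueExpr, int falseExpr)
{
    int dst;             // the local the qmark result is written back to
    if (cond)            // condBlock ends with a JTRUE over the condition
    {
        dst = trueExpr;  // thenBlock assigns the "then" result
    }
    else
    {
        dst = falseExpr; // elseBlock assigns the "else" result
    }
    return dst;          // execution rejoins with dst defined
}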
@@ -17785,9 +17785,9 @@ void Compiler::fgExpandQmarkNodes()
{
for (BasicBlock* block = fgFirstBB; block; block = block->bbNext)
{
- for (GenTreePtr stmt = block->bbTreeList; stmt; stmt = stmt->gtNext)
+ for (GenTree* stmt = block->bbTreeList; stmt; stmt = stmt->gtNext)
{
- GenTreePtr expr = stmt->gtStmt.gtStmtExpr;
+ GenTree* expr = stmt->gtStmt.gtStmtExpr;
#ifdef DEBUG
fgPreExpandQmarkChecks(expr);
#endif
@@ -17811,9 +17811,9 @@ void Compiler::fgPostExpandQmarkChecks()
{
for (BasicBlock* block = fgFirstBB; block; block = block->bbNext)
{
- for (GenTreePtr stmt = block->bbTreeList; stmt; stmt = stmt->gtNext)
+ for (GenTree* stmt = block->bbTreeList; stmt; stmt = stmt->gtNext)
{
- GenTreePtr expr = stmt->gtStmt.gtStmtExpr;
+ GenTree* expr = stmt->gtStmt.gtStmtExpr;
fgWalkTreePre(&expr, Compiler::fgAssertNoQmark, nullptr);
}
}
@@ -17860,7 +17860,7 @@ void Compiler::fgMorph()
if (lvaTable[i].TypeGet() == TYP_REF)
{
// confirm that the argument is a GC pointer (for debugging (GC stress))
- GenTreePtr op = gtNewLclvNode(i, TYP_REF);
+ GenTree* op = gtNewLclvNode(i, TYP_REF);
GenTreeArgList* args = gtNewArgList(op);
op = gtNewHelperCallNode(CORINFO_HELP_CHECK_OBJ, TYP_VOID, args);
@@ -18159,12 +18159,12 @@ void Compiler::fgPromoteStructs()
#endif // DEBUG
}
-Compiler::fgWalkResult Compiler::fgMorphStructField(GenTreePtr tree, fgWalkData* fgWalkPre)
+Compiler::fgWalkResult Compiler::fgMorphStructField(GenTree* tree, fgWalkData* fgWalkPre)
{
noway_assert(tree->OperGet() == GT_FIELD);
- GenTreePtr objRef = tree->gtField.gtFldObj;
- GenTreePtr obj = ((objRef != nullptr) && (objRef->gtOper == GT_ADDR)) ? objRef->gtOp.gtOp1 : nullptr;
+ GenTree* objRef = tree->gtField.gtFldObj;
+ GenTree* obj = ((objRef != nullptr) && (objRef->gtOper == GT_ADDR)) ? objRef->gtOp.gtOp1 : nullptr;
noway_assert((tree->gtFlags & GTF_GLOB_REF) || ((obj != nullptr) && (obj->gtOper == GT_LCL_VAR)));
/* Is this an instance data member? */
@@ -18204,7 +18204,7 @@ Compiler::fgWalkResult Compiler::fgMorphStructField(GenTreePtr tree, fgWalkData*
tree->gtFlags &= GTF_NODE_MASK;
tree->gtFlags &= ~GTF_GLOB_REF;
- GenTreePtr parent = fgWalkPre->parentStack->Index(1);
+ GenTree* parent = fgWalkPre->parentStack->Index(1);
if (parent->gtOper == GT_ASG)
{
if (parent->gtOp.gtOp1 == tree)
@@ -18293,7 +18293,7 @@ Compiler::fgWalkResult Compiler::fgMorphStructField(GenTreePtr tree, fgWalkData*
tree->gtLclVarCommon.SetLclNum(lclNum);
tree->gtFlags &= GTF_NODE_MASK;
- GenTreePtr parent = fgWalkPre->parentStack->Index(1);
+ GenTree* parent = fgWalkPre->parentStack->Index(1);
if ((parent->gtOper == GT_ASG) && (parent->gtOp.gtOp1 == tree))
{
tree->gtFlags |= GTF_VAR_DEF;
@@ -18314,7 +18314,7 @@ Compiler::fgWalkResult Compiler::fgMorphStructField(GenTreePtr tree, fgWalkData*
return WALK_CONTINUE;
}
-Compiler::fgWalkResult Compiler::fgMorphLocalField(GenTreePtr tree, fgWalkData* fgWalkPre)
+Compiler::fgWalkResult Compiler::fgMorphLocalField(GenTree* tree, fgWalkData* fgWalkPre)
{
noway_assert(tree->OperGet() == GT_LCL_FLD);
@@ -18367,7 +18367,7 @@ Compiler::fgWalkResult Compiler::fgMorphLocalField(GenTreePtr tree, fgWalkData*
#endif // DEBUG
}
- GenTreePtr parent = fgWalkPre->parentStack->Index(1);
+ GenTree* parent = fgWalkPre->parentStack->Index(1);
if ((parent->gtOper == GT_ASG) && (parent->gtOp.gtOp1 == tree))
{
tree->gtFlags |= GTF_VAR_DEF;
@@ -18533,11 +18533,11 @@ void Compiler::fgRetypeImplicitByRefArgs()
// Insert IR that initializes the temp from the parameter.
// LHS is a simple reference to the temp.
fgEnsureFirstBBisScratch();
- GenTreePtr lhs = gtNewLclvNode(newLclNum, varDsc->lvType);
+ GenTree* lhs = gtNewLclvNode(newLclNum, varDsc->lvType);
// RHS is an indirection (using GT_OBJ) off the parameter.
- GenTreePtr addr = gtNewLclvNode(lclNum, TYP_BYREF);
- GenTreePtr rhs = gtNewBlockVal(addr, (unsigned)size);
- GenTreePtr assign = gtNewAssignNode(lhs, rhs);
+ GenTree* addr = gtNewLclvNode(lclNum, TYP_BYREF);
+ GenTree* rhs = gtNewBlockVal(addr, (unsigned)size);
+ GenTree* assign = gtNewAssignNode(lhs, rhs);
fgInsertStmtAtBeg(fgFirstBB, assign);
}
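The temp-initializing IR built just above corresponds, in hedged source-level terms, to a block copy through the incoming pointer; the struct size and names below are made up for illustration:

#include <cstring>

struct Big { char bytes[64]; };              // hypothetical large struct

// An implicit by-ref parameter really arrives as a pointer; the retyped
// temp is initialized from an indirection (GT_OBJ) off that pointer.
void Callee(const Big* param)
{
    Big temp;                                // the new local (newLclNum)
    std::memcpy(&temp, param, sizeof(Big));  // the "lhs = rhs" block assignment
    // ... the body then refers to 'temp' rather than the parameter ...
    (void)temp;
}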
@@ -18720,7 +18720,7 @@ void Compiler::fgMarkDemotedImplicitByRefArgs()
* Morph irregular parameters
* for x64 and ARM64 this means turning them into byrefs, adding extra indirs.
*/
-bool Compiler::fgMorphImplicitByRefArgs(GenTreePtr tree)
+bool Compiler::fgMorphImplicitByRefArgs(GenTree* tree)
{
#if (!defined(_TARGET_AMD64_) || defined(FEATURE_UNIX_AMD64_STRUCT_PASSING)) && !defined(_TARGET_ARM64_)
@@ -18737,19 +18737,19 @@ bool Compiler::fgMorphImplicitByRefArgs(GenTreePtr tree)
{
if (tree->gtOp.gtOp1->gtOper == GT_LCL_VAR)
{
- GenTreePtr morphedTree = fgMorphImplicitByRefArgs(tree, true);
- changed = (morphedTree != nullptr);
+ GenTree* morphedTree = fgMorphImplicitByRefArgs(tree, true);
+ changed = (morphedTree != nullptr);
assert(!changed || (morphedTree == tree));
}
}
else
{
- for (GenTreePtr* pTree : tree->UseEdges())
+ for (GenTree** pTree : tree->UseEdges())
{
- GenTreePtr childTree = *pTree;
+ GenTree* childTree = *pTree;
if (childTree->gtOper == GT_LCL_VAR)
{
- GenTreePtr newChildTree = fgMorphImplicitByRefArgs(childTree, false);
+ GenTree* newChildTree = fgMorphImplicitByRefArgs(childTree, false);
if (newChildTree != nullptr)
{
changed = true;
@@ -18763,12 +18763,12 @@ bool Compiler::fgMorphImplicitByRefArgs(GenTreePtr tree)
#endif // (_TARGET_AMD64_ && !FEATURE_UNIX_AMD64_STRUCT_PASSING) || _TARGET_ARM64_
}
-GenTreePtr Compiler::fgMorphImplicitByRefArgs(GenTreePtr tree, bool isAddr)
+GenTree* Compiler::fgMorphImplicitByRefArgs(GenTree* tree, bool isAddr)
{
assert((tree->gtOper == GT_LCL_VAR) || ((tree->gtOper == GT_ADDR) && (tree->gtOp.gtOp1->gtOper == GT_LCL_VAR)));
assert(isAddr == (tree->gtOper == GT_ADDR));
- GenTreePtr lclVarTree = isAddr ? tree->gtOp.gtOp1 : tree;
+ GenTree* lclVarTree = isAddr ? tree->gtOp.gtOp1 : tree;
unsigned lclNum = lclVarTree->gtLclVarCommon.gtLclNum;
LclVarDsc* lclVarDsc = &lvaTable[lclNum];
@@ -18907,16 +18907,16 @@ enum AddrExposedContext
typedef ArrayStack<AddrExposedContext> AXCStack;
// We use pre-post to simulate passing an argument in a recursion, via a stack.
-Compiler::fgWalkResult Compiler::fgMarkAddrTakenLocalsPostCB(GenTreePtr* pTree, fgWalkData* fgWalkPre)
+Compiler::fgWalkResult Compiler::fgMarkAddrTakenLocalsPostCB(GenTree** pTree, fgWalkData* fgWalkPre)
{
AXCStack* axcStack = reinterpret_cast<AXCStack*>(fgWalkPre->pCallbackData);
(void)axcStack->Pop();
return WALK_CONTINUE;
}
-Compiler::fgWalkResult Compiler::fgMarkAddrTakenLocalsPreCB(GenTreePtr* pTree, fgWalkData* fgWalkPre)
+Compiler::fgWalkResult Compiler::fgMarkAddrTakenLocalsPreCB(GenTree** pTree, fgWalkData* fgWalkPre)
{
- GenTreePtr tree = *pTree;
+ GenTree* tree = *pTree;
Compiler* comp = fgWalkPre->compiler;
AXCStack* axcStack = reinterpret_cast<AXCStack*>(fgWalkPre->pCallbackData);
AddrExposedContext axc = axcStack->Top();
@@ -18929,7 +18929,7 @@ Compiler::fgWalkResult Compiler::fgMarkAddrTakenLocalsPreCB(GenTreePtr* pTree, f
case AXC_IndAdd:
{
- GenTreePtr parent = fgWalkPre->parentStack->Index(1);
+ GenTree* parent = fgWalkPre->parentStack->Index(1);
assert(parent->OperGet() == GT_ADD);
// Is one of the args a constant representing a field offset,
// and is this the other? If so, Ind context.
@@ -18979,7 +18979,7 @@ Compiler::fgWalkResult Compiler::fgMarkAddrTakenLocalsPreCB(GenTreePtr* pTree, f
// arguments, then we have to consider that storage location (indeed, its underlying containing
// location) to be address taken. So get the width of the initblk or copyblk.
- GenTreePtr parent = fgWalkPre->parentStack->Index(1);
+ GenTree* parent = fgWalkPre->parentStack->Index(1);
GenTreeBlk* blk = tree->AsBlk();
unsigned width = blk->gtBlkSize;
noway_assert(width != 0);
@@ -19261,7 +19261,7 @@ Compiler::fgWalkResult Compiler::fgMarkAddrTakenLocalsPreCB(GenTreePtr* pTree, f
}
}
-bool Compiler::fgFitsInOrNotLoc(GenTreePtr tree, unsigned width)
+bool Compiler::fgFitsInOrNotLoc(GenTree* tree, unsigned width)
{
if (tree->TypeGet() != TYP_STRUCT)
{
@@ -19288,7 +19288,7 @@ bool Compiler::fgFitsInOrNotLoc(GenTreePtr tree, unsigned width)
}
}
-void Compiler::fgAddFieldSeqForZeroOffset(GenTreePtr op1, FieldSeqNode* fieldSeq)
+void Compiler::fgAddFieldSeqForZeroOffset(GenTree* op1, FieldSeqNode* fieldSeq)
{
assert(op1->TypeGet() == TYP_BYREF || op1->TypeGet() == TYP_I_IMPL || op1->TypeGet() == TYP_REF);
@@ -19364,7 +19364,7 @@ void Compiler::fgMarkAddressExposedLocals()
compCurBB = block;
- GenTreePtr stmt;
+ GenTree* stmt;
for (stmt = block->bbTreeList; stmt; stmt = stmt->gtNext)
{
@@ -19450,14 +19450,14 @@ bool Compiler::fgNodesMayInterfere(GenTree* write, GenTree* read)
* reverse the tree due to the fact we saw x = y <op> x and we want to fold that into
* x <op>= y because of the operator's properties.
*/
-bool Compiler::fgShouldCreateAssignOp(GenTreePtr tree, bool* bReverse)
+bool Compiler::fgShouldCreateAssignOp(GenTree* tree, bool* bReverse)
{
#if CPU_LOAD_STORE_ARCH
/* In the case of a load/store architecture, there's no gain by doing any of this, we bail. */
return false;
#else
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtGetOp2();
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtGetOp2();
genTreeOps cmop = op2->OperGet();
/* Is the destination identical to the first RHS sub-operand? */
@@ -19541,20 +19541,20 @@ bool Compiler::fgShouldCreateAssignOp(GenTreePtr tree, bool* bReverse)
// if this function successfully optimized the stmts, then return true. Otherwise
// return false;
-bool Compiler::fgMorphCombineSIMDFieldAssignments(BasicBlock* block, GenTreePtr stmt)
+bool Compiler::fgMorphCombineSIMDFieldAssignments(BasicBlock* block, GenTree* stmt)
{
noway_assert(stmt->gtOper == GT_STMT);
- GenTreePtr tree = stmt->gtStmt.gtStmtExpr;
+ GenTree* tree = stmt->gtStmt.gtStmtExpr;
assert(tree->OperGet() == GT_ASG);
- GenTreePtr originalLHS = tree->gtOp.gtOp1;
- GenTreePtr prevLHS = tree->gtOp.gtOp1;
- GenTreePtr prevRHS = tree->gtOp.gtOp2;
- unsigned index = 0;
- var_types baseType = TYP_UNKNOWN;
- unsigned simdSize = 0;
- GenTreePtr simdStructNode = getSIMDStructFromField(prevRHS, &baseType, &index, &simdSize, true);
+ GenTree* originalLHS = tree->gtOp.gtOp1;
+ GenTree* prevLHS = tree->gtOp.gtOp1;
+ GenTree* prevRHS = tree->gtOp.gtOp2;
+ unsigned index = 0;
+ var_types baseType = TYP_UNKNOWN;
+ unsigned simdSize = 0;
+ GenTree* simdStructNode = getSIMDStructFromField(prevRHS, &baseType, &index, &simdSize, true);
if (simdStructNode == nullptr || index != 0 || baseType != TYP_FLOAT)
{
@@ -19562,21 +19562,21 @@ bool Compiler::fgMorphCombineSIMDFieldAssignments(BasicBlock* block, GenTreePtr
return false;
}
- var_types simdType = getSIMDTypeForSize(simdSize);
- int assignmentsCount = simdSize / genTypeSize(baseType) - 1;
- int remainingAssignments = assignmentsCount;
- GenTreePtr curStmt = stmt->gtNext;
- GenTreePtr lastStmt = stmt;
+ var_types simdType = getSIMDTypeForSize(simdSize);
+ int assignmentsCount = simdSize / genTypeSize(baseType) - 1;
+ int remainingAssignments = assignmentsCount;
+ GenTree* curStmt = stmt->gtNext;
+ GenTree* lastStmt = stmt;
while (curStmt != nullptr && remainingAssignments > 0)
{
- GenTreePtr exp = curStmt->gtStmt.gtStmtExpr;
+ GenTree* exp = curStmt->gtStmt.gtStmtExpr;
if (exp->OperGet() != GT_ASG)
{
break;
}
- GenTreePtr curLHS = exp->gtGetOp1();
- GenTreePtr curRHS = exp->gtGetOp2();
+ GenTree* curLHS = exp->gtGetOp1();
+ GenTree* curRHS = exp->gtGetOp2();
if (!areArgumentsContiguous(prevLHS, curLHS) || !areArgumentsContiguous(prevRHS, curRHS))
{
@@ -19744,8 +19744,8 @@ bool Compiler::fgCheckStmtAfterTailCall()
// Side effect flags on a GT_COMMA may be overly pessimistic, so examine
// the constituent nodes.
- GenTreePtr popExpr = popStmt->gtStmtExpr;
- bool isSideEffectFree = (popExpr->gtFlags & GTF_ALL_EFFECT) == 0;
+ GenTree* popExpr = popStmt->gtStmtExpr;
+ bool isSideEffectFree = (popExpr->gtFlags & GTF_ALL_EFFECT) == 0;
if (!isSideEffectFree && (popExpr->OperGet() == GT_COMMA))
{
isSideEffectFree = ((popExpr->gtGetOp1()->gtFlags & GTF_ALL_EFFECT) == 0) &&
@@ -19773,7 +19773,7 @@ bool Compiler::fgCheckStmtAfterTailCall()
// The next stmt can be GT_RETURN(TYP_VOID) or GT_RETURN(lclVar),
// where lclVar was return buffer in the call for structs or simd.
GenTreeStmt* retStmt = nextMorphStmt;
- GenTreePtr retExpr = retStmt->gtStmtExpr;
+ GenTree* retExpr = retStmt->gtStmtExpr;
noway_assert(retExpr->gtOper == GT_RETURN);
nextMorphStmt = retStmt->gtNextStmt;
@@ -19804,10 +19804,10 @@ bool Compiler::fgCheckStmtAfterTailCall()
#endif
{
GenTreeStmt* retStmt = nextMorphStmt;
- GenTreePtr retExpr = nextMorphStmt->gtStmtExpr;
+ GenTree* retExpr = nextMorphStmt->gtStmtExpr;
noway_assert(retExpr->gtOper == GT_RETURN);
- GenTreePtr treeWithLcl = retExpr->gtGetOp1();
+ GenTree* treeWithLcl = retExpr->gtGetOp1();
while (treeWithLcl->gtOper == GT_CAST)
{
noway_assert(!treeWithLcl->gtOverflow());
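Every hunk above is the same textual substitution; a minimal standalone sketch (the GenTree stand-in below is hypothetical, not the real node type) of the deleted alias and of why each GenTreePtr* in the walker callbacks becomes GenTree**:

#include <type_traits>

struct GenTree                     // stand-in; the real node has many fields
{
    GenTree* gtNext = nullptr;
};

using GenTreePtr = GenTree*;       // the alias this commit deletes

// GenTreePtr names exactly GenTree*, so GenTreePtr* (a pointer to a tree
// edge, as taken by the fgWalk callbacks) is exactly GenTree**.
static_assert(std::is_same<GenTreePtr, GenTree*>::value, "purely textual");
static_assert(std::is_same<GenTreePtr*, GenTree**>::value, "purely textual");

int main()
{
    return 0;
}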
diff --git a/src/jit/objectalloc.cpp b/src/jit/objectalloc.cpp
index 2e19f4378d..93e60ee3c4 100644
--- a/src/jit/objectalloc.cpp
+++ b/src/jit/objectalloc.cpp
@@ -76,8 +76,8 @@ void ObjectAllocator::MorphAllocObjNodes()
for (GenTreeStmt* stmt = block->firstStmt(); stmt; stmt = stmt->gtNextStmt)
{
- GenTreePtr stmtExpr = stmt->gtStmtExpr;
- GenTreePtr op2 = nullptr;
+ GenTree* stmtExpr = stmt->gtStmtExpr;
+ GenTree* op2 = nullptr;
bool canonicalAllocObjFound = false;
@@ -102,7 +102,7 @@ void ObjectAllocator::MorphAllocObjNodes()
// \--* GT_LCL_VAR ref
//------------------------------------------------------------------------
- GenTreePtr op1 = stmtExpr->gtGetOp1();
+ GenTree* op1 = stmtExpr->gtGetOp1();
assert(op1->OperGet() == GT_LCL_VAR);
assert(op1->TypeGet() == TYP_REF);
@@ -149,13 +149,13 @@ void ObjectAllocator::MorphAllocObjNodes()
//
// Notes:
// Must update parents flags after this.
-GenTreePtr ObjectAllocator::MorphAllocObjNodeIntoHelperCall(GenTreeAllocObj* allocObj)
+GenTree* ObjectAllocator::MorphAllocObjNodeIntoHelperCall(GenTreeAllocObj* allocObj)
{
assert(allocObj != nullptr);
- GenTreePtr op1 = allocObj->gtGetOp1();
+ GenTree* op1 = allocObj->gtGetOp1();
- GenTreePtr helperCall = comp->fgMorphIntoHelperCall(allocObj, allocObj->gtNewHelper, comp->gtNewArgList(op1));
+ GenTree* helperCall = comp->fgMorphIntoHelperCall(allocObj, allocObj->gtNewHelper, comp->gtNewArgList(op1));
return helperCall;
}
@@ -174,9 +174,9 @@ GenTreePtr ObjectAllocator::MorphAllocObjNodeIntoHelperCall(GenTreeAllocObj* all
// Notes:
// Must update parents flags after this.
// This function can insert additional statements before stmt.
-GenTreePtr ObjectAllocator::MorphAllocObjNodeIntoStackAlloc(GenTreeAllocObj* allocObj,
- BasicBlock* block,
- GenTreeStmt* stmt)
+GenTree* ObjectAllocator::MorphAllocObjNodeIntoStackAlloc(GenTreeAllocObj* allocObj,
+ BasicBlock* block,
+ GenTreeStmt* stmt)
{
assert(allocObj != nullptr);
assert(m_AnalysisDone);
@@ -192,9 +192,9 @@ GenTreePtr ObjectAllocator::MorphAllocObjNodeIntoStackAlloc(GenTreeAllocObj* all
//------------------------------------------------------------------------
// AssertWhenAllocObjFoundVisitor: Look for a GT_ALLOCOBJ node and assert
// when one is found.
-Compiler::fgWalkResult ObjectAllocator::AssertWhenAllocObjFoundVisitor(GenTreePtr* pTree, Compiler::fgWalkData* data)
+Compiler::fgWalkResult ObjectAllocator::AssertWhenAllocObjFoundVisitor(GenTree** pTree, Compiler::fgWalkData* data)
{
- GenTreePtr tree = *pTree;
+ GenTree* tree = *pTree;
assert(tree != nullptr);
assert(tree->OperGet() != GT_ALLOCOBJ);
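The canonical statement that MorphAllocObjNodes matches above, an assignment of a GT_ALLOCOBJ into a ref-typed local, can be modelled with a small sketch; the enums and node layout below are stand-ins, not the JIT's types:

#include <cassert>

enum Oper { GT_ASG, GT_LCL_VAR, GT_ALLOCOBJ, GT_CALL };
enum Type { TYP_REF, TYP_OTHER };

struct Node
{
    Oper  oper;
    Type  type;
    Node* op1;
    Node* op2;
};

// Rewrites "lcl = ALLOCOBJ(...)" into "lcl = CALL new-helper(...)",
// standing in for MorphAllocObjNodeIntoHelperCall above.
void MorphAllocObjStmt(Node* stmtExpr)
{
    assert(stmtExpr->oper == GT_ASG);
    assert(stmtExpr->op1->oper == GT_LCL_VAR && stmtExpr->op1->type == TYP_REF);

    Node* op2 = stmtExpr->op2;
    if (op2->oper == GT_ALLOCOBJ)
    {
        op2->oper = GT_CALL;       // fgMorphIntoHelperCall in the real code
    }
}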
diff --git a/src/jit/objectalloc.h b/src/jit/objectalloc.h
index bea6744024..ece050741d 100644
--- a/src/jit/objectalloc.h
+++ b/src/jit/objectalloc.h
@@ -37,12 +37,12 @@ protected:
private:
bool CanAllocateLclVarOnStack(unsigned int lclNum) const;
- void DoAnalysis();
- void MorphAllocObjNodes();
- GenTreePtr MorphAllocObjNodeIntoHelperCall(GenTreeAllocObj* allocObj);
- GenTreePtr MorphAllocObjNodeIntoStackAlloc(GenTreeAllocObj* allocObj, BasicBlock* block, GenTreeStmt* stmt);
+ void DoAnalysis();
+ void MorphAllocObjNodes();
+ GenTree* MorphAllocObjNodeIntoHelperCall(GenTreeAllocObj* allocObj);
+ GenTree* MorphAllocObjNodeIntoStackAlloc(GenTreeAllocObj* allocObj, BasicBlock* block, GenTreeStmt* stmt);
#ifdef DEBUG
- static Compiler::fgWalkResult AssertWhenAllocObjFoundVisitor(GenTreePtr* pTree, Compiler::fgWalkData* data);
+ static Compiler::fgWalkResult AssertWhenAllocObjFoundVisitor(GenTree** pTree, Compiler::fgWalkData* data);
#endif // DEBUG
};
diff --git a/src/jit/optcse.cpp b/src/jit/optcse.cpp
index 7a72558524..67f0b5b46d 100644
--- a/src/jit/optcse.cpp
+++ b/src/jit/optcse.cpp
@@ -83,7 +83,7 @@ inline Compiler::CSEdsc* Compiler::optCSEfindDsc(unsigned index)
* For a previously marked CSE, decrement the use counts and unmark it
*/
-void Compiler::optUnmarkCSE(GenTreePtr tree)
+void Compiler::optUnmarkCSE(GenTree* tree)
{
if (!IS_CSE_INDEX(tree->gtCSEnum))
{
@@ -165,7 +165,7 @@ void Compiler::optUnmarkCSE(GenTreePtr tree)
tree->gtCSEnum = NO_CSE;
}
-Compiler::fgWalkResult Compiler::optHasNonCSEChild(GenTreePtr* pTree, fgWalkData* data)
+Compiler::fgWalkResult Compiler::optHasNonCSEChild(GenTree** pTree, fgWalkData* data)
{
if (*pTree == data->pCallbackData)
{
@@ -194,7 +194,7 @@ Compiler::fgWalkResult Compiler::optHasNonCSEChild(GenTreePtr* pTree, fgWalkData
return WALK_SKIP_SUBTREES;
}
-Compiler::fgWalkResult Compiler::optPropagateNonCSE(GenTreePtr* pTree, fgWalkData* data)
+Compiler::fgWalkResult Compiler::optPropagateNonCSE(GenTree** pTree, fgWalkData* data)
{
GenTree* tree = *pTree;
Compiler* comp = data->compiler;
@@ -226,22 +226,22 @@ Compiler::fgWalkResult Compiler::optPropagateNonCSE(GenTreePtr* pTree, fgWalkDat
*/
/* static */
-Compiler::fgWalkResult Compiler::optUnmarkCSEs(GenTreePtr* pTree, fgWalkData* data)
+Compiler::fgWalkResult Compiler::optUnmarkCSEs(GenTree** pTree, fgWalkData* data)
{
- GenTreePtr tree = *pTree;
- Compiler* comp = data->compiler;
- GenTreePtr keepList = (GenTreePtr)(data->pCallbackData);
+ GenTree* tree = *pTree;
+ Compiler* comp = data->compiler;
+ GenTree* keepList = (GenTree*)(data->pCallbackData);
// We may have a non-NULL side effect list that is being kept
//
if (keepList)
{
- GenTreePtr keptTree = keepList;
+ GenTree* keptTree = keepList;
while (keptTree->OperGet() == GT_COMMA)
{
assert(keptTree->OperKind() & GTK_SMPOP);
- GenTreePtr op1 = keptTree->gtOp.gtOp1;
- GenTreePtr op2 = keptTree->gtGetOp2();
+ GenTree* op1 = keptTree->gtOp.gtOp1;
+ GenTree* op2 = keptTree->gtGetOp2();
// For the GT_COMMA case the op1 is part of the original CSE tree
// that is being kept because it contains some side-effect
@@ -264,7 +264,7 @@ Compiler::fgWalkResult Compiler::optUnmarkCSEs(GenTreePtr* pTree, fgWalkData* da
}
}
- // This node is being removed from the graph of GenTreePtr
+ // This node is being removed from the graph of GenTree*
// Call optUnmarkCSE and decrement the LclVar ref counts.
comp->optUnmarkCSE(tree);
assert(!IS_CSE_INDEX(tree->gtCSEnum));
@@ -293,7 +293,7 @@ Compiler::fgWalkResult Compiler::optUnmarkCSEs(GenTreePtr* pTree, fgWalkData* da
return WALK_CONTINUE;
}
-Compiler::fgWalkResult Compiler::optCSE_MaskHelper(GenTreePtr* pTree, fgWalkData* walkData)
+Compiler::fgWalkResult Compiler::optCSE_MaskHelper(GenTree** pTree, fgWalkData* walkData)
{
GenTree* tree = *pTree;
Compiler* comp = walkData->compiler;
@@ -319,7 +319,7 @@ Compiler::fgWalkResult Compiler::optCSE_MaskHelper(GenTreePtr* pTree, fgWalkData
// This functions walks all the node for an given tree
// and return the mask of CSE defs and uses for the tree
//
-void Compiler::optCSE_GetMaskData(GenTreePtr tree, optCSE_MaskData* pMaskData)
+void Compiler::optCSE_GetMaskData(GenTree* tree, optCSE_MaskData* pMaskData)
{
pMaskData->CSE_defMask = BitVecOps::MakeEmpty(cseTraits);
pMaskData->CSE_useMask = BitVecOps::MakeEmpty(cseTraits);
@@ -381,13 +381,13 @@ bool Compiler::optCSE_canSwap(GenTree* op1, GenTree* op2)
// Return true iff it safe to swap the execution order of the operands of 'tree',
// considering only the locations of the CSE defs and uses.
//
-bool Compiler::optCSE_canSwap(GenTreePtr tree)
+bool Compiler::optCSE_canSwap(GenTree* tree)
{
// We must have a binary treenode with non-null op1 and op2
assert((tree->OperKind() & GTK_SMPOP) != 0);
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtGetOp2();
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtGetOp2();
return optCSE_canSwap(op1, op2);
}
@@ -404,8 +404,8 @@ int __cdecl Compiler::optCSEcostCmpEx(const void* op1, const void* op2)
CSEdsc* dsc1 = *(CSEdsc**)op1;
CSEdsc* dsc2 = *(CSEdsc**)op2;
- GenTreePtr exp1 = dsc1->csdTree;
- GenTreePtr exp2 = dsc2->csdTree;
+ GenTree* exp1 = dsc1->csdTree;
+ GenTree* exp2 = dsc2->csdTree;
int diff;
@@ -448,8 +448,8 @@ int __cdecl Compiler::optCSEcostCmpSz(const void* op1, const void* op2)
CSEdsc* dsc1 = *(CSEdsc**)op1;
CSEdsc* dsc2 = *(CSEdsc**)op2;
- GenTreePtr exp1 = dsc1->csdTree;
- GenTreePtr exp2 = dsc2->csdTree;
+ GenTree* exp1 = dsc1->csdTree;
+ GenTree* exp2 = dsc2->csdTree;
int diff;
@@ -517,7 +517,7 @@ void Compiler::optValnumCSE_Init()
* if necessary). Returns the index or 0 if the expression can not be a CSE.
*/
-unsigned Compiler::optValnumCSE_Index(GenTreePtr tree, GenTreePtr stmt)
+unsigned Compiler::optValnumCSE_Index(GenTree* tree, GenTree* stmt)
{
unsigned key;
unsigned hash;
@@ -683,8 +683,8 @@ unsigned Compiler::optValnumCSE_Locate()
for (BasicBlock* block = fgFirstBB; block; block = block->bbNext)
{
- GenTreePtr stmt;
- GenTreePtr tree;
+ GenTree* stmt;
+ GenTree* tree;
/* Make the block publicly available */
@@ -777,7 +777,7 @@ unsigned Compiler::optValnumCSE_Locate()
// Arguments:
// compare - The compare node to check
-void Compiler::optCseUpdateCheckedBoundMap(GenTreePtr compare)
+void Compiler::optCseUpdateCheckedBoundMap(GenTree* compare)
{
assert(compare->OperIsCompare());
@@ -795,7 +795,7 @@ void Compiler::optCseUpdateCheckedBoundMap(GenTreePtr compare)
// Now look for a checked bound feeding the compare
ValueNumStore::CompareCheckedBoundArithInfo info;
- GenTreePtr boundParent = nullptr;
+ GenTree* boundParent = nullptr;
if (vnStore->IsVNCompareCheckedBound(compareVN))
{
@@ -808,8 +808,8 @@ void Compiler::optCseUpdateCheckedBoundMap(GenTreePtr compare)
{
// Compare of a bound +/- some offset to something else.
- GenTreePtr op1 = compare->gtGetOp1();
- GenTreePtr op2 = compare->gtGetOp2();
+ GenTree* op1 = compare->gtGetOp1();
+ GenTree* op2 = compare->gtGetOp2();
vnStore->GetCompareCheckedBoundArithInfo(compareVN, &info);
if (GetVNFuncForOper(op1->OperGet(), op1->IsUnsigned()) == (VNFunc)info.arrOper)
@@ -826,19 +826,19 @@ void Compiler::optCseUpdateCheckedBoundMap(GenTreePtr compare)
if (boundParent != nullptr)
{
- GenTreePtr bound = nullptr;
+ GenTree* bound = nullptr;
// Find which child of boundParent is the bound. Abort if neither
// conservative value number matches the one from the compare VN.
- GenTreePtr child1 = boundParent->gtGetOp1();
+ GenTree* child1 = boundParent->gtGetOp1();
if ((info.vnBound == child1->gtVNPair.GetConservative()) && IS_CSE_INDEX(child1->gtCSEnum))
{
bound = child1;
}
else
{
- GenTreePtr child2 = boundParent->gtGetOp2();
+ GenTree* child2 = boundParent->gtGetOp2();
if ((info.vnBound == child2->gtVNPair.GetConservative()) && IS_CSE_INDEX(child2->gtCSEnum))
{
bound = child2;
@@ -1052,8 +1052,8 @@ void Compiler::optValnumCSE_Availablity()
for (BasicBlock* block = fgFirstBB; block; block = block->bbNext)
{
- GenTreePtr stmt;
- GenTreePtr tree;
+ GenTree* stmt;
+ GenTree* tree;
/* Make the block publicly available */
@@ -1375,7 +1375,7 @@ public:
for (unsigned cnt = 0; cnt < m_pCompiler->optCSECandidateCount; cnt++)
{
Compiler::CSEdsc* dsc = sortTab[cnt];
- GenTreePtr expr = dsc->csdTree;
+ GenTree* expr = dsc->csdTree;
unsigned def;
unsigned use;
@@ -1444,7 +1444,7 @@ public:
return m_useCount;
}
// TODO-CQ: With ValNum CSE's the Expr and its cost can vary.
- GenTreePtr Expr()
+ GenTree* Expr()
{
return m_CseDsc->csdTree;
}
@@ -1936,8 +1936,8 @@ public:
}
if (!allSame)
{
- lst = dsc->csdTreeList;
- GenTreePtr firstTree = lst->tslTree;
+ lst = dsc->csdTreeList;
+ GenTree* firstTree = lst->tslTree;
printf("In %s, CSE (oper = %s, type = %s) has differing VNs: ", info.compFullName,
GenTree::OpName(firstTree->OperGet()), varTypeName(firstTree->TypeGet()));
while (lst != NULL)
@@ -1957,8 +1957,8 @@ public:
do
{
/* Process the next node in the list */
- GenTreePtr exp = lst->tslTree;
- GenTreePtr stm = lst->tslStmt;
+ GenTree* exp = lst->tslTree;
+ GenTree* stm = lst->tslStmt;
noway_assert(stm->gtOper == GT_STMT);
BasicBlock* blk = lst->tslBlock;
@@ -1984,7 +1984,7 @@ public:
// This will contain the replacement tree for exp
// It will either be the CSE def or CSE ref
//
- GenTreePtr cse = nullptr;
+ GenTree* cse = nullptr;
bool isDef;
FieldSeqNode* fldSeq = nullptr;
bool hasZeroMapAnnotation = m_pCompiler->GetZeroOffsetFieldMap()->Lookup(exp, &fldSeq);
@@ -2003,7 +2003,7 @@ public:
#endif // DEBUG
/* check for and collect any SIDE_EFFECTS */
- GenTreePtr sideEffList = nullptr;
+ GenTree* sideEffList = nullptr;
if (exp->gtFlags & GTF_PERSISTENT_SIDE_EFFECTS)
{
@@ -2033,7 +2033,7 @@ public:
vnStore->SetVNIsCheckedBound(defConservativeVN);
}
- GenTreePtr cmp;
+ GenTree* cmp;
if ((m_pCompiler->optCseCheckedBoundMap != nullptr) &&
(m_pCompiler->optCseCheckedBoundMap->Lookup(exp, &cmp)))
{
@@ -2084,15 +2084,15 @@ public:
}
#endif
- GenTreePtr cseVal = cse;
- GenTreePtr curSideEff = sideEffList;
+ GenTree* cseVal = cse;
+ GenTree* curSideEff = sideEffList;
ValueNumStore* vnStore = m_pCompiler->vnStore;
ValueNumPair exceptions_vnp = ValueNumStore::VNPForEmptyExcSet();
while ((curSideEff->OperGet() == GT_COMMA) || (curSideEff->OperGet() == GT_ASG))
{
- GenTreePtr op1 = curSideEff->gtOp.gtOp1;
- GenTreePtr op2 = curSideEff->gtOp.gtOp2;
+ GenTree* op1 = curSideEff->gtOp.gtOp1;
+ GenTree* op2 = curSideEff->gtOp.gtOp2;
ValueNumPair op1vnp;
ValueNumPair op1Xvnp = ValueNumStore::VNPForEmptyExcSet();
@@ -2151,10 +2151,10 @@ public:
exp->gtCSEnum = NO_CSE; // clear the gtCSEnum field
- GenTreePtr val = exp;
+ GenTree* val = exp;
/* Create an assignment of the value to the temp */
- GenTreePtr asg = m_pCompiler->gtNewTempAssign(cseLclVarNum, val);
+ GenTree* asg = m_pCompiler->gtNewTempAssign(cseLclVarNum, val);
// assign the proper Value Numbers
asg->gtVNPair.SetBoth(ValueNumStore::VNForVoid()); // The GT_ASG node itself is $VN.Void
@@ -2164,8 +2164,8 @@ public:
noway_assert(asg->gtOp.gtOp2 == val);
/* Create a reference to the CSE temp */
- GenTreePtr ref = m_pCompiler->gtNewLclvNode(cseLclVarNum, cseLclVarTyp);
- ref->gtVNPair = val->gtVNPair; // The new 'ref' is the same as 'val'
+ GenTree* ref = m_pCompiler->gtNewLclvNode(cseLclVarNum, cseLclVarTyp);
+ ref->gtVNPair = val->gtVNPair; // The new 'ref' is the same as 'val'
// If it has a zero-offset field seq, copy annotation to the ref
if (hasZeroMapAnnotation)
@@ -2192,7 +2192,7 @@ public:
// Walk the statement 'stm' and find the pointer
// in the tree is pointing to 'exp'
//
- GenTreePtr* link = m_pCompiler->gtFindLink(stm, exp);
+ GenTree** link = m_pCompiler->gtFindLink(stm, exp);
#ifdef DEBUG
if (link == nullptr)
@@ -2344,7 +2344,7 @@ void Compiler::optValnumCSE_Heuristic()
*
*/
-void Compiler::optValnumCSE_UnmarkCSEs(GenTreePtr deadTree, GenTreePtr keepList)
+void Compiler::optValnumCSE_UnmarkCSEs(GenTree* deadTree, GenTree* keepList)
{
assert(optValnumCSE_phase);
@@ -2407,7 +2407,7 @@ void Compiler::optOptimizeValnumCSEs()
* The following determines whether the given expression is a worthy CSE
* candidate.
*/
-bool Compiler::optIsCSEcandidate(GenTreePtr tree)
+bool Compiler::optIsCSEcandidate(GenTree* tree)
{
/* No good if the expression contains side effects or if it was marked as DONT CSE */
@@ -2710,7 +2710,7 @@ void Compiler::optCleanupCSEs()
/* Walk the statement trees in this basic block */
- GenTreePtr stmt;
+ GenTree* stmt;
// Initialize 'stmt' to the first non-Phi statement
stmt = block->FirstNonPhiDef();
@@ -2720,7 +2720,7 @@ void Compiler::optCleanupCSEs()
noway_assert(stmt->gtOper == GT_STMT);
/* We must clear the gtCSEnum field */
- for (GenTreePtr tree = stmt->gtStmt.gtStmtExpr; tree; tree = tree->gtPrev)
+ for (GenTree* tree = stmt->gtStmt.gtStmtExpr; tree; tree = tree->gtPrev)
{
tree->gtCSEnum = NO_CSE;
}
@@ -2744,7 +2744,7 @@ void Compiler::optEnsureClearCSEInfo()
/* Walk the statement trees in this basic block */
- GenTreePtr stmt;
+ GenTree* stmt;
// Initialize 'stmt' to the first non-Phi statement
stmt = block->FirstNonPhiDef();
@@ -2753,7 +2753,7 @@ void Compiler::optEnsureClearCSEInfo()
{
assert(stmt->gtOper == GT_STMT);
- for (GenTreePtr tree = stmt->gtStmt.gtStmtExpr; tree; tree = tree->gtPrev)
+ for (GenTree* tree = stmt->gtStmt.gtStmtExpr; tree; tree = tree->gtPrev)
{
assert(tree->gtCSEnum == NO_CSE);
}
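The keep-list walks in optUnmarkCSEs above (and in optRemoveTreeVisitor in the optimizer.cpp hunks that follow) rely on the preserved side effects being chained as GT_COMMA nodes, op1 holding a kept subtree and op2 the rest of the list; a hedged sketch of that shape with stand-in types:

enum Oper { GT_COMMA, GT_OTHER };

struct Node
{
    Oper  oper;
    Node* op1;    // for GT_COMMA: the kept subtree
    Node* op2;    // for GT_COMMA: the remainder of the keep list
};

// Returns true if 'tree' is one of the subtrees preserved in 'keepList':
// walk down the comma chain checking each op1, then the final node.
bool IsInKeepList(Node* keepList, Node* tree)
{
    Node* kept = keepList;
    while (kept != nullptr && kept->oper == GT_COMMA)
    {
        if (kept->op1 == tree)
        {
            return true;
        }
        kept = kept->op2;
    }
    return kept == tree;
}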
diff --git a/src/jit/optimizer.cpp b/src/jit/optimizer.cpp
index a4516e511f..69bc99f844 100644
--- a/src/jit/optimizer.cpp
+++ b/src/jit/optimizer.cpp
@@ -718,7 +718,7 @@ void Compiler::optPrintLoopInfo(unsigned lnum)
// The 'init' tree is checked if its lhs is a local and rhs is either
// a const or a local.
//
-bool Compiler::optPopulateInitInfo(unsigned loopInd, GenTreePtr init, unsigned iterVar)
+bool Compiler::optPopulateInitInfo(unsigned loopInd, GenTree* init, unsigned iterVar)
{
// Operator should be =
if (init->gtOper != GT_ASG)
@@ -726,8 +726,8 @@ bool Compiler::optPopulateInitInfo(unsigned loopInd, GenTreePtr init, unsigned i
return false;
}
- GenTreePtr lhs = init->gtOp.gtOp1;
- GenTreePtr rhs = init->gtOp.gtOp2;
+ GenTree* lhs = init->gtOp.gtOp1;
+ GenTree* rhs = init->gtOp.gtOp2;
// LHS has to be local and should equal iterVar.
if (lhs->gtOper != GT_LCL_VAR || lhs->gtLclVarCommon.gtLclNum != iterVar)
{
@@ -772,10 +772,10 @@ bool Compiler::optPopulateInitInfo(unsigned loopInd, GenTreePtr init, unsigned i
// if the test condition doesn't involve iterVar.
//
bool Compiler::optCheckIterInLoopTest(
- unsigned loopInd, GenTreePtr test, BasicBlock* from, BasicBlock* to, unsigned iterVar)
+ unsigned loopInd, GenTree* test, BasicBlock* from, BasicBlock* to, unsigned iterVar)
{
// Obtain the relop from the "test" tree.
- GenTreePtr relop;
+ GenTree* relop;
if (test->gtOper == GT_JTRUE)
{
relop = test->gtGetOp1();
@@ -788,11 +788,11 @@ bool Compiler::optCheckIterInLoopTest(
noway_assert(relop->OperKind() & GTK_RELOP);
- GenTreePtr opr1 = relop->gtOp.gtOp1;
- GenTreePtr opr2 = relop->gtOp.gtOp2;
+ GenTree* opr1 = relop->gtOp.gtOp1;
+ GenTree* opr2 = relop->gtOp.gtOp2;
- GenTreePtr iterOp;
- GenTreePtr limitOp;
+ GenTree* iterOp;
+ GenTree* limitOp;
// Make sure op1 or op2 is the iterVar.
if (opr1->gtOper == GT_LCL_VAR && opr1->gtLclVarCommon.gtLclNum == iterVar)
@@ -859,7 +859,7 @@ bool Compiler::optCheckIterInLoopTest(
// Return Value:
// iterVar local num if the iterVar is found, otherwise BAD_VAR_NUM.
//
-unsigned Compiler::optIsLoopIncrTree(GenTreePtr incr)
+unsigned Compiler::optIsLoopIncrTree(GenTree* incr)
{
GenTree* incrVal;
genTreeOps updateOper;
@@ -906,7 +906,7 @@ unsigned Compiler::optIsLoopIncrTree(GenTreePtr incr)
// Check if the "incr" tree is a "v=v+1 or v+=1" type tree and make sure it is not
// assigned in the loop.
//
-bool Compiler::optComputeIterInfo(GenTreePtr incr, BasicBlock* from, BasicBlock* to, unsigned* pIterVar)
+bool Compiler::optComputeIterInfo(GenTree* incr, BasicBlock* from, BasicBlock* to, unsigned* pIterVar)
{
unsigned iterVar = optIsLoopIncrTree(incr);
@@ -946,20 +946,20 @@ bool Compiler::optComputeIterInfo(GenTreePtr incr, BasicBlock* from, BasicBlock*
// This method just retrieves what it thinks is the "test" node,
// the callers are expected to verify that "iterVar" is used in the test.
//
-bool Compiler::optIsLoopTestEvalIntoTemp(GenTreePtr testStmt, GenTreePtr* newTest)
+bool Compiler::optIsLoopTestEvalIntoTemp(GenTree* testStmt, GenTree** newTest)
{
- GenTreePtr test = testStmt->gtStmt.gtStmtExpr;
+ GenTree* test = testStmt->gtStmt.gtStmtExpr;
if (test->gtOper != GT_JTRUE)
{
return false;
}
- GenTreePtr relop = test->gtGetOp1();
+ GenTree* relop = test->gtGetOp1();
noway_assert(relop->OperIsCompare());
- GenTreePtr opr1 = relop->gtOp.gtOp1;
- GenTreePtr opr2 = relop->gtOp.gtOp2;
+ GenTree* opr1 = relop->gtOp.gtOp1;
+ GenTree* opr2 = relop->gtOp.gtOp2;
// Make sure we have jtrue (vtmp != 0)
if ((relop->OperGet() == GT_NE) && (opr1->OperGet() == GT_LCL_VAR) && (opr2->OperGet() == GT_CNS_INT) &&
@@ -967,17 +967,17 @@ bool Compiler::optIsLoopTestEvalIntoTemp(GenTreePtr testStmt, GenTreePtr* newTes
{
// Get the previous statement to get the def (rhs) of Vtmp to see
// if the "test" is evaluated into Vtmp.
- GenTreePtr prevStmt = testStmt->gtPrev;
+ GenTree* prevStmt = testStmt->gtPrev;
if (prevStmt == nullptr)
{
return false;
}
- GenTreePtr tree = prevStmt->gtStmt.gtStmtExpr;
+ GenTree* tree = prevStmt->gtStmt.gtStmtExpr;
if (tree->OperGet() == GT_ASG)
{
- GenTreePtr lhs = tree->gtOp.gtOp1;
- GenTreePtr rhs = tree->gtOp.gtOp2;
+ GenTree* lhs = tree->gtOp.gtOp1;
+ GenTree* rhs = tree->gtOp.gtOp2;
// Return as the new test node.
if (lhs->gtOper == GT_LCL_VAR && lhs->AsLclVarCommon()->GetLclNum() == opr1->AsLclVarCommon()->GetLclNum())
@@ -1029,7 +1029,7 @@ bool Compiler::optIsLoopTestEvalIntoTemp(GenTreePtr testStmt, GenTreePtr* newTes
// the callers are expected to verify that "iterVar" is used in the test.
//
bool Compiler::optExtractInitTestIncr(
- BasicBlock* head, BasicBlock* bottom, BasicBlock* top, GenTreePtr* ppInit, GenTreePtr* ppTest, GenTreePtr* ppIncr)
+ BasicBlock* head, BasicBlock* bottom, BasicBlock* top, GenTree** ppInit, GenTree** ppTest, GenTree** ppIncr)
{
assert(ppInit != nullptr);
assert(ppTest != nullptr);
@@ -1038,10 +1038,10 @@ bool Compiler::optExtractInitTestIncr(
// Check if last two statements in the loop body are the increment of the iterator
// and the loop termination test.
noway_assert(bottom->bbTreeList != nullptr);
- GenTreePtr test = bottom->bbTreeList->gtPrev;
+ GenTree* test = bottom->bbTreeList->gtPrev;
noway_assert(test != nullptr && test->gtNext == nullptr);
- GenTreePtr newTest;
+ GenTree* newTest;
if (optIsLoopTestEvalIntoTemp(test, &newTest))
{
test = newTest;
@@ -1049,7 +1049,7 @@ bool Compiler::optExtractInitTestIncr(
// Check if we have the incr tree before the test tree, if we don't,
// check if incr is part of the loop "top".
- GenTreePtr incr = test->gtPrev;
+ GenTree* incr = test->gtPrev;
if (incr == nullptr || optIsLoopIncrTree(incr->gtStmt.gtStmtExpr) == BAD_VAR_NUM)
{
if (top == nullptr || top->bbTreeList == nullptr || top->bbTreeList->gtPrev == nullptr)
@@ -1058,7 +1058,7 @@ bool Compiler::optExtractInitTestIncr(
}
// If the prev stmt to loop test is not incr, then check if we have loop test evaluated into a tmp.
- GenTreePtr topLast = top->bbTreeList->gtPrev;
+ GenTree* topLast = top->bbTreeList->gtPrev;
if (optIsLoopIncrTree(topLast->gtStmt.gtStmtExpr) != BAD_VAR_NUM)
{
incr = topLast;
@@ -1073,13 +1073,13 @@ bool Compiler::optExtractInitTestIncr(
// Find the last statement in the loop pre-header which we expect to be the initialization of
// the loop iterator.
- GenTreePtr phdr = head->bbTreeList;
+ GenTree* phdr = head->bbTreeList;
if (phdr == nullptr)
{
return false;
}
- GenTreePtr init = phdr->gtPrev;
+ GenTree* init = phdr->gtPrev;
noway_assert(init != nullptr && (init->gtNext == nullptr));
// If it is a duplicated loop condition, skip it.
@@ -1225,9 +1225,9 @@ bool Compiler::optRecordLoop(BasicBlock* head,
//
if (bottom->bbJumpKind == BBJ_COND)
{
- GenTreePtr init;
- GenTreePtr test;
- GenTreePtr incr;
+ GenTree* init;
+ GenTree* test;
+ GenTree* incr;
if (!optExtractInitTestIncr(head, bottom, top, &init, &test, &incr))
{
goto DONE_LOOP;
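The init/test/incr extraction above targets bottom-tested loops; in hedged source-level terms (illustrative functions, not JIT code), the two test shapes that optIsLoopTestEvalIntoTemp distinguishes look like this:

// The first form ends the body with jtrue(i < n) directly; the second
// evaluates the relop into a temp first and then tests jtrue(vtmp != 0).
int SumDirect(const int* a, int n)
{
    int s = 0;
    for (int i = 0; i < n; i = i + 1)    // init / test / incr as extracted
    {
        s += a[i];
    }
    return s;
}

int SumViaTemp(const int* a, int n)
{
    int s = 0;
    if (n <= 0)
    {
        return s;          // guard: the loop below is bottom-tested
    }
    int  i = 0;
    bool vtmp;
    do
    {
        s += a[i];
        i = i + 1;
        vtmp = (i < n);    // relop evaluated into a temp ("Vtmp" above)
    } while (vtmp != 0);   // jtrue(vtmp != 0), the shape matched above
    return s;
}

int main()
{
    const int a[3] = {1, 2, 3};
    return (SumDirect(a, 3) == SumViaTemp(a, 3)) ? 0 : 1;
}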
@@ -3855,9 +3855,9 @@ void Compiler::optUnrollLoops()
// Remove the test; we're doing a full unroll.
GenTreeStmt* testCopyStmt = newBlock->lastStmt();
- GenTreePtr testCopyExpr = testCopyStmt->gtStmt.gtStmtExpr;
+ GenTree* testCopyExpr = testCopyStmt->gtStmt.gtStmtExpr;
assert(testCopyExpr->gtOper == GT_JTRUE);
- GenTreePtr sideEffList = nullptr;
+ GenTree* sideEffList = nullptr;
gtExtractSideEffList(testCopyExpr, &sideEffList, GTF_SIDE_EFFECT | GTF_ORDER_SIDEEFF);
if (sideEffList == nullptr)
{
@@ -4083,13 +4083,13 @@ bool Compiler::optReachWithoutCall(BasicBlock* topBB, BasicBlock* botBB)
* Find the loop termination test at the bottom of the loop
*/
-static GenTreePtr optFindLoopTermTest(BasicBlock* bottom)
+static GenTree* optFindLoopTermTest(BasicBlock* bottom)
{
- GenTreePtr testt = bottom->bbTreeList;
+ GenTree* testt = bottom->bbTreeList;
assert(testt && testt->gtOper == GT_STMT);
- GenTreePtr result = testt->gtPrev;
+ GenTree* result = testt->gtPrev;
#ifdef DEBUG
while (testt->gtNext)
@@ -4191,7 +4191,7 @@ void Compiler::fgOptWhileLoop(BasicBlock* block)
return;
}
- GenTreePtr condStmt = optFindLoopTermTest(bTest);
+ GenTree* condStmt = optFindLoopTermTest(bTest);
// bTest must only contain only a jtrue with no other stmts, we will only clone
// the conditional, so any other statements will not get cloned
@@ -4206,7 +4206,7 @@ void Compiler::fgOptWhileLoop(BasicBlock* block)
noway_assert(condStmt->gtOper == GT_STMT);
- GenTreePtr condTree = condStmt->gtStmt.gtStmtExpr;
+ GenTree* condTree = condStmt->gtStmt.gtStmtExpr;
noway_assert(condTree->gtOper == GT_JTRUE);
condTree = condTree->gtOp.gtOp1;
@@ -4326,7 +4326,7 @@ void Compiler::fgOptWhileLoop(BasicBlock* block)
/* Create a statement entry out of the condition and
append the condition test at the end of 'block' */
- GenTreePtr copyOfCondStmt = fgInsertStmtAtEnd(block, condTree);
+ GenTree* copyOfCondStmt = fgInsertStmtAtEnd(block, condTree);
copyOfCondStmt->gtFlags |= GTF_STMT_CMPADD;
@@ -4946,14 +4946,14 @@ bool Compiler::optComputeDerefConditions(unsigned loopNum, LoopCloneContext* con
// block - the block in which the helper call needs to be inserted.
// insertBefore - the tree before which the helper call will be inserted.
//
-void Compiler::optDebugLogLoopCloning(BasicBlock* block, GenTreePtr insertBefore)
+void Compiler::optDebugLogLoopCloning(BasicBlock* block, GenTree* insertBefore)
{
if (JitConfig.JitDebugLogLoopCloning() == 0)
{
return;
}
- GenTreePtr logCall = gtNewHelperCallNode(CORINFO_HELP_DEBUG_LOG_LOOP_CLONING, TYP_VOID);
- GenTreePtr stmt = fgNewStmtFromTree(logCall);
+ GenTree* logCall = gtNewHelperCallNode(CORINFO_HELP_DEBUG_LOG_LOOP_CLONING, TYP_VOID);
+ GenTree* stmt = fgNewStmtFromTree(logCall);
fgInsertStmtBefore(block, insertBefore, stmt);
fgMorphBlockStmt(block, stmt->AsStmt() DEBUGARG("Debug log loop cloning"));
}
@@ -5596,7 +5596,7 @@ void Compiler::optEnsureUniqueHead(unsigned loopInd, unsigned ambientWeight)
* get called with 'doit' being true, we actually perform the narrowing.
*/
-bool Compiler::optNarrowTree(GenTreePtr tree, var_types srct, var_types dstt, ValueNumPair vnpNarrow, bool doit)
+bool Compiler::optNarrowTree(GenTree* tree, var_types srct, var_types dstt, ValueNumPair vnpNarrow, bool doit)
{
genTreeOps oper;
unsigned kind;
@@ -5764,9 +5764,9 @@ bool Compiler::optNarrowTree(GenTreePtr tree, var_types srct, var_types dstt, Va
if (kind & (GTK_BINOP | GTK_UNOP))
{
- GenTreePtr op1;
+ GenTree* op1;
op1 = tree->gtOp.gtOp1;
- GenTreePtr op2;
+ GenTree* op2;
op2 = tree->gtOp.gtOp2;
switch (tree->gtOper)
@@ -5957,13 +5957,13 @@ bool Compiler::optNarrowTree(GenTreePtr tree, var_types srct, var_types dstt, Va
* somewhere in a list of basic blocks (or in an entire loop).
*/
-Compiler::fgWalkResult Compiler::optIsVarAssgCB(GenTreePtr* pTree, fgWalkData* data)
+Compiler::fgWalkResult Compiler::optIsVarAssgCB(GenTree** pTree, fgWalkData* data)
{
- GenTreePtr tree = *pTree;
+ GenTree* tree = *pTree;
if (tree->OperIsAssignment())
{
- GenTreePtr dest = tree->gtOp.gtOp1;
+ GenTree* dest = tree->gtOp.gtOp1;
genTreeOps destOper = dest->OperGet();
isVarAssgDsc* desc = (isVarAssgDsc*)data->pCallbackData;
@@ -6026,7 +6026,7 @@ Compiler::fgWalkResult Compiler::optIsVarAssgCB(GenTreePtr* pTree, fgWalkData* d
/*****************************************************************************/
-bool Compiler::optIsVarAssigned(BasicBlock* beg, BasicBlock* end, GenTreePtr skip, unsigned var)
+bool Compiler::optIsVarAssigned(BasicBlock* beg, BasicBlock* end, GenTree* skip, unsigned var)
{
bool result;
isVarAssgDsc desc;
@@ -6199,7 +6199,7 @@ int Compiler::optIsSetAssgLoop(unsigned lnum, ALLVARSET_VALARG_TP vars, varRefKi
return 0;
}
-void Compiler::optPerformHoistExpr(GenTreePtr origExpr, unsigned lnum)
+void Compiler::optPerformHoistExpr(GenTree* origExpr, unsigned lnum)
{
#ifdef DEBUG
if (verbose)
@@ -6217,13 +6217,13 @@ void Compiler::optPerformHoistExpr(GenTreePtr origExpr, unsigned lnum)
assert(optLoopTable[lnum].lpFlags & LPFLG_HOISTABLE);
// Create a copy of the expression and mark it for CSE's.
- GenTreePtr hoistExpr = gtCloneExpr(origExpr, GTF_MAKE_CSE);
+ GenTree* hoistExpr = gtCloneExpr(origExpr, GTF_MAKE_CSE);
// At this point we should have a cloned expression, marked with the GTF_MAKE_CSE flag
assert(hoistExpr != origExpr);
assert(hoistExpr->gtFlags & GTF_MAKE_CSE);
- GenTreePtr hoist = hoistExpr;
+ GenTree* hoist = hoistExpr;
// The value of the expression isn't used (unless it's an assignment).
if (hoistExpr->OperGet() != GT_ASG)
{
@@ -6248,18 +6248,18 @@ void Compiler::optPerformHoistExpr(GenTreePtr origExpr, unsigned lnum)
hoist = fgMorphTree(hoist);
- GenTreePtr hoistStmt = gtNewStmt(hoist);
+ GenTree* hoistStmt = gtNewStmt(hoist);
hoistStmt->gtFlags |= GTF_STMT_CMPADD;
/* simply append the statement at the end of the preHead's list */
- GenTreePtr treeList = preHead->bbTreeList;
+ GenTree* treeList = preHead->bbTreeList;
if (treeList)
{
/* append after last statement */
- GenTreePtr last = treeList->gtPrev;
+ GenTree* last = treeList->gtPrev;
assert(last->gtNext == nullptr);
last->gtNext = hoistStmt;
@@ -6442,7 +6442,7 @@ void Compiler::optHoistLoopCode()
for (NodeToTestDataMap::KeyIterator ki = testData->Begin(); !ki.Equal(testData->End()); ++ki)
{
TestLabelAndNum tlAndN;
- GenTreePtr node = ki.Get();
+ GenTree* node = ki.Get();
bool b = testData->Lookup(node, &tlAndN);
assert(b);
if (tlAndN.m_tl != TL_LoopHoist)
@@ -6710,9 +6710,9 @@ void Compiler::optHoistLoopExprsForBlock(BasicBlock* blk, unsigned lnum, LoopHoi
for (GenTreeStmt* stmt = blk->FirstNonPhiDef(); stmt; stmt = stmt->gtNextStmt)
{
- GenTreePtr stmtTree = stmt->gtStmtExpr;
- bool hoistable;
- bool cctorDependent;
+ GenTree* stmtTree = stmt->gtStmtExpr;
+ bool hoistable;
+ bool cctorDependent;
(void)optHoistLoopExprsForTree(stmtTree, lnum, hoistCtxt, &firstBlockAndBeforeSideEffect, &hoistable,
&cctorDependent);
if (hoistable)
@@ -6723,7 +6723,7 @@ void Compiler::optHoistLoopExprsForBlock(BasicBlock* blk, unsigned lnum, LoopHoi
}
}
-bool Compiler::optIsProfitableToHoistableTree(GenTreePtr tree, unsigned lnum)
+bool Compiler::optIsProfitableToHoistableTree(GenTree* tree, unsigned lnum)
{
LoopDsc* pLoopDsc = &optLoopTable[lnum];
@@ -6825,7 +6825,7 @@ bool Compiler::optIsProfitableToHoistableTree(GenTreePtr tree, unsigned lnum)
// hoisted (even if '*pHoistable' is true) unless a preceding corresponding cctor init helper
// call is also hoisted.
//
-bool Compiler::optHoistLoopExprsForTree(GenTreePtr tree,
+bool Compiler::optHoistLoopExprsForTree(GenTree* tree,
unsigned lnum,
LoopHoistContext* hoistCtxt,
bool* pFirstBlockAndBeforeSideEffect,
@@ -6871,7 +6871,7 @@ bool Compiler::optHoistLoopExprsForTree(GenTreePtr tree,
// Check for the case where we can stop propagating cctor-dependent upwards.
if (tree->OperIs(GT_COMMA) && (childNum == 1))
{
- GenTreePtr op1 = tree->gtGetOp1();
+ GenTree* op1 = tree->gtGetOp1();
if (op1->OperIs(GT_CALL))
{
GenTreeCall* call = op1->AsCall();
@@ -6990,7 +6990,7 @@ bool Compiler::optHoistLoopExprsForTree(GenTreePtr tree,
else if (tree->OperIsAssignment())
{
// If the LHS of the assignment has a global reference, then assume it's a global side effect.
- GenTreePtr lhs = tree->gtOp.gtOp1;
+ GenTree* lhs = tree->gtOp.gtOp1;
if (lhs->gtFlags & GTF_GLOB_REF)
{
*pFirstBlockAndBeforeSideEffect = false;
@@ -6998,7 +6998,7 @@ bool Compiler::optHoistLoopExprsForTree(GenTreePtr tree,
}
else if (tree->OperIsCopyBlkOp())
{
- GenTreePtr args = tree->gtOp.gtOp1;
+ GenTree* args = tree->gtOp.gtOp1;
assert(args->OperGet() == GT_LIST);
if (args->gtOp.gtOp1->gtFlags & GTF_GLOB_REF)
{
@@ -7024,7 +7024,7 @@ bool Compiler::optHoistLoopExprsForTree(GenTreePtr tree,
continue;
}
- GenTreePtr child = tree->GetChild(childNum);
+ GenTree* child = tree->GetChild(childNum);
// We try to hoist this 'child' tree
optHoistCandidate(child, lnum, hoistCtxt);
@@ -7037,7 +7037,7 @@ bool Compiler::optHoistLoopExprsForTree(GenTreePtr tree,
return treeIsInvariant;
}
-void Compiler::optHoistCandidate(GenTreePtr tree, unsigned lnum, LoopHoistContext* hoistCtxt)
+void Compiler::optHoistCandidate(GenTree* tree, unsigned lnum, LoopHoistContext* hoistCtxt)
{
if (lnum == BasicBlock::NOT_IN_LOOP)
{
@@ -7185,7 +7185,7 @@ bool Compiler::optVNIsLoopInvariant(ValueNum vn, unsigned lnum, VNToBoolMap* loo
return res;
}
-bool Compiler::optTreeIsValidAtLoopHead(GenTreePtr tree, unsigned lnum)
+bool Compiler::optTreeIsValidAtLoopHead(GenTree* tree, unsigned lnum)
{
if (tree->OperIsLocal())
{
@@ -7346,14 +7346,14 @@ void Compiler::fgCreateLoopPreHeader(unsigned lnum)
// into the phi via the loop header block will now flow through the preheader
// block from the header block.
- for (GenTreePtr stmt = top->bbTreeList; stmt; stmt = stmt->gtNext)
+ for (GenTree* stmt = top->bbTreeList; stmt; stmt = stmt->gtNext)
{
- GenTreePtr tree = stmt->gtStmt.gtStmtExpr;
+ GenTree* tree = stmt->gtStmt.gtStmtExpr;
if (tree->OperGet() != GT_ASG)
{
break;
}
- GenTreePtr op2 = tree->gtGetOp2();
+ GenTree* op2 = tree->gtGetOp2();
if (op2->OperGet() != GT_PHI)
{
break;
@@ -7588,9 +7588,9 @@ void Compiler::optComputeLoopSideEffectsOfBlock(BasicBlock* blk)
MemoryKindSet memoryHavoc = emptyMemoryKindSet;
// Now iterate over the remaining statements, and their trees.
- for (GenTreePtr stmts = blk->FirstNonPhiDef(); (stmts != nullptr); stmts = stmts->gtNext)
+ for (GenTree* stmts = blk->FirstNonPhiDef(); (stmts != nullptr); stmts = stmts->gtNext)
{
- for (GenTreePtr tree = stmts->gtStmt.gtStmtList; (tree != nullptr); tree = tree->gtNext)
+ for (GenTree* tree = stmts->gtStmt.gtStmtList; (tree != nullptr); tree = tree->gtNext)
{
genTreeOps oper = tree->OperGet();
@@ -7623,11 +7623,11 @@ void Compiler::optComputeLoopSideEffectsOfBlock(BasicBlock* blk)
if (GenTree::OperIsAssignment(oper))
{
- GenTreePtr lhs = tree->gtOp.gtOp1->gtEffectiveVal(/*commaOnly*/ true);
+ GenTree* lhs = tree->gtOp.gtOp1->gtEffectiveVal(/*commaOnly*/ true);
if (lhs->OperGet() == GT_IND)
{
- GenTreePtr arg = lhs->gtOp.gtOp1->gtEffectiveVal(/*commaOnly*/ true);
+ GenTree* arg = lhs->gtOp.gtOp1->gtEffectiveVal(/*commaOnly*/ true);
FieldSeqNode* fldSeqArrElem = nullptr;
if ((tree->gtFlags & GTF_IND_VOLATILE) != 0)
@@ -7677,8 +7677,8 @@ void Compiler::optComputeLoopSideEffectsOfBlock(BasicBlock* blk)
{
// We are only interested in IsFieldAddr()'s fldSeq out parameter.
//
- GenTreePtr obj = nullptr; // unused
- GenTreePtr staticOffset = nullptr; // unused
+ GenTree* obj = nullptr; // unused
+ GenTree* staticOffset = nullptr; // unused
FieldSeqNode* fldSeq = nullptr;
if (arg->IsFieldAddr(this, &obj, &staticOffset, &fldSeq) &&
@@ -7726,7 +7726,7 @@ void Compiler::optComputeLoopSideEffectsOfBlock(BasicBlock* blk)
else if (lhs->OperGet() == GT_LCL_VAR)
{
GenTreeLclVar* lhsLcl = lhs->AsLclVar();
- GenTreePtr rhs = tree->gtOp.gtOp2;
+ GenTree* rhs = tree->gtOp.gtOp2;
ValueNum rhsVN = rhs->gtVNPair.GetLiberal();
// If we gave the RHS a value number, propagate it.
if (rhsVN != ValueNumStore::NoVN)
@@ -7757,7 +7757,7 @@ void Compiler::optComputeLoopSideEffectsOfBlock(BasicBlock* blk)
case GT_ADDR:
// Is it an addr of a array index expression?
{
- GenTreePtr addrArg = tree->gtOp.gtOp1;
+ GenTree* addrArg = tree->gtOp.gtOp1;
if (addrArg->OperGet() == GT_IND)
{
// Is the LHS an array index expression?
@@ -7910,22 +7910,22 @@ void Compiler::AddModifiedElemTypeAllContainingLoops(unsigned lnum, CORINFO_CLAS
*/
/* static */
-Compiler::fgWalkResult Compiler::optRemoveTreeVisitor(GenTreePtr* pTree, fgWalkData* data)
+Compiler::fgWalkResult Compiler::optRemoveTreeVisitor(GenTree** pTree, fgWalkData* data)
{
- GenTreePtr tree = *pTree;
- Compiler* comp = data->compiler;
- GenTreePtr keepList = (GenTreePtr)(data->pCallbackData);
+ GenTree* tree = *pTree;
+ Compiler* comp = data->compiler;
+ GenTree* keepList = (GenTree*)(data->pCallbackData);
// We may have a non-NULL side effect list that is being kept
//
if (keepList)
{
- GenTreePtr keptTree = keepList;
+ GenTree* keptTree = keepList;
while (keptTree->OperGet() == GT_COMMA)
{
assert(keptTree->OperKind() & GTK_SMPOP);
- GenTreePtr op1 = keptTree->gtOp.gtOp1;
- GenTreePtr op2 = keptTree->gtGetOp2();
+ GenTree* op1 = keptTree->gtOp.gtOp1;
+ GenTree* op2 = keptTree->gtGetOp2();
 // For the GT_COMMA case the op1 is part of the original CSE tree
// that is being kept because it contains some side-effect
@@ -7948,7 +7948,7 @@ Compiler::fgWalkResult Compiler::optRemoveTreeVisitor(GenTreePtr* pTree, fgWalkD
}
}
- // This node is being removed from the graph of GenTreePtr
+ // This node is being removed from the graph of GenTree*
// Look for any local variable references
@@ -7988,7 +7988,7 @@ Compiler::fgWalkResult Compiler::optRemoveTreeVisitor(GenTreePtr* pTree, fgWalkD
* 'deadTree' as we need to fetch the block weight when decrementing the ref counts.
*/
-void Compiler::optRemoveTree(GenTreePtr deadTree, GenTreePtr keepList)
+void Compiler::optRemoveTree(GenTree* deadTree, GenTree* keepList)
{
// We communicate this value using the walkData.pCallbackData field
//
@@ -8002,7 +8002,7 @@ void Compiler::optRemoveTree(GenTreePtr deadTree, GenTreePtr keepList)
// tree - Range check tree
// stmt - Statement the tree belongs to
-void Compiler::optRemoveRangeCheck(GenTreePtr tree, GenTreePtr stmt)
+void Compiler::optRemoveRangeCheck(GenTree* tree, GenTree* stmt)
{
#if !REARRANGE_ADDS
noway_assert(!"can't remove range checks without REARRANGE_ADDS right now");
@@ -8025,7 +8025,7 @@ void Compiler::optRemoveRangeCheck(GenTreePtr tree, GenTreePtr stmt)
}
#endif
- GenTreePtr sideEffList = nullptr;
+ GenTree* sideEffList = nullptr;
gtExtractSideEffList(bndsChkTree, &sideEffList, GTF_ASG);
@@ -8063,7 +8063,7 @@ void Compiler::optRemoveRangeCheck(GenTreePtr tree, GenTreePtr stmt)
* multiplication node.
*/
-ssize_t Compiler::optGetArrayRefScaleAndIndex(GenTreePtr mul, GenTreePtr* pIndex DEBUGARG(bool bRngChk))
+ssize_t Compiler::optGetArrayRefScaleAndIndex(GenTree* mul, GenTree** pIndex DEBUGARG(bool bRngChk))
{
assert(mul);
assert(mul->gtOper == GT_MUL || mul->gtOper == GT_LSH);
@@ -8076,7 +8076,7 @@ ssize_t Compiler::optGetArrayRefScaleAndIndex(GenTreePtr mul, GenTreePtr* pIndex
scale = ((ssize_t)1) << scale;
}
- GenTreePtr index = mul->gtOp.gtOp1;
+ GenTree* index = mul->gtOp.gtOp1;
if (index->gtOper == GT_MUL && index->gtOp.gtOp2->IsCnsIntOrI())
{
@@ -8106,10 +8106,10 @@ ssize_t Compiler::optGetArrayRefScaleAndIndex(GenTreePtr mul, GenTreePtr* pIndex
*
*/
-GenTreePtr Compiler::optFindLocalInit(BasicBlock* block,
- GenTreePtr local,
- VARSET_TP* pKilledInOut,
- bool* pLhsRhsKilledAfterInit)
+GenTree* Compiler::optFindLocalInit(BasicBlock* block,
+ GenTree* local,
+ VARSET_TP* pKilledInOut,
+ bool* pLhsRhsKilledAfterInit)
{
assert(pKilledInOut);
assert(pLhsRhsKilledAfterInit);
@@ -8118,14 +8118,14 @@ GenTreePtr Compiler::optFindLocalInit(BasicBlock* block,
unsigned LclNum = local->gtLclVarCommon.gtLclNum;
- GenTreePtr list = block->bbTreeList;
+ GenTree* list = block->bbTreeList;
if (list == nullptr)
{
return nullptr;
}
- GenTreePtr rhs = nullptr;
- GenTreePtr stmt = list;
+ GenTree* rhs = nullptr;
+ GenTree* stmt = list;
do
{
stmt = stmt->gtPrev;
@@ -8134,7 +8134,7 @@ GenTreePtr Compiler::optFindLocalInit(BasicBlock* block,
break;
}
- GenTreePtr tree = stmt->gtStmt.gtStmtExpr;
+ GenTree* tree = stmt->gtStmt.gtStmtExpr;
// If we encounter an assignment to a local variable,
if (tree->OperIsAssignment() && tree->gtOp.gtOp1->gtOper == GT_LCL_VAR)
{
@@ -8309,7 +8309,7 @@ bool Compiler::optIdentifyLoopOptInfo(unsigned loopNum, LoopCloneContext* contex
}
#ifdef DEBUG
- GenTreePtr op1 = pLoop->lpIterator();
+ GenTree* op1 = pLoop->lpIterator();
noway_assert((op1->gtOper == GT_LCL_VAR) && (op1->gtLclVarCommon.gtLclNum == ivLclNum));
#endif
@@ -8320,7 +8320,7 @@ bool Compiler::optIdentifyLoopOptInfo(unsigned loopNum, LoopCloneContext* contex
for (BasicBlock* block = beg; block != end->bbNext; block = block->bbNext)
{
compCurBB = block;
- for (GenTreePtr stmt = block->bbTreeList; stmt; stmt = stmt->gtNext)
+ for (GenTree* stmt = block->bbTreeList; stmt; stmt = stmt->gtNext)
{
info.stmt = stmt;
const bool lclVarsOnly = false;
@@ -8373,13 +8373,13 @@ bool Compiler::optIdentifyLoopOptInfo(unsigned loopNum, LoopCloneContext* contex
// [000000001AF829F0] -A-XG------- = int
// [000000001AF82978] D------N---- lclVar int V06 tmp0
//
-bool Compiler::optExtractArrIndex(GenTreePtr tree, ArrIndex* result, unsigned lhsNum)
+bool Compiler::optExtractArrIndex(GenTree* tree, ArrIndex* result, unsigned lhsNum)
{
if (tree->gtOper != GT_COMMA)
{
return false;
}
- GenTreePtr before = tree->gtGetOp1();
+ GenTree* before = tree->gtGetOp1();
if (before->gtOper != GT_ARR_BOUNDS_CHECK)
{
return false;
@@ -8410,7 +8410,7 @@ bool Compiler::optExtractArrIndex(GenTreePtr tree, ArrIndex* result, unsigned lh
unsigned indLcl = arrBndsChk->gtIndex->gtLclVarCommon.gtLclNum;
- GenTreePtr after = tree->gtGetOp2();
+ GenTree* after = tree->gtGetOp2();
if (after->gtOper != GT_IND)
{
@@ -8427,13 +8427,13 @@ bool Compiler::optExtractArrIndex(GenTreePtr tree, ArrIndex* result, unsigned lh
return false;
}
- GenTreePtr sibo = after->gtGetOp1();
+ GenTree* sibo = after->gtGetOp1();
if (sibo->gtOper != GT_ADD)
{
return false;
}
- GenTreePtr sib = sibo->gtGetOp1();
- GenTreePtr ofs = sibo->gtGetOp2();
+ GenTree* sib = sibo->gtGetOp1();
+ GenTree* ofs = sibo->gtGetOp2();
if (ofs->gtOper != GT_CNS_INT)
{
return false;
@@ -8442,8 +8442,8 @@ bool Compiler::optExtractArrIndex(GenTreePtr tree, ArrIndex* result, unsigned lh
{
return false;
}
- GenTreePtr si = sib->gtGetOp2();
- GenTreePtr base = sib->gtGetOp1();
+ GenTree* si = sib->gtGetOp2();
+ GenTree* base = sib->gtGetOp1();
if (si->gtOper != GT_LSH)
{
return false;
@@ -8452,8 +8452,8 @@ bool Compiler::optExtractArrIndex(GenTreePtr tree, ArrIndex* result, unsigned lh
{
return false;
}
- GenTreePtr scale = si->gtGetOp2();
- GenTreePtr index = si->gtGetOp1();
+ GenTree* scale = si->gtGetOp2();
+ GenTree* index = si->gtGetOp1();
if (scale->gtOper != GT_CNS_INT)
{
return false;
@@ -8463,9 +8463,9 @@ bool Compiler::optExtractArrIndex(GenTreePtr tree, ArrIndex* result, unsigned lh
{
return false;
}
- GenTreePtr indexVar = index->gtGetOp1();
+ GenTree* indexVar = index->gtGetOp1();
#else
- GenTreePtr indexVar = index;
+ GenTree* indexVar = index;
#endif
if (indexVar->gtOper != GT_LCL_VAR || indexVar->gtLclVarCommon.gtLclNum != indLcl)
{
@@ -8520,7 +8520,7 @@ bool Compiler::optExtractArrIndex(GenTreePtr tree, ArrIndex* result, unsigned lh
// Assumption:
// The method extracts only if the array base and indices are GT_LCL_VAR.
//
-bool Compiler::optReconstructArrIndex(GenTreePtr tree, ArrIndex* result, unsigned lhsNum)
+bool Compiler::optReconstructArrIndex(GenTree* tree, ArrIndex* result, unsigned lhsNum)
{
// If we can extract "tree" (which is a top level comma) return.
if (optExtractArrIndex(tree, result, lhsNum))
@@ -8530,22 +8530,22 @@ bool Compiler::optReconstructArrIndex(GenTreePtr tree, ArrIndex* result, unsigne
// We have a comma (check if array base expr is computed in "before"), descend further.
else if (tree->OperGet() == GT_COMMA)
{
- GenTreePtr before = tree->gtGetOp1();
+ GenTree* before = tree->gtGetOp1();
// "before" should evaluate an array base for the "after" indexing.
if (before->OperGet() != GT_ASG)
{
return false;
}
- GenTreePtr lhs = before->gtGetOp1();
- GenTreePtr rhs = before->gtGetOp2();
+ GenTree* lhs = before->gtGetOp1();
+ GenTree* rhs = before->gtGetOp2();
// "rhs" should contain an GT_INDEX
if (!lhs->IsLocal() || !optReconstructArrIndex(rhs, result, lhsNum))
{
return false;
}
- unsigned lhsNum = lhs->gtLclVarCommon.gtLclNum;
- GenTreePtr after = tree->gtGetOp2();
+ unsigned lhsNum = lhs->gtLclVarCommon.gtLclNum;
+ GenTree* after = tree->gtGetOp2();
// Pass the "lhsNum", so we can verify if indeed it is used as the array base.
return optExtractArrIndex(after, result, lhsNum);
}
@@ -8553,7 +8553,7 @@ bool Compiler::optReconstructArrIndex(GenTreePtr tree, ArrIndex* result, unsigne
}
/* static */
-Compiler::fgWalkResult Compiler::optCanOptimizeByLoopCloningVisitor(GenTreePtr* pTree, Compiler::fgWalkData* data)
+Compiler::fgWalkResult Compiler::optCanOptimizeByLoopCloningVisitor(GenTree** pTree, Compiler::fgWalkData* data)
{
return data->compiler->optCanOptimizeByLoopCloning(*pTree, (LoopCloneVisitorInfo*)data->pCallbackData);
}
@@ -8599,7 +8599,7 @@ bool Compiler::optIsStackLocalInvariant(unsigned loopNum, unsigned lclNum)
// Return Value:
// Skip sub trees if the optimization candidate is identified or else continue walking
//
-Compiler::fgWalkResult Compiler::optCanOptimizeByLoopCloning(GenTreePtr tree, LoopCloneVisitorInfo* info)
+Compiler::fgWalkResult Compiler::optCanOptimizeByLoopCloning(GenTree* tree, LoopCloneVisitorInfo* info)
{
ArrIndex arrIndex(getAllocator());
@@ -8674,9 +8674,9 @@ struct optRangeCheckDsc
Walk to make sure that only locals and constants are contained in the index
for a range check
*/
-Compiler::fgWalkResult Compiler::optValidRangeCheckIndex(GenTreePtr* pTree, fgWalkData* data)
+Compiler::fgWalkResult Compiler::optValidRangeCheckIndex(GenTree** pTree, fgWalkData* data)
{
- GenTreePtr tree = *pTree;
+ GenTree* tree = *pTree;
optRangeCheckDsc* pData = (optRangeCheckDsc*)data->pCallbackData;
if (tree->gtOper == GT_IND || tree->gtOper == GT_CLS_VAR || tree->gtOper == GT_FIELD || tree->gtOper == GT_LCL_FLD)
@@ -8702,16 +8702,16 @@ Compiler::fgWalkResult Compiler::optValidRangeCheckIndex(GenTreePtr* pTree, fgWa
 that the array is a local array (not subject to race conditions) and that the
index is either a constant or a local
*/
-bool Compiler::optIsRangeCheckRemovable(GenTreePtr tree)
+bool Compiler::optIsRangeCheckRemovable(GenTree* tree)
{
noway_assert(tree->gtOper == GT_ARR_BOUNDS_CHECK);
GenTreeBoundsChk* bndsChk = tree->AsBoundsChk();
- GenTreePtr pArray = bndsChk->GetArray();
+ GenTree* pArray = bndsChk->GetArray();
if (pArray == nullptr && !bndsChk->gtArrLen->IsCnsIntOrI())
{
return false;
}
- GenTreePtr pIndex = bndsChk->gtIndex;
+ GenTree* pIndex = bndsChk->gtIndex;
// The length must be a constant (the pArray == NULL case) or the array reference must be a local.
// Otherwise we can be targeted by malicious race-conditions.
@@ -8789,14 +8789,14 @@ void Compiler::optOptimizeBoolsGcStress(BasicBlock* condBlock)
}
noway_assert(condBlock->bbJumpKind == BBJ_COND);
- GenTreePtr condStmt = condBlock->bbTreeList->gtPrev->gtStmt.gtStmtExpr;
+ GenTree* condStmt = condBlock->bbTreeList->gtPrev->gtStmt.gtStmtExpr;
noway_assert(condStmt->gtOper == GT_JTRUE);
- bool isBool;
- GenTreePtr relop;
+ bool isBool;
+ GenTree* relop;
- GenTreePtr comparand = optIsBoolCond(condStmt, &relop, &isBool);
+ GenTree* comparand = optIsBoolCond(condStmt, &relop, &isBool);
if (comparand == nullptr || !varTypeIsGC(comparand->TypeGet()))
{
@@ -8808,7 +8808,7 @@ void Compiler::optOptimizeBoolsGcStress(BasicBlock* condBlock)
return;
}
- GenTreePtr comparandClone = gtCloneExpr(comparand);
+ GenTree* comparandClone = gtCloneExpr(comparand);
// Bump up the ref-counts of any variables in 'comparandClone'
compCurBB = condBlock;
@@ -9005,22 +9005,22 @@ void Compiler::optOptimizeBools()
/* The second block must contain a single statement */
- GenTreePtr s2 = b2->bbTreeList;
+ GenTree* s2 = b2->bbTreeList;
if (s2->gtPrev != s2)
{
continue;
}
noway_assert(s2->gtOper == GT_STMT);
- GenTreePtr t2 = s2->gtStmt.gtStmtExpr;
+ GenTree* t2 = s2->gtStmt.gtStmtExpr;
noway_assert(t2->gtOper == GT_JTRUE);
/* Find the condition for the first block */
- GenTreePtr s1 = b1->bbTreeList->gtPrev;
+ GenTree* s1 = b1->bbTreeList->gtPrev;
noway_assert(s1->gtOper == GT_STMT);
- GenTreePtr t1 = s1->gtStmt.gtStmtExpr;
+ GenTree* t1 = s1->gtStmt.gtStmtExpr;
noway_assert(t1->gtOper == GT_JTRUE);
if (b2->countOfInEdges() > 1)
@@ -9032,13 +9032,13 @@ void Compiler::optOptimizeBools()
bool bool1, bool2;
- GenTreePtr c1 = optIsBoolCond(t1, &t1, &bool1);
+ GenTree* c1 = optIsBoolCond(t1, &t1, &bool1);
if (!c1)
{
continue;
}
- GenTreePtr c2 = optIsBoolCond(t2, &t2, &bool2);
+ GenTree* c2 = optIsBoolCond(t2, &t2, &bool2);
if (!c2)
{
continue;
@@ -9156,7 +9156,7 @@ void Compiler::optOptimizeBools()
//
// Now update the trees
//
- GenTreePtr cmpOp1 = gtNewOperNode(foldOp, foldType, c1, c2);
+ GenTree* cmpOp1 = gtNewOperNode(foldOp, foldType, c1, c2);
if (bool1 && bool2)
{
/* When we 'OR'/'AND' two booleans, the result is boolean as well */
@@ -9169,7 +9169,7 @@ void Compiler::optOptimizeBools()
#if FEATURE_SET_FLAGS
// For comparisons against zero we will have the GTF_SET_FLAGS set
- // and this can cause an assert to fire in fgMoveOpsLeft(GenTreePtr tree)
+ // and this can cause an assert to fire in fgMoveOpsLeft(GenTree* tree)
// during the CSE phase.
//
// So make sure to clear any GTF_SET_FLAGS bit on these operations
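
 For reference, the optOptimizeBools hunks above fold two conditional blocks whose conditions test booleans into a single condition by combining the comparands with OR/AND (the gtNewOperNode(foldOp, foldType, c1, c2) line). The standalone sketch below, in plain C++ with illustrative names only, shows the identity that makes the fold sound for 0/1-valued operands:

 #include <cassert>

 // For operands known to be 0 or 1, OR'ing/AND'ing the values and comparing the
 // result against zero matches the short-circuit form, so two conditional jumps
 // can be collapsed into one.
 bool EitherTrue(int a, int b)       { return (a != 0) || (b != 0); }
 bool EitherTrueFolded(int a, int b) { return (a | b) != 0; }
 bool BothTrue(int a, int b)         { return (a != 0) && (b != 0); }
 bool BothTrueFolded(int a, int b)   { return (a & b) != 0; }

 int main()
 {
     for (int a = 0; a <= 1; a++)
     {
         for (int b = 0; b <= 1; b++)
         {
             assert(EitherTrue(a, b) == EitherTrueFolded(a, b));
             assert(BothTrue(a, b) == BothTrueFolded(a, b));
         }
     }
     return 0;
 }
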
diff --git a/src/jit/rangecheck.cpp b/src/jit/rangecheck.cpp
index a1222767da..c2826bd41c 100644
--- a/src/jit/rangecheck.cpp
+++ b/src/jit/rangecheck.cpp
@@ -61,7 +61,7 @@ int RangeCheck::GetArrLength(ValueNum vn)
}
// Check if the computed range is within bounds.
-bool RangeCheck::BetweenBounds(Range& range, int lower, GenTreePtr upper)
+bool RangeCheck::BetweenBounds(Range& range, int lower, GenTree* upper)
{
#ifdef DEBUG
if (m_pCompiler->verbose)
@@ -194,7 +194,7 @@ bool RangeCheck::BetweenBounds(Range& range, int lower, GenTreePtr upper)
return false;
}
-void RangeCheck::OptimizeRangeCheck(BasicBlock* block, GenTreePtr stmt, GenTreePtr treeParent)
+void RangeCheck::OptimizeRangeCheck(BasicBlock* block, GenTree* stmt, GenTree* treeParent)
{
// Check if we are dealing with a bounds check node.
if (treeParent->OperGet() != GT_COMMA)
@@ -203,7 +203,7 @@ void RangeCheck::OptimizeRangeCheck(BasicBlock* block, GenTreePtr stmt, GenTreeP
}
// If we are not looking at array bounds check, bail.
- GenTreePtr tree = treeParent->gtOp.gtOp1;
+ GenTree* tree = treeParent->gtOp.gtOp1;
if (!tree->OperIsBoundsCheck())
{
return;
@@ -211,7 +211,7 @@ void RangeCheck::OptimizeRangeCheck(BasicBlock* block, GenTreePtr stmt, GenTreeP
GenTreeBoundsChk* bndsChk = tree->AsBoundsChk();
m_pCurBndsChk = bndsChk;
- GenTreePtr treeIndex = bndsChk->gtIndex;
+ GenTree* treeIndex = bndsChk->gtIndex;
// Take care of constant index first, like a[2], for example.
ValueNum idxVn = treeIndex->gtVNPair.GetConservative();
@@ -297,7 +297,7 @@ void RangeCheck::OptimizeRangeCheck(BasicBlock* block, GenTreePtr stmt, GenTreeP
return;
}
-void RangeCheck::Widen(BasicBlock* block, GenTreePtr tree, Range* pRange)
+void RangeCheck::Widen(BasicBlock* block, GenTree* tree, Range* pRange)
{
#ifdef DEBUG
if (m_pCompiler->verbose)
@@ -361,7 +361,7 @@ bool RangeCheck::IsBinOpMonotonicallyIncreasing(GenTreeOp* binop)
}
}
-bool RangeCheck::IsMonotonicallyIncreasing(GenTreePtr expr)
+bool RangeCheck::IsMonotonicallyIncreasing(GenTree* expr)
{
JITDUMP("[RangeCheck::IsMonotonicallyIncreasing] [%06d]\n", Compiler::dspTreeID(expr));
@@ -761,7 +761,7 @@ void RangeCheck::MergeEdgeAssertions(GenTreeLclVarCommon* lcl, ASSERT_VALARG_TP
// Merge assertions from the pred edges of the block, i.e., check for any assertions about "op's" value numbers for phi
 // arguments. If not a phi argument, check if we have assertions about local variables.
-void RangeCheck::MergeAssertion(BasicBlock* block, GenTreePtr op, Range* pRange DEBUGARG(int indent))
+void RangeCheck::MergeAssertion(BasicBlock* block, GenTree* op, Range* pRange DEBUGARG(int indent))
{
JITDUMP("Merging assertions from pred edges of BB%02d for op [%06d] $%03x\n", block->bbNum, Compiler::dspTreeID(op),
op->gtVNPair.GetConservative());
@@ -1065,11 +1065,11 @@ bool RangeCheck::DoesVarDefOverflow(GenTreeLclVarCommon* lcl)
return true;
}
-bool RangeCheck::DoesPhiOverflow(BasicBlock* block, GenTreePtr expr)
+bool RangeCheck::DoesPhiOverflow(BasicBlock* block, GenTree* expr)
{
for (GenTreeArgList* args = expr->gtOp.gtOp1->AsArgList(); args != nullptr; args = args->Rest())
{
- GenTreePtr arg = args->Current();
+ GenTree* arg = args->Current();
if (m_pSearchPath->Lookup(arg))
{
continue;
@@ -1082,7 +1082,7 @@ bool RangeCheck::DoesPhiOverflow(BasicBlock* block, GenTreePtr expr)
return false;
}
-bool RangeCheck::DoesOverflow(BasicBlock* block, GenTreePtr expr)
+bool RangeCheck::DoesOverflow(BasicBlock* block, GenTree* expr)
{
bool overflows = false;
if (!GetOverflowMap()->Lookup(expr, &overflows))
@@ -1092,7 +1092,7 @@ bool RangeCheck::DoesOverflow(BasicBlock* block, GenTreePtr expr)
return overflows;
}
-bool RangeCheck::ComputeDoesOverflow(BasicBlock* block, GenTreePtr expr)
+bool RangeCheck::ComputeDoesOverflow(BasicBlock* block, GenTree* expr)
{
JITDUMP("Does overflow [%06d]?\n", Compiler::dspTreeID(expr));
m_pSearchPath->Set(expr, block);
@@ -1135,7 +1135,7 @@ bool RangeCheck::ComputeDoesOverflow(BasicBlock* block, GenTreePtr expr)
// value as "dependent" (dep).
// If the loop is proven to be "monotonic", then make liberal decisions while merging phi node.
// eg.: merge((0, dep), (dep, dep)) = (0, dep)
-Range RangeCheck::ComputeRange(BasicBlock* block, GenTreePtr expr, bool monotonic DEBUGARG(int indent))
+Range RangeCheck::ComputeRange(BasicBlock* block, GenTree* expr, bool monotonic DEBUGARG(int indent))
{
bool newlyAdded = !m_pSearchPath->Set(expr, block);
Range range = Limit(Limit::keUndef);
@@ -1238,7 +1238,7 @@ void Indent(int indent)
#endif
 // Get the range: if it is already computed, use the cached range value, else compute it.
-Range RangeCheck::GetRange(BasicBlock* block, GenTreePtr expr, bool monotonic DEBUGARG(int indent))
+Range RangeCheck::GetRange(BasicBlock* block, GenTree* expr, bool monotonic DEBUGARG(int indent))
{
#ifdef DEBUG
if (m_pCompiler->verbose)
@@ -1308,14 +1308,14 @@ struct MapMethodDefsData
{
RangeCheck* rc;
BasicBlock* block;
- GenTreePtr stmt;
+ GenTree* stmt;
- MapMethodDefsData(RangeCheck* rc, BasicBlock* block, GenTreePtr stmt) : rc(rc), block(block), stmt(stmt)
+ MapMethodDefsData(RangeCheck* rc, BasicBlock* block, GenTree* stmt) : rc(rc), block(block), stmt(stmt)
{
}
};
-Compiler::fgWalkResult MapMethodDefsVisitor(GenTreePtr* ptr, Compiler::fgWalkData* data)
+Compiler::fgWalkResult MapMethodDefsVisitor(GenTree** ptr, Compiler::fgWalkData* data)
{
GenTree* tree = *ptr;
MapMethodDefsData* rcd = ((MapMethodDefsData*)data->pCallbackData);
@@ -1333,7 +1333,7 @@ void RangeCheck::MapMethodDefs()
// First, gather where all definitions occur in the program and store it in a map.
for (BasicBlock* block = m_pCompiler->fgFirstBB; block; block = block->bbNext)
{
- for (GenTreePtr stmt = block->bbTreeList; stmt; stmt = stmt->gtNext)
+ for (GenTree* stmt = block->bbTreeList; stmt; stmt = stmt->gtNext)
{
MapMethodDefsData data(this, block, stmt);
m_pCompiler->fgWalkTreePre(&stmt->gtStmt.gtStmtExpr, MapMethodDefsVisitor, &data, false, true);
@@ -1362,9 +1362,9 @@ void RangeCheck::OptimizeRangeChecks()
// Walk through trees looking for arrBndsChk node and check if it can be optimized.
for (BasicBlock* block = m_pCompiler->fgFirstBB; block; block = block->bbNext)
{
- for (GenTreePtr stmt = block->bbTreeList; stmt; stmt = stmt->gtNext)
+ for (GenTree* stmt = block->bbTreeList; stmt; stmt = stmt->gtNext)
{
- for (GenTreePtr tree = stmt->gtStmt.gtStmtList; tree; tree = tree->gtNext)
+ for (GenTree* tree = stmt->gtStmt.gtStmtList; tree; tree = tree->gtNext)
{
if (IsOverBudget())
{
diff --git a/src/jit/rangecheck.h b/src/jit/rangecheck.h
index 99a596bdab..67c9cc08eb 100644
--- a/src/jit/rangecheck.h
+++ b/src/jit/rangecheck.h
@@ -419,9 +419,9 @@ public:
// Constructor
RangeCheck(Compiler* pCompiler);
- typedef JitHashTable<GenTreePtr, JitPtrKeyFuncs<GenTree>, bool> OverflowMap;
- typedef JitHashTable<GenTreePtr, JitPtrKeyFuncs<GenTree>, Range*> RangeMap;
- typedef JitHashTable<GenTreePtr, JitPtrKeyFuncs<GenTree>, BasicBlock*> SearchPath;
+ typedef JitHashTable<GenTree*, JitPtrKeyFuncs<GenTree>, bool> OverflowMap;
+ typedef JitHashTable<GenTree*, JitPtrKeyFuncs<GenTree>, Range*> RangeMap;
+ typedef JitHashTable<GenTree*, JitPtrKeyFuncs<GenTree>, BasicBlock*> SearchPath;
#ifdef DEBUG
// TODO-Cleanup: This code has been kept around just to ensure that the SSA data is still
@@ -432,10 +432,10 @@ public:
struct Location
{
BasicBlock* block;
- GenTreePtr stmt;
+ GenTree* stmt;
GenTreeLclVarCommon* tree;
- GenTreePtr parent;
- Location(BasicBlock* block, GenTreePtr stmt, GenTreeLclVarCommon* tree, GenTreePtr parent)
+ GenTree* parent;
+ Location(BasicBlock* block, GenTree* stmt, GenTreeLclVarCommon* tree, GenTree* parent)
: block(block), stmt(stmt), tree(tree), parent(parent)
{
}
@@ -471,7 +471,7 @@ public:
// assumes that the lower range is resolved and upper range is symbolic as in an
// increasing loop.
// TODO-CQ: This is not general enough.
- bool BetweenBounds(Range& range, int lower, GenTreePtr upper);
+ bool BetweenBounds(Range& range, int lower, GenTree* upper);
// Entry point to optimize range checks in the block. Assumes value numbering
// and assertion prop phases are completed.
@@ -480,28 +480,28 @@ public:
// Given a "tree" node, check if it contains array bounds check node and
// optimize to remove it, if possible. Requires "stmt" and "block" that
// contain the tree.
- void OptimizeRangeCheck(BasicBlock* block, GenTreePtr stmt, GenTreePtr tree);
+ void OptimizeRangeCheck(BasicBlock* block, GenTree* stmt, GenTree* tree);
// Given the index expression try to find its range.
// The range of a variable depends on its rhs which in turn depends on its constituent variables.
// The "path" is the path taken in the search for the rhs' range and its constituents' range.
// If "monotonic" is true, the calculations are made more liberally assuming initial values
// at phi definitions.
- Range GetRange(BasicBlock* block, GenTreePtr expr, bool monotonic DEBUGARG(int indent));
+ Range GetRange(BasicBlock* block, GenTree* expr, bool monotonic DEBUGARG(int indent));
// Given the local variable, first find the definition of the local and find the range of the rhs.
// Helper for GetRange.
Range ComputeRangeForLocalDef(BasicBlock* block, GenTreeLclVarCommon* lcl, bool monotonic DEBUGARG(int indent));
// Compute the range, rather than retrieve a cached value. Helper for GetRange.
- Range ComputeRange(BasicBlock* block, GenTreePtr expr, bool monotonic DEBUGARG(int indent));
+ Range ComputeRange(BasicBlock* block, GenTree* expr, bool monotonic DEBUGARG(int indent));
// Compute the range for the op1 and op2 for the given binary operator.
Range ComputeRangeForBinOp(BasicBlock* block, GenTreeOp* binop, bool monotonic DEBUGARG(int indent));
// Merge assertions from AssertionProp's flags, for the corresponding "phiArg."
// Requires "pRange" to contain range that is computed partially.
- void MergeAssertion(BasicBlock* block, GenTreePtr phiArg, Range* pRange DEBUGARG(int indent));
+ void MergeAssertion(BasicBlock* block, GenTree* phiArg, Range* pRange DEBUGARG(int indent));
// Inspect the "assertions" and extract assertions about the given "phiArg" and
// refine the "pRange" value.
@@ -518,27 +518,27 @@ public:
bool DoesBinOpOverflow(BasicBlock* block, GenTreeOp* binop);
// Does the phi operands involve an assignment that could overflow?
- bool DoesPhiOverflow(BasicBlock* block, GenTreePtr expr);
+ bool DoesPhiOverflow(BasicBlock* block, GenTree* expr);
// Find the def of the "expr" local and recurse on the arguments if any of them involve a
// calculation that overflows.
bool DoesVarDefOverflow(GenTreeLclVarCommon* lcl);
- bool ComputeDoesOverflow(BasicBlock* block, GenTreePtr expr);
+ bool ComputeDoesOverflow(BasicBlock* block, GenTree* expr);
 // Does the current "expr", which is a use, involve a definition that overflows?
- bool DoesOverflow(BasicBlock* block, GenTreePtr tree);
+ bool DoesOverflow(BasicBlock* block, GenTree* tree);
// Widen the range by first checking if the induction variable is monotonic. Requires "pRange"
// to be partially computed.
- void Widen(BasicBlock* block, GenTreePtr tree, Range* pRange);
+ void Widen(BasicBlock* block, GenTree* tree, Range* pRange);
// Is the binary operation increasing the value.
bool IsBinOpMonotonicallyIncreasing(GenTreeOp* binop);
// Given an "expr" trace its rhs and their definitions to check if all the assignments
// are monotonically increasing.
- bool IsMonotonicallyIncreasing(GenTreePtr tree);
+ bool IsMonotonicallyIncreasing(GenTree* tree);
// We allocate a budget to avoid walking long UD chains. When traversing each link in the UD
// chain, we decrement the budget. When the budget hits 0, then no more range check optimization
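
 The closing comment above describes the budget that stops the range check analysis from chasing arbitrarily long use-def chains. Below is a minimal, self-contained sketch of that idea on a toy def graph; the real walk also records a search path (m_pSearchPath) to cut cycles through phis, which is omitted here:

 #include <cstdio>
 #include <vector>

 struct Def
 {
     std::vector<int> deps;   // indices of defs this def reads from
 };

 // Returns true if the walk finished within budget; decrements the budget on
 // every link traversed and gives up once it reaches zero.
 bool WalkDefs(const std::vector<Def>& defs, int node, int& budget)
 {
     if (budget <= 0)
     {
         return false;        // over budget: stop optimizing, stay conservative
     }
     budget--;
     for (int dep : defs[node].deps)
     {
         if (!WalkDefs(defs, dep, budget))
         {
             return false;
         }
     }
     return true;
 }

 int main()
 {
     // def chain: 3 -> 2 -> 1 -> 0
     std::vector<Def> defs = {{{}}, {{0}}, {{1}}, {{2}}};
     int bigBudget   = 10;
     int smallBudget = 2;
     std::printf("budget 10: %s\n", WalkDefs(defs, 3, bigBudget) ? "finished" : "gave up");
     std::printf("budget 2 : %s\n", WalkDefs(defs, 3, smallBudget) ? "finished" : "gave up");
     return 0;
 }
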
diff --git a/src/jit/regalloc.cpp b/src/jit/regalloc.cpp
index 5130cc236e..6046d31f05 100644
--- a/src/jit/regalloc.cpp
+++ b/src/jit/regalloc.cpp
@@ -356,7 +356,7 @@ void Compiler::raAdjustVarIntf()
/* Determine register mask for a call/return from type.
*/
-inline regMaskTP Compiler::genReturnRegForTree(GenTreePtr tree)
+inline regMaskTP Compiler::genReturnRegForTree(GenTree* tree)
{
var_types type = tree->TypeGet();
@@ -464,7 +464,7 @@ void Compiler::raDispFPlifeInfo()
for (block = fgFirstBB; block; block = block->bbNext)
{
- GenTreePtr stmt;
+ GenTree* stmt;
printf("BB%02u: in = [ ", block->bbNum);
dispLifeSet(this, optAllFloatVars, block->bbLiveIn);
@@ -473,7 +473,7 @@ void Compiler::raDispFPlifeInfo()
VARSET_TP life(VarSetOps::MakeCopy(this, block->bbLiveIn));
for (stmt = block->bbTreeList; stmt; stmt = stmt->gtNext)
{
- GenTreePtr tree;
+ GenTree* tree;
noway_assert(stmt->gtOper == GT_STMT);
@@ -1383,11 +1383,11 @@ RET:
*/
regMaskTP Compiler::rpPredictAddressMode(
- GenTreePtr tree, var_types type, regMaskTP lockedRegs, regMaskTP rsvdRegs, GenTreePtr lenCSE)
+ GenTree* tree, var_types type, regMaskTP lockedRegs, regMaskTP rsvdRegs, GenTree* lenCSE)
{
- GenTreePtr op1;
- GenTreePtr op2;
- GenTreePtr opTemp;
+ GenTree* op1;
+ GenTree* op2;
+ GenTree* opTemp;
genTreeOps oper = tree->OperGet();
regMaskTP op1Mask;
regMaskTP op2Mask;
@@ -1709,7 +1709,7 @@ void Compiler::rpPredictRefAssign(unsigned lclNum)
* the rpLastUseVars set should be saved and restored
* so that we don't add any new variables to rpLastUseVars.
*/
-regMaskTP Compiler::rpPredictBlkAsgRegUse(GenTreePtr tree,
+regMaskTP Compiler::rpPredictBlkAsgRegUse(GenTree* tree,
rpPredictReg predictReg,
regMaskTP lockedRegs,
regMaskTP rsvdRegs)
@@ -1722,8 +1722,8 @@ regMaskTP Compiler::rpPredictBlkAsgRegUse(GenTreePtr tree,
bool useMemHelper = false;
bool useBarriers = false;
GenTreeBlk* dst = tree->gtGetOp1()->AsBlk();
- GenTreePtr dstAddr = dst->Addr();
- GenTreePtr srcAddrOrFill = tree->gtGetOp2IfPresent();
+ GenTree* dstAddr = dst->Addr();
+ GenTree* srcAddrOrFill = tree->gtGetOp2IfPresent();
size_t blkSize = dst->gtBlkSize;
@@ -1832,8 +1832,8 @@ regMaskTP Compiler::rpPredictBlkAsgRegUse(GenTreePtr tree,
}
#endif
// What order should the Dest, Val/Src, and Size be calculated
- GenTreePtr opsPtr[3];
- regMaskTP regsPtr[3];
+ GenTree* opsPtr[3];
+ regMaskTP regsPtr[3];
#if defined(_TARGET_XARCH_)
fgOrderBlockOps(tree, RBM_EDI, (isInitBlk) ? RBM_EAX : RBM_ESI, RBM_ECX, opsPtr, regsPtr);
@@ -1935,7 +1935,7 @@ regMaskTP Compiler::rpPredictBlkAsgRegUse(GenTreePtr tree,
#pragma warning(push)
#pragma warning(disable : 21000) // Suppress PREFast warning about overly large function
#endif
-regMaskTP Compiler::rpPredictTreeRegUse(GenTreePtr tree,
+regMaskTP Compiler::rpPredictTreeRegUse(GenTree* tree,
rpPredictReg predictReg,
regMaskTP lockedRegs,
regMaskTP rsvdRegs)
@@ -1983,7 +1983,7 @@ regMaskTP Compiler::rpPredictTreeRegUse(GenTreePtr tree,
unsigned varIndex = rpGetVarIndexForPredict(predictReg);
unsigned lclNum = lvaTrackedToVarNum[varIndex];
bool found = false;
- for (GenTreePtr nextTree = tree->gtNext; nextTree != NULL && !found; nextTree = nextTree->gtNext)
+ for (GenTree* nextTree = tree->gtNext; nextTree != NULL && !found; nextTree = nextTree->gtNext)
{
if (nextTree->gtOper == GT_LCL_VAR && nextTree->gtLclVarCommon.gtLclNum == lclNum)
{
@@ -2476,11 +2476,11 @@ regMaskTP Compiler::rpPredictTreeRegUse(GenTreePtr tree,
if (kind & GTK_SMPOP)
{
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtGetOp2IfPresent();
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtGetOp2IfPresent();
- GenTreePtr opsPtr[3];
- regMaskTP regsPtr[3];
+ GenTree* opsPtr[3];
+ regMaskTP regsPtr[3];
VARSET_TP startAsgUseInPlaceVars(VarSetOps::UninitVal());
@@ -2826,9 +2826,10 @@ regMaskTP Compiler::rpPredictTreeRegUse(GenTreePtr tree,
if (op1->gtOper == GT_IND)
{
- GenTreePtr rv1, rv2;
- unsigned mul, cns;
- bool rev;
+ GenTree* rv1;
+ GenTree* rv2;
+ unsigned mul, cns;
+ bool rev;
/* Special handling of indirect assigns for write barrier */
@@ -3244,7 +3245,7 @@ regMaskTP Compiler::rpPredictTreeRegUse(GenTreePtr tree,
rsvdRegs |= RBM_LASTUSE;
}
- GenTreePtr lenCSE;
+ GenTree* lenCSE;
lenCSE = NULL;
/* check for address mode */
@@ -3449,7 +3450,8 @@ regMaskTP Compiler::rpPredictTreeRegUse(GenTreePtr tree,
if (regMask == RBM_NONE)
{
rpPredictReg op1xPredictReg, op2xPredictReg;
- GenTreePtr op1x, op2x;
+ GenTree* op1x;
+ GenTree* op2x;
if (tree->gtFlags & GTF_REVERSE_OPS) // TODO: do we really need to handle this case?
{
op1xPredictReg = op2PredictReg;
@@ -3817,7 +3819,7 @@ regMaskTP Compiler::rpPredictTreeRegUse(GenTreePtr tree,
#endif
/* set the lvPref reg if possible */
- GenTreePtr dest;
+ GenTree* dest;
/*
* Walking the gtNext link twice from here should get us back
 * to our parent node, if this is a simple assignment tree.
@@ -4124,8 +4126,8 @@ regMaskTP Compiler::rpPredictTreeRegUse(GenTreePtr tree,
//
predictReg = PREDICT_SCRATCH_REG;
}
- GenTreePtr elseTree = op2->AsColon()->ElseNode();
- GenTreePtr thenTree = op2->AsColon()->ThenNode();
+ GenTree* elseTree = op2->AsColon()->ElseNode();
+ GenTree* thenTree = op2->AsColon()->ThenNode();
noway_assert(thenTree != NULL && elseTree != NULL);
@@ -4476,7 +4478,7 @@ regMaskTP Compiler::rpPredictTreeRegUse(GenTreePtr tree,
switch (oper)
{
- GenTreePtr args;
+ GenTree* args;
GenTreeArgList* list;
regMaskTP keepMask;
unsigned regArgsNum;
@@ -4646,8 +4648,8 @@ regMaskTP Compiler::rpPredictTreeRegUse(GenTreePtr tree,
}
else if (args->TypeGet() == TYP_STRUCT)
{
- GenTreePtr argx = args;
- GenTreePtr lclVarTree = NULL;
+ GenTree* argx = args;
+ GenTree* lclVarTree = NULL;
 /* The GT_OBJ may be a child of a GT_COMMA */
while (argx->gtOper == GT_COMMA)
@@ -4811,7 +4813,7 @@ regMaskTP Compiler::rpPredictTreeRegUse(GenTreePtr tree,
// for the duration of the OBJ.
if (args->OperGet() == GT_OBJ && (args->gtFlags & GTF_VAR_DEATH))
{
- GenTreePtr lclVarTree = fgIsIndirOfAddrOfLocal(args);
+ GenTree* lclVarTree = fgIsIndirOfAddrOfLocal(args);
assert(lclVarTree != NULL); // Or else would not be marked with GTF_VAR_DEATH.
compUpdateLifeVar</*ForCodeGen*/ false>(lclVarTree);
}
@@ -6430,7 +6432,7 @@ void Compiler::rpPredictRegUse()
for (BasicBlock* block = fgFirstBB; block != NULL; block = block->bbNext)
{
- GenTreePtr stmt;
+ GenTree* stmt;
compCurBB = block;
compCurLifeTree = NULL;
VarSetOps::Assign(this, compCurLife, block->bbLiveIn);
@@ -6445,7 +6447,7 @@ void Compiler::rpPredictRegUse()
VarSetOps::AssignNoCopy(this, rpLastUseVars, VarSetOps::MakeEmpty(this));
VarSetOps::AssignNoCopy(this, rpUseInPlace, VarSetOps::MakeEmpty(this));
- GenTreePtr tree = stmt->gtStmt.gtStmtExpr;
+ GenTree* tree = stmt->gtStmt.gtStmtExpr;
stmtNum++;
#ifdef DEBUG
if (verbose && 1)
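
 Beyond spelling GenTreePtr out as GenTree*, several hunks in this file also split multi-variable declarations so that each local pointer is declared on its own line (the rv1/rv2 and op1x/op2x changes above). The standalone example below, using a hypothetical Node type rather than the real GenTree, shows the C++ pitfall that convention sidesteps:

 #include <cstdio>

 struct Node { int value; };
 typedef Node* NodePtr;   // an alias hides the pointer-ness of the type

 int main()
 {
     Node n1{1};
     Node n2{2};

     // With the raw type, "Node* a, b;" would declare one pointer and one Node,
     // which is easy to misread; one declaration per line leaves no ambiguity.
     Node* first  = &n1;
     Node* second = &n2;

     // The alias form declares two pointers only because the '*' is baked into
     // the typedef; writing the '*' explicitly keeps the intent visible.
     NodePtr p1 = &n1;
     NodePtr p2 = &n2;

     std::printf("%d %d %d %d\n", first->value, second->value, p1->value, p2->value);
     return 0;
 }
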
diff --git a/src/jit/registerfp.cpp b/src/jit/registerfp.cpp
index 34343646af..b2d0a6ee83 100644
--- a/src/jit/registerfp.cpp
+++ b/src/jit/registerfp.cpp
@@ -75,7 +75,7 @@ void CodeGen::genFloatMath(GenTree* tree, RegSet::RegisterPreference* pref)
{
assert(tree->OperGet() == GT_INTRINSIC);
- GenTreePtr op1 = tree->gtOp.gtOp1;
+ GenTree* op1 = tree->gtOp.gtOp1;
// get tree into a register
genCodeForTreeFloat(op1, pref);
@@ -161,7 +161,7 @@ void CodeGen::genFloatSimple(GenTree* tree, RegSet::RegisterPreference* pref)
case GT_NEG:
{
- GenTreePtr op1 = tree->gtOp.gtOp1;
+ GenTree* op1 = tree->gtOp.gtOp1;
// get the tree into a register
genCodeForTreeFloat(op1, pref);
@@ -214,7 +214,7 @@ void CodeGen::genFloatSimple(GenTree* tree, RegSet::RegisterPreference* pref)
case GT_RETURN:
{
- GenTreePtr op1 = tree->gtOp.gtOp1;
+ GenTree* op1 = tree->gtOp.gtOp1;
assert(op1);
pref->best = (type == TYP_DOUBLE) ? RBM_DOUBLERET : RBM_FLOATRET;
@@ -242,8 +242,8 @@ void CodeGen::genFloatSimple(GenTree* tree, RegSet::RegisterPreference* pref)
case GT_COMMA:
{
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtGetOp2IfPresent();
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtGetOp2IfPresent();
if (tree->gtFlags & GTF_REVERSE_OPS)
{
@@ -278,7 +278,7 @@ void CodeGen::genFloatCheckFinite(GenTree* tree, RegSet::RegisterPreference* pre
TempDsc* temp;
int offs;
- GenTreePtr op1 = tree->gtOp.gtOp1;
+ GenTree* op1 = tree->gtOp.gtOp1;
// Offset of the DWord containing the exponent
offs = (op1->gtType == TYP_FLOAT) ? 0 : sizeof(int);
@@ -316,9 +316,9 @@ void CodeGen::genFloatCheckFinite(GenTree* tree, RegSet::RegisterPreference* pre
void CodeGen::genFloatAssign(GenTree* tree)
{
- var_types type = tree->TypeGet();
- GenTreePtr op1 = tree->gtGetOp1();
- GenTreePtr op2 = tree->gtGetOp2IfPresent();
+ var_types type = tree->TypeGet();
+ GenTree* op1 = tree->gtGetOp1();
+ GenTree* op2 = tree->gtGetOp2IfPresent();
regMaskTP needRegOp1 = RBM_ALLINT;
regMaskTP addrReg = RBM_NONE;
@@ -610,7 +610,7 @@ DONE_ASG:
siCheckVarScope(lclVarNum, lclILoffs);
}
-void CodeGen::genCodeForTreeFloat(GenTreePtr tree, RegSet::RegisterPreference* pref)
+void CodeGen::genCodeForTreeFloat(GenTree* tree, RegSet::RegisterPreference* pref)
{
genTreeOps oper;
unsigned kind;
@@ -681,7 +681,7 @@ void CodeGen::genFloatLeaf(GenTree* tree, RegSet::RegisterPreference* pref)
return;
}
-void CodeGen::genLoadFloat(GenTreePtr tree, regNumber reg)
+void CodeGen::genLoadFloat(GenTree* tree, regNumber reg)
{
if (tree->IsRegVar())
{
@@ -780,12 +780,12 @@ void CodeGen::genLoadFloat(GenTreePtr tree, regNumber reg)
}
}
-void CodeGen::genCodeForTreeFloat_DONE(GenTreePtr tree, regNumber reg)
+void CodeGen::genCodeForTreeFloat_DONE(GenTree* tree, regNumber reg)
{
return genCodeForTree_DONE(tree, reg);
}
-void CodeGen::genFloatAsgArith(GenTreePtr tree)
+void CodeGen::genFloatAsgArith(GenTree* tree)
{
 // See Flowgraph.cpp, line 13750
// arm VFP has tons of regs, 3-op instructions, and no addressing modes
@@ -793,8 +793,7 @@ void CodeGen::genFloatAsgArith(GenTreePtr tree)
noway_assert(!"Not Reachable for _TARGET_ARM_");
}
-regNumber CodeGen::genAssignArithFloat(
- genTreeOps oper, GenTreePtr dst, regNumber dstreg, GenTreePtr src, regNumber srcreg)
+regNumber CodeGen::genAssignArithFloat(genTreeOps oper, GenTree* dst, regNumber dstreg, GenTree* src, regNumber srcreg)
{
regNumber result;
@@ -841,12 +840,12 @@ regNumber CodeGen::genAssignArithFloat(
return result;
}
-void CodeGen::genFloatArith(GenTreePtr tree, RegSet::RegisterPreference* tgtPref)
+void CodeGen::genFloatArith(GenTree* tree, RegSet::RegisterPreference* tgtPref)
{
var_types type = tree->TypeGet();
genTreeOps oper = tree->OperGet();
- GenTreePtr op1 = tree->gtGetOp1();
- GenTreePtr op2 = tree->gtGetOp2IfPresent();
+ GenTree* op1 = tree->gtGetOp1();
+ GenTree* op2 = tree->gtGetOp2IfPresent();
regNumber tgtReg;
unsigned varNum;
@@ -929,7 +928,7 @@ void CodeGen::genFloatArith(GenTreePtr tree, RegSet::RegisterPreference* tgtPref
}
regNumber CodeGen::genArithmFloat(
- genTreeOps oper, GenTreePtr dst, regNumber dstreg, GenTreePtr src, regNumber srcreg, bool bReverse)
+ genTreeOps oper, GenTree* dst, regNumber dstreg, GenTree* src, regNumber srcreg, bool bReverse)
{
regNumber result = REG_NA;
@@ -967,7 +966,7 @@ regNumber CodeGen::genArithmFloat(
return result;
}
-void CodeGen::genKeepAddressableFloat(GenTreePtr tree, regMaskTP* regMaskIntPtr, regMaskTP* regMaskFltPtr)
+void CodeGen::genKeepAddressableFloat(GenTree* tree, regMaskTP* regMaskIntPtr, regMaskTP* regMaskFltPtr)
{
regMaskTP regMaskInt, regMaskFlt;
@@ -1019,7 +1018,7 @@ void CodeGen::genKeepAddressableFloat(GenTreePtr tree, regMaskTP* regMaskIntPtr,
}
}
-void CodeGen::genComputeAddressableFloat(GenTreePtr tree,
+void CodeGen::genComputeAddressableFloat(GenTree* tree,
regMaskTP addrRegInt,
regMaskTP addrRegFlt,
RegSet::KeepReg keptReg,
@@ -1057,7 +1056,7 @@ void CodeGen::genComputeAddressableFloat(GenTreePtr tree,
}
}
-void CodeGen::genDoneAddressableFloat(GenTreePtr tree,
+void CodeGen::genDoneAddressableFloat(GenTree* tree,
regMaskTP addrRegInt,
regMaskTP addrRegFlt,
RegSet::KeepReg keptReg)
@@ -1085,10 +1084,10 @@ void CodeGen::genDoneAddressableFloat(GenTreePtr tree,
}
}
-GenTreePtr CodeGen::genMakeAddressableFloat(GenTreePtr tree,
- regMaskTP* regMaskIntPtr,
- regMaskTP* regMaskFltPtr,
- bool bCollapseConstantDoubles)
+GenTree* CodeGen::genMakeAddressableFloat(GenTree* tree,
+ regMaskTP* regMaskIntPtr,
+ regMaskTP* regMaskFltPtr,
+ bool bCollapseConstantDoubles)
{
*regMaskIntPtr = *regMaskFltPtr = 0;
@@ -1114,8 +1113,8 @@ GenTreePtr CodeGen::genMakeAddressableFloat(GenTreePtr tree,
}
else
{
- GenTreePtr addr = tree;
- tree = tree->gtOp.gtOp1;
+ GenTree* addr = tree;
+ tree = tree->gtOp.gtOp1;
genCodeForTree(tree, 0);
regSet.rsMarkRegUsed(tree, addr);
@@ -1139,9 +1138,9 @@ GenTreePtr CodeGen::genMakeAddressableFloat(GenTreePtr tree,
void CodeGen::genCodeForTreeCastFloat(GenTree* tree, RegSet::RegisterPreference* pref)
{
- GenTreePtr op1 = tree->gtOp.gtOp1;
- var_types from = op1->gtType;
- var_types to = tree->gtType;
+ GenTree* op1 = tree->gtOp.gtOp1;
+ var_types from = op1->gtType;
+ var_types to = tree->gtType;
if (varTypeIsFloating(from))
genCodeForTreeCastFromFloat(tree, pref);
@@ -1151,10 +1150,10 @@ void CodeGen::genCodeForTreeCastFloat(GenTree* tree, RegSet::RegisterPreference*
void CodeGen::genCodeForTreeCastFromFloat(GenTree* tree, RegSet::RegisterPreference* pref)
{
- GenTreePtr op1 = tree->gtOp.gtOp1;
- var_types from = op1->gtType;
- var_types final = tree->gtType;
- var_types intermediate = tree->CastToType();
+ GenTree* op1 = tree->gtOp.gtOp1;
+ var_types from = op1->gtType;
+ var_types final = tree->gtType;
+ var_types intermediate = tree->CastToType();
regNumber srcReg;
regNumber dstReg;
@@ -1230,7 +1229,7 @@ void CodeGen::genCodeForTreeCastFromFloat(GenTree* tree, RegSet::RegisterPrefere
genCodeForTree_DONE(tree, dstReg);
}
-void CodeGen::genCodeForTreeCastToFloat(GenTreePtr tree, RegSet::RegisterPreference* pref)
+void CodeGen::genCodeForTreeCastToFloat(GenTree* tree, RegSet::RegisterPreference* pref)
{
regNumber srcReg;
regNumber dstReg;
@@ -1238,7 +1237,7 @@ void CodeGen::genCodeForTreeCastToFloat(GenTreePtr tree, RegSet::RegisterPrefere
regMaskTP addrReg;
- GenTreePtr op1 = tree->gtOp.gtOp1;
+ GenTree* op1 = tree->gtOp.gtOp1;
op1 = genCodeForCommaTree(op1); // Trim off any comma expressions.
var_types from = op1->gtType;
var_types to = tree->gtType;
@@ -1342,7 +1341,7 @@ void CodeGen::genCodeForTreeCastToFloat(GenTreePtr tree, RegSet::RegisterPrefere
}
}
-void CodeGen::genRoundFloatExpression(GenTreePtr op, var_types type)
+void CodeGen::genRoundFloatExpression(GenTree* op, var_types type)
{
// Do nothing with memory resident opcodes - these are the right precision
if (type == TYP_UNDEF)
@@ -1477,14 +1476,14 @@ instruction genFloatJumpInstr(genTreeOps cmp, bool isUnordered)
}
}
-void CodeGen::genCondJumpFloat(GenTreePtr cond, BasicBlock* jumpTrue, BasicBlock* jumpFalse)
+void CodeGen::genCondJumpFloat(GenTree* cond, BasicBlock* jumpTrue, BasicBlock* jumpFalse)
{
assert(jumpTrue && jumpFalse);
assert(!(cond->gtFlags & GTF_REVERSE_OPS)); // Done in genCondJump()
assert(varTypeIsFloating(cond->gtOp.gtOp1->gtType));
- GenTreePtr op1 = cond->gtOp.gtOp1;
- GenTreePtr op2 = cond->gtOp.gtOp2;
+ GenTree* op1 = cond->gtOp.gtOp1;
+ GenTree* op2 = cond->gtOp.gtOp2;
genTreeOps cmp = cond->OperGet();
bool isUnordered = cond->gtFlags & GTF_RELOP_NAN_UN ? true : false;
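
 genCondJumpFloat above distinguishes ordered from unordered relops through GTF_RELOP_NAN_UN, and genFloatJumpInstr maps that to the jump instruction. As a standalone reminder of what the flag encodes (not the JIT's emission logic), here is the language-level difference when a NaN is involved:

 #include <cmath>
 #include <cstdio>

 // An ordered comparison is false whenever either operand is NaN; the unordered
 // variant is true in that case, which is why it can be phrased as a negation.
 bool LessThanOrdered(double a, double b)   { return a < b; }
 bool LessThanUnordered(double a, double b) { return !(a >= b); }

 int main()
 {
     double nan = std::nan("");
     std::printf("ordered   NaN < 1.0 : %d\n", LessThanOrdered(nan, 1.0));   // 0
     std::printf("unordered NaN < 1.0 : %d\n", LessThanUnordered(nan, 1.0)); // 1
     return 0;
 }
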
diff --git a/src/jit/regset.cpp b/src/jit/regset.cpp
index f671fa1702..4b3134d3bd 100644
--- a/src/jit/regset.cpp
+++ b/src/jit/regset.cpp
@@ -274,7 +274,7 @@ regMaskTP RegSet::rsRegMaskCanGrab()
// Load all the variable arguments in registers back to their registers.
for (regNumber reg = REG_ARG_FIRST; reg <= REG_ARG_LAST; reg = REG_NEXT(reg))
{
- GenTreePtr regHolds = rsUsedTree[reg];
+ GenTree* regHolds = rsUsedTree[reg];
if ((regHolds != NULL) && (regHolds->TypeGet() == TYP_STRUCT))
{
structArgMask |= genRegMask(reg);
@@ -532,7 +532,7 @@ bool RegTracker::rsTrackIsLclVarLng(regValKind rvKind)
/*****************************************************************************/
// inline
-void RegTracker::rsTrackRegClsVar(regNumber reg, GenTreePtr clsVar)
+void RegTracker::rsTrackRegClsVar(regNumber reg, GenTree* clsVar)
{
rsTrackRegTrash(reg);
}
@@ -840,7 +840,7 @@ RegSet::RegSet(Compiler* compiler, GCInfo& gcInfo) : m_rsCompiler(compiler), m_r
* be marked if the register is ever spilled.
*/
-void RegSet::rsMarkRegUsed(GenTreePtr tree, GenTreePtr addr)
+void RegSet::rsMarkRegUsed(GenTree* tree, GenTree* addr)
{
var_types type;
regNumber regNum;
@@ -909,7 +909,7 @@ void RegSet::rsMarkRegUsed(GenTreePtr tree, GenTreePtr addr)
rsUsedAddr[regNum] = addr;
}
-void RegSet::rsMarkArgRegUsedByPromotedFieldArg(GenTreePtr promotedStructArg, regNumber regNum, bool isGCRef)
+void RegSet::rsMarkArgRegUsedByPromotedFieldArg(GenTree* promotedStructArg, regNumber regNum, bool isGCRef)
{
regMaskTP regMask;
@@ -971,7 +971,7 @@ void RegSet::rsMarkArgRegUsedByPromotedFieldArg(GenTreePtr promotedStructArg, re
* Marks the register pair that holds the given operand value as 'used'.
*/
-void RegSet::rsMarkRegPairUsed(GenTreePtr tree)
+void RegSet::rsMarkRegPairUsed(GenTree* tree)
{
regNumber regLo;
regNumber regHi;
@@ -1062,7 +1062,7 @@ void RegSet::rsMarkRegPairUsed(GenTreePtr tree)
* to search rsMultiDesc[reg].
*/
-bool RegSet::rsIsTreeInReg(regNumber reg, GenTreePtr tree)
+bool RegSet::rsIsTreeInReg(regNumber reg, GenTree* tree)
{
/* First do the trivial check */
@@ -1097,7 +1097,7 @@ bool RegSet::rsIsTreeInReg(regNumber reg, GenTreePtr tree)
* Finds the SpillDsc corresponding to 'tree' assuming it was spilled from 'reg'.
*/
-RegSet::SpillDsc* RegSet::rsGetSpillInfo(GenTreePtr tree,
+RegSet::SpillDsc* RegSet::rsGetSpillInfo(GenTree* tree,
regNumber reg,
SpillDsc** pPrevDsc
#ifdef LEGACY_BACKEND
@@ -1182,7 +1182,7 @@ void RegSet::rsMarkRegFree(regMaskTP regMask)
printf("\n");
}
#endif
- GenTreePtr usedTree = rsUsedTree[regNum];
+ GenTree* usedTree = rsUsedTree[regNum];
assert(usedTree != NULL);
rsUsedTree[regNum] = NULL;
rsUsedAddr[regNum] = NULL;
@@ -1216,7 +1216,7 @@ void RegSet::rsMarkRegFree(regMaskTP regMask)
* it will still be marked as used, else it will be completely free.
*/
-void RegSet::rsMarkRegFree(regNumber reg, GenTreePtr tree)
+void RegSet::rsMarkRegFree(regNumber reg, GenTree* tree)
{
assert(rsIsTreeInReg(reg, tree));
regMaskTP regMask = genRegMask(reg);
@@ -1474,7 +1474,7 @@ void RegTracker::rsTrackRegCopy(regNumber reg1, regNumber reg2)
* One of the operands of this complex address mode has been spilled
*/
-void rsAddrSpillOper(GenTreePtr addr)
+void rsAddrSpillOper(GenTree* addr)
{
if (addr)
{
@@ -1491,7 +1491,7 @@ void rsAddrSpillOper(GenTreePtr addr)
}
}
-void rsAddrUnspillOper(GenTreePtr addr)
+void rsAddrUnspillOper(GenTree* addr)
{
if (addr)
{
@@ -1537,7 +1537,7 @@ void RegSet::rsSpillRegIfUsed(regNumber reg)
// caller of this method is expected to clear GTF_SPILL flag on call
// node after all of its registers marked for spilling are spilled.
//
-void RegSet::rsSpillTree(regNumber reg, GenTreePtr tree, unsigned regIdx /* =0 */)
+void RegSet::rsSpillTree(regNumber reg, GenTree* tree, unsigned regIdx /* =0 */)
{
assert(tree != nullptr);
@@ -1842,7 +1842,7 @@ void RegSet::rsSpillFPStack(GenTreeCall* call)
void RegSet::rsSpillReg(regNumber reg)
{
/* We must know the value in the register that we are spilling */
- GenTreePtr tree = rsUsedTree[reg];
+ GenTree* tree = rsUsedTree[reg];
#ifdef _TARGET_ARM_
if (tree == NULL && genIsValidFloatReg(reg) && !genIsValidDoubleReg(reg))
@@ -2203,7 +2203,7 @@ TempDsc* RegSet::rsGetSpillTempWord(regNumber reg, SpillDsc* dsc, SpillDsc* prev
* again as needed.
*/
-regNumber RegSet::rsUnspillOneReg(GenTreePtr tree, regNumber oldReg, KeepReg willKeepNewReg, regMaskTP needReg)
+regNumber RegSet::rsUnspillOneReg(GenTree* tree, regNumber oldReg, KeepReg willKeepNewReg, regMaskTP needReg)
{
/* Was oldReg multi-used when it was spilled? */
@@ -2403,7 +2403,7 @@ regNumber RegSet::rsUnspillOneReg(GenTreePtr tree, regNumber oldReg, KeepReg wil
// itself after ensuring there are no outstanding regs in GTF_SPILLED
// state.
//
-TempDsc* RegSet::rsUnspillInPlace(GenTreePtr tree, regNumber oldReg, unsigned regIdx /* =0 */)
+TempDsc* RegSet::rsUnspillInPlace(GenTree* tree, regNumber oldReg, unsigned regIdx /* =0 */)
{
assert(!isRegPairType(tree->gtType));
@@ -2465,7 +2465,7 @@ TempDsc* RegSet::rsUnspillInPlace(GenTreePtr tree, regNumber oldReg, unsigned re
* is set to KEEP_REG, we'll mark the new register as used.
*/
-void RegSet::rsUnspillReg(GenTreePtr tree, regMaskTP needReg, KeepReg keepReg)
+void RegSet::rsUnspillReg(GenTree* tree, regMaskTP needReg, KeepReg keepReg)
{
assert(!isRegPairType(tree->gtType)); // use rsUnspillRegPair()
regNumber oldReg = tree->gtRegNum;
@@ -2479,7 +2479,7 @@ void RegSet::rsUnspillReg(GenTreePtr tree, regMaskTP needReg, KeepReg keepReg)
* the reg was part of an address mode
*/
- GenTreePtr unspillAddr = spillDsc->spillAddr;
+ GenTree* unspillAddr = spillDsc->spillAddr;
/* Pick a new home for the value */
@@ -2510,7 +2510,7 @@ void RegSet::rsUnspillReg(GenTreePtr tree, regMaskTP needReg, KeepReg keepReg)
}
#endif // LEGACY_BACKEND
-void RegSet::rsMarkSpill(GenTreePtr tree, regNumber reg)
+void RegSet::rsMarkSpill(GenTree* tree, regNumber reg)
{
#ifdef LEGACY_BACKEND
tree->SetInReg(false);
@@ -2520,7 +2520,7 @@ void RegSet::rsMarkSpill(GenTreePtr tree, regNumber reg)
#ifdef LEGACY_BACKEND
-void RegSet::rsMarkUnspill(GenTreePtr tree, regNumber reg)
+void RegSet::rsMarkUnspill(GenTree* tree, regNumber reg)
{
#ifndef _TARGET_AMD64_
assert(tree->gtType != TYP_LONG);
@@ -2697,7 +2697,7 @@ AGAIN:
* any spillage, of course).
*/
-void RegSet::rsUnspillRegPair(GenTreePtr tree, regMaskTP needReg, KeepReg keepReg)
+void RegSet::rsUnspillRegPair(GenTree* tree, regMaskTP needReg, KeepReg keepReg)
{
assert(isRegPairType(tree->gtType));
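
 rsSpillTree, rsGetSpillInfo, and rsUnspillOneReg above maintain chains of spill descriptors per register, including the "multi-used" case where one register is spilled more than once. The toy below models only that push/pop bookkeeping; the real SpillDsc also records the spilled tree, its temp slot, and (for the legacy backend) the owning address mode:

 #include <cassert>
 #include <cstdio>

 struct SpillDesc
 {
     int        value; // stands in for the spilled tree / temp
     SpillDesc* next;  // next spilled value of the same register
 };

 const int  kRegCount = 4;
 SpillDesc* spillChains[kRegCount] = {};

 // Spilling pushes a descriptor onto the register's chain.
 void SpillReg(int reg, int value)
 {
     spillChains[reg] = new SpillDesc{value, spillChains[reg]};
 }

 // Unspilling pops the most recently spilled descriptor for the register.
 int UnspillReg(int reg)
 {
     SpillDesc* dsc = spillChains[reg];
     assert(dsc != nullptr);
     spillChains[reg] = dsc->next;
     int value = dsc->value;
     delete dsc;
     return value;
 }

 int main()
 {
     SpillReg(2, 10); // register 2 holds a multi-used value: spilled twice
     SpillReg(2, 20);
     int first  = UnspillReg(2); // 20, the most recent spill
     int second = UnspillReg(2); // 10
     std::printf("%d %d\n", first, second);
     return 0;
 }
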
diff --git a/src/jit/regset.h b/src/jit/regset.h
index d16cd95502..5542bc5f88 100644
--- a/src/jit/regset.h
+++ b/src/jit/regset.h
@@ -11,7 +11,6 @@
class LclVarDsc;
class TempDsc;
-typedef struct GenTree* GenTreePtr;
class Compiler;
class CodeGen;
class GCInfo;
@@ -76,7 +75,7 @@ private:
SpillDsc* spillNext; // next spilled value of same reg
union {
- GenTreePtr spillTree; // the value that was spilled
+ GenTree* spillTree; // the value that was spilled
#ifdef LEGACY_BACKEND
LclVarDsc* spillVarDsc; // variable if it's an enregistered variable
#endif // LEGACY_BACKEND
@@ -85,7 +84,7 @@ private:
TempDsc* spillTemp; // the temp holding the spilled value
#ifdef LEGACY_BACKEND
- GenTreePtr spillAddr; // owning complex address mode or nullptr
+ GenTree* spillAddr; // owning complex address mode or nullptr
union {
bool spillMoreMultis;
@@ -108,12 +107,12 @@ public:
// Track the status of the registers
//
#ifdef LEGACY_BACKEND
-public: // TODO-Cleanup: Should be private, but Compiler uses it
- GenTreePtr rsUsedTree[REG_COUNT]; // trees currently sitting in the registers
+public: // TODO-Cleanup: Should be private, but Compiler uses it
+ GenTree* rsUsedTree[REG_COUNT]; // trees currently sitting in the registers
private:
- GenTreePtr rsUsedAddr[REG_COUNT]; // addr for which rsUsedTree[reg] is a part of the addressing mode
- SpillDsc* rsMultiDesc[REG_COUNT]; // keeps track of 'multiple-use' registers.
-#endif // LEGACY_BACKEND
+ GenTree* rsUsedAddr[REG_COUNT]; // addr for which rsUsedTree[reg] is a part of the addressing mode
+ SpillDsc* rsMultiDesc[REG_COUNT]; // keeps track of 'multiple-use' registers.
+#endif // LEGACY_BACKEND
private:
bool rsNeededSpillReg; // true if this method needed to spill any registers
@@ -213,14 +212,14 @@ private:
void rsSetMaskVars(regMaskTP maskVars); // Setter for rsMaskVars or rsMaskRegVarFloat
void rsSetMaskLock(regMaskTP maskLock); // Setter for rsMaskLock or rsMaskLockedFloat
- void rsSetUsedTree(regNumber regNum, GenTreePtr tree); // Setter for rsUsedTree[]/genUsedRegsFloat[]
- void rsFreeUsedTree(regNumber regNum, GenTreePtr tree); // Free for rsUsedTree[]/genUsedRegsFloat[]
+ void rsSetUsedTree(regNumber regNum, GenTree* tree); // Setter for rsUsedTree[]/genUsedRegsFloat[]
+ void rsFreeUsedTree(regNumber regNum, GenTree* tree); // Free for rsUsedTree[]/genUsedRegsFloat[]
public:
regPairNo rsFindRegPairNo(regMaskTP regMask);
private:
- bool rsIsTreeInReg(regNumber reg, GenTreePtr tree);
+ bool rsIsTreeInReg(regNumber reg, GenTree* tree);
regMaskTP rsExcludeHint(regMaskTP regs, regMaskTP excludeHint);
regMaskTP rsNarrowHint(regMaskTP regs, regMaskTP narrowHint);
@@ -228,17 +227,17 @@ private:
regMaskTP rsRegMaskFree();
regMaskTP rsRegMaskCanGrab();
- void rsMarkRegUsed(GenTreePtr tree, GenTreePtr addr = 0);
+ void rsMarkRegUsed(GenTree* tree, GenTree* addr = 0);
// A special case of "rsMarkRegUsed": the register used is an argument register, used to hold part of
 // the given argument node "promotedStructArg". (The name suggests that we're likely to use this
 // for a register holding a promoted struct argument, but the implementation doesn't depend on that.) The
// "isGCRef" argument indicates whether the register contains a GC reference.
- void rsMarkArgRegUsedByPromotedFieldArg(GenTreePtr promotedStructArg, regNumber regNum, bool isGCRef);
+ void rsMarkArgRegUsedByPromotedFieldArg(GenTree* promotedStructArg, regNumber regNum, bool isGCRef);
- void rsMarkRegPairUsed(GenTreePtr tree);
+ void rsMarkRegPairUsed(GenTree* tree);
void rsMarkRegFree(regMaskTP regMask);
- void rsMarkRegFree(regNumber reg, GenTreePtr tree);
+ void rsMarkRegFree(regNumber reg, GenTree* tree);
void rsMultRegFree(regMaskTP regMask);
unsigned rsFreeNeededRegCount(regMaskTP needReg);
@@ -277,19 +276,19 @@ private:
best = _best;
}
};
- regNumber PickRegFloat(GenTreePtr tree,
+ regNumber PickRegFloat(GenTree* tree,
var_types type = TYP_DOUBLE,
RegisterPreference* pref = NULL,
bool bUsed = true);
regNumber PickRegFloat(var_types type = TYP_DOUBLE, RegisterPreference* pref = NULL, bool bUsed = true);
- regNumber PickRegFloatOtherThan(GenTreePtr tree, var_types type, regNumber reg);
+ regNumber PickRegFloatOtherThan(GenTree* tree, var_types type, regNumber reg);
regNumber PickRegFloatOtherThan(var_types type, regNumber reg);
regMaskTP RegFreeFloat();
- void SetUsedRegFloat(GenTreePtr tree, bool bValue);
- void SetLockedRegFloat(GenTreePtr tree, bool bValue);
- bool IsLockedRegFloat(GenTreePtr tree);
+ void SetUsedRegFloat(GenTree* tree, bool bValue);
+ void SetLockedRegFloat(GenTree* tree, bool bValue);
+ bool IsLockedRegFloat(GenTree* tree);
var_types rsRmvMultiReg(regNumber reg);
void rsRecMultiReg(regNumber reg, var_types type);
@@ -336,7 +335,7 @@ private:
void rsSpillBeg();
void rsSpillEnd();
- void rsSpillTree(regNumber reg, GenTreePtr tree, unsigned regIdx = 0);
+ void rsSpillTree(regNumber reg, GenTree* tree, unsigned regIdx = 0);
#if defined(_TARGET_X86_) && !FEATURE_STACK_FP_X87
void rsSpillFPStack(GenTreeCall* call);
@@ -348,7 +347,7 @@ private:
void rsSpillRegs(regMaskTP regMask);
#endif // LEGACY_BACKEND
- SpillDsc* rsGetSpillInfo(GenTreePtr tree,
+ SpillDsc* rsGetSpillInfo(GenTree* tree,
regNumber reg,
SpillDsc** pPrevDsc = nullptr
#ifdef LEGACY_BACKEND
@@ -371,28 +370,28 @@ private:
KEEP_REG
};
- regNumber rsUnspillOneReg(GenTreePtr tree, regNumber oldReg, KeepReg willKeepNewReg, regMaskTP needReg);
+ regNumber rsUnspillOneReg(GenTree* tree, regNumber oldReg, KeepReg willKeepNewReg, regMaskTP needReg);
#endif // LEGACY_BACKEND
- TempDsc* rsUnspillInPlace(GenTreePtr tree, regNumber oldReg, unsigned regIdx = 0);
+ TempDsc* rsUnspillInPlace(GenTree* tree, regNumber oldReg, unsigned regIdx = 0);
#ifdef LEGACY_BACKEND
- void rsUnspillReg(GenTreePtr tree, regMaskTP needReg, KeepReg keepReg);
+ void rsUnspillReg(GenTree* tree, regMaskTP needReg, KeepReg keepReg);
- void rsUnspillRegPair(GenTreePtr tree, regMaskTP needReg, KeepReg keepReg);
+ void rsUnspillRegPair(GenTree* tree, regMaskTP needReg, KeepReg keepReg);
#endif // LEGACY_BACKEND
- void rsMarkSpill(GenTreePtr tree, regNumber reg);
+ void rsMarkSpill(GenTree* tree, regNumber reg);
#ifdef LEGACY_BACKEND
- void rsMarkUnspill(GenTreePtr tree, regNumber reg);
+ void rsMarkUnspill(GenTree* tree, regNumber reg);
#endif // LEGACY_BACKEND
#if FEATURE_STACK_FP_X87
regMaskTP rsMaskUsedFloat;
regMaskTP rsMaskRegVarFloat;
regMaskTP rsMaskLockedFloat;
- GenTreePtr genUsedRegsFloat[REG_FPCOUNT];
+ GenTree* genUsedRegsFloat[REG_FPCOUNT];
LclVarDsc* genRegVarsFloat[REG_FPCOUNT];
#endif // FEATURE_STACK_FP_X87
};
@@ -448,7 +447,7 @@ public:
#ifdef LEGACY_BACKEND
void rsTrackRegLclVarLng(regNumber reg, unsigned var, bool low);
bool rsTrackIsLclVarLng(regValKind rvKind);
- void rsTrackRegClsVar(regNumber reg, GenTreePtr clsVar);
+ void rsTrackRegClsVar(regNumber reg, GenTree* clsVar);
#endif // LEGACY_BACKEND
void rsTrackRegCopy(regNumber reg1, regNumber reg2);
#ifdef LEGACY_BACKEND
diff --git a/src/jit/sharedfloat.cpp b/src/jit/sharedfloat.cpp
index fa7c733e1c..35d5519216 100644
--- a/src/jit/sharedfloat.cpp
+++ b/src/jit/sharedfloat.cpp
@@ -47,12 +47,12 @@ void RegSet::rsSetMaskLock(regMaskTP maskLock)
rsMaskLockedFloat = maskLock;
}
-void RegSet::rsSetUsedTree(regNumber regNum, GenTreePtr tree)
+void RegSet::rsSetUsedTree(regNumber regNum, GenTree* tree)
{
assert(genUsedRegsFloat[regNum] == 0);
genUsedRegsFloat[regNum] = tree;
}
-void RegSet::rsFreeUsedTree(regNumber regNum, GenTreePtr tree)
+void RegSet::rsFreeUsedTree(regNumber regNum, GenTree* tree)
{
assert(genUsedRegsFloat[regNum] == tree);
genUsedRegsFloat[regNum] = 0;
@@ -89,12 +89,12 @@ void RegSet::rsSetMaskLock(regMaskTP maskLock)
rsMaskLock = maskLock;
}
-void RegSet::rsSetUsedTree(regNumber regNum, GenTreePtr tree)
+void RegSet::rsSetUsedTree(regNumber regNum, GenTree* tree)
{
assert(rsUsedTree[regNum] == 0);
rsUsedTree[regNum] = tree;
}
-void RegSet::rsFreeUsedTree(regNumber regNum, GenTreePtr tree)
+void RegSet::rsFreeUsedTree(regNumber regNum, GenTree* tree)
{
assert(rsUsedTree[regNum] == tree);
rsUsedTree[regNum] = 0;
@@ -152,7 +152,7 @@ static const regNumber pickOrder[] = {REG_FPV0, REG_FPV1, REG_FPV2, REG_FPV3, RE
#endif
// picks a reg other than the one specified
-regNumber RegSet::PickRegFloatOtherThan(GenTreePtr tree, var_types type, regNumber reg)
+regNumber RegSet::PickRegFloatOtherThan(GenTree* tree, var_types type, regNumber reg)
{
return PickRegFloatOtherThan(type, reg);
}
@@ -163,7 +163,7 @@ regNumber RegSet::PickRegFloatOtherThan(var_types type, regNumber reg)
return PickRegFloat(type, &pref);
}
-regNumber RegSet::PickRegFloat(GenTreePtr tree, var_types type, RegisterPreference* pref, bool bUsed)
+regNumber RegSet::PickRegFloat(GenTree* tree, var_types type, RegisterPreference* pref, bool bUsed)
{
return PickRegFloat(type, pref, bUsed);
}
@@ -266,7 +266,7 @@ RET:
}
#ifdef LEGACY_BACKEND
-void RegSet::SetUsedRegFloat(GenTreePtr tree, bool bValue)
+void RegSet::SetUsedRegFloat(GenTree* tree, bool bValue)
{
/* The value must be sitting in a register */
assert(tree);
@@ -374,7 +374,7 @@ void RegSet::SetLockedRegFloat(GenTree* tree, bool bValue)
}
}
-bool RegSet::IsLockedRegFloat(GenTreePtr tree)
+bool RegSet::IsLockedRegFloat(GenTree* tree)
{
/* The value must be sitting in a register */
assert(tree);
@@ -385,7 +385,7 @@ bool RegSet::IsLockedRegFloat(GenTreePtr tree)
return (rsGetMaskLock() & regMask) == regMask;
}
-void CodeGen::UnspillFloat(GenTreePtr tree)
+void CodeGen::UnspillFloat(GenTree* tree)
{
#ifdef DEBUG
if (verbose)
@@ -446,9 +446,9 @@ void CodeGen::UnspillFloat(RegSet::SpillDsc* spillDsc)
#if FEATURE_STACK_FP_X87
-Compiler::fgWalkResult CodeGen::genRegVarDiesInSubTreeWorker(GenTreePtr* pTree, Compiler::fgWalkData* data)
+Compiler::fgWalkResult CodeGen::genRegVarDiesInSubTreeWorker(GenTree** pTree, Compiler::fgWalkData* data)
{
- GenTreePtr tree = *pTree;
+ GenTree* tree = *pTree;
genRegVarDiesInSubTreeData* pData = (genRegVarDiesInSubTreeData*)data->pCallbackData;
// if it's dying, just rename the register, else load it normally
@@ -461,7 +461,7 @@ Compiler::fgWalkResult CodeGen::genRegVarDiesInSubTreeWorker(GenTreePtr* pTree,
return Compiler::WALK_CONTINUE;
}
-bool CodeGen::genRegVarDiesInSubTree(GenTreePtr tree, regNumber reg)
+bool CodeGen::genRegVarDiesInSubTree(GenTree* tree, regNumber reg)
{
genRegVarDiesInSubTreeData Data;
Data.reg = reg;
@@ -482,7 +482,7 @@ bool CodeGen::genRegVarDiesInSubTree(GenTreePtr tree, regNumber reg)
 * If type!=TYP_UNDEF, that is the desired precision, else it is op->gtType
*/
-void CodeGen::genRoundFpExpression(GenTreePtr op, var_types type)
+void CodeGen::genRoundFpExpression(GenTree* op, var_types type)
{
#if FEATURE_STACK_FP_X87
return genRoundFpExpressionStackFP(op, type);
@@ -491,7 +491,7 @@ void CodeGen::genRoundFpExpression(GenTreePtr op, var_types type)
#endif
}
-void CodeGen::genCodeForTreeFloat(GenTreePtr tree, regMaskTP needReg, regMaskTP bestReg)
+void CodeGen::genCodeForTreeFloat(GenTree* tree, regMaskTP needReg, regMaskTP bestReg)
{
RegSet::RegisterPreference pref(needReg, bestReg);
genCodeForTreeFloat(tree, &pref);
diff --git a/src/jit/simd.cpp b/src/jit/simd.cpp
index e67662361a..62bed5fb8c 100644
--- a/src/jit/simd.cpp
+++ b/src/jit/simd.cpp
@@ -893,8 +893,8 @@ const SIMDIntrinsicInfo* Compiler::getSIMDIntrinsicInfo(CORINFO_CLASS_HANDLE* in
// We don't check anything in that case.
if (!isThisPtr || !isNewObj)
{
- GenTreePtr arg = impStackTop(stackIndex).val;
- var_types argType = arg->TypeGet();
+ GenTree* arg = impStackTop(stackIndex).val;
+ var_types argType = arg->TypeGet();
var_types expectedArgType;
if (argIndex < fixedArgCnt)
@@ -1000,11 +1000,11 @@ const SIMDIntrinsicInfo* Compiler::getSIMDIntrinsicInfo(CORINFO_CLASS_HANDLE* in
// If the popped value is a struct, and the expected type is a simd type, it will be set
// to that type, otherwise it will assert if the type being popped is not the expected type.
-GenTreePtr Compiler::impSIMDPopStack(var_types type, bool expectAddr)
+GenTree* Compiler::impSIMDPopStack(var_types type, bool expectAddr)
{
StackEntry se = impPopStack();
typeInfo ti = se.seTypeInfo;
- GenTreePtr tree = se.val;
+ GenTree* tree = se.val;
// If expectAddr is true, it implies that what we have on the stack is an address, and we need the
// SIMD type struct that it points to.
@@ -1536,12 +1536,12 @@ SIMDIntrinsicID Compiler::impSIMDRelOp(SIMDIntrinsicID relOpIntrinsicId,
// size - vector size in bytes
// op1 - operand of Abs intrinsic
//
-GenTreePtr Compiler::impSIMDAbs(CORINFO_CLASS_HANDLE typeHnd, var_types baseType, unsigned size, GenTree* op1)
+GenTree* Compiler::impSIMDAbs(CORINFO_CLASS_HANDLE typeHnd, var_types baseType, unsigned size, GenTree* op1)
{
assert(varTypeIsSIMD(op1));
- var_types simdType = op1->TypeGet();
- GenTreePtr retVal = nullptr;
+ var_types simdType = op1->TypeGet();
+ GenTree* retVal = nullptr;
#ifdef _TARGET_XARCH_
// When there is no direct support, Abs(v) could be computed
@@ -1573,8 +1573,8 @@ GenTreePtr Compiler::impSIMDAbs(CORINFO_CLASS_HANDLE typeHnd, var_types baseType
// This works only on integer vectors not on float/double vectors.
assert(varTypeIsIntegral(baseType));
- GenTreePtr op1Assign;
- unsigned op1LclNum;
+ GenTree* op1Assign;
+ unsigned op1LclNum;
if (op1->OperGet() == GT_LCL_VAR)
{
@@ -1590,31 +1590,31 @@ GenTreePtr Compiler::impSIMDAbs(CORINFO_CLASS_HANDLE typeHnd, var_types baseType
}
// Assign Vector.Zero to a temp since it is needed more than once
- GenTreePtr vecZero = gtNewSIMDVectorZero(simdType, baseType, size);
- unsigned vecZeroLclNum = lvaGrabTemp(true DEBUGARG("SIMD Abs VecZero"));
+ GenTree* vecZero = gtNewSIMDVectorZero(simdType, baseType, size);
+ unsigned vecZeroLclNum = lvaGrabTemp(true DEBUGARG("SIMD Abs VecZero"));
lvaSetStruct(vecZeroLclNum, typeHnd, false);
- GenTreePtr vecZeroAssign = gtNewTempAssign(vecZeroLclNum, vecZero);
+ GenTree* vecZeroAssign = gtNewTempAssign(vecZeroLclNum, vecZero);
// Construct BitVector = v < vector.Zero
- GenTreePtr bitVecOp1 = op1;
- GenTreePtr bitVecOp2 = gtNewLclvNode(vecZeroLclNum, vecZero->TypeGet());
+ GenTree* bitVecOp1 = op1;
+ GenTree* bitVecOp2 = gtNewLclvNode(vecZeroLclNum, vecZero->TypeGet());
var_types relOpBaseType = baseType;
SIMDIntrinsicID relOpIntrinsic =
impSIMDRelOp(SIMDIntrinsicLessThan, typeHnd, size, &relOpBaseType, &bitVecOp1, &bitVecOp2);
- GenTreePtr bitVec = gtNewSIMDNode(simdType, bitVecOp1, bitVecOp2, relOpIntrinsic, relOpBaseType, size);
- unsigned bitVecLclNum = lvaGrabTemp(true DEBUGARG("SIMD Abs bitVec"));
+ GenTree* bitVec = gtNewSIMDNode(simdType, bitVecOp1, bitVecOp2, relOpIntrinsic, relOpBaseType, size);
+ unsigned bitVecLclNum = lvaGrabTemp(true DEBUGARG("SIMD Abs bitVec"));
lvaSetStruct(bitVecLclNum, typeHnd, false);
- GenTreePtr bitVecAssign = gtNewTempAssign(bitVecLclNum, bitVec);
- bitVec = gtNewLclvNode(bitVecLclNum, bitVec->TypeGet());
+ GenTree* bitVecAssign = gtNewTempAssign(bitVecLclNum, bitVec);
+ bitVec = gtNewLclvNode(bitVecLclNum, bitVec->TypeGet());
// Construct condSelectOp1 = vector.Zero - v
- GenTreePtr subOp1 = gtNewLclvNode(vecZeroLclNum, vecZero->TypeGet());
- GenTreePtr subOp2 = gtNewLclvNode(op1LclNum, op1->TypeGet());
- GenTreePtr negVec = gtNewSIMDNode(simdType, subOp1, subOp2, SIMDIntrinsicSub, baseType, size);
+ GenTree* subOp1 = gtNewLclvNode(vecZeroLclNum, vecZero->TypeGet());
+ GenTree* subOp2 = gtNewLclvNode(op1LclNum, op1->TypeGet());
+ GenTree* negVec = gtNewSIMDNode(simdType, subOp1, subOp2, SIMDIntrinsicSub, baseType, size);
// Construct ConditionalSelect(bitVec, vector.Zero - v, v)
- GenTreePtr vec = gtNewLclvNode(op1LclNum, op1->TypeGet());
- retVal = impSIMDSelect(typeHnd, baseType, size, bitVec, negVec, vec);
+ GenTree* vec = gtNewLclvNode(op1LclNum, op1->TypeGet());
+ retVal = impSIMDSelect(typeHnd, baseType, size, bitVec, negVec, vec);
// Prepend bitVec assignment to retVal.
// retVal = (tmp2 = v < tmp1), CondSelect(tmp2, tmp1 - v, v)
@@ -1698,7 +1698,7 @@ GenTreePtr Compiler::impSIMDAbs(CORINFO_CLASS_HANDLE typeHnd, var_types baseType
// Return Value:
// Returns GT_SIMD tree that computes Select(vc, va, vb)
//
-GenTreePtr Compiler::impSIMDSelect(
+GenTree* Compiler::impSIMDSelect(
CORINFO_CLASS_HANDLE typeHnd, var_types baseType, unsigned size, GenTree* op1, GenTree* op2, GenTree* op3)
{
assert(varTypeIsSIMD(op1));
@@ -1758,12 +1758,12 @@ GenTreePtr Compiler::impSIMDSelect(
// Return Value:
// Returns GT_SIMD tree that computes Max(va, vb)
//
-GenTreePtr Compiler::impSIMDMinMax(SIMDIntrinsicID intrinsicId,
- CORINFO_CLASS_HANDLE typeHnd,
- var_types baseType,
- unsigned size,
- GenTree* op1,
- GenTree* op2)
+GenTree* Compiler::impSIMDMinMax(SIMDIntrinsicID intrinsicId,
+ CORINFO_CLASS_HANDLE typeHnd,
+ var_types baseType,
+ unsigned size,
+ GenTree* op1,
+ GenTree* op2)
{
assert(intrinsicId == SIMDIntrinsicMin || intrinsicId == SIMDIntrinsicMax);
assert(varTypeIsSIMD(op1));
@@ -1946,7 +1946,7 @@ GenTreePtr Compiler::impSIMDMinMax(SIMDIntrinsicID intrinsicId,
// Notes:
// This method handles the differences between the CEE_NEWOBJ and constructor cases.
//
-GenTreePtr Compiler::getOp1ForConstructor(OPCODE opcode, GenTreePtr newobjThis, CORINFO_CLASS_HANDLE clsHnd)
+GenTree* Compiler::getOp1ForConstructor(OPCODE opcode, GenTree* newobjThis, CORINFO_CLASS_HANDLE clsHnd)
{
GenTree* op1;
if (opcode == CEE_NEWOBJ)
@@ -1970,9 +1970,9 @@ GenTreePtr Compiler::getOp1ForConstructor(OPCODE opcode, GenTreePtr newobjThis,
// Set the flag that indicates that the lclVar referenced by this tree
// is used in a SIMD intrinsic.
// Arguments:
-// tree - GenTreePtr
+// tree - GenTree*
-void Compiler::setLclRelatedToSIMDIntrinsic(GenTreePtr tree)
+void Compiler::setLclRelatedToSIMDIntrinsic(GenTree* tree)
{
assert(tree->OperIsLocal());
unsigned lclNum = tree->AsLclVarCommon()->GetLclNum();
@@ -1984,18 +1984,18 @@ void Compiler::setLclRelatedToSIMDIntrinsic(GenTreePtr tree)
// Check if two field nodes reference the same memory location.
// Notice that this check is just based on pattern matching.
// Arguments:
-// op1 - GenTreePtr.
-// op2 - GenTreePtr.
+// op1 - GenTree*.
+// op2 - GenTree*.
// Return Value:
// If op1's parent node and op2's parent node are at the same location, return true. Otherwise, return false.
-bool areFieldsParentsLocatedSame(GenTreePtr op1, GenTreePtr op2)
+bool areFieldsParentsLocatedSame(GenTree* op1, GenTree* op2)
{
assert(op1->OperGet() == GT_FIELD);
assert(op2->OperGet() == GT_FIELD);
- GenTreePtr op1ObjRef = op1->gtField.gtFldObj;
- GenTreePtr op2ObjRef = op2->gtField.gtFldObj;
+ GenTree* op1ObjRef = op1->gtField.gtFldObj;
+ GenTree* op2ObjRef = op2->gtField.gtFldObj;
while (op1ObjRef != nullptr && op2ObjRef != nullptr)
{
@@ -2033,13 +2033,13 @@ bool areFieldsParentsLocatedSame(GenTreePtr op1, GenTreePtr op2)
//----------------------------------------------------------------------
// Check whether two fields are contiguous
// Arguments:
-// first - GenTreePtr. The Type of the node should be TYP_FLOAT
-// second - GenTreePtr. The Type of the node should be TYP_FLOAT
+// first - GenTree*. The Type of the node should be TYP_FLOAT
+// second - GenTree*. The Type of the node should be TYP_FLOAT
// Return Value:
// if the first field is located before the second field, and they are located contiguously,
// then return true. Otherwise, return false.
-bool Compiler::areFieldsContiguous(GenTreePtr first, GenTreePtr second)
+bool Compiler::areFieldsContiguous(GenTree* first, GenTree* second)
{
assert(first->OperGet() == GT_FIELD);
assert(second->OperGet() == GT_FIELD);
@@ -2063,8 +2063,8 @@ bool Compiler::areFieldsContiguous(GenTreePtr first, GenTreePtr second)
//-------------------------------------------------------------------------------
// Check whether two array element nodes are located contiguously or not.
// Arguments:
-// op1 - GenTreePtr.
-// op2 - GenTreePtr.
+// op1 - GenTree*.
+// op2 - GenTree*.
// Return Value:
// if the array element op1 is located before array element op2, and they are contiguous,
// then return true. Otherwise, return false.
@@ -2072,20 +2072,20 @@ bool Compiler::areFieldsContiguous(GenTreePtr first, GenTreePtr second)
// Right now this can only check array elements with a constant index. In the future,
// we should consider allowing this function to check the index using an expression.
-bool Compiler::areArrayElementsContiguous(GenTreePtr op1, GenTreePtr op2)
+bool Compiler::areArrayElementsContiguous(GenTree* op1, GenTree* op2)
{
noway_assert(op1->gtOper == GT_INDEX);
noway_assert(op2->gtOper == GT_INDEX);
GenTreeIndex* op1Index = op1->AsIndex();
GenTreeIndex* op2Index = op2->AsIndex();
- GenTreePtr op1ArrayRef = op1Index->Arr();
- GenTreePtr op2ArrayRef = op2Index->Arr();
+ GenTree* op1ArrayRef = op1Index->Arr();
+ GenTree* op2ArrayRef = op2Index->Arr();
assert(op1ArrayRef->TypeGet() == TYP_REF);
assert(op2ArrayRef->TypeGet() == TYP_REF);
- GenTreePtr op1IndexNode = op1Index->Index();
- GenTreePtr op2IndexNode = op2Index->Index();
+ GenTree* op1IndexNode = op1Index->Index();
+ GenTree* op2IndexNode = op2Index->Index();
if ((op1IndexNode->OperGet() == GT_CNS_INT && op2IndexNode->OperGet() == GT_CNS_INT) &&
op1IndexNode->gtIntCon.gtIconVal + 1 == op2IndexNode->gtIntCon.gtIconVal)
{
@@ -2106,8 +2106,8 @@ bool Compiler::areArrayElementsContiguous(GenTreePtr op1, GenTreePtr op2)
//-------------------------------------------------------------------------------
// Check whether two argument nodes are contiguous or not.
// Arguments:
-// op1 - GenTreePtr.
-// op2 - GenTreePtr.
+// op1 - GenTree*.
+// op2 - GenTree*.
// Return Value:
// if the argument node op1 is located before argument node op2, and they are located contiguously,
// then return true. Otherwise, return false.
@@ -2115,7 +2115,7 @@ bool Compiler::areArrayElementsContiguous(GenTreePtr op1, GenTreePtr op2)
// Right now this can only check fields and arrays. In the future we should add more cases.
//
-bool Compiler::areArgumentsContiguous(GenTreePtr op1, GenTreePtr op2)
+bool Compiler::areArgumentsContiguous(GenTree* op1, GenTree* op2)
{
if (op1->OperGet() == GT_INDEX && op2->OperGet() == GT_INDEX)
{
@@ -2133,7 +2133,7 @@ bool Compiler::areArgumentsContiguous(GenTreePtr op1, GenTreePtr op2)
// from first argument's address.
//
// Arguments:
-// tree - GenTreePtr. This is the tree node which is used to get the address for indir.
+// tree - GenTree*. This is the tree node which is used to get the address for indir.
// simdsize - unsigned. This is the simd vector size.
// arrayElementsCount - unsigned. This is used for generating the boundary check for array.
//
@@ -2145,20 +2145,20 @@ bool Compiler::areArgumentsContiguous(GenTreePtr op1, GenTreePtr op2)
// are located contiguously or not. In the future we should support more cases.
// 2. Though it happens to just work fine, front-end phases are not aware of the GT_LEA node. Therefore, convert these
// to use GT_ADDR.
-GenTreePtr Compiler::createAddressNodeForSIMDInit(GenTreePtr tree, unsigned simdSize)
+GenTree* Compiler::createAddressNodeForSIMDInit(GenTree* tree, unsigned simdSize)
{
assert(tree->OperGet() == GT_FIELD || tree->OperGet() == GT_INDEX);
- GenTreePtr byrefNode = nullptr;
- GenTreePtr startIndex = nullptr;
- unsigned offset = 0;
- var_types baseType = tree->gtType;
+ GenTree* byrefNode = nullptr;
+ GenTree* startIndex = nullptr;
+ unsigned offset = 0;
+ var_types baseType = tree->gtType;
if (tree->OperGet() == GT_FIELD)
{
- GenTreePtr objRef = tree->gtField.gtFldObj;
+ GenTree* objRef = tree->gtField.gtFldObj;
if (objRef != nullptr && objRef->gtOper == GT_ADDR)
{
- GenTreePtr obj = objRef->gtOp.gtOp1;
+ GenTree* obj = objRef->gtOp.gtOp1;
// If the field is directly from a struct, then in this case,
// we should set this struct's lvUsedInSIMDIntrinsic as true,
@@ -2184,13 +2184,13 @@ GenTreePtr Compiler::createAddressNodeForSIMDInit(GenTreePtr tree, unsigned simd
else if (tree->OperGet() == GT_INDEX)
{
- GenTreePtr index = tree->AsIndex()->Index();
+ GenTree* index = tree->AsIndex()->Index();
assert(index->OperGet() == GT_CNS_INT);
- GenTreePtr checkIndexExpr = nullptr;
- unsigned indexVal = (unsigned)(index->gtIntCon.gtIconVal);
- offset = indexVal * genTypeSize(tree->TypeGet());
- GenTreePtr arrayRef = tree->AsIndex()->Arr();
+ GenTree* checkIndexExpr = nullptr;
+ unsigned indexVal = (unsigned)(index->gtIntCon.gtIconVal);
+ offset = indexVal * genTypeSize(tree->TypeGet());
+ GenTree* arrayRef = tree->AsIndex()->Arr();
// Generate the boundary check exception.
// The length for boundary check should be the maximum index number which should be
@@ -2209,7 +2209,7 @@ GenTreePtr Compiler::createAddressNodeForSIMDInit(GenTreePtr tree, unsigned simd
{
unreached();
}
- GenTreePtr address =
+ GenTree* address =
new (this, GT_LEA) GenTreeAddrMode(TYP_BYREF, byrefNode, startIndex, genTypeSize(tree->TypeGet()), offset);
return address;
}
@@ -2220,23 +2220,23 @@ GenTreePtr Compiler::createAddressNodeForSIMDInit(GenTreePtr tree, unsigned simd
// lclvar so that it won't be promoted.
//
// Arguments:
-// stmt - GenTreePtr. Input statement node.
+// stmt - GenTree*. Input statement node.
-void Compiler::impMarkContiguousSIMDFieldAssignments(GenTreePtr stmt)
+void Compiler::impMarkContiguousSIMDFieldAssignments(GenTree* stmt)
{
if (!featureSIMD || opts.MinOpts())
{
return;
}
- GenTreePtr expr = stmt->gtStmt.gtStmtExpr;
+ GenTree* expr = stmt->gtStmt.gtStmtExpr;
if (expr->OperGet() == GT_ASG && expr->TypeGet() == TYP_FLOAT)
{
- GenTreePtr curDst = expr->gtOp.gtOp1;
- GenTreePtr curSrc = expr->gtOp.gtOp2;
- unsigned index = 0;
- var_types baseType = TYP_UNKNOWN;
- unsigned simdSize = 0;
- GenTreePtr srcSimdStructNode = getSIMDStructFromField(curSrc, &baseType, &index, &simdSize, true);
+ GenTree* curDst = expr->gtOp.gtOp1;
+ GenTree* curSrc = expr->gtOp.gtOp2;
+ unsigned index = 0;
+ var_types baseType = TYP_UNKNOWN;
+ unsigned simdSize = 0;
+ GenTree* srcSimdStructNode = getSIMDStructFromField(curSrc, &baseType, &index, &simdSize, true);
if (srcSimdStructNode == nullptr || baseType != TYP_FLOAT)
{
fgPreviousCandidateSIMDFieldAsgStmt = nullptr;
@@ -2248,9 +2248,9 @@ void Compiler::impMarkContiguousSIMDFieldAssignments(GenTreePtr stmt)
else if (fgPreviousCandidateSIMDFieldAsgStmt != nullptr)
{
assert(index > 0);
- GenTreePtr prevAsgExpr = fgPreviousCandidateSIMDFieldAsgStmt->gtStmt.gtStmtExpr;
- GenTreePtr prevDst = prevAsgExpr->gtOp.gtOp1;
- GenTreePtr prevSrc = prevAsgExpr->gtOp.gtOp2;
+ GenTree* prevAsgExpr = fgPreviousCandidateSIMDFieldAsgStmt->gtStmt.gtStmtExpr;
+ GenTree* prevDst = prevAsgExpr->gtOp.gtOp1;
+ GenTree* prevSrc = prevAsgExpr->gtOp.gtOp2;
if (!areArgumentsContiguous(prevDst, curDst) || !areArgumentsContiguous(prevSrc, curSrc))
{
fgPreviousCandidateSIMDFieldAsgStmt = nullptr;
@@ -2267,10 +2267,10 @@ void Compiler::impMarkContiguousSIMDFieldAssignments(GenTreePtr stmt)
if (curDst->OperGet() == GT_FIELD)
{
- GenTreePtr objRef = curDst->gtField.gtFldObj;
+ GenTree* objRef = curDst->gtField.gtFldObj;
if (objRef != nullptr && objRef->gtOper == GT_ADDR)
{
- GenTreePtr obj = objRef->gtOp.gtOp1;
+ GenTree* obj = objRef->gtOp.gtOp1;
if (varTypeIsStruct(obj) && obj->OperIsLocal())
{
setLclRelatedToSIMDIntrinsic(obj);
@@ -2307,12 +2307,12 @@ void Compiler::impMarkContiguousSIMDFieldAssignments(GenTreePtr stmt)
// implemented as an intrinsic in the JIT, then return the tree that implements
// it.
//
-GenTreePtr Compiler::impSIMDIntrinsic(OPCODE opcode,
- GenTreePtr newobjThis,
- CORINFO_CLASS_HANDLE clsHnd,
- CORINFO_METHOD_HANDLE methodHnd,
- CORINFO_SIG_INFO* sig,
- int memberRef)
+GenTree* Compiler::impSIMDIntrinsic(OPCODE opcode,
+ GenTree* newobjThis,
+ CORINFO_CLASS_HANDLE clsHnd,
+ CORINFO_METHOD_HANDLE methodHnd,
+ CORINFO_SIG_INFO* sig,
+ int memberRef)
{
assert(featureSIMD);
@@ -2425,18 +2425,18 @@ GenTreePtr Compiler::impSIMDIntrinsic(OPCODE opcode,
// We must maintain left-to-right order of the args, but we will pop
// them off in reverse order (the Nth arg was pushed onto the stack last).
- GenTree* list = nullptr;
- GenTreePtr firstArg = nullptr;
- GenTreePtr prevArg = nullptr;
- int offset = 0;
- bool areArgsContiguous = true;
+ GenTree* list = nullptr;
+ GenTree* firstArg = nullptr;
+ GenTree* prevArg = nullptr;
+ int offset = 0;
+ bool areArgsContiguous = true;
for (unsigned i = 0; i < initCount; i++)
{
GenTree* nextArg = impSIMDPopStack(baseType);
if (areArgsContiguous)
{
- GenTreePtr curArg = nextArg;
- firstArg = curArg;
+ GenTree* curArg = nextArg;
+ firstArg = curArg;
if (prevArg != nullptr)
{
@@ -2455,8 +2455,8 @@ GenTreePtr Compiler::impSIMDIntrinsic(OPCODE opcode,
// we initialize the vector from the first argument's address, only when
// the baseType is TYP_FLOAT and the arguments are located contiguously in memory
initFromFirstArgIndir = true;
- GenTreePtr op2Address = createAddressNodeForSIMDInit(firstArg, size);
- var_types simdType = getSIMDTypeForSize(size);
+ GenTree* op2Address = createAddressNodeForSIMDInit(firstArg, size);
+ var_types simdType = getSIMDTypeForSize(size);
op2 = gtNewOperNode(GT_IND, simdType, op2Address);
}
else
diff --git a/src/jit/simdcodegenxarch.cpp b/src/jit/simdcodegenxarch.cpp
index 75c05a4069..64a9bd5cf5 100644
--- a/src/jit/simdcodegenxarch.cpp
+++ b/src/jit/simdcodegenxarch.cpp
@@ -2855,8 +2855,8 @@ void CodeGen::genLoadIndTypeSIMD12(GenTree* treeNode)
{
assert(treeNode->OperGet() == GT_IND);
- regNumber targetReg = treeNode->gtRegNum;
- GenTreePtr op1 = treeNode->gtOp.gtOp1;
+ regNumber targetReg = treeNode->gtRegNum;
+ GenTree* op1 = treeNode->gtOp.gtOp1;
assert(!op1->isContained());
regNumber operandReg = genConsumeReg(op1);
@@ -2900,7 +2900,7 @@ void CodeGen::genStoreLclTypeSIMD12(GenTree* treeNode)
offs = treeNode->gtLclFld.gtLclOffs;
}
- GenTreePtr op1 = treeNode->gtOp.gtOp1;
+ GenTree* op1 = treeNode->gtOp.gtOp1;
assert(!op1->isContained());
regNumber operandReg = genConsumeReg(op1);
@@ -3004,7 +3004,7 @@ void CodeGen::genPutArgStkSIMD12(GenTree* treeNode)
{
assert(treeNode->OperGet() == GT_PUTARG_STK);
- GenTreePtr op1 = treeNode->gtOp.gtOp1;
+ GenTree* op1 = treeNode->gtOp.gtOp1;
assert(!op1->isContained());
regNumber operandReg = genConsumeReg(op1);
diff --git a/src/jit/ssabuilder.cpp b/src/jit/ssabuilder.cpp
index 67db1df66e..367a8f906d 100644
--- a/src/jit/ssabuilder.cpp
+++ b/src/jit/ssabuilder.cpp
@@ -99,7 +99,7 @@ void Compiler::fgResetForSsa()
}
if (blk->bbTreeList != nullptr)
{
- GenTreePtr last = blk->bbTreeList->gtPrev;
+ GenTree* last = blk->bbTreeList->gtPrev;
blk->bbTreeList = blk->FirstNonPhiDef();
if (blk->bbTreeList != nullptr)
{
@@ -113,7 +113,7 @@ void Compiler::fgResetForSsa()
blk->bbPostOrderNum = 0;
for (GenTreeStmt* stmt = blk->firstStmt(); stmt != nullptr; stmt = stmt->getNextStmt())
{
- for (GenTreePtr tree = stmt->gtStmt.gtStmtList; tree != nullptr; tree = tree->gtNext)
+ for (GenTree* tree = stmt->gtStmt.gtStmtList; tree != nullptr; tree = tree->gtNext)
{
if (tree->IsLocal())
{
@@ -683,7 +683,7 @@ BlkToBlkVectorMap* SsaBuilder::ComputeIteratedDominanceFrontier(BasicBlock** pos
static GenTree* GetPhiNode(BasicBlock* block, unsigned lclNum)
{
// Walk the statements for phi nodes.
- for (GenTreePtr stmt = block->bbTreeList; stmt; stmt = stmt->gtNext)
+ for (GenTree* stmt = block->bbTreeList; stmt; stmt = stmt->gtNext)
{
// A prefix of the statements of the block are phi definition nodes. If we complete processing
// that prefix, exit.
@@ -692,9 +692,9 @@ static GenTree* GetPhiNode(BasicBlock* block, unsigned lclNum)
break;
}
- GenTreePtr tree = stmt->gtStmt.gtStmtExpr;
+ GenTree* tree = stmt->gtStmt.gtStmtExpr;
- GenTreePtr phiLhs = tree->gtOp.gtOp1;
+ GenTree* phiLhs = tree->gtOp.gtOp1;
assert(phiLhs->OperGet() == GT_LCL_VAR);
if (phiLhs->gtLclVarCommon.gtLclNum == lclNum)
{
@@ -773,19 +773,19 @@ void SsaBuilder::InsertPhiFunctions(BasicBlock** postOrder, int count)
// j. So insert a phi node at l.
JITDUMP("Inserting phi definition for V%02u at start of BB%02u.\n", lclNum, bbInDomFront->bbNum);
- GenTreePtr phiLhs = m_pCompiler->gtNewLclvNode(lclNum, m_pCompiler->lvaTable[lclNum].TypeGet());
+ GenTree* phiLhs = m_pCompiler->gtNewLclvNode(lclNum, m_pCompiler->lvaTable[lclNum].TypeGet());
// Create 'phiRhs' as a GT_PHI node for 'lclNum', it will eventually hold a GT_LIST of GT_PHI_ARG
// nodes. However we have to construct this list so for now the gtOp1 of 'phiRhs' is a nullptr.
// It will get replaced with a GT_LIST of GT_PHI_ARG nodes in
// SsaBuilder::AssignPhiNodeRhsVariables() and in SsaBuilder::AddDefToHandlerPhis()
- GenTreePtr phiRhs =
+ GenTree* phiRhs =
m_pCompiler->gtNewOperNode(GT_PHI, m_pCompiler->lvaTable[lclNum].TypeGet(), nullptr);
- GenTreePtr phiAsg = m_pCompiler->gtNewAssignNode(phiLhs, phiRhs);
+ GenTree* phiAsg = m_pCompiler->gtNewAssignNode(phiLhs, phiRhs);
- GenTreePtr stmt = m_pCompiler->fgInsertStmtAtBeg(bbInDomFront, phiAsg);
+ GenTree* stmt = m_pCompiler->fgInsertStmtAtBeg(bbInDomFront, phiAsg);
m_pCompiler->gtSetStmtInfo(stmt);
m_pCompiler->fgSetStmtSeq(stmt);
}
@@ -914,7 +914,7 @@ void SsaBuilder::AddDefPoint(GenTree* tree, BasicBlock* blk)
#endif
}
-bool SsaBuilder::IsIndirectAssign(GenTreePtr tree, Compiler::IndirectAssignmentAnnotation** ppIndirAssign)
+bool SsaBuilder::IsIndirectAssign(GenTree* tree, Compiler::IndirectAssignmentAnnotation** ppIndirAssign)
{
return tree->OperGet() == GT_ASG && m_pCompiler->m_indirAssignMap != nullptr &&
m_pCompiler->GetIndirAssignMap()->Lookup(tree, ppIndirAssign);
@@ -938,8 +938,8 @@ void SsaBuilder::TreeRenameVariables(GenTree* tree, BasicBlock* block, SsaRename
// can skip these during (at least) value numbering.
if (tree->OperIsAssignment())
{
- GenTreePtr lhs = tree->gtOp.gtOp1->gtEffectiveVal(/*commaOnly*/ true);
- GenTreePtr trueLhs = lhs->gtEffectiveVal(/*commaOnly*/ true);
+ GenTree* lhs = tree->gtOp.gtOp1->gtEffectiveVal(/*commaOnly*/ true);
+ GenTree* trueLhs = lhs->gtEffectiveVal(/*commaOnly*/ true);
if (trueLhs->OperIsIndir())
{
trueLhs->gtFlags |= GTF_IND_ASG_LHS;
@@ -1134,7 +1134,7 @@ void SsaBuilder::AddDefToHandlerPhis(BasicBlock* block, unsigned lclNum, unsigne
bool phiFound = false;
#endif
// A prefix of the block's statements will be SSA definitions. Search those for "lclNum".
- for (GenTreePtr stmt = handler->bbTreeList; stmt; stmt = stmt->gtNext)
+ for (GenTree* stmt = handler->bbTreeList; stmt; stmt = stmt->gtNext)
{
// If the tree is not an SSA def, break out of the loop: we're done.
if (!stmt->IsPhiDefnStmt())
@@ -1142,14 +1142,14 @@ void SsaBuilder::AddDefToHandlerPhis(BasicBlock* block, unsigned lclNum, unsigne
break;
}
- GenTreePtr tree = stmt->gtStmt.gtStmtExpr;
+ GenTree* tree = stmt->gtStmt.gtStmtExpr;
assert(tree->IsPhiDefn());
if (tree->gtOp.gtOp1->gtLclVar.gtLclNum == lclNum)
{
// It's the definition for the right local. Add "count" to the RHS.
- GenTreePtr phi = tree->gtOp.gtOp2;
+ GenTree* phi = tree->gtOp.gtOp2;
GenTreeArgList* args = nullptr;
if (phi->gtOp.gtOp1 != nullptr)
{
@@ -1311,16 +1311,16 @@ void SsaBuilder::BlockRenameVariables(BasicBlock* block, SsaRenameState* pRename
// We need to iterate over phi definitions, to give them SSA names, but we need
// to know which are which, so we don't add phi definitions to handler phi arg lists.
// Statements are phi defns until they aren't.
- bool isPhiDefn = true;
- GenTreePtr firstNonPhi = block->FirstNonPhiDef();
- for (GenTreePtr stmt = block->bbTreeList; stmt; stmt = stmt->gtNext)
+ bool isPhiDefn = true;
+ GenTree* firstNonPhi = block->FirstNonPhiDef();
+ for (GenTree* stmt = block->bbTreeList; stmt; stmt = stmt->gtNext)
{
if (stmt == firstNonPhi)
{
isPhiDefn = false;
}
- for (GenTreePtr tree = stmt->gtStmt.gtStmtList; tree; tree = tree->gtNext)
+ for (GenTree* tree = stmt->gtStmt.gtStmtList; tree; tree = tree->gtNext)
{
TreeRenameVariables(tree, block, pRenameState, isPhiDefn);
}
@@ -1374,13 +1374,13 @@ void SsaBuilder::AssignPhiNodeRhsVariables(BasicBlock* block, SsaRenameState* pR
for (BasicBlock* succ : block->GetAllSuccs(m_pCompiler))
{
// Walk the statements for phi nodes.
- for (GenTreePtr stmt = succ->bbTreeList; stmt != nullptr && stmt->IsPhiDefnStmt(); stmt = stmt->gtNext)
+ for (GenTree* stmt = succ->bbTreeList; stmt != nullptr && stmt->IsPhiDefnStmt(); stmt = stmt->gtNext)
{
- GenTreePtr tree = stmt->gtStmt.gtStmtExpr;
+ GenTree* tree = stmt->gtStmt.gtStmtExpr;
assert(tree->IsPhiDefn());
// Get the phi node from GT_ASG.
- GenTreePtr phiNode = tree->gtOp.gtOp2;
+ GenTree* phiNode = tree->gtOp.gtOp2;
assert(phiNode->gtOp.gtOp1 == nullptr || phiNode->gtOp.gtOp1->OperGet() == GT_LIST);
unsigned lclNum = tree->gtOp.gtOp1->gtLclVar.gtLclNum;
@@ -1402,7 +1402,7 @@ void SsaBuilder::AssignPhiNodeRhsVariables(BasicBlock* block, SsaRenameState* pR
}
if (!found)
{
- GenTreePtr newPhiArg =
+ GenTree* newPhiArg =
new (m_pCompiler, GT_PHI_ARG) GenTreePhiArg(tree->gtOp.gtOp1->TypeGet(), lclNum, ssaNum, block);
argList = (phiNode->gtOp.gtOp1 == nullptr ? nullptr : phiNode->gtOp.gtOp1->AsArgList());
phiNode->gtOp.gtOp1 = new (m_pCompiler, GT_LIST) GenTreeArgList(newPhiArg, argList);
@@ -1510,9 +1510,9 @@ void SsaBuilder::AssignPhiNodeRhsVariables(BasicBlock* block, SsaRenameState* pR
// For a filter, we consider the filter to be the "real" handler.
BasicBlock* handlerStart = succTry->ExFlowBlock();
- for (GenTreePtr stmt = handlerStart->bbTreeList; stmt; stmt = stmt->gtNext)
+ for (GenTree* stmt = handlerStart->bbTreeList; stmt; stmt = stmt->gtNext)
{
- GenTreePtr tree = stmt->gtStmt.gtStmtExpr;
+ GenTree* tree = stmt->gtStmt.gtStmtExpr;
// Check if the first n of the statements are phi nodes. If not, exit.
if (tree->OperGet() != GT_ASG || tree->gtOp.gtOp2 == nullptr ||
@@ -1522,8 +1522,8 @@ void SsaBuilder::AssignPhiNodeRhsVariables(BasicBlock* block, SsaRenameState* pR
}
// Get the phi node from GT_ASG.
- GenTreePtr lclVar = tree->gtOp.gtOp1;
- unsigned lclNum = lclVar->gtLclVar.gtLclNum;
+ GenTree* lclVar = tree->gtOp.gtOp1;
+ unsigned lclNum = lclVar->gtLclVar.gtLclNum;
// If the variable is live-out of "blk", and is therefore live on entry to the try-block-start
// "succ", then we make sure the current SSA name for the
@@ -1535,7 +1535,7 @@ void SsaBuilder::AssignPhiNodeRhsVariables(BasicBlock* block, SsaRenameState* pR
continue;
}
- GenTreePtr phiNode = tree->gtOp.gtOp2;
+ GenTree* phiNode = tree->gtOp.gtOp2;
assert(phiNode->gtOp.gtOp1 == nullptr || phiNode->gtOp.gtOp1->OperGet() == GT_LIST);
GenTreeArgList* argList = reinterpret_cast<GenTreeArgList*>(phiNode->gtOp.gtOp1);
@@ -1555,7 +1555,7 @@ void SsaBuilder::AssignPhiNodeRhsVariables(BasicBlock* block, SsaRenameState* pR
if (!alreadyArg)
{
// Add the new argument.
- GenTreePtr newPhiArg =
+ GenTree* newPhiArg =
new (m_pCompiler, GT_PHI_ARG) GenTreePhiArg(lclVar->TypeGet(), lclNum, ssaNum, block);
phiNode->gtOp.gtOp1 = new (m_pCompiler, GT_LIST) GenTreeArgList(newPhiArg, argList);
@@ -1979,7 +1979,7 @@ void Compiler::JitTestCheckSSA()
for (NodeToTestDataMap::KeyIterator ki = testData->Begin(); !ki.Equal(testData->End()); ++ki)
{
TestLabelAndNum tlAndN;
- GenTreePtr node = ki.Get();
+ GenTree* node = ki.Get();
bool b = testData->Lookup(node, &tlAndN);
assert(b);
if (tlAndN.m_tl == TL_SsaName)
diff --git a/src/jit/ssabuilder.h b/src/jit/ssabuilder.h
index 013b3ce072..58181e333d 100644
--- a/src/jit/ssabuilder.h
+++ b/src/jit/ssabuilder.h
@@ -166,7 +166,7 @@ private:
// Returns true, and sets "*ppIndirAssign", if "tree" has been recorded as an indirect assignment.
// (If the tree is an assignment, it's a definition only if it's labeled as an indirect definition, where
// we took the address of the local elsewhere in the extended tree.)
- bool IsIndirectAssign(GenTreePtr tree, Compiler::IndirectAssignmentAnnotation** ppIndirAssign);
+ bool IsIndirectAssign(GenTree* tree, Compiler::IndirectAssignmentAnnotation** ppIndirAssign);
#ifdef DEBUG
void Print(BasicBlock** postOrder, int count);
diff --git a/src/jit/stackfp.cpp b/src/jit/stackfp.cpp
index f35f34e9cf..c15ae629d1 100644
--- a/src/jit/stackfp.cpp
+++ b/src/jit/stackfp.cpp
@@ -765,7 +765,7 @@ int CodeGen::genNumberTemps()
return compCurFPState.m_uStackSize - genCountBits(regSet.rsMaskRegVarFloat);
}
-void CodeGen::genDiscardStackFP(GenTreePtr tree)
+void CodeGen::genDiscardStackFP(GenTree* tree)
{
assert(tree->InReg());
assert(varTypeIsFloating(tree));
@@ -835,7 +835,7 @@ void CodeGen::genRegVarBirthStackFP(LclVarDsc* varDsc)
SetRegVarFloat(reg, varDsc->TypeGet(), varDsc);
}
-void CodeGen::genRegVarBirthStackFP(GenTreePtr tree)
+void CodeGen::genRegVarBirthStackFP(GenTree* tree)
{
#ifdef DEBUG
if (compiler->verbose)
@@ -861,7 +861,7 @@ void CodeGen::genRegVarDeathStackFP(LclVarDsc* varDsc)
SetRegVarFloat(reg, varDsc->TypeGet(), 0);
}
-void CodeGen::genRegVarDeathStackFP(GenTreePtr tree)
+void CodeGen::genRegVarDeathStackFP(GenTree* tree)
{
#ifdef DEBUG
if (compiler->verbose)
@@ -876,7 +876,7 @@ void CodeGen::genRegVarDeathStackFP(GenTreePtr tree)
genRegVarDeathStackFP(varDsc);
}
-void CodeGen::genLoadStackFP(GenTreePtr tree, regNumber reg)
+void CodeGen::genLoadStackFP(GenTree* tree, regNumber reg)
{
#ifdef DEBUG
if (compiler->verbose)
@@ -916,7 +916,7 @@ void CodeGen::genLoadStackFP(GenTreePtr tree, regNumber reg)
}
}
-void CodeGen::genMovStackFP(GenTreePtr dst, regNumber dstreg, GenTreePtr src, regNumber srcreg)
+void CodeGen::genMovStackFP(GenTree* dst, regNumber dstreg, GenTree* src, regNumber srcreg)
{
if (dstreg == REG_FPNONE && !dst->IsRegVar())
{
@@ -1049,7 +1049,7 @@ void CodeGen::genMovStackFP(GenTreePtr dst, regNumber dstreg, GenTreePtr src, re
}
}
-void CodeGen::genCodeForTreeStackFP_DONE(GenTreePtr tree, regNumber reg)
+void CodeGen::genCodeForTreeStackFP_DONE(GenTree* tree, regNumber reg)
{
return genCodeForTree_DONE(tree, reg);
}
@@ -1105,16 +1105,16 @@ void CodeGen::genSetupStateStackFP(BasicBlock* block)
assert(block->bbFPStateX87->IsConsistent());
}
-regMaskTP CodeGen::genPushArgumentStackFP(GenTreePtr args)
+regMaskTP CodeGen::genPushArgumentStackFP(GenTree* args)
{
regMaskTP addrReg = 0;
unsigned opsz = genTypeSize(genActualType(args->TypeGet()));
switch (args->gtOper)
{
- GenTreePtr temp;
- GenTreePtr fval;
- size_t flopsz;
+ GenTree* temp;
+ GenTree* fval;
+ size_t flopsz;
case GT_CNS_DBL:
{
@@ -1228,7 +1228,7 @@ regMaskTP CodeGen::genPushArgumentStackFP(GenTreePtr args)
return addrReg;
}
-void CodeGen::genRoundFpExpressionStackFP(GenTreePtr op, var_types type)
+void CodeGen::genRoundFpExpressionStackFP(GenTree* op, var_types type)
{
// Do nothing with memory resident opcodes - these are the right precision
// (even if genMakeAddrOrFPstk loads them to the FP stack)
@@ -1267,7 +1267,7 @@ void CodeGen::genRoundFpExpressionStackFP(GenTreePtr op, var_types type)
compiler->tmpRlsTemp(temp);
}
-void CodeGen::genCodeForTreeStackFP_Const(GenTreePtr tree)
+void CodeGen::genCodeForTreeStackFP_Const(GenTree* tree)
{
#ifdef DEBUG
if (compiler->verbose)
@@ -1298,7 +1298,7 @@ void CodeGen::genCodeForTreeStackFP_Const(GenTreePtr tree)
genCodeForTreeStackFP_DONE(tree, reg);
}
-void CodeGen::genCodeForTreeStackFP_Leaf(GenTreePtr tree)
+void CodeGen::genCodeForTreeStackFP_Leaf(GenTree* tree)
{
#ifdef DEBUG
if (compiler->verbose)
@@ -1361,7 +1361,7 @@ void CodeGen::genCodeForTreeStackFP_Leaf(GenTreePtr tree)
genUpdateLife(tree);
}
-void CodeGen::genCodeForTreeStackFP_Asg(GenTreePtr tree)
+void CodeGen::genCodeForTreeStackFP_Asg(GenTree* tree)
{
#ifdef DEBUG
if (compiler->verbose)
@@ -1372,10 +1372,10 @@ void CodeGen::genCodeForTreeStackFP_Asg(GenTreePtr tree)
}
#endif // DEBUG
- emitAttr size;
- unsigned offs;
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtGetOp2IfPresent();
+ emitAttr size;
+ unsigned offs;
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtGetOp2IfPresent();
assert(tree->OperGet() == GT_ASG);
@@ -1396,7 +1396,7 @@ void CodeGen::genCodeForTreeStackFP_Asg(GenTreePtr tree)
op1 = genCodeForCommaTree(op1);
}
- GenTreePtr op1NonCom = op1->gtEffectiveVal();
+ GenTree* op1NonCom = op1->gtEffectiveVal();
if (op1NonCom->gtOper == GT_LCL_VAR)
{
#ifdef DEBUG
@@ -1440,7 +1440,7 @@ void CodeGen::genCodeForTreeStackFP_Asg(GenTreePtr tree)
constantValue = (double)temp;
}
- GenTreePtr constantTree;
+ GenTree* constantTree;
constantTree = compiler->gtNewDconNode(constantValue);
if (genConstantLoadStackFP(constantTree, true))
{
@@ -1465,7 +1465,7 @@ void CodeGen::genCodeForTreeStackFP_Asg(GenTreePtr tree)
{
// Load constant to fp stack.
- GenTreePtr cnsaddr;
+ GenTree* cnsaddr;
// Create slot for constant
if (op1->gtType == TYP_FLOAT || StackFPIsSameAsFloat(op2->gtDblCon.gtDconVal))
@@ -1507,7 +1507,7 @@ void CodeGen::genCodeForTreeStackFP_Asg(GenTreePtr tree)
// store forwarding fix for pentium 4 and centrino and also
// fld for doubles that can be represented as floats, saving
// 4 bytes of load
- GenTreePtr cnsaddr;
+ GenTree* cnsaddr;
// Create slot for constant
if (op1->gtType == TYP_FLOAT || StackFPIsSameAsFloat(op2->gtDblCon.gtDconVal))
@@ -1563,7 +1563,7 @@ void CodeGen::genCodeForTreeStackFP_Asg(GenTreePtr tree)
}
void CodeGen::genSetupForOpStackFP(
- GenTreePtr& op1, GenTreePtr& op2, bool bReverse, bool bMakeOp1Addressable, bool bOp1ReadOnly, bool bOp2ReadOnly)
+ GenTree*& op1, GenTree*& op2, bool bReverse, bool bMakeOp1Addressable, bool bOp1ReadOnly, bool bOp2ReadOnly)
{
if (bMakeOp1Addressable)
{
@@ -1664,7 +1664,7 @@ void CodeGen::genSetupForOpStackFP(
}
}
-void CodeGen::genCodeForTreeStackFP_Arithm(GenTreePtr tree)
+void CodeGen::genCodeForTreeStackFP_Arithm(GenTree* tree)
{
#ifdef DEBUG
if (compiler->verbose)
@@ -1687,7 +1687,8 @@ void CodeGen::genCodeForTreeStackFP_Arithm(GenTreePtr tree)
// here and tell genArithmStackFP to do the reverse operation
bool bReverse;
- GenTreePtr op1, op2;
+ GenTree* op1;
+ GenTree* op2;
if (tree->gtFlags & GTF_REVERSE_OPS)
{
@@ -1747,7 +1748,7 @@ void CodeGen::genCodeForTreeStackFP_Arithm(GenTreePtr tree)
}
regNumber CodeGen::genArithmStackFP(
- genTreeOps oper, GenTreePtr dst, regNumber dstreg, GenTreePtr src, regNumber srcreg, bool bReverse)
+ genTreeOps oper, GenTree* dst, regNumber dstreg, GenTree* src, regNumber srcreg, bool bReverse)
{
#ifdef DEBUG
if (compiler->verbose)
@@ -1910,7 +1911,7 @@ regNumber CodeGen::genArithmStackFP(
return result;
}
-void CodeGen::genCodeForTreeStackFP_AsgArithm(GenTreePtr tree)
+void CodeGen::genCodeForTreeStackFP_AsgArithm(GenTree* tree)
{
#ifdef DEBUG
if (compiler->verbose)
@@ -1924,10 +1925,8 @@ void CodeGen::genCodeForTreeStackFP_AsgArithm(GenTreePtr tree)
assert(tree->OperGet() == GT_ASG_ADD || tree->OperGet() == GT_ASG_SUB || tree->OperGet() == GT_ASG_MUL ||
tree->OperGet() == GT_ASG_DIV);
- GenTreePtr op1, op2;
-
- op1 = tree->gtOp.gtOp1;
- op2 = tree->gtGetOp2IfPresent();
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtGetOp2IfPresent();
genSetupForOpStackFP(op1, op2, (tree->gtFlags & GTF_REVERSE_OPS) ? true : false, true, false, true);
@@ -1937,8 +1936,7 @@ void CodeGen::genCodeForTreeStackFP_AsgArithm(GenTreePtr tree)
genCodeForTreeStackFP_DONE(tree, result);
}
-regNumber CodeGen::genAsgArithmStackFP(
- genTreeOps oper, GenTreePtr dst, regNumber dstreg, GenTreePtr src, regNumber srcreg)
+regNumber CodeGen::genAsgArithmStackFP(genTreeOps oper, GenTree* dst, regNumber dstreg, GenTree* src, regNumber srcreg)
{
regNumber result = REG_FPNONE;
@@ -2089,7 +2087,7 @@ regNumber CodeGen::genAsgArithmStackFP(
return result;
}
-void CodeGen::genCodeForTreeStackFP_SmpOp(GenTreePtr tree)
+void CodeGen::genCodeForTreeStackFP_SmpOp(GenTree* tree)
{
#ifdef DEBUG
if (compiler->verbose)
@@ -2152,7 +2150,7 @@ void CodeGen::genCodeForTreeStackFP_SmpOp(GenTreePtr tree)
case GT_RETURN:
{
- GenTreePtr op1 = tree->gtOp.gtOp1;
+ GenTree* op1 = tree->gtOp.gtOp1;
assert(op1);
// Compute the result onto the FP stack
@@ -2206,8 +2204,8 @@ void CodeGen::genCodeForTreeStackFP_SmpOp(GenTreePtr tree)
case GT_COMMA:
{
- GenTreePtr op1 = tree->gtOp.gtOp1;
- GenTreePtr op2 = tree->gtGetOp2IfPresent();
+ GenTree* op1 = tree->gtOp.gtOp1;
+ GenTree* op2 = tree->gtGetOp2IfPresent();
if (tree->gtFlags & GTF_REVERSE_OPS)
{
@@ -2241,7 +2239,7 @@ void CodeGen::genCodeForTreeStackFP_SmpOp(GenTreePtr tree)
case GT_NEG:
{
- GenTreePtr op1 = tree->gtOp.gtOp1;
+ GenTree* op1 = tree->gtOp.gtOp1;
// get the tree into a register
genCodeForTreeFloat(op1);
@@ -2260,7 +2258,7 @@ void CodeGen::genCodeForTreeStackFP_SmpOp(GenTreePtr tree)
{
assert(compiler->IsMathIntrinsic(tree));
- GenTreePtr op1 = tree->gtOp.gtOp1;
+ GenTree* op1 = tree->gtOp.gtOp1;
// get tree into a register
genCodeForTreeFloat(op1);
@@ -2290,7 +2288,7 @@ void CodeGen::genCodeForTreeStackFP_SmpOp(GenTreePtr tree)
TempDsc* temp;
int offs;
- GenTreePtr op1 = tree->gtOp.gtOp1;
+ GenTree* op1 = tree->gtOp.gtOp1;
// Offset of the DWord containing the exponent
offs = (op1->gtType == TYP_FLOAT) ? 0 : sizeof(int);
@@ -2340,7 +2338,7 @@ void CodeGen::genCodeForTreeStackFP_SmpOp(GenTreePtr tree)
}
}
-void CodeGen::genCodeForTreeStackFP_Cast(GenTreePtr tree)
+void CodeGen::genCodeForTreeStackFP_Cast(GenTree* tree)
{
#ifdef DEBUG
if (compiler->verbose)
@@ -2359,7 +2357,7 @@ void CodeGen::genCodeForTreeStackFP_Cast(GenTreePtr tree)
TempDsc* temp;
emitAttr size;
- GenTreePtr op1 = tree->gtOp.gtOp1;
+ GenTree* op1 = tree->gtOp.gtOp1;
// If op1 is a comma expression, evaluate the non-last parts, make op1 be the rest.
op1 = genCodeForCommaTree(op1);
@@ -2579,7 +2577,7 @@ void CodeGen::genCodeForTreeStackFP_Cast(GenTreePtr tree)
}
}
-void CodeGen::genCodeForTreeStackFP_Special(GenTreePtr tree)
+void CodeGen::genCodeForTreeStackFP_Special(GenTree* tree)
{
#ifdef DEBUG
if (compiler->verbose)
@@ -2603,7 +2601,7 @@ void CodeGen::genCodeForTreeStackFP_Special(GenTreePtr tree)
}
}
-void CodeGen::genCodeForTreeFloat(GenTreePtr tree, RegSet::RegisterPreference* pref)
+void CodeGen::genCodeForTreeFloat(GenTree* tree, RegSet::RegisterPreference* pref)
{
// TestTransitions();
genTreeOps oper;
@@ -2643,7 +2641,7 @@ void CodeGen::genCodeForTreeFloat(GenTreePtr tree, RegSet::RegisterPreference* p
#endif
}
-bool CodeGen::genCompInsStackFP(GenTreePtr tos, GenTreePtr other)
+bool CodeGen::genCompInsStackFP(GenTree* tos, GenTree* other)
{
// assume gensetupop done
@@ -2750,22 +2748,22 @@ bool CodeGen::genCompInsStackFP(GenTreePtr tos, GenTreePtr other)
return bReverse;
}
-void CodeGen::genCondJumpFltStackFP(GenTreePtr cond, BasicBlock* jumpTrue, BasicBlock* jumpFalse, bool bDoTransition)
+void CodeGen::genCondJumpFltStackFP(GenTree* cond, BasicBlock* jumpTrue, BasicBlock* jumpFalse, bool bDoTransition)
{
assert(jumpTrue && jumpFalse);
assert(!(cond->gtFlags & GTF_REVERSE_OPS)); // Done in genCondJump()
assert(varTypeIsFloating(cond->gtOp.gtOp1));
- GenTreePtr op1 = cond->gtOp.gtOp1;
- GenTreePtr op2 = cond->gtOp.gtOp2;
+ GenTree* op1 = cond->gtOp.gtOp1;
+ GenTree* op2 = cond->gtOp.gtOp2;
genTreeOps cmp = cond->OperGet();
// Prepare operands.
genSetupForOpStackFP(op1, op2, false, false, true, false);
- GenTreePtr tos;
- GenTreePtr other;
- bool bReverseCmp = false;
+ GenTree* tos;
+ GenTree* other;
+ bool bReverseCmp = false;
if ((op2->IsRegVar() || (op2->InReg())) && // op2 is in a reg
(compCurFPState.TopVirtual() == (unsigned)op2->gtRegNum && // Is it already at the top of the stack?
@@ -2888,7 +2886,7 @@ BasicBlock* CodeGen::genTransitionBlockStackFP(FlatFPStateX87* pState, BasicBloc
return pBlock;
}
-void CodeGen::genCondJumpLngStackFP(GenTreePtr cond, BasicBlock* jumpTrue, BasicBlock* jumpFalse)
+void CodeGen::genCondJumpLngStackFP(GenTree* cond, BasicBlock* jumpTrue, BasicBlock* jumpFalse)
{
// For the moment, and so we don't have to deal with the number of special cases
// we have, we will insert a dummy block for jumpTrue (if necessary) that will do the
@@ -2908,7 +2906,7 @@ void CodeGen::genCondJumpLngStackFP(GenTreePtr cond, BasicBlock* jumpTrue, Basic
genDefineTempLabel(pTransition);
}
-void CodeGen::genQMarkRegVarTransition(GenTreePtr nextNode, VARSET_VALARG_TP liveset)
+void CodeGen::genQMarkRegVarTransition(GenTree* nextNode, VARSET_VALARG_TP liveset)
{
// Kill any vars that may die in the transition
VARSET_TP newLiveSet(VarSetOps::Intersection(compiler, liveset, compiler->optAllFPregVars));
@@ -2943,7 +2941,7 @@ void CodeGen::genQMarkRegVarTransition(GenTreePtr nextNode, VARSET_VALARG_TP liv
}
}
-void CodeGen::genQMarkBeforeElseStackFP(QmarkStateStackFP* pState, VARSET_VALARG_TP varsetCond, GenTreePtr nextNode)
+void CodeGen::genQMarkBeforeElseStackFP(QmarkStateStackFP* pState, VARSET_VALARG_TP varsetCond, GenTree* nextNode)
{
assert(regSet.rsMaskLockedFloat == 0);
@@ -2954,7 +2952,7 @@ void CodeGen::genQMarkBeforeElseStackFP(QmarkStateStackFP* pState, VARSET_VALARG
genQMarkRegVarTransition(nextNode, varsetCond);
}
-void CodeGen::genQMarkAfterElseBlockStackFP(QmarkStateStackFP* pState, VARSET_VALARG_TP varsetCond, GenTreePtr nextNode)
+void CodeGen::genQMarkAfterElseBlockStackFP(QmarkStateStackFP* pState, VARSET_VALARG_TP varsetCond, GenTree* nextNode)
{
assert(regSet.rsMaskLockedFloat == 0);
@@ -3143,7 +3141,7 @@ void CodeGen::genTableSwitchStackFP(regNumber reg, unsigned jumpCnt, BasicBlock*
return genTableSwitch(reg, jumpCnt, jumpTab);
}
-bool CodeGen::genConstantLoadStackFP(GenTreePtr tree, bool bOnlyNoMemAccess)
+bool CodeGen::genConstantLoadStackFP(GenTree* tree, bool bOnlyNoMemAccess)
{
assert(tree->gtOper == GT_CNS_DBL);
@@ -3176,7 +3174,7 @@ bool CodeGen::genConstantLoadStackFP(GenTreePtr tree, bool bOnlyNoMemAccess)
}
else
{
- GenTreePtr addr;
+ GenTree* addr;
if (tree->gtType == TYP_FLOAT || StackFPIsSameAsFloat(tree->gtDblCon.gtDconVal))
{
float f = forceCastToFloat(tree->gtDblCon.gtDconVal);
@@ -3241,10 +3239,10 @@ bool CodeGen::StackFPIsSameAsFloat(double d)
return false;
}
-GenTreePtr CodeGen::genMakeAddressableStackFP(GenTreePtr tree,
- regMaskTP* regMaskIntPtr,
- regMaskTP* regMaskFltPtr,
- bool bCollapseConstantDoubles)
+GenTree* CodeGen::genMakeAddressableStackFP(GenTree* tree,
+ regMaskTP* regMaskIntPtr,
+ regMaskTP* regMaskFltPtr,
+ bool bCollapseConstantDoubles)
{
*regMaskIntPtr = *regMaskFltPtr = 0;
@@ -3262,7 +3260,7 @@ GenTreePtr CodeGen::genMakeAddressableStackFP(GenTreePtr tree,
}
else
{
- GenTreePtr addr;
+ GenTree* addr;
if (tree->gtType == TYP_FLOAT ||
(bCollapseConstantDoubles && StackFPIsSameAsFloat(tree->gtDblCon.gtDconVal)))
{
@@ -3311,8 +3309,8 @@ GenTreePtr CodeGen::genMakeAddressableStackFP(GenTreePtr tree,
}
else
{
- GenTreePtr addr = tree;
- tree = tree->gtOp.gtOp1;
+ GenTree* addr = tree;
+ tree = tree->gtOp.gtOp1;
genCodeForTree(tree, 0);
regSet.rsMarkRegUsed(tree, addr);
@@ -3335,7 +3333,7 @@ GenTreePtr CodeGen::genMakeAddressableStackFP(GenTreePtr tree,
}
}
-void CodeGen::genKeepAddressableStackFP(GenTreePtr tree, regMaskTP* regMaskIntPtr, regMaskTP* regMaskFltPtr)
+void CodeGen::genKeepAddressableStackFP(GenTree* tree, regMaskTP* regMaskIntPtr, regMaskTP* regMaskFltPtr)
{
regMaskTP regMaskInt, regMaskFlt;
@@ -3401,7 +3399,7 @@ void CodeGen::genKeepAddressableStackFP(GenTreePtr tree, regMaskTP* regMaskIntPt
}
}
-void CodeGen::genDoneAddressableStackFP(GenTreePtr tree,
+void CodeGen::genDoneAddressableStackFP(GenTree* tree,
regMaskTP addrRegInt,
regMaskTP addrRegFlt,
RegSet::KeepReg keptReg)
@@ -3885,12 +3883,12 @@ void Compiler::raEnregisterVarsPrePassStackFP()
{
case BBJ_COND:
{
- GenTreePtr stmt;
+ GenTree* stmt;
stmt = block->bbTreeList->gtPrev;
assert(stmt->gtNext == NULL && stmt->gtStmt.gtStmtExpr->gtOper == GT_JTRUE);
assert(stmt->gtStmt.gtStmtExpr->gtOp.gtOp1);
- GenTreePtr cond = stmt->gtStmt.gtStmtExpr->gtOp.gtOp1;
+ GenTree* cond = stmt->gtStmt.gtStmtExpr->gtOp.gtOp1;
assert(cond->OperIsCompare());
@@ -3936,12 +3934,12 @@ void Compiler::raEnregisterVarsPrePassStackFP()
}
VARSET_TP liveSet(VarSetOps::MakeCopy(this, block->bbLiveIn));
- for (GenTreePtr stmt = block->FirstNonPhiDef(); stmt; stmt = stmt->gtNext)
+ for (GenTree* stmt = block->FirstNonPhiDef(); stmt; stmt = stmt->gtNext)
{
assert(stmt->gtOper == GT_STMT);
unsigned prevHeight = stmt->gtStmt.gtStmtList->gtFPlvl;
- for (GenTreePtr tree = stmt->gtStmt.gtStmtList; tree; tree = tree->gtNext)
+ for (GenTree* tree = stmt->gtStmt.gtStmtList; tree; tree = tree->gtNext)
{
VarSetOps::AssignNoCopy(this, liveSet, fgUpdateLiveSet(liveSet, tree));
switch (tree->gtOper)
@@ -3954,7 +3952,7 @@ void Compiler::raEnregisterVarsPrePassStackFP()
// to avoid store forwarding stall
if (tree->gtType == TYP_DOUBLE)
{
- GenTreePtr op1 = tree->gtOp.gtOp1;
+ GenTree* op1 = tree->gtOp.gtOp1;
if (op1->gtOper == GT_LCL_VAR && op1->gtType == TYP_LONG)
{
unsigned int lclNum = op1->gtLclVarCommon.gtLclNum;
@@ -4034,7 +4032,7 @@ void Compiler::raEnregisterVarsPrePassStackFP()
#endif
}
-void Compiler::raSetRegLclBirthDeath(GenTreePtr tree, VARSET_VALARG_TP lastlife, bool fromLDOBJ)
+void Compiler::raSetRegLclBirthDeath(GenTree* tree, VARSET_VALARG_TP lastlife, bool fromLDOBJ)
{
assert(tree->gtOper == GT_LCL_VAR);
@@ -4128,11 +4126,11 @@ void Compiler::raEnregisterVarsPostPassStackFP()
*/
VARSET_TP lastlife(VarSetOps::MakeCopy(this, block->bbLiveIn));
- for (GenTreePtr stmt = block->FirstNonPhiDef(); stmt; stmt = stmt->gtNext)
+ for (GenTree* stmt = block->FirstNonPhiDef(); stmt; stmt = stmt->gtNext)
{
assert(stmt->gtOper == GT_STMT);
- for (GenTreePtr tree = stmt->gtStmt.gtStmtList; tree;
+ for (GenTree *tree = stmt->gtStmt.gtStmtList; tree;
VarSetOps::AssignNoCopy(this, lastlife, fgUpdateLiveSet(lastlife, tree)), tree = tree->gtNext)
{
if (tree->gtOper == GT_LCL_VAR)
diff --git a/src/jit/valuenum.cpp b/src/jit/valuenum.cpp
index 7cdbfdcfb0..b6ff775f8f 100644
--- a/src/jit/valuenum.cpp
+++ b/src/jit/valuenum.cpp
@@ -2817,7 +2817,7 @@ ValueNum ValueNumStore::FieldSeqVNAppend(ValueNum fsVN1, ValueNum fsVN2)
return fieldSeqVN;
}
-ValueNum ValueNumStore::ExtendPtrVN(GenTreePtr opA, GenTreePtr opB)
+ValueNum ValueNumStore::ExtendPtrVN(GenTree* opA, GenTree* opB)
{
if (opB->OperGet() == GT_CNS_INT)
{
@@ -2830,7 +2830,7 @@ ValueNum ValueNumStore::ExtendPtrVN(GenTreePtr opA, GenTreePtr opB)
return NoVN;
}
-ValueNum ValueNumStore::ExtendPtrVN(GenTreePtr opA, FieldSeqNode* fldSeq)
+ValueNum ValueNumStore::ExtendPtrVN(GenTree* opA, FieldSeqNode* fldSeq)
{
assert(fldSeq != nullptr);
@@ -2981,7 +2981,7 @@ ValueNum Compiler::fgValueNumberArrIndexAssign(CORINFO_CLASS_HANDLE elemTypeEq,
return vnStore->VNForMapStore(TYP_REF, fgCurMemoryVN[GcHeap], elemTypeEqVN, newValAtArrType);
}
-ValueNum Compiler::fgValueNumberArrIndexVal(GenTreePtr tree, VNFuncApp* pFuncApp, ValueNum addrXvn)
+ValueNum Compiler::fgValueNumberArrIndexVal(GenTree* tree, VNFuncApp* pFuncApp, ValueNum addrXvn)
{
assert(vnStore->IsVNHandle(pFuncApp->m_args[0]));
CORINFO_CLASS_HANDLE arrElemTypeEQ = CORINFO_CLASS_HANDLE(vnStore->ConstantValue<ssize_t>(pFuncApp->m_args[0]));
@@ -2991,7 +2991,7 @@ ValueNum Compiler::fgValueNumberArrIndexVal(GenTreePtr tree, VNFuncApp* pFuncApp
return fgValueNumberArrIndexVal(tree, arrElemTypeEQ, arrVN, inxVN, addrXvn, fldSeq);
}
-ValueNum Compiler::fgValueNumberArrIndexVal(GenTreePtr tree,
+ValueNum Compiler::fgValueNumberArrIndexVal(GenTree* tree,
CORINFO_CLASS_HANDLE elemTypeEq,
ValueNum arrVN,
ValueNum inxVN,
@@ -4536,10 +4536,10 @@ void Compiler::fgValueNumber()
for (BasicBlock* blk = fgFirstBB; blk != nullptr; blk = blk->bbNext)
{
// Now iterate over the block's statements, and their trees.
- for (GenTreePtr stmts = blk->FirstNonPhiDef(); stmts != nullptr; stmts = stmts->gtNext)
+ for (GenTree* stmts = blk->FirstNonPhiDef(); stmts != nullptr; stmts = stmts->gtNext)
{
assert(stmts->IsStatement());
- for (GenTreePtr tree = stmts->gtStmt.gtStmtList; tree; tree = tree->gtNext)
+ for (GenTree* tree = stmts->gtStmt.gtStmtList; tree; tree = tree->gtNext)
{
tree->gtVNPair.SetBoth(ValueNumStore::NoVN);
}
@@ -4697,23 +4697,23 @@ void Compiler::fgValueNumberBlock(BasicBlock* blk)
// First: visit phi's. If "newVNForPhis", give them new VN's. If not,
// first check to see if all phi args have the same value.
- GenTreePtr firstNonPhi = blk->FirstNonPhiDef();
- for (GenTreePtr phiDefs = blk->bbTreeList; phiDefs != firstNonPhi; phiDefs = phiDefs->gtNext)
+ GenTree* firstNonPhi = blk->FirstNonPhiDef();
+ for (GenTree* phiDefs = blk->bbTreeList; phiDefs != firstNonPhi; phiDefs = phiDefs->gtNext)
{
// TODO-Cleanup: It has been proposed that we should have an IsPhiDef predicate. We would use it
// in Block::FirstNonPhiDef as well.
- GenTreePtr phiDef = phiDefs->gtStmt.gtStmtExpr;
+ GenTree* phiDef = phiDefs->gtStmt.gtStmtExpr;
assert(phiDef->OperGet() == GT_ASG);
GenTreeLclVarCommon* newSsaVar = phiDef->gtOp.gtOp1->AsLclVarCommon();
ValueNumPair phiAppVNP;
ValueNumPair sameVNPair;
- GenTreePtr phiFunc = phiDef->gtOp.gtOp2;
+ GenTree* phiFunc = phiDef->gtOp.gtOp2;
// At this point a GT_PHI node should never have a nullptr for gtOp1
// and the gtOp1 should always be a GT_LIST node.
- GenTreePtr phiOp1 = phiFunc->gtOp.gtOp1;
+ GenTree* phiOp1 = phiFunc->gtOp.gtOp1;
noway_assert(phiOp1 != nullptr);
noway_assert(phiOp1->OperGet() == GT_LIST);
@@ -4905,7 +4905,7 @@ void Compiler::fgValueNumberBlock(BasicBlock* blk)
}
// Now iterate over the remaining statements, and their trees.
- for (GenTreePtr stmt = firstNonPhi; stmt != nullptr; stmt = stmt->gtNext)
+ for (GenTree* stmt = firstNonPhi; stmt != nullptr; stmt = stmt->gtNext)
{
assert(stmt->IsStatement());
@@ -4919,7 +4919,7 @@ void Compiler::fgValueNumberBlock(BasicBlock* blk)
}
#endif
- for (GenTreePtr tree = stmt->gtStmt.gtStmtList; tree; tree = tree->gtNext)
+ for (GenTree* tree = stmt->gtStmt.gtStmtList; tree; tree = tree->gtNext)
{
fgValueNumberTree(tree);
}
@@ -5130,19 +5130,19 @@ ValueNum Compiler::fgMemoryVNForLoopSideEffects(MemoryKind memoryKind,
return newMemoryVN;
}
-void Compiler::fgMutateGcHeap(GenTreePtr tree DEBUGARG(const char* msg))
+void Compiler::fgMutateGcHeap(GenTree* tree DEBUGARG(const char* msg))
{
// Update the current memory VN, and if we're tracking the heap SSA # caused by this node, record it.
recordGcHeapStore(tree, vnStore->VNForExpr(compCurBB, TYP_REF) DEBUGARG(msg));
}
-void Compiler::fgMutateAddressExposedLocal(GenTreePtr tree DEBUGARG(const char* msg))
+void Compiler::fgMutateAddressExposedLocal(GenTree* tree DEBUGARG(const char* msg))
{
// Update the current ByrefExposed VN, and if we're tracking the heap SSA # caused by this node, record it.
recordAddressExposedLocalStore(tree, vnStore->VNForExpr(compCurBB) DEBUGARG(msg));
}
-void Compiler::recordGcHeapStore(GenTreePtr curTree, ValueNum gcHeapVN DEBUGARG(const char* msg))
+void Compiler::recordGcHeapStore(GenTree* curTree, ValueNum gcHeapVN DEBUGARG(const char* msg))
{
// bbMemoryDef must include GcHeap for any block that mutates the GC Heap
// and GC Heap mutations are also ByrefExposed mutations
@@ -5178,7 +5178,7 @@ void Compiler::recordGcHeapStore(GenTreePtr curTree, ValueNum gcHeapVN DEBUGARG(
fgValueNumberRecordMemorySsa(GcHeap, curTree);
}
-void Compiler::recordAddressExposedLocalStore(GenTreePtr curTree, ValueNum memoryVN DEBUGARG(const char* msg))
+void Compiler::recordAddressExposedLocalStore(GenTree* curTree, ValueNum memoryVN DEBUGARG(const char* msg))
{
// This should only happen if GcHeap and ByrefExposed are being tracked separately;
// otherwise we'd go through recordGcHeapStore.
@@ -5200,7 +5200,7 @@ void Compiler::recordAddressExposedLocalStore(GenTreePtr curTree, ValueNum memor
fgValueNumberRecordMemorySsa(ByrefExposed, curTree);
}
-void Compiler::fgValueNumberRecordMemorySsa(MemoryKind memoryKind, GenTreePtr tree)
+void Compiler::fgValueNumberRecordMemorySsa(MemoryKind memoryKind, GenTree* tree)
{
unsigned ssaNum;
if (GetMemorySsaMap(memoryKind)->Lookup(tree, &ssaNum))
@@ -5221,7 +5221,7 @@ void Compiler::fgValueNumberRecordMemorySsa(MemoryKind memoryKind, GenTreePtr tr
// The input 'tree' is a leaf node that is a constant
// Assign the proper value number to the tree
-void Compiler::fgValueNumberTreeConst(GenTreePtr tree)
+void Compiler::fgValueNumberTreeConst(GenTree* tree)
{
genTreeOps oper = tree->OperGet();
var_types typ = tree->TypeGet();
@@ -5311,7 +5311,7 @@ void Compiler::fgValueNumberTreeConst(GenTreePtr tree)
// Assumptions:
// 'tree' must be a block assignment (GT_INITBLK, GT_COPYBLK, GT_COPYOBJ).
-void Compiler::fgValueNumberBlockAssignment(GenTreePtr tree, bool evalAsgLhsInd)
+void Compiler::fgValueNumberBlockAssignment(GenTree* tree, bool evalAsgLhsInd)
{
GenTree* lhs = tree->gtGetOp1();
GenTree* rhs = tree->gtGetOp2();
@@ -5342,8 +5342,8 @@ void Compiler::fgValueNumberBlockAssignment(GenTreePtr tree, bool evalAsgLhsInd)
unsigned lclDefSsaNum = GetSsaNumForLocalVarDef(lclVarTree);
- ValueNum initBlkVN = ValueNumStore::NoVN;
- GenTreePtr initConst = rhs;
+ ValueNum initBlkVN = ValueNumStore::NoVN;
+ GenTree* initConst = rhs;
if (isEntire && initConst->OperGet() == GT_CNS_INT)
{
unsigned initVal = 0xFF & (unsigned)initConst->AsIntConCommon()->IconValue();
@@ -5480,8 +5480,8 @@ void Compiler::fgValueNumberBlockAssignment(GenTreePtr tree, bool evalAsgLhsInd)
}
else
{
- GenTreePtr srcAddr = rhs->AsIndir()->Addr();
- VNFuncApp srcAddrFuncApp;
+ GenTree* srcAddr = rhs->AsIndir()->Addr();
+ VNFuncApp srcAddrFuncApp;
if (srcAddr->IsLocalAddrExpr(this, &rhsLclVarTree, &rhsFldSeq))
{
unsigned rhsLclNum = rhsLclVarTree->GetLclNum();
@@ -5615,7 +5615,7 @@ void Compiler::fgValueNumberBlockAssignment(GenTreePtr tree, bool evalAsgLhsInd)
}
}
-void Compiler::fgValueNumberTree(GenTreePtr tree, bool evalAsgLhsInd)
+void Compiler::fgValueNumberTree(GenTree* tree, bool evalAsgLhsInd)
{
genTreeOps oper = tree->OperGet();
@@ -5932,8 +5932,8 @@ void Compiler::fgValueNumberTree(GenTreePtr tree, bool evalAsgLhsInd)
if (GenTree::OperIsAssignment(oper) && !varTypeIsStruct(tree))
{
- GenTreePtr lhs = tree->gtOp.gtOp1;
- GenTreePtr rhs = tree->gtOp.gtOp2;
+ GenTree* lhs = tree->gtOp.gtOp1;
+ GenTree* rhs = tree->gtOp.gtOp2;
ValueNumPair rhsVNPair;
if (oper == GT_ASG)
@@ -5949,7 +5949,7 @@ void Compiler::fgValueNumberTree(GenTreePtr tree, bool evalAsgLhsInd)
// But we didn't know that the parent was an op=. We do now, so go back and evaluate it.
// (We actually check if the effective val is the IND. We will have evaluated any non-last
// args of an LHS comma already -- including their memory effects.)
- GenTreePtr lhsVal = lhs->gtEffectiveVal(/*commaOnly*/ true);
+ GenTree* lhsVal = lhs->gtEffectiveVal(/*commaOnly*/ true);
if (lhsVal->OperIsIndir() || (lhsVal->OperGet() == GT_CLS_VAR))
{
fgValueNumberTree(lhsVal, /*evalAsgLhsInd*/ true);
@@ -6015,7 +6015,7 @@ void Compiler::fgValueNumberTree(GenTreePtr tree, bool evalAsgLhsInd)
// We have to handle the case where the LHS is a comma. In that case, we don't evaluate the comma,
// so we give it VNForVoid, and we're really interested in the effective value.
- GenTreePtr lhsCommaIter = lhs;
+ GenTree* lhsCommaIter = lhs;
while (lhsCommaIter->OperGet() == GT_COMMA)
{
lhsCommaIter->gtVNPair.SetBoth(vnStore->VNForVoid());
@@ -6173,7 +6173,7 @@ void Compiler::fgValueNumberTree(GenTreePtr tree, bool evalAsgLhsInd)
tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, lhs->TypeGet()));
}
- GenTreePtr arg = lhs->gtOp.gtOp1;
+ GenTree* arg = lhs->gtOp.gtOp1;
// Indicates whether the argument of the IND is the address of a local.
bool wasLocal = false;
@@ -6304,8 +6304,8 @@ void Compiler::fgValueNumberTree(GenTreePtr tree, bool evalAsgLhsInd)
// Was the argument of the GT_IND the address of a local, handled above?
if (!wasLocal)
{
- GenTreePtr obj = nullptr;
- GenTreePtr staticOffset = nullptr;
+ GenTree* obj = nullptr;
+ GenTree* staticOffset = nullptr;
FieldSeqNode* fldSeq = nullptr;
// Is the LHS an array index expression?
@@ -6348,7 +6348,7 @@ void Compiler::fgValueNumberTree(GenTreePtr tree, bool evalAsgLhsInd)
FieldSeqNode* fldSeq = nullptr;
// Try to parse it.
- GenTreePtr arr = nullptr;
+ GenTree* arr = nullptr;
arg->ParseArrayAddress(this, &arrInfo, &arr, &inxVN, &fldSeq);
if (arr == nullptr)
{
@@ -6576,7 +6576,7 @@ void Compiler::fgValueNumberTree(GenTreePtr tree, bool evalAsgLhsInd)
else if (oper == GT_ADDR)
{
// We have special representations for byrefs to lvalues.
- GenTreePtr arg = tree->gtOp.gtOp1;
+ GenTree* arg = tree->gtOp.gtOp1;
if (arg->OperIsLocal())
{
FieldSeqNode* fieldSeq = nullptr;
@@ -6648,12 +6648,12 @@ void Compiler::fgValueNumberTree(GenTreePtr tree, bool evalAsgLhsInd)
// a pointer to an object field or array element. Other cases become uses of
// the current ByrefExposed value and the pointer value, so that at least we
// can recognize redundant loads with no stores between them.
- GenTreePtr addr = tree->AsIndir()->Addr();
+ GenTree* addr = tree->AsIndir()->Addr();
GenTreeLclVarCommon* lclVarTree = nullptr;
FieldSeqNode* fldSeq1 = nullptr;
FieldSeqNode* fldSeq2 = nullptr;
- GenTreePtr obj = nullptr;
- GenTreePtr staticOffset = nullptr;
+ GenTree* obj = nullptr;
+ GenTree* staticOffset = nullptr;
bool isVolatile = (tree->gtFlags & GTF_IND_VOLATILE) != 0;
// See if the addr has any exceptional part.
@@ -6692,11 +6692,11 @@ void Compiler::fgValueNumberTree(GenTreePtr tree, bool evalAsgLhsInd)
ValueNum inxVN = ValueNumStore::NoVN;
FieldSeqNode* fldSeq = nullptr;
- // GenTreePtr addr = tree->gtOp.gtOp1;
+ // GenTree* addr = tree->gtOp.gtOp1;
ValueNum addrVN = addrNvnp.GetLiberal();
// Try to parse it.
- GenTreePtr arr = nullptr;
+ GenTree* arr = nullptr;
addr->ParseArrayAddress(this, &arrInfo, &arr, &inxVN, &fldSeq);
if (arr == nullptr)
{
@@ -7171,7 +7171,7 @@ void Compiler::fgValueNumberTree(GenTreePtr tree, bool evalAsgLhsInd)
#endif // DEBUG
}
-void Compiler::fgValueNumberIntrinsic(GenTreePtr tree)
+void Compiler::fgValueNumberIntrinsic(GenTree* tree)
{
assert(tree->OperGet() == GT_INTRINSIC);
GenTreeIntrinsic* intrinsic = tree->AsIntrinsic();
@@ -7236,7 +7236,7 @@ void Compiler::fgValueNumberIntrinsic(GenTreePtr tree)
}
}
-void Compiler::fgValueNumberCastTree(GenTreePtr tree)
+void Compiler::fgValueNumberCastTree(GenTree* tree)
{
assert(tree->OperGet() == GT_CAST);
@@ -7437,7 +7437,7 @@ void Compiler::fgValueNumberHelperCallFunc(GenTreeCall* call, VNFunc vnf, ValueN
else
{
auto getCurrentArg = [call, &args, useEntryPointAddrAsArg0](int currentIndex) {
- GenTreePtr arg = args->Current();
+ GenTree* arg = args->Current();
if ((arg->gtFlags & GTF_LATE_ARG) != 0)
{
// This arg is a setup node that moves the arg into position.
@@ -7542,11 +7542,11 @@ void Compiler::fgValueNumberCall(GenTreeCall* call)
bool updatedArgPlace = false;
while (args != nullptr)
{
- GenTreePtr arg = args->Current();
+ GenTree* arg = args->Current();
if (arg->OperGet() == GT_ARGPLACE)
{
// Find the corresponding late arg.
- GenTreePtr lateArg = call->fgArgInfo->GetLateArg(i);
+ GenTree* lateArg = call->fgArgInfo->GetLateArg(i);
assert(lateArg->gtVNPair.BothDefined());
arg->gtVNPair = lateArg->gtVNPair;
updatedArgPlace = true;
@@ -7973,7 +7973,7 @@ void Compiler::JitTestCheckVN()
NodeToTestDataMap* testData = GetNodeTestData();
// First we have to know which nodes in the tree are reachable.
- typedef JitHashTable<GenTreePtr, JitPtrKeyFuncs<GenTree>, int> NodeToIntMap;
+ typedef JitHashTable<GenTree*, JitPtrKeyFuncs<GenTree>, int> NodeToIntMap;
NodeToIntMap* reachable = FindReachableNodesInNodeTestData();
LabelToVNMap* labelToVN = new (getAllocatorDebugOnly()) LabelToVNMap(getAllocatorDebugOnly());
@@ -7986,7 +7986,7 @@ void Compiler::JitTestCheckVN()
for (NodeToTestDataMap::KeyIterator ki = testData->Begin(); !ki.Equal(testData->End()); ++ki)
{
TestLabelAndNum tlAndN;
- GenTreePtr node = ki.Get();
+ GenTree* node = ki.Get();
ValueNum nodeVN = node->GetVN(VNK_Liberal);
bool b = testData->Lookup(node, &tlAndN);
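
The replacements above swap the spelled-out pointer type for the old alias while keeping each pointer local in its own declaration. A minimal, self-contained C++ sketch of the declaration rule involved, using hypothetical names rather than anything from the JIT sources:

    #include <type_traits>

    struct Node { int value; };   // stand-in for a tree node type (hypothetical)
    typedef Node* NodePtr;        // alias in the style of the removed typedef

    int main()
    {
        NodePtr a, b;   // the alias makes both a and b Node*
        Node* c, d;     // with the raw type, only c is Node*; d is a plain Node
        static_assert(std::is_same<decltype(b), Node*>::value, "alias applies to every declarator");
        static_assert(std::is_same<decltype(d), Node>::value, "* binds only to the declarator it precedes");
        (void)a; (void)b; (void)c; (void)d;
        return 0;
    }

With the alias gone, a combined declaration would silently give every name after the first a non-pointer type, which is why the rewritten hunks keep one pointer declaration per line.
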
diff --git a/src/jit/valuenum.h b/src/jit/valuenum.h
index b54e8b51b6..489f5191e5 100644
--- a/src/jit/valuenum.h
+++ b/src/jit/valuenum.h
@@ -520,10 +520,10 @@ public:
// If "opA" has a PtrToLoc, PtrToArrElem, or PtrToStatic application as its value numbers, and "opB" is an integer
// with a "fieldSeq", returns the VN for the pointer form extended with the field sequence; or else NoVN.
- ValueNum ExtendPtrVN(GenTreePtr opA, GenTreePtr opB);
+ ValueNum ExtendPtrVN(GenTree* opA, GenTree* opB);
// If "opA" has a PtrToLoc, PtrToArrElem, or PtrToStatic application as its value numbers, returns the VN for the
// pointer form extended with "fieldSeq"; or else NoVN.
- ValueNum ExtendPtrVN(GenTreePtr opA, FieldSeqNode* fieldSeq);
+ ValueNum ExtendPtrVN(GenTree* opA, FieldSeqNode* fieldSeq);
// Queries on value numbers.
// All queries taking value numbers require that those value numbers are valid, that is, that