// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

/*XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX                                                                           XX
XX                          BasicBlock                                       XX
XX                                                                           XX
XX                                                                           XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
*/

#include "jitpch.h"

#ifdef _MSC_VER
#pragma hdrstop
#endif

#if MEASURE_BLOCK_SIZE
// Running totals of BasicBlock allocations, reported by the block-size measurement build.
/* static */ size_t BasicBlock::s_Size;
/* static */ size_t BasicBlock::s_Count;
#endif // MEASURE_BLOCK_SIZE

#ifdef DEBUG
// The max # of tree nodes in any BB
/* static */ unsigned BasicBlock::s_nMaxTrees;
#endif // DEBUG

#ifdef DEBUG
//------------------------------------------------------------------------
// ShuffleHelper: Pseudo-randomly reorder a flowList, driven by a stress hash.
// Used (debug-only) to perturb predecessor list order for SSA stress testing.
//
// Arguments:
//    hash - stress hash value; combined with each block's bbNum to decide swaps
//    res  - head of the flowList to shuffle
//
// Return Value:
//    Head of the shuffled list (the list is permuted in place).
//
flowList* ShuffleHelper(unsigned hash, flowList* res)
{
    flowList* head = res;
    for (flowList *prev = nullptr; res != nullptr; prev = res, res = res->flNext)
    {
        unsigned blkHash = (hash ^ (res->flBlock->bbNum << 16) ^ res->flBlock->bbNum);
        if (((blkHash % 1879) & 1) && prev != nullptr)
        {
            // Swap res with head.
            prev->flNext = head;
            jitstd::swap(head->flNext, res->flNext);
            jitstd::swap(head, res);
        }
    }
    return head;
}

//------------------------------------------------------------------------
// SsaStressHashHelper: Compute the hash value used to drive SSA stress modes.
//
// Return Value:
//    0 when SSA stress is off; the current method's hash when JitSsaStress == 1;
//    otherwise a custom hash derived from the JitSsaStress config value (small
//    values are spread into the upper 16 bits as well).
//
unsigned SsaStressHashHelper()
{
    // hash = 0: turned off, hash = 1: use method hash, hash = *: use custom hash.
    unsigned hash = JitConfig.JitSsaStress();

    if (hash == 0)
    {
        return hash;
    }
    if (hash == 1)
    {
        return JitTls::GetCompiler()->info.compMethodHash();
    }
    return ((hash >> 16) == 0) ? ((hash << 16) | hash) : hash;
}
#endif

//------------------------------------------------------------------------
// EHSuccessorIter: Iterates the "EH successors" of a block: the handlers that
// could receive control if an exception is raised during (or flows through)
// this block. Starts at the innermost enclosing try's handler (m_curTry), and
// later also visits handlers of try regions begun by regular successors.
//
EHSuccessorIter::EHSuccessorIter(Compiler* comp, BasicBlock* block)
    : m_comp(comp)
    , m_block(block)
    , m_curRegSucc(nullptr)
    , m_curTry(comp->ehGetBlockExnFlowDsc(block))
    , m_remainingRegSuccs(block->NumSucc(comp))
{
    // If "block" is a "leave helper" block (the empty BBJ_ALWAYS block that pairs with a
    // preceding BBJ_CALLFINALLY block to implement a "leave" IL instruction), then no exceptions
    // can occur within it, so clear m_curTry if it's non-null.
    if (m_curTry != nullptr)
    {
        BasicBlock* beforeBlock = block->bbPrev;
        if (beforeBlock != nullptr && beforeBlock->isBBCallAlwaysPair())
        {
            m_curTry = nullptr;
        }
    }

    if (m_curTry == nullptr && m_remainingRegSuccs > 0)
    {
        // Examine the successors to see if any are the start of try blocks.
        FindNextRegSuccTry();
    }
}

//------------------------------------------------------------------------
// FindNextRegSuccTry: Advance to the next regular successor of m_block that
// begins a try region whose handler has not already been yielded; on success,
// m_curTry is set to that try's descriptor.
//
void EHSuccessorIter::FindNextRegSuccTry()
{
    assert(m_curTry == nullptr);

    // Must now consider the next regular successor, if any.
    while (m_remainingRegSuccs > 0)
    {
        m_remainingRegSuccs--;
        m_curRegSucc = m_block->GetSucc(m_remainingRegSuccs, m_comp);
        if (m_comp->bbIsTryBeg(m_curRegSucc))
        {
            assert(m_curRegSucc->hasTryIndex()); // Since it is a try begin.
            unsigned newTryIndex = m_curRegSucc->getTryIndex();

            // If the try region started by "m_curRegSucc" (represented by newTryIndex) contains m_block,
            // we've already yielded its handler, as one of the EH handler successors of m_block itself.
            if (m_comp->bbInExnFlowRegions(newTryIndex, m_block))
            {
                continue;
            }

            // Otherwise, consider this try.
            m_curTry = m_comp->ehGetDsc(newTryIndex);
            break;
        }
    }
}

// Advance the iterator: walk outward through enclosing try regions, then fall
// back to scanning the remaining regular successors for try-begin blocks.
void EHSuccessorIter::operator++(void)
{
    assert(m_curTry != nullptr);
    if (m_curTry->ebdEnclosingTryIndex != EHblkDsc::NO_ENCLOSING_INDEX)
    {
        m_curTry = m_comp->ehGetDsc(m_curTry->ebdEnclosingTryIndex);

        // If we've gone over into considering try's containing successors,
        // then the enclosing try must have the successor as its first block.
        if (m_curRegSucc == nullptr || m_curTry->ebdTryBeg == m_curRegSucc)
        {
            return;
        }

        // Otherwise, give up, try the next regular successor.
        m_curTry = nullptr;
    }
    else
    {
        m_curTry = nullptr;
    }

    // We've exhausted all try blocks.
    // See if there are any remaining regular successors that start try blocks.
    FindNextRegSuccTry();
}

// Dereference: the current EH successor is the exception-flow block (handler or
// filter entry) of the try descriptor the iterator currently points at.
BasicBlock* EHSuccessorIter::operator*()
{
    assert(m_curTry != nullptr);
    return m_curTry->ExFlowBlock();
}

//------------------------------------------------------------------------
// BlockPredsWithEH: Like bbPreds, but for an exception-flow block the result is
// augmented with its EH predecessors: the predecessors of the try's first block,
// plus every block in the try's exception-flow region (excluding the paired
// BBJ_ALWAYS halves of BBJ_CALLFINALLY/BBJ_ALWAYS pairs, which cannot raise).
// Results are memoized in the BlockToEHPreds map.
//
// Arguments:
//    blk - block whose (possibly EH-augmented) predecessor list is requested
//
// Return Value:
//    Head of the predecessor flowList (cached on subsequent calls).
//
flowList* Compiler::BlockPredsWithEH(BasicBlock* blk)
{
    BlockToFlowListMap* ehPreds = GetBlockToEHPreds();
    flowList*           res;
    if (ehPreds->Lookup(blk, &res))
    {
        return res;
    }

    res = blk->bbPreds;
    unsigned tryIndex;
    if (bbIsExFlowBlock(blk, &tryIndex))
    {
        // Find the first block of the try.
        EHblkDsc*   ehblk    = ehGetDsc(tryIndex);
        BasicBlock* tryStart = ehblk->ebdTryBeg;
        for (flowList* tryStartPreds = tryStart->bbPreds; tryStartPreds != nullptr;
             tryStartPreds = tryStartPreds->flNext)
        {
            res = new (this, CMK_FlowList) flowList(tryStartPreds->flBlock, res);

#if MEASURE_BLOCK_SIZE
            genFlowNodeCnt += 1;
            genFlowNodeSize += sizeof(flowList);
#endif // MEASURE_BLOCK_SIZE
        }

        // Now add all blocks handled by this handler (except for second blocks of BBJ_CALLFINALLY/BBJ_ALWAYS pairs;
        // these cannot cause transfer to the handler...)
        BasicBlock* prevBB = nullptr;

        // TODO-Throughput: It would be nice if we could iterate just over the blocks in the try, via
        // something like:
        //   for (BasicBlock* bb = ehblk->ebdTryBeg; bb != ehblk->ebdTryLast->bbNext; bb = bb->bbNext)
        //   (plus adding in any filter blocks outside the try whose exceptions are handled here).
        // That doesn't work, however: funclets have caused us to sometimes split the body of a try into
        // more than one sequence of contiguous blocks.  We need to find a better way to do this.
        for (BasicBlock *bb = fgFirstBB; bb != nullptr; prevBB = bb, bb = bb->bbNext)
        {
            if (bbInExnFlowRegions(tryIndex, bb) && (prevBB == nullptr || !prevBB->isBBCallAlwaysPair()))
            {
                res = new (this, CMK_FlowList) flowList(bb, res);

#if MEASURE_BLOCK_SIZE
                genFlowNodeCnt += 1;
                genFlowNodeSize += sizeof(flowList);
#endif // MEASURE_BLOCK_SIZE
            }
        }

#ifdef DEBUG
        // Under SSA stress, shuffle the computed predecessor list.
        unsigned hash = SsaStressHashHelper();
        if (hash != 0)
        {
            res = ShuffleHelper(hash, res);
        }
#endif // DEBUG

        ehPreds->Set(blk, res);
    }
    return res;
}

#ifdef DEBUG

//------------------------------------------------------------------------
// dspBlockILRange(): Display the block's IL range as [XXX...YYY), where XXX and YYY might be "???" for BAD_IL_OFFSET.
//
void BasicBlock::dspBlockILRange()
{
    if (bbCodeOffs != BAD_IL_OFFSET)
    {
        printf("[%03X..", bbCodeOffs);
    }
    else
    {
        printf("[???"
               "..");
    }

    if (bbCodeOffsEnd != BAD_IL_OFFSET)
    {
        // brace-matching editor workaround for following line: (
        printf("%03X)", bbCodeOffsEnd);
    }
    else
    {
        // brace-matching editor workaround for following line: (
        printf("???"
               ")");
    }
}

//------------------------------------------------------------------------
// dspFlags: Print out the block's flags
//
void BasicBlock::dspFlags()
{
    // One short mnemonic per set flag, each followed by a space.
    // clang-format off
    if (bbFlags & BBF_VISITED)             { printf("v "); }
    if (bbFlags & BBF_MARKED)              { printf("m "); }
    if (bbFlags & BBF_CHANGED)             { printf("! "); }
    if (bbFlags & BBF_REMOVED)             { printf("del "); }
    if (bbFlags & BBF_DONT_REMOVE)         { printf("keep "); }
    if (bbFlags & BBF_IMPORTED)            { printf("i "); }
    if (bbFlags & BBF_INTERNAL)            { printf("internal "); }
    if (bbFlags & BBF_FAILED_VERIFICATION) { printf("failV "); }
    if (bbFlags & BBF_TRY_BEG)             { printf("try "); }
    if (bbFlags & BBF_NEEDS_GCPOLL)        { printf("poll "); }
    if (bbFlags & BBF_RUN_RARELY)          { printf("rare "); }
    if (bbFlags & BBF_LOOP_HEAD)           { printf("Loop "); }
    if (bbFlags & BBF_LOOP_CALL0)          { printf("Loop0 "); }
    if (bbFlags & BBF_LOOP_CALL1)          { printf("Loop1 "); }
    if (bbFlags & BBF_HAS_LABEL)           { printf("label "); }
    if (bbFlags & BBF_JMP_TARGET)          { printf("target "); }
    if (bbFlags & BBF_HAS_JMP)             { printf("jmp "); }
    if (bbFlags & BBF_GC_SAFE_POINT)       { printf("gcsafe "); }
    if (bbFlags & BBF_FUNCLET_BEG)         { printf("flet "); }
    if (bbFlags & BBF_HAS_IDX_LEN)         { printf("idxlen "); }
    if (bbFlags & BBF_HAS_NEWARRAY)        { printf("new[] "); }
    if (bbFlags & BBF_HAS_NEWOBJ)          { printf("newobj "); }
#if FEATURE_EH_FUNCLETS && defined(_TARGET_ARM_)
    if (bbFlags & BBF_FINALLY_TARGET)      { printf("ftarget "); }
#endif // FEATURE_EH_FUNCLETS && defined(_TARGET_ARM_)
    if (bbFlags & BBF_BACKWARD_JUMP)       { printf("bwd "); }
    if (bbFlags & BBF_RETLESS_CALL)        { printf("retless "); }
    if (bbFlags & BBF_LOOP_PREHEADER)      { printf("LoopPH "); }
    if (bbFlags & BBF_COLD)                { printf("cold "); }
    if (bbFlags & BBF_PROF_WEIGHT)         { printf("IBC "); }
#ifdef LEGACY_BACKEND
    if (bbFlags & BBF_FORWARD_SWITCH)      { printf("fswitch "); }
#else  // !LEGACY_BACKEND
    if (bbFlags & BBF_IS_LIR)              { printf("LIR "); }
#endif // LEGACY_BACKEND
    if (bbFlags & BBF_KEEP_BBJ_ALWAYS)     { printf("KEEP "); }
    if (bbFlags & BBF_CLONED_FINALLY_BEGIN){ printf("cfb "); }
    if (bbFlags & BBF_CLONED_FINALLY_END)  { printf("cfe "); }
    // clang-format on
}

/*****************************************************************************
 *
 *  Display the bbPreds basic block list (the block predecessors).
 *  Returns the number of characters printed.
*/ unsigned BasicBlock::dspPreds() { unsigned count = 0; for (flowList* pred = bbPreds; pred != nullptr; pred = pred->flNext) { if (count != 0) { printf(","); count += 1; } printf("BB%02u", pred->flBlock->bbNum); count += 4; // Account for %02u only handling 2 digits, but we can display more than that. unsigned digits = CountDigits(pred->flBlock->bbNum); if (digits > 2) { count += digits - 2; } // Does this predecessor have an interesting dup count? If so, display it. if (pred->flDupCount > 1) { printf("(%u)", pred->flDupCount); count += 2 + CountDigits(pred->flDupCount); } } return count; } /***************************************************************************** * * Display the bbCheapPreds basic block list (the block predecessors). * Returns the number of characters printed. */ unsigned BasicBlock::dspCheapPreds() { unsigned count = 0; for (BasicBlockList* pred = bbCheapPreds; pred != nullptr; pred = pred->next) { if (count != 0) { printf(","); count += 1; } printf("BB%02u", pred->block->bbNum); count += 4; // Account for %02u only handling 2 digits, but we can display more than that. unsigned digits = CountDigits(pred->block->bbNum); if (digits > 2) { count += digits - 2; } } return count; } /***************************************************************************** * * Display the basic block successors. * Returns the count of successors. */ unsigned BasicBlock::dspSuccs(Compiler* compiler) { unsigned numSuccs = NumSucc(compiler); unsigned count = 0; for (unsigned i = 0; i < numSuccs; i++) { printf("%s", (count == 0) ? "" : ","); printf("BB%02u", GetSucc(i, compiler)->bbNum); count++; } return count; } // Display a compact representation of the bbJumpKind, that is, where this block branches. // This is similar to code in Compiler::fgTableDispBasicBlock(), but doesn't have that code's requirements to align // things strictly. 
// Print a " -> BBxx (kind)" suffix describing this block's branch behavior.
void BasicBlock::dspJumpKind()
{
    switch (bbJumpKind)
    {
        case BBJ_EHFINALLYRET:
            printf(" (finret)");
            break;

        case BBJ_EHFILTERRET:
            printf(" (fltret)");
            break;

        case BBJ_EHCATCHRET:
            printf(" -> BB%02u (cret)", bbJumpDest->bbNum);
            break;

        case BBJ_THROW:
            printf(" (throw)");
            break;

        case BBJ_RETURN:
            printf(" (return)");
            break;

        case BBJ_NONE:
            // For fall-through blocks, print nothing.
            break;

        case BBJ_ALWAYS:
            // Distinguish (upper-case) the BBJ_ALWAYS blocks that must be kept.
            if (bbFlags & BBF_KEEP_BBJ_ALWAYS)
            {
                printf(" -> BB%02u (ALWAYS)", bbJumpDest->bbNum);
            }
            else
            {
                printf(" -> BB%02u (always)", bbJumpDest->bbNum);
            }
            break;

        case BBJ_LEAVE:
            printf(" -> BB%02u (leave)", bbJumpDest->bbNum);
            break;

        case BBJ_CALLFINALLY:
            printf(" -> BB%02u (callf)", bbJumpDest->bbNum);
            break;

        case BBJ_COND:
            printf(" -> BB%02u (cond)", bbJumpDest->bbNum);
            break;

        case BBJ_SWITCH:
            // Print every entry of the switch jump table.
            printf(" ->");

            unsigned jumpCnt;
            jumpCnt = bbJumpSwt->bbsCount;
            BasicBlock** jumpTab;
            jumpTab = bbJumpSwt->bbsDstTab;
            do
            {
                printf("%cBB%02u", (jumpTab == bbJumpSwt->bbsDstTab) ? ' ' : ',', (*jumpTab)->bbNum);
            } while (++jumpTab, --jumpCnt);

            printf(" (switch)");
            break;

        default:
            unreached();
            break;
    }
}

//------------------------------------------------------------------------
// dspBlockHeader: Print a one-line summary of the block: number, IL range,
// and optionally its jump kind, flags, and predecessor/successor lists.
//
// Arguments:
//    compiler  - compiler instance
//    showKind  - also print the jump kind
//    showFlags - also print the 64-bit bbFlags value and its mnemonics
//    showPreds - also print the preds={...} succs={...} lists
//
void BasicBlock::dspBlockHeader(Compiler* compiler,
                                bool showKind /*= true*/,
                                bool showFlags /*= false*/,
                                bool showPreds /*= true*/)
{
    printf("BB%02u ", bbNum);
    dspBlockILRange();
    if (showKind)
    {
        dspJumpKind();
    }
    if (showPreds)
    {
        printf(", preds={");
        // Use whichever predecessor representation is currently valid.
        if (compiler->fgCheapPredsValid)
        {
            dspCheapPreds();
        }
        else
        {
            dspPreds();
        }
        printf("} succs={");
        dspSuccs(compiler);
        printf("}");
    }
    if (showFlags)
    {
        // bbFlags is 64 bits; print as high.low hex words.
        const unsigned lowFlags  = (unsigned)bbFlags;
        const unsigned highFlags = (unsigned)(bbFlags >> 32);
        printf(" flags=0x%08x.%08x: ", highFlags, lowFlags);
        dspFlags();
    }
    printf("\n");
}

#endif // DEBUG

// Allocation function for MemoryPhiArg.
// Placement allocator for MemoryPhiArg: carves storage out of the compiler's
// arena (MemoryPhiArgs are never individually freed).
void* BasicBlock::MemoryPhiArg::operator new(size_t sz, Compiler* comp)
{
    void* const mem = comp->compGetMem(sz, CMK_MemoryPhiArg);
    return mem;
}

//------------------------------------------------------------------------
// CloneBlockState: Try to populate `to` block with a copy of `from` block's statements, replacing
// uses of local `varNum` with IntCns `varVal`.
//
// Arguments:
//    compiler - Jit compiler instance
//    to - New/empty block to copy statements into
//    from - Block to copy statements from
//    varNum - lclVar uses with lclNum `varNum` will be replaced; can be ~0 to indicate no replacement.
//    varVal - If replacing uses of `varNum`, replace them with int constants with value `varVal`.
//
// Return Value:
//    Cloning may fail because this routine uses `gtCloneExpr` for cloning and it can't handle all
//    IR nodes.  If cloning of any statement fails, `false` will be returned and block `to` may be
//    partially populated.  If cloning of all statements succeeds, `true` will be returned and
//    block `to` will be fully populated.
bool BasicBlock::CloneBlockState(
    Compiler* compiler, BasicBlock* to, const BasicBlock* from, unsigned varNum, int varVal)
{
    // The destination block must not already have statements.
    assert(to->bbTreeList == nullptr);

    // Copy the scalar/per-block state first.
    to->bbFlags  = from->bbFlags;
    to->bbWeight = from->bbWeight;
    BlockSetOps::AssignAllowUninitRhs(compiler, to->bbReach, from->bbReach);
    to->copyEHRegion(from);
    to->bbCatchTyp    = from->bbCatchTyp;
    to->bbRefs        = from->bbRefs;
    to->bbStkTempsIn  = from->bbStkTempsIn;
    to->bbStkTempsOut = from->bbStkTempsOut;
    to->bbStkDepth    = from->bbStkDepth;
    to->bbCodeOffs    = from->bbCodeOffs;
    to->bbCodeOffsEnd = from->bbCodeOffsEnd;
    VarSetOps::AssignAllowUninitRhs(compiler, to->bbScope, from->bbScope);
#if FEATURE_STACK_FP_X87
    to->bbFPStateX87 = from->bbFPStateX87;
#endif // FEATURE_STACK_FP_X87
    to->bbNatLoopNum = from->bbNatLoopNum;
#ifdef DEBUG
    to->bbLoopNum     = from->bbLoopNum;
    to->bbTgtStkDepth = from->bbTgtStkDepth;
#endif // DEBUG

    // Clone each statement, substituting `varVal` for uses of `varNum`.
    for (GenTreePtr fromStmt = from->bbTreeList; fromStmt != nullptr; fromStmt = fromStmt->gtNext)
    {
        auto newExpr = compiler->gtCloneExpr(fromStmt->gtStmt.gtStmtExpr, 0, varNum, varVal);
        if (!newExpr)
        {
            // gtCloneExpr doesn't handle all opcodes, so may fail to clone a statement.
            // When that happens, it returns nullptr; abandon the rest of this block and
            // return `false` to the caller to indicate that cloning was unsuccessful.
            return false;
        }
        compiler->fgInsertStmtAtEnd(to, compiler->fgNewStmtFromTree(newExpr));
    }
    return true;
}

// LIR helpers

//------------------------------------------------------------------------
// MakeLIR: Switch this block to the LIR (linear IR) representation, with the
// given first and last nodes, and set BBF_IS_LIR. Not available under the
// legacy backend.
//
void BasicBlock::MakeLIR(GenTree* firstNode, GenTree* lastNode)
{
#ifdef LEGACY_BACKEND
    unreached();
#else // !LEGACY_BACKEND
    assert(!IsLIR());
    // Either both endpoints are null (empty block) or both are set.
    assert((firstNode == nullptr) == (lastNode == nullptr));
    assert((firstNode == lastNode) || firstNode->Precedes(lastNode));

    m_firstNode = firstNode;
    m_lastNode  = lastNode;
    bbFlags |= BBF_IS_LIR;
#endif // LEGACY_BACKEND
}

//------------------------------------------------------------------------
// IsLIR: Returns true if this block holds LIR rather than statement lists.
// Always false under the legacy backend.
//
bool BasicBlock::IsLIR()
{
#ifdef LEGACY_BACKEND
    return false;
#else // !LEGACY_BACKEND
    const bool isLIR = (bbFlags & BBF_IS_LIR) != 0;
    // A non-empty block's contents must agree with the flag.
    assert((bbTreeList == nullptr) || ((isLIR) == !bbTreeList->IsStatement()));
    return isLIR;
#endif // LEGACY_BACKEND
}

//------------------------------------------------------------------------
// firstStmt: Returns the first statement in the block
//
// Arguments:
//    None.
//
// Return Value:
//    The first statement in the block's bbTreeList.
//
GenTreeStmt* BasicBlock::firstStmt() const
{
    if (bbTreeList == nullptr)
    {
        return nullptr;
    }

    return bbTreeList->AsStmt();
}

//------------------------------------------------------------------------
// lastStmt: Returns the last statement in the block
//
// Arguments:
//    None.
//
// Return Value:
//    The last statement in the block's bbTreeList.
//
GenTreeStmt* BasicBlock::lastStmt() const
{
    if (bbTreeList == nullptr)
    {
        return nullptr;
    }

    // The statement list is circular in gtPrev: the head's gtPrev is the tail.
    GenTree* result = bbTreeList->gtPrev;
    assert(result && result->gtNext == nullptr);
    return result->AsStmt();
}

//------------------------------------------------------------------------
// BasicBlock::firstNode: Returns the first node in the block.
//
GenTree* BasicBlock::firstNode()
{
    return IsLIR() ? bbTreeList : Compiler::fgGetFirstNode(firstStmt()->gtStmtExpr);
}

//------------------------------------------------------------------------
// BasicBlock::lastNode: Returns the last node in the block.
//
GenTree* BasicBlock::lastNode()
{
    return IsLIR() ? m_lastNode : lastStmt()->gtStmtExpr;
}

//------------------------------------------------------------------------
// GetUniquePred: Returns the unique predecessor of a block, if one exists.
// The predecessor lists must be accurate.
//
// Arguments:
//    None.
//
// Return Value:
//    The unique predecessor of a block, or nullptr if there is no unique predecessor.
//
// Notes:
//    If the first block has a predecessor (which it may have, if it is the target of
//    a backedge), we never want to consider it "unique" because the prolog is an
//    implicit predecessor.

BasicBlock* BasicBlock::GetUniquePred(Compiler* compiler)
{
    if ((bbPreds == nullptr) || (bbPreds->flNext != nullptr) || (this == compiler->fgFirstBB))
    {
        return nullptr;
    }
    else
    {
        return bbPreds->flBlock;
    }
}

//------------------------------------------------------------------------
// GetUniqueSucc: Returns the unique successor of a block, if one exists.
// Only considers BBJ_ALWAYS and BBJ_NONE block types.
//
// Arguments:
//    None.
//
// Return Value:
//    The unique successor of a block, or nullptr if there is no unique successor.

BasicBlock* BasicBlock::GetUniqueSucc()
{
    if (bbJumpKind == BBJ_ALWAYS)
    {
        return bbJumpDest;
    }
    else if (bbJumpKind == BBJ_NONE)
    {
        return bbNext;
    }
    else
    {
        return nullptr;
    }
}

// Static vars.
// Sentinel (non-dereferenceable) value marking an empty memory PHI definition.
BasicBlock::MemoryPhiArg* BasicBlock::EmptyMemoryPhiDef = (BasicBlock::MemoryPhiArg*)0x1;

// Hash a BasicBlock pointer by its block number; under SSA stress, mix in the
// stress hash so that hash-ordered iteration order is perturbed.
unsigned PtrKeyFuncs::GetHashCode(const BasicBlock* ptr)
{
#ifdef DEBUG
    unsigned hash = SsaStressHashHelper();
    if (hash != 0)
    {
        return (hash ^ (ptr->bbNum << 16) ^ ptr->bbNum);
    }
#endif
    return ptr->bbNum;
}

//------------------------------------------------------------------------
// isEmpty: Returns true if the block contains nothing of consequence:
// no statements beyond PHI definitions (statement form), or only GT_IL_OFFSET
// nodes beyond PHIs (LIR form).
//
bool BasicBlock::isEmpty()
{
    if (!IsLIR())
    {
        return (this->FirstNonPhiDef() == nullptr);
    }

    for (GenTree* node : LIR::AsRange(this).NonPhiNodes())
    {
        if (node->OperGet() != GT_IL_OFFSET)
        {
            return false;
        }
    }

    return true;
}

//------------------------------------------------------------------------
// FirstNonPhiDef: Returns the first statement that is not a PHI definition
// (i.e. not `x = GT_PHI(...)` in either GT_ASG or GT_STORE_LCL_VAR form),
// or nullptr if every statement is a PHI definition.
//
GenTreeStmt* BasicBlock::FirstNonPhiDef()
{
    GenTreePtr stmt = bbTreeList;
    if (stmt == nullptr)
    {
        return nullptr;
    }
    GenTreePtr tree = stmt->gtStmt.gtStmtExpr;
    // Skip over leading PHI-definition statements.
    while ((tree->OperGet() == GT_ASG && tree->gtOp.gtOp2->OperGet() == GT_PHI) ||
           (tree->OperGet() == GT_STORE_LCL_VAR && tree->gtOp.gtOp1->OperGet() == GT_PHI))
    {
        stmt = stmt->gtNext;
        if (stmt == nullptr)
        {
            return nullptr;
        }
        tree = stmt->gtStmt.gtStmtExpr;
    }
    return stmt->AsStmt();
}

//------------------------------------------------------------------------
// FirstNonPhiDefOrCatchArgAsg: Like FirstNonPhiDef, but additionally skips a
// single assignment of GT_CATCH_ARG (the incoming exception object in a
// handler), if it immediately follows the PHI definitions.
//
GenTreePtr BasicBlock::FirstNonPhiDefOrCatchArgAsg()
{
    GenTreePtr stmt = FirstNonPhiDef();
    if (stmt == nullptr)
    {
        return nullptr;
    }
    GenTreePtr tree = stmt->gtStmt.gtStmtExpr;
    if ((tree->OperGet() == GT_ASG && tree->gtOp.gtOp2->OperGet() == GT_CATCH_ARG) ||
        (tree->OperGet() == GT_STORE_LCL_VAR && tree->gtOp.gtOp1->OperGet() == GT_CATCH_ARG))
    {
        stmt = stmt->gtNext;
    }
    return stmt;
}

/*****************************************************************************
 *
 *  Mark a block as rarely run, we also don't want to have a loop in a
 *  rarely run block, and we set its weight to zero.
 */
void BasicBlock::bbSetRunRarely()
{
    setBBWeight(BB_ZERO_WEIGHT);
    // setBBWeight may refuse to lower the weight (e.g. for profile-weighted
    // blocks); only set the flag if the weight actually became zero.
    if (bbWeight == BB_ZERO_WEIGHT)
    {
        bbFlags |= BBF_RUN_RARELY; // This block is never/rarely run
    }
}

/*****************************************************************************
 *
 *  Can a BasicBlock be inserted after this without altering the flowgraph
 */

bool BasicBlock::bbFallsThrough()
{
    switch (bbJumpKind)
    {
        case BBJ_THROW:
        case BBJ_EHFINALLYRET:
        case BBJ_EHFILTERRET:
        case BBJ_EHCATCHRET:
        case BBJ_RETURN:
        case BBJ_ALWAYS:
        case BBJ_LEAVE:
        case BBJ_SWITCH:
            return false;

        case BBJ_NONE:
        case BBJ_COND:
            return true;

        case BBJ_CALLFINALLY:
            // A retless BBJ_CALLFINALLY (no paired BBJ_ALWAYS) does not fall through.
            return ((bbFlags & BBF_RETLESS_CALL) == 0);

        default:
            assert(!"Unknown bbJumpKind in bbFallsThrough()");
            return true;
    }
}

//------------------------------------------------------------------------
// NumSucc: Returns the count of block successors. See the declaration comment for details.
//
// Arguments:
//    None.
//
// Return Value:
//    Count of block successors.
//
unsigned BasicBlock::NumSucc()
{
    switch (bbJumpKind)
    {
        case BBJ_THROW:
        case BBJ_RETURN:
        case BBJ_EHFINALLYRET:
        case BBJ_EHFILTERRET:
            return 0;

        case BBJ_CALLFINALLY:
        case BBJ_ALWAYS:
        case BBJ_EHCATCHRET:
        case BBJ_LEAVE:
        case BBJ_NONE:
            return 1;

        case BBJ_COND:
            // A conditional jump to the next block has only one distinct successor.
            if (bbJumpDest == bbNext)
            {
                return 1;
            }
            else
            {
                return 2;
            }

        case BBJ_SWITCH:
            return bbJumpSwt->bbsCount;

        default:
            unreached();
    }
}

//------------------------------------------------------------------------
// GetSucc: Returns the requested block successor. See the declaration comment for details.
//
// Arguments:
//    i - index of successor to return. 0 <= i <= NumSucc().
//
// Return Value:
//    Requested successor block
//
BasicBlock* BasicBlock::GetSucc(unsigned i)
{
    assert(i < NumSucc()); // Index bounds check.
    switch (bbJumpKind)
    {
        case BBJ_CALLFINALLY:
        case BBJ_ALWAYS:
        case BBJ_EHCATCHRET:
        case BBJ_LEAVE:
            return bbJumpDest;

        case BBJ_NONE:
            return bbNext;

        case BBJ_COND:
            // Successor 0 is the fall-through block; successor 1 is the jump target.
            if (i == 0)
            {
                return bbNext;
            }
            else
            {
                assert(i == 1);
                return bbJumpDest;
            }

        case BBJ_SWITCH:
            return bbJumpSwt->bbsDstTab[i];

        default:
            unreached();
    }
}

//------------------------------------------------------------------------
// NumSucc: Returns the count of block successors. See the declaration comment for details.
//
// Arguments:
//    comp - Compiler instance
//
// Return Value:
//    Count of block successors.
//
unsigned BasicBlock::NumSucc(Compiler* comp)
{
    assert(comp != nullptr);

    switch (bbJumpKind)
    {
        case BBJ_THROW:
        case BBJ_RETURN:
            return 0;

        case BBJ_EHFINALLYRET:
        {
            // The first block of the handler is labelled with the catch type.
            BasicBlock* hndBeg = comp->fgFirstBlockOfHandler(this);
            if (hndBeg->bbCatchTyp == BBCT_FINALLY)
            {
                return comp->fgNSuccsOfFinallyRet(this);
            }
            else
            {
                assert(hndBeg->bbCatchTyp == BBCT_FAULT); // We can only BBJ_EHFINALLYRET from FINALLY and FAULT.
                // A FAULT block has no successors.
                return 0;
            }
        }

        case BBJ_CALLFINALLY:
        case BBJ_ALWAYS:
        case BBJ_EHCATCHRET:
        case BBJ_EHFILTERRET:
        case BBJ_LEAVE:
        case BBJ_NONE:
            return 1;

        case BBJ_COND:
            // A conditional jump to the next block has only one distinct successor.
            if (bbJumpDest == bbNext)
            {
                return 1;
            }
            else
            {
                return 2;
            }

        case BBJ_SWITCH:
        {
            // With a compiler, duplicate switch targets are counted once.
            Compiler::SwitchUniqueSuccSet sd = comp->GetDescriptorForSwitch(this);
            return sd.numDistinctSuccs;
        }

        default:
            unreached();
    }
}

//------------------------------------------------------------------------
// GetSucc: Returns the requested block successor. See the declaration comment for details.
//
// Arguments:
//    i - index of successor to return. 0 <= i <= NumSucc(comp).
//    comp - Compiler instance
//
// Return Value:
//    Requested successor block
//
BasicBlock* BasicBlock::GetSucc(unsigned i, Compiler* comp)
{
    assert(comp != nullptr);

    assert(i < NumSucc(comp)); // Index bounds check.
    switch (bbJumpKind)
    {
        case BBJ_EHFILTERRET:
        {
            // Handler is the (sole) normal successor of the filter.
            assert(comp->fgFirstBlockOfHandler(this) == bbJumpDest);
            return bbJumpDest;
        }

        case BBJ_EHFINALLYRET:
            // Note: the following call is expensive.
            return comp->fgSuccOfFinallyRet(this, i);

        case BBJ_CALLFINALLY:
        case BBJ_ALWAYS:
        case BBJ_EHCATCHRET:
        case BBJ_LEAVE:
            return bbJumpDest;

        case BBJ_NONE:
            return bbNext;

        case BBJ_COND:
            // Successor 0 is the fall-through block; successor 1 is the jump target.
            if (i == 0)
            {
                return bbNext;
            }
            else
            {
                assert(i == 1);
                return bbJumpDest;
            }

        case BBJ_SWITCH:
        {
            // Deduplicated switch targets.
            Compiler::SwitchUniqueSuccSet sd = comp->GetDescriptorForSwitch(this);
            assert(i < sd.numDistinctSuccs); // Range check.
            return sd.nonDuplicates[i];
        }

        default:
            unreached();
    }
}

// Initialize this block's dataflow var sets to empty, and its memory-kind
// dataflow sets to the empty memory-kind set.
void BasicBlock::InitVarSets(Compiler* comp)
{
    VarSetOps::AssignNoCopy(comp, bbVarUse, VarSetOps::MakeEmpty(comp));
    VarSetOps::AssignNoCopy(comp, bbVarDef, VarSetOps::MakeEmpty(comp));
    VarSetOps::AssignNoCopy(comp, bbLiveIn, VarSetOps::MakeEmpty(comp));
    VarSetOps::AssignNoCopy(comp, bbLiveOut, VarSetOps::MakeEmpty(comp));
    VarSetOps::AssignNoCopy(comp, bbScope, VarSetOps::MakeEmpty(comp));

    bbMemoryUse     = emptyMemoryKindSet;
    bbMemoryDef     = emptyMemoryKindSet;
    bbMemoryLiveIn  = emptyMemoryKindSet;
    bbMemoryLiveOut = emptyMemoryKindSet;
}

// Returns true if the basic block ends with GT_JMP
bool BasicBlock::endsWithJmpMethod(Compiler* comp)
{
    // Fast path: the per-method and per-block flags must indicate a possible GT_JMP
    // before we bother looking at the last node.
    if (comp->compJmpOpUsed && (bbJumpKind == BBJ_RETURN) && (bbFlags & BBF_HAS_JMP))
    {
        GenTree* lastNode = this->lastNode();
        assert(lastNode != nullptr);
        return lastNode->OperGet() == GT_JMP;
    }

    return false;
}

// Returns true if the basic block ends with either
//  i) GT_JMP or
// ii) tail call (implicit or explicit)
//
// Params:
//    comp              - Compiler instance
//    fastTailCallsOnly - Only consider fast tail calls excluding tail calls via helper.
//
bool BasicBlock::endsWithTailCallOrJmp(Compiler* comp, bool fastTailCallsOnly /*=false*/)
{
    GenTreePtr tailCall                       = nullptr;
    bool       tailCallsConvertibleToLoopOnly = false;
    return endsWithJmpMethod(comp) ||
           endsWithTailCall(comp, fastTailCallsOnly, tailCallsConvertibleToLoopOnly, &tailCall);
}

//------------------------------------------------------------------------------
// endsWithTailCall : Check if the block ends with a tail call.
//
// Arguments:
//    comp                            - compiler instance
//    fastTailCallsOnly               - check for fast tail calls only
//    tailCallsConvertibleToLoopOnly  - check for tail calls convertible to loop only
//    tailCall                        - a pointer to a tree that will be set to the call tree if the block
//                                      ends with a tail call and will be set to nullptr otherwise.
//
// Return Value:
//    true if the block ends with a tail call; false otherwise.
//
// Notes:
//    At most one of fastTailCallsOnly and tailCallsConvertibleToLoopOnly flags can be true.
//
bool BasicBlock::endsWithTailCall(Compiler* comp,
                                  bool      fastTailCallsOnly,
                                  bool      tailCallsConvertibleToLoopOnly,
                                  GenTree** tailCall)
{
    assert(!fastTailCallsOnly || !tailCallsConvertibleToLoopOnly);
    *tailCall   = nullptr;
    bool result = false;

    // Is this a tail call?
    // The reason for keeping this under RyuJIT is so as not to impact existing Jit32 x86 and arm
    // targets.
    if (comp->compTailCallUsed)
    {
        if (fastTailCallsOnly || tailCallsConvertibleToLoopOnly)
        {
            // Only fast tail calls or only tail calls convertible to loops
            result = (bbFlags & BBF_HAS_JMP) && (bbJumpKind == BBJ_RETURN);
        }
        else
        {
            // Fast tail calls, tail calls convertible to loops, and tails calls dispatched via helper
            // (helper-dispatched tail calls end the block with BBJ_THROW).
            result = (bbJumpKind == BBJ_THROW) || ((bbFlags & BBF_HAS_JMP) && (bbJumpKind == BBJ_RETURN));
        }

        if (result)
        {
            // The flags only say a tail call is possible; confirm the last node
            // really is a call of the requested tail-call flavor.
            GenTree* lastNode = this->lastNode();
            if (lastNode->OperGet() == GT_CALL)
            {
                GenTreeCall* call = lastNode->AsCall();
                if (tailCallsConvertibleToLoopOnly)
                {
                    result = call->IsTailCallConvertibleToLoop();
                }
                else if (fastTailCallsOnly)
                {
                    result = call->IsFastTailCall();
                }
                else
                {
                    result = call->IsTailCall();
                }

                if (result)
                {
                    *tailCall = call;
                }
            }
            else
            {
                result = false;
            }
        }
    }

    return result;
}

//------------------------------------------------------------------------------
// endsWithTailCallConvertibleToLoop : Check if the block ends with a tail call convertible to loop.
//
// Arguments:
//    comp  -  compiler instance
//    tailCall  -  a pointer to a tree that will be set to the call tree if the block
//                 ends with a tail call convertible to loop and will be set to nullptr otherwise.
//
// Return Value:
//    true if the block ends with a tail call convertible to loop.
//
bool BasicBlock::endsWithTailCallConvertibleToLoop(Compiler* comp, GenTree** tailCall)
{
    bool fastTailCallsOnly              = false;
    bool tailCallsConvertibleToLoopOnly = true;
    return endsWithTailCall(comp, fastTailCallsOnly, tailCallsConvertibleToLoopOnly, tailCall);
}

/*****************************************************************************
 *
 *  Allocate a basic block but don't append it to the current BB list.
 */

BasicBlock* Compiler::bbNewBasicBlock(BBjumpKinds jumpKind)
{
    BasicBlock* block;

    /* Allocate the block descriptor and zero it out */
    assert(fgSafeBasicBlockCreation);

    block = new (this, CMK_BasicBlock) BasicBlock;

#if MEASURE_BLOCK_SIZE
    BasicBlock::s_Count += 1;
    BasicBlock::s_Size += sizeof(*block);
#endif

#ifdef DEBUG
    // fgLookupBB() is invalid until fgInitBBLookup() is called again.
    fgBBs = (BasicBlock**)0xCDCD;
#endif

    // TODO-Throughput: The following memset is pretty expensive - do something else?
    // Note that some fields have to be initialized to 0 (like bbFPStateX87)
    memset(block, 0, sizeof(*block));

    // scopeInfo needs to be able to differentiate between blocks which
    // correspond to some instrs (and so may have some LocalVarInfo
    // boundaries), or have been inserted by the JIT
    block->bbCodeOffs    = BAD_IL_OFFSET;
    block->bbCodeOffsEnd = BAD_IL_OFFSET;

    /* Give the block a number, set the ancestor count and weight */

    ++fgBBcount;

    // When inlining, block numbers are drawn from the inliner's numbering so
    // they remain unique across the combined method.
    if (compIsForInlining())
    {
        block->bbNum = ++impInlineInfo->InlinerCompiler->fgBBNumMax;
    }
    else
    {
        block->bbNum = ++fgBBNumMax;
    }

#ifndef LEGACY_BACKEND
    if (compRationalIRForm)
    {
        block->bbFlags |= BBF_IS_LIR;
    }
#endif // !LEGACY_BACKEND

    block->bbRefs   = 1;
    block->bbWeight = BB_UNITY_WEIGHT;

    block->bbStkTempsIn  = NO_BASE_TMP;
    block->bbStkTempsOut = NO_BASE_TMP;

    block->bbEntryState = nullptr;

    /* Record the jump kind in the block */

    block->bbJumpKind = jumpKind;

    // Blocks that end in a throw are assumed to be rarely executed.
    if (jumpKind == BBJ_THROW)
    {
        block->bbSetRunRarely();
    }

#ifdef DEBUG
    if (verbose)
    {
        printf("New Basic Block BB%02u [%p] created.\n", block->bbNum, dspPtr(block));
    }
#endif

    // We will give all the blocks var sets after the number of tracked variables
    // is determined and frozen.  After that, if we dynamically create a basic block,
    // we will initialize its var sets.
    if (fgBBVarSetsInited)
    {
        VarSetOps::AssignNoCopy(this, block->bbVarUse, VarSetOps::MakeEmpty(this));
        VarSetOps::AssignNoCopy(this, block->bbVarDef, VarSetOps::MakeEmpty(this));
        VarSetOps::AssignNoCopy(this, block->bbLiveIn, VarSetOps::MakeEmpty(this));
        VarSetOps::AssignNoCopy(this, block->bbLiveOut, VarSetOps::MakeEmpty(this));
        VarSetOps::AssignNoCopy(this, block->bbScope, VarSetOps::MakeEmpty(this));
    }
    else
    {
        VarSetOps::AssignNoCopy(this, block->bbVarUse, VarSetOps::UninitVal());
        VarSetOps::AssignNoCopy(this, block->bbVarDef, VarSetOps::UninitVal());
        VarSetOps::AssignNoCopy(this, block->bbLiveIn, VarSetOps::UninitVal());
        VarSetOps::AssignNoCopy(this, block->bbLiveOut, VarSetOps::UninitVal());
        VarSetOps::AssignNoCopy(this, block->bbScope, VarSetOps::UninitVal());
    }

    block->bbMemoryUse     = emptyMemoryKindSet;
    block->bbMemoryDef     = emptyMemoryKindSet;
    block->bbMemoryLiveIn  = emptyMemoryKindSet;
    block->bbMemoryLiveOut = emptyMemoryKindSet;

    for (MemoryKind memoryKind : allMemoryKinds())
    {
        block->bbMemorySsaPhiFunc[memoryKind] = nullptr;
        block->bbMemorySsaNumIn[memoryKind]   = 0;
        block->bbMemorySsaNumOut[memoryKind]  = 0;
    }

    // Make sure we reserve a NOT_IN_LOOP value that isn't a legal table index.
    static_assert_no_msg(MAX_LOOP_NUM < BasicBlock::NOT_IN_LOOP);

    block->bbNatLoopNum = BasicBlock::NOT_IN_LOOP;

    return block;
}