@@ -2526,7 +2526,6 @@ bool Compiler::fgOptimizeBranch(BasicBlock* bJump)
         return false;
     }

-    // We might be able to compact blocks that always jump to the next block.
     if (bJump->JumpsToNext())
     {
         return false;
@@ -2537,7 +2536,7 @@ bool Compiler::fgOptimizeBranch(BasicBlock* bJump)
         return false;
     }

-    BasicBlock* bDest = bJump->GetTarget();
+    BasicBlock* const bDest = bJump->GetTarget();

     if (!bDest->KindIs(BBJ_COND))
     {
@@ -2556,12 +2555,11 @@ bool Compiler::fgOptimizeBranch(BasicBlock* bJump)
         return false;
     }

-    // do not jump into another try region
-    BasicBlock* bDestNormalTarget = bDest->GetFalseTarget();
-    if (bDestNormalTarget->hasTryIndex() && !BasicBlock::sameTryRegion(bJump, bDestNormalTarget))
-    {
-        return false;
-    }
+    // We should have already compacted 'bDest' into 'bJump', if it is possible.
+    assert(!fgCanCompactBlock(bJump));
+
+    BasicBlock* const trueTarget  = bDest->GetTrueTarget();
+    BasicBlock* const falseTarget = bDest->GetFalseTarget();

     // This function is only called in the frontend.
     assert(!bJump->IsLIR());
@@ -2587,10 +2585,10 @@ bool Compiler::fgOptimizeBranch(BasicBlock* bJump)
     bool     haveProfileWeights = false;
     weight_t weightJump         = bJump->bbWeight;
     weight_t weightDest         = bDest->bbWeight;
-    weight_t weightNext         = bJump->Next()->bbWeight;
+    weight_t weightNext         = trueTarget->bbWeight;
     bool     rareJump           = bJump->isRunRarely();
     bool     rareDest           = bDest->isRunRarely();
-    bool     rareNext           = bJump->Next()->isRunRarely();
+    bool     rareNext           = trueTarget->isRunRarely();

     // If we have profile data then we calculate the number of time
     // the loop will iterate into loopIterations
@@ -2601,7 +2599,7 @@ bool Compiler::fgOptimizeBranch(BasicBlock* bJump)
     //
     if (bJump->HasAnyFlag(BBF_PROF_WEIGHT | BBF_RUN_RARELY) &&
         bDest->HasAnyFlag(BBF_PROF_WEIGHT | BBF_RUN_RARELY) &&
-        bJump->Next()->HasAnyFlag(BBF_PROF_WEIGHT | BBF_RUN_RARELY))
+        trueTarget->HasAnyFlag(BBF_PROF_WEIGHT | BBF_RUN_RARELY))
     {
         haveProfileWeights = true;

@@ -2715,7 +2713,7 @@ bool Compiler::fgOptimizeBranch(BasicBlock* bJump)
     noway_assert(condTree->gtOper == GT_JTRUE);

     // Set condTree to the operand to the GT_JTRUE.
-    condTree = condTree->AsOp()->gtOp1;
+    condTree = condTree->gtGetOp1();

     // This condTree has to be a RelOp comparison.
     if (condTree->OperIsCompare() == false)
@@ -2767,12 +2765,11 @@ bool Compiler::fgOptimizeBranch(BasicBlock* bJump)
2767
2765
// because the comparison in 'bJump' is flipped.
2768
2766
// Similarly, we will derive the true edge's likelihood from 'destFalseEdge'.
2769
2767
//
2770
- BasicBlock* const bDestFalseTarget = bJump->Next ();
2771
- FlowEdge* const falseEdge = fgAddRefPred (bDestFalseTarget, bJump, destTrueEdge);
2768
+ FlowEdge* const falseEdge = fgAddRefPred (trueTarget, bJump, destTrueEdge);
2772
2769
2773
2770
// bJump now jumps to bDest's normal jump target
2774
2771
//
2775
- fgRedirectTargetEdge (bJump, bDestNormalTarget );
2772
+ fgRedirectTargetEdge (bJump, falseTarget );
2776
2773
bJump->GetTargetEdge ()->setLikelihood (destFalseEdge->getLikelihood ());
2777
2774
2778
2775
bJump->SetCond (bJump->GetTargetEdge (), falseEdge);
@@ -2787,10 +2784,10 @@ bool Compiler::fgOptimizeBranch(BasicBlock* bJump)

     // Propagate bJump's weight into its new successors
     //
-    bDestNormalTarget->setBBProfileWeight(bDestNormalTarget->computeIncomingWeight());
-    bDestFalseTarget->setBBProfileWeight(bDestFalseTarget->computeIncomingWeight());
+    trueTarget->setBBProfileWeight(trueTarget->computeIncomingWeight());
+    falseTarget->setBBProfileWeight(falseTarget->computeIncomingWeight());

-    if ((bDestNormalTarget->NumSucc() > 0) || (bDestFalseTarget->NumSucc() > 0))
+    if ((trueTarget->NumSucc() > 0) || (falseTarget->NumSucc() > 0))
     {
         JITDUMP("fgOptimizeBranch: New flow out of " FMT_BB " needs to be propagated. Data %s inconsistent.\n",
                 bJump->bbNum, fgPgoConsistent ? "is now" : "was already");
@@ -2815,6 +2812,14 @@ bool Compiler::fgOptimizeBranch(BasicBlock* bJump)
     }
 #endif // DEBUG

+    // Removing flow from 'bJump' into 'bDest' may have made it possible to compact the latter.
+    BasicBlock* const uniquePred = bDest->GetUniquePred(this);
+    if ((uniquePred != nullptr) && fgCanCompactBlock(uniquePred))
+    {
+        JITDUMP(FMT_BB " can now be compacted into its remaining predecessor.\n", bDest->bbNum);
+        fgCompactBlock(uniquePred);
+    }
+
     return true;
 }
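To make the new follow-up step in the last hunk concrete, here is a minimal, self-contained toy sketch of the "fold a block into its lone predecessor" idea. Everything in it (`Block`, `canCompactInto`, `compactInto`) is an invented stand-in for illustration only; it is not the JIT's `BasicBlock`, `fgCanCompactBlock`, or `fgCompactBlock`, which perform many more checks (EH regions, block kinds, flags) than shown here.

```cpp
#include <cstdio>
#include <vector>

// Toy CFG node. 'Block' and the helpers below are hypothetical stand-ins,
// not the JIT's real BasicBlock/Compiler API.
struct Block
{
    int                 num;
    std::vector<Block*> preds;          // predecessor blocks
    Block*              next = nullptr; // unconditional fall-through successor
    std::vector<int>    stmts;          // stands in for the block's IR statements

    // Returns the lone predecessor, or nullptr if there are zero or several
    // (plays the role GetUniquePred plays in the patch).
    Block* uniquePred() const
    {
        return (preds.size() == 1) ? preds[0] : nullptr;
    }
};

// 'block' can be folded into 'pred' when 'pred' flows only into 'block' and
// 'block' has no other predecessors (the spirit of fgCanCompactBlock; the
// real check also considers EH regions, block kinds, and flags).
bool canCompactInto(Block* pred, Block* block)
{
    return (pred != nullptr) && (pred->next == block) && (block->uniquePred() == pred);
}

// Move 'block's statements into 'pred' and splice 'block' out of the graph
// (the spirit of fgCompactBlock).
void compactInto(Block* pred, Block* block)
{
    pred->stmts.insert(pred->stmts.end(), block->stmts.begin(), block->stmts.end());
    pred->next = block->next;
    if (block->next != nullptr)
    {
        for (Block*& p : block->next->preds)
        {
            if (p == block)
            {
                p = pred; // rewire the successor's pred list past 'block'
            }
        }
    }
    block->preds.clear();
}

int main()
{
    // After fgOptimizeBranch rewires bJump away from bDest, bDest may be left
    // with a single fall-through predecessor, like 'dest' here.
    Block pred{1}, dest{2}, tail{3};
    pred.next  = &dest;
    pred.stmts = {10};
    dest.preds = {&pred};
    dest.next  = &tail;
    dest.stmts = {20, 30};
    tail.preds = {&dest};

    if (canCompactInto(dest.uniquePred(), &dest))
    {
        compactInto(dest.uniquePred(), &dest);
    }

    // Prints: block 1 now holds 3 statements and falls into block 3
    printf("block %d now holds %zu statements and falls into block %d\n",
           pred.num, pred.stmts.size(), pred.next->num);
    return 0;
}
```

The ordering in the patch is the point: the compaction attempt runs only after `bJump`'s edges have been redirected, since that redirection is what can leave `bDest` with a unique predecessor in the first place.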