diff --git a/src/coreclr/jit/redundantbranchopts.cpp b/src/coreclr/jit/redundantbranchopts.cpp index fbe16664ab7609..d8a46cea75c4f0 100644 --- a/src/coreclr/jit/redundantbranchopts.cpp +++ b/src/coreclr/jit/redundantbranchopts.cpp @@ -797,6 +797,19 @@ bool Compiler::optRedundantDominatingBranch(BasicBlock* const block) return false; } + // Exclude floating point compares. + // + VNFuncApp treeApp; + if (!vnStore->GetVNFunc(treeNormVN, &treeApp) || !ValueNumStore::VNFuncIsComparison(treeApp.m_func)) + { + return false; + } + + if (varTypeIsFloating(vnStore->TypeOfVN(treeApp.m_args[0]))) + { + return false; + } + // Skip through chains of empty or side effect free blocks. // Watch for cycles. // @@ -974,8 +987,68 @@ bool Compiler::optRedundantDominatingBranch(BasicBlock* const block) rii.domCmpNormVN = blockPathVN; optRelopImpliesRelop(&rii); + bool canOptimize = rii.canInfer && rii.canInferFromTrue && !rii.reverseSense; + + genTreeOps newRelop = GT_NONE; + bool isUnsigned = false; - if (!(rii.canInfer && rii.canInferFromTrue && !rii.reverseSense)) + if (!canOptimize) + { + if (rii.canInfer) + { + JITDUMP("Can't infer along the path we care about; trying simplification instead\n"); + } + else + { + JITDUMP("Can't infer, trying simplification instead\n"); + } + + // See if we can simplify the VN for blockPathVN AND domPathVN + // + ValueNum andVN = vnStore->VNForFunc(TYP_INT, VNF_AND, blockPathVN, domPathVN); + VNFuncApp andApp; + VNFuncApp pathApp; + VNFunc newRelopFunc = VNF_NONE; + if (vnStore->IsVNRelop(andVN, &andApp) && vnStore->GetVNFunc(blockPathVN, &pathApp)) + { + if (andApp.m_args[0] == pathApp.m_args[0] && andApp.m_args[1] == pathApp.m_args[1]) + { + newRelopFunc = andApp.m_func; + } + else if (andApp.m_args[0] == pathApp.m_args[1] && andApp.m_args[1] == pathApp.m_args[0]) + { + andVN = vnStore->GetRelatedRelop(andVN, ValueNumStore::VN_RELATION_KIND::VRK_Swap); + vnStore->GetVNFunc(andVN, &andApp); + newRelopFunc = andApp.m_func; + } + + 
JITDUMPEXEC(vnStore->vnDump(this, blockPathVN)); + JITDUMP(" AND"); + JITDUMPEXEC(vnStore->vnDump(this, domPathVN)); + JITDUMP(" ==>"); + JITDUMPEXEC(vnStore->vnDump(this, andVN)); + } + + // TODO-CQ: if the AND simplifies to a constant, we can optimize both the dominating branch + // and the current branch. This is likely rare. + + if (newRelopFunc != VNF_NONE) + { + newRelop = vnStore->VNRelopToGenTreeOp(newRelopFunc, &isUnsigned); + + if (newRelop != GT_NONE) + { + JITDUMP("; simplified to %s%s\n", GenTree::OpName(newRelop), isUnsigned ? " (unsigned)" : ""); + canOptimize = true; + } + } + else + { + JITDUMP("; not a relop, cannot simplify\n"); + } + } + + if (!canOptimize) { JITDUMP("failed -- Dominated VN " FMT_VN " does not imply dominating VN " FMT_VN "\n", blockPathVN, domPathVN); @@ -1008,6 +1081,29 @@ bool Compiler::optRedundantDominatingBranch(BasicBlock* const block) fgMorphBlockStmt(domBlockProbe, domStmt DEBUGARG(__FUNCTION__), /* allowFGChange */ true, /* invalidateDFSTreeOnFGChange */ false); Metrics.RedundantBranchesEliminated++; + + if (newRelop != GT_NONE) + { + if (sharedSuccessor == blockTrueSucc) + { + newRelop = GenTree::ReverseRelop(newRelop); + } + + tree->SetOper(newRelop); + + // Update GTF_UNSIGNED before re-value-numbering. + // + if (isUnsigned) + { + tree->SetUnsigned(); + } + else + { + tree->ClearUnsigned(); + } + + fgValueNumberTree(tree); + } madeChanges = true; // We can keep looking if we haven't seen any side effects yet along the path to block. 
diff --git a/src/coreclr/jit/valuenum.cpp b/src/coreclr/jit/valuenum.cpp index 3d76fd6333b04f..8950270870abc7 100644 --- a/src/coreclr/jit/valuenum.cpp +++ b/src/coreclr/jit/valuenum.cpp @@ -2574,6 +2574,25 @@ ValueNum ValueNumStore::VNForFunc(var_types typ, VNFunc func, ValueNum arg0VN) } } } + else if (func == VNFunc(GT_NOT)) + { + VNFuncApp funcApp; + if (GetVNFunc(arg0VN, &funcApp)) + { + // NOT(NOT(x)) ==> x + // + if (funcApp.m_func == VNFunc(GT_NOT)) + { + *resultVN = funcApp.m_args[0]; + } + // NOT(relop(x,y)) ==> Reverse(relop)(x,y) + // + else if (VNFuncIsComparison(funcApp.m_func)) + { + *resultVN = GetRelatedRelop(arg0VN, VN_RELATION_KIND::VRK_Reverse); + } + } + } // Try to perform constant-folding. // @@ -5048,6 +5067,206 @@ bool ValueNumStore::VNEvalShouldFold(var_types typ, VNFunc func, ValueNum arg0VN return true; } +// Table entry describing when AND/OR of two relops +// with identical operands can be combined into one +// or result in true/false +// +struct RelatedRelopEntry +{ + VNFunc relop0; + VNFunc relop1; + int constantValue; + VNFunc jointRelop; +}; + +// clang-format off + +static const RelatedRelopEntry s_relatedRelopTable_AND[] = { + // EQ & ... + {VNFunc(GT_EQ), VNFunc(GT_EQ), -1, VNFunc(GT_EQ)}, + {VNFunc(GT_EQ), VNFunc(GT_NE), 0, VNF_COUNT}, + {VNFunc(GT_EQ), VNFunc(GT_LE), -1, VNFunc(GT_EQ)}, + {VNFunc(GT_EQ), VNFunc(GT_LT), 0, VNF_COUNT}, + {VNFunc(GT_EQ), VNFunc(GT_GT), 0, VNF_COUNT}, + {VNFunc(GT_EQ), VNFunc(GT_GE), -1, VNFunc(GT_EQ)}, + + {VNFunc(GT_EQ), VNF_LE_UN, -1, VNFunc(GT_EQ)}, + {VNFunc(GT_EQ), VNF_LT_UN, 0, VNF_COUNT}, + {VNFunc(GT_EQ), VNF_GT_UN, 0, VNF_COUNT}, + {VNFunc(GT_EQ), VNF_GE_UN, -1, VNFunc(GT_EQ)}, + + // NE & ... 
+ {VNFunc(GT_NE), VNFunc(GT_EQ), 0, VNF_COUNT}, + {VNFunc(GT_NE), VNFunc(GT_NE), -1, VNFunc(GT_NE)}, + {VNFunc(GT_NE), VNFunc(GT_LE), -1, VNFunc(GT_LT)}, + {VNFunc(GT_NE), VNFunc(GT_LT), -1, VNFunc(GT_LT)}, + {VNFunc(GT_NE), VNFunc(GT_GT), -1, VNFunc(GT_GT)}, + {VNFunc(GT_NE), VNFunc(GT_GE), -1, VNFunc(GT_GT)}, + + {VNFunc(GT_NE), VNF_LE_UN, -1, VNF_LT_UN}, + {VNFunc(GT_NE), VNF_LT_UN, -1, VNF_LT_UN}, + {VNFunc(GT_NE), VNF_GT_UN, -1, VNF_GT_UN}, + {VNFunc(GT_NE), VNF_GE_UN, -1, VNF_GT_UN}, + + // LE & ... + {VNFunc(GT_LE), VNFunc(GT_EQ), -1, VNFunc(GT_EQ)}, + {VNFunc(GT_LE), VNFunc(GT_NE), -1, VNFunc(GT_LT)}, + {VNFunc(GT_LE), VNFunc(GT_LE), -1, VNFunc(GT_LE)}, + {VNFunc(GT_LE), VNFunc(GT_LT), -1, VNFunc(GT_LT)}, + {VNFunc(GT_LE), VNFunc(GT_GT), 0, VNF_COUNT}, + {VNFunc(GT_LE), VNFunc(GT_GE), -1, VNFunc(GT_EQ)}, + + // LT & ... + {VNFunc(GT_LT), VNFunc(GT_EQ), 0, VNF_COUNT}, + {VNFunc(GT_LT), VNFunc(GT_NE), -1, VNFunc(GT_LT)}, + {VNFunc(GT_LT), VNFunc(GT_LE), -1, VNFunc(GT_LT)}, + {VNFunc(GT_LT), VNFunc(GT_LT), -1, VNFunc(GT_LT)}, + {VNFunc(GT_LT), VNFunc(GT_GT), 0, VNF_COUNT}, + {VNFunc(GT_LT), VNFunc(GT_GE), 0, VNF_COUNT}, + + // GT & ... + {VNFunc(GT_GT), VNFunc(GT_EQ), 0, VNF_COUNT}, + {VNFunc(GT_GT), VNFunc(GT_NE), -1, VNFunc(GT_GT)}, + {VNFunc(GT_GT), VNFunc(GT_LE), 0, VNF_COUNT}, + {VNFunc(GT_GT), VNFunc(GT_LT), 0, VNF_COUNT}, + {VNFunc(GT_GT), VNFunc(GT_GT), -1, VNFunc(GT_GT)}, + {VNFunc(GT_GT), VNFunc(GT_GE), -1, VNFunc(GT_GT)}, + + // GE & ... + {VNFunc(GT_GE), VNFunc(GT_EQ), -1, VNFunc(GT_EQ)}, + {VNFunc(GT_GE), VNFunc(GT_NE), -1, VNFunc(GT_GT)}, + {VNFunc(GT_GE), VNFunc(GT_LE), -1, VNFunc(GT_EQ)}, + {VNFunc(GT_GE), VNFunc(GT_LT), 0, VNF_COUNT}, + {VNFunc(GT_GE), VNFunc(GT_GT), -1, VNFunc(GT_GT)}, + {VNFunc(GT_GE), VNFunc(GT_GE), -1, VNFunc(GT_GE)}, + + // LEU & ... 
+ {VNF_LE_UN, VNFunc(GT_EQ), -1, VNFunc(GT_EQ)}, + {VNF_LE_UN, VNFunc(GT_NE), -1, VNF_LT_UN}, + {VNF_LE_UN, VNF_LE_UN, -1, VNF_LE_UN}, + {VNF_LE_UN, VNF_LT_UN, -1, VNF_LT_UN}, + {VNF_LE_UN, VNF_GT_UN, 0, VNF_COUNT}, + {VNF_LE_UN, VNF_GE_UN, -1, VNFunc(GT_EQ)}, + + // LTU & ... + {VNF_LT_UN, VNFunc(GT_EQ), 0, VNF_COUNT}, + {VNF_LT_UN, VNFunc(GT_NE), -1, VNF_LT_UN}, + {VNF_LT_UN, VNF_LE_UN, -1, VNF_LT_UN}, + {VNF_LT_UN, VNF_LT_UN, -1, VNF_LT_UN}, + {VNF_LT_UN, VNF_GT_UN, 0, VNF_COUNT}, + {VNF_LT_UN, VNF_GE_UN, 0, VNF_COUNT}, + + // GTU & ... + {VNF_GT_UN, VNFunc(GT_EQ), 0, VNF_COUNT}, + {VNF_GT_UN, VNFunc(GT_NE), -1, VNF_GT_UN}, + {VNF_GT_UN, VNF_LE_UN, 0, VNF_COUNT}, + {VNF_GT_UN, VNF_LT_UN, 0, VNF_COUNT}, + {VNF_GT_UN, VNF_GT_UN, -1, VNF_GT_UN}, + {VNF_GT_UN, VNF_GE_UN, -1, VNF_GT_UN}, + + // GEU & ... + {VNF_GE_UN, VNFunc(GT_EQ), -1, VNFunc(GT_EQ)}, + {VNF_GE_UN, VNFunc(GT_NE), -1, VNF_GT_UN}, + {VNF_GE_UN, VNF_LE_UN, -1, VNFunc(GT_EQ)}, + {VNF_GE_UN, VNF_LT_UN, 0, VNF_COUNT}, + {VNF_GE_UN, VNF_GT_UN, -1, VNF_GT_UN}, + {VNF_GE_UN, VNF_GE_UN, -1, VNF_GE_UN}, +}; + +static const RelatedRelopEntry s_relatedRelopTable_OR[] = { + // EQ | ... + {VNFunc(GT_EQ), VNFunc(GT_EQ), -1, VNFunc(GT_EQ)}, + {VNFunc(GT_EQ), VNFunc(GT_NE), 1, VNF_COUNT}, + {VNFunc(GT_EQ), VNFunc(GT_LE), -1, VNFunc(GT_LE)}, + {VNFunc(GT_EQ), VNFunc(GT_LT), -1, VNFunc(GT_LE)}, + {VNFunc(GT_EQ), VNFunc(GT_GT), -1, VNFunc(GT_GE)}, + {VNFunc(GT_EQ), VNFunc(GT_GE), -1, VNFunc(GT_GE)}, + + {VNFunc(GT_EQ), VNF_LE_UN, -1, VNF_LE_UN}, + {VNFunc(GT_EQ), VNF_LT_UN, -1, VNF_LE_UN}, + {VNFunc(GT_EQ), VNF_GT_UN, -1, VNF_GE_UN}, + {VNFunc(GT_EQ), VNF_GE_UN, -1, VNF_GE_UN}, + + // NE | ... 
+ {VNFunc(GT_NE), VNFunc(GT_EQ), 1, VNF_COUNT}, + {VNFunc(GT_NE), VNFunc(GT_NE), -1, VNFunc(GT_NE)}, + {VNFunc(GT_NE), VNFunc(GT_LE), 1, VNF_COUNT}, + {VNFunc(GT_NE), VNFunc(GT_LT), -1, VNFunc(GT_NE)}, + {VNFunc(GT_NE), VNFunc(GT_GT), -1, VNFunc(GT_NE)}, + {VNFunc(GT_NE), VNFunc(GT_GE), 1, VNF_COUNT}, + + {VNFunc(GT_NE), VNF_LE_UN, 1, VNF_COUNT}, + {VNFunc(GT_NE), VNF_LT_UN, -1, VNFunc(GT_NE)}, + {VNFunc(GT_NE), VNF_GT_UN, -1, VNFunc(GT_NE)}, + {VNFunc(GT_NE), VNF_GE_UN, 1, VNF_COUNT}, + + // LE | ... + {VNFunc(GT_LE), VNFunc(GT_EQ), -1, VNFunc(GT_LE)}, + {VNFunc(GT_LE), VNFunc(GT_NE), 1, VNF_COUNT}, + {VNFunc(GT_LE), VNFunc(GT_LE), -1, VNFunc(GT_LE)}, + {VNFunc(GT_LE), VNFunc(GT_LT), -1, VNFunc(GT_LE)}, + {VNFunc(GT_LE), VNFunc(GT_GT), 1, VNF_COUNT}, + {VNFunc(GT_LE), VNFunc(GT_GE), 1, VNF_COUNT}, + + // LT | ... + {VNFunc(GT_LT), VNFunc(GT_EQ), -1, VNFunc(GT_LE)}, + {VNFunc(GT_LT), VNFunc(GT_NE), -1, VNFunc(GT_NE)}, + {VNFunc(GT_LT), VNFunc(GT_LE), -1, VNFunc(GT_LE)}, + {VNFunc(GT_LT), VNFunc(GT_LT), -1, VNFunc(GT_LT)}, + {VNFunc(GT_LT), VNFunc(GT_GT), -1, VNFunc(GT_NE)}, + {VNFunc(GT_LT), VNFunc(GT_GE), 1, VNF_COUNT}, + + // GT | ... + {VNFunc(GT_GT), VNFunc(GT_EQ), -1, VNFunc(GT_GE)}, + {VNFunc(GT_GT), VNFunc(GT_NE), -1, VNFunc(GT_NE)}, + {VNFunc(GT_GT), VNFunc(GT_LE), 1, VNF_COUNT}, + {VNFunc(GT_GT), VNFunc(GT_LT), -1, VNFunc(GT_NE)}, + {VNFunc(GT_GT), VNFunc(GT_GT), -1, VNFunc(GT_GT)}, + {VNFunc(GT_GT), VNFunc(GT_GE), -1, VNFunc(GT_GE)}, + + // GE | ... + {VNFunc(GT_GE), VNFunc(GT_EQ), -1, VNFunc(GT_GE)}, + {VNFunc(GT_GE), VNFunc(GT_NE), 1, VNF_COUNT}, + {VNFunc(GT_GE), VNFunc(GT_LE), 1, VNF_COUNT}, + {VNFunc(GT_GE), VNFunc(GT_LT), 1, VNF_COUNT}, + {VNFunc(GT_GE), VNFunc(GT_GT), -1, VNFunc(GT_GE)}, + {VNFunc(GT_GE), VNFunc(GT_GE), -1, VNFunc(GT_GE)}, + + // LEU | ... 
+ {VNF_LE_UN, VNFunc(GT_EQ), -1, VNF_LE_UN}, + {VNF_LE_UN, VNFunc(GT_NE), 1, VNF_COUNT}, + {VNF_LE_UN, VNF_LE_UN, -1, VNF_LE_UN}, + {VNF_LE_UN, VNF_LT_UN, -1, VNF_LE_UN}, + {VNF_LE_UN, VNF_GT_UN, 1, VNF_COUNT}, + {VNF_LE_UN, VNF_GE_UN, 1, VNF_COUNT}, + + // LTU | ... + {VNF_LT_UN, VNFunc(GT_EQ), -1, VNF_LE_UN}, + {VNF_LT_UN, VNFunc(GT_NE), -1, VNFunc(GT_NE)}, + {VNF_LT_UN, VNF_LE_UN, -1, VNF_LE_UN}, + {VNF_LT_UN, VNF_LT_UN, -1, VNF_LT_UN}, + {VNF_LT_UN, VNF_GT_UN, -1, VNFunc(GT_NE)}, + {VNF_LT_UN, VNF_GE_UN, 1, VNF_COUNT}, + + // GTU | ... + {VNF_GT_UN, VNFunc(GT_EQ), -1, VNF_GE_UN}, + {VNF_GT_UN, VNFunc(GT_NE), -1, VNFunc(GT_NE)}, + {VNF_GT_UN, VNF_LE_UN, 1, VNF_COUNT}, + {VNF_GT_UN, VNF_LT_UN, -1, VNFunc(GT_NE)}, + {VNF_GT_UN, VNF_GT_UN, -1, VNF_GT_UN}, + {VNF_GT_UN, VNF_GE_UN, -1, VNF_GE_UN}, + + // GEU | ... + {VNF_GE_UN, VNFunc(GT_EQ), -1, VNF_GE_UN}, + {VNF_GE_UN, VNFunc(GT_NE), 1, VNF_COUNT}, + {VNF_GE_UN, VNF_LE_UN, 1, VNF_COUNT}, + {VNF_GE_UN, VNF_LT_UN, 1, VNF_COUNT}, + {VNF_GE_UN, VNF_GT_UN, -1, VNF_GE_UN}, + {VNF_GE_UN, VNF_GE_UN, -1, VNF_GE_UN}, +}; + +// clang-format on + //---------------------------------------------------------------------------------------- // EvalUsingMathIdentity // - Attempts to evaluate 'func' by using mathematical identities @@ -5360,7 +5579,68 @@ ValueNum ValueNumStore::EvalUsingMathIdentity(var_types typ, VNFunc func, ValueN if (arg0VN == arg1VN) { resultVN = arg0VN; + break; + } + + // x | ~x == ~0 + // + // Skip when x is a relop: relops have boolean 0/1 values, so the + // result should be 1, not AllBitsSet. The relop-combination table + // below handles `relop | Reverse(relop)` and yields VNOneForType. 
+ // + VNFuncApp arg0Check; + const bool arg0IsRelop = GetVNFunc(arg0VN, &arg0Check) && VNFuncIsComparison(arg0Check.m_func); + if (!arg0IsRelop) + { + ValueNum arg0VNnot = VNForFunc(typ, VNFunc(GT_NOT), arg0VN); + if (arg0VNnot == arg1VN) + { + resultVN = VNAllBitsForType(typ, 1); + break; + } + } + + // relop1(x,y) | relop2(x,y) ==> relop3(x,y) or 0/1 + // + // eg + // LE(x,y) | NE(x,y) ==> NE(x,y) + // LE(x,y) | GT(x,y) ==> 1 + // + // for integral comparisons + // + VNFuncApp arg0FN; + if (GetVNFunc(arg0VN, &arg0FN) && VNFuncIsComparison(arg0FN.m_func) && + !varTypeIsFloating(TypeOfVN(arg0FN.m_args[0]))) + { + VNFuncApp arg1FN; + if (GetVNFunc(arg1VN, &arg1FN) && VNFuncIsComparison(arg1FN.m_func)) + { + if ((arg0FN.m_args[0] == arg1FN.m_args[0]) && (arg0FN.m_args[1] == arg1FN.m_args[1])) + { + for (const RelatedRelopEntry& entry : s_relatedRelopTable_OR) + { + if (((entry.relop0 == arg0FN.m_func) && (entry.relop1 == arg1FN.m_func)) || + ((entry.relop0 == arg1FN.m_func) && (entry.relop1 == arg0FN.m_func))) + { + if (entry.constantValue == 1) + { + resultVN = VNOneForType(typ); + } + else if (entry.constantValue == 0) + { + resultVN = VNZeroForType(typ); + } + else + { + resultVN = VNForFunc(typ, entry.jointRelop, arg0FN.m_args[0], arg0FN.m_args[1]); + } + break; + } + } + } + } } + break; } @@ -5405,7 +5685,58 @@ ValueNum ValueNumStore::EvalUsingMathIdentity(var_types typ, VNFunc func, ValueN if (arg0VN == arg1VN) { resultVN = arg0VN; + break; + } + + // x & ~x == 0 + ValueNum arg0VNnot = VNForFunc(typ, VNFunc(GT_NOT), arg0VN); + if (arg0VNnot == arg1VN) + { + resultVN = ZeroVN; + break; } + + // relop1(x,y) & relop2(x,y) ==> relop3(x,y) or 0/1 + // + // eg + // LE(x,y) & NE(x,y) ==> LT(x,y) + // LE(x,y) & GT(x,y) ==> 0 + // + // for integral comparisons + // + VNFuncApp arg0FN; + if (GetVNFunc(arg0VN, &arg0FN) && VNFuncIsComparison(arg0FN.m_func) && + !varTypeIsFloating(TypeOfVN(arg0FN.m_args[0]))) + { + VNFuncApp arg1FN; + if (GetVNFunc(arg1VN, &arg1FN) && 
VNFuncIsComparison(arg1FN.m_func)) + { + if ((arg0FN.m_args[0] == arg1FN.m_args[0]) && (arg0FN.m_args[1] == arg1FN.m_args[1])) + { + for (const RelatedRelopEntry& entry : s_relatedRelopTable_AND) + { + if (((entry.relop0 == arg0FN.m_func) && (entry.relop1 == arg1FN.m_func)) || + ((entry.relop0 == arg1FN.m_func) && (entry.relop1 == arg0FN.m_func))) + { + if (entry.constantValue == 1) + { + resultVN = VNOneForType(typ); + } + else if (entry.constantValue == 0) + { + resultVN = VNZeroForType(typ); + } + else + { + resultVN = VNForFunc(typ, entry.jointRelop, arg0FN.m_args[0], arg0FN.m_args[1]); + } + break; + } + } + } + } + } + break; } @@ -6843,6 +7174,50 @@ VNFunc ValueNumStore::SwapRelop(VNFunc vnf) return swappedFunc; } +//------------------------------------------------------------------------ +// VNRelopToGenTreeOp: return genTreeOps for a relop VNFunc +// +// Arguments: +// vnf - vnf for original relop +// isUnsigned - [out] set to true if vnf is an unsigned integer relop, false otherwise +// +// Returns: +// GenTreeOps for the relop, or GT_NONE if the original VNFunc was not a relop. 
+// +genTreeOps ValueNumStore::VNRelopToGenTreeOp(VNFunc vnf, bool* isUnsigned) +{ + *isUnsigned = false; + switch (vnf) + { + case VNF_LT: + return GT_LT; + case VNF_LE: + return GT_LE; + case VNF_GE: + return GT_GE; + case VNF_GT: + return GT_GT; + case VNF_EQ: + return GT_EQ; + case VNF_NE: + return GT_NE; + case VNF_LT_UN: + *isUnsigned = true; + return GT_LT; + case VNF_LE_UN: + *isUnsigned = true; + return GT_LE; + case VNF_GE_UN: + *isUnsigned = true; + return GT_GE; + case VNF_GT_UN: + *isUnsigned = true; + return GT_GT; + default: + return GT_NONE; + } +} + //------------------------------------------------------------------------ // GetRelatedRelop: return value number for reversed/swapped comparison // diff --git a/src/coreclr/jit/valuenum.h b/src/coreclr/jit/valuenum.h index 47c7af240ce290..b9d9ec6eff6b82 100644 --- a/src/coreclr/jit/valuenum.h +++ b/src/coreclr/jit/valuenum.h @@ -1136,6 +1136,15 @@ class ValueNumStore // Returns true iff the VN represents a relop bool IsVNRelop(ValueNum vn, VNFuncApp* pFuncApp = nullptr); + // Map this VNFunc back to a gen tree op (relops only). Returns GT_NONE for + // any non-relop VNFunc. `isUnsigned` is set to true for VNF_*_UN variants. + // + // Note: VNF_*_UN is also used to represent unordered floating-point relops + // (see `GetVNFuncForNode`). Callers that propagate `isUnsigned` into a + // GTF_UNSIGNED flag must ensure the operands are integral; this helper + // cannot distinguish the two cases from a VNFunc alone. + genTreeOps VNRelopToGenTreeOp(VNFunc vnf, bool* isUnsigned); + enum class VN_RELATION_KIND { VRK_Inferred, // (x ? y) diff --git a/src/tests/JIT/opt/RedundantBranch/RedundantBranchSimplify.cs b/src/tests/JIT/opt/RedundantBranch/RedundantBranchSimplify.cs new file mode 100644 index 00000000000000..3341a4d9550f26 --- /dev/null +++ b/src/tests/JIT/opt/RedundantBranch/RedundantBranchSimplify.cs @@ -0,0 +1,147 @@ +// Licensed to the .NET Foundation under one or more agreements. 
+// The .NET Foundation licenses this file to you under the MIT license. + +using System; +using System.Runtime.CompilerServices; +using Xunit; + +// These cases exercise the relop simplification path in redundant branch +// elimination: a dominating relop does not directly imply the dominated +// relop, but AND of the two path predicates simplifies to a single relop +// (or the dominated relop can be reversed/rewritten). +public class RedundantBranchSimplify +{ + // if (a >= 100) { if (a <= 100) return 1; } => inner becomes (a == 100) + [MethodImpl(MethodImplOptions.NoInlining)] + private static int GeLe(int a) + { + if (a >= 100) + { + if (a <= 100) + { + return 1; + } + return 2; + } + return 3; + } + + [Theory] + [InlineData(99, 3)] + [InlineData(100, 1)] + [InlineData(101, 2)] + public static void TestGeLe(int a, int expected) => Assert.Equal(expected, GeLe(a)); + + [MethodImpl(MethodImplOptions.NoInlining)] + private static bool GreaterThanOrEqualZero(int a) => a == 0 || a > 0; + + [Theory] + [InlineData(-1, false)] + [InlineData(0, true)] + [InlineData(1, true)] + [InlineData(int.MinValue, false)] + [InlineData(int.MaxValue, true)] + public static void TestGreaterThanOrEqualZero(int a, bool expected) => + Assert.Equal(expected, GreaterThanOrEqualZero(a)); + + [MethodImpl(MethodImplOptions.NoInlining)] + private static int GeLeSwapped(int a) + { + if (100 <= a) + { + if (a <= 100) + { + return 1; + } + return 2; + } + return 3; + } + + [Theory] + [InlineData(99, 3)] + [InlineData(100, 1)] + [InlineData(101, 2)] + public static void TestGeLeSwapped(int a, int expected) => Assert.Equal(expected, GeLeSwapped(a)); + + [MethodImpl(MethodImplOptions.NoInlining)] + private static int NeLeUnsigned(uint a, uint b) + { + if (a != b) + { + if (a <= b) + { + return 1; + } + return 2; + } + return 3; + } + + [Theory] + [InlineData(0u, 0u, 3)] + [InlineData(5u, 5u, 3)] + [InlineData(4u, 5u, 1)] + [InlineData(6u, 5u, 2)] + [InlineData(0u, uint.MaxValue, 1)] + 
[InlineData(uint.MaxValue, 0u, 2)]
+    public static void TestNeLeUnsigned(uint a, uint b, int expected) =>
+        Assert.Equal(expected, NeLeUnsigned(a, b));
+
+    [MethodImpl(MethodImplOptions.NoInlining)]
+    private static int NeGeUnsigned(uint a, uint b)
+    {
+        if (a != b)
+        {
+            if (a >= b)
+            {
+                return 1;
+            }
+            return 2;
+        }
+        return 3;
+    }
+
+    [Theory]
+    [InlineData(0u, 0u, 3)]
+    [InlineData(5u, 5u, 3)]
+    [InlineData(4u, 5u, 2)]
+    [InlineData(6u, 5u, 1)]
+    [InlineData(0u, uint.MaxValue, 2)]
+    [InlineData(uint.MaxValue, 0u, 1)]
+    public static void TestNeGeUnsigned(uint a, uint b, int expected) =>
+        Assert.Equal(expected, NeGeUnsigned(a, b));
+
+    // NaN must be handled correctly.
+    [MethodImpl(MethodImplOptions.NoInlining)]
+    private static int FloatGeLe(double a)
+    {
+        if (a >= 0.0)
+        {
+            if (a <= 0.0)
+            {
+                return 1;
+            }
+            return 2;
+        }
+        return 3;
+    }
+
+    [Theory]
+    [InlineData(-1.0, 3)]
+    [InlineData(0.0, 1)]
+    [InlineData(1.0, 2)]
+    public static void TestFloatGeLe(double a, int expected) => Assert.Equal(expected, FloatGeLe(a));
+
+    // Special FP values are passed via separate Fact methods because the
+    // XUnitWrapperGenerator that emits the merged test runner does not
+    // qualify identifiers like `NaN` in InlineData arguments.
+    [Fact]
+    public static void TestFloatGeLeNaN() => Assert.Equal(3, FloatGeLe(double.NaN));
+
+    [Fact]
+    public static void TestFloatGeLePositiveInfinity() => Assert.Equal(2, FloatGeLe(double.PositiveInfinity));
+
+    [Fact]
+    public static void TestFloatGeLeNegativeInfinity() => Assert.Equal(3, FloatGeLe(double.NegativeInfinity));
+}
\ No newline at end of file
diff --git a/src/tests/JIT/opt/RedundantBranch/RedundantBranchSimplify.csproj b/src/tests/JIT/opt/RedundantBranch/RedundantBranchSimplify.csproj
new file mode 100644
index 00000000000000..de6d5e08882e86
--- /dev/null
+++ b/src/tests/JIT/opt/RedundantBranch/RedundantBranchSimplify.csproj
@@ -0,0 +1,8 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <Optimize>True</Optimize>
+  </PropertyGroup>
+  <ItemGroup>
+    <Compile Include="$(MSBuildProjectName).cs" />
+  </ItemGroup>
+</Project>