diff --git a/src/coreclr/jit/assertionprop.cpp b/src/coreclr/jit/assertionprop.cpp
index ce1eb57bf0c1da..9d4731ac02f6ab 100644
--- a/src/coreclr/jit/assertionprop.cpp
+++ b/src/coreclr/jit/assertionprop.cpp
@@ -6263,6 +6263,7 @@ Compiler::fgWalkResult Compiler::optVNBasedFoldCurStmt(BasicBlock* block,
         case GT_LSH:
         case GT_RSH:
         case GT_RSZ:
+        case GT_NOT:
         case GT_NEG:
         case GT_CAST:
         case GT_BITCAST:
diff --git a/src/coreclr/jit/morph.cpp b/src/coreclr/jit/morph.cpp
index 369f44207ba6d1..7caba07734be6d 100644
--- a/src/coreclr/jit/morph.cpp
+++ b/src/coreclr/jit/morph.cpp
@@ -7887,7 +7887,6 @@ GenTree* Compiler::fgMorphSmpOp(GenTree* tree, MorphAddrContext* mac, bool* optA
                 // Otherwise we may sign-extend incorrectly in cases where the GT_NEG
                 // node ends up feeding directly into a cast, for example in
                 // GT_CAST(GT_SUB(0, s_1.ubyte))
-
                 if (op1->IsIntegralConst(0))
                 {
                     tree->ChangeOper(GT_NEG);
@@ -10460,19 +10459,52 @@ GenTree* Compiler::fgOptimizeAddition(GenTreeOp* add)
         }
     }
 
-    // - a + b = > b - a
-    // ADD(NEG(a), b) => SUB(b, a)
-
-    // Do not do this if "op2" is constant for canonicalization purposes.
-    if (op1->OperIs(GT_NEG) && !op2->OperIs(GT_NEG) && !op2->IsIntegralConst() && gtCanSwapOrder(op1, op2))
+    if (op1->OperIs(GT_NEG) && !op2->OperIs(GT_NEG))
     {
-        add->SetOper(GT_SUB);
-        add->gtOp1 = op2;
-        add->gtOp2 = op1->AsOp()->gtGetOp1();
-
-        DEBUG_DESTROY_NODE(op1);
-
-        return add;
+        if (op2->IsIntegralConst())
+        {
+            // ADD(NEG(x), CONST) => XOR(x, CONST)
+
+            auto isSubToXorValid = [=](uint64_t cns, IntegralRange range) {
+                // cns - x, where x in [lo, hi]
+                uint64_t lo = IntegralRange::SymbolicToRealValue(range.GetLowerBound());
+                uint64_t hi = IntegralRange::SymbolicToRealValue(range.GetUpperBound());
+
+                // OR of all numbers in [lo, hi]
+                uint64_t knownBits = (lo == hi) ? 0 : (UINT64_MAX >> BitOperations::LeadingZeroCount(lo ^ hi));
+                knownBits = lo | knownBits;
+
+                // Zero out bits outside of TYPE. This handles cases that rely on overflow
+                uint32_t sizeInBits = genTypeSize(add->TypeGet()) * BITS_PER_BYTE;
+                knownBits &= (1ULL << (sizeInBits - 1)) - 1;
+
+                // At every bit pos with a 1 in knownBits, cns also needs 1.
+                // Otherwise borrowing occurs and XOR is not equivalent to SUB
+                return (cns & knownBits) == knownBits;
+            };
+
+            IntegralRange range = IntegralRange::ForNode(op1->gtGetOp1(), this);
+            uint64_t cns = (uint64_t)op2->AsIntConCommon()->IntegralValue();
+            if (isSubToXorValid(cns, range))
+            {
+                add->SetOper(GT_XOR, GenTree::PRESERVE_VN);
+                add->gtOp1 = op1->gtGetOp1();
+                return fgMorphTree(add);
+            }
+        }
+
+        // - a + b => b - a
+        // ADD(NEG(a), b) => SUB(b, a)
+        // Do not do this if "op2" is constant for canonicalization purposes.
+        if (!op2->IsIntegralConst() && gtCanSwapOrder(op1, op2))
+        {
+            add->SetOper(GT_SUB);
+            add->gtOp1 = op2;
+            add->gtOp2 = op1->AsOp()->gtGetOp1();
+
+            DEBUG_DESTROY_NODE(op1);
+            return add;
+        }
     }
 
     // a + -b = > a - b