diff --git a/src/coreclr/jit/codegen.h b/src/coreclr/jit/codegen.h
index 926f2c1eb86022..96954703599c93 100644
--- a/src/coreclr/jit/codegen.h
+++ b/src/coreclr/jit/codegen.h
@@ -216,6 +216,7 @@ class CodeGen final : public CodeGenInterface
     unsigned  findTargetDepth(BasicBlock* target);
     void      WasmProduceReg(GenTree* node);
     regNumber GetMultiUseOperandReg(GenTree* operand);
+    void      genEmitNullCheck(regNumber reg);
 #endif
 
     void genEmitStartBlock(BasicBlock* block);
diff --git a/src/coreclr/jit/codegenwasm.cpp b/src/coreclr/jit/codegenwasm.cpp
index a23105ca5b1a1b..378f402fb3c60c 100644
--- a/src/coreclr/jit/codegenwasm.cpp
+++ b/src/coreclr/jit/codegenwasm.cpp
@@ -747,16 +747,19 @@ static constexpr uint32_t PackTypes(var_types toType, var_types fromType)
 //
 void CodeGen::genIntToIntCast(GenTreeCast* cast)
 {
-    if (cast->gtOverflow())
+    GenIntCastDesc desc(cast);
+
+    if (desc.CheckKind() != GenIntCastDesc::CHECK_NONE)
     {
-        NYI_WASM("Overflow checks");
+        GenTree*  castValue = cast->gtGetOp1();
+        regNumber castReg   = GetMultiUseOperandReg(castValue);
+        genIntCastOverflowCheck(cast, desc, castReg);
     }
 
-    GenIntCastDesc desc(cast);
-    var_types      toType     = genActualType(cast->CastToType());
-    var_types      fromType   = genActualType(cast->CastOp());
-    int            extendSize = desc.ExtendSrcSize();
-    instruction    ins        = INS_none;
+    var_types   toType     = genActualType(cast->CastToType());
+    var_types   fromType   = genActualType(cast->CastOp());
+    int         extendSize = desc.ExtendSrcSize();
+    instruction ins        = INS_none;
 
     assert(fromType == TYP_INT || fromType == TYP_LONG);
     genConsumeOperands(cast);
@@ -819,6 +822,97 @@ void CodeGen::genIntToIntCast(GenTreeCast* cast)
     WasmProduceReg(cast);
 }
 
+//------------------------------------------------------------------------
+// genIntCastOverflowCheck: Generate overflow checking code for an integer cast.
+//
+// Arguments:
+//    cast - The GT_CAST node
+//    desc - The cast description
+//    reg  - The register containing the value to check
+//
+void CodeGen::genIntCastOverflowCheck(GenTreeCast* cast, const GenIntCastDesc& desc, regNumber reg)
+{
+    bool const     is64BitSrc = (desc.CheckSrcSize() == 8);
+    emitAttr const srcSize    = is64BitSrc ? EA_8BYTE : EA_4BYTE;
+
+    GetEmitter()->emitIns_I(INS_local_get, srcSize, WasmRegToIndex(reg));
+
+    switch (desc.CheckKind())
+    {
+        case GenIntCastDesc::CHECK_POSITIVE:
+        {
+            // INT or LONG to an unsigned type: the value must be non-negative.
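+            // For example (a sketch of the failing case): casting the INT -1 to UINT
+            // must throw, and the signed "value < 0" comparison emitted below yields
+            // true for -1, routing control to the overflow helper.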
+            GetEmitter()->emitIns_I(is64BitSrc ? INS_i64_const : INS_i32_const, srcSize, 0);
+            GetEmitter()->emitIns(is64BitSrc ? INS_i64_lt_s : INS_i32_lt_s);
+            genJumpToThrowHlpBlk(SCK_OVERFLOW);
+            break;
+        }
+
+        case GenIntCastDesc::CHECK_UINT_RANGE:
+        {
+            // (U)LONG to UINT
+            assert(is64BitSrc);
+            GetEmitter()->emitIns_I(INS_i64_const, srcSize, UINT32_MAX);
+            // We can reinterpret the LONG as a ULONG: negative values then become
+            // larger than UINT32_MAX and fail the same unsigned comparison.
+            GetEmitter()->emitIns(INS_i64_gt_u);
+            genJumpToThrowHlpBlk(SCK_OVERFLOW);
+            break;
+        }
+
+        case GenIntCastDesc::CHECK_POSITIVE_INT_RANGE:
+        {
+            // ULONG to INT
+            GetEmitter()->emitIns_I(INS_i64_const, srcSize, INT32_MAX);
+            GetEmitter()->emitIns(INS_i64_gt_u);
+            genJumpToThrowHlpBlk(SCK_OVERFLOW);
+            break;
+        }
+
+        case GenIntCastDesc::CHECK_INT_RANGE:
+        {
+            // LONG to INT
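+            // The value fits in an INT iff sign-extending its low 32 bits reproduces
+            // the original value. Roughly (a sketch, with the source in local N):
+            //
+            //   local.get N ; i64.extend32_s ; local.get N ; i64.ne
+            //
+            // followed by a branch to the overflow helper when the result is non-zero.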
+            GetEmitter()->emitIns(INS_i64_extend32_s);
+            GetEmitter()->emitIns_I(INS_local_get, srcSize, WasmRegToIndex(reg));
+            GetEmitter()->emitIns(INS_i64_ne);
+            genJumpToThrowHlpBlk(SCK_OVERFLOW);
+            break;
+        }
+
+        case GenIntCastDesc::CHECK_SMALL_INT_RANGE:
+        {
+            // (U)(INT|LONG) to a small int type
+            const int castMaxValue = desc.CheckSmallIntMax();
+            const int castMinValue = desc.CheckSmallIntMin();
+
+            if (castMinValue == 0)
+            {
+                // When the minimum is 0, a single unsigned upper-bound check is sufficient.
+                // For signed sources, negative values become large unsigned values and
+                // thus also trigger the overflow via the same comparison.
+                GetEmitter()->emitIns_I(is64BitSrc ? INS_i64_const : INS_i32_const, srcSize, castMaxValue);
+                GetEmitter()->emitIns(is64BitSrc ? INS_i64_gt_u : INS_i32_gt_u);
+            }
+            else
+            {
+                assert(!cast->IsUnsigned());
+                GetEmitter()->emitIns_I(is64BitSrc ? INS_i64_const : INS_i32_const, srcSize, castMaxValue);
+                GetEmitter()->emitIns(is64BitSrc ? INS_i64_gt_s : INS_i32_gt_s);
+                GetEmitter()->emitIns_I(INS_local_get, srcSize, WasmRegToIndex(reg));
+                GetEmitter()->emitIns_I(is64BitSrc ? INS_i64_const : INS_i32_const, srcSize, castMinValue);
+                GetEmitter()->emitIns(is64BitSrc ? INS_i64_lt_s : INS_i32_lt_s);
+                GetEmitter()->emitIns(INS_i32_or);
+            }
+            genJumpToThrowHlpBlk(SCK_OVERFLOW);
+            break;
+        }
+
+        default:
+            unreached();
+    }
+}
+
 //------------------------------------------------------------------------
 // genFloatToIntCast: Generate code for a floating point to integer cast
 //
@@ -1368,6 +1462,22 @@ void CodeGen::genJumpToThrowHlpBlk(SpecialCodeKind codeKind)
 void CodeGen::genCodeForNullCheck(GenTreeIndir* tree)
 {
     genConsumeAddress(tree->Addr());
+    genEmitNullCheck(REG_NA);
+}
+
+//---------------------------------------------------------------------
+// genEmitNullCheck - generate code for a null check
+//
+// Arguments:
+//    reg - register to check, or REG_NA if the value to check is already on the stack
+//
+void CodeGen::genEmitNullCheck(regNumber reg)
+{
+    if (reg != REG_NA)
+    {
+        GetEmitter()->emitIns_I(INS_local_get, EA_PTRSIZE, WasmRegToIndex(reg));
+    }
+
     GetEmitter()->emitIns_I(INS_I_const, EA_PTRSIZE, m_compiler->compMaxUncheckedOffsetForNullObject);
     GetEmitter()->emitIns(INS_I_le_u);
     genJumpToThrowHlpBlk(SCK_NULL_CHECK);
@@ -1629,15 +1739,22 @@ void CodeGen::genCodeForStoreInd(GenTreeStoreInd* tree)
 //------------------------------------------------------------------------
 // genCall: Produce code for a GT_CALL node
 //
+// Arguments:
+//    call - the GT_CALL node
+//
 void CodeGen::genCall(GenTreeCall* call)
 {
+    assert(!call->IsTailCall());
+
+    regNumber thisReg = REG_NA;
+
     if (call->NeedsNullCheck())
     {
-        NYI_WASM("Insert nullchecks for calls that need it in lowering");
+        CallArg* thisArg  = call->gtArgs.GetThisArg();
+        GenTree* thisNode = thisArg->GetNode();
+        thisReg           = GetMultiUseOperandReg(thisNode);
     }
 
-    assert(!call->IsTailCall());
-
     for (CallArg& arg : call->gtArgs.EarlyArgs())
     {
         genConsumeReg(arg.GetEarlyNode());
@@ -1648,6 +1765,11 @@ void CodeGen::genCall(GenTreeCall* call)
         genConsumeReg(arg.GetLateNode());
     }
 
+    if (call->NeedsNullCheck())
+    {
+        genEmitNullCheck(thisReg);
+    }
+
     genCallInstruction(call);
     WasmProduceReg(call);
 }
diff --git a/src/coreclr/jit/compiler.cpp b/src/coreclr/jit/compiler.cpp
index 5b360e494ca22e..f4ec64bdfef52a 100644
--- a/src/coreclr/jit/compiler.cpp
+++ b/src/coreclr/jit/compiler.cpp
@@ -766,7 +766,7 @@ var_types Compiler::getReturnTypeForStruct(CORINFO_CLASS_HANDLE clsHnd,
     }
     else
     {
-        howToReturnStruct = SPK_ByValue;
+        howToReturnStruct = SPK_PrimitiveType;
         useType           = WasmClassifier::ToJitType(abiType);
     }
 
diff --git a/src/coreclr/jit/gentree.cpp b/src/coreclr/jit/gentree.cpp
index cf4f69d8ca2376..756b5007e7f4dd 100644
--- a/src/coreclr/jit/gentree.cpp
+++ b/src/coreclr/jit/gentree.cpp
@@ -31040,6 +31040,11 @@ void ReturnTypeDesc::InitializeStructReturnType(Compiler* comp,
         m_regType[i] = comp->getJitGCType(gcPtrs[i]);
     }
 
+#elif defined(TARGET_WASM)
+
+    // For Wasm, structs are either returned by-ref or as primitives.
+    unreached();
+
 #else // TARGET_XXX
 
     // This target needs support here!
@@ -34180,3 +34185,26 @@ ValueSize ValueSize::FromJitType(var_types type)
         return ValueSize(genTypeSize(type));
     }
 }
+
+//------------------------------------------------------------------------
+// gtFirstNodeInOperandOrder: return the first node of this tree
+// in operand order
+//
+// Returns:
+//    If the tree is a leaf, returns the tree itself; otherwise, recurses on
+//    the first operand.
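+//    For example (a sketch): for ADD(MUL(a, b), c), laid out in stackified LIR
+//    as [a, b, MUL, c, ADD], walking ADD -> MUL -> a returns the node for "a".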
+//
+GenTree* GenTree::gtFirstNodeInOperandOrder()
+{
+    GenTree*             op = this;
+    GenTree::VisitResult visitResult;
+    do
+    {
+        visitResult = op->VisitOperands([&op](GenTree* operand) {
+            op = operand;
+            return GenTree::VisitResult::Abort;
+        });
+    } while (visitResult == GenTree::VisitResult::Abort);
+
+    return op;
+}
diff --git a/src/coreclr/jit/gentree.h b/src/coreclr/jit/gentree.h
index a0da22d21572df..0cdf4cce4a1fe8 100644
--- a/src/coreclr/jit/gentree.h
+++ b/src/coreclr/jit/gentree.h
@@ -1984,6 +1984,8 @@ struct GenTree
 
     inline GenTree* gtCommaStoreVal();
 
+    GenTree* gtFirstNodeInOperandOrder();
+
     // Return the child of this node if it is a GT_RELOAD or GT_COPY; otherwise simply return the node itself
     inline GenTree* gtSkipReloadOrCopy();
 
diff --git a/src/coreclr/jit/lower.cpp b/src/coreclr/jit/lower.cpp
index 656b58ee84fbf7..54309ffb17e14b 100644
--- a/src/coreclr/jit/lower.cpp
+++ b/src/coreclr/jit/lower.cpp
@@ -2012,8 +2012,24 @@ void Lowering::LowerArgsForCall(GenTreeCall* call)
 #endif // defined(TARGET_X86) && defined(FEATURE_IJW)
 
     LegalizeArgPlacement(call);
+    AfterLowerArgsForCall(call);
 }
 
+#if !defined(TARGET_WASM)
+
+//------------------------------------------------------------------------
+// AfterLowerArgsForCall: post-processing after a call's arguments have been lowered
+//
+// Arguments:
+//    call - Call node
+//
+void Lowering::AfterLowerArgsForCall(GenTreeCall* call)
+{
+    // No-op for non-Wasm targets.
+}
+
+#endif // !defined(TARGET_WASM)
+
 #if defined(TARGET_X86) && defined(FEATURE_IJW)
 //------------------------------------------------------------------------
 // LowerSpecialCopyArgs: Lower special copy arguments for P/Invoke IL stubs
diff --git a/src/coreclr/jit/lower.h b/src/coreclr/jit/lower.h
index 3ae6b68fada104..579e2dd283cccf 100644
--- a/src/coreclr/jit/lower.h
+++ b/src/coreclr/jit/lower.h
@@ -211,6 +211,7 @@ class Lowering final : public Phase
     GenTree* LowerVirtualVtableCall(GenTreeCall* call);
     GenTree* LowerVirtualStubCall(GenTreeCall* call);
     void     LowerArgsForCall(GenTreeCall* call);
+    void     AfterLowerArgsForCall(GenTreeCall* call);
 #if defined(TARGET_X86) && defined(FEATURE_IJW)
     void LowerSpecialCopyArgs(GenTreeCall* call);
     void InsertSpecialCopyArg(GenTreePutArgStk* putArgStk, CORINFO_CLASS_HANDLE argType, unsigned lclNum);
diff --git a/src/coreclr/jit/lowerwasm.cpp b/src/coreclr/jit/lowerwasm.cpp
index 9f68ac04f3843b..710b1d125c4b2f 100644
--- a/src/coreclr/jit/lowerwasm.cpp
+++ b/src/coreclr/jit/lowerwasm.cpp
@@ -121,7 +121,12 @@ GenTree* Lowering::LowerNeg(GenTreeOp* node)
     //
     GenTree* x    = node->gtGetOp1();
     GenTree* zero = m_compiler->gtNewZeroConNode(node->TypeGet());
-    BlockRange().InsertBefore(x, zero);
+
+    // To preserve stack order, we must insert the zero before the entire
+    // tree rooted at x.
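+    // For example (a sketch): for NEG(ADD(a, b)), the LIR order must become
+    // [zero, a, b, ADD, SUB] so the Wasm stack holds (0, x) when the SUB is
+    // reached; inserting the zero directly before the ADD node would instead
+    // yield [a, b, zero, ADD, ...] and mispair the operands on the stack.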
+    //
+    GenTree* insertBefore = x->gtFirstNodeInOperandOrder();
+    BlockRange().InsertBefore(insertBefore, zero);
     LowerNode(zero);
     node->ChangeOper(GT_SUB);
     node->gtOp1 = zero;
@@ -221,6 +226,11 @@ void Lowering::LowerPutArgStk(GenTreePutArgStk* putArgNode)
 void Lowering::LowerCast(GenTree* tree)
 {
     assert(tree->OperIs(GT_CAST));
+
+    if (tree->gtOverflow())
+    {
+        // The overflow check reads the cast operand again in codegen, so make
+        // sure it is kept available in a Wasm local.
+        tree->gtGetOp1()->gtLIRFlags |= LIR::Flags::MultiplyUsed;
+    }
 
     ContainCheckCast(tree->AsCast());
 }
@@ -463,3 +473,19 @@ void Lowering::AfterLowerBlock()
     Stackifier stackifier(this);
     stackifier.StackifyCurrentBlock();
 }
+
+//------------------------------------------------------------------------
+// AfterLowerArgsForCall: post-processing after a call's arguments have been lowered
+//
+// Arguments:
+//    call - Call node
+//
+void Lowering::AfterLowerArgsForCall(GenTreeCall* call)
+{
+    if (call->NeedsNullCheck())
+    {
+        // Prepare for the explicit null check: codegen will read the "this"
+        // argument a second time when emitting it.
+        CallArg* thisArg = call->gtArgs.GetThisArg();
+        thisArg->GetNode()->gtLIRFlags |= LIR::Flags::MultiplyUsed;
+    }
+}
diff --git a/src/coreclr/jit/regallocwasm.cpp b/src/coreclr/jit/regallocwasm.cpp
index 237b3e09b93ac9..af3300d3bb9baa 100644
--- a/src/coreclr/jit/regallocwasm.cpp
+++ b/src/coreclr/jit/regallocwasm.cpp
@@ -282,6 +282,14 @@ void WasmRegAlloc::CollectReferencesForNode(GenTree* node)
             CollectReferencesForDivMod(node->AsOp());
             break;
 
+        case GT_CALL:
+            CollectReferencesForCall(node->AsCall());
+            break;
+
+        case GT_CAST:
+            CollectReferencesForCast(node->AsOp());
+            break;
+
         default:
             assert(!node->OperIsLocalStore());
             break;
@@ -304,6 +312,37 @@ void WasmRegAlloc::CollectReferencesForDivMod(GenTreeOp* divModNode)
     ConsumeTemporaryRegForOperand(divModNode->gtGetOp1() DEBUGARG("div-by-zero / overflow check"));
 }
 
+//------------------------------------------------------------------------
+// CollectReferencesForCall: Collect virtual register references for a call.
+//
+// Consumes the temporary register for the call's "this" argument, if any.
+//
+// Arguments:
+//    callNode - The GT_CALL node
+//
+void WasmRegAlloc::CollectReferencesForCall(GenTreeCall* callNode)
+{
+    CallArg* thisArg = callNode->gtArgs.GetThisArg();
+
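+    // When the call needs an explicit null check, the "this" node is read twice:
+    // once as an argument and once by the check itself. Lowering marks the node
+    // multiply-used in that case, and (assuming that marking) the consume below
+    // assigns it a temporary Wasm local.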
+    if (thisArg != nullptr)
+    {
+        ConsumeTemporaryRegForOperand(thisArg->GetNode() DEBUGARG("call this argument"));
+    }
+}
+
+//------------------------------------------------------------------------
+// CollectReferencesForCast: Collect virtual register references for a cast.
+//
+// Consumes the temporary register for the cast's operand, which is re-read
+// by the overflow check.
+//
+// Arguments:
+//    castNode - The GT_CAST node
+//
+void WasmRegAlloc::CollectReferencesForCast(GenTreeOp* castNode)
+{
+    ConsumeTemporaryRegForOperand(castNode->gtGetOp1() DEBUGARG("cast overflow check"));
+}
+
 //------------------------------------------------------------------------
 // RewriteLocalStackStore: rewrite a store to the stack to STOREIND(LCL_ADDR, ...).
 //
@@ -318,16 +357,8 @@ void WasmRegAlloc::RewriteLocalStackStore(GenTreeLclVarCommon* lclNode)
 {
     // At this point, the IR is already stackified, so we just need to find the first node in the dataflow.
     // TODO-WASM-TP: this is nice and simple, but can we do this more efficiently?
-    GenTree*             value = lclNode->Data();
-    GenTree*             op    = value;
-    GenTree::VisitResult visitResult;
-    do
-    {
-        visitResult = op->VisitOperands([&op](GenTree* operand) {
-            op = operand;
-            return GenTree::VisitResult::Abort;
-        });
-    } while (visitResult == GenTree::VisitResult::Abort);
+    GenTree* value          = lclNode->Data();
+    GenTree* insertionPoint = value->gtFirstNodeInOperandOrder();
 
     // TODO-WASM-RA: figure out the address mode story here. Right now this will produce an address not folded
     // into the store's address mode. We can utilize a contained LEA, but that will require some liveness work.
@@ -348,7 +379,7 @@ void WasmRegAlloc::RewriteLocalStackStore(GenTreeLclVarCommon* lclNode)
     }
     CurrentRange().InsertAfter(lclNode, store);
     CurrentRange().Remove(lclNode);
-    CurrentRange().InsertBefore(op, lclNode);
+    CurrentRange().InsertBefore(insertionPoint, lclNode);
 }
 
 //------------------------------------------------------------------------
@@ -379,6 +410,14 @@ void WasmRegAlloc::CollectReference(GenTree* node)
     refs->Nodes[m_lastVirtualRegRefsCount++] = node;
 }
 
+//------------------------------------------------------------------------
+// RequestTemporaryRegisterForMultiplyUsedNode: request a temporary register for a node with multiple uses.
+//
+// The temporary will later be assigned a physical register.
+//
+// Arguments:
+//    node - A node possibly needing a temporary register
+//
 void WasmRegAlloc::RequestTemporaryRegisterForMultiplyUsedNode(GenTree* node)
 {
     if ((node->gtLIRFlags & LIR::Flags::MultiplyUsed) == LIR::Flags::None)
diff --git a/src/coreclr/jit/regallocwasm.h b/src/coreclr/jit/regallocwasm.h
index f87464b8c31499..2b9fa9e6324fec 100644
--- a/src/coreclr/jit/regallocwasm.h
+++ b/src/coreclr/jit/regallocwasm.h
@@ -119,6 +119,8 @@ class WasmRegAlloc : public RegAllocInterface
     void CollectReferencesForBlock(BasicBlock* block);
     void CollectReferencesForNode(GenTree* node);
     void CollectReferencesForDivMod(GenTreeOp* divModNode);
+    void CollectReferencesForCall(GenTreeCall* callNode);
+    void CollectReferencesForCast(GenTreeOp* castNode);
     void RewriteLocalStackStore(GenTreeLclVarCommon* node);
     void CollectReference(GenTree* node);
     void RequestTemporaryRegisterForMultiplyUsedNode(GenTree* node);
diff --git a/src/coreclr/jit/stacklevelsetter.cpp b/src/coreclr/jit/stacklevelsetter.cpp
index 08ff7071df7be9..91254f67c098bb 100644
--- a/src/coreclr/jit/stacklevelsetter.cpp
+++ b/src/coreclr/jit/stacklevelsetter.cpp
@@ -289,6 +289,13 @@ void StackLevelSetter::SetThrowHelperBlocks(GenTree* node, BasicBlock* block)
         case GT_NULLCHECK:
            SetThrowHelperBlock(SCK_NULL_CHECK, block);
            break;
+
+        case GT_CALL:
+            if (node->AsCall()->NeedsNullCheck())
+            {
+                SetThrowHelperBlock(SCK_NULL_CHECK, block);
+            }
+            break;
 #endif // defined(TARGET_WASM)
 
         default:
            // Other opers can target throw only due to overflow.