From 6a65c7829eb89f28f7339ee89982bc6f60c581cc Mon Sep 17 00:00:00 2001
From: Michael Gottesman
Date: Thu, 2 Nov 2023 15:19:59 -0700
Subject: [PATCH 1/3] [sil] Add tuple_addr_constructor, an instruction that can
 be used to initialize a tuple in memory from individual address and object
 components.

This commit just introduces the instruction. In a subsequent commit, I am going
to add support to SILGen to emit it.

This ensures that when we assign into a tuple var, we initialize it with a
single instruction instead of in pieces. The problem with initializing in
pieces is that diagnostic passes then see SIL that semantically looks like a
piecewise initialization, which could be diagnosed as an error.
---
 docs/SIL.rst                                |  65 ++++++
 include/swift/SIL/AddressWalker.h           |   3 +-
 include/swift/SIL/InstructionUtils.h        |  17 ++
 include/swift/SIL/SILBuilder.h              |   9 +
 include/swift/SIL/SILCloner.h               |  11 +
 include/swift/SIL/SILInstruction.h          |  70 ++++++
 include/swift/SIL/SILNode.h                 |   3 +
 include/swift/SIL/SILNodes.def              |   2 +
 lib/IRGen/IRGenSIL.cpp                      |   3 +
 lib/SIL/IR/OperandOwnership.cpp             |   9 +
 lib/SIL/IR/SILInstructions.cpp              |  12 +
 lib/SIL/IR/SILPrinter.cpp                   |  20 +-
 lib/SIL/Parser/ParseSIL.cpp                 |  58 +++++
 lib/SIL/Utils/InstructionUtils.cpp          |  46 ++++
 lib/SIL/Verifier/MemoryLifetimeVerifier.cpp |   9 +
 .../UtilityPasses/SerializeSILPass.cpp      |   1 +
 lib/SILOptimizer/Utils/SILInliner.cpp       |   1 +
 lib/Serialization/DeserializeSIL.cpp        |  46 ++++
 lib/Serialization/ModuleFormat.h            |   2 +-
 lib/Serialization/SILFormat.h               |  18 +-
 lib/Serialization/SerializeSIL.cpp          | 205 ++++++++++--------
 test/SIL/Parser/basic2.sil                  |  66 ++++++
 test/SIL/Serialization/basic2.sil           |  69 ++++++
 23 files changed, 643 insertions(+), 102 deletions(-)

diff --git a/docs/SIL.rst b/docs/SIL.rst
index ffae6ebdd28ff..5774d28ea8c41 100644
--- a/docs/SIL.rst
+++ b/docs/SIL.rst
@@ -4799,6 +4799,71 @@ eliminated. However, a memory location ``%a`` must not be accessed after
 ``destroy_addr %a`` (which has not yet been eliminated) regardless of its
 type.
 
+tuple_addr_constructor
+``````````````````````
+
+::
+
+  sil-instruction ::= 'tuple_addr_constructor' sil-tuple-addr-constructor-init sil-operand 'with' sil-tuple-addr-constructor-elements
+  sil-tuple-addr-constructor-init ::= init|assign
+  sil-tuple-addr-constructor-elements ::= '(' (sil-operand (',' sil-operand)*)? ')'
+
+  // %destAddr has the type $*(Type1, Type2, Type3). Note how we convert all of the types
+  // to their address form.
+  %1 = tuple_addr_constructor [init] %destAddr : $*(Type1, Type2, Type3) with (%a : $Type1, %b : $*Type2, %c : $Type3)
+
+Creates a new tuple in memory from an exploded list of object and address
+values. The SSA values form the leaf elements of the exploded tuple. For a
+simple tuple that only has top-level tuple elements, the instruction lowers as
+follows::
+
+  %1 = tuple_addr_constructor [init] %destAddr : $*(Type1, Type2, Type3) with (%a : $Type1, %b : $*Type2, %c : $Type3)
+
+  -->
+
+  %0 = tuple_element_addr %destAddr : $*(Type1, Type2, Type3), 0
+  store %a to [init] %0 : $*Type1
+  %1 = tuple_element_addr %destAddr : $*(Type1, Type2, Type3), 1
+  copy_addr %b to [init] %1 : $*Type2
+  %2 = tuple_element_addr %destAddr : $*(Type1, Type2, Type3), 2
+  store %c to [init] %2 : $*Type3
+
+An ``[assign]`` ``tuple_addr_constructor`` is lowered similarly, with each
+``store``/``copy_addr`` changed to its assign-to-destination form.
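+
+As a sketch (illustrative only; the assign lowering simply mirrors the init
+lowering above), the ``[assign]`` form of the same example would lower roughly
+as::
+
+  %1 = tuple_addr_constructor [assign] %destAddr : $*(Type1, Type2, Type3) with (%a : $Type1, %b : $*Type2, %c : $Type3)
+
+  -->
+
+  %0 = tuple_element_addr %destAddr : $*(Type1, Type2, Type3), 0
+  store %a to [assign] %0 : $*Type1
+  %1 = tuple_element_addr %destAddr : $*(Type1, Type2, Type3), 1
+  copy_addr %b to %1 : $*Type2
+  %2 = tuple_element_addr %destAddr : $*(Type1, Type2, Type3), 2
+  store %c to [assign] %2 : $*Type3
+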
+In contrast, if we have a more complicated tuple with sub-tuples, then we read
+one element from the list as we process the tuple recursively from left to
+right. For instance, we would lower a more complicated tuple as follows::
+
+  %1 = tuple_addr_constructor [init] %destAddr : $*((), (Type1, ((), Type2)), Type3) with (%a : $Type1, %b : $*Type2, %c : $Type3)
+
+  ->
+
+  %0 = tuple_element_addr %destAddr : $*((), (Type1, ((), Type2)), Type3), 1
+  %1 = tuple_element_addr %0 : $*(Type1, ((), Type2)), 0
+  store %a to [init] %1 : $*Type1
+  %2 = tuple_element_addr %0 : $*(Type1, ((), Type2)), 1
+  %3 = tuple_element_addr %2 : $*((), Type2), 1
+  copy_addr %b to [init] %3 : $*Type2
+  %4 = tuple_element_addr %destAddr : $*((), (Type1, ((), Type2)), Type3), 2
+  store %c to [init] %4 : $*Type3
+
+This instruction exists to enable SILGen to initialize and assign RValues into
+tuples with a single instruction. Since an RValue is a potentially exploded
+tuple, we are forced to use this representation here. If SILGen instead just
+used separate address projections and stores when it sees such an aggregate,
+diagnostic SIL passes could not tell the semantic difference between
+initializing a tuple in parts and initializing it all at once::
+
+  var arg = (Type1(), Type2())
+
+  // This looks the same at the SIL level...
+  arg = (a, b)
+
+  // ... as assigning in pieces, even though we have formed a new tuple.
+  arg.0 = a
+  arg.1 = b
+
 index_addr
 ``````````
 
 ::
diff --git a/include/swift/SIL/AddressWalker.h b/include/swift/SIL/AddressWalker.h
index 171d3ca9f3354..807c3f9297d4e 100644
--- a/include/swift/SIL/AddressWalker.h
+++ b/include/swift/SIL/AddressWalker.h
@@ -199,7 +199,8 @@ TransitiveAddressWalker::walk(SILValue projectedAddress) && {
         isa(user) || isa(user) ||
         isa(user) || isa(user) ||
         isa(user) || isa(user) ||
-        isa(user) || isa(user)) {
+        isa(user) || isa(user) ||
+        isa<TupleAddrConstructorInst>(user)) {
       callVisitUse(op);
       continue;
     }
diff --git a/include/swift/SIL/InstructionUtils.h b/include/swift/SIL/InstructionUtils.h
index 698bb9878438e..3d70e3c28e5b6 100644
--- a/include/swift/SIL/InstructionUtils.h
+++ b/include/swift/SIL/InstructionUtils.h
@@ -205,6 +205,23 @@ struct PolymorphicBuiltinSpecializedOverloadInfo {
 /// polymorphic builtin or does not have any available overload for these types,
 /// return SILValue().
 SILValue getStaticOverloadForSpecializedPolymorphicBuiltin(BuiltinInst *bi);
+
+/// Visit the exploded leaf elements of a tuple type that potentially contains
+/// a tree of tuples.
+///
+/// If the visitor returns false, we stop processing early. We return true if
+/// we visited all of the tuple elements without the visitor returning false.
+bool visitExplodedTupleType(SILType type,
+                            llvm::function_ref<bool(SILType)> callback);
+
+/// Visit the exploded leaf elements of a tuple value whose type potentially
+/// contains a tree of tuples.
+///
+/// If the visitor returns false, we stop processing early. We return true if
+/// we visited all of the tuple elements without the visitor returning false.
+bool visitExplodedTupleValue(
+    SILValue value,
+    llvm::function_ref<SILValue(SILValue, std::optional<unsigned>)> callback);
+
 } // end namespace swift
 
 #endif
diff --git a/include/swift/SIL/SILBuilder.h b/include/swift/SIL/SILBuilder.h
index 28f63cd7041bc..9d785981e6f2c 100644
--- a/include/swift/SIL/SILBuilder.h
+++ b/include/swift/SIL/SILBuilder.h
@@ -1644,6 +1644,15 @@ class SILBuilder {
 
   TupleInst *createTuple(SILLocation loc, ArrayRef<SILValue> elts);
 
+  TupleAddrConstructorInst *
+  createTupleAddrConstructor(SILLocation Loc, SILValue DestAddr,
+                             ArrayRef<SILValue> Elements,
+                             IsInitialization_t IsInitOfDest) {
+    return insert(TupleAddrConstructorInst::create(getSILDebugLocation(Loc),
+                                                   DestAddr, Elements,
+                                                   IsInitOfDest, getModule()));
+  }
+
   EnumInst *createEnum(SILLocation Loc, SILValue Operand,
                        EnumElementDecl *Element, SILType Ty) {
     return createEnum(Loc, Operand, Element, Ty,
diff --git a/include/swift/SIL/SILCloner.h b/include/swift/SIL/SILCloner.h
index ffd1279fe77f4..56e50fdbb406b 100644
--- a/include/swift/SIL/SILCloner.h
+++ b/include/swift/SIL/SILCloner.h
@@ -2181,6 +2181,17 @@ SILCloner<ImplClass>::visitTupleInst(TupleInst *Inst) {
                                   : ValueOwnershipKind(OwnershipKind::None)));
 }
 
+template <typename ImplClass>
+void SILCloner<ImplClass>::visitTupleAddrConstructorInst(
+    TupleAddrConstructorInst *Inst) {
+  auto Elements = getOpValueArray<8>(Inst->getElements());
+  getBuilder().setCurrentDebugScope(getOpScope(Inst->getDebugScope()));
+  recordClonedInstruction(Inst, getBuilder().createTupleAddrConstructor(
+                                    getOpLocation(Inst->getLoc()),
+                                    getOpValue(Inst->getDestValue()), Elements,
+                                    Inst->isInitializationOfDest()));
+}
+
 template <typename ImplClass>
 void
 SILCloner<ImplClass>::visitEnumInst(EnumInst *Inst) {
diff --git a/include/swift/SIL/SILInstruction.h b/include/swift/SIL/SILInstruction.h
index 80c55ca9ce79a..dc24803dde875 100644
--- a/include/swift/SIL/SILInstruction.h
+++ b/include/swift/SIL/SILInstruction.h
@@ -6339,6 +6339,76 @@ class TupleInst final : public InstructionBaseWithTrailingOperands<
   }
 };
 
+/// TupleAddrConstructorInst - a constructor for tuples in memory that can
+/// take both objects and addresses as operands. Intended only to be used for
+/// diagnostics and to be lowered after diagnostics run. Once we have opaque
+/// values this will no longer be necessary.
+///
+///   tuple_addr_constructor [init] dest with (operands)
+///
+/// This always consumes its operands, but will either init or assign into
+/// dest.
+class TupleAddrConstructorInst final
+    : public InstructionBaseWithTrailingOperands<
+          SILInstructionKind::TupleAddrConstructorInst,
+          TupleAddrConstructorInst, NonValueInstruction> {
+  friend SILBuilder;
+  USE_SHARED_UINT8;
+
+  TupleAddrConstructorInst(SILDebugLocation DebugLoc, ArrayRef<SILValue> Elts,
+                           IsInitialization_t IsInitOfDest)
+      : InstructionBaseWithTrailingOperands(Elts, DebugLoc) {
+    sharedUInt8().TupleAddrConstructorInst.isInitializationOfDest =
+        bool(IsInitOfDest);
+  }
+
+  static TupleAddrConstructorInst *create(SILDebugLocation DebugLoc,
+                                          SILValue DestAddr,
+                                          ArrayRef<SILValue> Elements,
+                                          IsInitialization_t IsInitOfDest,
+                                          SILModule &Mod);
+
+public:
+  enum {
+    Dest = 0,
+  };
+
+  Operand &getDest() { return getAllOperands().front(); }
+  const Operand &getDest() const { return getAllOperands().front(); }
+
+  SILValue getDestValue() const { return getDest().get(); }
+
+  /// The element operands referenced by this TupleAddrConstructorInst.
+  MutableArrayRef<Operand> getElementOperands() {
+    return getAllOperands().drop_front();
+  }
+
+  /// The element values referenced by this TupleAddrConstructorInst.
+  OperandValueArrayRef getElements() const {
+    return OperandValueArrayRef(getAllOperands().drop_front());
+  }
+
+  /// Return the i'th element value referenced by this TupleAddrConstructorInst.
+ SILValue getElement(unsigned i) const { return getElements()[i]; } + + unsigned getElementIndex(Operand *operand) { + assert(operand->getUser() == this); + assert(operand != &getDest() && "Cannot pass in the destination"); + return operand->getOperandNumber() + 1; + } + + TupleType *getTupleType() const { + return getDest().get()->getType().getRawASTType()->castTo(); + } + + IsInitialization_t isInitializationOfDest() const { + return IsInitialization_t( + sharedUInt8().TupleAddrConstructorInst.isInitializationOfDest); + } + void setIsInitializationOfDest(IsInitialization_t I) { + sharedUInt8().TupleAddrConstructorInst.isInitializationOfDest = (bool)I; + } +}; + /// Represents a loadable enum constructed from one of its /// elements. class EnumInst diff --git a/include/swift/SIL/SILNode.h b/include/swift/SIL/SILNode.h index 4cccd2441a3aa..29d1b147f9acb 100644 --- a/include/swift/SIL/SILNode.h +++ b/include/swift/SIL/SILNode.h @@ -251,6 +251,9 @@ class alignas(8) SILNode : isTakeOfSrc : 1, isInitializationOfDest : 1); + SHARED_FIELD(TupleAddrConstructorInst, uint8_t + isInitializationOfDest : 1); + SHARED_FIELD(PointerToAddressInst, uint8_t isStrict : 1, isInvariant : 1); diff --git a/include/swift/SIL/SILNodes.def b/include/swift/SIL/SILNodes.def index 555d7f74ea55f..15ff057778820 100644 --- a/include/swift/SIL/SILNodes.def +++ b/include/swift/SIL/SILNodes.def @@ -848,6 +848,8 @@ NON_VALUE_INST(CopyAddrInst, copy_addr, SILInstruction, MayHaveSideEffects, MayRelease) NON_VALUE_INST(ExplicitCopyAddrInst, explicit_copy_addr, SILInstruction, MayHaveSideEffects, MayRelease) +NON_VALUE_INST(TupleAddrConstructorInst, tuple_addr_constructor, + SILInstruction, MayHaveSideEffects, MayRelease) NON_VALUE_INST(DestroyAddrInst, destroy_addr, SILInstruction, MayHaveSideEffects, MayRelease) NON_VALUE_INST(EndLifetimeInst, end_lifetime, diff --git a/lib/IRGen/IRGenSIL.cpp b/lib/IRGen/IRGenSIL.cpp index 61a1e34f079f8..7a1a31f5aba64 100644 --- a/lib/IRGen/IRGenSIL.cpp +++ b/lib/IRGen/IRGenSIL.cpp @@ -1417,6 +1417,9 @@ class IRGenSILFunction : void visitMarkUnresolvedMoveAddrInst(MarkUnresolvedMoveAddrInst *mai) { llvm_unreachable("Valid only when ownership is enabled"); } + void visitTupleAddrConstructorInst(TupleAddrConstructorInst *i) { + llvm_unreachable("Valid only in raw SIL"); + } void visitDestroyAddrInst(DestroyAddrInst *i); void visitBindMemoryInst(BindMemoryInst *i); diff --git a/lib/SIL/IR/OperandOwnership.cpp b/lib/SIL/IR/OperandOwnership.cpp index d5a00270f6652..55eb979987867 100644 --- a/lib/SIL/IR/OperandOwnership.cpp +++ b/lib/SIL/IR/OperandOwnership.cpp @@ -676,6 +676,15 @@ OperandOwnership OperandOwnershipClassifier::visitKeyPathInst(KeyPathInst *I) { return OperandOwnership::ForwardingConsume; } +OperandOwnership OperandOwnershipClassifier::visitTupleAddrConstructorInst( + TupleAddrConstructorInst *inst) { + // If we have an object, then we have an instantaneous use... + if (getValue()->getType().isObject()) + return OperandOwnership::DestroyingConsume; + // Otherwise, we have a trivial use since we have an address. 
+ return OperandOwnership::TrivialUse; +} + //===----------------------------------------------------------------------===// // Builtin Use Checker //===----------------------------------------------------------------------===// diff --git a/lib/SIL/IR/SILInstructions.cpp b/lib/SIL/IR/SILInstructions.cpp index 36995368837ec..96c8cfa803cc2 100644 --- a/lib/SIL/IR/SILInstructions.cpp +++ b/lib/SIL/IR/SILInstructions.cpp @@ -1460,6 +1460,18 @@ TupleInst *TupleInst::create(SILDebugLocation Loc, SILType Ty, return ::new (Buffer) TupleInst(Loc, Ty, Elements, forwardingOwnershipKind); } +TupleAddrConstructorInst *TupleAddrConstructorInst::create( + SILDebugLocation Loc, SILValue DestAddr, ArrayRef Elements, + IsInitialization_t IsInitOfDest, SILModule &M) { + assert(DestAddr->getType().isAddress()); + auto Size = totalSizeToAlloc(Elements.size() + 1); + auto Buffer = M.allocateInst(Size, alignof(TupleAddrConstructorInst)); + llvm::SmallVector Data; + Data.push_back(DestAddr); + copy(Elements, std::back_inserter(Data)); + return ::new (Buffer) TupleAddrConstructorInst(Loc, Data, IsInitOfDest); +} + bool TupleExtractInst::isTrivialEltOfOneRCIDTuple() const { auto *F = getFunction(); diff --git a/lib/SIL/IR/SILPrinter.cpp b/lib/SIL/IR/SILPrinter.cpp index 63822a40583e4..a1317382c22db 100644 --- a/lib/SIL/IR/SILPrinter.cpp +++ b/lib/SIL/IR/SILPrinter.cpp @@ -2215,7 +2215,25 @@ class SILPrinter : public SILInstructionVisitor { *this << ')'; } } - + + void visitTupleAddrConstructorInst(TupleAddrConstructorInst *TI) { + // First print out our dest. + if (TI->isInitializationOfDest()) { + *this << "[init] "; + } else { + *this << "[assign] "; + } + *this << getIDAndType(TI->getDestValue()); + + *this << " with ("; + + llvm::interleave( + TI->getElements(), [&](const SILValue &V) { *this << getIDAndType(V); }, + [&] { *this << ", "; }); + + *this << ')'; + } + void visitEnumInst(EnumInst *UI) { *this << UI->getType() << ", " << SILDeclRef(UI->getElement(), SILDeclRef::Kind::EnumElement); diff --git a/lib/SIL/Parser/ParseSIL.cpp b/lib/SIL/Parser/ParseSIL.cpp index 8129552c44908..4664f80b9f36f 100644 --- a/lib/SIL/Parser/ParseSIL.cpp +++ b/lib/SIL/Parser/ParseSIL.cpp @@ -5328,6 +5328,64 @@ bool SILParser::parseSpecificSILInstruction(SILBuilder &B, ResultVal = B.createTuple(InstLoc, Ty, OpList); break; } + case SILInstructionKind::TupleAddrConstructorInst: { + // First parse [init] or [assign]. + StringRef InitOrAssign; + SILValue DestValue; + SourceLoc WithLoc, DestToken; + Identifier WithToken; + + if (!parseSILOptional(InitOrAssign, *this) || + parseTypedValueRef(DestValue, DestToken, B) || + parseSILIdentifier(WithToken, WithLoc, + diag::expected_tok_in_sil_instr, "with")) + return true; + + auto IsInit = + llvm::StringSwitch>(InitOrAssign) + .Case("init", IsInitialization_t::IsInitialization) + .Case("assign", IsInitialization_t::IsNotInitialization) + .Default({}); + if (!IsInit) { + P.diagnose(WithLoc, diag::expected_tok_in_sil_instr, + "[init] | [assign]"); + return true; + } + + if (WithToken.str() != "with") { + P.diagnose(WithLoc, diag::expected_tok_in_sil_instr, "with"); + return true; + } + + // If there is no type, parse the simple form. + if (P.parseToken(tok::l_paren, diag::expected_tok_in_sil_instr, "(")) { + P.diagnose(WithLoc, diag::expected_tok_in_sil_instr, "("); + return true; + } + + // Then parse our tuple element list. 
+ SmallVector TypeElts; + if (P.Tok.isNot(tok::r_paren)) { + do { + if (parseTypedValueRef(Val, B)) + return true; + OpList.push_back(Val); + TypeElts.push_back(Val->getType().getRawASTType()); + } while (P.consumeIf(tok::comma)); + } + + if (P.parseToken(tok::r_paren, diag::expected_tok_in_sil_instr, ")")) { + P.diagnose(WithLoc, diag::expected_tok_in_sil_instr, ")"); + return true; + } + + if (parseSILDebugLocation(InstLoc, B)) + return true; + + ResultVal = + B.createTupleAddrConstructor(InstLoc, DestValue, OpList, *IsInit); + break; + } case SILInstructionKind::EnumInst: { SILType Ty; SILDeclRef Elt; diff --git a/lib/SIL/Utils/InstructionUtils.cpp b/lib/SIL/Utils/InstructionUtils.cpp index f4da211239e90..d62022d5d4f21 100644 --- a/lib/SIL/Utils/InstructionUtils.cpp +++ b/lib/SIL/Utils/InstructionUtils.cpp @@ -748,6 +748,13 @@ RuntimeEffect swift::getRuntimeEffect(SILInstruction *inst, SILType &impactType) return RuntimeEffect::MetaData | RuntimeEffect::RefCounting; return RuntimeEffect::MetaData; } + case SILInstructionKind::TupleAddrConstructorInst: { + auto *ca = cast(inst); + impactType = ca->getDestValue()->getType(); + if (!ca->isInitializationOfDest()) + return RuntimeEffect::MetaData | RuntimeEffect::Releasing; + return RuntimeEffect::MetaData; + } case SILInstructionKind::ExplicitCopyAddrInst: { auto *ca = cast(inst); impactType = ca->getSrc()->getType(); @@ -1290,3 +1297,42 @@ swift::getStaticOverloadForSpecializedPolymorphicBuiltin(BuiltinInst *bi) { return newBI; } + +//===----------------------------------------------------------------------===// +// Exploded Tuple Visitors +//===----------------------------------------------------------------------===// + +bool swift::visitExplodedTupleType(SILType inputType, + llvm::function_ref callback) { + auto tupType = inputType.getAs(); + if (!tupType || tupType.containsPackExpansionType()) { + return callback(inputType); + } + + for (auto elt : tupType->getElementTypes()) { + auto eltSILTy = SILType::getPrimitiveType(elt->getCanonicalType(), + inputType.getCategory()); + if (!visitExplodedTupleType(eltSILTy, callback)) + return false; + } + + return true; +} + +bool swift::visitExplodedTupleValue( + SILValue inputValue, + llvm::function_ref)> callback) { + SILType inputType = inputValue->getType(); + auto tupType = inputType.getAs(); + if (!tupType || tupType.containsPackExpansionType()) { + return callback(inputValue, {}); + } + + for (auto eltIndex : range(tupType->getNumElements())) { + auto elt = callback(inputValue, eltIndex); + if (!visitExplodedTupleValue(elt, callback)) + return false; + } + + return true; +} diff --git a/lib/SIL/Verifier/MemoryLifetimeVerifier.cpp b/lib/SIL/Verifier/MemoryLifetimeVerifier.cpp index c928c9157f675..c98dea4d1a77c 100644 --- a/lib/SIL/Verifier/MemoryLifetimeVerifier.cpp +++ b/lib/SIL/Verifier/MemoryLifetimeVerifier.cpp @@ -411,6 +411,15 @@ void MemoryLifetimeVerifier::initDataflowInBlock(SILBasicBlock *block, } break; } + case SILInstructionKind::TupleAddrConstructorInst: { + auto *taci = cast(&I); + for (SILValue elt : taci->getElements()) { + if (elt->getType().isAddress()) + killBits(state, elt); + } + genBits(state, taci->getDestValue()); + break; + } case SILInstructionKind::DestroyAddrInst: case SILInstructionKind::DeallocStackInst: killBits(state, I.getOperand(0)); diff --git a/lib/SILOptimizer/UtilityPasses/SerializeSILPass.cpp b/lib/SILOptimizer/UtilityPasses/SerializeSILPass.cpp index bacedb1f9ec82..3c1e2836f556e 100644 --- a/lib/SILOptimizer/UtilityPasses/SerializeSILPass.cpp +++ 
b/lib/SILOptimizer/UtilityPasses/SerializeSILPass.cpp @@ -240,6 +240,7 @@ static bool hasOpaqueArchetype(TypeExpansionContext context, case SILInstructionKind::ObjCProtocolInst: case SILInstructionKind::ObjectInst: case SILInstructionKind::TupleInst: + case SILInstructionKind::TupleAddrConstructorInst: case SILInstructionKind::TupleExtractInst: case SILInstructionKind::TuplePackExtractInst: case SILInstructionKind::TupleElementAddrInst: diff --git a/lib/SILOptimizer/Utils/SILInliner.cpp b/lib/SILOptimizer/Utils/SILInliner.cpp index 16f944428f2c0..90552f7140426 100644 --- a/lib/SILOptimizer/Utils/SILInliner.cpp +++ b/lib/SILOptimizer/Utils/SILInliner.cpp @@ -1043,6 +1043,7 @@ InlineCost swift::instructionInlineCost(SILInstruction &I) { case SILInstructionKind::CopyBlockWithoutEscapingInst: case SILInstructionKind::CopyAddrInst: case SILInstructionKind::ExplicitCopyAddrInst: + case SILInstructionKind::TupleAddrConstructorInst: case SILInstructionKind::MarkUnresolvedMoveAddrInst: case SILInstructionKind::RetainValueInst: case SILInstructionKind::RetainValueAddrInst: diff --git a/lib/Serialization/DeserializeSIL.cpp b/lib/Serialization/DeserializeSIL.cpp index f9a3fd6fbfc15..a106f70e6b336 100644 --- a/lib/Serialization/DeserializeSIL.cpp +++ b/lib/Serialization/DeserializeSIL.cpp @@ -1150,6 +1150,7 @@ bool SILDeserializer::readSILInstruction(SILFunction *Fn, SourceLoc SLoc; ApplyOptions ApplyOpts; ArrayRef ListOfValues; + SILLocation Loc = RegularLocation(SLoc); ValueOwnershipKind forwardingOwnership(OwnershipKind::Any); auto decodeValueOwnership = [](unsigned field){ @@ -1208,6 +1209,13 @@ bool SILDeserializer::readSILInstruction(SILFunction *Fn, forwardingOwnership = decodeValueOwnership(ownershipField); break; } + case SIL_ONE_TYPE_VALUES_CATEGORIES: { + // NOTE: This is the same as Values except we smuggle in the category in the + // top bit. + SILOneTypeValuesCategoriesLayout::readRecord( + scratch, RawOpCode, TyID, TyCategory, Attr, ListOfValues); + break; + } case SIL_TWO_OPERANDS: SILTwoOperandsLayout::readRecord(scratch, RawOpCode, Attr, TyID, TyCategory, ValID, @@ -2563,6 +2571,44 @@ bool SILDeserializer::readSILInstruction(SILFunction *Fn, Loc, getSILType(Ty, (SILValueCategory)TyCategory, Fn), OpList); break; } + case SILInstructionKind::TupleAddrConstructorInst: { + assert(RecordKind == SIL_ONE_TYPE_VALUES_CATEGORIES); + + // Format: A type followed by a list of values. A value is expressed by 2 + // IDs: ValueID, ValueResultNumber. The type is the type of the first + // element (which is our dest). + auto Ty = MF->getType(TyID); + TupleType *TT = Ty->castTo(); + assert( + TT && + "Type of the DestAddr of a TupleAddrConstructor should be TupleType"); + assert(ListOfValues.size() >= 2 && + "Should have at least a dest and one element"); + + auto getValue = [&](Type type, uint64_t value) -> SILValue { + assert((value & 0xFFFFFFFF00000000) == 0 && + "High bits should never be set"); + uint32_t count = value & 0x7FFFFFFF; + SILValueCategory category = value & 0x80000000 ? 
SILValueCategory::Address + : SILValueCategory::Object; + return getLocalValue(count, getSILType(type, category, Fn)); + }; + + SILValue DestAddr = getValue(TT, ListOfValues[0]); + + SmallVector OpList; + unsigned Count = 1; + + visitExplodedTupleType(DestAddr->getType(), [&](SILType eltType) { + OpList.push_back(getValue(eltType.getRawASTType(), ListOfValues[Count])); + ++Count; + return true; + }); + auto IsInitOfDest = IsInitialization_t(Attr & 0x1); + ResultInst = + Builder.createTupleAddrConstructor(Loc, DestAddr, OpList, IsInitOfDest); + break; + } case SILInstructionKind::ObjectInst: { assert(RecordKind == SIL_ONE_TYPE_VALUES && "Layout should be OneTypeValues."); diff --git a/lib/Serialization/ModuleFormat.h b/lib/Serialization/ModuleFormat.h index c9f434bcd19d7..6ac85afdba268 100644 --- a/lib/Serialization/ModuleFormat.h +++ b/lib/Serialization/ModuleFormat.h @@ -58,7 +58,7 @@ const uint16_t SWIFTMODULE_VERSION_MAJOR = 0; /// describe what change you made. The content of this comment isn't important; /// it just ensures a conflict if two people change the module format. /// Don't worry about adhering to the 80-column limit for this line. -const uint16_t SWIFTMODULE_VERSION_MINOR = 817; // Opaque type export details +const uint16_t SWIFTMODULE_VERSION_MINOR = 818; // tuple_addr_constructor /// A standard hash seed used for all string hashes in a serialized module. /// diff --git a/lib/Serialization/SILFormat.h b/lib/Serialization/SILFormat.h index 29eb90a077665..56f2166818507 100644 --- a/lib/Serialization/SILFormat.h +++ b/lib/Serialization/SILFormat.h @@ -134,6 +134,7 @@ namespace sil_block { SIL_ONE_TYPE_ONE_OPERAND, SIL_ONE_TYPE_VALUES, SIL_ONE_TYPE_OWNERSHIP_VALUES, + SIL_ONE_TYPE_VALUES_CATEGORIES, SIL_TWO_OPERANDS, SIL_TAIL_ADDR, SIL_INST_APPLY, @@ -382,10 +383,10 @@ namespace sil_block { // SIL instructions with one type and a list of values. using SILOneTypeValuesLayout = BCRecordLayout< SIL_ONE_TYPE_VALUES, - SILInstOpCodeField, - TypeIDField, - SILTypeCategoryField, - BCArray + SILInstOpCodeField, // opcode + TypeIDField, // destType + SILTypeCategoryField, // destCategory + BCArray // operand ids >; // SIL instructions with one type, forwarding ownership, and a list of values. @@ -398,6 +399,15 @@ namespace sil_block { SILTypeCategoryField, BCArray>; + using SILOneTypeValuesCategoriesLayout = BCRecordLayout< + SIL_ONE_TYPE_VALUES_CATEGORIES, + SILInstOpCodeField, // opcode + TypeIDField, // destType + SILTypeCategoryField, // destCategory + BCFixed<1>, // options + BCArray> // operand id and categories. 
+ >; + enum ApplyKind : unsigned { SIL_APPLY = 0, SIL_PARTIAL_APPLY, diff --git a/lib/Serialization/SerializeSIL.cpp b/lib/Serialization/SerializeSIL.cpp index 17bb7cc2ae48b..81ddf1c665038 100644 --- a/lib/Serialization/SerializeSIL.cpp +++ b/lib/Serialization/SerializeSIL.cpp @@ -907,12 +907,10 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) { assert(OpType.isObject()); Args.push_back(S.addTypeRef(OpType.getRawASTType())); } - SILOneTypeValuesLayout::emitRecord(Out, ScratchRecord, abbrCode, - (unsigned)SI.getKind(), - S.addTypeRef( - OI->getType().getRawASTType()), - (unsigned)OI->getType().getCategory(), - Args); + SILOneTypeValuesLayout::emitRecord( + Out, ScratchRecord, abbrCode, (unsigned)SI.getKind(), + S.addTypeRef(OI->getType().getRawASTType()), + (unsigned)OI->getType().getCategory(), Args); break; } @@ -1081,12 +1079,10 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) { assert(OpType.isObject()); Args.push_back(S.addTypeRef(OpType.getRawASTType())); } - SILOneTypeValuesLayout::emitRecord(Out, ScratchRecord, abbrCode, - (unsigned)SI.getKind(), - S.addTypeRef( - ARI->getType().getRawASTType()), - (unsigned)ARI->getType().getCategory(), - Args); + SILOneTypeValuesLayout::emitRecord( + Out, ScratchRecord, abbrCode, (unsigned)SI.getKind(), + S.addTypeRef(ARI->getType().getRawASTType()), + (unsigned)ARI->getType().getCategory(), Args); break; } case SILInstructionKind::AllocStackInst: { @@ -1283,10 +1279,10 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) { ListOfValues.push_back(addValueRef(Elt)); } - SILOneTypeValuesLayout::emitRecord(Out, ScratchRecord, - SILAbbrCodes[SILOneTypeValuesLayout::Code], - (unsigned)SI.getKind(), - BasicBlockMap[BrI->getDestBB()], 0, ListOfValues); + SILOneTypeValuesLayout::emitRecord( + Out, ScratchRecord, SILAbbrCodes[SILOneTypeValuesLayout::Code], + (unsigned)SI.getKind(), BasicBlockMap[BrI->getDestBB()], 0, + ListOfValues); break; } case SILInstructionKind::CondBranchInst: { @@ -1312,12 +1308,11 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) { ListOfValues.push_back(addValueRef(Elt)); } - SILOneTypeValuesLayout::emitRecord(Out, ScratchRecord, - SILAbbrCodes[SILOneTypeValuesLayout::Code], + SILOneTypeValuesLayout::emitRecord( + Out, ScratchRecord, SILAbbrCodes[SILOneTypeValuesLayout::Code], (unsigned)SI.getKind(), S.addTypeRef(CBI->getCondition()->getType().getRawASTType()), - (unsigned)CBI->getCondition()->getType().getCategory(), - ListOfValues); + (unsigned)CBI->getCondition()->getType().getCategory(), ListOfValues); break; } case SILInstructionKind::AwaitAsyncContinuationInst: { @@ -1332,12 +1327,11 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) { if (auto errorBB = AACI->getErrorBB()) { ListOfValues.push_back(BasicBlockMap[errorBB]); } - SILOneTypeValuesLayout::emitRecord(Out, ScratchRecord, - SILAbbrCodes[SILOneTypeValuesLayout::Code], - (unsigned)SI.getKind(), - S.addTypeRef(AACI->getOperand()->getType().getRawASTType()), - (unsigned)AACI->getOperand()->getType().getCategory(), - ListOfValues); + SILOneTypeValuesLayout::emitRecord( + Out, ScratchRecord, SILAbbrCodes[SILOneTypeValuesLayout::Code], + (unsigned)SI.getKind(), + S.addTypeRef(AACI->getOperand()->getType().getRawASTType()), + (unsigned)AACI->getOperand()->getType().getCategory(), ListOfValues); break; } case SILInstructionKind::SwitchEnumInst: @@ -1435,12 +1429,11 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) { ListOfValues.push_back(addValueRef(value)); 
ListOfValues.push_back(BasicBlockMap[dest]); } - SILOneTypeValuesLayout::emitRecord(Out, ScratchRecord, - SILAbbrCodes[SILOneTypeValuesLayout::Code], + SILOneTypeValuesLayout::emitRecord( + Out, ScratchRecord, SILAbbrCodes[SILOneTypeValuesLayout::Code], (unsigned)SI.getKind(), S.addTypeRef(SII->getOperand()->getType().getRawASTType()), - (unsigned)SII->getOperand()->getType().getCategory(), - ListOfValues); + (unsigned)SII->getOperand()->getType().getCategory(), ListOfValues); break; } case SILInstructionKind::UnownedCopyValueInst: @@ -1574,8 +1567,8 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) { } args.push_back(BasicBlockMap[YI->getResumeBB()]); args.push_back(BasicBlockMap[YI->getUnwindBB()]); - SILOneTypeValuesLayout::emitRecord(Out, ScratchRecord, - SILAbbrCodes[SILOneTypeValuesLayout::Code], + SILOneTypeValuesLayout::emitRecord( + Out, ScratchRecord, SILAbbrCodes[SILOneTypeValuesLayout::Code], (unsigned)YI->getKind(), 0, 0, args); break; } @@ -1823,8 +1816,8 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) { ListOfValues.push_back(addValueRef(Elt)); } - SILOneTypeValuesLayout::emitRecord(Out, ScratchRecord, - SILAbbrCodes[SILOneTypeValuesLayout::Code], + SILOneTypeValuesLayout::emitRecord( + Out, ScratchRecord, SILAbbrCodes[SILOneTypeValuesLayout::Code], (unsigned)SI.getKind(), 0, 0, ListOfValues); break; } @@ -1983,11 +1976,12 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) { S.addTypeRef(CI->getTargetFormalType()) }; - SILOneTypeValuesLayout::emitRecord(Out, ScratchRecord, - SILAbbrCodes[SILOneTypeValuesLayout::Code], (unsigned)SI.getKind(), - S.addTypeRef(CI->getTargetLoweredType().getRawASTType()), - (unsigned)CI->getTargetLoweredType().getCategory(), - llvm::makeArrayRef(listOfValues)); + SILOneTypeValuesLayout::emitRecord( + Out, ScratchRecord, SILAbbrCodes[SILOneTypeValuesLayout::Code], + (unsigned)SI.getKind(), + S.addTypeRef(CI->getTargetLoweredType().getRawASTType()), + (unsigned)CI->getTargetLoweredType().getCategory(), + llvm::makeArrayRef(listOfValues)); break; } case SILInstructionKind::UnconditionalCheckedCastAddrInst: { @@ -2000,11 +1994,12 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) { S.addTypeRef(CI->getTargetFormalType()), addValueRef(CI->getDest()) }; - SILOneTypeValuesLayout::emitRecord(Out, ScratchRecord, - SILAbbrCodes[SILOneTypeValuesLayout::Code], (unsigned)SI.getKind(), - S.addTypeRef(CI->getTargetLoweredType().getRawASTType()), - (unsigned)CI->getTargetLoweredType().getCategory(), - llvm::makeArrayRef(listOfValues)); + SILOneTypeValuesLayout::emitRecord( + Out, ScratchRecord, SILAbbrCodes[SILOneTypeValuesLayout::Code], + (unsigned)SI.getKind(), + S.addTypeRef(CI->getTargetLoweredType().getRawASTType()), + (unsigned)CI->getTargetLoweredType().getCategory(), + llvm::makeArrayRef(listOfValues)); break; } case SILInstructionKind::UncheckedRefCastAddrInst: { @@ -2017,11 +2012,12 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) { S.addTypeRef(CI->getTargetFormalType()), addValueRef(CI->getDest()) }; - SILOneTypeValuesLayout::emitRecord(Out, ScratchRecord, - SILAbbrCodes[SILOneTypeValuesLayout::Code], (unsigned)SI.getKind(), - S.addTypeRef(CI->getTargetLoweredType().getRawASTType()), - (unsigned)CI->getTargetLoweredType().getCategory(), - llvm::makeArrayRef(listOfValues)); + SILOneTypeValuesLayout::emitRecord( + Out, ScratchRecord, SILAbbrCodes[SILOneTypeValuesLayout::Code], + (unsigned)SI.getKind(), + 
S.addTypeRef(CI->getTargetLoweredType().getRawASTType()), + (unsigned)CI->getTargetLoweredType().getCategory(), + llvm::makeArrayRef(listOfValues)); break; } @@ -2200,13 +2196,9 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) { ListOfValues.push_back(addValueRef(indexOperand)); SILOneTypeValuesLayout::emitRecord( - Out, - ScratchRecord, - SILAbbrCodes[SILOneTypeValuesLayout::Code], - (unsigned)SI.getKind(), - S.addTypeRef(boundType.getRawASTType()), - (unsigned)boundType.getCategory(), - ListOfValues); + Out, ScratchRecord, SILAbbrCodes[SILOneTypeValuesLayout::Code], + (unsigned)SI.getKind(), S.addTypeRef(boundType.getRawASTType()), + (unsigned)boundType.getCategory(), ListOfValues); break; } case SILInstructionKind::RebindMemoryInst: { @@ -2294,10 +2286,9 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) { ListOfValues.push_back(addValueRef(Elt)); } - SILOneTypeValuesLayout::emitRecord(Out, ScratchRecord, - SILAbbrCodes[SILOneTypeValuesLayout::Code], - (unsigned)SI.getKind(), - S.addTypeRef(StrI->getType().getRawASTType()), + SILOneTypeValuesLayout::emitRecord( + Out, ScratchRecord, SILAbbrCodes[SILOneTypeValuesLayout::Code], + (unsigned)SI.getKind(), S.addTypeRef(StrI->getType().getRawASTType()), (unsigned)StrI->getType().getCategory(), ListOfValues); break; } @@ -2337,11 +2328,34 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) { } unsigned abbrCode = SILAbbrCodes[SILOneTypeValuesLayout::Code]; - SILOneTypeValuesLayout::emitRecord(Out, ScratchRecord, abbrCode, - (unsigned)SI.getKind(), + SILOneTypeValuesLayout::emitRecord( + Out, ScratchRecord, abbrCode, (unsigned)SI.getKind(), S.addTypeRef(TI->getType().getRawASTType()), - (unsigned)TI->getType().getCategory(), - ListOfValues); + (unsigned)TI->getType().getCategory(), ListOfValues); + break; + } + case SILInstructionKind::TupleAddrConstructorInst: { + // Format: a type followed by a list of values. A value is expressed by + // 2 IDs: ValueID, ValueResultNumber. + const auto *TI = cast(&SI); + SmallVector ListOfValues; + auto getValue = [&](SILValue value) -> uint64_t { + uint32_t result = addValueRef(value); + // Set the top bit if we are an address. + result |= value->getType().isObject() ? 
0 : 0x80000000; + return result; + }; + ListOfValues.push_back(getValue(TI->getDestValue())); + for (auto Elt : TI->getElements()) { + ListOfValues.push_back(getValue(Elt)); + } + unsigned abbrCode = SILAbbrCodes[SILOneTypeValuesCategoriesLayout::Code]; + unsigned options = 0; + options |= bool(TI->isInitializationOfDest()); + SILOneTypeValuesCategoriesLayout::emitRecord( + Out, ScratchRecord, abbrCode, (unsigned)SI.getKind(), + S.addTypeRef(TI->getDestValue()->getType().getRawASTType()), + (unsigned)SILValueCategory::Address, options, ListOfValues); break; } case SILInstructionKind::EnumInst: { @@ -2396,9 +2410,9 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) { SmallVector ListOfValues; handleMethodInst(CMI, CMI->getOperand(), ListOfValues); - SILOneTypeValuesLayout::emitRecord(Out, ScratchRecord, - SILAbbrCodes[SILOneTypeValuesLayout::Code], (unsigned)SI.getKind(), - S.addTypeRef(Ty.getRawASTType()), + SILOneTypeValuesLayout::emitRecord( + Out, ScratchRecord, SILAbbrCodes[SILOneTypeValuesLayout::Code], + (unsigned)SI.getKind(), S.addTypeRef(Ty.getRawASTType()), (unsigned)Ty.getCategory(), ListOfValues); break; } @@ -2411,9 +2425,9 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) { SmallVector ListOfValues; handleMethodInst(SMI, SMI->getOperand(), ListOfValues); - SILOneTypeValuesLayout::emitRecord(Out, ScratchRecord, - SILAbbrCodes[SILOneTypeValuesLayout::Code], (unsigned)SI.getKind(), - S.addTypeRef(Ty.getRawASTType()), + SILOneTypeValuesLayout::emitRecord( + Out, ScratchRecord, SILAbbrCodes[SILOneTypeValuesLayout::Code], + (unsigned)SI.getKind(), S.addTypeRef(Ty.getRawASTType()), (unsigned)Ty.getCategory(), ListOfValues); break; } @@ -2426,9 +2440,9 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) { SmallVector ListOfValues; handleMethodInst(OMI, OMI->getOperand(), ListOfValues); - SILOneTypeValuesLayout::emitRecord(Out, ScratchRecord, - SILAbbrCodes[SILOneTypeValuesLayout::Code], (unsigned)SI.getKind(), - S.addTypeRef(Ty.getRawASTType()), + SILOneTypeValuesLayout::emitRecord( + Out, ScratchRecord, SILAbbrCodes[SILOneTypeValuesLayout::Code], + (unsigned)SI.getKind(), S.addTypeRef(Ty.getRawASTType()), (unsigned)Ty.getCategory(), ListOfValues); break; } @@ -2441,9 +2455,9 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) { SmallVector ListOfValues; handleMethodInst(SMI, SMI->getOperand(), ListOfValues); - SILOneTypeValuesLayout::emitRecord(Out, ScratchRecord, - SILAbbrCodes[SILOneTypeValuesLayout::Code], (unsigned)SI.getKind(), - S.addTypeRef(Ty.getRawASTType()), + SILOneTypeValuesLayout::emitRecord( + Out, ScratchRecord, SILAbbrCodes[SILOneTypeValuesLayout::Code], + (unsigned)SI.getKind(), S.addTypeRef(Ty.getRawASTType()), (unsigned)Ty.getCategory(), ListOfValues); break; } @@ -2457,8 +2471,9 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) { ListOfValues.push_back(BasicBlockMap[DMB->getHasMethodBB()]); ListOfValues.push_back(BasicBlockMap[DMB->getNoMethodBB()]); - SILOneTypeValuesLayout::emitRecord(Out, ScratchRecord, - SILAbbrCodes[SILOneTypeValuesLayout::Code], (unsigned)SI.getKind(), + SILOneTypeValuesLayout::emitRecord( + Out, ScratchRecord, SILAbbrCodes[SILOneTypeValuesLayout::Code], + (unsigned)SI.getKind(), S.addTypeRef(DMB->getOperand()->getType().getRawASTType()), (unsigned)DMB->getOperand()->getType().getCategory(), ListOfValues); break; @@ -2498,11 +2513,12 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) { BasicBlockMap[CBI->getSuccessBB()], 
BasicBlockMap[CBI->getFailureBB()] }; - SILOneTypeValuesLayout::emitRecord(Out, ScratchRecord, - SILAbbrCodes[SILOneTypeValuesLayout::Code], (unsigned)SI.getKind(), - S.addTypeRef(CBI->getTargetLoweredType().getRawASTType()), - (unsigned)CBI->getTargetLoweredType().getCategory(), - llvm::makeArrayRef(listOfValues)); + SILOneTypeValuesLayout::emitRecord( + Out, ScratchRecord, SILAbbrCodes[SILOneTypeValuesLayout::Code], + (unsigned)SI.getKind(), + S.addTypeRef(CBI->getTargetLoweredType().getRawASTType()), + (unsigned)CBI->getTargetLoweredType().getCategory(), + llvm::makeArrayRef(listOfValues)); break; } case SILInstructionKind::InitBlockStorageHeaderInst: { @@ -2518,12 +2534,11 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) { S.addTypeRef(IBSHI->getInvokeFunction()->getType().getRawASTType())); // Always a value, don't need to save category ListOfValues.push_back(S.addSubstitutionMapRef(IBSHI->getSubstitutions())); - - SILOneTypeValuesLayout::emitRecord(Out, ScratchRecord, - SILAbbrCodes[SILOneTypeValuesLayout::Code], (unsigned)SI.getKind(), - S.addTypeRef(IBSHI->getType().getRawASTType()), - (unsigned)IBSHI->getType().getCategory(), - ListOfValues); + + SILOneTypeValuesLayout::emitRecord( + Out, ScratchRecord, SILAbbrCodes[SILOneTypeValuesLayout::Code], + (unsigned)SI.getKind(), S.addTypeRef(IBSHI->getType().getRawASTType()), + (unsigned)IBSHI->getType().getCategory(), ListOfValues); break; } @@ -2561,12 +2576,11 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) { ListOfValues.push_back(S.addTypeRef(value->getType().getRawASTType())); ListOfValues.push_back((unsigned)value->getType().getCategory()); } - - SILOneTypeValuesLayout::emitRecord(Out, ScratchRecord, - SILAbbrCodes[SILOneTypeValuesLayout::Code], (unsigned)SI.getKind(), - S.addTypeRef(KPI->getType().getRawASTType()), - (unsigned)KPI->getType().getCategory(), - ListOfValues); + + SILOneTypeValuesLayout::emitRecord( + Out, ScratchRecord, SILAbbrCodes[SILOneTypeValuesLayout::Code], + (unsigned)SI.getKind(), S.addTypeRef(KPI->getType().getRawASTType()), + (unsigned)KPI->getType().getCategory(), ListOfValues); break; } @@ -3100,6 +3114,7 @@ void SILSerializer::writeSILBlock(const SILModule *SILMod) { registerSILAbbr(); registerSILAbbr(); registerSILAbbr(); + registerSILAbbr(); registerSILAbbr(); registerSILAbbr(); registerSILAbbr(); diff --git a/test/SIL/Parser/basic2.sil b/test/SIL/Parser/basic2.sil index c98ab36f0e6c0..ac144e4182ce8 100644 --- a/test/SIL/Parser/basic2.sil +++ b/test/SIL/Parser/basic2.sil @@ -218,4 +218,70 @@ bb0: dealloc_box %1 : ${ var Builtin.NativeObject } %9999 = tuple () return %9999 : $() +} + +// CHECK-LABEL: sil [ossa] @tuple_addr_constructor_assign : $@convention(thin) (@in Builtin.NativeObject, @in (Builtin.NativeObject, Builtin.NativeObject)) -> () { +// CHECK: bb0([[LHS:%.*]] : $*Builtin.NativeObject, +// CHECK: [[RHS:%.*]] = alloc_stack $Builtin.NativeObject +// CHECK: [[DEST:%.*]] = alloc_stack $(Builtin.NativeObject, Builtin.NativeObject) +// CHECK: tuple_addr_constructor [assign] [[DEST]] : $*(Builtin.NativeObject, Builtin.NativeObject) with ([[LHS]] : $*Builtin.NativeObject, [[RHS]] : $*Builtin.NativeObject) +// CHECK: } // end sil function 'tuple_addr_constructor_assign' +sil [ossa] @tuple_addr_constructor_assign : $@convention(thin) (@in Builtin.NativeObject, @in (Builtin.NativeObject, Builtin.NativeObject)) -> () { +bb0(%0 : $*Builtin.NativeObject, %0a : $*(Builtin.NativeObject, Builtin.NativeObject)): + %1 = alloc_stack $Builtin.NativeObject + copy_addr %0 
to [init] %1 : $*Builtin.NativeObject + %2 = alloc_stack $(Builtin.NativeObject, Builtin.NativeObject) + copy_addr [take] %0a to [init] %2 : $*(Builtin.NativeObject, Builtin.NativeObject) + tuple_addr_constructor [assign] %2 : $*(Builtin.NativeObject, Builtin.NativeObject) with (%0 : $*Builtin.NativeObject, %1 : $*Builtin.NativeObject) + destroy_addr %2 : $*(Builtin.NativeObject, Builtin.NativeObject) + dealloc_stack %2 : $*(Builtin.NativeObject, Builtin.NativeObject) + dealloc_stack %1 : $*Builtin.NativeObject + %9999 = tuple () + return %9999 : $() +} + +// CHECK-LABEL: sil [ossa] @tuple_addr_constructor_init : $@convention(thin) (@in Builtin.NativeObject) -> () { +// CHECK: bb0([[LHS:%.*]] : $* +// CHECK: [[RHS:%.*]] = alloc_stack $Builtin.NativeObject +// CHECK: [[DEST:%.*]] = alloc_stack $(Builtin.NativeObject, Builtin.NativeObject) +// CHECK: tuple_addr_constructor [init] [[DEST]] : $*(Builtin.NativeObject, Builtin.NativeObject) with ([[LHS]] : $*Builtin.NativeObject, [[RHS]] : $*Builtin.NativeObject) +// CHECK: } // end sil function 'tuple_addr_constructor_init' +sil [ossa] @tuple_addr_constructor_init : $@convention(thin) (@in Builtin.NativeObject) -> () { +bb0(%0 : $*Builtin.NativeObject): + %1 = alloc_stack $Builtin.NativeObject + copy_addr %0 to [init] %1 : $*Builtin.NativeObject + %2 = alloc_stack $(Builtin.NativeObject, Builtin.NativeObject) + tuple_addr_constructor [init] %2 : $*(Builtin.NativeObject, Builtin.NativeObject) with (%0 : $*Builtin.NativeObject, %1 : $*Builtin.NativeObject) + destroy_addr %2 : $*(Builtin.NativeObject, Builtin.NativeObject) + dealloc_stack %2 : $*(Builtin.NativeObject, Builtin.NativeObject) + dealloc_stack %1 : $*Builtin.NativeObject + %9999 = tuple () + return %9999 : $() +} + +// CHECK-LABEL: sil [ossa] @tuple_addr_constructor_differing_category : $@convention(thin) (@in Builtin.NativeObject, @owned Builtin.NativeObject) -> () { +// CHECK: bb0([[LHS:%.*]] : $*Builtin.NativeObject, [[RHS:%.*]] : @owned $Builtin.NativeObject) +// CHECK: [[DEST:%.*]] = alloc_stack $(Builtin.NativeObject, Builtin.NativeObject) +// CHECK: tuple_addr_constructor [init] [[DEST]] : $*(Builtin.NativeObject, Builtin.NativeObject) with ([[LHS]] : $*Builtin.NativeObject, [[RHS]] : $Builtin.NativeObject) +// CHECK: } // end sil function 'tuple_addr_constructor_differing_category' +sil [ossa] @tuple_addr_constructor_differing_category : $@convention(thin) (@in Builtin.NativeObject, @owned Builtin.NativeObject) -> () { +bb0(%0 : $*Builtin.NativeObject, %1 : @owned $Builtin.NativeObject): + %2 = alloc_stack $(Builtin.NativeObject, Builtin.NativeObject) + tuple_addr_constructor [init] %2 : $*(Builtin.NativeObject, Builtin.NativeObject) with (%0 : $*Builtin.NativeObject, %1 : $Builtin.NativeObject) + destroy_addr %2 : $*(Builtin.NativeObject, Builtin.NativeObject) + dealloc_stack %2 : $*(Builtin.NativeObject, Builtin.NativeObject) + %9999 = tuple () + return %9999 : $() +} + +// CHECK-LABEL: sil [ossa] @tuple_addr_constructor_unfriendly_tuple : $@convention(thin) (@in Builtin.NativeObject, @owned Builtin.NativeObject) -> () { +// CHECK: tuple_addr_constructor [init] {{%.*}} : $*((), (Builtin.NativeObject, Builtin.NativeObject)) with +sil [ossa] @tuple_addr_constructor_unfriendly_tuple : $@convention(thin) (@in Builtin.NativeObject, @owned Builtin.NativeObject) -> () { +bb0(%0 : $*Builtin.NativeObject, %1 : @owned $Builtin.NativeObject): + %2 = alloc_stack $((), (Builtin.NativeObject, Builtin.NativeObject)) + tuple_addr_constructor [init] %2 : $*((), (Builtin.NativeObject, 
Builtin.NativeObject)) with (%0 : $*Builtin.NativeObject, %1 : $Builtin.NativeObject) + destroy_addr %2 : $*((), (Builtin.NativeObject, Builtin.NativeObject)) + dealloc_stack %2 : $*((), (Builtin.NativeObject, Builtin.NativeObject)) + %9999 = tuple () + return %9999 : $() } \ No newline at end of file diff --git a/test/SIL/Serialization/basic2.sil b/test/SIL/Serialization/basic2.sil index fbe6c08093d1e..49ac4c1cbe32c 100644 --- a/test/SIL/Serialization/basic2.sil +++ b/test/SIL/Serialization/basic2.sil @@ -115,4 +115,73 @@ bb0: dealloc_box %1 : ${ var Builtin.NativeObject } %9999 = tuple () return %9999 : $() +} + + +// CHECK-LABEL: sil [ossa] @tuple_addr_constructor_assign : $@convention(thin) (@in Builtin.NativeObject, @in (Builtin.NativeObject, Builtin.NativeObject)) -> () { +// CHECK: bb0([[LHS:%.*]] : $*Builtin.NativeObject, +// CHECK: [[RHS:%.*]] = alloc_stack $Builtin.NativeObject +// CHECK: [[DEST:%.*]] = alloc_stack $(Builtin.NativeObject, Builtin.NativeObject) +// CHECK: tuple_addr_constructor [assign] [[DEST]] : $*(Builtin.NativeObject, Builtin.NativeObject) with ([[LHS]] : $*Builtin.NativeObject, [[RHS]] : $*Builtin.NativeObject) +// CHECK: } // end sil function 'tuple_addr_constructor_assign' +sil [ossa] @tuple_addr_constructor_assign : $@convention(thin) (@in Builtin.NativeObject, @in (Builtin.NativeObject, Builtin.NativeObject)) -> () { +bb0(%0 : $*Builtin.NativeObject, %0a : $*(Builtin.NativeObject, Builtin.NativeObject)): + %1 = alloc_stack $Builtin.NativeObject + copy_addr %0 to [init] %1 : $*Builtin.NativeObject + %2 = alloc_stack $(Builtin.NativeObject, Builtin.NativeObject) + copy_addr [take] %0a to [init] %2 : $*(Builtin.NativeObject, Builtin.NativeObject) + tuple_addr_constructor [assign] %2 : $*(Builtin.NativeObject, Builtin.NativeObject) with (%0 : $*Builtin.NativeObject, %1 : $*Builtin.NativeObject) + destroy_addr %2 : $*(Builtin.NativeObject, Builtin.NativeObject) + destroy_addr %1 : $*Builtin.NativeObject + dealloc_stack %2 : $*(Builtin.NativeObject, Builtin.NativeObject) + dealloc_stack %1 : $*Builtin.NativeObject + %9999 = tuple () + return %9999 : $() +} + + +// CHECK-LABEL: sil [ossa] @tuple_addr_constructor_differing_category : $@convention(thin) (@in Builtin.NativeObject, @owned Builtin.NativeObject) -> () { +// CHECK: bb0([[LHS:%.*]] : $*Builtin.NativeObject, [[RHS:%.*]] : @owned $Builtin.NativeObject) +// CHECK: [[DEST:%.*]] = alloc_stack $(Builtin.NativeObject, Builtin.NativeObject) +// CHECK: tuple_addr_constructor [init] [[DEST]] : $*(Builtin.NativeObject, Builtin.NativeObject) with ([[LHS]] : $*Builtin.NativeObject, [[RHS]] : $Builtin.NativeObject) +// CHECK: } // end sil function 'tuple_addr_constructor_differing_category' +sil [ossa] @tuple_addr_constructor_differing_category : $@convention(thin) (@in Builtin.NativeObject, @owned Builtin.NativeObject) -> () { +bb0(%0 : $*Builtin.NativeObject, %1 : @owned $Builtin.NativeObject): + %2 = alloc_stack $(Builtin.NativeObject, Builtin.NativeObject) + tuple_addr_constructor [init] %2 : $*(Builtin.NativeObject, Builtin.NativeObject) with (%0 : $*Builtin.NativeObject, %1 : $Builtin.NativeObject) + destroy_addr %2 : $*(Builtin.NativeObject, Builtin.NativeObject) + dealloc_stack %2 : $*(Builtin.NativeObject, Builtin.NativeObject) + %9999 = tuple () + return %9999 : $() +} + +// CHECK-LABEL: sil [ossa] @tuple_addr_constructor_init : $@convention(thin) (@in Builtin.NativeObject) -> () { +// CHECK: bb0([[LHS:%.*]] : $* +// CHECK: [[RHS:%.*]] = alloc_stack $Builtin.NativeObject +// CHECK: [[DEST:%.*]] = 
alloc_stack $(Builtin.NativeObject, Builtin.NativeObject) +// CHECK: tuple_addr_constructor [init] [[DEST]] : $*(Builtin.NativeObject, Builtin.NativeObject) with ([[LHS]] : $*Builtin.NativeObject, [[RHS]] : $*Builtin.NativeObject) +// CHECK: } // end sil function 'tuple_addr_constructor_init' +sil [ossa] @tuple_addr_constructor_init : $@convention(thin) (@in Builtin.NativeObject) -> () { +bb0(%0 : $*Builtin.NativeObject): + %1 = alloc_stack $Builtin.NativeObject + copy_addr %0 to [init] %1 : $*Builtin.NativeObject + %2 = alloc_stack $(Builtin.NativeObject, Builtin.NativeObject) + tuple_addr_constructor [init] %2 : $*(Builtin.NativeObject, Builtin.NativeObject) with (%0 : $*Builtin.NativeObject, %1 : $*Builtin.NativeObject) + destroy_addr %2 : $*(Builtin.NativeObject, Builtin.NativeObject) + dealloc_stack %2 : $*(Builtin.NativeObject, Builtin.NativeObject) + dealloc_stack %1 : $*Builtin.NativeObject + %9999 = tuple () + return %9999 : $() +} + +// CHECK-LABEL: sil [ossa] @tuple_addr_constructor_unfriendly_tuple : $@convention(thin) (@in Builtin.NativeObject, @owned Builtin.NativeObject) -> () { +// CHECK: tuple_addr_constructor [init] {{%.*}} : $*((), (Builtin.NativeObject, Builtin.NativeObject)) with +sil [ossa] @tuple_addr_constructor_unfriendly_tuple : $@convention(thin) (@in Builtin.NativeObject, @owned Builtin.NativeObject) -> () { +bb0(%0 : $*Builtin.NativeObject, %1 : @owned $Builtin.NativeObject): + %2 = alloc_stack $((), (Builtin.NativeObject, Builtin.NativeObject)) + tuple_addr_constructor [init] %2 : $*((), (Builtin.NativeObject, Builtin.NativeObject)) with (%0 : $*Builtin.NativeObject, %1 : $Builtin.NativeObject) + destroy_addr %2 : $*((), (Builtin.NativeObject, Builtin.NativeObject)) + dealloc_stack %2 : $*((), (Builtin.NativeObject, Builtin.NativeObject)) + %9999 = tuple () + return %9999 : $() } \ No newline at end of file From d2b5bc33a186d55400617e43fa9e04446af60f73 Mon Sep 17 00:00:00 2001 From: Michael Gottesman Date: Fri, 3 Nov 2023 13:47:50 -0700 Subject: [PATCH 2/3] [sil-optimizer] Add a small pass that runs after TransferNonSendable and eliminates tuple addr constructor. This will limit the number of passes that need to be updated to handle tuple_addr_constructor. 
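
For illustration, a sketch of the pass's effect (this mirrors the tests added
below rather than prescribing the compiler's literal output): it rewrites

    tuple_addr_constructor [init] %dest : $*(Builtin.NativeObject, Builtin.NativeObject) with (%a : $*Builtin.NativeObject, %b : $Builtin.NativeObject)

into

    %0 = tuple_element_addr %dest : $*(Builtin.NativeObject, Builtin.NativeObject), 0
    copy_addr [take] %a to [init] %0 : $*Builtin.NativeObject
    %1 = tuple_element_addr %dest : $*(Builtin.NativeObject, Builtin.NativeObject), 1
    store %b to [init] %1 : $*Builtin.NativeObject
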
--- include/swift/SIL/SILInstruction.h | 2 + .../swift/SILOptimizer/PassManager/Passes.def | 2 + lib/SILOptimizer/Mandatory/CMakeLists.txt | 1 + .../Mandatory/LowerTupleAddrConstructor.cpp | 78 +++++++++ .../Mandatory/TransferNonSendable.cpp | 1 + lib/SILOptimizer/PassManager/PassPipeline.cpp | 4 + .../lower_tuple_addr_constructor.sil | 158 ++++++++++++++++++ 7 files changed, 246 insertions(+) create mode 100644 lib/SILOptimizer/Mandatory/LowerTupleAddrConstructor.cpp create mode 100644 test/SILOptimizer/lower_tuple_addr_constructor.sil diff --git a/include/swift/SIL/SILInstruction.h b/include/swift/SIL/SILInstruction.h index dc24803dde875..352bdd5bcdfb2 100644 --- a/include/swift/SIL/SILInstruction.h +++ b/include/swift/SIL/SILInstruction.h @@ -6396,6 +6396,8 @@ class TupleAddrConstructorInst final return operand->getOperandNumber() + 1; } + unsigned getNumElements() const { return getTupleType()->getNumElements(); } + TupleType *getTupleType() const { return getDest().get()->getType().getRawASTType()->castTo(); } diff --git a/include/swift/SILOptimizer/PassManager/Passes.def b/include/swift/SILOptimizer/PassManager/Passes.def index 5f9c6c77ec6cb..0b9bffca5b853 100644 --- a/include/swift/SILOptimizer/PassManager/Passes.def +++ b/include/swift/SILOptimizer/PassManager/Passes.def @@ -369,6 +369,8 @@ PASS(IRGenPrepare, "irgen-prepare", "Cleanup SIL in preparation for IRGen") PASS(TransferNonSendable, "transfer-non-sendable", "Checks calls that send non-sendable values between isolation domains") +PASS(LowerTupleAddrConstructor, "lower-tuple-addr-constructor", + "Lower tuple addr constructor to tuple_element_addr+copy_addr") PASS(SILGenCleanup, "silgen-cleanup", "Cleanup SIL in preparation for diagnostics") PASS(SILCombine, "sil-combine", diff --git a/lib/SILOptimizer/Mandatory/CMakeLists.txt b/lib/SILOptimizer/Mandatory/CMakeLists.txt index fb9a72c995711..9ec86043d7eeb 100644 --- a/lib/SILOptimizer/Mandatory/CMakeLists.txt +++ b/lib/SILOptimizer/Mandatory/CMakeLists.txt @@ -43,6 +43,7 @@ target_sources(swiftSILOptimizer PRIVATE RawSILInstLowering.cpp ReferenceBindingTransform.cpp TransferNonSendable.cpp + LowerTupleAddrConstructor.cpp SILGenCleanup.cpp YieldOnceCheck.cpp OSLogOptimization.cpp diff --git a/lib/SILOptimizer/Mandatory/LowerTupleAddrConstructor.cpp b/lib/SILOptimizer/Mandatory/LowerTupleAddrConstructor.cpp new file mode 100644 index 0000000000000..533d23960721c --- /dev/null +++ b/lib/SILOptimizer/Mandatory/LowerTupleAddrConstructor.cpp @@ -0,0 +1,78 @@ +//===--- LowerTupleAddrConstructor.cpp ------------------------------------===// +// +// This source file is part of the Swift.org open source project +// +// Copyright (c) 2014 - 2023 Apple Inc. 
and the Swift project authors +// Licensed under Apache License v2.0 with Runtime Library Exception +// +// See https://swift.org/LICENSE.txt for license information +// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors +// +//===----------------------------------------------------------------------===// + +#include "swift/SIL/SILBuilder.h" +#include "swift/SIL/SILFunction.h" +#include "swift/SILOptimizer/PassManager/Transforms.h" + +using namespace swift; + +//===----------------------------------------------------------------------===// +// MARK: Top Level Entrypoint +//===----------------------------------------------------------------------===// + +namespace { + +class LowerTupleAddrConstructorTransform : public SILFunctionTransform { + void run() override { + SILFunction *function = getFunction(); + + // Once we have finished, lower all tuple_addr_constructor that we see. + bool deletedInst = false; + for (auto &block : *function) { + for (auto ii = block.begin(), ie = block.end(); ii != ie;) { + auto *inst = dyn_cast(&*ii); + ++ii; + + if (!inst) + continue; + + SILBuilderWithScope builder(inst); + + unsigned count = 0; + visitExplodedTupleValue( + inst->getDestValue(), + [&](SILValue value, std::optional index) -> SILValue { + if (!index) { + SILValue elt = inst->getElement(count); + if (elt->getType().isAddress()) { + builder.createCopyAddr(inst->getLoc(), elt, value, IsTake, + inst->isInitializationOfDest()); + } else { + builder.emitStoreValueOperation( + inst->getLoc(), elt, value, + bool(inst->isInitializationOfDest()) + ? StoreOwnershipQualifier::Init + : StoreOwnershipQualifier::Assign); + } + ++count; + return value; + } + auto *teai = + builder.createTupleElementAddr(inst->getLoc(), value, *index); + return teai; + }); + inst->eraseFromParent(); + deletedInst = true; + } + } + + if (deletedInst) + invalidateAnalysis(SILAnalysis::InvalidationKind::Instructions); + } +}; + +} // end anonymous namespace + +SILTransform *swift::createLowerTupleAddrConstructor() { + return new LowerTupleAddrConstructorTransform(); +} diff --git a/lib/SILOptimizer/Mandatory/TransferNonSendable.cpp b/lib/SILOptimizer/Mandatory/TransferNonSendable.cpp index b9cc055bc1168..281547952f27f 100644 --- a/lib/SILOptimizer/Mandatory/TransferNonSendable.cpp +++ b/lib/SILOptimizer/Mandatory/TransferNonSendable.cpp @@ -21,6 +21,7 @@ #include "swift/SIL/NodeDatastructures.h" #include "swift/SIL/OwnershipUtils.h" #include "swift/SIL/SILBasicBlock.h" +#include "swift/SIL/SILBuilder.h" #include "swift/SIL/SILFunction.h" #include "swift/SIL/SILInstruction.h" #include "swift/SILOptimizer/PassManager/Transforms.h" diff --git a/lib/SILOptimizer/PassManager/PassPipeline.cpp b/lib/SILOptimizer/PassManager/PassPipeline.cpp index 8a64aa2dde346..f3f41060df057 100644 --- a/lib/SILOptimizer/PassManager/PassPipeline.cpp +++ b/lib/SILOptimizer/PassManager/PassPipeline.cpp @@ -134,6 +134,10 @@ static void addMandatoryDiagnosticOptPipeline(SILPassPipelinePlan &P) { P.addFlowIsolation(); P.addTransferNonSendable(); + // Lower tuple addr constructor. Eventually this can be merged into later + // passes. This ensures we do not need to update later passes for something + // that is only needed by TransferNonSendable(). + P.addLowerTupleAddrConstructor(); // Automatic differentiation: canonicalize all differentiability witnesses // and `differentiable_function` instructions. 
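
As an illustrative sketch only (the authoritative behavior is spelled out by
the lower_tuple_addr_constructor.sil test that follows), the two element kinds
map onto the two branches of the lowering above: an address element becomes a
copy_addr, an object element becomes a store, and [init] vs. [assign] selects
the destination qualifier:

    tuple_addr_constructor [assign] %dest : $*(Builtin.NativeObject, Builtin.NativeObject) with (%a : $*Builtin.NativeObject, %b : $Builtin.NativeObject)

    -->

    %0 = tuple_element_addr %dest : $*(Builtin.NativeObject, Builtin.NativeObject), 0
    copy_addr [take] %a to %0 : $*Builtin.NativeObject
    %1 = tuple_element_addr %dest : $*(Builtin.NativeObject, Builtin.NativeObject), 1
    store %b to [assign] %1 : $*Builtin.NativeObject
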
diff --git a/test/SILOptimizer/lower_tuple_addr_constructor.sil b/test/SILOptimizer/lower_tuple_addr_constructor.sil new file mode 100644 index 0000000000000..7044630b97456 --- /dev/null +++ b/test/SILOptimizer/lower_tuple_addr_constructor.sil @@ -0,0 +1,158 @@ +// RUN: %target-sil-opt -lower-tuple-addr-constructor %s | %FileCheck %s + +sil_stage canonical + +import Builtin +import Swift + +// CHECK-LABEL: sil [ossa] @tuple_addr_constructor_init : $@convention(thin) (@in Builtin.NativeObject) -> () { +// CHECK: bb0([[LHS:%.*]] : $* +// CHECK: [[RHS:%.*]] = alloc_stack $Builtin.NativeObject +// CHECK: [[DEST:%.*]] = alloc_stack $(Builtin.NativeObject, Builtin.NativeObject) +// CHECK: [[LHS_ADDR:%.*]] = tuple_element_addr [[DEST]] : $*(Builtin.NativeObject, Builtin.NativeObject), 0 +// CHECK: copy_addr [take] [[LHS]] to [init] [[LHS_ADDR]] +// CHECK: [[RHS_ADDR:%.*]] = tuple_element_addr [[DEST]] : $*(Builtin.NativeObject, Builtin.NativeObject), 1 +// CHECK: copy_addr [take] [[RHS]] to [init] [[RHS_ADDR]] +// CHECK: } // end sil function 'tuple_addr_constructor_init' +sil [ossa] @tuple_addr_constructor_init : $@convention(thin) (@in Builtin.NativeObject) -> () { +bb0(%0 : $*Builtin.NativeObject): + %1 = alloc_stack $Builtin.NativeObject + copy_addr %0 to [init] %1 : $*Builtin.NativeObject + %2 = alloc_stack $(Builtin.NativeObject, Builtin.NativeObject) + tuple_addr_constructor [init] %2 : $*(Builtin.NativeObject, Builtin.NativeObject) with (%0 : $*Builtin.NativeObject, %1 : $*Builtin.NativeObject) + destroy_addr %2 : $*(Builtin.NativeObject, Builtin.NativeObject) + dealloc_stack %2 : $*(Builtin.NativeObject, Builtin.NativeObject) + dealloc_stack %1 : $*Builtin.NativeObject + %9999 = tuple () + return %9999 : $() +} + +// CHECK-LABEL: sil [ossa] @tuple_addr_constructor_assign : $@convention(thin) (@in Builtin.NativeObject, @in (Builtin.NativeObject, Builtin.NativeObject)) -> () { +// CHECK: bb0([[LHS:%.*]] : $*Builtin.NativeObject, +// CHECK: [[RHS:%.*]] = alloc_stack $Builtin.NativeObject +// CHECK: [[DEST:%.*]] = alloc_stack $(Builtin.NativeObject, Builtin.NativeObject) +// CHECK: [[LHS_ADDR:%.*]] = tuple_element_addr [[DEST]] : $*(Builtin.NativeObject, Builtin.NativeObject), 0 +// CHECK: copy_addr [take] [[LHS]] to [[LHS_ADDR]] +// CHECK: [[RHS_ADDR:%.*]] = tuple_element_addr [[DEST]] : $*(Builtin.NativeObject, Builtin.NativeObject), 1 +// CHECK: copy_addr [take] [[RHS]] to [[RHS_ADDR]] +// CHECK: } // end sil function 'tuple_addr_constructor_assign' +sil [ossa] @tuple_addr_constructor_assign : $@convention(thin) (@in Builtin.NativeObject, @in (Builtin.NativeObject, Builtin.NativeObject)) -> () { +bb0(%0 : $*Builtin.NativeObject, %0a : $*(Builtin.NativeObject, Builtin.NativeObject)): + %1 = alloc_stack $Builtin.NativeObject + copy_addr %0 to [init] %1 : $*Builtin.NativeObject + %2 = alloc_stack $(Builtin.NativeObject, Builtin.NativeObject) + copy_addr [take] %0a to [init] %2 : $*(Builtin.NativeObject, Builtin.NativeObject) + tuple_addr_constructor [assign] %2 : $*(Builtin.NativeObject, Builtin.NativeObject) with (%0 : $*Builtin.NativeObject, %1 : $*Builtin.NativeObject) + destroy_addr %2 : $*(Builtin.NativeObject, Builtin.NativeObject) + dealloc_stack %2 : $*(Builtin.NativeObject, Builtin.NativeObject) + dealloc_stack %1 : $*Builtin.NativeObject + %9999 = tuple () + return %9999 : $() +} + +// CHECK-LABEL: sil [ossa] @tuple_addr_constructor_mixed_init : $@convention(thin) (@in Builtin.NativeObject, @owned Builtin.NativeObject) -> () { +// CHECK: bb0([[LHS:%.*]] : $*Builtin.NativeObject, 
[[RHS:%.*]] : @owned $Builtin.NativeObject): +// CHECK: [[DEST:%.*]] = alloc_stack $(Builtin.NativeObject, Builtin.NativeObject) +// CHECK: [[LHS_ADDR:%.*]] = tuple_element_addr [[DEST]] : $*(Builtin.NativeObject, Builtin.NativeObject), 0 +// CHECK: copy_addr [take] [[LHS]] to [init] [[LHS_ADDR]] +// CHECK: [[RHS_ADDR:%.*]] = tuple_element_addr [[DEST]] : $*(Builtin.NativeObject, Builtin.NativeObject), 1 +// CHECK: store [[RHS]] to [init] [[RHS_ADDR]] +// CHECK: } // end sil function 'tuple_addr_constructor_mixed_init' +sil [ossa] @tuple_addr_constructor_mixed_init : $@convention(thin) (@in Builtin.NativeObject, @owned Builtin.NativeObject) -> () { +bb0(%0 : $*Builtin.NativeObject, %1 : @owned $Builtin.NativeObject): + %2 = alloc_stack $(Builtin.NativeObject, Builtin.NativeObject) + tuple_addr_constructor [init] %2 : $*(Builtin.NativeObject, Builtin.NativeObject) with (%0 : $*Builtin.NativeObject, %1 : $Builtin.NativeObject) + destroy_addr %2 : $*(Builtin.NativeObject, Builtin.NativeObject) + dealloc_stack %2 : $*(Builtin.NativeObject, Builtin.NativeObject) + %9999 = tuple () + return %9999 : $() +} + +// CHECK-LABEL: sil [ossa] @tuple_addr_constructor_mixed_assign : $@convention(thin) (@in Builtin.NativeObject, @in (Builtin.NativeObject, Builtin.NativeObject), @owned Builtin.NativeObject) -> () { +// CHECK: bb0([[LHS:%.*]] : $*Builtin.NativeObject, {{%.*}} : $*(Builtin.NativeObject, Builtin.NativeObject), [[RHS:%.*]] : @owned +// CHECK: [[DEST:%.*]] = alloc_stack $(Builtin.NativeObject, Builtin.NativeObject) +// CHECK: [[LHS_ADDR:%.*]] = tuple_element_addr [[DEST]] : $*(Builtin.NativeObject, Builtin.NativeObject), 0 +// CHECK: copy_addr [take] [[LHS]] to [[LHS_ADDR]] +// CHECK: [[RHS_ADDR:%.*]] = tuple_element_addr [[DEST]] : $*(Builtin.NativeObject, Builtin.NativeObject), 1 +// CHECK: store [[RHS]] to [assign] [[RHS_ADDR]] +// CHECK: } // end sil function 'tuple_addr_constructor_mixed_assign' +sil [ossa] @tuple_addr_constructor_mixed_assign : $@convention(thin) (@in Builtin.NativeObject, @in (Builtin.NativeObject, Builtin.NativeObject), @owned Builtin.NativeObject) -> () { +bb0(%0 : $*Builtin.NativeObject, %0a : $*(Builtin.NativeObject, Builtin.NativeObject), %0b : @owned $Builtin.NativeObject): + %2 = alloc_stack $(Builtin.NativeObject, Builtin.NativeObject) + copy_addr [take] %0a to [init] %2 : $*(Builtin.NativeObject, Builtin.NativeObject) + tuple_addr_constructor [assign] %2 : $*(Builtin.NativeObject, Builtin.NativeObject) with (%0 : $*Builtin.NativeObject, %0b : $Builtin.NativeObject) + destroy_addr %2 : $*(Builtin.NativeObject, Builtin.NativeObject) + dealloc_stack %2 : $*(Builtin.NativeObject, Builtin.NativeObject) + %9999 = tuple () + return %9999 : $() +} + +// CHECK-LABEL: sil [ossa] @tuple_addr_constructor_unfriendly : $@convention(thin) (@in Builtin.NativeObject, @owned Builtin.NativeObject) -> () { +// CHECK: bb0([[LHS:%.*]] : $*Builtin.NativeObject, [[RHS:%.*]] : @owned +// CHECK: [[DEST:%.*]] = alloc_stack $((), (Builtin.NativeObject, Builtin.NativeObject)) +// CHECK: [[LHS_ADDR:%.*]] = tuple_element_addr [[DEST]] : $*((), (Builtin.NativeObject, Builtin.NativeObject)), 0 +// CHECK: [[RHS_ADDR:%.*]] = tuple_element_addr [[DEST]] : $*((), (Builtin.NativeObject, Builtin.NativeObject)), 1 +// CHECK: [[RHS_ADDR_1:%.*]] = tuple_element_addr [[RHS_ADDR]] : $*(Builtin.NativeObject, Builtin.NativeObject), 0 +// CHECK: copy_addr [take] [[LHS]] to [init] [[RHS_ADDR_1]] +// CHECK: [[RHS_ADDR_2:%.*]] = tuple_element_addr [[RHS_ADDR]] : $*(Builtin.NativeObject, Builtin.NativeObject), 1 
+// CHECK: store [[RHS]] to [init] [[RHS_ADDR_2]] +// CHECK: } // end sil function 'tuple_addr_constructor_unfriendly' +sil [ossa] @tuple_addr_constructor_unfriendly : $@convention(thin) (@in Builtin.NativeObject, @owned Builtin.NativeObject) -> () { +bb0(%0 : $*Builtin.NativeObject, %0a : @owned $Builtin.NativeObject): + %2 = alloc_stack $((), (Builtin.NativeObject, Builtin.NativeObject)) + tuple_addr_constructor [init] %2 : $*((), (Builtin.NativeObject, Builtin.NativeObject)) with (%0 : $*Builtin.NativeObject, %0a : $Builtin.NativeObject) + destroy_addr %2 : $*((), (Builtin.NativeObject, Builtin.NativeObject)) + dealloc_stack %2 : $*((), (Builtin.NativeObject, Builtin.NativeObject)) + %9999 = tuple () + return %9999 : $() +} + +// CHECK-LABEL: sil [ossa] @tuple_addr_constructor_unfriendly_2 : $@convention(thin) (@in Builtin.NativeObject, @owned Builtin.NativeObject, @in Builtin.NativeObject) -> () { +// CHECK: bb0([[ARG1:%.*]] : $*Builtin.NativeObject, [[ARG2:%.*]] : @owned $Builtin.NativeObject, [[ARG3:%.*]] : $*Builtin.NativeObject +// CHECK: [[DEST:%.*]] = alloc_stack $((), (Builtin.NativeObject, Builtin.NativeObject), ((), (), Builtin.NativeObject)) +// CHECK: [[ADDR_1:%.*]] = tuple_element_addr [[DEST]] : $*((), (Builtin.NativeObject, Builtin.NativeObject), ((), (), Builtin.NativeObject)) +// CHECK: [[ADDR_2:%.*]] = tuple_element_addr [[DEST]] : $*((), (Builtin.NativeObject, Builtin.NativeObject), ((), (), Builtin.NativeObject)) +// CHECK: [[ADDR_2_1:%.*]] = tuple_element_addr [[ADDR_2]] : $*(Builtin.NativeObject, Builtin.NativeObject), 0 +// CHECK: copy_addr [take] [[ARG1]] to [init] [[ADDR_2_1]] +// CHECK: [[ADDR_2_2:%.*]] = tuple_element_addr [[ADDR_2]] : $*(Builtin.NativeObject, Builtin.NativeObject), 1 +// CHECK: store [[ARG2]] to [init] [[ADDR_2_2]] +// CHECK: [[ADDR_3:%.*]] = tuple_element_addr [[DEST]] : $*((), (Builtin.NativeObject, Builtin.NativeObject), ((), (), Builtin.NativeObject)) +// CHECK: [[ADDR_3_1:%.*]] = tuple_element_addr [[ADDR_3]] : $*((), (), Builtin.NativeObject), 0 +// CHECK: [[ADDR_3_2:%.*]] = tuple_element_addr [[ADDR_3]] : $*((), (), Builtin.NativeObject), 1 +// CHECK: [[ADDR_3_3:%.*]] = tuple_element_addr [[ADDR_3]] : $*((), (), Builtin.NativeObject), 2 +// CHECK: copy_addr [take] [[ARG3]] to [init] [[ADDR_3_3]] +// CHECK: } // end sil function 'tuple_addr_constructor_unfriendly_2' +sil [ossa] @tuple_addr_constructor_unfriendly_2 : $@convention(thin) (@in Builtin.NativeObject, @owned Builtin.NativeObject, @in Builtin.NativeObject) -> () { +bb0(%0 : $*Builtin.NativeObject, %0a : @owned $Builtin.NativeObject, %0b : $*Builtin.NativeObject): + %2 = alloc_stack $((), (Builtin.NativeObject, Builtin.NativeObject), ((), (), (Builtin.NativeObject))) + tuple_addr_constructor [init] %2 : $*((), (Builtin.NativeObject, Builtin.NativeObject), ((), (), (Builtin.NativeObject))) with (%0 : $*Builtin.NativeObject, %0a : $Builtin.NativeObject, %0b : $*Builtin.NativeObject) + destroy_addr %2 : $*((), (Builtin.NativeObject, Builtin.NativeObject), ((), (), (Builtin.NativeObject))) + dealloc_stack %2 : $*((), (Builtin.NativeObject, Builtin.NativeObject), ((), (), (Builtin.NativeObject))) + %9999 = tuple () + return %9999 : $() +} + +// CHECK-LABEL: sil [ossa] @tuple_addr_constructor_unfriendly_3 : $@convention(thin) (@in Builtin.NativeObject, @owned Builtin.NativeObject, @in Builtin.NativeObject) -> () { +// CHECK: bb0([[ARG1:%.*]] : $*Builtin.NativeObject, [[ARG2:%.*]] : @owned $Builtin.NativeObject, [[ARG3:%.*]] : $*Builtin.NativeObject +// CHECK: [[DEST:%.*]] = 
alloc_stack $((a: ()), (Builtin.NativeObject, Builtin.NativeObject), ((), (), b: Builtin.NativeObject))
+// CHECK: [[ADDR_1:%.*]] = tuple_element_addr [[DEST]] : $*((a: ()), (Builtin.NativeObject, Builtin.NativeObject), ((), (), b: Builtin.NativeObject))
+// CHECK: [[ADDR_2:%.*]] = tuple_element_addr [[DEST]] : $*((a: ()), (Builtin.NativeObject, Builtin.NativeObject), ((), (), b: Builtin.NativeObject))
+// CHECK: [[ADDR_2_1:%.*]] = tuple_element_addr [[ADDR_2]] : $*(Builtin.NativeObject, Builtin.NativeObject), 0
+// CHECK: copy_addr [take] [[ARG1]] to [init] [[ADDR_2_1]]
+// CHECK: [[ADDR_2_2:%.*]] = tuple_element_addr [[ADDR_2]] : $*(Builtin.NativeObject, Builtin.NativeObject), 1
+// CHECK: store [[ARG2]] to [init] [[ADDR_2_2]]
+// CHECK: [[ADDR_3:%.*]] = tuple_element_addr [[DEST]] : $*((a: ()), (Builtin.NativeObject, Builtin.NativeObject), ((), (), b: Builtin.NativeObject))
+// CHECK: [[ADDR_3_1:%.*]] = tuple_element_addr [[ADDR_3]] : $*((), (), b: Builtin.NativeObject), 0
+// CHECK: [[ADDR_3_2:%.*]] = tuple_element_addr [[ADDR_3]] : $*((), (), b: Builtin.NativeObject), 1
+// CHECK: [[ADDR_3_3:%.*]] = tuple_element_addr [[ADDR_3]] : $*((), (), b: Builtin.NativeObject), 2
+// CHECK: copy_addr [take] [[ARG3]] to [init] [[ADDR_3_3]]
+// CHECK: } // end sil function 'tuple_addr_constructor_unfriendly_3'
+sil [ossa] @tuple_addr_constructor_unfriendly_3 : $@convention(thin) (@in Builtin.NativeObject, @owned Builtin.NativeObject, @in Builtin.NativeObject) -> () {
+bb0(%0 : $*Builtin.NativeObject, %0a : @owned $Builtin.NativeObject, %0b : $*Builtin.NativeObject):
+  %2 = alloc_stack $((a: ()), (Builtin.NativeObject, Builtin.NativeObject), ((), (), b: (Builtin.NativeObject)))
+  tuple_addr_constructor [init] %2 : $*((a: ()), (Builtin.NativeObject, Builtin.NativeObject), ((), (), b: (Builtin.NativeObject))) with (%0 : $*Builtin.NativeObject, %0a : $Builtin.NativeObject, %0b : $*Builtin.NativeObject)
+  destroy_addr %2 : $*((a: ()), (Builtin.NativeObject, Builtin.NativeObject), ((), (), b: (Builtin.NativeObject)))
+  dealloc_stack %2 : $*((a: ()), (Builtin.NativeObject, Builtin.NativeObject), ((), (), b: (Builtin.NativeObject)))
+  %9999 = tuple ()
+  return %9999 : $()
+}

From b1f69030fc159ac45b68bc9c4908da41b6858b6c Mon Sep 17 00:00:00 2001
From: Michael Gottesman
Date: Sun, 5 Nov 2023 19:17:46 -0800
Subject: [PATCH 3/3] [region-isolation] When assigning RValues into memory,
 use tuple_addr_constructor instead of doing it in pieces.

I also included changes to the rest of the SIL optimizer pipeline to ensure
that the part of the pipeline that runs before we lower tuple_addr_constructor
(which happens right after TransferNonSendable) works as before.

The reason for doing this is that it ensures that diagnostic passes can tell
the difference between:

```
x = (a, b, c)
```

and

```
x.0 = a
x.1 = b
x.2 = c
```

This is important for passes like TransferNonSendable, where assigning over the
entire tuple is treated differently from initializing it in pieces using
projections.
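
Concretely, for a whole-tuple assignment SILGen used to project each element of
the destination and assign into it separately; after this change it emits a
single tuple_addr_constructor over the entire destination. The following is
only a sketch with placeholder names (`%dest`, `%tmp`, `%v`); the updated
SILGen tests below (e.g. test/SILGen/tuples.swift) show the exact output:

```
// Before: the exploded RValue was written out element by element.
%0 = tuple_element_addr %dest : $*(any P, Int), 0
copy_addr [take] %tmp to %0 : $*any P
%1 = tuple_element_addr %dest : $*(any P, Int), 1
assign %v to %1 : $*Int

// After: one instruction assigns the whole tuple, so diagnostic passes such as
// TransferNonSendable can see that the entire value was overwritten at once.
tuple_addr_constructor [assign] %dest : $*(any P, Int) with (%tmp : $*any P, %v : $Int)
```
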
rdar://117880194 --- include/swift/SIL/SILCloner.h | 22 ++- include/swift/SIL/SILInstruction.h | 12 +- lib/SIL/IR/SILPrinter.cpp | 2 +- lib/SIL/Utils/InstructionUtils.cpp | 2 +- lib/SIL/Verifier/MemoryLifetimeVerifier.cpp | 2 +- lib/SIL/Verifier/SILVerifier.cpp | 16 +++ lib/SILGen/RValue.cpp | 47 +++---- lib/SILGen/SILGenBuilder.h | 13 ++ .../Mandatory/DIMemoryUseCollector.cpp | 21 +++ .../Mandatory/DefiniteInitialization.cpp | 24 ++++ .../Mandatory/LowerTupleAddrConstructor.cpp | 2 +- .../Mandatory/TransferNonSendable.cpp | 58 ++++++-- lib/Serialization/SerializeSIL.cpp | 4 +- test/Concurrency/sendnonsendable_basic.swift | 20 +-- test/SILGen/builtins.swift | 5 +- test/SILGen/tuples.swift | 10 +- test/SILOptimizer/definite_init_tuple.sil | 66 ++++++++- test/SILOptimizer/init_accessors.swift | 47 +++++-- test/SILOptimizer/specialize_ossa.sil | 128 ++++++++++++++++++ 19 files changed, 424 insertions(+), 77 deletions(-) diff --git a/include/swift/SIL/SILCloner.h b/include/swift/SIL/SILCloner.h index 56e50fdbb406b..8abb6a4a7f6ba 100644 --- a/include/swift/SIL/SILCloner.h +++ b/include/swift/SIL/SILCloner.h @@ -2184,11 +2184,29 @@ SILCloner::visitTupleInst(TupleInst *Inst) { template void SILCloner::visitTupleAddrConstructorInst( TupleAddrConstructorInst *Inst) { - auto Elements = getOpValueArray<8>(Inst->getElements()); + SmallVector Elements; + for (auto e : Inst->getElements()) { + SILValue mappedValue = getOpValue(e); + + // Check if mappedValue only consists of empty tuple elements. If it does, + // then we do not add it to our result. This is because we know that the + // corresponding elements in getOpValue(Inst->getDest()) will also change + // into an empty exploded tuple. Since we only have leaf non-empty non-tuple + // elements as operands, these are not represented. + bool FoundNonTuple = false; + mappedValue->getType().getASTType().visit( + [&](CanType ty) { FoundNonTuple |= !ty->is(); }); + if (FoundNonTuple) + Elements.push_back(mappedValue); + } + + if (Elements.empty()) + return; + getBuilder().setCurrentDebugScope(getOpScope(Inst->getDebugScope())); recordClonedInstruction(Inst, getBuilder().createTupleAddrConstructor( getOpLocation(Inst->getLoc()), - getOpValue(Inst->getDestValue()), Elements, + getOpValue(Inst->getDest()), Elements, Inst->isInitializationOfDest())); } diff --git a/include/swift/SIL/SILInstruction.h b/include/swift/SIL/SILInstruction.h index 352bdd5bcdfb2..86fd682d8c92d 100644 --- a/include/swift/SIL/SILInstruction.h +++ b/include/swift/SIL/SILInstruction.h @@ -6372,10 +6372,10 @@ class TupleAddrConstructorInst final Dest = 0, }; - Operand &getDest() { return getAllOperands().front(); } - const Operand &getDest() const { return getAllOperands().front(); } + Operand &getDestOperand() { return getAllOperands().front(); } + const Operand &getDestOperand() const { return getAllOperands().front(); } - SILValue getDestValue() const { return getDest().get(); } + SILValue getDest() const { return getDestOperand().get(); } /// The elements referenced by this TupleInst. 
MutableArrayRef<Operand> getElementOperands() {
@@ -6392,14 +6392,16 @@ class TupleAddrConstructorInst final
   unsigned getElementIndex(Operand *operand) {
     assert(operand->getUser() == this);
-    assert(operand != &getDest() && "Cannot pass in the destination");
+    assert(operand != &getDestOperand() && "Cannot pass in the destination");
     return operand->getOperandNumber() + 1;
   }
 
   unsigned getNumElements() const { return getTupleType()->getNumElements(); }
 
   TupleType *getTupleType() const {
-    return getDest().get()->getType().getRawASTType()->castTo<TupleType>();
+    // We use getASTType() since we want to look through a wrapped noncopyable
+    // type to get to the underlying tuple type.
+    return getDest()->getType().getASTType()->castTo<TupleType>();
   }
 
   IsInitialization_t isInitializationOfDest() const {
diff --git a/lib/SIL/IR/SILPrinter.cpp b/lib/SIL/IR/SILPrinter.cpp
index a1317382c22db..cbc86af223bce 100644
--- a/lib/SIL/IR/SILPrinter.cpp
+++ b/lib/SIL/IR/SILPrinter.cpp
@@ -2223,7 +2223,7 @@ class SILPrinter : public SILInstructionVisitor<SILPrinter> {
     } else {
       *this << "[assign] ";
     }
-    *this << getIDAndType(TI->getDestValue());
+    *this << getIDAndType(TI->getDest());
 
     *this << " with (";
 
diff --git a/lib/SIL/Utils/InstructionUtils.cpp b/lib/SIL/Utils/InstructionUtils.cpp
index d62022d5d4f21..59cd6bfe8a450 100644
--- a/lib/SIL/Utils/InstructionUtils.cpp
+++ b/lib/SIL/Utils/InstructionUtils.cpp
@@ -750,7 +750,7 @@ RuntimeEffect swift::getRuntimeEffect(SILInstruction *inst, SILType &impactType)
   }
   case SILInstructionKind::TupleAddrConstructorInst: {
     auto *ca = cast<TupleAddrConstructorInst>(inst);
-    impactType = ca->getDestValue()->getType();
+    impactType = ca->getDest()->getType();
     if (!ca->isInitializationOfDest())
       return RuntimeEffect::MetaData | RuntimeEffect::Releasing;
     return RuntimeEffect::MetaData;
diff --git a/lib/SIL/Verifier/MemoryLifetimeVerifier.cpp b/lib/SIL/Verifier/MemoryLifetimeVerifier.cpp
index c98dea4d1a77c..e8fdc69c18bbe 100644
--- a/lib/SIL/Verifier/MemoryLifetimeVerifier.cpp
+++ b/lib/SIL/Verifier/MemoryLifetimeVerifier.cpp
@@ -417,7 +417,7 @@ void MemoryLifetimeVerifier::initDataflowInBlock(SILBasicBlock *block,
         if (elt->getType().isAddress())
           killBits(state, elt);
       }
-      genBits(state, taci->getDestValue());
+      genBits(state, taci->getDest());
       break;
     }
     case SILInstructionKind::DestroyAddrInst:
diff --git a/lib/SIL/Verifier/SILVerifier.cpp b/lib/SIL/Verifier/SILVerifier.cpp
index 4c8f434fb87f1..51b0317674d6c 100644
--- a/lib/SIL/Verifier/SILVerifier.cpp
+++ b/lib/SIL/Verifier/SILVerifier.cpp
@@ -3290,6 +3290,22 @@ class SILVerifier : public SILVerifierBase<SILVerifier> {
     }
   }
 
+  void checkTupleAddrConstructorInst(TupleAddrConstructorInst *taci) {
+    require(taci->getNumElements() > 0,
+            "Cannot be applied to tuples that do not contain any real "
+            "elements. E.g.: ((), ())");
+    for (auto elt : taci->getElements()) {
+      // We cannot have any elements that contain only tuple elements. This is
+      // due to our exploded representation. This means when specializing,
+      // cloners must eliminate these parameters.
+      bool hasNonTuple = false;
+      elt->getType().getASTType().visit([&](CanType ty) {
+        hasNonTuple |= !ty->is<TupleType>();
+      });
+      require(hasNonTuple, "Element only consists of tuples");
+    }
+  }
+
   // Is a SIL type a potential lowering of a formal type?
bool isLoweringOf(SILType loweredType, CanType formalType) { return loweredType.isLoweringOf(F.getTypeExpansionContext(), F.getModule(), diff --git a/lib/SILGen/RValue.cpp b/lib/SILGen/RValue.cpp index 4a26b7aeb3e39..caa003c67f0e0 100644 --- a/lib/SILGen/RValue.cpp +++ b/lib/SILGen/RValue.cpp @@ -558,36 +558,33 @@ void RValue::copyInto(SILGenFunction &SGF, SILLocation loc, copyOrInitValuesInto(I, elts, type, loc, SGF); } -static void assignRecursive(SILGenFunction &SGF, SILLocation loc, - CanType type, ArrayRef &srcValues, - SILValue destAddr) { - // Recurse into tuples. +void RValue::assignInto(SILGenFunction &SGF, SILLocation loc, + SILValue destAddr) && { + assert(isComplete() && "rvalue is not complete"); + assert(isPlusOneOrTrivial(SGF) && "Can not assign borrowed RValues"); + ArrayRef srcMvValues = values; + + SWIFT_DEFER { assert(srcMvValues.empty() && "didn't claim all elements!"); }; + + // If we do not have a tuple, just bail early. auto srcTupleType = dyn_cast(type); - if (srcTupleType && !srcTupleType.containsPackExpansionType()) { - assert(destAddr->getType().castTo()->getNumElements() - == srcTupleType->getNumElements()); - for (auto eltIndex : indices(srcTupleType.getElementTypes())) { - auto eltDestAddr = SGF.B.createTupleElementAddr(loc, destAddr, eltIndex); - assignRecursive(SGF, loc, srcTupleType.getElementType(eltIndex), - srcValues, eltDestAddr); - } + if (!srcTupleType || srcTupleType.containsPackExpansionType()) { + // Otherwise, pull the front value off the list. + auto srcValue = srcMvValues.front(); + srcMvValues = srcMvValues.slice(1); + srcValue.assignInto(SGF, loc, destAddr); return; } - // Otherwise, pull the front value off the list. - auto srcValue = srcValues.front(); - srcValues = srcValues.slice(1); + assert(destAddr->getType().castTo()->getNumElements() == + srcTupleType->getNumElements()); - srcValue.assignInto(SGF, loc, destAddr); -} - -void RValue::assignInto(SILGenFunction &SGF, SILLocation loc, - SILValue destAddr) && { - assert(isComplete() && "rvalue is not complete"); - assert(isPlusOneOrTrivial(SGF) && "Can not assign borrowed RValues"); - ArrayRef srcValues = values; - assignRecursive(SGF, loc, type, srcValues, destAddr); - assert(srcValues.empty() && "didn't claim all elements!"); + // If we do have any srcMvValues, then emit a TupleAddrConstructor. If we do + // not have any, then our tuple must consist only of empty tuples. 
+ if (srcMvValues.size()) + SGF.B.createTupleAddrConstructor(loc, destAddr, srcMvValues, + IsNotInitialization); + srcMvValues = ArrayRef(); } ManagedValue RValue::getAsSingleValue(SILGenFunction &SGF, SILLocation loc) && { diff --git a/lib/SILGen/SILGenBuilder.h b/lib/SILGen/SILGenBuilder.h index 0c9d4288bccc8..f4853e22b8d2a 100644 --- a/lib/SILGen/SILGenBuilder.h +++ b/lib/SILGen/SILGenBuilder.h @@ -505,6 +505,19 @@ class SILGenBuilder : public SILBuilder { void createEndLifetime(SILLocation loc, ManagedValue selfValue) { createEndLifetime(loc, selfValue.forward(SGF)); } + + using SILBuilder::createTupleAddrConstructor; + + void createTupleAddrConstructor(SILLocation loc, SILValue destAddr, + ArrayRef elements, + IsInitialization_t isInitOfDest) { + SmallVector values; + for (auto mv : elements) { + values.push_back(mv.forward(SGF)); + } + + createTupleAddrConstructor(loc, destAddr, values, isInitOfDest); + } }; } // namespace Lowering diff --git a/lib/SILOptimizer/Mandatory/DIMemoryUseCollector.cpp b/lib/SILOptimizer/Mandatory/DIMemoryUseCollector.cpp index 0df39ecfd9945..e6a27e365a10c 100644 --- a/lib/SILOptimizer/Mandatory/DIMemoryUseCollector.cpp +++ b/lib/SILOptimizer/Mandatory/DIMemoryUseCollector.cpp @@ -908,6 +908,27 @@ void ElementUseCollector::collectUses(SILValue Pointer, unsigned BaseEltNo) { continue; } + if (auto *TACI = dyn_cast(User)) { + // If this is the source of the copy_addr, then this is a load. If it is + // the destination, then this is an unknown assignment. Note that we'll + // revisit this instruction and add it to Uses twice if it is both a load + // and store to the same aggregate. + DIUseKind Kind; + if (TACI->getDest() == Op->get()) { + if (InStructSubElement) + Kind = DIUseKind::PartialStore; + else if (TACI->isInitializationOfDest()) + Kind = DIUseKind::Initialization; + else + Kind = DIUseKind::InitOrAssign; + } else { + Kind = DIUseKind::Load; + } + + addElementUses(BaseEltNo, PointeeType, User, Kind); + continue; + } + if (auto *MAI = dyn_cast(User)) { // If this is the source of the copy_addr, then this is a load. If it is // the destination, then this is an unknown assignment. Note that we'll diff --git a/lib/SILOptimizer/Mandatory/DefiniteInitialization.cpp b/lib/SILOptimizer/Mandatory/DefiniteInitialization.cpp index 2ba2bfa64a50c..d918813ffd16b 100644 --- a/lib/SILOptimizer/Mandatory/DefiniteInitialization.cpp +++ b/lib/SILOptimizer/Mandatory/DefiniteInitialization.cpp @@ -2529,6 +2529,30 @@ void LifetimeChecker::updateInstructionForInitState(unsigned UseID) { return; } + if (auto *TACI = dyn_cast(Inst)) { + assert(!TACI->isInitializationOfDest() && + "should not modify copy_addr that already knows it is initialized"); + TACI->setIsInitializationOfDest(InitKind); + if (InitKind == IsInitialization) + setStaticInitAccess(TACI->getDest()); + + // If we had an initialization and had an assignable_but_not_consumable + // noncopyable type, convert it to be an initable_but_not_consumable so that + // we do not consume an uninitialized value. + if (InitKind == IsInitialization) { + if (auto *mmci = dyn_cast( + stripAccessMarkers(TACI->getDest()))) { + if (mmci->getCheckKind() == MarkUnresolvedNonCopyableValueInst:: + CheckKind::AssignableButNotConsumable) { + mmci->setCheckKind(MarkUnresolvedNonCopyableValueInst::CheckKind:: + InitableButNotConsumable); + } + } + } + + return; + } + // Ignore non-stores for SelfInits. 
assert(isa(Inst) && "Unknown store instruction!"); } diff --git a/lib/SILOptimizer/Mandatory/LowerTupleAddrConstructor.cpp b/lib/SILOptimizer/Mandatory/LowerTupleAddrConstructor.cpp index 533d23960721c..fb9a7b22ef284 100644 --- a/lib/SILOptimizer/Mandatory/LowerTupleAddrConstructor.cpp +++ b/lib/SILOptimizer/Mandatory/LowerTupleAddrConstructor.cpp @@ -40,7 +40,7 @@ class LowerTupleAddrConstructorTransform : public SILFunctionTransform { unsigned count = 0; visitExplodedTupleValue( - inst->getDestValue(), + inst->getDest(), [&](SILValue value, std::optional index) -> SILValue { if (!index) { SILValue elt = inst->getElement(count); diff --git a/lib/SILOptimizer/Mandatory/TransferNonSendable.cpp b/lib/SILOptimizer/Mandatory/TransferNonSendable.cpp index 281547952f27f..df1f94dc5ce33 100644 --- a/lib/SILOptimizer/Mandatory/TransferNonSendable.cpp +++ b/lib/SILOptimizer/Mandatory/TransferNonSendable.cpp @@ -860,9 +860,14 @@ class PartitionOpTranslator { "srcID and dstID are different?!"); } - void translateSILAssign(SILValue dest, SILValue src) { - return translateSILMultiAssign(TinyPtrVector(dest), - TinyPtrVector(src)); + template + void translateSILAssign(SILValue dest, Collection collection) { + return translateSILMultiAssign(TinyPtrVector(dest), collection); + } + + template <> + void translateSILAssign(SILValue dest, SILValue src) { + return translateSILAssign(dest, TinyPtrVector(src)); } void translateSILAssign(SILInstruction *inst) { @@ -877,13 +882,22 @@ class PartitionOpTranslator { TinyPtrVector()); } - void translateSILMerge(SILValue dest, SILValue src) { + template + void translateSILMerge(SILValue dest, Collection collection) { auto trackableDest = tryToTrackValue(dest); - auto trackableSrc = tryToTrackValue(src); - if (!trackableDest || !trackableSrc) + if (!trackableDest) return; - builder.addMerge(trackableDest->getRepresentative(), - trackableSrc->getRepresentative()); + for (SILValue elt : collection) { + if (auto trackableSrc = tryToTrackValue(elt)) { + builder.addMerge(trackableDest->getRepresentative(), + trackableSrc->getRepresentative()); + } + } + } + + template <> + void translateSILMerge(SILValue dest, SILValue src) { + return translateSILMerge(dest, TinyPtrVector(src)); } /// If tgt is known to be unaliased (computed thropugh a combination of @@ -914,6 +928,30 @@ class PartitionOpTranslator { // Stores to storage of non-Sendable type can be ignored. } + void translateSILTupleAddrConstructor(TupleAddrConstructorInst *inst) { + SILValue dest = inst->getDest(); + if (auto nonSendableTgt = tryToTrackValue(dest)) { + // In the following situations, we can perform an assign: + // + // 1. A store to unaliased storage. + // 2. A store that is to an entire value. + // + // DISCUSSION: If we have case 2, we need to merge the regions since we + // are not overwriting the entire region of the value. This does mean that + // we artificially include the previous region that was stored + // specifically in this projection... but that is better than + // miscompiling. For memory like this, we probably need to track it on a + // per field basis to allow for us to assign. + if (nonSendableTgt.value().isNoAlias() && !isProjectedFromAggregate(dest)) + return translateSILAssign(dest, inst->getElements()); + + // Stores to possibly aliased storage must be treated as merges. + return translateSILMerge(dest, inst->getElements()); + } + + // Stores to storage of non-Sendable type can be ignored. 
+ } + void translateSILRequire(SILValue val) { if (auto nonSendableVal = tryToTrackValue(val)) return builder.addRequire(nonSendableVal->getRepresentative()); @@ -1090,6 +1128,10 @@ class PartitionOpTranslator { case SILInstructionKind::StoreWeakInst: return translateSILStore(inst->getOperand(1), inst->getOperand(0)); + case SILInstructionKind::TupleAddrConstructorInst: + return translateSILTupleAddrConstructor( + cast(inst)); + // Applies are handled specially since we need to merge their results. case SILInstructionKind::ApplyInst: case SILInstructionKind::BeginApplyInst: diff --git a/lib/Serialization/SerializeSIL.cpp b/lib/Serialization/SerializeSIL.cpp index 81ddf1c665038..324e49d7b28f3 100644 --- a/lib/Serialization/SerializeSIL.cpp +++ b/lib/Serialization/SerializeSIL.cpp @@ -2345,7 +2345,7 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) { result |= value->getType().isObject() ? 0 : 0x80000000; return result; }; - ListOfValues.push_back(getValue(TI->getDestValue())); + ListOfValues.push_back(getValue(TI->getDest())); for (auto Elt : TI->getElements()) { ListOfValues.push_back(getValue(Elt)); } @@ -2354,7 +2354,7 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) { options |= bool(TI->isInitializationOfDest()); SILOneTypeValuesCategoriesLayout::emitRecord( Out, ScratchRecord, abbrCode, (unsigned)SI.getKind(), - S.addTypeRef(TI->getDestValue()->getType().getRawASTType()), + S.addTypeRef(TI->getDest()->getType().getRawASTType()), (unsigned)SILValueCategory::Address, options, ListOfValues); break; } diff --git a/test/Concurrency/sendnonsendable_basic.swift b/test/Concurrency/sendnonsendable_basic.swift index 25e75d2d0af7d..6c15655ddadc5 100644 --- a/test/Concurrency/sendnonsendable_basic.swift +++ b/test/Concurrency/sendnonsendable_basic.swift @@ -596,24 +596,24 @@ func multipleFieldTupleMergeTest2() async { var box = (NonSendableKlass(), NonSendableKlass()) // This transfers the entire region. - await transferToMain(box.0) // expected-tns-warning {{passing argument of non-sendable type 'NonSendableKlass' from nonisolated context to main actor-isolated context at this call site could yield a race with accesses later in this function}} + await transferToMain(box.0) // expected-complete-warning @-1 {{passing argument of non-sendable type 'NonSendableKlass' into main actor-isolated context may introduce data races}} let box2 = (NonSendableKlass(), NonSendableKlass()) // But if we assign over box completely, we can use it again. - box = box2 // expected-tns-note {{access here could race}} + box = box2 - useValue(box.0) // expected-tns-note {{access here could race}} - useValue(box.1) // expected-tns-note {{access here could race}} - useValue(box) // expected-tns-note {{access here could race}} + useValue(box.0) + useValue(box.1) + useValue(box) - await transferToMain(box.1) // expected-tns-note {{access here could race}} + await transferToMain(box.1) // expected-complete-warning @-1 {{passing argument of non-sendable type 'NonSendableKlass' into main actor-isolated context may introduce data races}} // But if we assign over box completely, we can use it again. 
- box = (NonSendableKlass(), NonSendableKlass()) // expected-tns-note {{access here could race}} + box = (NonSendableKlass(), NonSendableKlass()) - useValue(box.0) // expected-tns-note {{access here could race}} - useValue(box.1) // expected-tns-note {{access here could race}} - useValue(box) // expected-tns-note {{access here could race}} + useValue(box.0) + useValue(box.1) + useValue(box) } diff --git a/test/SILGen/builtins.swift b/test/SILGen/builtins.swift index c9ef47374e0d0..3cae1e9b3a4f6 100644 --- a/test/SILGen/builtins.swift +++ b/test/SILGen/builtins.swift @@ -154,10 +154,7 @@ func assign_tuple(_ x: (Builtin.Int64, Builtin.NativeObject), var x = x var y = y // CHECK: [[ADDR:%.*]] = pointer_to_address {{%.*}} to [strict] $*(Builtin.Int64, Builtin.NativeObject) - // CHECK: [[T0:%.*]] = tuple_element_addr [[ADDR]] - // CHECK: assign {{%.*}} to [[T0]] - // CHECK: [[T0:%.*]] = tuple_element_addr [[ADDR]] - // CHECK: assign {{%.*}} to [[T0]] + // CHECK: tuple_addr_constructor [assign] [[ADDR]] : $*(Builtin.Int64, Builtin.NativeObject) with // CHECK: destroy_value Builtin.assign(x, y) } diff --git a/test/SILGen/tuples.swift b/test/SILGen/tuples.swift index 4623190946741..e3d416d2a569a 100644 --- a/test/SILGen/tuples.swift +++ b/test/SILGen/tuples.swift @@ -57,10 +57,7 @@ func testShuffleOpaque() { // CHECK-NEXT: [[T0:%.*]] = function_ref @$s6tuples7make_xySi1x_AA1P_p1ytyF // CHECK-NEXT: [[T1:%.*]] = apply [[T0]]([[TMP]]) // CHECK-NEXT: [[WRITE:%.*]] = begin_access [modify] [unknown] [[PBPAIR]] : $*(y: any P, x: Int) - // CHECK-NEXT: [[PAIR_0:%.*]] = tuple_element_addr [[WRITE]] : $*(y: any P, x: Int), 0 - // CHECK-NEXT: copy_addr [take] [[TMP]] to [[PAIR_0]] - // CHECK-NEXT: [[PAIR_1:%.*]] = tuple_element_addr [[WRITE]] : $*(y: any P, x: Int), 1 - // CHECK-NEXT: assign [[T1]] to [[PAIR_1]] + // CHECK-NEXT: tuple_addr_constructor [assign] [[WRITE]] : $*(y: any P, x: Int) with ([[TMP]] : $*any P, [[T1]] : $Int) // CHECK-NEXT: end_access [[WRITE]] : $*(y: any P, x: Int) // CHECK-NEXT: dealloc_stack [[TMP]] pair = make_xy() @@ -110,10 +107,7 @@ func testShuffleTuple() { // CHECK-NEXT: [[T0:%.*]] = function_ref @$s6tuples6make_pAA1P_pyF // CHECK-NEXT: apply [[T0]]([[TEMP]]) // CHECK-NEXT: [[WRITE:%.*]] = begin_access [modify] [unknown] [[PBPAIR]] : $*(y: any P, x: Int) - // CHECK-NEXT: [[PAIR_0:%.*]] = tuple_element_addr [[WRITE]] : $*(y: any P, x: Int), 0 - // CHECK-NEXT: copy_addr [take] [[TEMP]] to [[PAIR_0]] - // CHECK-NEXT: [[PAIR_1:%.*]] = tuple_element_addr [[WRITE]] : $*(y: any P, x: Int), 1 - // CHECK-NEXT: assign [[INT]] to [[PAIR_1]] + // CHECK-NEXT: tuple_addr_constructor [assign] [[WRITE]] : $*(y: any P, x: Int) with ([[TEMP]] : $*any P, [[INT]] : $Int) // CHECK-NEXT: end_access [[WRITE]] : $*(y: any P, x: Int) // CHECK-NEXT: dealloc_stack [[TEMP]] pair = (x: make_int(), y: make_p()) diff --git a/test/SILOptimizer/definite_init_tuple.sil b/test/SILOptimizer/definite_init_tuple.sil index a364aefda5f34..08e2271f59437 100644 --- a/test/SILOptimizer/definite_init_tuple.sil +++ b/test/SILOptimizer/definite_init_tuple.sil @@ -1,4 +1,4 @@ -// RUN: %target-sil-opt -enable-sil-verify-all %s -definite-init -raw-sil-inst-lowering +// RUN: %target-sil-opt -enable-sil-verify-all %s -definite-init -raw-sil-inst-lowering -verify import Builtin import Swift @@ -24,4 +24,68 @@ bb0(%0 : $*S, %1 : $@thin S.Type): destroy_value %3 : $<τ_0_0> { var S<τ_0_0> } %12 = tuple () return %12 : $() +} + +struct MyText { + let label: String + // expected-note @-1 {{'self.label' not initialized}} + let value: 
V + // expected-note @-1 {{'self.value.0' not initialized}} + // expected-note @-2 {{'self.value.1' not initialized}} +} + +sil [ossa] @tuple_addr_constructor_1 : $@convention(method) (@owned String, Int, @thin MyText<(Int, Int)>.Type) -> @owned MyText<(Int, Int)> { +bb0(%0 : @owned $String, %1 : $Int, %2 : $@thin MyText<(Int, Int)>.Type): + %3 = alloc_stack $MyText<(Int, Int)>, var, name "self", implicit + %4 = mark_uninitialized [rootself] %3 : $*MyText<(Int, Int)> + %7 = copy_value %0 : $String + %8 = begin_access [modify] [static] %4 : $*MyText<(Int, Int)> + %9 = struct_element_addr %8 : $*MyText<(Int, Int)>, #MyText.label + assign %7 to %9 : $*String + end_access %8 : $*MyText<(Int, Int)> + %12 = begin_access [modify] [static] %4 : $*MyText<(Int, Int)> + %13 = struct_element_addr %12 : $*MyText<(Int, Int)>, #MyText.value + tuple_addr_constructor [assign] %13 : $*(Int, Int) with (%1 : $Int, %1 : $Int) + end_access %12 : $*MyText<(Int, Int)> + %16 = load [copy] %4 : $*MyText<(Int, Int)> + destroy_value %0 : $String + destroy_addr %4 : $*MyText<(Int, Int)> + dealloc_stack %3 : $*MyText<(Int, Int)> + return %16 : $MyText<(Int, Int)> +} + +sil [ossa] @tuple_addr_constructor_2 : $@convention(method) (@owned String, Int, @thin MyText<(Int, Int)>.Type) -> @owned MyText<(Int, Int)> { +bb0(%0 : @owned $String, %1 : $Int, %2 : $@thin MyText<(Int, Int)>.Type): + %3 = alloc_stack $MyText<(Int, Int)>, var, name "self", implicit + %4 = mark_uninitialized [rootself] %3 : $*MyText<(Int, Int)> + %7 = copy_value %0 : $String + %8 = begin_access [modify] [static] %4 : $*MyText<(Int, Int)> + %9 = struct_element_addr %8 : $*MyText<(Int, Int)>, #MyText.label + destroy_value %7 : $String + end_access %8 : $*MyText<(Int, Int)> + %12 = begin_access [modify] [static] %4 : $*MyText<(Int, Int)> + %13 = struct_element_addr %12 : $*MyText<(Int, Int)>, #MyText.value + tuple_addr_constructor [assign] %13 : $*(Int, Int) with (%1 : $Int, %1 : $Int) + end_access %12 : $*MyText<(Int, Int)> + %16 = load [copy] %4 : $*MyText<(Int, Int)> // expected-error {{return from initializer without initializing all stored properties}} + destroy_value %0 : $String + destroy_addr %4 : $*MyText<(Int, Int)> + dealloc_stack %3 : $*MyText<(Int, Int)> + return %16 : $MyText<(Int, Int)> +} + +sil [ossa] @tuple_addr_constructor_3 : $@convention(method) (@owned String, Int, @thin MyText<(Int, Int)>.Type) -> @owned MyText<(Int, Int)> { +bb0(%0 : @owned $String, %1 : $Int, %2 : $@thin MyText<(Int, Int)>.Type): + %3 = alloc_stack $MyText<(Int, Int)>, var, name "self", implicit + %4 = mark_uninitialized [rootself] %3 : $*MyText<(Int, Int)> + %7 = copy_value %0 : $String + %8 = begin_access [modify] [static] %4 : $*MyText<(Int, Int)> + %9 = struct_element_addr %8 : $*MyText<(Int, Int)>, #MyText.label + assign %7 to %9 : $*String + end_access %8 : $*MyText<(Int, Int)> + %16 = load [copy] %4 : $*MyText<(Int, Int)> // expected-error {{return from initializer without initializing all stored properties}} + destroy_value %0 : $String + destroy_addr %4 : $*MyText<(Int, Int)> + dealloc_stack %3 : $*MyText<(Int, Int)> + return %16 : $MyText<(Int, Int)> } \ No newline at end of file diff --git a/test/SILOptimizer/init_accessors.swift b/test/SILOptimizer/init_accessors.swift index 505fad79b14f8..47a1959bbc7e9 100644 --- a/test/SILOptimizer/init_accessors.swift +++ b/test/SILOptimizer/init_accessors.swift @@ -28,10 +28,7 @@ struct TestInit { // CHECK-NEXT: end_access [[Y_ACCESS]] : $*Int // // CHECK-NEXT: [[FULL_ACCESS:%.*]] = begin_access [modify] [static] 
[[FULL_REF]] : $*(Int, Int) - // CHECK-NEXT: [[FULL_ELT_0:%.*]] = tuple_element_addr [[FULL_ACCESS]] : $*(Int, Int), 0 - // CHECK-NEXT: store [[X_VAL]] to [trivial] [[FULL_ELT_0]] : $*Int - // CHECK-NEXT: [[FULL_ELT_1:%.*]] = tuple_element_addr [[FULL_ACCESS]] : $*(Int, Int), 1 - // CHECK-NEXT: store [[Y_VAL]] to [trivial] [[FULL_ELT_1]] : $*Int + // CHECK-NEXT: tuple_addr_constructor [init] [[FULL_ACCESS]] : $*(Int, Int) with ([[X_VAL]] : $Int, [[Y_VAL]] : $Int) // CHECK-NEXT: end_access [[FULL_ACCESS]] : $*(Int, Int) @storageRestrictions(initializes: y, full, accesses: x) init(initialValue) { @@ -237,10 +234,7 @@ class TestClass { // CHECK: ([[X_VAL:%.*]], [[Y_VAL:%.*]]) = destructure_tuple {{.*}} : $(Int, (Int, Array)) // CHECK: ([[Y_VAL_0:%.*]], [[Y_VAL_1:%.*]]) = destructure_tuple {{.*}} : $(Int, Array) // CHECK: [[Y_ACCESS:%.*]] = begin_access [modify] [static] [[Y_REF]] : $*(Int, Array) - // CHECK-NEXT: [[Y_ELT_0:%.*]] = tuple_element_addr [[Y_ACCESS]] : $*(Int, Array), 0 - // CHECK-NEXT: store [[Y_VAL_0]] to [trivial] [[Y_ELT_0]] : $*Int - // CHECK-NEXT: [[Y_ELT_1:%.*]] = tuple_element_addr [[Y_ACCESS]] : $*(Int, Array), 1 - // CHECK-NEXT: store [[Y_VAL_1]] to [init] [[Y_ELT_1]] : $*Array + // CHECK-NEXT: tuple_addr_constructor [init] [[Y_ACCESS]] : $*(Int, Array) with ([[Y_VAL_0]] : $Int, [[Y_VAL_1]] : // CHECK-NEXT: end_access [[Y_ACCESS]] : $*(Int, Array) @storageRestrictions(initializes: x, y) init(initialValue) { @@ -319,6 +313,43 @@ struct TestGeneric { } } +struct TestGenericTuple { + var a: T + var b: (T, U) + + // CHECK-LABEL: sil private [ossa] @$s14init_accessors16TestGenericTupleV4datax_x_q_ttvi : $@convention(thin) (@in T, @in T, @in U) -> (@out T, @out (T, U)) { + // + // CHECK: bb0([[A_REF:%.*]] : $*T, [[B_REF:%.*]] : $*(T, U), [[A_VALUE:%.*]] : $*T, [[B_VALUE:%.*]] : $*T, [[C_VALUE:%.*]] : $*U): + // + // CHECK: [[INIT_VALUE_1:%.*]] = alloc_stack $(T, U), let, name "initialValue" + // CHECK-NEXT: [[INIT_VALUE_1_0:%.*]] = tuple_element_addr [[INIT_VALUE_1]] : $*(T, U), 0 + // CHECK-NEXT: copy_addr [take] [[B_VALUE]] to [init] [[INIT_VALUE_1_0]] + // CHECK-NEXT: [[INIT_VALUE_1_1:%.*]] = tuple_element_addr [[INIT_VALUE_1]] : $*(T, U), 1 + // CHECK-NEXT: copy_addr [take] [[C_VALUE]] to [init] [[INIT_VALUE_1_1]] + + // CHECK-NEXT: [[INIT_VALUE_2:%.*]] = alloc_stack [lexical] $(T, (T, U)) + // CHECK-NEXT: [[INIT_VALUE_2_0:%.*]] = tuple_element_addr [[INIT_VALUE_2]] : $*(T, (T, U)), 0 + // CHECK-NEXT: copy_addr [take] [[A_VALUE]] to [init] [[INIT_VALUE_2_0]] + // CHECK-NEXT: [[INIT_VALUE_2_1:%.*]] = tuple_element_addr [[INIT_VALUE_2]] : $*(T, (T, U)), 1 + // CHECK-NEXT: copy_addr [take] [[INIT_VALUE_1]] to [init] [[INIT_VALUE_2_1]] + + var data: (T, (T, U)) { + @storageRestrictions(initializes: a, b) + init(initialValue) { + a = initialValue.0 + b = initialValue.1 + } + + get { (a, b) } + set { } + } + + init(a: T, b: T, c: U) { + self.data = (a, (b, c)) + self.data = (b, (a, c)) + } +} + func test_local_with_memberwise() { class MyValue {} diff --git a/test/SILOptimizer/specialize_ossa.sil b/test/SILOptimizer/specialize_ossa.sil index ae2c90f957ec8..ab781da1763e7 100644 --- a/test/SILOptimizer/specialize_ossa.sil +++ b/test/SILOptimizer/specialize_ossa.sil @@ -1483,3 +1483,131 @@ bb0(%0 : @guaranteed $GenericKlass): return %3 : $() } +// Specialize tuple_addr_constructor + +// In this case we specialized using all non-tuple types +// CHECK-LABEL: sil shared [ossa] @$s29tuple_addr_constructor_calleeBo_4main4BaseCBoADTg5 : $@convention(thin) (@owned Builtin.NativeObject, 
@owned Base, @owned Builtin.NativeObject, @owned Base) -> () { +// CHECK: tuple_addr_constructor [init] {{%.*}} : $*(Builtin.NativeObject, Base, Builtin.NativeObject, Base) with ({{%.*}} : $*Builtin.NativeObject, {{%.*}} : $*Base, {{%.*}} : $Builtin.NativeObject, {{%.*}} : $Base) +// CHECK: } // end sil function '$s29tuple_addr_constructor_calleeBo_4main4BaseCBoADTg5' + +// In this case, we specialized where a single parameter was an empty +// tuple... make sure that we eliminated it as a parameter to tuple_addr_constructor +// CHECK-LABEL: sil shared [ossa] @$s29tuple_addr_constructor_calleeBo_4main4BaseCytADTg5 : $@convention(thin) (@owned Builtin.NativeObject, @owned Base, @owned (), @owned Base) -> () { +// CHECK: tuple_addr_constructor [init] {{%.*}} : $*(Builtin.NativeObject, Base, (), Base) with ({{%.*}} : $*Builtin.NativeObject, {{%.*}} : $*Base, {{%.*}} : $Base) +// CHECK: } // end sil function '$s29tuple_addr_constructor_calleeBo_4main4BaseCytADTg5' + +// Make sure we handle this with multiple level tuples +// CHECK-LABEL: sil shared [ossa] @$s29tuple_addr_constructor_calleeBo_4main4BaseCyt_yt_ytttADTg5 : $@convention(thin) (@owned Builtin.NativeObject, @owned Base, @owned ((), ((), ())), @owned Base) -> () { +// CHECK: tuple_addr_constructor [init] {{%.*}} : $*(Builtin.NativeObject, Base, ((), ((), ())), Base) with ({{%.*}} : $*Builtin.NativeObject, {{%.*}} : $*Base, {{%.*}} : $Base) +// CHECK: } // end sil function '$s29tuple_addr_constructor_calleeBo_4main4BaseCyt_yt_ytttADTg5' + +// Make sure we keep in the parameter if we have one leaf node. +// CHECK-LABEL: sil shared [ossa] @$s29tuple_addr_constructor_calleeBo_4main4BaseCyt_Bo_ytttADTg5 : $@convention(thin) (@owned Builtin.NativeObject, @owned Base, @owned ((), (Builtin.NativeObject, ())), @owned Base) -> () { +// CHECK: tuple_addr_constructor [init] {{%.*}} : $*(Builtin.NativeObject, Base, ((), (Builtin.NativeObject, ())), Base) with ({{%.*}} : $*Builtin.NativeObject, {{%.*}} : $*Base, {{%.*}} : $((), (Builtin.NativeObject, ())), {{%.*}} : $Base) +// CHECK: } // end sil function '$s29tuple_addr_constructor_calleeBo_4main4BaseCyt_Bo_ytttADTg5' + +// CHECK-LABEL: sil shared [ossa] @$s29tuple_addr_constructor_calleeyt_ytyt_yt_ytttytTg5 : $@convention(thin) ((), (), @owned ((), ((), ())), @owned ()) -> () { +// CHECK-NOT: tuple_addr_constructor +// CHECK: } // end sil function '$s29tuple_addr_constructor_calleeyt_ytyt_yt_ytttytTg5' + +sil [ossa] @tuple_addr_constructor_callee : $@convention(thin) (@in T1, @in T2, @owned T3, @owned T4) -> () { +bb0(%arg0 : $*T1, %arg1 : $*T2, %arg2 : @owned $T3, %arg3 : @owned $T4): + %tup = alloc_stack $(T1, T2, T3, T4) + tuple_addr_constructor [init] %tup : $*(T1, T2, T3, T4) with (%arg0 : $*T1, %arg1 : $*T2, %arg2 : $T3, %arg3 : $T4) + destroy_addr %tup : $*(T1, T2, T3, T4) + dealloc_stack %tup : $*(T1, T2, T3, T4) + %2 = tuple () + return %2 : $() +} + +sil [ossa] @specializeTupleAddrConstructorSimple : $@convention(thin) (@guaranteed Builtin.NativeObject, @guaranteed Base) -> () { +bb0(%0 : @guaranteed $Builtin.NativeObject, %1 : @guaranteed $Base): + %2 = function_ref @tuple_addr_constructor_callee : $@convention(thin) (@in T1, @in T2, @owned T3, @owned T4) -> () + %3 = alloc_stack $*Builtin.NativeObject + %4 = alloc_stack $*Base + %0a = copy_value %0 : $Builtin.NativeObject + %0b = copy_value %0 : $Builtin.NativeObject + store %0b to [init] %3 : $*Builtin.NativeObject + %1a = copy_value %1 : $Base + %1b = copy_value %1 : $Base + store %1b to [init] %4 : $*Base + apply %2(%3, %4, %0a, 
%1a) : $@convention(thin) (@in T1, @in T2, @owned T3, @owned T4) -> () + dealloc_stack %4 : $*Base + dealloc_stack %3 : $*Builtin.NativeObject + %9999 = tuple () + return %9999 : $() +} + +sil [ossa] @specializeTupleAddrConstructorEliminateTuple : $@convention(thin) (@guaranteed Builtin.NativeObject, @guaranteed Base) -> () { +bb0(%0 : @guaranteed $Builtin.NativeObject, %1 : @guaranteed $Base): + %2 = function_ref @tuple_addr_constructor_callee : $@convention(thin) (@in T1, @in T2, @owned T3, @owned T4) -> () + %3 = alloc_stack $*Builtin.NativeObject + %4 = alloc_stack $*Base + %0a = tuple () + %0b = copy_value %0 : $Builtin.NativeObject + store %0b to [init] %3 : $*Builtin.NativeObject + %1a = copy_value %1 : $Base + %1b = copy_value %1 : $Base + store %1b to [init] %4 : $*Base + apply %2(%3, %4, %0a, %1a) : $@convention(thin) (@in T1, @in T2, @owned T3, @owned T4) -> () + dealloc_stack %4 : $*Base + dealloc_stack %3 : $*Builtin.NativeObject + %9999 = tuple () + return %9999 : $() +} + +sil [ossa] @specializeTupleAddrConstructorEliminateMultiLevelTuple : $@convention(thin) (@guaranteed Builtin.NativeObject, @guaranteed Base) -> () { +bb0(%0 : @guaranteed $Builtin.NativeObject, %1 : @guaranteed $Base): + %2 = function_ref @tuple_addr_constructor_callee : $@convention(thin) (@in T1, @in T2, @owned T3, @owned T4) -> () + %3 = alloc_stack $*Builtin.NativeObject + %4 = alloc_stack $*Base + %0b = copy_value %0 : $Builtin.NativeObject + store %0b to [init] %3 : $*Builtin.NativeObject + %1a = copy_value %1 : $Base + %1b = copy_value %1 : $Base + store %1b to [init] %4 : $*Base + %tup = tuple () + %tup2 = tuple (%tup : $(), %tup : $()) + %tup3 = tuple (%tup : $(), %tup2 : $((), ())) + apply %2(%3, %4, %tup3, %1a) : $@convention(thin) (@in T1, @in T2, @owned T3, @owned T4) -> () + dealloc_stack %4 : $*Base + dealloc_stack %3 : $*Builtin.NativeObject + %9999 = tuple () + return %9999 : $() +} + +sil [ossa] @specializeTupleAddrConstructorEliminateNoEliminateIfOneElt : $@convention(thin) (@guaranteed Builtin.NativeObject, @guaranteed Base) -> () { +bb0(%0 : @guaranteed $Builtin.NativeObject, %1 : @guaranteed $Base): + %2 = function_ref @tuple_addr_constructor_callee : $@convention(thin) (@in T1, @in T2, @owned T3, @owned T4) -> () + %3 = alloc_stack $*Builtin.NativeObject + %4 = alloc_stack $*Base + %0a = copy_value %0 : $Builtin.NativeObject + %0b = copy_value %0 : $Builtin.NativeObject + store %0b to [init] %3 : $*Builtin.NativeObject + %1a = copy_value %1 : $Base + %1b = copy_value %1 : $Base + store %1b to [init] %4 : $*Base + %tup = tuple () + %tup2 = tuple (%0a : $Builtin.NativeObject, %tup : $()) + %tup3 = tuple (%tup : $(), %tup2 : $(Builtin.NativeObject, ())) + apply %2(%3, %4, %tup3, %1a) : $@convention(thin) (@in T1, @in T2, @owned T3, @owned T4) -> () + dealloc_stack %4 : $*Base + dealloc_stack %3 : $*Builtin.NativeObject + %9999 = tuple () + return %9999 : $() +} + +sil [ossa] @specializeTupleAddrConstructorNoEmitIfAllTuple : $@convention(thin) () -> () { +bb0: + %2 = function_ref @tuple_addr_constructor_callee : $@convention(thin) (@in T1, @in T2, @owned T3, @owned T4) -> () + %3 = alloc_stack $*() + %4 = alloc_stack $*() + %tup = tuple () + %tup2 = tuple (%tup : $(), %tup : $()) + %tup3 = tuple (%tup : $(), %tup2 : $((), ())) + apply %2<(), (), ((), ((), ())), ()>(%3, %4, %tup3, %tup) : $@convention(thin) (@in T1, @in T2, @owned T3, @owned T4) -> () + dealloc_stack %4 : $*() + dealloc_stack %3 : $*() + %9999 = tuple () + return %9999 : $() +}