diff --git a/SwiftCompilerSources/Sources/AST/Type.swift b/SwiftCompilerSources/Sources/AST/Type.swift index d7ae1d3c7f5de..1ca939e0b271d 100644 --- a/SwiftCompilerSources/Sources/AST/Type.swift +++ b/SwiftCompilerSources/Sources/AST/Type.swift @@ -61,6 +61,8 @@ public struct Type: TypeProperties, CustomStringConvertible, NoReflectionChildre public var builtinVectorElementType: Type { Type(bridged: bridged.getBuiltinVectorElementType()) } + public var builtinFixedArrayElementType: Type { Type(bridged: bridged.getBuiltinFixedArrayElementType()) } + public func subst(with substitutionMap: SubstitutionMap) -> Type { return Type(bridged: bridged.subst(substitutionMap.bridged)) } @@ -81,6 +83,8 @@ public struct CanonicalType: TypeProperties, CustomStringConvertible, NoReflecti public var builtinVectorElementType: CanonicalType { rawType.builtinVectorElementType.canonical } + public var builtinFixedArrayElementType: CanonicalType { rawType.builtinFixedArrayElementType.canonical } + public func subst(with substitutionMap: SubstitutionMap) -> CanonicalType { return rawType.subst(with: substitutionMap).canonical } @@ -106,6 +110,7 @@ extension TypeProperties { public var isBuiltinFloat: Bool { rawType.bridged.isBuiltinFloat() } public var isBuiltinVector: Bool { rawType.bridged.isBuiltinVector() } + public var isBuiltinFixedArray: Bool { rawType.bridged.isBuiltinFixedArray() } public var isClass: Bool { if let nominal = nominal, nominal is ClassDecl { diff --git a/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/InitializeStaticGlobals.swift b/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/InitializeStaticGlobals.swift index 7dd89bffac9c1..46ab48e99f922 100644 --- a/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/InitializeStaticGlobals.swift +++ b/SwiftCompilerSources/Sources/Optimizer/FunctionPasses/InitializeStaticGlobals.swift @@ -175,7 +175,7 @@ private func lowerInlineArray(array: InlineArray, _ context: FunctionPassContext /// private func getInlineArrayInfo(of allocStack: AllocStackInst) -> InlineArray? { var arrayLoad: LoadInst? = nil - var elementStorage: UncheckedAddrCastInst? = nil + var elementStorage: VectorBaseAddrInst? = nil for use in allocStack.uses { switch use.instruction { @@ -188,11 +188,11 @@ private func getInlineArrayInfo(of allocStack: AllocStackInst) -> InlineArray? 
{ arrayLoad = load case is DeallocStackInst: break - case let addrCastToElement as UncheckedAddrCastInst: + case let baseAddr as VectorBaseAddrInst: if elementStorage != nil { return nil } - elementStorage = addrCastToElement + elementStorage = baseAddr default: return nil } diff --git a/SwiftCompilerSources/Sources/Optimizer/InstructionSimplification/CMakeLists.txt b/SwiftCompilerSources/Sources/Optimizer/InstructionSimplification/CMakeLists.txt index 043a62589f37c..eb7a3d22d051e 100644 --- a/SwiftCompilerSources/Sources/Optimizer/InstructionSimplification/CMakeLists.txt +++ b/SwiftCompilerSources/Sources/Optimizer/InstructionSimplification/CMakeLists.txt @@ -39,6 +39,7 @@ swift_compiler_sources(Optimizer SimplifySwitchEnum.swift SimplifyTuple.swift SimplifyTupleExtract.swift + SimplifyUncheckedAddrCast.swift SimplifyUncheckedEnumData.swift SimplifyValueToBridgeObject.swift SimplifyWitnessMethod.swift) diff --git a/SwiftCompilerSources/Sources/Optimizer/InstructionSimplification/SimplifyUncheckedAddrCast.swift b/SwiftCompilerSources/Sources/Optimizer/InstructionSimplification/SimplifyUncheckedAddrCast.swift new file mode 100644 index 0000000000000..d8455d555aef8 --- /dev/null +++ b/SwiftCompilerSources/Sources/Optimizer/InstructionSimplification/SimplifyUncheckedAddrCast.swift @@ -0,0 +1,81 @@ +//===--- SimplifyUncheckedAddrCast.swift ----------------------------------===// +// +// This source file is part of the Swift.org open source project +// +// Copyright (c) 2014 - 2025 Apple Inc. and the Swift project authors +// Licensed under Apache License v2.0 with Runtime Library Exception +// +// See https://swift.org/LICENSE.txt for license information +// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors +// +//===----------------------------------------------------------------------===// + +import SIL + +extension UncheckedAddrCastInst : OnoneSimplifiable, SILCombineSimplifiable { + + func simplify(_ context: SimplifyContext) { + // ``` + // %1 = unchecked_addr_cast %0 : $*T to $*T + // ``` + // -> + // replace %1 with %0 + // + if optimizeSameTypeCast(context) { + return + } + + // ``` + // %1 = unchecked_addr_cast %0 : $*U to $*V + // %2 = unchecked_addr_cast %1 : $*V to $*T + // ``` + // -> + // ``` + // %2 = unchecked_addr_cast %0: $*U to $*T + // ``` + if optimizeDoubleCast(context) { + return + } + + // ``` + // %1 = unchecked_addr_cast %0 : $*Builtin.FixedArray to $*Element + // ``` + // -> + // ``` + // %1 = vector_base_addr %0 : $*Builtin.FixedArray + // ``` + _ = optimizeVectorBaseCast(context) + } +} + +private extension UncheckedAddrCastInst { + func optimizeSameTypeCast(_ context: SimplifyContext) -> Bool { + if fromAddress.type == type { + self.replace(with: fromAddress, context) + return true + } + return false + } + + func optimizeDoubleCast(_ context: SimplifyContext) -> Bool { + if let firstCast = fromAddress as? 
UncheckedAddrCastInst { let builder = Builder(before: self, context) let newCast = builder.createUncheckedAddrCast(from: firstCast.fromAddress, to: type) self.replace(with: newCast, context) return true } return false } func optimizeVectorBaseCast(_ context: SimplifyContext) -> Bool { if fromAddress.type.isBuiltinFixedArray, fromAddress.type.builtinFixedArrayElementType(in: parentFunction, maximallyAbstracted: true).addressType == type { let builder = Builder(before: self, context) let vectorBase = builder.createVectorBaseAddr(vector: fromAddress) self.replace(with: vectorBase, context) return true } return false } } diff --git a/SwiftCompilerSources/Sources/Optimizer/PassManager/PassRegistration.swift b/SwiftCompilerSources/Sources/Optimizer/PassManager/PassRegistration.swift index b5a70e1a30cdb..e6ff780a93321 100644 --- a/SwiftCompilerSources/Sources/Optimizer/PassManager/PassRegistration.swift +++ b/SwiftCompilerSources/Sources/Optimizer/PassManager/PassRegistration.swift @@ -125,6 +125,7 @@ private func registerSwiftPasses() { registerForSILCombine(PointerToAddressInst.self, { run(PointerToAddressInst.self, $0) }) registerForSILCombine(UncheckedEnumDataInst.self, { run(UncheckedEnumDataInst.self, $0) }) registerForSILCombine(WitnessMethodInst.self, { run(WitnessMethodInst.self, $0) }) + registerForSILCombine(UncheckedAddrCastInst.self, { run(UncheckedAddrCastInst.self, $0) }) registerForSILCombine(UnconditionalCheckedCastInst.self, { run(UnconditionalCheckedCastInst.self, $0) }) registerForSILCombine(AllocStackInst.self, { run(AllocStackInst.self, $0) }) registerForSILCombine(ApplyInst.self, { run(ApplyInst.self, $0) }) diff --git a/SwiftCompilerSources/Sources/Optimizer/Utilities/OptUtils.swift b/SwiftCompilerSources/Sources/Optimizer/Utilities/OptUtils.swift index c8a4ef4e2ccc4..9000af0d317bd 100644 --- a/SwiftCompilerSources/Sources/Optimizer/Utilities/OptUtils.swift +++ b/SwiftCompilerSources/Sources/Optimizer/Utilities/OptUtils.swift @@ -387,7 +387,7 @@ extension Instruction { case let bi as BuiltinInst: switch bi.id { case .ZeroInitializer: - let type = bi.type.isBuiltinVector ? bi.type.builtinVectorElementType : bi.type + let type = bi.type.isBuiltinVector ? bi.type.builtinVectorElementType(in: parentFunction) : bi.type return type.isBuiltinInteger || type.isBuiltinFloat case .PtrToInt: return bi.operands[0].value is StringLiteralInst diff --git a/SwiftCompilerSources/Sources/Optimizer/Utilities/Verifier.swift b/SwiftCompilerSources/Sources/Optimizer/Utilities/Verifier.swift index d54ded9805934..7bb7e42a6c0ed 100644 --- a/SwiftCompilerSources/Sources/Optimizer/Utilities/Verifier.swift +++ b/SwiftCompilerSources/Sources/Optimizer/Utilities/Verifier.swift @@ -148,6 +148,16 @@ extension LoadBorrowInst : VerifiableInstruction { } } +extension VectorBaseAddrInst : VerifiableInstruction { + func verify(_ context: FunctionPassContext) { + require(vector.type.isBuiltinFixedArray, + "vector operand of vector_base_addr must be a Builtin.FixedArray") + require(type == vector.type.builtinFixedArrayElementType(in: parentFunction, + maximallyAbstracted: true).addressType, + "result of vector_base_addr has wrong type") + } +} + // Used to check if any instruction is mutating the memory location within the liverange of a `load_borrow`. // Note that it is not checking if an instruction _may_ mutate the memory, but it's checking if any instruction // _definitely_ will mutate the memory.
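The net effect of the new instruction and of the `unchecked_addr_cast` simplification above can be summarized with a short SIL sketch (illustrative only, modeled on the `vector_base` and `vector_base_addr` test cases added further down in this patch; the function names here are made up):

```
// Before simplification: the first-element address is obtained by
// reinterpreting the address of the whole fixed array.
sil [ossa] @first_element : $@convention(thin) (@inout Builtin.FixedArray<10, Int>) -> Int {
bb0(%0 : $*Builtin.FixedArray<10, Int>):
  %1 = unchecked_addr_cast %0 to $*Int
  %2 = load [trivial] %1
  return %2
}

// After SimplifyUncheckedAddrCast: the cast is replaced by the dedicated
// projection; addresses of the other elements can be derived with index_addr.
sil [ossa] @first_element_simplified : $@convention(thin) (@inout Builtin.FixedArray<10, Int>) -> Int {
bb0(%0 : $*Builtin.FixedArray<10, Int>):
  %1 = vector_base_addr %0
  %2 = load [trivial] %1
  return %2
}
```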
diff --git a/SwiftCompilerSources/Sources/SIL/Builder.swift b/SwiftCompilerSources/Sources/SIL/Builder.swift index 235a5dcc411eb..a5ce4b04b2eab 100644 --- a/SwiftCompilerSources/Sources/SIL/Builder.swift +++ b/SwiftCompilerSources/Sources/SIL/Builder.swift @@ -487,6 +487,10 @@ public struct Builder { return notifyNew(vectorInst.getAs(VectorInst.self)) } + public func createVectorBaseAddr(vector: Value) -> VectorBaseAddrInst { + return notifyNew(bridged.createVectorBaseAddr(vector.bridged).getAs(VectorBaseAddrInst.self)) + } + public func createGlobalAddr(global: GlobalVariable, dependencyToken: Value?) -> GlobalAddrInst { return notifyNew(bridged.createGlobalAddr(global.bridged, dependencyToken.bridged).getAs(GlobalAddrInst.self)) } diff --git a/SwiftCompilerSources/Sources/SIL/Instruction.swift b/SwiftCompilerSources/Sources/SIL/Instruction.swift index 2eea59c78e607..49c8a845567fa 100644 --- a/SwiftCompilerSources/Sources/SIL/Instruction.swift +++ b/SwiftCompilerSources/Sources/SIL/Instruction.swift @@ -1350,6 +1350,10 @@ final public class ObjectInst : SingleValueInstruction { final public class VectorInst : SingleValueInstruction { } +final public class VectorBaseAddrInst : SingleValueInstruction, UnaryInstruction { + public var vector: Value { operand.value } +} + final public class DifferentiableFunctionInst: SingleValueInstruction {} final public class LinearFunctionInst: SingleValueInstruction {} diff --git a/SwiftCompilerSources/Sources/SIL/Registration.swift b/SwiftCompilerSources/Sources/SIL/Registration.swift index 15b83dc9002dd..d8f352762e129 100644 --- a/SwiftCompilerSources/Sources/SIL/Registration.swift +++ b/SwiftCompilerSources/Sources/SIL/Registration.swift @@ -118,6 +118,7 @@ public func registerSILClasses() { register(MoveOnlyWrapperToCopyableAddrInst.self) register(ObjectInst.self) register(VectorInst.self) + register(VectorBaseAddrInst.self) register(TuplePackExtractInst.self) register(TuplePackElementAddrInst.self) register(PackElementGetInst.self) diff --git a/SwiftCompilerSources/Sources/SIL/Type.swift b/SwiftCompilerSources/Sources/SIL/Type.swift index 3e49c0766409a..d88e0b3483dc0 100644 --- a/SwiftCompilerSources/Sources/SIL/Type.swift +++ b/SwiftCompilerSources/Sources/SIL/Type.swift @@ -80,7 +80,13 @@ public struct Type : TypeProperties, CustomStringConvertible, NoReflectionChildr !isNoEscapeFunction && isEscapable(in: function) } - public var builtinVectorElementType: Type { canonicalType.builtinVectorElementType.silType! } + public func builtinVectorElementType(in function: Function) -> Type { + canonicalType.builtinVectorElementType.loweredType(in: function) + } + + public func builtinFixedArrayElementType(in function: Function, maximallyAbstracted: Bool = false) -> Type { + canonicalType.builtinFixedArrayElementType.loweredType(in: function, maximallyAbstracted: maximallyAbstracted) + } public var superClassType: Type? 
{ canonicalType.superClassType?.silType } diff --git a/SwiftCompilerSources/Sources/SIL/Utilities/AccessUtils.swift b/SwiftCompilerSources/Sources/SIL/Utilities/AccessUtils.swift index 26fb99731356b..2fbcf88d9e7f7 100644 --- a/SwiftCompilerSources/Sources/SIL/Utilities/AccessUtils.swift +++ b/SwiftCompilerSources/Sources/SIL/Utilities/AccessUtils.swift @@ -431,7 +431,7 @@ public struct AccessPath : CustomStringConvertible, Hashable { private func canBeOperandOfIndexAddr(_ value: Value) -> Bool { switch value { - case is IndexAddrInst, is RefTailAddrInst, is PointerToAddressInst: + case is IndexAddrInst, is RefTailAddrInst, is PointerToAddressInst, is VectorBaseAddrInst: return true default: return false diff --git a/SwiftCompilerSources/Sources/SIL/Utilities/SmallProjectionPath.swift b/SwiftCompilerSources/Sources/SIL/Utilities/SmallProjectionPath.swift index 188fd38fa1b51..ea94f2771e83f 100644 --- a/SwiftCompilerSources/Sources/SIL/Utilities/SmallProjectionPath.swift +++ b/SwiftCompilerSources/Sources/SIL/Utilities/SmallProjectionPath.swift @@ -83,15 +83,17 @@ public struct SmallProjectionPath : Hashable, CustomStringConvertible, NoReflect // This and all following kinds (we'll add in the future) cannot have a field index. case tailElements = 0x07 // (0 << 3) | 0x7 A tail allocated element of a class: syntax `ct` case existential = 0x0f // (1 << 3) | 0x7 A concrete value projected out of an existential: synatx 'x' - case anyClassField = 0x17 // (2 << 3) | 0x7 Any class field, including tail elements: syntax `c*` - case anyIndexedElement = 0x1f // (3 << 3) | 0x7 An unknown offset into an array of elements. + case vectorBase = 0x17 // (2 << 3) | 0x7 The base element of a vector: syntax 'b' + case anyClassField = 0x1f // (3 << 3) | 0x7 Any class field, including tail elements: syntax `c*` + case anyIndexedElement = 0x27 // (4 << 3) | 0x7 An unknown offset into an array of elements. // There must not be two successive element indices in the path.
- case anyValueFields = 0x27 // (4 << 3) | 0x7 Any number of any value fields (struct, tuple, enum): syntax `v**` - case anything = 0x2f // (5 << 3) | 0x7 Any number of any fields: syntax `**` + case anyValueFields = 0x2f // (5 << 3) | 0x7 Any number of any value fields (struct, tuple, enum): syntax `v**` + case anything = 0x37 // (6 << 3) | 0x7 Any number of any fields: syntax `**` public var isValueField: Bool { switch self { - case .anyValueFields, .structField, .tupleField, .enumCase, .indexedElement, .anyIndexedElement, .existential: + case .structField, .tupleField, .enumCase, .indexedElement, .existential, .vectorBase, + .anyValueFields, .anyIndexedElement: return true case .root, .anything, .anyClassField, .classField, .tailElements: return false @@ -102,7 +104,8 @@ public struct SmallProjectionPath : Hashable, CustomStringConvertible, NoReflect switch self { case .anyClassField, .classField, .tailElements: return true - case .root, .anything, .anyValueFields, .structField, .tupleField, .enumCase, .indexedElement, .anyIndexedElement, .existential: + case .root, .anything, .anyValueFields, .structField, .tupleField, .enumCase, .indexedElement, + .anyIndexedElement, .existential, .vectorBase: return false } } @@ -140,6 +143,7 @@ public struct SmallProjectionPath : Hashable, CustomStringConvertible, NoReflect case .classField: s = "c\(idx)" case .tailElements: s = "ct" case .existential: s = "x" + case .vectorBase: s = "b" case .indexedElement: s = "i\(idx)" case .anyIndexedElement: s = "i*" case .anything: s = "**" @@ -398,7 +402,7 @@ public struct SmallProjectionPath : Hashable, CustomStringConvertible, NoReflect return subPath.matches(pattern: subPattern) case .anyIndexedElement: return popIndexedElements().matches(pattern: subPattern) - case .structField, .tupleField, .enumCase, .classField, .tailElements, .indexedElement, .existential: + case .structField, .tupleField, .enumCase, .classField, .tailElements, .indexedElement, .existential, .vectorBase: let (kind, index, subPath) = pop() if kind != patternKind || index != patternIdx { return false } return subPath.matches(pattern: subPattern) @@ -478,8 +482,18 @@ public struct SmallProjectionPath : Hashable, CustomStringConvertible, NoReflect } if (lhsKind == rhsKind && lhsIdx == rhsIdx) || (lhsKind == .anyClassField && rhsKind.isClassField) || - (lhsKind.isClassField && rhsKind == .anyClassField) { - return pop(numBits: lhsBits).mayOverlap(with: rhs.pop(numBits: rhsBits)) + (lhsKind.isClassField && rhsKind == .anyClassField) + { + let poppedPath = pop(numBits: lhsBits) + let rhsPoppedPath = rhs.pop(numBits: rhsBits) + // Check for the case of overlapping the first element of a vector with another element. + // Note that the index of `.indexedElement` cannot be 0. 
+ if (poppedPath.isEmpty && rhsPoppedPath.pop().kind == .indexedElement) || + (rhsPoppedPath.isEmpty && poppedPath.pop().kind == .indexedElement) + { + return false + } + return poppedPath.mayOverlap(with: rhsPoppedPath) } return false } @@ -496,7 +510,7 @@ public struct SmallProjectionPath : Hashable, CustomStringConvertible, NoReflect switch lhsKind { case .root: return rhs - case .classField, .tailElements, .structField, .tupleField, .enumCase, .existential, .indexedElement: + case .classField, .tailElements, .structField, .tupleField, .enumCase, .existential, .indexedElement, .vectorBase: let (rhsKind, rhsIdx, rhsBits) = rhs.top if lhsKind == rhsKind && lhsIdx == rhsIdx { return pop(numBits: lhsBits).subtract(from: rhs.pop(numBits: rhsBits)) @@ -601,6 +615,8 @@ extension StringParser { entries.append((.tailElements, 0)) } else if consume("x") { entries.append((.existential, 0)) + } else if consume("b") { + entries.append((.vectorBase, 0)) } else if consume("c") { guard let idx = consumeInt(withWhiteSpace: false) else { try throwError("expected class field index") @@ -701,7 +717,8 @@ extension SmallProjectionPath { .push(.enumCase, index: 6) .push(.anyClassField) .push(.tupleField, index: 2)) - testParse("i3.x.i*", expect: SmallProjectionPath(.anyIndexedElement) + testParse("i3.x.b.i*", expect: SmallProjectionPath(.anyIndexedElement) + .push(.vectorBase) .push(.existential) .push(.indexedElement, index: 3)) @@ -739,6 +756,8 @@ extension SmallProjectionPath { testMerge("i*", "i2", expect: "i*") testMerge("s0.i*.e3", "s0.e3", expect: "s0.i*.e3") testMerge("i*", "v**", expect: "v**") + testMerge("s0.b.i1", "s0.b.i0", expect: "s0.b.i*") + testMerge("s0.b", "s0.1", expect: "s0.v**") testMerge("ct.s0.e0.v**.c0", "ct.s0.e0.v**.c0", expect: "ct.s0.e0.v**.c0") testMerge("ct.s0.s0.c0", "ct.s0.e0.s0.c0", expect: "ct.s0.v**.c0") @@ -813,6 +832,7 @@ extension SmallProjectionPath { testMatch("s1.v**", "s0.**", expect: false) testMatch("s0.**", "s0.v**", expect: false) testMatch("s0.s1", "s0.i*.s1", expect: true) + testMatch("s0.b.s1", "s0.b.i*.s1", expect: true) } func testMatch(_ lhsStr: String, _ rhsStr: String, expect: Bool) { @@ -847,6 +867,13 @@ extension SmallProjectionPath { testOverlap("i1", "i*", expect: true) testOverlap("i1", "v**", expect: true) testOverlap("s0.i*.s1", "s0.s1", expect: true) + testOverlap("s0.b.s1", "s0.b.i*.s1", expect: true) + testOverlap("s0.b.i0.s1", "s0.b.i1.s1", expect: false) + testOverlap("s0.b.i2.s1", "s0.b.i1.s1", expect: false) + testOverlap("s0.b.s1", "s0.b.i0.s1", expect: true) + testOverlap("s0.b", "s0.b.i1", expect: false) + testOverlap("s0.b.i1", "s0.b", expect: false) + testOverlap("s0.b.i1", "s0", expect: true) } func testOverlap(_ lhsStr: String, _ rhsStr: String, expect: Bool) { @@ -889,7 +916,7 @@ extension SmallProjectionPath { } func path2path() { - testPath2Path("s0.e2.3.c4.s1", { $0.popAllValueFields() }, expect: "c4.s1") + testPath2Path("s0.b.e2.3.c4.s1", { $0.popAllValueFields() }, expect: "c4.s1") testPath2Path("v**.c4.s1", { $0.popAllValueFields() }, expect: "c4.s1") testPath2Path("**", { $0.popAllValueFields() }, expect: "**") diff --git a/SwiftCompilerSources/Sources/SIL/Utilities/WalkUtils.swift b/SwiftCompilerSources/Sources/SIL/Utilities/WalkUtils.swift index 6ab15c5de5a1f..7c626a6923e59 100644 --- a/SwiftCompilerSources/Sources/SIL/Utilities/WalkUtils.swift +++ b/SwiftCompilerSources/Sources/SIL/Utilities/WalkUtils.swift @@ -509,6 +509,12 @@ extension AddressDefUseWalker { } else { return unmatchedPath(address: operand, path: path) } + 
case let vba as VectorBaseAddrInst: + if let path = path.popIfMatches(.vectorBase, index: 0) { + return walkDownUses(ofAddress: vba, path: path) + } else { + return unmatchedPath(address: operand, path: path) + } case is InitEnumDataAddrInst, is UncheckedTakeEnumDataAddrInst: let ei = instruction as! SingleValueInstruction if let path = path.popIfMatches(.enumCase, index: (instruction as! EnumInstruction).caseIndex) { @@ -814,6 +820,8 @@ extension AddressUseDefWalker { return walkUp(address: sea.struct, path: path.push(.structField, index: sea.fieldIndex)) case let tea as TupleElementAddrInst: return walkUp(address: tea.tuple, path: path.push(.tupleField, index: tea.fieldIndex)) + case let vba as VectorBaseAddrInst: + return walkUp(address: vba.vector, path: path.push(.vectorBase, index: 0)) case let ida as InitEnumDataAddrInst: return walkUp(address: ida.operand.value, path: path.push(.enumCase, index: ida.caseIndex)) case let uteda as UncheckedTakeEnumDataAddrInst: diff --git a/docs/SIL/Instructions.md b/docs/SIL/Instructions.md index abc7944d4b301..d077602d622b1 100644 --- a/docs/SIL/Instructions.md +++ b/docs/SIL/Instructions.md @@ -3467,6 +3467,20 @@ Constructs a statically initialized vector of elements. This instruction can only appear as final instruction in a global variable static initializer list. +### vector_base_addr + +``` +sil-instruction ::= 'vector_base_addr' sil-operand + +%1 = vector_base_addr %0 : $*Builtin.FixedArray<N, T> +// %0 must have type $*Builtin.FixedArray<N, T> +// %1 will be of the element type of the Builtin.FixedArray +``` + +Derives the address of the first element of a vector, i.e. a `Builtin.FixedArray`, +from the address of the vector itself. +Addresses of other vector elements can then be derived with `index_addr`. + ### ref_element_addr ``` diff --git a/include/swift/AST/ASTBridging.h b/include/swift/AST/ASTBridging.h index b4791d6a5836f..54220772a2462 100644 --- a/include/swift/AST/ASTBridging.h +++ b/include/swift/AST/ASTBridging.h @@ -3102,7 +3102,9 @@ struct BridgedASTType { BRIDGED_INLINE bool isBuiltinInteger() const; BRIDGED_INLINE bool isBuiltinFloat() const; BRIDGED_INLINE bool isBuiltinVector() const; + BRIDGED_INLINE bool isBuiltinFixedArray() const; SWIFT_IMPORT_UNSAFE BRIDGED_INLINE BridgedASTType getBuiltinVectorElementType() const; + SWIFT_IMPORT_UNSAFE BRIDGED_INLINE BridgedASTType getBuiltinFixedArrayElementType() const; BRIDGED_INLINE bool isBuiltinFixedWidthInteger(SwiftInt width) const; BRIDGED_INLINE bool isOptional() const; BRIDGED_INLINE bool isBuiltinType() const; diff --git a/include/swift/AST/ASTBridgingImpl.h b/include/swift/AST/ASTBridgingImpl.h index e746053673d75..75228fe0f4cb6 100644 --- a/include/swift/AST/ASTBridgingImpl.h +++ b/include/swift/AST/ASTBridgingImpl.h @@ -542,10 +542,18 @@ bool BridgedASTType::isBuiltinVector() const { return unbridged()->is<BuiltinVectorType>(); } +bool BridgedASTType::isBuiltinFixedArray() const { + return unbridged()->is<BuiltinFixedArrayType>(); +} + BridgedASTType BridgedASTType::getBuiltinVectorElementType() const { return {unbridged()->castTo<BuiltinVectorType>()->getElementType().getPointer()}; } +BridgedASTType BridgedASTType::getBuiltinFixedArrayElementType() const { + return {unbridged()->castTo<BuiltinFixedArrayType>()->getElementType().getPointer()}; +} + bool BridgedASTType::isBuiltinFixedWidthInteger(SwiftInt width) const { if (auto *intTy = unbridged()->getAs<BuiltinIntegerType>()) return intTy->isFixedWidth((unsigned)width); diff --git a/include/swift/SIL/AddressWalker.h b/include/swift/SIL/AddressWalker.h index 003f636a373a3..9edaeadcb0951 100644 ---
a/include/swift/SIL/AddressWalker.h +++ b/include/swift/SIL/AddressWalker.h @@ -249,6 +249,7 @@ TransitiveAddressWalker::walk(SILValue projectedAddress) { isa(user) || isa(user) || isa(user) || isa(user) || isa(user) || + isa<VectorBaseAddrInst>(user) || isa(user) || isa(user) || isa(user) || isa(user) || isa(user) || diff --git a/include/swift/SIL/SILBridging.h b/include/swift/SIL/SILBridging.h index 2578d0ca6d3ee..e6a9baaa41a44 100644 --- a/include/swift/SIL/SILBridging.h +++ b/include/swift/SIL/SILBridging.h @@ -1258,6 +1258,7 @@ struct BridgedBuilder{ SWIFT_IMPORT_UNSAFE BRIDGED_INLINE BridgedInstruction createObject(BridgedType type, BridgedValueArray arguments, SwiftInt numBaseElements) const; SWIFT_IMPORT_UNSAFE BRIDGED_INLINE BridgedInstruction createVector(BridgedValueArray arguments) const; + SWIFT_IMPORT_UNSAFE BRIDGED_INLINE BridgedInstruction createVectorBaseAddr(BridgedValue vector) const; SWIFT_IMPORT_UNSAFE BRIDGED_INLINE BridgedInstruction createGlobalAddr(BridgedGlobalVar global, OptionalBridgedValue dependencyToken) const; SWIFT_IMPORT_UNSAFE BRIDGED_INLINE BridgedInstruction createGlobalValue(BridgedGlobalVar global, diff --git a/include/swift/SIL/SILBridgingImpl.h b/include/swift/SIL/SILBridgingImpl.h index f807da6e1ab85..8db4dee339031 100644 --- a/include/swift/SIL/SILBridgingImpl.h +++ b/include/swift/SIL/SILBridgingImpl.h @@ -2453,6 +2453,10 @@ BridgedInstruction BridgedBuilder::createVector(BridgedValueArray arguments) con return {unbridged().createVector(swift::ArtificialUnreachableLocation(), arguments.getValues(argValues))}; } +BridgedInstruction BridgedBuilder::createVectorBaseAddr(BridgedValue vector) const { + return {unbridged().createVectorBaseAddr(regularLoc(), vector.getSILValue())}; +} + BridgedInstruction BridgedBuilder::createGlobalAddr(BridgedGlobalVar global, OptionalBridgedValue dependencyToken) const { return {unbridged().createGlobalAddr(regularLoc(), global.getGlobal(), dependencyToken.getSILValue())}; diff --git a/include/swift/SIL/SILBuilder.h b/include/swift/SIL/SILBuilder.h index a37ea657ade9e..6c72b5583fd6a 100644 --- a/include/swift/SIL/SILBuilder.h +++ b/include/swift/SIL/SILBuilder.h @@ -22,6 +22,7 @@ #include "swift/SIL/SILInstruction.h" #include "swift/SIL/SILModule.h" #include "swift/SIL/SILUndef.h" +#include "swift/SIL/AbstractionPattern.h" #include "llvm/ADT/PointerUnion.h" #include "llvm/ADT/StringExtras.h" #include @@ -1923,6 +1924,15 @@ class SILBuilder { return createStructElementAddr(Loc, Operand, Field, ResultTy); } + VectorBaseAddrInst * + createVectorBaseAddr(SILLocation loc, SILValue vector) { + auto arrayTy = vector->getType().getAs<BuiltinFixedArrayType>(); + ASSERT(arrayTy && "operand of vector_base_addr must be a builtin array type"); + auto elemTy = getFunction().getLoweredType(Lowering::AbstractionPattern::getOpaque(), arrayTy->getElementType()); + return insert(new (getModule()) VectorBaseAddrInst( + getSILDebugLocation(loc), vector, elemTy.getAddressType())); + } + RefElementAddrInst *createRefElementAddr(SILLocation Loc, SILValue Operand, VarDecl *Field, SILType ResultTy, bool IsImmutable = false) { diff --git a/include/swift/SIL/SILCloner.h b/include/swift/SIL/SILCloner.h index dc985ed698d11..3be450dd60cc4 100644 --- a/include/swift/SIL/SILCloner.h +++ b/include/swift/SIL/SILCloner.h @@ -2542,6 +2542,16 @@ SILCloner::visitStructElementAddrInst(StructElementAddrInst *Inst) { Inst->getField(), getOpType(Inst->getType()))); } +template<typename ImplClass> +void +SILCloner<ImplClass>::visitVectorBaseAddrInst(VectorBaseAddrInst *Inst) { +
getBuilder().setCurrentDebugScope(getOpScope(Inst->getDebugScope())); + recordClonedInstruction( + Inst, getBuilder().createVectorBaseAddr( + getOpLocation(Inst->getLoc()), + getOpValue(Inst->getVector()))); +} + template void SILCloner::visitRefElementAddrInst(RefElementAddrInst *Inst) { diff --git a/include/swift/SIL/SILInstruction.h b/include/swift/SIL/SILInstruction.h index 437fbc7596fad..4057ee62a29e0 100644 --- a/include/swift/SIL/SILInstruction.h +++ b/include/swift/SIL/SILInstruction.h @@ -6791,6 +6791,17 @@ class VectorInst final : public InstructionBaseWithTrailingOperands< } }; +class VectorBaseAddrInst + : public UnaryInstructionBase<SILInstructionKind::VectorBaseAddrInst, SingleValueInstruction> { + friend SILBuilder; + + VectorBaseAddrInst(SILDebugLocation debugLoc, SILValue vector, SILType resultTy) + : UnaryInstructionBase(debugLoc, vector, resultTy) {} +public: + SILValue getVector() const { return getOperand(); } +}; + /// TupleInst - Represents a constructed loadable tuple. class TupleInst final : public InstructionBaseWithTrailingOperands< SILInstructionKind::TupleInst, TupleInst, diff --git a/include/swift/SIL/SILNodes.def b/include/swift/SIL/SILNodes.def index 27cb867792a97..4468b0a6c03e4 100644 --- a/include/swift/SIL/SILNodes.def +++ b/include/swift/SIL/SILNodes.def @@ -587,6 +587,8 @@ ABSTRACT_VALUE_AND_INST(SingleValueInstruction, ValueBase, SILInstruction) SingleValueInstruction, None, DoesNotRelease) SINGLE_VALUE_INST(VectorInst, vector, SingleValueInstruction, None, DoesNotRelease) + SINGLE_VALUE_INST(VectorBaseAddrInst, vector_base_addr, + SingleValueInstruction, None, DoesNotRelease) SINGLE_VALUE_INST(TupleInst, tuple, SingleValueInstruction, None, DoesNotRelease) SINGLE_VALUE_INST(TupleExtractInst, tuple_extract, diff --git a/lib/IRGen/IRGenSIL.cpp b/lib/IRGen/IRGenSIL.cpp index f1c820e2c3fed..5592f971e83ff 100644 --- a/lib/IRGen/IRGenSIL.cpp +++ b/lib/IRGen/IRGenSIL.cpp @@ -1352,6 +1352,7 @@ class IRGenSILFunction : void visitTupleElementAddrInst(TupleElementAddrInst *i); void visitStructExtractInst(StructExtractInst *i); void visitStructElementAddrInst(StructElementAddrInst *i); + void visitVectorBaseAddrInst(VectorBaseAddrInst *i); void visitRefElementAddrInst(RefElementAddrInst *i); void visitRefTailAddrInst(RefTailAddrInst *i); @@ -5639,6 +5640,13 @@ void IRGenSILFunction::visitStructElementAddrInst( setLoweredAddress(i, field); } +void IRGenSILFunction::visitVectorBaseAddrInst(VectorBaseAddrInst *i) { + auto addr = getLoweredAddress(i->getVector()); + auto &ti = getTypeInfo(i->getType()); + auto result = Builder.CreateElementBitCast(addr, ti.getStorageType()); + setLoweredAddress(i, result); +} + void IRGenSILFunction::visitRefElementAddrInst(swift::RefElementAddrInst *i) { Explosion base = getLoweredExplosion(i->getOperand()); llvm::Value *value = base.claimNext(); diff --git a/lib/SIL/IR/OperandOwnership.cpp b/lib/SIL/IR/OperandOwnership.cpp index 5765c57f9f66c..ffb4729be8959 100644 --- a/lib/SIL/IR/OperandOwnership.cpp +++ b/lib/SIL/IR/OperandOwnership.cpp @@ -171,6 +171,7 @@ OPERAND_OWNERSHIP(TrivialUse, DestroyAddr) OPERAND_OWNERSHIP(TrivialUse, EndAccess) OPERAND_OWNERSHIP(TrivialUse, EndUnpairedAccess) OPERAND_OWNERSHIP(TrivialUse, GetAsyncContinuationAddr) +OPERAND_OWNERSHIP(TrivialUse, VectorBaseAddr) OPERAND_OWNERSHIP(TrivialUse, IndexAddr) OPERAND_OWNERSHIP(TrivialUse, IndexRawPointer) OPERAND_OWNERSHIP(TrivialUse, InitBlockStorageHeader) diff --git a/lib/SIL/IR/SILPrinter.cpp b/lib/SIL/IR/SILPrinter.cpp index 5b87a78f9b174..35e57ae916a81 ---
b/lib/SIL/IR/SILPrinter.cpp @@ -2500,6 +2500,9 @@ class SILPrinter : public SILInstructionVisitor { printFullContext(EI->getField()->getDeclContext(), PrintState.OS); *this << EI->getField()->getName().get(); } + void visitVectorBaseAddrInst(VectorBaseAddrInst *vbai) { + *this << getIDAndType(vbai->getVector()); + } void visitRefElementAddrInst(RefElementAddrInst *EI) { *this << (EI->isImmutable() ? "[immutable] " : "") << getIDAndType(EI->getOperand()) << ", #"; diff --git a/lib/SIL/IR/ValueOwnership.cpp b/lib/SIL/IR/ValueOwnership.cpp index 77d5c6fbcf9fb..8a7da389229e7 100644 --- a/lib/SIL/IR/ValueOwnership.cpp +++ b/lib/SIL/IR/ValueOwnership.cpp @@ -125,6 +125,7 @@ CONSTANT_OWNERSHIP_INST(None, PreviousDynamicFunctionRef) CONSTANT_OWNERSHIP_INST(None, GlobalAddr) CONSTANT_OWNERSHIP_INST(None, BaseAddrForOffset) CONSTANT_OWNERSHIP_INST(None, HasSymbol) +CONSTANT_OWNERSHIP_INST(None, VectorBaseAddr) CONSTANT_OWNERSHIP_INST(None, IndexAddr) CONSTANT_OWNERSHIP_INST(None, IndexRawPointer) CONSTANT_OWNERSHIP_INST(None, InitEnumDataAddr) diff --git a/lib/SIL/Parser/ParseSIL.cpp b/lib/SIL/Parser/ParseSIL.cpp index bc7845144f084..741dfe45b4836 100644 --- a/lib/SIL/Parser/ParseSIL.cpp +++ b/lib/SIL/Parser/ParseSIL.cpp @@ -5891,6 +5891,12 @@ bool SILParser::parseSpecificSILInstruction(SILBuilder &B, ResultVal = B.createIndexAddr(InstLoc, Val, IndexVal, needsStackProtection); break; } + case SILInstructionKind::VectorBaseAddrInst: { + if (parseTypedValueRef(Val, B) || parseSILDebugLocation(InstLoc, B)) + return true; + ResultVal = B.createVectorBaseAddr(InstLoc, Val); + break; + } case SILInstructionKind::TailAddrInst: { SILValue IndexVal; SILType ResultObjTy; diff --git a/lib/SIL/Utils/InstructionUtils.cpp b/lib/SIL/Utils/InstructionUtils.cpp index a69056caea38c..19005d5e8220b 100644 --- a/lib/SIL/Utils/InstructionUtils.cpp +++ b/lib/SIL/Utils/InstructionUtils.cpp @@ -581,6 +581,7 @@ RuntimeEffect swift::getRuntimeEffect(SILInstruction *inst, SILType &impactType) case SILInstructionKind::TupleExtractInst: case SILInstructionKind::StructInst: case SILInstructionKind::StructExtractInst: + case SILInstructionKind::VectorBaseAddrInst: case SILInstructionKind::RefElementAddrInst: case SILInstructionKind::EnumInst: case SILInstructionKind::UncheckedEnumDataInst: diff --git a/lib/SIL/Verifier/SILVerifier.cpp b/lib/SIL/Verifier/SILVerifier.cpp index bfb895052b21e..61b58145fce8d 100644 --- a/lib/SIL/Verifier/SILVerifier.cpp +++ b/lib/SIL/Verifier/SILVerifier.cpp @@ -821,6 +821,7 @@ struct ImmutableAddressUseVerifier { LLVM_FALLTHROUGH; case SILInstructionKind::MoveOnlyWrapperToCopyableAddrInst: case SILInstructionKind::CopyableToMoveOnlyWrapperAddrInst: + case SILInstructionKind::VectorBaseAddrInst: case SILInstructionKind::StructElementAddrInst: case SILInstructionKind::TupleElementAddrInst: case SILInstructionKind::IndexAddrInst: diff --git a/lib/SILGen/SILGenBuiltin.cpp b/lib/SILGen/SILGenBuiltin.cpp index e02a87385ea74..7ae2a7058ce15 100644 --- a/lib/SILGen/SILGenBuiltin.cpp +++ b/lib/SILGen/SILGenBuiltin.cpp @@ -477,6 +477,29 @@ static ManagedValue emitBuiltinUnprotectedAddressOf(SILGenFunction &SGF, /*stackProtected=*/ false); } +// Like `tryEmitAddressableParameterAsAddress`, but also handles struct element projections. 
+static SILValue emitAddressOf(Expr *e, SILGenFunction &SGF, SILLocation loc) { + + if (auto *memberRef = dyn_cast<MemberRefExpr>(e)) { + VarDecl *fieldDecl = dyn_cast<VarDecl>(memberRef->getDecl().getDecl()); + if (!fieldDecl) + return SILValue(); + SILValue addr = emitAddressOf(memberRef->getBase(), SGF, loc); + if (!addr) + return SILValue(); + if (addr->getType().getStructOrBoundGenericStruct() != fieldDecl->getDeclContext()) + return SILValue(); + return SGF.B.createStructElementAddr(loc, addr, fieldDecl); + } + + if (auto addressableAddr = SGF.tryEmitAddressableParameterAsAddress( + ArgumentSource(e), + ValueOwnership::Shared)) { + return addressableAddr.getValue(); + } + return SILValue(); +} + /// Specialized emitter for Builtin.addressOfBorrow. static ManagedValue emitBuiltinAddressOfBorrowBuiltins(SILGenFunction &SGF, SILLocation loc, @@ -491,15 +514,11 @@ static ManagedValue emitBuiltinAddressOfBorrowBuiltins(SILGenFunction &SGF, auto argument = (*argsOrError)[0]; - SILValue addr; // Try to borrow the argument at +0 indirect. // If the argument is a reference to a borrowed addressable parameter, then // use that parameter's stable address. - if (auto addressableAddr = SGF.tryEmitAddressableParameterAsAddress( - ArgumentSource(argument), - ValueOwnership::Shared)) { - addr = addressableAddr.getValue(); - } else { + SILValue addr = emitAddressOf(argument, SGF, loc); + if (!addr) { // We otherwise only support the builtin applied to values that // are naturally emitted borrowed in memory. (But it would probably be good // to phase this out since it's not really well-defined how long diff --git a/lib/SILGen/SILGenExpr.cpp b/lib/SILGen/SILGenExpr.cpp index ca38704b70c77..187adea7d4a4e 100644 --- a/lib/SILGen/SILGenExpr.cpp +++ b/lib/SILGen/SILGenExpr.cpp @@ -4992,7 +4992,7 @@ static RValue emitInlineArrayLiteral(SILGenFunction &SGF, CollectionExpr *E, SGFContext C) { ArgumentScope scope(SGF, E); - auto iaTy = E->getType()->castTo(); + auto iaTy = E->getType()->castTo(); auto loweredIAType = SGF.getLoweredType(iaTy); // If this is an empty InlineArray literal and it's loadable, then create an @@ -5007,9 +5007,19 @@ static RValue emitInlineArrayLiteral(SILGenFunction &SGF, CollectionExpr *E, auto elementType = iaTy->getGenericArgs()[1]->getCanonicalType(); auto &eltTL = SGF.getTypeLowering(AbstractionPattern::getOpaque(), elementType); + + auto *arrayDecl = cast<StructDecl>(iaTy->getDecl()); + VarDecl *storageProperty = nullptr; + for (VarDecl *property : arrayDecl->getStoredProperties()) { + if ((property->getTypeInContext()->is<BuiltinFixedArrayType>())) { + storageProperty = property; + break; + } + } + + SILValue alloc = SGF.emitTemporaryAllocation(E, loweredIAType); - SILValue addr = SGF.B.createUncheckedAddrCast(E, alloc, - eltTL.getLoweredType().getAddressType()); + SILValue storage = SGF.B.createStructElementAddr(E, alloc, storageProperty); + SILValue addr = SGF.B.createVectorBaseAddr(E, storage); // Cleanups for any elements that have been initialized so far.
SmallVector cleanups; diff --git a/lib/SILOptimizer/Analysis/RegionAnalysis.cpp b/lib/SILOptimizer/Analysis/RegionAnalysis.cpp index 454deacf0f460..12e3acb12fb68 100644 --- a/lib/SILOptimizer/Analysis/RegionAnalysis.cpp +++ b/lib/SILOptimizer/Analysis/RegionAnalysis.cpp @@ -3411,6 +3411,7 @@ CONSTANT_TRANSLATION(InitExistentialValueInst, LookThrough) CONSTANT_TRANSLATION(UncheckedEnumDataInst, LookThrough) CONSTANT_TRANSLATION(TupleElementAddrInst, LookThrough) CONSTANT_TRANSLATION(StructElementAddrInst, LookThrough) +CONSTANT_TRANSLATION(VectorBaseAddrInst, LookThrough) CONSTANT_TRANSLATION(UncheckedTakeEnumDataAddrInst, LookThrough) //===--- diff --git a/lib/SILOptimizer/Analysis/SimplifyInstruction.cpp b/lib/SILOptimizer/Analysis/SimplifyInstruction.cpp index e5881fb9f2895..ca8b1b7a501ff 100644 --- a/lib/SILOptimizer/Analysis/SimplifyInstruction.cpp +++ b/lib/SILOptimizer/Analysis/SimplifyInstruction.cpp @@ -52,7 +52,6 @@ namespace { SILValue visitUnconditionalCheckedCastInst(UnconditionalCheckedCastInst *UCCI); SILValue visitUncheckedRefCastInst(UncheckedRefCastInst *OPRI); - SILValue visitUncheckedAddrCastInst(UncheckedAddrCastInst *UACI); SILValue visitStructInst(StructInst *SI); SILValue visitTupleInst(TupleInst *SI); SILValue visitBuiltinInst(BuiltinInst *AI); @@ -357,21 +356,6 @@ visitUncheckedRefCastInst(UncheckedRefCastInst *OPRI) { return simplifyDeadCast(OPRI); } -SILValue -InstSimplifier:: -visitUncheckedAddrCastInst(UncheckedAddrCastInst *UACI) { - // (unchecked-addr-cast Y->X (unchecked-addr-cast x X->Y)) -> x - if (auto *OtherUACI = dyn_cast(&*UACI->getOperand())) - if (OtherUACI->getOperand()->getType() == UACI->getType()) - return OtherUACI->getOperand(); - - // (unchecked-addr-cast X->X x) -> x - if (UACI->getOperand()->getType() == UACI->getType()) - return UACI->getOperand(); - - return SILValue(); -} - SILValue InstSimplifier::visitUpcastInst(UpcastInst *UI) { // (upcast Y->X (unchecked-ref-cast x X->Y)) -> x if (auto *URCI = dyn_cast(UI->getOperand())) diff --git a/lib/SILOptimizer/SILCombiner/SILCombiner.h b/lib/SILOptimizer/SILCombiner/SILCombiner.h index 5f5de2e0bb1bc..ebc56ba5b6328 100644 --- a/lib/SILOptimizer/SILCombiner/SILCombiner.h +++ b/lib/SILOptimizer/SILCombiner/SILCombiner.h @@ -260,7 +260,6 @@ class SILCombiner : bool optimizeStackAllocatedEnum(AllocStackInst *AS); SILInstruction *visitSwitchEnumAddrInst(SwitchEnumAddrInst *SEAI); SILInstruction *visitInjectEnumAddrInst(InjectEnumAddrInst *IEAI); - SILInstruction *visitUncheckedAddrCastInst(UncheckedAddrCastInst *UADCI); SILInstruction *visitUncheckedRefCastInst(UncheckedRefCastInst *URCI); SILInstruction *visitEndCOWMutationInst(EndCOWMutationInst *URCI); SILInstruction *visitUncheckedRefCastAddrInst(UncheckedRefCastAddrInst *URCI); diff --git a/lib/SILOptimizer/SILCombiner/SILCombinerCastVisitors.cpp b/lib/SILOptimizer/SILCombiner/SILCombinerCastVisitors.cpp index 9e28834bb1c27..09f14c969e1a4 100644 --- a/lib/SILOptimizer/SILCombiner/SILCombinerCastVisitors.cpp +++ b/lib/SILOptimizer/SILCombiner/SILCombinerCastVisitors.cpp @@ -219,23 +219,6 @@ SILInstruction *SILCombiner::visitUpcastInst(UpcastInst *uci) { return nullptr; } -SILInstruction * -SILCombiner::visitUncheckedAddrCastInst(UncheckedAddrCastInst *UADCI) { - // These are always safe to perform due to interior pointer ownership - // requirements being transitive along addresses. 
- - Builder.setCurrentDebugScope(UADCI->getDebugScope()); - - // (unchecked_addr_cast (unchecked_addr_cast x X->Y) Y->Z) - // -> - // (unchecked_addr_cast x X->Z) - if (auto *OtherUADCI = dyn_cast(UADCI->getOperand())) - return Builder.createUncheckedAddrCast(UADCI->getLoc(), - OtherUADCI->getOperand(), - UADCI->getType()); - return nullptr; -} - SILInstruction * SILCombiner::visitUncheckedRefCastInst(UncheckedRefCastInst *urci) { // %0 = unchecked_ref_cast %x : $X->Y diff --git a/lib/SILOptimizer/SILCombiner/Simplifications.def b/lib/SILOptimizer/SILCombiner/Simplifications.def index 0b4444664bf31..2fd5d6720466b 100644 --- a/lib/SILOptimizer/SILCombiner/Simplifications.def +++ b/lib/SILOptimizer/SILCombiner/Simplifications.def @@ -47,6 +47,7 @@ INSTRUCTION_SIMPLIFICATION(DestructureStructInst) INSTRUCTION_SIMPLIFICATION(DestructureTupleInst) INSTRUCTION_SIMPLIFICATION(PointerToAddressInst) INSTRUCTION_SIMPLIFICATION(TypeValueInst) +INSTRUCTION_SIMPLIFICATION(UncheckedAddrCastInst) INSTRUCTION_SIMPLIFICATION(UncheckedEnumDataInst) INSTRUCTION_SIMPLIFICATION(WitnessMethodInst) INSTRUCTION_SIMPLIFICATION_WITH_LEGACY(AllocStackInst) diff --git a/lib/SILOptimizer/UtilityPasses/SerializeSILPass.cpp b/lib/SILOptimizer/UtilityPasses/SerializeSILPass.cpp index 2be680f257449..69ed207bbe67e 100644 --- a/lib/SILOptimizer/UtilityPasses/SerializeSILPass.cpp +++ b/lib/SILOptimizer/UtilityPasses/SerializeSILPass.cpp @@ -209,6 +209,7 @@ static bool hasOpaqueArchetype(TypeExpansionContext context, case SILInstructionKind::ObjCProtocolInst: case SILInstructionKind::ObjectInst: case SILInstructionKind::VectorInst: + case SILInstructionKind::VectorBaseAddrInst: case SILInstructionKind::TupleInst: case SILInstructionKind::TupleAddrConstructorInst: case SILInstructionKind::TupleExtractInst: diff --git a/lib/SILOptimizer/Utils/SILInliner.cpp b/lib/SILOptimizer/Utils/SILInliner.cpp index 94d44400920df..ef15f35ad20ac 100644 --- a/lib/SILOptimizer/Utils/SILInliner.cpp +++ b/lib/SILOptimizer/Utils/SILInliner.cpp @@ -1096,6 +1096,7 @@ InlineCost swift::instructionInlineCost(SILInstruction &I) { case SILInstructionKind::DynamicMethodBranchInst: case SILInstructionKind::EnumInst: case SILInstructionKind::IndexAddrInst: + case SILInstructionKind::VectorBaseAddrInst: case SILInstructionKind::TailAddrInst: case SILInstructionKind::IndexRawPointerInst: case SILInstructionKind::InitEnumDataAddrInst: diff --git a/lib/Serialization/DeserializeSIL.cpp b/lib/Serialization/DeserializeSIL.cpp index 75d10adf1e213..2cffe5ae07f3a 100644 --- a/lib/Serialization/DeserializeSIL.cpp +++ b/lib/Serialization/DeserializeSIL.cpp @@ -2452,6 +2452,15 @@ bool SILDeserializer::readSILInstruction(SILFunction *Fn, getSILType(Ty2, (SILValueCategory)TyCategory2, Fn))); break; } + case SILInstructionKind::VectorBaseAddrInst: { + assert(RecordKind == SIL_ONE_TYPE_ONE_OPERAND); + ResultInst = Builder.createVectorBaseAddr( + Loc, + getLocalValue( + Builder.maybeGetFunction(), ValID, + getSILType(MF->getType(TyID2), (SILValueCategory)TyCategory2, Fn))); + break; + } case SILInstructionKind::IndexAddrInst: { auto Ty = MF->getType(TyID); auto Ty2 = MF->getType(TyID2); diff --git a/lib/Serialization/ModuleFormat.h b/lib/Serialization/ModuleFormat.h index 461c59d27ad36..8ee427566de8e 100644 --- a/lib/Serialization/ModuleFormat.h +++ b/lib/Serialization/ModuleFormat.h @@ -58,7 +58,7 @@ const uint16_t SWIFTMODULE_VERSION_MAJOR = 0; /// describe what change you made. 
The content of this comment isn't important; /// it just ensures a conflict if two people change the module format. /// Don't worry about adhering to the 80-column limit for this line. -const uint16_t SWIFTMODULE_VERSION_MINOR = 948; // remove SwiftSettings +const uint16_t SWIFTMODULE_VERSION_MINOR = 949; // vector_base_addr instruction /// A standard hash seed used for all string hashes in a serialized module. /// diff --git a/lib/Serialization/SerializeSIL.cpp b/lib/Serialization/SerializeSIL.cpp index 043b2dc17b0a5..facf968ab331f 100644 --- a/lib/Serialization/SerializeSIL.cpp +++ b/lib/Serialization/SerializeSIL.cpp @@ -2177,6 +2177,7 @@ void SILSerializer::writeSILInstruction(const SILInstruction &SI) { case SILInstructionKind::UncheckedTrivialBitCastInst: case SILInstructionKind::UncheckedBitwiseCastInst: case SILInstructionKind::UncheckedValueCastInst: + case SILInstructionKind::VectorBaseAddrInst: case SILInstructionKind::BridgeObjectToRefInst: case SILInstructionKind::BridgeObjectToWordInst: case SILInstructionKind::UpcastInst: diff --git a/stdlib/public/core/InlineArray.swift b/stdlib/public/core/InlineArray.swift index 1edc026eab9e8..2bf9c6339e2cf 100644 --- a/stdlib/public/core/InlineArray.swift +++ b/stdlib/public/core/InlineArray.swift @@ -46,7 +46,7 @@ @_addressableForDependencies public struct InlineArray: ~Copyable { @usableFromInline - internal let _storage: Builtin.FixedArray + internal var _storage: Builtin.FixedArray } @available(SwiftStdlib 6.2, *) @@ -69,7 +69,7 @@ extension InlineArray where Element: ~Copyable { @_alwaysEmitIntoClient @_transparent internal var _address: UnsafePointer { - unsafe UnsafePointer(Builtin.unprotectedAddressOfBorrow(self)) + unsafe UnsafePointer(Builtin.unprotectedAddressOfBorrow(_storage)) } /// Returns a buffer pointer over the entire array. @@ -86,7 +86,7 @@ extension InlineArray where Element: ~Copyable { @_transparent internal var _mutableAddress: UnsafeMutablePointer { mutating get { - unsafe UnsafeMutablePointer(Builtin.unprotectedAddressOf(&self)) + unsafe UnsafeMutablePointer(Builtin.unprotectedAddressOf(&_storage)) } } @@ -147,7 +147,7 @@ extension InlineArray where Element: ~Copyable { @_alwaysEmitIntoClient public init(_ body: (Index) throws(E) -> Element) throws(E) { #if $BuiltinEmplaceTypedThrows - self = try Builtin.emplace { (rawPtr) throws(E) -> () in + _storage = try Builtin.emplace { (rawPtr) throws(E) -> () in let buffer = unsafe Self._initializationBuffer(start: rawPtr) for i in 0 ..< count { @@ -204,7 +204,7 @@ extension InlineArray where Element: ~Copyable { // and take the underlying value within the closure. var o: Element? 
= first - self = try Builtin.emplace { (rawPtr) throws(E) -> () in + _storage = try Builtin.emplace { (rawPtr) throws(E) -> () in let buffer = unsafe Self._initializationBuffer(start: rawPtr) guard Self.count > 0 else { @@ -248,7 +248,7 @@ extension InlineArray where Element: Copyable { @_alwaysEmitIntoClient public init(repeating value: Element) { #if $ValueGenericsNameLookup - self = Builtin.emplace { + _storage = Builtin.emplace { let buffer = unsafe Self._initializationBuffer(start: $0) unsafe buffer.initialize(repeating: value) diff --git a/test/SIL/Parser/basic.sil b/test/SIL/Parser/basic.sil index 2af0a635ce4d1..9dabff5a02b6a 100644 --- a/test/SIL/Parser/basic.sil +++ b/test/SIL/Parser/basic.sil @@ -1220,6 +1220,16 @@ sil @alloc_stack_test : $() -> () { // CHECK: } // end sil function 'alloc_stack_test' } +// CHECK-LABEL: sil @vector_base_addr : +// CHECK: %1 = vector_base_addr %0 : $*Builtin.FixedArray<10, Int> +// CHECK-LABEL: } // end sil function 'vector_base_addr' +sil @vector_base_addr : $@convention(thin) (@inout Builtin.FixedArray<10, Int>) -> Int { +bb0(%0 : $*Builtin.FixedArray<10, Int>): + %1 = vector_base_addr %0 + %2 = load %1 + return %2 +} + sil_global @staticProp: $Int // CHECK-LABEL: sil private @globalinit_func0 : $@convention(thin) () -> () { diff --git a/test/SILGen/inlinearray_literal.swift b/test/SILGen/inlinearray_literal.swift index c48a3dbf35a3d..26becddbe69ab 100644 --- a/test/SILGen/inlinearray_literal.swift +++ b/test/SILGen/inlinearray_literal.swift @@ -37,7 +37,8 @@ func emptyNoncopyable() -> InlineArray<0, Atomic> { // CHECK-LABEL: sil{{.*}} @$s19inlinearray_literal7trivials11InlineArrayVy$3_SiGyF : $@convention(thin) () -> InlineArray<4, Int> { // CHECK: [[SLAB_ALLOC:%.*]] = alloc_stack $InlineArray<4, Int> -// CHECK-NEXT: [[ELEMENT_PTR:%.*]] = unchecked_addr_cast [[SLAB_ALLOC]] to $*Int +// CHECK-NEXT: [[SE:%.*]] = struct_element_addr [[SLAB_ALLOC]], #InlineArray._storage +// CHECK-NEXT: [[ELEMENT_PTR:%.*]] = vector_base_addr [[SE]] // CHECK-NEXT: [[ELT_0_LITERAL:%.*]] = integer_literal $Builtin.IntLiteral, 1 // CHECK: [[ELT_0:%.*]] = apply {{%.*}}([[ELT_0_LITERAL]], {{%.*}}) : $@convention(method) (Builtin.IntLiteral, @thin Int.Type) -> Int // CHECK-NEXT: store [[ELT_0]] to [trivial] [[ELEMENT_PTR]] @@ -66,7 +67,8 @@ func trivial() -> InlineArray<4, Int> { // CHECK-LABEL: sil{{.*}} @$s19inlinearray_literal10nontrivials11InlineArrayVy$1_SSGyF : $@convention(thin) () -> @owned InlineArray<2, String> { // CHECK: [[SLAB_ALLOC:%.*]] = alloc_stack $InlineArray<2, String> -// CHECK-NEXT: [[ELEMENT_PTR:%.*]] = unchecked_addr_cast [[SLAB_ALLOC]] to $*String +// CHECK-NEXT: [[SE:%.*]] = struct_element_addr [[SLAB_ALLOC]], #InlineArray._storage +// CHECK-NEXT: [[ELEMENT_PTR:%.*]] = vector_base_addr [[SE]] // CHECK-NEXT: [[ELT_0_LITERAL:%.*]] = string_literal utf8 "hello" // CHECK: [[ELT_0:%.*]] = apply {{%.*}}([[ELT_0_LITERAL]], {{.*}}) : $@convention(method) (Builtin.RawPointer, Builtin.Word, Builtin.Int1, @thin String.Type) -> @owned String // CHECK-NEXT: store [[ELT_0]] to [init] [[ELEMENT_PTR]] @@ -86,7 +88,8 @@ func nontrivial() -> InlineArray<2, String> { // CHECK-LABEL: sil{{.*}} @$s19inlinearray_literal11noncopyables11InlineArrayVy$1_15Synchronization6AtomicVySiGGyF : $@convention(thin) () -> @out InlineArray<2, Atomic> { // CHECK: bb0([[SLAB_RETURN:%.*]] : $*InlineArray<2, Atomic>): // CHECK-NEXT: [[SLAB_ALLOC:%.*]] = alloc_stack $InlineArray<2, Atomic> -// CHECK-NEXT: [[ELEMENT_PTR:%.*]] = unchecked_addr_cast [[SLAB_ALLOC]] to $*Atomic +// 
CHECK-NEXT: [[SE:%.*]] = struct_element_addr [[SLAB_ALLOC]], #InlineArray._storage +// CHECK-NEXT: [[ELEMENT_PTR:%.*]] = vector_base_addr [[SE]] // CHECK: [[ATOMIC_INIT:%.*]] = function_ref @$s15Synchronization6AtomicVyACyxGxcfC // CHECK-NEXT: [[ELT_0:%.*]] = apply [[ATOMIC_INIT]]([[ELEMENT_PTR]], {{.*}}) : $@convention(method) <τ_0_0 where τ_0_0 : AtomicRepresentable> (@in τ_0_0, @thin Atomic<τ_0_0>.Type) -> @out Atomic<τ_0_0> // CHECK: [[ELT_1_OFFSET:%.*]] = integer_literal $Builtin.Word, 1 @@ -109,10 +112,11 @@ func noncopyable() -> InlineArray<2, Atomic> { // CHECK-LABEL: sil{{.*}} @$s19inlinearray_literal7closures11InlineArrayVy$0_S2icGyF : $@convention(thin) () -> @owned InlineArray<1, (Int) -> Int> { // CHECK: [[IA_ALLOC:%.*]] = alloc_stack $InlineArray<1, (Int) -> Int> -// CHECK-NEXT: [[ADDR_CAST:%.*]] = unchecked_addr_cast [[IA_ALLOC]] to $*@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0) -> @out τ_0_1 for +// CHECK-NEXT: [[SE:%.*]] = struct_element_addr [[IA_ALLOC]], #InlineArray._storage +// CHECK-NEXT: [[ELEMENT_PTR:%.*]] = vector_base_addr [[SE]] // CHECK: [[FN_REF:%.*]] = function_ref // CHECK-NEXT: [[THIN_TO_THICK_FN:%.*]] = thin_to_thick_function [[FN_REF]] to $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@in_guaranteed τ_0_0) -> @out τ_0_1 for -// CHECK-NEXT: store [[THIN_TO_THICK_FN]] to [init] [[ADDR_CAST]] +// CHECK-NEXT: store [[THIN_TO_THICK_FN]] to [init] [[ELEMENT_PTR]] // CHECK-NEXT: [[IA:%.*]] = load [take] [[IA_ALLOC]] // CHECK-NEXT: dealloc_stack [[IA_ALLOC]] // CHECK-NEXT: return [[IA]] diff --git a/test/SILOptimizer/accessutils.sil b/test/SILOptimizer/accessutils.sil index a098496ceea7f..a3381efc51b99 100644 --- a/test/SILOptimizer/accessutils.sil +++ b/test/SILOptimizer/accessutils.sil @@ -27,6 +27,10 @@ struct Ptr { var p: Int64 } +struct VectorStruct { + var a: Builtin.FixedArray<100, Int64> +} + class C {} sil @_getC : $@convention(thin) () -> @owned C @@ -707,3 +711,20 @@ bb0(%0 : @guaranteed $List): return %6 : $Int64 } +// CHECK-LABEL: Accesses for vectors +// CHECK: Value: %4 = index_addr %2 : $*Int64, %3 : $Builtin.Int64 +// CHECK-NEXT: Scope: base +// CHECK-NEXT: Base: argument - %0 = argument of bb0 : $*VectorStruct +// CHECK-NEXT: Path: "s0.b.i5" +// CHECK-NEXT: no Storage paths +// CHECK-LABEL: End accesses for vectors +sil [ossa] @vectors : $@convention(thin) (@in_guaranteed VectorStruct) -> Int64 { +bb0(%0 : $*VectorStruct): + %1 = struct_element_addr %0, #VectorStruct.a + %2 = vector_base_addr %1 + %3 = integer_literal $Builtin.Int64, 5 + %4 = index_addr %2, %3 + %5 = load [trivial] %4 + return %5 +} + diff --git a/test/SILOptimizer/alias-analysis.sil b/test/SILOptimizer/alias-analysis.sil index 19e8e72fa82fc..d9cd41613fd39 100644 --- a/test/SILOptimizer/alias-analysis.sil +++ b/test/SILOptimizer/alias-analysis.sil @@ -99,3 +99,44 @@ bb0(%0 : $*MyStruct, %1 : $Builtin.Word): %99 = tuple () return %99 : $() } + +// CHECK-LABEL: @testVectorBaseAddr +// CHECK: PAIR #18. +// CHECK-NEXT: %2 = vector_base_addr %0 : $*Builtin.FixedArray<10, Int> +// CHECK-NEXT: %3 = index_addr %2 : $*Int, %1 : $Builtin.Word +// CHECK-NEXT: MayAlias +// CHECK: PAIR #21. +// CHECK-NEXT: %2 = vector_base_addr %0 : $*Builtin.FixedArray<10, Int> +// CHECK-NEXT: %6 = index_addr %2 : $*Int, %4 : $Builtin.Word +// CHECK-NEXT: NoAlias +// CHECK: PAIR #22. +// CHECK-NEXT: %2 = vector_base_addr %0 : $*Builtin.FixedArray<10, Int> +// CHECK-NEXT: %7 = index_addr %2 : $*Int, %5 : $Builtin.Word +// CHECK-NEXT: NoAlias +// CHECK: PAIR #27. 
+// CHECK-NEXT: %3 = index_addr %2 : $*Int, %1 : $Builtin.Word +// CHECK-NEXT: %6 = index_addr %2 : $*Int, %4 : $Builtin.Word +// CHECK-NEXT: MayAlias +// CHECK: PAIR #28. +// CHECK-NEXT: %3 = index_addr %2 : $*Int, %1 : $Builtin.Word +// CHECK-NEXT: %7 = index_addr %2 : $*Int, %5 : $Builtin.Word +// CHECK-NEXT: MayAlias +// CHECK: PAIR #40. +// CHECK-NEXT: %6 = index_addr %2 : $*Int, %4 : $Builtin.Word +// CHECK-NEXT: %7 = index_addr %2 : $*Int, %5 : $Builtin.Word +// CHECK-NEXT: NoAlias +sil @testVectorBaseAddr : $@convention(thin) (@inout Builtin.FixedArray<10, Int>, Builtin.Word) -> () { +bb0(%0 : $*Builtin.FixedArray<10, Int>, %1 : $Builtin.Word): + %2 = vector_base_addr %0 + %3 = index_addr %2, %1 + %4 = integer_literal $Builtin.Word, 1 + %5 = integer_literal $Builtin.Word, 2 + %6 = index_addr %2, %4 + %7 = index_addr %2, %5 + fix_lifetime %2 + fix_lifetime %3 + fix_lifetime %6 + fix_lifetime %7 + %99 = tuple () + return %99 : $() +} diff --git a/test/SILOptimizer/escape_info.sil b/test/SILOptimizer/escape_info.sil index 2230c83e8e223..285c7d5dfd7f6 100644 --- a/test/SILOptimizer/escape_info.sil +++ b/test/SILOptimizer/escape_info.sil @@ -1510,3 +1510,21 @@ bb0: dealloc_stack %0 : $*X return %4 : $Builtin.RawPointer } + +// CHECK-LABEL: Escape information for test_vector_base_addr_escaping: +// CHECK: return[]: %1 = alloc_ref $X +// CHECK: End function test_vector_base_addr_escaping +sil @test_vector_base_addr_escaping : $@convention(thin) () -> @owned X { +bb0: + %0 = alloc_stack $Builtin.FixedArray<10, X> + %1 = alloc_ref $X + %2 = vector_base_addr %0 + %3 = integer_literal $Builtin.Int64, 1 + %4 = index_addr %2, %3 + store %1 to %4 + %6 = vector_base_addr %0 + %7 = index_addr %6, %3 + %8 = load %4 + dealloc_stack %0 + return %8 +} diff --git a/test/SILOptimizer/init_static_globals.sil b/test/SILOptimizer/init_static_globals.sil index 21e58e0cfc1c5..13296ec649897 100644 --- a/test/SILOptimizer/init_static_globals.sil +++ b/test/SILOptimizer/init_static_globals.sil @@ -107,7 +107,8 @@ sil_global [let] @g9 : $TwoFields // CHECK-NEXT: %3 = struct $Int32 (%2) // CHECK-NEXT: %4 = integer_literal $Builtin.Int32, 3 // CHECK-NEXT: %5 = struct $Int32 (%4) -// CHECK-NEXT: %initval = vector (%1, %3, %5) +// CHECK-NEXT: %6 = vector (%1, %3, %5) +// CHECK-NEXT: %initval = struct $InlineArray<3, Int32> (%6) // CHECK-NEXT: } sil_global [let] @inline_array1 : $InlineArray<3, Int32> @@ -127,7 +128,8 @@ sil_global [let] @inline_array1 : $InlineArray<3, Int32> // CHECK-NEXT: %12 = integer_literal $Builtin.Int32, 60 // CHECK-NEXT: %13 = struct $Int32 (%12) // CHECK-NEXT: %14 = tuple (%11, %13) -// CHECK-NEXT: %initval = vector (%4, %9, %14) +// CHECK-NEXT: %15 = vector (%4, %9, %14) +// CHECK-NEXT: %initval = struct $InlineArray<3, (Int32, Int32)> (%15) // CHECK-NEXT: } sil_global [let] @inline_array2 : $InlineArray<3, (Int32, Int32)> @@ -357,8 +359,8 @@ sil [global_init_once_fn] [ossa] @globalinit_inline_array : $@convention(c) (Bui bb0(%0 : $Builtin.RawPointer): alloc_global @inline_array1 %2 = global_addr @inline_array1 : $*InlineArray<3, Int32> - %3 = alloc_stack $InlineArray<3, Int32> - %4 = unchecked_addr_cast %3 to $*Int32 + %3 = alloc_stack $Builtin.FixedArray<3, Int32> + %4 = vector_base_addr %3 %5 = integer_literal $Builtin.Int32, 1 %6 = struct $Int32 (%5) store %6 to [trivial] %4 @@ -373,8 +375,9 @@ bb0(%0 : $Builtin.RawPointer): %16 = struct $Int32 (%15) store %16 to [trivial] %14 %18 = load [trivial] %3 + %19 = struct $InlineArray<3, Int32> (%18) dealloc_stack %3 - store %18 to [trivial] 
%2
+  store %19 to [trivial] %2
   %21 = tuple ()
   return %21
 }
@@ -388,8 +391,8 @@ sil [global_init_once_fn] [ossa] @globalinit_inline_array_of_tuples : $@conventi
 bb0(%0 : $Builtin.RawPointer):
   alloc_global @inline_array2
   %2 = global_addr @inline_array2 : $*InlineArray<3, (Int32, Int32)>
-  %3 = alloc_stack $InlineArray<3, (Int32, Int32)>
-  %4 = unchecked_addr_cast %3 to $*(Int32, Int32)
+  %3 = alloc_stack $Builtin.FixedArray<3, (Int32, Int32)>
+  %4 = vector_base_addr %3
   %5 = tuple_element_addr %4, 0
   %6 = tuple_element_addr %4, 1
   %7 = integer_literal $Builtin.Int32, 10
@@ -419,8 +422,9 @@ bb0(%0 : $Builtin.RawPointer):
   %31 = struct $Int32 (%30)
   store %31 to [trivial] %26
   %33 = load [trivial] %3
+  %34 = struct $InlineArray<3, (Int32, Int32)> (%33)
   dealloc_stack %3
-  store %33 to [trivial] %2
+  store %34 to [trivial] %2
   %36 = tuple ()
   return %36
 }
@@ -434,8 +438,8 @@ sil [global_init_once_fn] [ossa] @no_globalinit_double_store: $@convention(c) (B
 bb0(%0 : $Builtin.RawPointer):
   alloc_global @inline_array3
   %2 = global_addr @inline_array3 : $*InlineArray<2, Int32>
-  %3 = alloc_stack $InlineArray<2, Int32>
-  %4 = unchecked_addr_cast %3 to $*Int32
+  %3 = alloc_stack $Builtin.FixedArray<2, Int32>
+  %4 = vector_base_addr %3
   %5 = integer_literal $Builtin.Int32, 1
   %6 = struct $Int32 (%5)
   store %6 to [trivial] %4
@@ -446,8 +450,9 @@ bb0(%0 : $Builtin.RawPointer):
   store %11 to [trivial] %9
   store %6 to [trivial] %9
   %18 = load [trivial] %3
+  %19 = struct $InlineArray<2, Int32> (%18)
   dealloc_stack %3
-  store %18 to [trivial] %2
+  store %19 to [trivial] %2
   %21 = tuple ()
   return %21
 }
@@ -461,8 +466,8 @@ sil [global_init_once_fn] [ossa] @no_globalinit_extra_load: $@convention(c) (Bui
 bb0(%0 : $Builtin.RawPointer):
   alloc_global @inline_array4
   %2 = global_addr @inline_array4 : $*InlineArray<2, Int32>
-  %3 = alloc_stack $InlineArray<2, Int32>
-  %4 = unchecked_addr_cast %3 to $*Int32
+  %3 = alloc_stack $Builtin.FixedArray<2, Int32>
+  %4 = vector_base_addr %3
   %5 = integer_literal $Builtin.Int32, 1
   %6 = struct $Int32 (%5)
   store %6 to [trivial] %4
@@ -471,8 +476,9 @@ bb0(%0 : $Builtin.RawPointer):
   %10 = load [trivial] %4
   store %10 to [trivial] %9
   %18 = load [trivial] %3
+  %19 = struct $InlineArray<2, Int32> (%18)
   dealloc_stack %3
-  store %18 to [trivial] %2
+  store %19 to [trivial] %2
   %21 = tuple ()
   return %21
 }
@@ -486,8 +492,8 @@ sil [global_init_once_fn] [ossa] @no_globalinit_no_load: $@convention(c) (Builti
 bb0(%0 : $Builtin.RawPointer):
   alloc_global @gint
   %2 = global_addr @gint : $*Int32
-  %3 = alloc_stack $InlineArray<2, Int32>
-  %4 = unchecked_addr_cast %3 to $*Int32
+  %3 = alloc_stack $Builtin.FixedArray<2, Int32>
+  %4 = vector_base_addr %3
   %5 = integer_literal $Builtin.Int32, 1
   %6 = struct $Int32 (%5)
   store %6 to [trivial] %4
@@ -511,11 +517,12 @@ sil [global_init_once_fn] [ossa] @no_globalinit_empty_inline_array : $@conventio
 bb0(%0 : $Builtin.RawPointer):
   alloc_global @empty_inline_array
   %2 = global_addr @empty_inline_array : $*InlineArray<0, Int32>
-  %3 = alloc_stack $InlineArray<0, Int32>
-  %4 = unchecked_addr_cast %3 to $*Int32
+  %3 = alloc_stack $Builtin.FixedArray<0, Int32>
+  %4 = vector_base_addr %3
   %18 = load [trivial] %3
+  %19 = struct $InlineArray<0, Int32> (%18)
   dealloc_stack %3
-  store %18 to [trivial] %2
+  store %19 to [trivial] %2
   %21 = tuple ()
   return %21
 }
@@ -529,15 +536,16 @@ sil [global_init_once_fn] [ossa] @no_globalinit_inline_array_empty_elements : $@
 bb0(%0 : $Builtin.RawPointer):
   alloc_global @inline_array_empty_elements
   %2 = global_addr @inline_array_empty_elements : $*InlineArray<3, ()>
-  %3 = alloc_stack $InlineArray<3, ()>
-  %4 = unchecked_addr_cast %3 to $*()
+  %3 = alloc_stack $Builtin.FixedArray<3, ()>
+  %4 = vector_base_addr %3
   %13 = integer_literal $Builtin.Word, 2
   %14 = index_addr %4, %13
   %15 = tuple ()
   store %15 to [trivial] %14
   %18 = load [trivial] %3
+  %19 = struct $InlineArray<3, ()> (%18)
   dealloc_stack %3
-  store %18 to [trivial] %2
+  store %19 to [trivial] %2
   %21 = tuple ()
   return %21
 }
diff --git a/test/SILOptimizer/redundant_load_elim_ossa.sil b/test/SILOptimizer/redundant_load_elim_ossa.sil
index 2084ba1154ff9..8ddedacace959 100644
--- a/test/SILOptimizer/redundant_load_elim_ossa.sil
+++ b/test/SILOptimizer/redundant_load_elim_ossa.sil
@@ -1743,3 +1743,18 @@ bb0(%0 : $Int):
   dealloc_stack %3
   return %6
 }
+
+// CHECK-LABEL: sil [ossa] @vector :
+// CHECK: return %1
+// CHECK-LABEL: } // end sil function 'vector'
+sil [ossa] @vector : $@convention(thin) (@inout Builtin.FixedArray<10, Int>, Int, Int) -> Int {
+bb0(%0 : $*Builtin.FixedArray<10, Int>, %1 : $Int, %2 : $Int):
+  %3 = vector_base_addr %0
+  %4 = integer_literal $Builtin.Word, 1
+  %5 = index_addr %3, %4
+  store %1 to [trivial] %5
+  store %2 to [trivial] %3
+  %6 = load [trivial] %5
+  return %6
+}
+
diff --git a/test/SILOptimizer/simplify_unchecked_addr_cast.sil b/test/SILOptimizer/simplify_unchecked_addr_cast.sil
new file mode 100644
index 0000000000000..43a83f0f7dd13
--- /dev/null
+++ b/test/SILOptimizer/simplify_unchecked_addr_cast.sil
@@ -0,0 +1,73 @@
+// RUN: %target-sil-opt %s -onone-simplification -simplify-instruction=unchecked_addr_cast | %FileCheck %s
+// RUN: %target-sil-opt %s -simplification -simplify-instruction=unchecked_addr_cast | %FileCheck %s
+
+import Swift
+import Builtin
+
+// CHECK-LABEL: sil [ossa] @same_type :
+// CHECK-NOT: unchecked_addr_cast
+// CHECK: %1 = load [trivial] %0
+// CHECK: } // end sil function 'same_type'
+sil [ossa] @same_type : $@convention(thin) (@inout Int) -> Int {
+bb0(%0 : $*Int):
+  %1 = unchecked_addr_cast %0 to $*Int
+  %2 = load [trivial] %1
+  return %2
+}
+
+// CHECK-LABEL: sil [ossa] @not_same_type :
+// CHECK: %1 = unchecked_addr_cast %0 to $*Float
+// CHECK: %2 = load [trivial] %1
+// CHECK: } // end sil function 'not_same_type'
+sil [ossa] @not_same_type : $@convention(thin) (@inout Int) -> Float {
+bb0(%0 : $*Int):
+  %1 = unchecked_addr_cast %0 to $*Float
+  %2 = load [trivial] %1
+  return %2
+}
+
+// CHECK-LABEL: sil [ossa] @double_cast :
+// CHECK: %1 = unchecked_addr_cast %0 to $*Bool
+// CHECK: %2 = load [trivial] %1
+// CHECK: } // end sil function 'double_cast'
+sil [ossa] @double_cast : $@convention(thin) (@inout Int) -> Bool {
+bb0(%0 : $*Int):
+  %1 = unchecked_addr_cast %0 to $*Float
+  %2 = unchecked_addr_cast %1 to $*Bool
+  %3 = load [trivial] %2
+  return %3
+}
+
+// CHECK-LABEL: sil [ossa] @vector_base :
+// CHECK: %1 = vector_base_addr %0
+// CHECK: %2 = load [trivial] %1
+// CHECK: } // end sil function 'vector_base'
+sil [ossa] @vector_base : $@convention(thin) (@inout Builtin.FixedArray<10, Int>) -> Int {
+bb0(%0 : $*Builtin.FixedArray<10, Int>):
+  %1 = unchecked_addr_cast %0 to $*Int
+  %2 = load [trivial] %1
+  return %2
+}
+
+// CHECK-LABEL: sil [ossa] @vector_base_wrong_type :
+// CHECK: %1 = unchecked_addr_cast %0 to $*Bool
+// CHECK: %2 = load [trivial] %1
+// CHECK: } // end sil function 'vector_base_wrong_type'
+sil [ossa] @vector_base_wrong_type : $@convention(thin) (@inout Builtin.FixedArray<10, Int>) -> Bool {
+bb0(%0 : $*Builtin.FixedArray<10, Int>):
+  %1 = unchecked_addr_cast %0 to $*Bool
+  %2 = load [trivial] %1
+  return %2
+}
+
+// CHECK-LABEL: sil [ossa] @vector_base_function_type :
+// CHECK: %1 = vector_base_addr %0
+// CHECK: %2 = load [copy] %1
+// CHECK: } // end sil function 'vector_base_function_type'
+sil [ossa] @vector_base_function_type : $@convention(thin) (@inout Builtin.FixedArray<10, ()->()>) -> @owned @callee_guaranteed @substituted <τ_0_0> () -> @out τ_0_0 for <()> {
+bb0(%0 : $*Builtin.FixedArray<10, ()->()>):
+  %1 = unchecked_addr_cast %0 to $*@callee_guaranteed @substituted <τ_0_0> () -> @out τ_0_0 for <()>
+  %2 = load [copy] %1
+  return %2
+}
+
diff --git a/test/SILOptimizer/static_inline_arrays.swift b/test/SILOptimizer/static_inline_arrays.swift
index 9009913c87c60..a5af2dd878b35 100644
--- a/test/SILOptimizer/static_inline_arrays.swift
+++ b/test/SILOptimizer/static_inline_arrays.swift
@@ -25,18 +25,21 @@ struct IntByteAndByte {
 struct S {
   // CHECK-LABEL: sil_global hidden [let] @$s4test1SV6simples11InlineArrayVy$2_SiGvpZ : $InlineArray<3, Int> = {
-  // CHECK: %initval = vector
+  // CHECK: [[V:%.*]] = vector
+  // CHECK: %initval = struct $InlineArray<3, Int> ([[V]])
   // CHECK: }
   static let simple: InlineArray = [1, 2, 3]

   // CHECK-LABEL: sil_global hidden [let] @$s4test1SV12optionalIntss11InlineArrayVy$2_SiSgGvpZ : $InlineArray<3, Optional<Int>> = {
-  // CHECK: %initval = vector
+  // CHECK: [[V:%.*]] = vector
+  // CHECK: %initval = struct $InlineArray<3, Optional<Int>> ([[V]])
   // CHECK: }
   static let optionalInts: InlineArray<_, Int?> = [10, 20, 30]

   // CHECK-LABEL: sil_global hidden [let] @$s4test1SV13optionalArrays06InlineC0Vy$2_SiGSgvpZ : $Optional<InlineArray<3, Int>> = {
   // CHECK: [[V:%.*]] = vector
-  // CHECK: %initval = enum $Optional<InlineArray<3, Int>>, #Optional.some!enumelt, [[V]]
+  // CHECK: [[A:%.*]] = struct $InlineArray<3, Int> ([[V]])
+  // CHECK: %initval = enum $Optional<InlineArray<3, Int>>, #Optional.some!enumelt, [[A]]
   // CHECK: }
   static let optionalArray: InlineArray? = [1, 2, 3]
@@ -44,13 +47,15 @@ struct S {
   // CHECK: [[S0:%.*]] = struct $IntByte
   // CHECK: [[S1:%.*]] = struct $IntByte
   // CHECK: [[S2:%.*]] = struct $IntByte
-  // CHECK: %initval = vector ([[S0]], [[S1]], [[S2]])
+  // CHECK: [[V:%.*]] = vector ([[S0]], [[S1]], [[S2]])
+  // CHECK: %initval = struct $InlineArray<3, IntByte> ([[V]])
   // CHECK: }
   static let intBytePairs: InlineArray<_, IntByte> = [IntByte(i: 1, b: 2), IntByte(i: 3, b: 4), IntByte(i: 5, b: 6)]

   // CHECK-LABEL: sil_global hidden [let] @$s4test1SV26optionalInlineArrayOfPairss0cD0Vy$2_AA7IntByteVGSgvpZ : $Optional<InlineArray<3, IntByte>> = {
   // CHECK: [[V:%.*]] = vector
-  // CHECK: %initval = enum $Optional<InlineArray<3, IntByte>>, #Optional.some!enumelt, [[V]]
+  // CHECK: [[A:%.*]] = struct $InlineArray<3, IntByte> ([[V]])
+  // CHECK: %initval = enum $Optional<InlineArray<3, IntByte>>, #Optional.some!enumelt, [[A]]
   // CHECK: }
   static let optionalInlineArrayOfPairs: InlineArray<_, IntByte>? = [IntByte(i: 11, b: 12), IntByte(i: 13, b: 14), IntByte(i: 15, b: 16)]
@@ -58,21 +63,27 @@ struct S {
   // CHECK: [[T0:%.*]] = tuple
   // CHECK: [[T1:%.*]] = tuple
   // CHECK: [[T2:%.*]] = tuple
-  // CHECK: %initval = vector ([[T0]], [[T1]], [[T2]])
+  // CHECK: [[V:%.*]] = vector ([[T0]], [[T1]], [[T2]])
+  // CHECK: %initval = struct $InlineArray<3, (Int, Int)> ([[V]])
   // CHECK: }
   static let tuples: InlineArray = [(10, 20), (30, 40), (50, 60)]

   // CHECK-LABEL: sil_global hidden [let] @$s4test1SV6nesteds11InlineArrayVy$2_AFy$1_SiGGvpZ : $InlineArray<3, InlineArray<2, Int>> = {
   // CHECK: [[V0:%.*]] = vector
+  // CHECK: [[A0:%.*]] = struct $InlineArray<2, Int> ([[V0]])
   // CHECK: [[V1:%.*]] = vector
+  // CHECK: [[A1:%.*]] = struct $InlineArray<2, Int> ([[V1]])
   // CHECK: [[V2:%.*]] = vector
-  // CHECK: %initval = vector ([[V0]], [[V1]], [[V2]])
+  // CHECK: [[A2:%.*]] = struct $InlineArray<2, Int> ([[V2]])
+  // CHECK: [[V:%.*]] = vector ([[A0]], [[A1]], [[A2]])
+  // CHECK: %initval = struct $InlineArray<3, InlineArray<2, Int>> ([[V]])
   // CHECK: }
   static let nested: InlineArray<3, InlineArray<2, Int>> = [[100, 200], [300, 400], [500, 600]]

   // CHECK-LABEL: sil_global hidden [let] @$s4test1SV010intByteAndC0AA03IntcdC0VvpZ : $IntByteAndByte = {
   // CHECK: [[V:%.*]] = vector
-  // CHECK: %initval = struct $IntByteAndByte ([[V]],
+  // CHECK: [[A:%.*]] = struct $InlineArray<3, IntByte> ([[V]])
+  // CHECK: %initval = struct $IntByteAndByte ([[A]],
   // CHECK: }
   static let intByteAndByte = IntByteAndByte(a: [IntByte(i: 1, b: 2), IntByte(i: 3, b: 4), IntByte(i: 5, b: 6)], x: 27)
 }
diff --git a/test/Serialization/Inputs/def_basic.sil b/test/Serialization/Inputs/def_basic.sil
index bc78012a84e3b..35b2ca427caf3 100644
--- a/test/Serialization/Inputs/def_basic.sil
+++ b/test/Serialization/Inputs/def_basic.sil
@@ -120,6 +120,16 @@ bb0(%0 : $Int): // CHECK: bb0(%0 : $Int):
   return %3 : $Int // CHECK: return {{.*}} : $Int
 }

+// CHECK-LABEL: sil public_external @vector_base_addr :
+// CHECK: %1 = vector_base_addr %0 : $*Builtin.FixedArray<10, Int>
+// CHECK-LABEL: } // end sil function 'vector_base_addr'
+sil [serialized] @vector_base_addr : $@convention(thin) (@inout Builtin.FixedArray<10, Int>) -> Int {
+bb0(%0 : $*Builtin.FixedArray<10, Int>):
+  %1 = vector_base_addr %0
+  %2 = load %1
+  return %2
+}
+
 // CHECK-LABEL: @call_fn_pointer : $@convention(thin) (() -> Int) -> Int {
 sil [transparent] [serialized] @call_fn_pointer : $@convention(thin) (() -> Int) -> Int {
 bb0(%0 : $() -> Int):
@@ -1528,6 +1538,7 @@ bb0:
   %4 = function_ref @test2 : $@convention(thin) (Int) -> ()
   %6 = function_ref @named_tuple : $@convention(thin) () -> (Builtin.Word, Builtin.Word)
   %9 = function_ref @return_int : $@convention(thin) (Int) -> Int
+  %10 = function_ref @vector_base_addr : $@convention(thin) (@inout Builtin.FixedArray<10, Int>) -> Int
   %11 = function_ref @call_fn_pointer : $@convention(thin) (() -> Int) -> Int
   %13 = function_ref @return_constant : $@convention(thin) () -> Int
   %23 = function_ref @existentials : $@convention(thin) (@in P) -> ()
diff --git a/test/abi/macOS/arm64/stdlib.swift b/test/abi/macOS/arm64/stdlib.swift
index 1e8c39edcaa35..6517b81aef889 100644
--- a/test/abi/macOS/arm64/stdlib.swift
+++ b/test/abi/macOS/arm64/stdlib.swift
@@ -1122,3 +1122,8 @@ Added: $ld$previous$@rpath/libswiftCompatibilitySpan.dylib$$1$10.14$15.0$_$ss7Ra

 // Duration.nanoseconds(_:)
 Added: _$ss8DurationV11nanosecondsyABSdFZ
+
+// var InlineArray._storage
+Added: _$ss11InlineArrayVsRi__rlE8_storagexq_BVvM
+Added: _$ss11InlineArrayVsRi__rlE8_storagexq_BVvs
+
diff --git a/test/abi/macOS/x86_64/stdlib.swift b/test/abi/macOS/x86_64/stdlib.swift
index ea0cdc82b5da4..4aef353b83dbc 100644
--- a/test/abi/macOS/x86_64/stdlib.swift
+++ b/test/abi/macOS/x86_64/stdlib.swift
@@ -1123,3 +1123,8 @@ Added: $ld$previous$@rpath/libswiftCompatibilitySpan.dylib$$1$10.14$15.0$_$ss7Ra

 // Duration.nanoseconds(_:)
 Added: _$ss8DurationV11nanosecondsyABSdFZ
+
+// var InlineArray._storage
+Added: _$ss11InlineArrayVsRi__rlE8_storagexq_BVvM
+Added: _$ss11InlineArrayVsRi__rlE8_storagexq_BVvs
+