//===--- SliceBuffer.swift - Backing storage for ArraySlice ---------------===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See https://swift.org/LICENSE.txt for license information
// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//

/// Buffer type for `ArraySlice<Element>`.
///
/// A `_SliceBuffer` views a contiguous sub-range of some owning storage
/// (usually a `_ContiguousArrayBuffer`). `owner` keeps that storage alive;
/// `subscriptBaseAddress` is positioned so that element `i` of the slice
/// lives at `subscriptBaseAddress[i]` for `startIndex <= i < endIndex`.
@frozen
@usableFromInline
internal struct _SliceBuffer<Element>
  : _ArrayBufferProtocol,
    RandomAccessCollection
{
  internal typealias NativeStorage = _ContiguousArrayStorage<Element>
  @usableFromInline
  internal typealias NativeBuffer = _ContiguousArrayBuffer<Element>

  /// An object that keeps the elements stored in this buffer alive.
  @usableFromInline
  internal var owner: AnyObject

  // Base address chosen so that `subscriptBaseAddress + startIndex` is the
  // first element of the slice; this lets subscripting use the slice's own
  // (non-zero-based) indices without an extra subtraction.
  @usableFromInline
  internal let subscriptBaseAddress: UnsafeMutablePointer<Element>

  /// The position of the first element in a non-empty collection.
  ///
  /// In an empty collection, `startIndex == endIndex`.
  @usableFromInline
  internal var startIndex: Int

  /// [63:1: 63-bit index][0: has a native buffer]
  @usableFromInline
  internal var endIndexAndFlags: UInt

  @inlinable
  internal init(
    owner: AnyObject,
    subscriptBaseAddress: UnsafeMutablePointer<Element>,
    startIndex: Int,
    endIndexAndFlags: UInt
  ) {
    self.owner = owner
    self.subscriptBaseAddress = subscriptBaseAddress
    self.startIndex = startIndex
    self.endIndexAndFlags = endIndexAndFlags
  }

  @inlinable
  internal init(
    owner: AnyObject,
    subscriptBaseAddress: UnsafeMutablePointer<Element>,
    indices: Range<Int>,
    hasNativeBuffer: Bool
  ) {
    self.owner = owner
    self.subscriptBaseAddress = subscriptBaseAddress
    self.startIndex = indices.lowerBound
    // Pack the end index into the upper 63 bits; bit 0 records whether the
    // backing store is a native contiguous buffer.
    let bufferFlag = UInt(hasNativeBuffer ? 1 : 0)
    self.endIndexAndFlags = (UInt(indices.upperBound) << 1) | bufferFlag
    _invariantCheck()
  }

  /// Creates an empty slice over freshly-created empty native storage.
  @inlinable
  internal init() {
    let empty = _ContiguousArrayBuffer<Element>()
    self.owner = empty.owner
    self.subscriptBaseAddress = empty.firstElementAddress
    self.startIndex = empty.startIndex
    self.endIndexAndFlags = 1
    _invariantCheck()
  }

  /// Creates a slice covering all of `buffer`, re-based so that its first
  /// element has index `shiftedToStartIndex`.
  @inlinable
  internal init(_buffer buffer: NativeBuffer, shiftedToStartIndex: Int) {
    let shift = buffer.startIndex - shiftedToStartIndex
    self.init(
      owner: buffer.owner,
      subscriptBaseAddress: buffer.subscriptBaseAddress + shift,
      indices: shiftedToStartIndex..<shiftedToStartIndex + buffer.count,
      hasNativeBuffer: true)
  }

  // NOTE(review): the following four members were lost to an extraction
  // artifact that deleted every `<...>` span of the original text; they are
  // reconstructed to match the visible call sites (`_invariantCheck()`,
  // `_hasNativeBuffer`, `nativeBuffer` are used throughout this file, and
  // `nativeOwner` is an `_ArrayBufferProtocol` requirement) — confirm
  // against the upstream file.
  @inlinable
  internal func _invariantCheck() {
    let isNative = _hasNativeBuffer
    let isNativeStorage: Bool = owner is __ContiguousArrayStorageBase
    _internalInvariant(isNativeStorage == isNative)
    if isNative {
      _internalInvariant(count <= nativeBuffer.count)
    }
  }

  /// True when bit 0 of `endIndexAndFlags` is set, i.e. when the slice is
  /// backed by native contiguous storage.
  @inlinable
  internal var _hasNativeBuffer: Bool {
    return (endIndexAndFlags & 1) != 0
  }

  /// The native buffer backing this slice.
  ///
  /// - Precondition: `_hasNativeBuffer`.
  @inlinable
  internal var nativeBuffer: NativeBuffer {
    _internalInvariant(_hasNativeBuffer)
    return NativeBuffer(
      owner as? __ContiguousArrayStorageBase ?? _emptyArrayStorage)
  }

  @inlinable
  internal var nativeOwner: AnyObject {
    _internalInvariant(_hasNativeBuffer, "Expect a native array")
    return owner
  }

  /// Replace the given `subrange` with the first `insertCount` elements of
  /// the given collection.
  ///
  /// - Precondition: This buffer is backed by a uniquely-referenced
  ///   `_ContiguousArrayBuffer` and
  ///   `insertCount <= numericCast(newValues.count)`.
  @inlinable
  internal mutating func replaceSubrange<C>(
    _ subrange: Range<Int>,
    with insertCount: Int,
    elementsOf newValues: __owned C
  ) where C: Collection, C.Element == Element {
    _invariantCheck()
    _internalInvariant(insertCount <= numericCast(newValues.count))

    _internalInvariant(_hasNativeBuffer)
    _internalInvariant(isUniquelyReferenced())

    let eraseCount = subrange.count
    let growth = insertCount - eraseCount
    let oldCount = count

    var native = nativeBuffer
    // Elements in the backing store that precede the slice's view.
    let hiddenElementCount = firstElementAddress - native.firstElementAddress

    _internalInvariant(native.count + growth <= native.capacity)

    // Translate slice-relative bounds into backing-store offsets.
    let start = subrange.lowerBound - startIndex + hiddenElementCount
    let end = subrange.upperBound - startIndex + hiddenElementCount
    native.replaceSubrange(
      start..<end,
      with: insertCount,
      elementsOf: newValues)

    self.endIndex = self.startIndex + oldCount + growth

    _invariantCheck()
  }

  /// A pointer to the first element of the slice.
  @inlinable
  internal var firstElementAddress: UnsafeMutablePointer<Element> {
    return subscriptBaseAddress + startIndex
  }

  @inlinable
  internal var firstElementAddressIfContiguous: UnsafeMutablePointer<Element>? {
    return firstElementAddress
  }

  //===--- Non-essential bits ---------------------------------------------===//

  @inlinable
  internal mutating func requestUniqueMutableBackingBuffer(
    minimumCapacity: Int
  ) -> NativeBuffer? {
    _invariantCheck()
    // This is a performance optimization that was put in to ensure that at
    // -Onone, copy of self we make to call _hasNativeBuffer is destroyed before
    // we call isUniquelyReferenced. Otherwise, isUniquelyReferenced will always
    // fail causing us to always copy.
    //
    //   if _fastPath(_hasNativeBuffer && isUniquelyReferenced) {
    //
    // SR-6437
    let native = _hasNativeBuffer
    let unique = isUniquelyReferenced()
    if _fastPath(native && unique) {
      if capacity >= minimumCapacity {
        // Since we have the last reference, drop any inaccessible
        // trailing elements in the underlying storage.  That will
        // tend to reduce shuffling of later elements.  Since this
        // function isn't called for subscripting, this won't slow
        // down that case.
        var native = nativeBuffer
        let offset = self.firstElementAddress - native.firstElementAddress
        let backingCount = native.count
        let myCount = count

        if _slowPath(backingCount > myCount + offset) {
          native.replaceSubrange(
            (myCount + offset)..<backingCount,
            with: 0,
            elementsOf: EmptyCollection())
        }
        _invariantCheck()
        return native
      }
    }
    return nil
  }

  @inlinable
  internal mutating func isMutableAndUniquelyReferenced() -> Bool {
    // This is a performance optimization that ensures that the copy of self
    // that occurs at -Onone is destroyed before we call
    // isUniquelyReferenced. This code used to be:
    //
    //   return _hasNativeBuffer && isUniquelyReferenced()
    //
    // SR-6437
    if !_hasNativeBuffer {
      return false
    }
    return isUniquelyReferenced()
  }

  /// If this buffer is backed by a `_ContiguousArrayBuffer`
  /// containing the same number of elements as `self`, return it.
  /// Otherwise, return `nil`.
  @inlinable
  internal func requestNativeBuffer() -> _ContiguousArrayBuffer<Element>? {
    _invariantCheck()
    if _fastPath(_hasNativeBuffer && nativeBuffer.count == count) {
      return nativeBuffer
    }
    return nil
  }

  /// Copy the elements in `bounds` into uninitialized memory at `target`,
  /// returning a pointer one past the last element written.
  @inlinable
  @discardableResult
  internal __consuming func _copyContents(
    subRange bounds: Range<Int>,
    initializing target: UnsafeMutablePointer<Element>
  ) -> UnsafeMutablePointer<Element> {
    _invariantCheck()
    _internalInvariant(bounds.lowerBound >= startIndex)
    _internalInvariant(bounds.upperBound >= bounds.lowerBound)
    _internalInvariant(bounds.upperBound <= endIndex)
    let c = bounds.count
    target.initialize(from: subscriptBaseAddress + bounds.lowerBound, count: c)
    return target + c
  }

  public __consuming func _copyContents(
    initializing buffer: UnsafeMutableBufferPointer<Element>
  ) -> (Iterator, UnsafeMutableBufferPointer<Element>.Index) {
    // This customization point is not implemented for internal types.
    // Accidentally calling it would be a catastrophic performance bug.
    fatalError("unsupported")
  }

  /// True, if the array is native and does not need a deferred type check.
  @inlinable
  internal var arrayPropertyIsNativeTypeChecked: Bool {
    return _hasNativeBuffer
  }

  @inlinable
  internal var count: Int {
    get {
      return endIndex - startIndex
    }
    set {
      let growth = newValue - count
      if growth != 0 {
        nativeBuffer.count += growth
        self.endIndex += growth
      }
      _invariantCheck()
    }
  }

  /// Traps unless the given `index` is valid for subscripting, i.e.
  /// `startIndex ≤ index < endIndex`
  @inlinable
  internal func _checkValidSubscript(_ index: Int) {
    _precondition(
      index >= startIndex && index < endIndex, "Index out of bounds")
  }

  /// The number of elements this slice can hold without reallocating,
  /// counting reachable spare capacity at the tail of the native buffer.
  @inlinable
  internal var capacity: Int {
    let count = self.count
    if _slowPath(!_hasNativeBuffer) {
      return count
    }
    let n = nativeBuffer
    let nativeEnd = n.firstElementAddress + n.count
    // Spare capacity is only usable when the slice extends to the very end
    // of the native storage.
    if (firstElementAddress + count) == nativeEnd {
      return count + (n.capacity - n.count)
    }
    return count
  }

  @inlinable
  internal mutating func isUniquelyReferenced() -> Bool {
    return isKnownUniquelyReferenced(&owner)
  }

  @inlinable
  internal func getElement(_ i: Int) -> Element {
    _internalInvariant(i >= startIndex, "slice index is out of range (before startIndex)")
    _internalInvariant(i < endIndex, "slice index is out of range")
    return subscriptBaseAddress[i]
  }

  /// Access the element at `position`.
  ///
  /// - Precondition: `position` is a valid position in `self` and
  ///   `position != endIndex`.
  @inlinable
  internal subscript(position: Int) -> Element {
    get {
      return getElement(position)
    }
    nonmutating set {
      _internalInvariant(position >= startIndex, "slice index is out of range (before startIndex)")
      _internalInvariant(position < endIndex, "slice index is out of range")
      subscriptBaseAddress[position] = newValue
    }
  }

  @inlinable
  internal subscript(bounds: Range<Int>) -> _SliceBuffer<Element> {
    get {
      _internalInvariant(bounds.lowerBound >= startIndex)
      _internalInvariant(bounds.upperBound >= bounds.lowerBound)
      _internalInvariant(bounds.upperBound <= endIndex)
      return _SliceBuffer(
        owner: owner,
        subscriptBaseAddress: subscriptBaseAddress,
        indices: bounds,
        hasNativeBuffer: _hasNativeBuffer)
    }
    set {
      fatalError("not implemented")
    }
  }

  //===--- Collection conformance -------------------------------------===//

  /// The collection's "past the end" position---that is, the position one
  /// greater than the last valid subscript argument.
  ///
  /// `endIndex` is always reachable from `startIndex` by zero or more
  /// applications of `index(after:)`.
  @inlinable
  internal var endIndex: Int {
    get {
      return Int(endIndexAndFlags >> 1)
    }
    set {
      // Preserve the has-native-buffer flag in bit 0 while rewriting the
      // 63-bit index above it.
      endIndexAndFlags = (UInt(newValue) << 1) | (_hasNativeBuffer ? 1 : 0)
    }
  }

  @usableFromInline
  internal typealias Indices = Range<Int>

  //===--- misc -----------------------------------------------------------===//

  /// Call `body(p)`, where `p` is an `UnsafeBufferPointer` over the
  /// underlying contiguous storage.
  @inlinable
  internal func withUnsafeBufferPointer<R>(
    _ body: (UnsafeBufferPointer<Element>) throws -> R
  ) rethrows -> R {
    defer { _fixLifetime(self) }
    return try body(UnsafeBufferPointer(start: firstElementAddress,
      count: count))
  }

  /// Call `body(p)`, where `p` is an `UnsafeMutableBufferPointer`
  /// over the underlying contiguous storage.
  @inlinable
  internal mutating func withUnsafeMutableBufferPointer<R>(
    _ body: (UnsafeMutableBufferPointer<Element>) throws -> R
  ) rethrows -> R {
    defer { _fixLifetime(self) }
    return try body(
      UnsafeMutableBufferPointer(start: firstElementAddress, count: count))
  }

  /// Reinterpret the slice's elements as type `T` without copying.
  ///
  /// - Precondition: `T` is a class or `@objc` existential, so every element
  ///   has the same reference-sized layout.
  @inlinable
  internal func unsafeCastElements<T>(to type: T.Type) -> _SliceBuffer<T> {
    _internalInvariant(_isClassOrObjCExistential(T.self))
    let baseAddress = UnsafeMutableRawPointer(self.subscriptBaseAddress)
      .assumingMemoryBound(to: T.self)
    return _SliceBuffer<T>(
      owner: self.owner,
      subscriptBaseAddress: baseAddress,
      startIndex: self.startIndex,
      endIndexAndFlags: self.endIndexAndFlags)
  }
}

extension _SliceBuffer {
  /// Copy the slice into a freshly-allocated `ContiguousArray`, reusing the
  /// native buffer without copying when the slice covers it entirely.
  @inlinable
  internal __consuming func _copyToContiguousArray() -> ContiguousArray<Element> {
    if _hasNativeBuffer {
      let n = nativeBuffer
      if count == n.count {
        return ContiguousArray(_buffer: n)
      }
    }

    let result = _ContiguousArrayBuffer<Element>(
      _uninitializedCount: count,
      minimumCapacity: 0)
    result.firstElementAddress.initialize(
      from: firstElementAddress, count: count)
    return ContiguousArray(_buffer: result)
  }
}