//===--- SliceBuffer.swift - Backing storage for ArraySlice ------===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See https://swift.org/LICENSE.txt for license information
// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//

// NOTE(review): the extracted source had every `<...>` span removed by
// markup stripping: all generic parameter lists are missing, and three
// longer spans between a `..<` and the next `>` were deleted entirely.
// The generics and the "restored span" sections below were reconstructed
// from the upstream stdlib SliceBuffer.swift — verify against the original
// before merging.

/// Buffer type for `ArraySlice<Element>`.
///
/// Wraps a (possibly shared) contiguous block of storage together with the
/// half-open range `startIndex..<endIndex` that this slice exposes.
/// `subscriptBaseAddress` is pre-shifted so the slice can be indexed with
/// the *original* array's indices without further arithmetic.
@_fixed_layout
@usableFromInline
internal struct _SliceBuffer<Element>
  : _ArrayBufferProtocol,
    RandomAccessCollection
{
  internal typealias NativeStorage = _ContiguousArrayStorage<Element>
  internal typealias NativeBuffer = _ContiguousArrayBuffer<Element>

  @inlinable
  internal init(
    owner: AnyObject,
    subscriptBaseAddress: UnsafeMutablePointer<Element>,
    indices: Range<Int>,
    hasNativeBuffer: Bool
  ) {
    self.owner = owner
    self.subscriptBaseAddress = subscriptBaseAddress
    self.startIndex = indices.lowerBound
    // Pack the upper bound and the native-buffer flag into a single word:
    // bit 0 is the flag, the remaining bits hold `endIndex`.
    let bufferFlag = UInt(hasNativeBuffer ? 1 : 0)
    self.endIndexAndFlags = (UInt(indices.upperBound) << 1) | bufferFlag
    _invariantCheck()
  }

  /// Creates an empty slice over the shared empty array storage.
  @inlinable
  internal init() {
    let empty = _ContiguousArrayBuffer<Element>()
    self.owner = empty.owner
    self.subscriptBaseAddress = empty.firstElementAddress
    self.startIndex = empty.startIndex
    // endIndex == 0, hasNativeBuffer flag set.
    self.endIndexAndFlags = 1
    _invariantCheck()
  }

  @inlinable
  internal init(_buffer buffer: NativeBuffer, shiftedToStartIndex: Int) {
    let shift = buffer.startIndex - shiftedToStartIndex
    self.init(
      owner: buffer.owner,
      subscriptBaseAddress: buffer.subscriptBaseAddress + shift,
      indices: shiftedToStartIndex..<shiftedToStartIndex + buffer.count,
      hasNativeBuffer: true)
  }

  // --- begin restored span (lost to markup stripping) --------------------

  /// Checks that the native-buffer flag agrees with the dynamic type of
  /// `owner`, and that the slice does not extend past its backing storage.
  @inlinable
  internal func _invariantCheck() {
    let isNative = _hasNativeBuffer
    let isNativeStorage: Bool = owner is _ContiguousArrayStorageBase
    _sanityCheck(isNativeStorage == isNative)
    if isNative {
      _sanityCheck(count <= nativeBuffer.count)
    }
  }

  /// True if `owner` is native array storage (bit 0 of `endIndexAndFlags`).
  @inlinable
  internal var _hasNativeBuffer: Bool {
    return (endIndexAndFlags & 1) != 0
  }

  @inlinable
  internal var nativeBuffer: NativeBuffer {
    _sanityCheck(_hasNativeBuffer)
    return NativeBuffer(
      owner as? _ContiguousArrayStorageBase ?? _emptyArrayStorage)
  }

  @inlinable
  internal var nativeOwner: AnyObject {
    _sanityCheck(_hasNativeBuffer, "Expect a native array")
    return owner
  }

  // --- end restored span -------------------------------------------------

  /// Replace the given `subrange` with the first `insertCount` elements of
  /// `newValues`.
  ///
  /// - Precondition: This buffer is backed by a uniquely-referenced
  ///   `_ContiguousArrayBuffer` and
  ///   `insertCount <= numericCast(newValues.count)`.
  @inlinable
  internal mutating func replaceSubrange<C>(
    _ subrange: Range<Int>,
    with insertCount: Int,
    elementsOf newValues: C
  ) where C : Collection, C.Element == Element {
    _invariantCheck()
    _sanityCheck(insertCount <= numericCast(newValues.count))

    _sanityCheck(_hasNativeBuffer)
    _sanityCheck(isUniquelyReferenced())

    let eraseCount = subrange.count
    let growth = insertCount - eraseCount
    let oldCount = count

    var native = nativeBuffer
    // Number of backing-store elements that precede this slice's window.
    let hiddenElementCount = firstElementAddress - native.firstElementAddress

    _sanityCheck(native.count + growth <= native.capacity)

    // Translate slice-relative bounds into backing-buffer positions.
    let start = subrange.lowerBound - startIndex + hiddenElementCount
    let end = subrange.upperBound - startIndex + hiddenElementCount
    native.replaceSubrange(
      start..<end, // restored: range bound lost to stripping
      with: insertCount,
      elementsOf: newValues)

    self.endIndex = self.startIndex + oldCount + growth

    _invariantCheck()
  }

  // --- begin restored span (lost to markup stripping) --------------------

  /// A value that identifies the storage used by the buffer.  Two
  /// buffers address the same elements when they have the same
  /// identity and count.
  @inlinable
  internal var identity: UnsafeRawPointer {
    return UnsafeRawPointer(firstElementAddress)
  }

  /// An object that keeps the elements stored in this buffer alive.
  @usableFromInline
  internal var owner: AnyObject
  @usableFromInline
  internal let subscriptBaseAddress: UnsafeMutablePointer<Element>

  // --- end restored span -------------------------------------------------

  @inlinable // FIXME(sil-serialize-all)
  internal var firstElementAddress: UnsafeMutablePointer<Element> {
    return subscriptBaseAddress + startIndex
  }

  @inlinable // FIXME(sil-serialize-all)
  internal var firstElementAddressIfContiguous: UnsafeMutablePointer<Element>? {
    // A slice buffer is always contiguous, so this never returns nil.
    return firstElementAddress
  }

  /// [63:1: 63-bit index][0: has a native buffer]
  @usableFromInline
  internal var endIndexAndFlags: UInt

  //===--- Non-essential bits ---------------------------------------------===//

  /// Returns the native backing buffer — trimmed of any inaccessible
  /// trailing elements — iff it is uniquely referenced and already has
  /// room for `minimumCapacity` elements; otherwise returns `nil`.
  @inlinable // FIXME(sil-serialize-all)
  internal mutating func requestUniqueMutableBackingBuffer(
    minimumCapacity: Int
  ) -> NativeBuffer? {
    _invariantCheck()
    // This is a performance optimization that was put in to ensure that at
    // -Onone, copy of self we make to call _hasNativeBuffer is destroyed before
    // we call isUniquelyReferenced. Otherwise, isUniquelyReferenced will always
    // fail causing us to always copy.
    //
    //   if _fastPath(_hasNativeBuffer && isUniquelyReferenced) {
    //
    // SR-6437
    let native = _hasNativeBuffer
    let unique = isUniquelyReferenced()
    if _fastPath(native && unique) {
      if capacity >= minimumCapacity {
        // Since we have the last reference, drop any inaccessible
        // trailing elements in the underlying storage.  That will
        // tend to reduce shuffling of later elements.  Since this
        // function isn't called for subscripting, this won't slow
        // down that case.
        var native = nativeBuffer
        let offset = self.firstElementAddress - native.firstElementAddress
        let backingCount = native.count
        let myCount = count

        if _slowPath(backingCount > myCount + offset) {
          native.replaceSubrange(
            // restored: range bound and call tail lost to stripping
            (myCount + offset)..<backingCount,
            with: 0,
            elementsOf: EmptyCollection())
        }
        _invariantCheck()
        return native
      }
    }
    return nil
  }

  @inlinable
  internal mutating func isMutableAndUniquelyReferenced() -> Bool {
    // This is a performance optimization that ensures that the copy of self
    // that occurs at -Onone is destroyed before we call
    // isUniquelyReferencedOrPinned. This code used to be:
    //
    //   return _hasNativeBuffer && isUniquelyReferenced()
    //
    // SR-6437
    if !_hasNativeBuffer {
      return false
    }
    return isUniquelyReferenced()
  }

  @inlinable
  internal mutating func isMutableAndUniquelyReferencedOrPinned() -> Bool {
    // This is a performance optimization that ensures that the copy of self
    // that occurs at -Onone is destroyed before we call
    // isUniquelyReferencedOrPinned. This code used to be:
    //
    //   return _hasNativeBuffer && isUniquelyReferencedOrPinned()
    //
    // SR-6437
    if !_hasNativeBuffer {
      return false
    }
    return isUniquelyReferencedOrPinned()
  }

  /// If this buffer is backed by a `_ContiguousArrayBuffer`
  /// containing the same number of elements as `self`, return it.
  /// Otherwise, return `nil`.
  @inlinable // FIXME(sil-serialize-all)
  internal func requestNativeBuffer() -> _ContiguousArrayBuffer<Element>? {
    _invariantCheck()
    if _fastPath(_hasNativeBuffer && nativeBuffer.count == count) {
      return nativeBuffer
    }
    return nil
  }

  /// Copies the elements in `bounds` into uninitialized memory starting at
  /// `target`, returning a pointer one past the last element written.
  @inlinable // FIXME(sil-serialize-all)
  @discardableResult
  internal func _copyContents(
    subRange bounds: Range<Int>,
    initializing target: UnsafeMutablePointer<Element>
  ) -> UnsafeMutablePointer<Element> {
    _invariantCheck()
    _sanityCheck(bounds.lowerBound >= startIndex)
    _sanityCheck(bounds.upperBound >= bounds.lowerBound)
    _sanityCheck(bounds.upperBound <= endIndex)
    let c = bounds.count
    target.initialize(from: subscriptBaseAddress + bounds.lowerBound, count: c)
    return target + c
  }

  /// True, if the array is native and does not need a deferred type check.
  @inlinable // FIXME(sil-serialize-all)
  internal var arrayPropertyIsNativeTypeChecked: Bool {
    return _hasNativeBuffer
  }

  @inlinable // FIXME(sil-serialize-all)
  internal var count: Int {
    get {
      return endIndex - startIndex
    }
    set {
      // Growing (or shrinking) the slice also grows the native backing
      // buffer by the same amount; only valid for uniquely-owned native
      // storage, which _invariantCheck verifies.
      let growth = newValue - count
      if growth != 0 {
        nativeBuffer.count += growth
        self.endIndex += growth
      }
      _invariantCheck()
    }
  }

  /// Traps unless the given `index` is valid for subscripting, i.e.
  /// `startIndex ≤ index < endIndex`
  @inlinable // FIXME(sil-serialize-all)
  internal func _checkValidSubscript(_ index : Int) {
    _precondition(
      index >= startIndex && index < endIndex, "Index out of bounds")
  }

  @inlinable // FIXME(sil-serialize-all)
  internal var capacity: Int {
    let count = self.count
    if _slowPath(!_hasNativeBuffer) {
      return count
    }
    let n = nativeBuffer
    let nativeEnd = n.firstElementAddress + n.count
    // Only a slice that ends exactly where the backing storage's elements
    // end can grow in place; any other slice would clobber trailing
    // elements, so it reports no spare capacity.
    if (firstElementAddress + count) == nativeEnd {
      return count + (n.capacity - n.count)
    }
    return count
  }

  @inlinable // FIXME(sil-serialize-all)
  internal mutating func isUniquelyReferenced() -> Bool {
    return isKnownUniquelyReferenced(&owner)
  }

  @inlinable // FIXME(sil-serialize-all)
  internal mutating func isUniquelyReferencedOrPinned() -> Bool {
    return _isKnownUniquelyReferencedOrPinned(&owner)
  }

  @inlinable // FIXME(sil-serialize-all)
  internal func getElement(_ i: Int) -> Element {
    _sanityCheck(i >= startIndex, "slice index is out of range (before startIndex)")
    _sanityCheck(i < endIndex, "slice index is out of range")
    return subscriptBaseAddress[i]
  }

  /// Access the element at `position`.
  ///
  /// - Precondition: `position` is a valid position in `self` and
  ///   `position != endIndex`.
  @inlinable // FIXME(sil-serialize-all)
  internal subscript(position: Int) -> Element {
    get {
      return getElement(position)
    }
    nonmutating set {
      _sanityCheck(position >= startIndex, "slice index is out of range (before startIndex)")
      _sanityCheck(position < endIndex, "slice index is out of range")
      subscriptBaseAddress[position] = newValue
    }
  }

  @inlinable // FIXME(sil-serialize-all)
  internal subscript(bounds: Range<Int>) -> _SliceBuffer<Element> {
    get {
      _sanityCheck(bounds.lowerBound >= startIndex)
      _sanityCheck(bounds.upperBound >= bounds.lowerBound)
      _sanityCheck(bounds.upperBound <= endIndex)
      // Re-slicing shares `owner` and the (unshifted) base address; only
      // the index window changes.
      return _SliceBuffer(
        owner: owner,
        subscriptBaseAddress: subscriptBaseAddress,
        indices: bounds,
        hasNativeBuffer: _hasNativeBuffer)
    }
    set {
      fatalError("not implemented")
    }
  }

  //===--- Collection conformance -------------------------------------===//

  /// The position of the first element in a non-empty collection.
  ///
  /// In an empty collection, `startIndex == endIndex`.
  @usableFromInline
  internal var startIndex: Int

  /// The collection's "past the end" position---that is, the position one
  /// greater than the last valid subscript argument.
  ///
  /// `endIndex` is always reachable from `startIndex` by zero or more
  /// applications of `index(after:)`.
  @inlinable // FIXME(sil-serialize-all)
  internal var endIndex: Int {
    get {
      return Int(endIndexAndFlags >> 1)
    }
    set {
      // Preserve the native-buffer flag in bit 0 while rewriting the index.
      endIndexAndFlags = (UInt(newValue) << 1) | (_hasNativeBuffer ? 1 : 0)
    }
  }

  internal typealias Indices = Range<Int>

  //===--- misc -----------------------------------------------------------===//
  /// Call `body(p)`, where `p` is an `UnsafeBufferPointer` over the
  /// underlying contiguous storage.
  @inlinable
  internal func withUnsafeBufferPointer<R>(
    _ body: (UnsafeBufferPointer<Element>) throws -> R
  ) rethrows -> R {
    // Keep `owner` (and hence the elements) alive for the whole call.
    defer { _fixLifetime(self) }
    return try body(UnsafeBufferPointer(start: firstElementAddress,
      count: count))
  }

  /// Call `body(p)`, where `p` is an `UnsafeMutableBufferPointer`
  /// over the underlying contiguous storage.
  @inlinable
  internal mutating func withUnsafeMutableBufferPointer<R>(
    _ body: (UnsafeMutableBufferPointer<Element>) throws -> R
  ) rethrows -> R {
    // Keep `owner` (and hence the elements) alive for the whole call.
    defer { _fixLifetime(self) }
    return try body(
      UnsafeMutableBufferPointer(start: firstElementAddress, count: count))
  }
}

extension _SliceBuffer {
  /// Returns a `ContiguousArray` with this slice's elements.  Reuses the
  /// native backing buffer when the slice covers it exactly; otherwise
  /// copies the elements into fresh storage.
  @inlinable // FIXME(sil-serialize-all)
  internal func _copyToContiguousArray() -> ContiguousArray<Element> {
    if _hasNativeBuffer {
      let n = nativeBuffer
      if count == n.count {
        return ContiguousArray(_buffer: n)
      }
    }

    let result = _ContiguousArrayBuffer<Element>(
      _uninitializedCount: count,
      minimumCapacity: 0)
    result.firstElementAddress.initialize(
      from: firstElementAddress, count: count)
    return ContiguousArray(_buffer: result)
  }
}