diff --git a/Sources/WebURL/Parser/Parser+Host.swift b/Sources/WebURL/Parser/Parser+Host.swift index d4ed977b6..dc305568d 100644 --- a/Sources/WebURL/Parser/Parser+Host.swift +++ b/Sources/WebURL/Parser/Parser+Host.swift @@ -74,7 +74,7 @@ extension ParsedHost { let result: Optional let needsPercentDecoding = hostname.withContiguousStorageIfAvailable { - $0.boundsChecked.uncheckedFastContains(ASCII.percentSign.codePoint) + $0.boundsChecked.fastContains(ASCII.percentSign.codePoint) } ?? true if needsPercentDecoding { result = ParsedHost._parseSpecialHostname( diff --git a/Sources/WebURL/PercentEncoding.swift b/Sources/WebURL/PercentEncoding.swift index 906dd347d..450cbc901 100644 --- a/Sources/WebURL/PercentEncoding.swift +++ b/Sources/WebURL/PercentEncoding.swift @@ -1506,7 +1506,7 @@ public struct NoSubstitutions: SubstitutionMap { @inlinable @inline(__always) public func _canSkipDecoding(_ source: UnsafeBufferPointer) -> Bool { source.count <= _percentDecodingFastPathThreshold - && !source.boundsChecked.uncheckedFastContains(ASCII.percentSign.codePoint) + && !source.boundsChecked.fastContains(ASCII.percentSign.codePoint) } } @@ -2130,8 +2130,8 @@ extension URLEncodeSet { @inlinable @inline(__always) public func _canSkipDecoding(_ source: UnsafeBufferPointer) -> Bool { source.count <= _percentDecodingFastPathThreshold - && !(source.boundsChecked.uncheckedFastContains(ASCII.percentSign.codePoint) - || source.boundsChecked.uncheckedFastContains(ASCII.plus.codePoint)) + && !(source.boundsChecked.fastContains(ASCII.percentSign.codePoint) + || source.boundsChecked.fastContains(ASCII.plus.codePoint)) } } diff --git a/Sources/WebURL/Util/ASCII+LazyTextTransformations.swift b/Sources/WebURL/Util/ASCII+LazyTextTransformations.swift index df073fde2..9427c822f 100644 --- a/Sources/WebURL/Util/ASCII+LazyTextTransformations.swift +++ b/Sources/WebURL/Util/ASCII+LazyTextTransformations.swift @@ -37,7 +37,7 @@ extension ASCII { if !trimmedSlice.isEmpty { let 
hasInternalNewlinesOrTabs = trimmedSlice.withContiguousStorageIfAvailable { - $0.boundsChecked.uncheckedFastContainsTabOrCROrLF() + $0.boundsChecked.fastContainsTabOrCROrLF() } ?? trimmedSlice.contains(where: { isNewlineOrTab($0) }) if hasInternalNewlinesOrTabs { return .left(ASCII.NewlineAndTabFiltered(unchecked: trimmedSlice)) diff --git a/Sources/WebURL/Util/BitTwiddling.swift b/Sources/WebURL/Util/BitTwiddling.swift index 55676ce3a..c92ace67c 100644 --- a/Sources/WebURL/Util/BitTwiddling.swift +++ b/Sources/WebURL/Util/BitTwiddling.swift @@ -18,17 +18,7 @@ extension UInt64 { /// @inlinable @inline(__always) internal init(repeatingByte byte: UInt8) { - self = 0 - withUnsafeMutableBytes(of: &self) { - $0[0] = byte - $0[1] = byte - $0[2] = byte - $0[3] = byte - $0[4] = byte - $0[5] = byte - $0[6] = byte - $0[7] = byte - } + self = 0x01010101_01010101 &* UInt64(byte) } } @@ -36,31 +26,26 @@ extension UnsafeBoundsCheckedBufferPointer where Element == UInt8 { /// Whether or not the buffer contains the given byte. /// - /// This implementation is able to search chunks of 8 bytes at a time, using only 5 instructions per chunk. - /// - /// > Important: - /// > This function is **not** bounds-checked (since 8-byte chunks are loaded directly from the `baseAddress`, - /// > rather than via the Collection interface), although of course it only reads data within the buffer's bounds. - /// > The reason it lives on `UnsafeBoundsCheckedBufferPointer` is because unsigned indexes allow for - /// > better performance and code-size. - /// - @inlinable @inline(__always) // mask must be constant-folded. 
- internal func uncheckedFastContains(_ element: UInt8) -> Bool { - let mask = UInt64(repeatingByte: element) - return _uncheckedFastContains(element: element, mask: mask) - } - @inlinable - internal func _uncheckedFastContains(element: UInt8, mask: UInt64) -> Bool { + internal func fastContains(_ element: UInt8) -> Bool { + var i = startIndex - while distance(from: i, to: endIndex) >= 8 { + + // - UnsafeBoundsCheckedBufferPointer does not enforce that its startIndex is in-bounds + // by construction; it only checks indexes which are actually read from. + // We need to check it here since we'll be reading using 'loadUnaligned'. + // + // - Since our index type is UInt, 'i <= endIndex' and 'endIndex <= Int.max' SHOULD be enough + // for the compiler to know that (i + 8) cannot overflow. Unfortunately it doesn't, + // so the precondition is only for the benefit of humans. https://github.com/apple/swift/issues/71919 + precondition(i <= endIndex && endIndex <= Int.max) + + while i &+ 8 <= endIndex { // Load 8 bytes from the source. - var eightBytes = UnsafeRawPointer( - self.baseAddress.unsafelyUnwrapped.advanced(by: Int(bitPattern: i)) - ).loadUnaligned(as: UInt64.self) + var eightBytes = self.loadUnaligned_unchecked(fromByteOffset: i, as: UInt64.self) // XOR every byte with the element we're searching for. // If there are any matches, we'll get a zero byte in that position. - eightBytes ^= mask + eightBytes ^= UInt64(repeatingByte: element) // Use bit-twiddling to detect if any bytes were zero. // https://graphics.stanford.edu/~seander/bithacks.html#ValueInWord let found = (eightBytes &- 0x0101_0101_0101_0101) & (~eightBytes & 0x8080_8080_8080_8080) @@ -80,22 +65,23 @@ extension UnsafeBoundsCheckedBufferPointer where Element == UInt8 { /// Whether or not the buffer contains an ASCII horizontal tab (0x09), line feed (0x0A), /// or carriage return (0x0D) code-unit. 
/// - /// This implementation is able to search chunks of 8 bytes at a time, using only 5 instructions per chunk. - /// - /// > Important: - /// > This function is **not** bounds-checked (since 8-byte chunks are loaded directly from the `baseAddress`, - /// > rather than via the Collection interface), although of course it only reads data within the buffer's bounds. - /// > The reason it lives on `UnsafeBoundsCheckedBufferPointer` is because unsigned indexes allow for - /// > better performance and code-size. - /// @inlinable - internal func uncheckedFastContainsTabOrCROrLF() -> Bool { + internal func fastContainsTabOrCROrLF() -> Bool { + var i = startIndex - while distance(from: i, to: endIndex) >= 8 { + + // - UnsafeBoundsCheckedBufferPointer does not enforce that its startIndex is in-bounds + // by construction; it only checks indexes which are actually read from. + // We need to check it here since we'll be reading using 'loadUnaligned'. + // + // - Since our index type is UInt, 'i <= endIndex' and 'endIndex <= Int.max' SHOULD be enough + // for the compiler to know that (i + 8) cannot overflow. Unfortunately it doesn't, + // so the precondition is only for the benefit of humans. https://github.com/apple/swift/issues/71919 + precondition(i <= endIndex && endIndex <= Int.max) + + while i &+ 8 <= endIndex { // Load 8 bytes from the source. - let eightBytes = UnsafeRawPointer( - self.baseAddress.unsafelyUnwrapped.advanced(by: Int(bitPattern: i)) - ).loadUnaligned(as: UInt64.self) + var eightBytes = self.loadUnaligned_unchecked(fromByteOffset: i, as: UInt64.self) // Check for line feeds first; we're more likely to find one than a tab or carriage return. var bytesForLF = eightBytes @@ -105,10 +91,9 @@ extension UnsafeBoundsCheckedBufferPointer where Element == UInt8 { // Check for tabs (0x09, 0b0000_1001) and carriage returns (0x0D, 0b0000_1101). // These differ by one bit, so mask it out (turns carriage returns in to tabs), then look for tabs. 
- var bytesForTCR = eightBytes - bytesForTCR &= UInt64(repeatingByte: 0b1111_1011) - bytesForTCR ^= UInt64(repeatingByte: 0b0000_1001) - found = (bytesForTCR &- 0x0101_0101_0101_0101) & (~bytesForTCR & 0x8080_8080_8080_8080) + eightBytes &= UInt64(repeatingByte: 0b1111_1011) + eightBytes ^= UInt64(repeatingByte: 0b0000_1001) + found = (eightBytes &- 0x0101_0101_0101_0101) & (~eightBytes & 0x8080_8080_8080_8080) if found != 0 { return true } i &+= 8 diff --git a/Sources/WebURL/Util/Pointers.swift b/Sources/WebURL/Util/Pointers.swift index 81fb83dea..f3742dda7 100644 --- a/Sources/WebURL/Util/Pointers.swift +++ b/Sources/WebURL/Util/Pointers.swift @@ -18,21 +18,93 @@ // -------------------------------------------- -extension UnsafeRawPointer { +#if swift(<5.9) + extension UnsafeRawPointer { + + /// Returns a new instance of the given type, constructed from the raw memory at the specified offset. + /// + /// The memory at this pointer plus offset must be initialized to `T` or another type + /// that is layout compatible with `T`. It does not need to be aligned for access to `T`. + /// + @inlinable @inline(__always) + internal func loadUnaligned(fromByteOffset offset: Int = 0, as: T.Type) -> T where T: FixedWidthInteger { + assert(_isPOD(T.self)) + var val: T = 0 + withUnsafeMutableBytes(of: &val) { + $0.copyMemory(from: UnsafeRawBufferPointer(start: self, count: T.bitWidth / 8)) + } + return val + } + } +#endif + +extension UnsafeBoundsCheckedBufferPointer where Element == UInt8 { + + /// Returns a new instance of the given type, constructed from the raw memory at the specified offset. + /// + /// The memory at this pointer plus offset must be initialized to `T` or another type + /// that is layout compatible with `T`. It does not need to be aligned for access to `T`. 
+ /// + @inlinable @inline(__always) + internal func loadUnaligned( + fromByteOffset offset: UInt = 0, + as: T.Type + ) -> T where T: FixedWidthInteger { + + // As far as memory safety is concerned, we only need to check the final byte against endIndex. + precondition(_isPOD(T.self)) + precondition(offset + UInt(MemoryLayout.size) <= endIndex) + return loadUnaligned_unchecked(fromByteOffset: offset, as: T.self) + } /// Returns a new instance of the given type, constructed from the raw memory at the specified offset. /// - /// The memory at this pointer plus offset must be initialized to `T` or another type that is layout compatible with `T`. - /// It does not need to be aligned for access to `T`. + /// The memory at this pointer plus offset must be initialized to `T` or another type + /// that is layout compatible with `T`. It does not need to be aligned for access to `T`. + /// + /// > Important: + /// > + /// > This function does not bounds-check the load operation in release builds. + /// > Callers of this function must perform their own reasoning about bounds-checking + /// > to ensure that an out-of-bounds read never occurs. + /// > + /// > If you need to perform a single unaligned load, + /// > use the ``loadUnaligned`` function instead (without the `_unchecked` suffix), + /// > as it includes bounds-checking.
/// @inlinable @inline(__always) - internal func loadUnaligned(fromByteOffset offset: Int = 0, as: T.Type) -> T where T: FixedWidthInteger { + internal func loadUnaligned_unchecked( + fromByteOffset offset: UInt = 0, as: T.Type + ) -> T where T: FixedWidthInteger { + assert(_isPOD(T.self)) - var val: T = 0 - withUnsafeMutableBytes(of: &val) { - $0.copyMemory(from: UnsafeRawBufferPointer(start: self, count: T.bitWidth / 8)) + assert(offset >= startIndex) + assert(offset + UInt(MemoryLayout.size) <= endIndex) + + // Given that we may assume the load is in-bounds, + // by ruling out zero-sized loads we may infer that `self.count > 0`, + // and hence that `self.baseAddress != nil`. + // + // But this way of handling nil baseAddresses + // is more likely to be constant-folded. + guard MemoryLayout.size > 0 else { + return unsafeBitCast((), to: T.self) } - return val + + #if swift(>=5.9) + return UnsafeRawPointer(self.baseAddress.unsafelyUnwrapped) + .loadUnaligned(fromByteOffset: Int(bitPattern: offset), as: T.self) + #else + var val: T = 0 + withUnsafeMutableBytes(of: &val) { dest in + dest.copyMemory( + from: UnsafeRawBufferPointer( + start: self.baseAddress.unsafelyUnwrapped + Int(bitPattern: offset), + count: MemoryLayout.size + )) + } + return val + #endif } } @@ -464,12 +536,23 @@ extension UnsafeBoundsCheckedBufferPointer: RandomAccessCollection { } @inlinable - internal func index(_ i: UInt, offsetBy n: Int, limitedBy limit: UInt) -> UInt? { - let l = distance(from: i, to: limit) - if n > 0 ? l >= 0 && l < n : l <= 0 && n < l { - return nil + internal func index(_ i: UInt, offsetBy distance: Int, limitedBy limit: UInt) -> UInt? { + // Note that we are taking some liberties here: + // If (i, distance, limit) are not in order, Collection requires the limit to have no effect. + // We return 'nil' instead. 
+ // Details at: https://forums.swift.org/t/allow-index-limitedby-to-return-nil-if-limit-is-invalid/70578 + + if distance >= 0 { + // All valid 'i' are <= Int.max, so this will not overflow. + // An invalid 'i' is allowed to return a nonsense result. + let j = i &+ UInt(distance) + return j <= limit ? j : nil + + } else { + // All valid 'i' are >= 0 and <= Int.max, so this will not underflow. + let j = Int(bitPattern: i) &+ distance + return j >= limit ? UInt(bitPattern: j) : nil } - return UInt(bitPattern: Int(bitPattern: i) &+ n) } @inlinable diff --git a/Sources/WebURL/WebURL+Scheme.swift b/Sources/WebURL/WebURL+Scheme.swift index 4a6053dca..909648229 100644 --- a/Sources/WebURL/WebURL+Scheme.swift +++ b/Sources/WebURL/WebURL+Scheme.swift @@ -54,10 +54,10 @@ extension WebURL.SchemeKind { @inlinable internal init(parsing schemeContent: UTF8Bytes) where UTF8Bytes: Sequence, UTF8Bytes.Element == UInt8 { - if let contiguouslyParsed = schemeContent.withContiguousStorageIfAvailable({ buffer -> Self in - guard let count = UInt8(exactly: buffer.count), count > 0 else { return .other } - return WebURL.SchemeKind(ptr: UnsafeRawPointer(buffer.baseAddress.unsafelyUnwrapped), count: count) - }) { + func parseContiguous(_ buffer: UnsafeBufferPointer) -> Self { + buffer.baseAddress.map { WebURL.SchemeKind(ptr: UnsafeRawPointer($0), count: buffer.count) } ?? .other + } + if let contiguouslyParsed = schemeContent.withContiguousStorageIfAvailable(parseContiguous) { self = contiguouslyParsed return } @@ -93,7 +93,7 @@ extension WebURL.SchemeKind { // Note: 'count' is a separate parameter because UnsafeRawBufferPointer.count includes a force-unwrap, // which can have a significant performance impact: https://bugs.swift.org/browse/SR-14422 @inlinable - internal init(ptr: UnsafeRawPointer, count: UInt8) { + internal init(ptr: UnsafeRawPointer, count: Int) { // Setting the 6th bit of each byte (i.e. OR-ing with 00100000) normalizes the code-unit to lowercase ASCII. 
switch count { case 2: diff --git a/Sources/WebURLFoundationExtras/Util/EvenFasterCollectionAlgorithms.swift b/Sources/WebURLFoundationExtras/Util/EvenFasterCollectionAlgorithms.swift index c5177f74a..49eb791df 100644 --- a/Sources/WebURLFoundationExtras/Util/EvenFasterCollectionAlgorithms.swift +++ b/Sources/WebURLFoundationExtras/Util/EvenFasterCollectionAlgorithms.swift @@ -76,40 +76,38 @@ internal func fastEquals(_ lhs: IPv6Address, _ rhs: IPv6Address) -> Bool { // -------------------------------------------- // MARK: - FastContains // -------------------------------------------- -// From WebURL/Util/Pointers.swift, WebURL/Util/BitTwiddling.swift +// Copied from WebURL/Util/Pointers.swift, WebURL/Util/BitTwiddling.swift extension Sequence where Element == UInt8 { /// Whether or not this sequence contains the given byte. /// - /// If the sequence has contiguous storage, this optimizes to a fast, chunked search. - /// @inlinable @inline(__always) internal func fastContains(_ element: Element) -> Bool { - // Hoist mask calculation out of wCSIA to ensure it is constant-folded, even if wCSIA isn't inlined. - let mask = UInt64(repeatingByte: element) - return withContiguousStorageIfAvailable { $0.__fastContains(element: element, mask: mask) } ?? contains(element) + withContiguousStorageIfAvailable { $0.__fastContains(element) } ?? contains(element) } } -extension UnsafeRawPointer { - - /// Returns a new instance of the given type, constructed from the raw memory at the specified offset. - /// - /// The memory at this pointer plus offset must be initialized to `T` or another type - /// that is layout compatible with `T`. It does not need to be aligned for access to `T`. 
- /// - @inlinable @inline(__always) - internal func loadUnaligned(fromByteOffset offset: Int = 0, as: T.Type) -> T where T: FixedWidthInteger { - assert(_isPOD(T.self)) - var val: T = 0 - withUnsafeMutableBytes(of: &val) { - $0.copyMemory(from: UnsafeRawBufferPointer(start: self, count: T.bitWidth / 8)) +#if swift(<5.9) + extension UnsafeRawPointer { + + /// Returns a new instance of the given type, constructed from the raw memory at the specified offset. + /// + /// The memory at this pointer plus offset must be initialized to `T` or another type + /// that is layout compatible with `T`. It does not need to be aligned for access to `T`. + /// + @inlinable @inline(__always) + internal func loadUnaligned(fromByteOffset offset: Int = 0, as: T.Type) -> T where T: FixedWidthInteger { + assert(_isPOD(T.self)) + var val: T = 0 + withUnsafeMutableBytes(of: &val) { + $0.copyMemory(from: UnsafeRawBufferPointer(start: self, count: T.bitWidth / 8)) + } + return val } - return val } -} +#endif extension UInt64 { @@ -117,38 +115,35 @@ extension UInt64 { /// @inlinable @inline(__always) internal init(repeatingByte byte: UInt8) { - self = 0 - withUnsafeMutableBytes(of: &self) { - $0[0] = byte - $0[1] = byte - $0[2] = byte - $0[3] = byte - $0[4] = byte - $0[5] = byte - $0[6] = byte - $0[7] = byte - } + self = 0x01010101_01010101 &* UInt64(byte) } } extension UnsafeBufferPointer where Element == UInt8 { - /// Whether or not the buffer contains the given byte. - /// - /// This implementation compares chunks of 8 bytes at a time, - /// using only 4 instructions per chunk of 8 bytes. - /// + // Copied from UnsafeBoundsCheckedBufferPointer. 
@inlinable - internal func __fastContains(element: UInt8, mask: UInt64) -> Bool { + internal func __fastContains(_ element: UInt8) -> Bool { + var i = startIndex - while distance(from: i, to: endIndex) >= 8 { + + // - UnsafeBoundsCheckedBufferPointer does not enforce that its startIndex is in-bounds + // by construction; it only checks indexes which are actually read from. + // We need to check it here since we'll be reading using 'loadUnaligned'. + // + // - Since our index type is UInt, 'i <= endIndex' and 'endIndex <= Int.max' SHOULD be enough + // for the compiler to know that (i + 8) cannot overflow. Unfortunately it doesn't, + // so the precondition is only for the benefit of humans. https://github.com/apple/swift/issues/71919 + precondition(i <= endIndex && endIndex <= Int.max) + + while i &+ 8 <= endIndex { // Load 8 bytes from the source. var eightBytes = UnsafeRawPointer( - self.baseAddress.unsafelyUnwrapped.advanced(by: i) - ).loadUnaligned(as: UInt64.self) + self.baseAddress.unsafelyUnwrapped + ).loadUnaligned(fromByteOffset: i, as: UInt64.self) // XOR every byte with the element we're searching for. // If there are any matches, we'll get a zero byte in that position. - eightBytes ^= mask + eightBytes ^= UInt64(repeatingByte: element) // Use bit-twiddling to detect if any bytes were zero. // https://graphics.stanford.edu/~seander/bithacks.html#ValueInWord let found = (eightBytes &- 0x0101_0101_0101_0101) & (~eightBytes & 0x8080_8080_8080_8080) diff --git a/Sources/WebURLTestSupport/IPAddressUtils.swift b/Sources/WebURLTestSupport/IPAddressUtils.swift index 84df4ee16..dfe32d231 100644 --- a/Sources/WebURLTestSupport/IPAddressUtils.swift +++ b/Sources/WebURLTestSupport/IPAddressUtils.swift @@ -334,20 +334,22 @@ extension IPv6Address.Utils { // -------------------------------------------- -extension UnsafeRawPointer { - - /// Returns a new instance of the given type, constructed from the raw memory at the specified offset. 
- /// - /// The memory at this pointer plus offset must be initialized to `T` or another type that is layout compatible with `T`. - /// It does not need to be aligned for access to `T`. - /// - @inlinable @inline(__always) - internal func loadUnaligned(fromByteOffset offset: Int = 0, as: T.Type) -> T where T: FixedWidthInteger { - assert(_isPOD(T.self)) - var val: T = 0 - withUnsafeMutableBytes(of: &val) { - $0.copyMemory(from: UnsafeRawBufferPointer(start: self + offset, count: MemoryLayout.stride)) +#if swift(<5.9) + extension UnsafeRawPointer { + + /// Returns a new instance of the given type, constructed from the raw memory at the specified offset. + /// + /// The memory at this pointer plus offset must be initialized to `T` or another type + /// that is layout compatible with `T`. It does not need to be aligned for access to `T`. + /// + @inlinable @inline(__always) + internal func loadUnaligned(fromByteOffset offset: Int = 0, as: T.Type) -> T where T: FixedWidthInteger { + assert(_isPOD(T.self)) + var val: T = 0 + withUnsafeMutableBytes(of: &val) { + $0.copyMemory(from: UnsafeRawBufferPointer(start: self, count: T.bitWidth / 8)) + } + return val } - return val } -} +#endif diff --git a/Tests/UnicodeDataStructuresTests/SegmentedLineTests.swift b/Tests/UnicodeDataStructuresTests/SegmentedLineTests.swift index f80eab808..c6718500a 100644 --- a/Tests/UnicodeDataStructuresTests/SegmentedLineTests.swift +++ b/Tests/UnicodeDataStructuresTests/SegmentedLineTests.swift @@ -190,7 +190,7 @@ extension SegmentedLineTests { var line = SegmentedLine(bounds: 0..<50, value: 42) line.set(10..<20, to: 99) line.set(30..<50, to: 1024) - + // swift-format-ignore XCTAssertSegments(line, [ (0..<10, 42), diff --git a/Tests/WebURLTests/OtherUtilitiesTests.swift b/Tests/WebURLTests/OtherUtilitiesTests.swift index ee856f9c7..353acdf0a 100644 --- a/Tests/WebURLTests/OtherUtilitiesTests.swift +++ b/Tests/WebURLTests/OtherUtilitiesTests.swift @@ -202,6 +202,61 @@ extension 
OtherUtilitiesTests { XCTAssertEqualElements($0.boundsChecked.suffix(20), [1, 2, 3, 4]) } } + + // index(_:offsetBy:limitedBy:) + do { + [1, 2, 3, 4, 5].withUnsafeBufferPointer { + let buffer = $0.boundsChecked + + // Subtracting from limit. + XCTAssertEqual(buffer.index(0, offsetBy: -1, limitedBy: 0), nil) + XCTAssertEqual(buffer.index(0, offsetBy: Int.min, limitedBy: 0), nil) + + // Incrementing from limit. + XCTAssertEqual(buffer.index(4, offsetBy: 1, limitedBy: 4), nil) + XCTAssertEqual(buffer.index(4, offsetBy: Int.max, limitedBy: 4), nil) + + XCTAssertEqual(buffer.index(2, offsetBy: -3, limitedBy: 0), nil) + XCTAssertEqual(buffer.index(2, offsetBy: -2, limitedBy: 0), 0) + XCTAssertEqual(buffer.index(2, offsetBy: -1, limitedBy: 0), 1) + XCTAssertEqual(buffer.index(2, offsetBy: 0, limitedBy: 4), 2) + XCTAssertEqual(buffer.index(2, offsetBy: 1, limitedBy: 4), 3) + XCTAssertEqual(buffer.index(2, offsetBy: 2, limitedBy: 4), 4) + XCTAssertEqual(buffer.index(2, offsetBy: 3, limitedBy: 4), nil) + + // If subtracting, and 'limit > i' (or if incrementing, and 'limit < i'), + // Collection says the limit should have no effect. + // UnsafeBoundsCheckedBufferPointer intentionally deviates from that and returns nil instead. + // I'm quite sure nobody depends on Collection's behaviour (at least, none of the code which uses UBCBP does). + // See: https://forums.swift.org/t/allow-index-limitedby-to-return-nil-if-limit-is-invalid/70578 + XCTAssertEqual(buffer.index(2, offsetBy: -3, limitedBy: 3), nil) + XCTAssertEqual(buffer.index(2, offsetBy: -2, limitedBy: 3), nil) + XCTAssertEqual(buffer.index(2, offsetBy: -1, limitedBy: 3), nil) + XCTAssertEqual(buffer.index(2, offsetBy: 1, limitedBy: 1), nil) + XCTAssertEqual(buffer.index(2, offsetBy: 2, limitedBy: 1), nil) + XCTAssertEqual(buffer.index(2, offsetBy: 3, limitedBy: 1), nil) + + // Extremes. 
+ // [start = Int.max, offset = Int.max] + XCTAssertEqual(buffer.index(UInt(Int.max), offsetBy: Int.max, limitedBy: UInt.max), UInt.max - 1) + XCTAssertEqual(buffer.index(UInt(Int.max), offsetBy: Int.max, limitedBy: UInt.min), nil) + // [start = Int.max, offset = -Int.max] + XCTAssertEqual(buffer.index(UInt(Int.max), offsetBy: -Int.max, limitedBy: UInt.max), nil) + XCTAssertEqual(buffer.index(UInt(Int.max), offsetBy: -Int.max, limitedBy: UInt.min), 0) + // [start = Int.max, offset = Int.min] + XCTAssertEqual(buffer.index(UInt(Int.max), offsetBy: Int.min, limitedBy: UInt.max), nil) + XCTAssertEqual(buffer.index(UInt(Int.max), offsetBy: Int.min, limitedBy: UInt.min), nil) + // [start = 0, offset = Int.max] + XCTAssertEqual(buffer.index(0, offsetBy: Int.max, limitedBy: UInt.max), UInt(Int.max)) + XCTAssertEqual(buffer.index(0, offsetBy: Int.max, limitedBy: UInt.min), nil) + // [start = 0, offset = -Int.max] + XCTAssertEqual(buffer.index(0, offsetBy: -Int.max, limitedBy: UInt.max), nil) + XCTAssertEqual(buffer.index(0, offsetBy: -Int.max, limitedBy: UInt.min), nil) + // [start = 0, offset = Int.min] + XCTAssertEqual(buffer.index(0, offsetBy: Int.min, limitedBy: UInt.max), nil) + XCTAssertEqual(buffer.index(0, offsetBy: Int.min, limitedBy: UInt.min), nil) + } + } } } @@ -210,7 +265,7 @@ extension OtherUtilitiesTests { func testFastContains() { func check(_ str: String, for byte: UInt8, expected: Bool) { var copy = str - let fastResult = copy.withUTF8 { $0.boundsChecked.uncheckedFastContains(byte) } + let fastResult = copy.withUTF8 { $0.boundsChecked.fastContains(byte) } let slowResult = copy.withUTF8 { $0.contains(byte) } XCTAssertEqual(fastResult, slowResult) XCTAssertEqual(fastResult, expected)