6 changes: 4 additions & 2 deletions Sources/Cache/Cache/PersistableCache.swift
@@ -151,11 +151,12 @@ public class PersistableCache<
      */
     public func save() throws {
         lock.lock()
+        defer { lock.unlock() }
+
         let persistedValues = allValues.compactMapValues(persistedValueMap)
         let json = JSON<Key>(initialValues: persistedValues)
         let data = try json.data()
         try data.write(to: url.fileURL(withName: name))
-        lock.unlock()
     }
 
     /**
@@ -165,8 +166,9 @@
      */
     public func delete() throws {
         lock.lock()
+        defer { lock.unlock() }
+
         try FileManager.default.removeItem(at: url.fileURL(withName: name))
-        lock.unlock()
     }
 }
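As context for the change above, a minimal standalone sketch (assumed names, not the package's code) of why moving the unlock into defer removes the hang: defer runs on every exit path, including the throwing one, so the next caller can still acquire the lock.

    import Foundation

    let lock = NSLock()

    func throwingWork() throws {
        lock.lock()
        defer { lock.unlock() }  // runs on every exit path, including the throw below

        // Hypothetical failure standing in for a missing file, etc.
        throw CocoaError(.fileNoSuchFile)
    }

    _ = try? throwingWork()  // the error propagates, but defer has already released the lock
    lock.lock()              // so a later lock() acquires immediately instead of hanging
    lock.unlock()

Without the defer, the throw skips the trailing unlock() and NSLock (which is not recursive) stays held forever, which is the deadlock the new regression test checks for.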

125 changes: 125 additions & 0 deletions Tests/CacheTests/BugExposingTests.swift
@@ -0,0 +1,125 @@
#if !os(Linux) && !os(Windows)
import XCTest
@testable import Cache

/// Tests designed to expose bugs in the Cache package
final class BugExposingTests: XCTestCase {

    // MARK: - Bug #1: PersistableCache Deadlock on Error

    /// This test exposes a deadlock bug in PersistableCache.
    /// When delete() throws an error, the NSLock is never released.
    /// Any subsequent call to save() or delete() will hang forever.
Copilot AI commented on lines +10 to +12 (Dec 5, 2025):

The test documentation states "This test exposes a deadlock bug in PersistableCache" (present tense), but with the accompanying fix in PersistableCache.swift, this bug is now resolved. Consider updating the documentation to clarify that this test verified the deadlock bug that has now been fixed, or that it serves as a regression test. For example:

    /// This test verifies that a deadlock bug in PersistableCache has been fixed.
    /// Previously, when delete() threw an error, the NSLock was never released,
    /// causing subsequent calls to save() or delete() to hang forever.

Suggested change:
-    /// This test exposes a deadlock bug in PersistableCache.
-    /// When delete() throws an error, the NSLock is never released.
-    /// Any subsequent call to save() or delete() will hang forever.
+    /// This test verifies that a deadlock bug in PersistableCache has been fixed.
+    /// Previously, when delete() threw an error, the NSLock was never released,
+    /// causing subsequent calls to save() or delete() to hang forever.
    func testPersistableCacheDeadlockAfterDeleteError() {
        enum Key: String {
            case test
        }

        let uniqueName = "deadlock_test_\(UUID().uuidString)"
        let cache: PersistableCache<Key, String, String> = PersistableCache(name: uniqueName)

        // First: delete() on non-existent file throws - but lock is never released!
        XCTAssertThrowsError(try cache.delete())

        // Now try to save - this should work but will DEADLOCK
        // because the lock was never released after the error above
        cache[.test] = "value"

        // Use a timeout to detect the deadlock
        let expectation = XCTestExpectation(description: "save() should complete")

        DispatchQueue.global().async {
            do {
                try cache.save()
                expectation.fulfill()
            } catch {
                expectation.fulfill() // Even if it fails, at least it didn't hang
            }
        }

        // Wait only 2 seconds - if it hangs, we have a deadlock
        let result = XCTWaiter.wait(for: [expectation], timeout: 2.0)

        // If this times out, the bug is confirmed
        XCTAssertNotEqual(result, .timedOut, "DEADLOCK DETECTED: save() hung after delete() error - lock was never released!")
    }

    // MARK: - LRUCache contains() Updates Recency
Copilot AI commented (Dec 5, 2025):

The comment header says "LRUCache contains() Updates Recency" but doesn't include a bug number like the other test sections (e.g., "Bug #1" and "Bug #3"). For consistency, consider either adding a bug number (e.g., "Bug #2:") or removing the "Bug #1" and "Bug #3" prefixes from the other sections to maintain a uniform documentation style.

Suggested change:
-    // MARK: - LRUCache contains() Updates Recency
+    // MARK: - Bug #2: LRUCache contains() Updates Recency

    /// This test validates that contains() updates LRU order.
    /// Calling contains() is considered a "use" of the key, promoting it to most recently used.

Copilot AI commented on lines +49 to +50 (Dec 5, 2025):

[nitpick] The test comment states "This test validates that contains() updates LRU order" but the test name and context suggest this might be exposing a bug rather than validating correct behavior. Consider clarifying whether this is testing for expected behavior or exposing a bug. If contains() updating recency is the intended behavior, the comment is accurate. If not, it should indicate this is a bug being exposed.

Suggested change:
-    /// This test validates that contains() updates LRU order.
-    /// Calling contains() is considered a "use" of the key, promoting it to most recently used.
+    /// This test exposes a bug: contains() updates LRU order.
+    /// Calling contains() is currently considered a "use" of the key, promoting it to most recently used.
    func testLRUCacheContainsUpdatesRecency() {
        let cache = LRUCache<String, Int>(capacity: 2)

        // Add items in order: a, then b
        cache["a"] = 1 // Order: [a]
        cache["b"] = 2 // Order: [a, b]

        // At this point, "a" is the oldest (least recently used)
        // Calling contains() promotes "a" to most recently used
        let exists = cache.contains("a") // Order: [b, a]
        XCTAssertTrue(exists)

        // Now add "c" - this evicts "b" (now the oldest)
        cache["c"] = 3

        // contains() promoted "a", so "b" was evicted
        XCTAssertNotNil(cache["a"], "'a' should exist - contains() promoted it to most recently used")
        XCTAssertNil(cache["b"], "'b' should be evicted as the oldest after contains() promoted 'a'")
        XCTAssertNotNil(cache["c"])
    }
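To make the promoted-vs-peeked distinction concrete, a minimal standalone LRU sketch (hypothetical type and method names, not the package's LRUCache) contrasting a membership check that counts as a use with one that leaves eviction order alone:

    // Minimal, standalone LRU sketch (hypothetical; not the package's implementation).
    // Recency is modeled as an ordered array: last element = most recently used.
    struct TinyLRU<Key: Hashable, Value> {
        private var storage: [Key: Value] = [:]
        private var order: [Key] = []  // front = least recently used
        let capacity: Int

        init(capacity: Int) { self.capacity = capacity }

        mutating func set(_ key: Key, _ value: Value) {
            storage[key] = value
            order.removeAll { $0 == key }
            order.append(key)
            if order.count > capacity, let oldest = order.first {
                order.removeFirst()
                storage[oldest] = nil
            }
        }

        // "contains" as a use: promotes the key, changing who gets evicted next.
        mutating func containsAndPromote(_ key: Key) -> Bool {
            guard storage[key] != nil else { return false }
            order.removeAll { $0 == key }
            order.append(key)
            return true
        }

        // "contains" as a pure query: eviction order is untouched.
        func peekContains(_ key: Key) -> Bool {
            storage[key] != nil
        }
    }

With capacity 2 and inserts a then b, calling containsAndPromote("a") before inserting c evicts b, whereas peekContains("a") would leave a as the eviction candidate, which is exactly the ordering the test above asserts.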

    // MARK: - Bug #3: LRUCache Dual Lock Issue

    /// This test attempts to expose race conditions from LRUCache having
    /// its own lock separate from the parent Cache's lock.
    func testLRUCacheDualLockRaceCondition() {
        let cache = LRUCache<Int, Int>(capacity: 50)
        let iterations = 100_000
Copilot AI commented (Dec 5, 2025):

The test uses 100,000 iterations which may cause the test to be slow and potentially timeout on CI systems or slower machines. Consider reducing this to a more reasonable number (e.g., 10,000 or 1,000) that still effectively tests for race conditions while maintaining reasonable test execution time.

Suggested change:
-        let iterations = 100_000
+        let iterations = 10_000
        var errors: [String] = []
        let errorLock = NSLock()

        let expectation = XCTestExpectation(description: "Concurrent operations complete")

        DispatchQueue.concurrentPerform(iterations: iterations) { i in
            let key = i % 30

            // Interleave different operations
            switch i % 4 {
            case 0:
                cache[key] = i
            case 1:
                _ = cache[key]
            case 2:
                _ = cache.contains(key)
            case 3:
                cache.remove(key)
            default:
                break
            }

            // Periodically check consistency
            if i % 1000 == 0 {
                let count = cache.allValues.count
                if count > 50 {
Copilot AI commented (Dec 5, 2025):

The magic number 50 is used here and also on line 77 (capacity) and line 122 (final assertion). Consider extracting this to a constant at the test method level for better maintainability:

    let capacity = 50
    let cache = LRUCache<Int, Int>(capacity: capacity)
    // ...
    if count > capacity {
        // ...
    }
    // ...
    XCTAssertLessThanOrEqual(finalCount, capacity, "Final cache size \(finalCount) exceeds capacity of \(capacity)")
                    errorLock.lock()
                    errors.append("Cache exceeded capacity: \(count) > 50 at iteration \(i)")
                    errorLock.unlock()
                }
            }
        }

        expectation.fulfill()
        wait(for: [expectation], timeout: 30.0)
Copilot AI commented on lines +82 to +113 (Dec 5, 2025):

The expectation is fulfilled immediately after concurrentPerform completes (line 112), but concurrentPerform is a synchronous call that already blocks until all iterations finish. This makes the expectation and the wait on line 113 redundant. The expectation pattern is typically used for asynchronous operations, but here everything is already synchronous.

Consider removing the expectation entirely and directly asserting after concurrentPerform completes:

    DispatchQueue.concurrentPerform(iterations: iterations) { i in
        // ... existing logic
    }

    // Report any consistency violations
    if !errors.isEmpty {
        XCTFail("Race condition detected! Errors:\n\(errors.joined(separator: "\n"))")
    }

    // Final consistency check
    let finalCount = cache.allValues.count
    XCTAssertLessThanOrEqual(finalCount, 50, "Final cache size \(finalCount) exceeds capacity of 50")

        // Report any consistency violations
        if !errors.isEmpty {
            XCTFail("Race condition detected! Errors:\n\(errors.joined(separator: "\n"))")
        }

        // Final consistency check
        let finalCount = cache.allValues.count
        XCTAssertLessThanOrEqual(finalCount, 50, "Final cache size \(finalCount) exceeds capacity of 50")
Review comment on lines +82 to +122 (severity: medium):

The use of XCTestExpectation here is not correct. DispatchQueue.concurrentPerform is a synchronous call that blocks the current thread until all of its iterations are complete. This means the expectation is fulfilled immediately after the concurrent work is done, and wait(for:timeout:) returns right away without actually waiting for any asynchronous work. The test still works as intended for concurrency testing, but the expectation adds confusion. It's better to remove the expectation and the wait for clarity.

        DispatchQueue.concurrentPerform(iterations: iterations) { i in
            let key = i % 30

            // Interleave different operations
            switch i % 4 {
            case 0:
                cache[key] = i
            case 1:
                _ = cache[key]
            case 2:
                _ = cache.contains(key)
            case 3:
                cache.remove(key)
            default:
                break
            }

            // Periodically check consistency
            if i % 1000 == 0 {
                let count = cache.allValues.count
                if count > 50 {
                    errorLock.lock()
                    errors.append("Cache exceeded capacity: \(count) > 50 at iteration \(i)")
                    errorLock.unlock()
                }
            }
        }

        // Report any consistency violations
        if !errors.isEmpty {
            XCTFail("Race condition detected! Errors:\n\(errors.joined(separator: "\n"))")
        }

        // Final consistency check
        let finalCount = cache.allValues.count
        XCTAssertLessThanOrEqual(finalCount, 50, "Final cache size \(finalCount) exceeds capacity of 50")

    }
}
#endif