Skip to content

Commit ae1872d

Browse files
committed
add ForEachFunc
1 parent 36ffada commit ae1872d

File tree

7 files changed

+129
-8
lines changed

7 files changed

+129
-8
lines changed

bucket.go

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,18 @@ func (b *bucket) itemCount() int {
1717
return len(b.lookup)
1818
}
1919

20+
func (b *bucket) forEachFunc(matches func(key string, item *Item) bool) bool {
21+
lookup := b.lookup
22+
b.RLock()
23+
defer b.RUnlock()
24+
for key, item := range lookup {
25+
if !matches(key, item) {
26+
return false
27+
}
28+
}
29+
return true
30+
}
31+
2032
func (b *bucket) get(key string) *Item {
2133
b.RLock()
2234
defer b.RUnlock()

cache.go

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -76,6 +76,14 @@ func (c *Cache) DeleteFunc(matches func(key string, item *Item) bool) int {
7676
return count
7777
}
7878

79+
func (c *Cache) ForEachFunc(matches func(key string, item *Item) bool) {
80+
for _, b := range c.buckets {
81+
if !b.forEachFunc(matches) {
82+
break
83+
}
84+
}
85+
}
86+
7987
// Get an item from the cache. Returns nil if the item wasn't found.
8088
// This can return an expired item. Use item.Expired() to see if the item
8189
// is expired and item.TTL() to see how long until the item expires (which
@@ -210,7 +218,7 @@ func (c *Cache) promote(item *Item) {
210218
case c.promotables <- item:
211219
default:
212220
}
213-
221+
214222
}
215223

216224
func (c *Cache) worker() {

cache_test.go

Lines changed: 42 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
package ccache
22

33
import (
4+
"sort"
45
"strconv"
56
"sync/atomic"
67
"testing"
@@ -273,6 +274,34 @@ func (_ CacheTests) ResizeOnTheFly() {
273274
Expect(cache.Get("6").Value()).To.Equal(6)
274275
}
275276

277+
// ForEachFunc exercises Cache.ForEachFunc: iteration sees exactly the
// cached keys, keys pruned once the cache exceeds MaxSize(3) disappear,
// and returning false from the callback stops iteration (the "stop"
// key never appears in the collected results).
func (_ CacheTests) ForEachFunc() {
	cache := New(Configure().MaxSize(3).ItemsToPrune(1))
	// An empty cache yields no keys.
	Expect(forEachKeys(cache)).To.Equal([]string{})

	cache.Set("1", 1, time.Minute)
	Expect(forEachKeys(cache)).To.Equal([]string{"1"})

	cache.Set("2", 2, time.Minute)
	// Sleep presumably lets the async promote worker drain the
	// promotables channel before asserting — TODO confirm.
	time.Sleep(time.Millisecond * 10)
	Expect(forEachKeys(cache)).To.Equal([]string{"1", "2"})

	cache.Set("3", 3, time.Minute)
	time.Sleep(time.Millisecond * 10)
	Expect(forEachKeys(cache)).To.Equal([]string{"1", "2", "3"})

	cache.Set("4", 4, time.Minute)
	time.Sleep(time.Millisecond * 10)
	// MaxSize is 3, so the oldest key ("1") should have been pruned.
	Expect(forEachKeys(cache)).To.Equal([]string{"2", "3", "4"})

	// forEachKeys aborts the iteration when it sees "stop", so "stop"
	// must never appear in the collected keys.
	cache.Set("stop", 5, time.Minute)
	time.Sleep(time.Millisecond * 10)
	Expect(forEachKeys(cache)).Not.To.Contain("stop")

	cache.Set("6", 6, time.Minute)
	time.Sleep(time.Millisecond * 10)
	Expect(forEachKeys(cache)).Not.To.Contain("stop")
}
304+
276305
type SizedItem struct {
277306
id int
278307
s int64
@@ -293,3 +322,16 @@ func gcCache(cache *Cache) {
293322
cache.gc()
294323
cache.restart()
295324
}
325+
326+
func forEachKeys(cache *Cache) []string {
327+
keys := make([]string, 0, 10)
328+
cache.ForEachFunc(func(key string, i *Item) bool {
329+
if key == "stop" {
330+
return false
331+
}
332+
keys = append(keys, key)
333+
return true
334+
})
335+
sort.Strings(keys)
336+
return keys
337+
}

layeredbucket.go

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -102,6 +102,15 @@ func (b *layeredBucket) deleteAll(primary string, deletables chan *Item) bool {
102102
return true
103103
}
104104

105+
func (b *layeredBucket) forEachFunc(primary string, matches func(key string, item *Item) bool) {
106+
b.RLock()
107+
bucket, exists := b.buckets[primary]
108+
b.RUnlock()
109+
if exists {
110+
bucket.forEachFunc(matches)
111+
}
112+
}
113+
105114
func (b *layeredBucket) clear() {
106115
b.Lock()
107116
defer b.Unlock()

layeredcache.go

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -73,6 +73,10 @@ func (c *LayeredCache) Get(primary, secondary string) *Item {
7373
return item
7474
}
7575

76+
func (c *LayeredCache) ForEachFunc(primary string, matches func(key string, item *Item) bool) {
77+
c.bucket(primary).forEachFunc(primary, matches)
78+
}
79+
7680
// Get the secondary cache for a given primary key. This operation will
7781
// never return nil. In the case where the primary key does not exist, a
7882
// new, underlying, empty bucket will be created and returned.

layeredcache_test.go

Lines changed: 49 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
package ccache
22

33
import (
4+
"sort"
45
"strconv"
56
"sync/atomic"
67
"testing"
@@ -261,12 +262,6 @@ func (_ LayeredCacheTests) ResizeOnTheFly() {
261262
Expect(cache.Get("6", "a").Value()).To.Equal(6)
262263
}
263264

264-
func newLayered() *LayeredCache {
265-
c := Layered(Configure())
266-
c.Clear()
267-
return c
268-
}
269-
270265
func (_ LayeredCacheTests) RemovesOldestItemWhenFullBySizer() {
271266
cache := Layered(Configure().MaxSize(9).ItemsToPrune(2))
272267
for i := 0; i < 7; i++ {
@@ -329,6 +324,41 @@ func (_ LayeredCacheTests) ReplaceChangesSize() {
329324
checkLayeredSize(cache, 5)
330325
}
331326

327+
// EachFunc exercises LayeredCache.ForEachFunc for a single primary key:
// iteration sees exactly the secondary keys under "1", pruned keys
// disappear once the cache exceeds MaxSize(3), and returning false from
// the callback stops iteration (the "stop" key never appears).
func (_ LayeredCacheTests) EachFunc() {
	cache := Layered(Configure().MaxSize(3).ItemsToPrune(1))
	// An empty cache yields no keys for any primary.
	Expect(forEachKeysLayered(cache, "1")).To.Equal([]string{})

	cache.Set("1", "a", 1, time.Minute)
	Expect(forEachKeysLayered(cache, "1")).To.Equal([]string{"a"})

	cache.Set("1", "b", 2, time.Minute)
	// Sleep presumably lets the async promote worker run before
	// asserting — TODO confirm.
	time.Sleep(time.Millisecond * 10)
	Expect(forEachKeysLayered(cache, "1")).To.Equal([]string{"a", "b"})

	cache.Set("1", "c", 3, time.Minute)
	time.Sleep(time.Millisecond * 10)
	Expect(forEachKeysLayered(cache, "1")).To.Equal([]string{"a", "b", "c"})

	cache.Set("1", "d", 4, time.Minute)
	time.Sleep(time.Millisecond * 10)
	// MaxSize is 3, so the oldest secondary key ("a") has been pruned.
	Expect(forEachKeysLayered(cache, "1")).To.Equal([]string{"b", "c", "d"})

	// iteration is non-deterministic, all we know for sure is "stop" should not be in there
	cache.Set("1", "stop", 5, time.Minute)
	time.Sleep(time.Millisecond * 10)
	Expect(forEachKeysLayered(cache, "1")).Not.To.Contain("stop")

	cache.Set("1", "e", 6, time.Minute)
	time.Sleep(time.Millisecond * 10)
	Expect(forEachKeysLayered(cache, "1")).Not.To.Contain("stop")
}
355+
356+
func newLayered() *LayeredCache {
357+
c := Layered(Configure())
358+
c.Clear()
359+
return c
360+
}
361+
332362
func checkLayeredSize(cache *LayeredCache, sz int64) {
333363
cache.Stop()
334364
Expect(cache.size).To.Equal(sz)
@@ -340,3 +370,16 @@ func gcLayeredCache(cache *LayeredCache) {
340370
cache.gc()
341371
cache.restart()
342372
}
373+
374+
func forEachKeysLayered(cache *LayeredCache, primary string) []string {
375+
keys := make([]string, 0, 10)
376+
cache.ForEachFunc(primary, func(key string, i *Item) bool {
377+
if key == "stop" {
378+
return false
379+
}
380+
keys = append(keys, key)
381+
return true
382+
})
383+
sort.Strings(keys)
384+
return keys
385+
}

readme.md

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -97,7 +97,10 @@ cache.Delete("user:4")
9797
`DeletePrefix` deletes all keys matching the provided prefix. Returns the number of keys removed.
9898

9999
### DeleteFunc
100-
`DeleteFunc` deletes all items that the provded matches func evaluates to true. Returns the number of keys removed.
100+
`DeleteFunc` deletes all items that the provided matches func evaluates to true. Returns the number of keys removed.
101+
102+
### ForEachFunc
103+
`ForEachFunc` iterates through all keys and values in the cache and passes them to the provided function. Iteration stops if the function returns false. Iteration order is random.
101104

102105
### Clear
103106
`Clear` clears the cache. If the cache's gc is running, `Clear` waits for it to finish.

0 commit comments

Comments
 (0)