rename LoadingCache to Cache
paskal committed May 12, 2020
1 parent 724b0f3 commit 2ff274f
Showing 4 changed files with 54 additions and 54 deletions.
14 changes: 7 additions & 7 deletions README.md
@@ -4,7 +4,7 @@
[![Coverage Status](https://coveralls.io/repos/github/go-pkgz/expirable-cache/badge.svg?branch=master)](https://coveralls.io/github/go-pkgz/expirable-cache?branch=master)
[![godoc](https://godoc.org/github.com/go-pkgz/expirable-cache?status.svg)](https://pkg.go.dev/github.com/go-pkgz/expirable-cache?tab=doc)

Package cache implements expirable LoadingCache.
Package cache implements expirable cache.

- Support LRC, LRU and TTL-based eviction.
- Package is thread-safe and doesn't spawn any goroutines.
@@ -34,14 +34,14 @@ import (

func main() {
// make cache with short TTL and 3 max keys
lc, _ := cache.NewLoadingCache(cache.MaxKeys(3), cache.TTL(time.Millisecond*10))
c, _ := cache.NewCache(cache.MaxKeys(3), cache.TTL(time.Millisecond*10))

// set value under key1.
// with 0 ttl (last parameter) will use cache-wide setting instead (10ms).
lc.Set("key1", "val1", 0)
c.Set("key1", "val1", 0)

// get value under key1
r, ok := lc.Get("key1")
r, ok := c.Get("key1")

// check for OK value, because otherwise return would be nil and
// type conversion will panic
@@ -53,15 +53,15 @@ func main() {
time.Sleep(time.Millisecond * 11)

// get value under key1 after key expiration
r, ok = lc.Get("key1")
r, ok = c.Get("key1")
// don't convert to string as with ok == false value would be nil
fmt.Printf("value after expiration is found: %v, value: %v\n", ok, r)

// set value under key2, would evict old entry because it is already expired.
// ttl (last parameter) overrides cache-wide ttl.
lc.Set("key2", "val2", time.Minute*5)
c.Set("key2", "val2", time.Minute*5)

fmt.Printf("%+v\n", lc)
fmt.Printf("%+v\n", c)
// Output:
// value before expiration is found: true, value: val1
// value after expiration is found: false, value: <nil>
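For migration purposes, a minimal sketch of the renamed API. Only the constructor and interface names change in this commit; the import path, package name, and method names below are assumed to stay the same, and the program itself is illustrative.

```go
package main

import (
	"time"

	cache "github.com/go-pkgz/expirable-cache" // package name is still "cache"
)

func main() {
	// before this commit: cache.NewLoadingCache(...) returned cache.LoadingCache
	// after this commit:  cache.NewCache(...) returns cache.Cache
	c, err := cache.NewCache(cache.MaxKeys(3), cache.TTL(10*time.Millisecond))
	if err != nil {
		panic(err)
	}
	// Set, Get and the other methods keep their names; ttl of 0 uses the cache-wide TTL
	c.Set("key1", "val1", 0)
}
```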
48 changes: 24 additions & 24 deletions cache.go
@@ -1,4 +1,4 @@
// Package cache implements LoadingCache similar to hashicorp/golang-lru
// Package cache implements Cache similar to hashicorp/golang-lru
//
// Support LRC, LRU and TTL-based eviction.
// Package is thread-safe and doesn't spawn any goroutines.
@@ -21,8 +21,8 @@ import (
"github.com/pkg/errors"
)

// LoadingCache defines loading cache interface
type LoadingCache interface {
// Cache defines cache interface
type Cache interface {
fmt.Stringer
Set(key string, value interface{}, ttl time.Duration)
Get(key string) (interface{}, bool)
@@ -43,8 +43,8 @@ type Stats struct {
Added, Evicted int // number of added and evicted records
}

// loadingCacheImpl provides loading cache, implements LoadingCache interface.
type loadingCacheImpl struct {
// cacheImpl provides Cache interface implementation.
type cacheImpl struct {
ttl time.Duration
maxKeys int
isLRU bool
@@ -59,12 +59,12 @@ type loadingCacheImpl struct {
// noEvictionTTL - very long ttl to prevent eviction
const noEvictionTTL = time.Hour * 24 * 365 * 10

// NewLoadingCache returns a new LoadingCache.
// NewCache returns a new Cache.
// Default MaxKeys is unlimited (0).
// Default TTL is 10 years, sane value for expirable cache is 5 minutes.
// Default eviction mode is LRC, appropriate option allows changing it to LRU.
func NewLoadingCache(options ...Option) (LoadingCache, error) {
res := loadingCacheImpl{
func NewCache(options ...Option) (Cache, error) {
res := cacheImpl{
items: map[string]*list.Element{},
evictList: list.New(),
ttl: noEvictionTTL,
@@ -80,7 +80,7 @@ func NewLoadingCache(options ...Option) (LoadingCache, error) {
}

// Set key, ttl of 0 would use cache-wide TTL
func (c *loadingCacheImpl) Set(key string, value interface{}, ttl time.Duration) {
func (c *cacheImpl) Set(key string, value interface{}, ttl time.Duration) {
c.Lock()
defer c.Unlock()
now := time.Now()
@@ -114,7 +114,7 @@ func (c *loadingCacheImpl) Set(key string, value interface{}, ttl time.Duration)
}

// Get returns the key value if it's not expired
func (c *loadingCacheImpl) Get(key string) (interface{}, bool) {
func (c *cacheImpl) Get(key string) (interface{}, bool) {
c.Lock()
defer c.Unlock()
if ent, ok := c.items[key]; ok {
@@ -135,7 +135,7 @@ func (c *loadingCacheImpl) Get(key string) (interface{}, bool) {

// Peek returns the key value (or undefined if not found) without updating the "recently used"-ness of the key.
// Works exactly the same as Get in case of LRC mode (default one).
func (c *loadingCacheImpl) Peek(key string) (interface{}, bool) {
func (c *cacheImpl) Peek(key string) (interface{}, bool) {
c.Lock()
defer c.Unlock()
if ent, ok := c.items[key]; ok {
@@ -152,21 +152,21 @@ func (c *loadingCacheImpl) Peek(key string) (interface{}, bool) {
}

// Keys returns a slice of the keys in the cache, from oldest to newest.
func (c *loadingCacheImpl) Keys() []string {
func (c *cacheImpl) Keys() []string {
c.Lock()
defer c.Unlock()
return c.keys()
}

// Len returns count of items in cache, including expired
func (c *loadingCacheImpl) Len() int {
func (c *cacheImpl) Len() int {
c.Lock()
defer c.Unlock()
return c.evictList.Len()
}

// Invalidate key (item) from the cache
func (c *loadingCacheImpl) Invalidate(key string) {
func (c *cacheImpl) Invalidate(key string) {
c.Lock()
defer c.Unlock()
if ent, ok := c.items[key]; ok {
@@ -175,7 +175,7 @@ func (c *loadingCacheImpl) Invalidate(key string) {
}

// InvalidateFn deletes multiple keys if predicate is true
func (c *loadingCacheImpl) InvalidateFn(fn func(key string) bool) {
func (c *cacheImpl) InvalidateFn(fn func(key string) bool) {
c.Lock()
defer c.Unlock()
for key, ent := range c.items {
@@ -186,14 +186,14 @@ func (c *loadingCacheImpl) InvalidateFn(fn func(key string) bool) {
}

// RemoveOldest removes the oldest element in the cache
func (c *loadingCacheImpl) RemoveOldest() {
func (c *cacheImpl) RemoveOldest() {
c.Lock()
defer c.Unlock()
c.removeOldest()
}

// DeleteExpired clears cache of expired items
func (c *loadingCacheImpl) DeleteExpired() {
func (c *cacheImpl) DeleteExpired() {
c.Lock()
defer c.Unlock()
for _, key := range c.keys() {
@@ -204,7 +204,7 @@ func (c *loadingCacheImpl) DeleteExpired() {
}

// Purge clears the cache completely.
func (c *loadingCacheImpl) Purge() {
func (c *cacheImpl) Purge() {
c.Lock()
defer c.Unlock()
for k, v := range c.items {
@@ -218,20 +218,20 @@ func (c *loadingCacheImpl) Purge() {
}

// Stat gets the current stats for cache
func (c *loadingCacheImpl) Stat() Stats {
func (c *cacheImpl) Stat() Stats {
c.Lock()
defer c.Unlock()
return c.stat
}

func (c *loadingCacheImpl) String() string {
func (c *cacheImpl) String() string {
stats := c.Stat()
size := c.Len()
return fmt.Sprintf("Size: %d, Stats: %+v (%0.1f%%)", size, stats, 100*float64(stats.Hits)/float64(stats.Hits+stats.Misses))
}

// Keys returns a slice of the keys in the cache, from oldest to newest. Has to be called with lock!
func (c *loadingCacheImpl) keys() []string {
func (c *cacheImpl) keys() []string {
keys := make([]string, 0, len(c.items))
for ent := c.evictList.Back(); ent != nil; ent = ent.Prev() {
keys = append(keys, ent.Value.(*cacheItem).key)
@@ -240,23 +240,23 @@ func (c *loadingCacheImpl) keys() []string {
}

// removeOldest removes the oldest item from the cache. Has to be called with lock!
func (c *loadingCacheImpl) removeOldest() {
func (c *cacheImpl) removeOldest() {
ent := c.evictList.Back()
if ent != nil {
c.removeElement(ent)
}
}

// removeOldestIfExpired removes the oldest item from the cache in case it's already expired. Has to be called with lock!
func (c *loadingCacheImpl) removeOldestIfExpired() {
func (c *cacheImpl) removeOldestIfExpired() {
ent := c.evictList.Back()
if ent != nil && time.Now().After(ent.Value.(*cacheItem).expiresAt) {
c.removeElement(ent)
}
}

// removeElement is used to remove a given list element from the cache. Has to be called with lock!
func (c *loadingCacheImpl) removeElement(e *list.Element) {
func (c *cacheImpl) removeElement(e *list.Element) {
c.evictList.Remove(e)
kv := e.Value.(*cacheItem)
delete(c.items, kv.key)
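A sketch of the full method set as implemented by cacheImpl above. The interface hunk is truncated in this view, so the assumption here is that Cache exposes the same methods the implementation defines (Peek, Keys, Len, Invalidate, DeleteExpired, Purge, Stat); the surrounding program is illustrative.

```go
package main

import (
	"fmt"
	"time"

	cache "github.com/go-pkgz/expirable-cache"
)

func main() {
	c, _ := cache.NewCache(cache.LRU(), cache.TTL(5*time.Minute), cache.MaxKeys(100))

	c.Set("a", "1", 0)           // ttl of 0 -> cache-wide TTL (5 minutes here)
	c.Set("b", "2", time.Second) // per-key ttl overrides the cache-wide one

	v, ok := c.Peek("a") // like Get, but doesn't refresh recency in LRU mode
	fmt.Println(v, ok)

	fmt.Println(c.Keys(), c.Len()) // keys oldest to newest; Len counts expired entries too

	c.Invalidate("a")     // drop a single key
	c.DeleteExpired()     // drop everything past its TTL
	fmt.Println(c.Stat()) // Hits, Misses, Added, Evicted counters
	c.Purge()             // drop all remaining entries
}
```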
36 changes: 18 additions & 18 deletions cache_test.go
@@ -10,8 +10,8 @@ import (
"github.com/stretchr/testify/assert"
)

func TestLoadingCacheNoPurge(t *testing.T) {
lc, err := NewLoadingCache()
func TestCacheNoPurge(t *testing.T) {
lc, err := NewCache()
assert.NoError(t, err)

lc.Set("key1", "val1", 0)
@@ -28,9 +28,9 @@ func TestLoadingCacheNoPurge(t *testing.T) {
assert.Equal(t, []string{"key1"}, lc.Keys())
}

func TestLoadingCacheWithDeleteExpired(t *testing.T) {
func TestCacheWithDeleteExpired(t *testing.T) {
var evicted []string
lc, err := NewLoadingCache(
lc, err := NewCache(
TTL(150*time.Millisecond),
OnEvicted(func(key string, value interface{}) { evicted = append(evicted, key, value.(string)) }),
)
@@ -70,8 +70,8 @@ func TestLoadingCacheWithDeleteExpired(t *testing.T) {
assert.Equal(t, []string{"key1", "val1", "key2", "val2"}, evicted)
}

func TestLoadingCacheWithPurgeEnforcedBySize(t *testing.T) {
lc, err := NewLoadingCache(MaxKeys(10), TTL(time.Hour))
func TestCacheWithPurgeEnforcedBySize(t *testing.T) {
lc, err := NewCache(MaxKeys(10), TTL(time.Hour))
assert.NoError(t, err)

for i := 0; i < 100; i++ {
@@ -86,8 +86,8 @@ func TestLoadingCacheWithPurgeEnforcedBySize(t *testing.T) {
assert.Equal(t, 10, lc.Len())
}

func TestLoadingCacheConcurrency(t *testing.T) {
lc, err := NewLoadingCache()
func TestCacheConcurrency(t *testing.T) {
lc, err := NewCache()
assert.NoError(t, err)
wg := sync.WaitGroup{}
wg.Add(1000)
@@ -101,9 +101,9 @@ func TestLoadingCacheConcurrency(t *testing.T) {
assert.Equal(t, 100, lc.Len())
}

func TestLoadingCacheInvalidateAndEvict(t *testing.T) {
func TestCacheInvalidateAndEvict(t *testing.T) {
var evicted int
lc, err := NewLoadingCache(LRU(), OnEvicted(func(_ string, _ interface{}) { evicted++ }))
lc, err := NewCache(LRU(), OnEvicted(func(_ string, _ interface{}) { evicted++ }))
assert.NoError(t, err)

lc.Set("key1", "val1", 0)
@@ -133,16 +133,16 @@ func TestLoadingCacheInvalidateAndEvict(t *testing.T) {
assert.Equal(t, 0, lc.Len())
}

func TestLoadingCacheBadOption(t *testing.T) {
lc, err := NewLoadingCache(func(lc *loadingCacheImpl) error {
func TestCacheBadOption(t *testing.T) {
lc, err := NewCache(func(lc *cacheImpl) error {
return errors.New("mock err")
})
assert.EqualError(t, err, "failed to set cache option: mock err")
assert.Nil(t, lc)
}

func TestLoadingExpired(t *testing.T) {
lc, err := NewLoadingCache(TTL(time.Millisecond * 5))
func TestCacheExpired(t *testing.T) {
lc, err := NewCache(TTL(time.Millisecond * 5))
assert.NoError(t, err)

lc.Set("key1", "val1", 0)
@@ -168,8 +168,8 @@ func TestLoadingExpired(t *testing.T) {
assert.False(t, ok)
}

func TestLoadingCacheRemoveOldest(t *testing.T) {
lc, err := NewLoadingCache(LRU(), MaxKeys(2))
func TestCacheRemoveOldest(t *testing.T) {
lc, err := NewCache(LRU(), MaxKeys(2))
assert.NoError(t, err)

lc.Set("key1", "val1", 0)
@@ -192,9 +192,9 @@ func TestLoadingCacheRemoveOldest(t *testing.T) {
assert.Equal(t, 1, lc.Len())
}

func ExampleLoadingCache() {
func ExampleCache() {
// make cache with short TTL and 3 max keys
cache, _ := NewLoadingCache(MaxKeys(3), TTL(time.Millisecond*10))
cache, _ := NewCache(MaxKeys(3), TTL(time.Millisecond*10))

// set value under key1.
// with 0 ttl (last parameter) will use cache-wide setting instead (10ms).
10 changes: 5 additions & 5 deletions options.go
@@ -3,11 +3,11 @@ package cache
import "time"

// Option func type
type Option func(lc *loadingCacheImpl) error
type Option func(lc *cacheImpl) error

// OnEvicted sets a callback which is called for automatically and manually deleted entries
func OnEvicted(fn func(key string, value interface{})) Option {
return func(lc *loadingCacheImpl) error {
return func(lc *cacheImpl) error {
lc.onEvicted = fn
return nil
}
@@ -16,7 +16,7 @@ func OnEvicted(fn func(key string, value interface{})) Option {
// MaxKeys functional option defines how many keys to keep.
// By default it is 0, which means unlimited.
func MaxKeys(max int) Option {
return func(lc *loadingCacheImpl) error {
return func(lc *cacheImpl) error {
lc.maxKeys = max
return nil
}
@@ -25,15 +25,15 @@ func MaxKeys(max int) Option {
// TTL functional option defines TTL for all cache entries.
// By default it is set to 10 years, sane option for expirable cache might be 5 minutes.
func TTL(ttl time.Duration) Option {
return func(lc *loadingCacheImpl) error {
return func(lc *cacheImpl) error {
lc.ttl = ttl
return nil
}
}

// LRU sets cache to LRU (Least Recently Used) eviction mode.
func LRU() Option {
return func(lc *loadingCacheImpl) error {
return func(lc *cacheImpl) error {
lc.isLRU = true
return nil
}
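Option is now func(lc *cacheImpl) error, and cacheImpl is unexported, so custom options can only be written inside the package (as the bad-option test does); callers compose the exported option constructors instead. A minimal sketch, with the behavior notes taken from the comments above and the rest illustrative:

```go
package main

import (
	"fmt"
	"time"

	cache "github.com/go-pkgz/expirable-cache"
)

func main() {
	c, err := cache.NewCache(
		cache.LRU(),              // LRU eviction instead of the default LRC
		cache.MaxKeys(1000),      // 0 (the default) means unlimited
		cache.TTL(5*time.Minute), // cache-wide TTL; the default is ~10 years
		cache.OnEvicted(func(key string, value interface{}) {
			fmt.Println("evicted:", key) // fires for automatic and manual deletions
		}),
	)
	if err != nil { // a failing option surfaces as "failed to set cache option: ..."
		panic(err)
	}
	c.Set("k", "v", 0)
}
```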
