Documentation ¶
Index ¶
- Constants
- type TwoQueueCache
- func New2QCache[K comparable, V any](capacity int) *TwoQueueCache[K, V]
- func New2QCacheWithEvictionCallback[K comparable, V any](capacity int, onEviction base.EvictionCallback[K, V]) *TwoQueueCache[K, V]
- func New2QCacheWithRatio[K comparable, V any](capacity int, recentRatio, ghostRatio float64) *TwoQueueCache[K, V]
- func New2QCacheWithRatioAndEvictionCallback[K comparable, V any](capacity int, recentRatio, ghostRatio float64, ...) *TwoQueueCache[K, V]
- func (c *TwoQueueCache[K, V]) Algorithm() string
- func (c *TwoQueueCache[K, V]) Capacity() int
- func (c *TwoQueueCache[K, V]) Delete(key K) bool
- func (c *TwoQueueCache[K, V]) DeleteMany(keys []K) map[K]bool
- func (c *TwoQueueCache[K, V]) Get(key K) (value V, ok bool)
- func (c *TwoQueueCache[K, V]) GetMany(keys []K) (map[K]V, []K)
- func (c *TwoQueueCache[K, V]) Has(key K) bool
- func (c *TwoQueueCache[K, V]) HasMany(keys []K) map[K]bool
- func (c *TwoQueueCache[K, V]) Keys() []K
- func (c *TwoQueueCache[K, V]) Len() int
- func (c *TwoQueueCache[K, V]) Peek(key K) (value V, ok bool)
- func (c *TwoQueueCache[K, V]) PeekMany(keys []K) (map[K]V, []K)
- func (c *TwoQueueCache[K, V]) Purge()
- func (c *TwoQueueCache[K, V]) Range(f func(K, V) bool)
- func (c *TwoQueueCache[K, V]) Set(key K, value V)
- func (c *TwoQueueCache[K, V]) SetMany(items map[K]V)
- func (c *TwoQueueCache[K, V]) Values() []V
Constants ¶
const (
	// Default2QRecentRatio is the ratio of the 2Q cache dedicated
	// to recently added entries that have only been accessed once.
	Default2QRecentRatio = 0.25

	// Default2QGhostEntries is the default ratio of ghost
	// entries kept to track entries recently evicted.
	Default2QGhostEntries = 0.50
)
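As a rough illustration only: under the common 2Q interpretation, the recent ratio sizes the queue of once-accessed entries and the ghost ratio sizes the list of recently evicted keys. That split is an assumption about the implementation, not something this page states; the standalone sketch below mirrors the constants locally just to show the arithmetic.

package main

import "fmt"

// Local mirrors of the package constants above, so this sketch stands alone.
const (
	default2QRecentRatio  = 0.25
	default2QGhostEntries = 0.50
)

func main() {
	// Assuming recent queue = capacity*recentRatio and ghost list =
	// capacity*ghostRatio (an assumption, not documented behaviour here).
	capacity := 100
	fmt.Println(int(float64(capacity) * default2QRecentRatio))  // 25 recent slots
	fmt.Println(int(float64(capacity) * default2QGhostEntries)) // 50 ghost keys
}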
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type TwoQueueCache ¶
type TwoQueueCache[K comparable, V any] struct {
	// contains filtered or unexported fields
}
2Q is an enhancement over the standard LRU cache: it tracks recently used and frequently used entries separately, so a burst of accesses to new entries does not evict frequently used ones. Compared to a plain LRU cache it carries some additional tracking and metadata overhead and is roughly 2x the computational cost. TwoQueueCache is not safe for concurrent access.
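A minimal usage sketch, assuming a placeholder import path for this package (the real module path is not shown on this page) and the usual cache meaning of Peek:

package main

import (
	"fmt"

	cache "example.com/placeholder/twoqueue" // placeholder; substitute this package's real import path
)

func main() {
	// A cache with room for 128 entries; key and value types come from the
	// generic type parameters.
	c := cache.New2QCache[string, int](128)

	c.Set("a", 1)
	c.Set("b", 2)

	if v, ok := c.Get("a"); ok {
		fmt.Println("a =", v) // a = 1
	}

	// Peek is assumed to read without promoting the entry (the usual cache
	// convention); Has only reports presence.
	if v, ok := c.Peek("b"); ok {
		fmt.Println("b =", v)
	}
	fmt.Println(c.Has("b"), c.Len(), c.Capacity()) // true 2 128

	// TwoQueueCache is not safe for concurrent access; wrap it with a
	// sync.Mutex if it is shared between goroutines.
}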
func New2QCache ¶
func New2QCache[K comparable, V any](capacity int) *TwoQueueCache[K, V]
func New2QCacheWithEvictionCallback ¶ added in v0.2.0
func New2QCacheWithEvictionCallback[K comparable, V any](capacity int, onEviction base.EvictionCallback[K, V]) *TwoQueueCache[K, V]
func New2QCacheWithRatio ¶
func New2QCacheWithRatio[K comparable, V any](capacity int, recentRatio, ghostRatio float64) *TwoQueueCache[K, V]
func New2QCacheWithRatioAndEvictionCallback ¶ added in v0.2.0
func New2QCacheWithRatioAndEvictionCallback[K comparable, V any](capacity int, recentRatio, ghostRatio float64, onEviction base.EvictionCallback[K, V]) *TwoQueueCache[K, V]
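A sketch of the ratio-plus-callback constructor under stated assumptions: the import paths are placeholders, and base.EvictionCallback[K, V] is assumed to be satisfiable by a function receiving the evicted key and value. Check the base package for its real definition.

package main

import (
	"fmt"

	cache "example.com/placeholder/twoqueue" // placeholder import path
)

func main() {
	// 30% of capacity reserved for once-accessed entries, ghost list sized
	// at 60%. The callback shape below is an assumption about
	// base.EvictionCallback, not its documented signature.
	c := cache.New2QCacheWithRatioAndEvictionCallback[string, int](
		100, 0.30, 0.60,
		func(key string, value int) { fmt.Println("evicted:", key, value) },
	)

	// Overfilling the cache should trigger evictions and the callback.
	for i := 0; i < 200; i++ {
		c.Set(fmt.Sprintf("k%d", i), i)
	}
}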
func (*TwoQueueCache[K, V]) Algorithm ¶
func (c *TwoQueueCache[K, V]) Algorithm() string
implements base.InMemoryCache
func (*TwoQueueCache[K, V]) Capacity ¶
func (c *TwoQueueCache[K, V]) Capacity() int
implements base.InMemoryCache
func (*TwoQueueCache[K, V]) Delete ¶
func (c *TwoQueueCache[K, V]) Delete(key K) bool
implements base.InMemoryCache
func (*TwoQueueCache[K, V]) DeleteMany ¶
func (c *TwoQueueCache[K, V]) DeleteMany(keys []K) map[K]bool
implements base.InMemoryCache
func (*TwoQueueCache[K, V]) Get ¶
func (c *TwoQueueCache[K, V]) Get(key K) (value V, ok bool)
implements base.InMemoryCache
func (*TwoQueueCache[K, V]) GetMany ¶
func (c *TwoQueueCache[K, V]) GetMany(keys []K) (map[K]V, []K)
implements base.InMemoryCache
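Based on the signature alone, GetMany appears to return a map of the entries that were found plus a slice of the keys that were not; that reading is an assumption, as is the placeholder import path in the sketch below.

package main

import (
	"fmt"

	cache "example.com/placeholder/twoqueue" // placeholder import path
)

func main() {
	c := cache.New2QCache[string, int](16)
	c.SetMany(map[string]int{"a": 1, "b": 2})

	// Assumption from the signature: the map holds the hits and the []K
	// return lists the keys that were not found.
	found, missing := c.GetMany([]string{"a", "b", "nope"})
	fmt.Println(found)   // map[a:1 b:2]
	fmt.Println(missing) // [nope]
}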
func (*TwoQueueCache[K, V]) Has ¶
func (c *TwoQueueCache[K, V]) Has(key K) bool
implements base.InMemoryCache
func (*TwoQueueCache[K, V]) HasMany ¶
func (c *TwoQueueCache[K, V]) HasMany(keys []K) map[K]bool
implements base.InMemoryCache
func (*TwoQueueCache[K, V]) Keys ¶
func (c *TwoQueueCache[K, V]) Keys() []K
implements base.InMemoryCache
func (*TwoQueueCache[K, V]) Len ¶
func (c *TwoQueueCache[K, V]) Len() int
implements base.InMemoryCache
func (*TwoQueueCache[K, V]) Peek ¶
func (c *TwoQueueCache[K, V]) Peek(key K) (value V, ok bool)
implements base.InMemoryCache
func (*TwoQueueCache[K, V]) PeekMany ¶
func (c *TwoQueueCache[K, V]) PeekMany(keys []K) (map[K]V, []K)
implements base.InMemoryCache
func (*TwoQueueCache[K, V]) Purge ¶
func (c *TwoQueueCache[K, V]) Purge()
implements base.InMemoryCache
func (*TwoQueueCache[K, V]) Range ¶
func (c *TwoQueueCache[K, V]) Range(f func(K, V) bool)
implements base.InMemoryCache
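Range takes a visitor function; it is assumed here to follow the usual Go convention (as in sync.Map.Range) that returning false stops the iteration early. The import path is again a placeholder.

package main

import (
	"fmt"

	cache "example.com/placeholder/twoqueue" // placeholder import path
)

func main() {
	c := cache.New2QCache[string, int](16)
	c.SetMany(map[string]int{"a": 1, "b": 2, "c": 3})

	// Assumed convention: returning false stops the iteration early.
	c.Range(func(k string, v int) bool {
		fmt.Println(k, v)
		return true // keep iterating
	})
}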
func (*TwoQueueCache[K, V]) Set ¶
func (c *TwoQueueCache[K, V]) Set(key K, value V)
implements base.InMemoryCache
func (*TwoQueueCache[K, V]) SetMany ¶
func (c *TwoQueueCache[K, V]) SetMany(items map[K]V)
implements base.InMemoryCache
func (*TwoQueueCache[K, V]) Values ¶
func (c *TwoQueueCache[K, V]) Values() []V
implements base.InMemoryCache