package dataext

import (
	"sync"
)

//
// This is an LRU (Least-Recently-Used) cache based on a doubly linked list
// All the work we do below is to have a cache where we can easily remove the least-recently-used element
// (to ensure that the cache is never bigger than maxsize items)
//
// The cache algorithm has the following properties:
// - Memory footprint is O(n), with negligible overhead
// - The cache is thread-safe (sync.Mutex)
// - Inserts are O(1)
// - Gets are O(1)
// - Re-shuffles are O(1) (= an element is removed on Insert because the cache was full)
//
// There are also a bunch of unit tests to ensure that the cache is always in a consistent state
//
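// The head of the linked list is always the most recently used entry and the tail is the
// least recently used one (= the next eviction candidate).
//
// Example usage (an illustrative sketch; the key/value types and values here are arbitrary):
//
//	cache := NewLRUMap[string, int](8)
//	cache.Put("answer", 42)
//	if v, ok := cache.TryGet("answer"); ok {
//		_ = v // v == 42; "answer" is now the most recently used entry
//	}
//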

type LRUMap[TKey comparable, TData any] struct {
	maxsize int
	lock    sync.Mutex

	cache map[TKey]*cacheNode[TKey, TData]

	lfuHead *cacheNode[TKey, TData]
	lfuTail *cacheNode[TKey, TData]
}
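
// cacheNode is a single entry in the cache.
// It stores the key/value pair and links to its neighbours in the recency-ordered list.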
type cacheNode[TKey comparable, TData any] struct {
	key    TKey
	data   TData
	parent *cacheNode[TKey, TData]
	child  *cacheNode[TKey, TData]
}
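
// NewLRUMap creates a new, empty cache that can hold at most `size` entries.
// A size of 0 disables the cache entirely; sizes 1 and 2 are rejected (NewLRUMap panics).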
func NewLRUMap[TKey comparable, TData any](size int) *LRUMap[TKey, TData] {
	if size <= 2 && size != 0 {
		panic("Size must be > 2 (or 0)")
	}

	return &LRUMap[TKey, TData]{
		maxsize: size,
		lock:    sync.Mutex{},
		cache:   make(map[TKey]*cacheNode[TKey, TData], size+1),
		lfuHead: nil,
		lfuTail: nil,
	}
}
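
// Put inserts value under key and marks it as the most recently used entry.
// If the key already exists, only its value and its position in the recency list are updated.
// If the insert pushes the cache above maxsize, the least recently used entry is evicted.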
func (c *LRUMap[TKey, TData]) Put(key TKey, value TData) {
	if c.maxsize == 0 {
		return // cache disabled
	}

	c.lock.Lock()
	defer c.lock.Unlock()

	node, exists := c.cache[key]

	if exists {
		// key already in the cache: only update the LRU order and the value
		c.moveNodeToTop(node)
		node.data = value
		return
	}

	// key does not exist: insert into the map and add to the top of the LRU list
	node = &cacheNode[TKey, TData]{
		key:    key,
		data:   value,
		parent: nil,
		child:  c.lfuHead,
	}
	if c.lfuHead == nil && c.lfuTail == nil { // special case - previously the cache was empty (head == tail == nil)
		c.lfuHead = node
		c.lfuTail = node
	} else {
		c.lfuHead = node
		node.child.parent = node
	}
	c.cache[key] = node
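
	// the cache has grown beyond maxsize: evict the least recently used entry by
	// dropping the list tail and removing it from the map, so both structures stay in sync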
	if len(c.cache) > c.maxsize { // maxsize is always > 2
		tail := c.lfuTail
		delete(c.cache, tail.key)
		c.lfuTail = tail.parent
		c.lfuTail.child = nil
		tail.parent = nil
		tail.child = nil
	}
}
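
// TryGet looks up key and returns its value, together with a boolean that reports whether the key was found.
// A successful lookup marks the entry as the most recently used one; a miss returns the zero value of TData.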
func (c *LRUMap[TKey, TData]) TryGet(key TKey) (TData, bool) {
	if c.maxsize == 0 {
		return *new(TData), false // cache disabled
	}

	c.lock.Lock()
	defer c.lock.Unlock()

	val, ok := c.cache[key]
	if !ok {
		return *new(TData), false
	}
	c.moveNodeToTop(val)
	return val.data, ok
}
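
// moveNodeToTop unlinks node from the doubly linked list and re-inserts it at the head,
// i.e. it marks the node as the most recently used entry. The caller must hold c.lock.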
func (c *LRUMap[TKey, TData]) moveNodeToTop(node *cacheNode[TKey, TData]) {
	// (only called in critical section!)

	if c.lfuHead == node { // fast case
		return
	}

	// Step 1: unlink the node from its current position

	parent := node.parent
	child := node.child
	if parent != nil {
		parent.child = child
	}
	if child != nil {
		child.parent = parent
	}
	if node == c.lfuHead {
		c.lfuHead = node.child
	}
	if node == c.lfuTail {
		c.lfuTail = node.parent
	}

	// Step 2: re-insert the node at the top (head) of the list
	node.parent = nil
	node.child = c.lfuHead
	c.lfuHead = node
	if node.child != nil {
		node.child.parent = node
	}
}
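
// Size returns the number of entries currently stored in the cache.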
func (c *LRUMap[TKey, TData]) Size() int {
	c.lock.Lock()
	defer c.lock.Unlock()
	return len(c.cache)
}