Add LFU/LRU template

This commit is contained in:
YDZ
2021-01-03 23:48:14 +08:00
parent fb72c855ee
commit 38aa0acb0d
2 changed files with 335 additions and 0 deletions

template/LFUCache.go (new file, 196 lines added)

@@ -0,0 +1,196 @@
package template

import "container/list"

// LFUCache keeps two maps: nodes maps a key to its list element, and
// lists maps a frequency to the list of nodes with that frequency
// (most recently used at the front). min tracks the smallest frequency
// currently in the cache, so eviction is O(1).
type LFUCache struct {
    nodes    map[int]*list.Element
    lists    map[int]*list.List
    capacity int
    min      int
}

type node struct {
    key       int
    value     int
    frequency int
}

// Constructor creates an LFUCache with the given capacity.
func Constructor(capacity int) LFUCache {
    return LFUCache{
        nodes:    make(map[int]*list.Element),
        lists:    make(map[int]*list.List),
        capacity: capacity,
        min:      0,
    }
}

// Get returns the value stored under key, or -1 if the key is absent,
// and moves the node to the list of its incremented frequency.
func (lfuCache *LFUCache) Get(key int) int {
    value, ok := lfuCache.nodes[key]
    if !ok {
        return -1
    }
    currentNode := value.Value.(*node)
    lfuCache.lists[currentNode.frequency].Remove(value)
    currentNode.frequency++
    if _, ok := lfuCache.lists[currentNode.frequency]; !ok {
        lfuCache.lists[currentNode.frequency] = list.New()
    }
    newList := lfuCache.lists[currentNode.frequency]
    newNode := newList.PushFront(currentNode)
    lfuCache.nodes[key] = newNode
    // If this node was the last one at the minimum frequency, bump min.
    if currentNode.frequency-1 == lfuCache.min && lfuCache.lists[currentNode.frequency-1].Len() == 0 {
        lfuCache.min++
    }
    return currentNode.value
}

// Put inserts or updates key. When the cache is full, the oldest node
// with the minimum frequency (the back of lists[min]) is evicted.
func (lfuCache *LFUCache) Put(key int, value int) {
    if lfuCache.capacity == 0 {
        return
    }
    // Existing key: update the value and reuse Get to bump the frequency.
    if currentValue, ok := lfuCache.nodes[key]; ok {
        currentNode := currentValue.Value.(*node)
        currentNode.value = value
        lfuCache.Get(key)
        return
    }
    // Cache full: evict the least frequently, least recently used node.
    if lfuCache.capacity == len(lfuCache.nodes) {
        currentList := lfuCache.lists[lfuCache.min]
        backNode := currentList.Back()
        delete(lfuCache.nodes, backNode.Value.(*node).key)
        currentList.Remove(backNode)
    }
    // A new key always starts with frequency 1.
    lfuCache.min = 1
    currentNode := &node{
        key:       key,
        value:     value,
        frequency: 1,
    }
    if _, ok := lfuCache.lists[1]; !ok {
        lfuCache.lists[1] = list.New()
    }
    newList := lfuCache.lists[1]
    newNode := newList.PushFront(currentNode)
    lfuCache.nodes[key] = newNode
}

/**
 * Your LFUCache object will be instantiated and called as such:
 * obj := Constructor(capacity);
 * param_1 := obj.Get(key);
 * obj.Put(key,value);
 */
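
// exampleLFUCache is a minimal usage sketch; the function name and the call
// sequence are illustrative assumptions (they follow the classic capacity-2
// LFU walk-through), not part of the template itself.
func exampleLFUCache() []int {
    cache := Constructor(2)
    results := []int{}
    cache.Put(1, 1)
    cache.Put(2, 2)
    results = append(results, cache.Get(1)) // 1; key 1 now has frequency 2
    cache.Put(3, 3)                         // evicts key 2, the only key at the minimum frequency 1
    results = append(results, cache.Get(2)) // -1
    results = append(results, cache.Get(3)) // 3; key 3 now has frequency 2
    cache.Put(4, 4)                         // keys 1 and 3 both have frequency 2; the less recently used key 1 is evicted
    results = append(results, cache.Get(1)) // -1
    results = append(results, cache.Get(3)) // 3
    results = append(results, cache.Get(4)) // 4
    return results                          // [1 -1 3 -1 3 4]
}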
// Alternative: index priority queue (container/heap). Every operation is
// O(log n), versus O(1) for the list-based version above.
// import "container/heap"
//
// type LFUCache struct {
// 	capacity int
// 	pq       PriorityQueue
// 	hash     map[int]*Item
// 	counter  int
// }
//
// func Constructor(capacity int) LFUCache {
// 	lfu := LFUCache{
// 		pq:       PriorityQueue{},
// 		hash:     make(map[int]*Item, capacity),
// 		capacity: capacity,
// 	}
// 	return lfu
// }
//
// func (this *LFUCache) Get(key int) int {
// 	if this.capacity == 0 {
// 		return -1
// 	}
// 	if item, ok := this.hash[key]; ok {
// 		this.counter++
// 		this.pq.update(item, item.value, item.frequency+1, this.counter)
// 		return item.value
// 	}
// 	return -1
// }
//
// func (this *LFUCache) Put(key int, value int) {
// 	if this.capacity == 0 {
// 		return
// 	}
// 	this.counter++
// 	// If the key already exists, increase its frequency and fix the heap.
// 	if item, ok := this.hash[key]; ok {
// 		this.pq.update(item, value, item.frequency+1, this.counter)
// 		return
// 	}
// 	// If the key does not exist and the cache is full, evict: remove the
// 	// item from both the hashmap and the priority queue.
// 	if len(this.pq) == this.capacity {
// 		item := heap.Pop(&this.pq).(*Item)
// 		delete(this.hash, item.key)
// 	}
// 	// Create a new item and add it to both the hashmap and the priority queue.
// 	item := &Item{
// 		value: value,
// 		key:   key,
// 		count: this.counter,
// 	}
// 	heap.Push(&this.pq, item)
// 	this.hash[key] = item
// }
//
// // An Item is something we manage in a priority queue.
// type Item struct {
// 	value     int // The value of the item; arbitrary.
// 	key       int
// 	frequency int // The priority of the item in the queue.
// 	count     int // Used for evicting the oldest element on frequency ties.
// 	// The index is needed by update and is maintained by the heap.Interface methods.
// 	index int // The index of the item in the heap.
// }
//
// // A PriorityQueue implements heap.Interface and holds Items.
// type PriorityQueue []*Item
//
// func (pq PriorityQueue) Len() int { return len(pq) }
//
// func (pq PriorityQueue) Less(i, j int) bool {
// 	// Pop should give us the lowest frequency; ties go to the oldest item (smallest count).
// 	if pq[i].frequency == pq[j].frequency {
// 		return pq[i].count < pq[j].count
// 	}
// 	return pq[i].frequency < pq[j].frequency
// }
//
// func (pq PriorityQueue) Swap(i, j int) {
// 	pq[i], pq[j] = pq[j], pq[i]
// 	pq[i].index = i
// 	pq[j].index = j
// }
//
// func (pq *PriorityQueue) Push(x interface{}) {
// 	n := len(*pq)
// 	item := x.(*Item)
// 	item.index = n
// 	*pq = append(*pq, item)
// }
//
// func (pq *PriorityQueue) Pop() interface{} {
// 	old := *pq
// 	n := len(old)
// 	item := old[n-1]
// 	old[n-1] = nil  // avoid memory leak
// 	item.index = -1 // for safety
// 	*pq = old[0 : n-1]
// 	return item
// }
//
// // update modifies the value, frequency, and count of an Item in the queue.
// func (pq *PriorityQueue) update(item *Item, value int, frequency int, count int) {
// 	item.value = value
// 	item.count = count
// 	item.frequency = frequency
// 	heap.Fix(pq, item.index)
// }

template/LRUCache.go (new file, 139 lines added)

@@ -0,0 +1,139 @@
package template

// LRUCache is backed by a hand-rolled doubly linked list: head is the
// most recently used node, tail the least recently used one.
type LRUCache struct {
    head, tail *Node
    keys       map[int]*Node
    capacity   int
}

// Node is a doubly linked list node holding one key/value pair.
type Node struct {
    key, val   int
    prev, next *Node
}

// ConstructorLRU creates an LRUCache with the given capacity.
func ConstructorLRU(capacity int) LRUCache {
    return LRUCache{keys: make(map[int]*Node), capacity: capacity}
}

// Get returns the value stored under key, or -1 if the key is absent,
// and moves the node to the front of the list.
func (lruCache *LRUCache) Get(key int) int {
    if node, ok := lruCache.keys[key]; ok {
        lruCache.Remove(node)
        lruCache.Add(node)
        return node.val
    }
    return -1
}

// Put inserts or updates key and evicts the tail node when the cache
// grows beyond its capacity.
func (lruCache *LRUCache) Put(key int, value int) {
    node, ok := lruCache.keys[key]
    if ok {
        node.val = value
        lruCache.Remove(node)
        lruCache.Add(node)
        return
    }
    node = &Node{key: key, val: value}
    lruCache.keys[key] = node
    lruCache.Add(node)
    if len(lruCache.keys) > lruCache.capacity {
        delete(lruCache.keys, lruCache.tail.key)
        lruCache.Remove(lruCache.tail)
    }
}

// Add pushes node to the front of the list (most recently used).
func (lruCache *LRUCache) Add(node *Node) {
    node.prev = nil
    node.next = lruCache.head
    if lruCache.head != nil {
        lruCache.head.prev = node
    }
    lruCache.head = node
    if lruCache.tail == nil {
        lruCache.tail = node
        lruCache.tail.next = nil
    }
}

// Remove unlinks node from the list, fixing head and tail as needed.
func (lruCache *LRUCache) Remove(node *Node) {
    if node == lruCache.head {
        lruCache.head = node.next
        if node.next != nil {
            node.next.prev = nil
        }
        node.next = nil
        return
    }
    if node == lruCache.tail {
        lruCache.tail = node.prev
        node.prev.next = nil
        node.prev = nil
        return
    }
    node.prev.next = node.next
    node.next.prev = node.prev
}

/**
 * Your LRUCache object will be instantiated and called as such:
 * obj := ConstructorLRU(capacity);  // named ConstructorLRU here to avoid clashing with the LFU Constructor in this package
 * param_1 := obj.Get(key);
 * obj.Put(key,value);
 */
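
// exampleLRUCache is a minimal usage sketch; the function name and the call
// sequence are illustrative assumptions (they follow the classic capacity-2
// LRU walk-through), not part of the template itself.
func exampleLRUCache() []int {
    cache := ConstructorLRU(2)
    results := []int{}
    cache.Put(1, 1)
    cache.Put(2, 2)
    results = append(results, cache.Get(1)) // 1; key 1 becomes most recently used
    cache.Put(3, 3)                         // evicts key 2, the least recently used
    results = append(results, cache.Get(2)) // -1
    cache.Put(4, 4)                         // evicts key 1
    results = append(results, cache.Get(1)) // -1
    results = append(results, cache.Get(3)) // 3
    results = append(results, cache.Get(4)) // 4
    return results                          // [1 -1 -1 3 4]
}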
// 22%
// Alternative: LRU backed by container/list.
// import "container/list"
//
// type LRUCache struct {
// 	Cap  int
// 	Keys map[int]*list.Element
// 	List *list.List
// }
//
// type pair struct {
// 	K, V int
// }
//
// func Constructor(capacity int) LRUCache {
// 	return LRUCache{
// 		Cap:  capacity,
// 		Keys: make(map[int]*list.Element),
// 		List: list.New(),
// 	}
// }
//
// func (c *LRUCache) Get(key int) int {
// 	if el, ok := c.Keys[key]; ok {
// 		c.List.MoveToFront(el)
// 		return el.Value.(pair).V
// 	}
// 	return -1
// }
//
// func (c *LRUCache) Put(key int, value int) {
// 	if el, ok := c.Keys[key]; ok {
// 		el.Value = pair{K: key, V: value}
// 		c.List.MoveToFront(el)
// 	} else {
// 		el := c.List.PushFront(pair{K: key, V: value})
// 		c.Keys[key] = el
// 	}
// 	if c.List.Len() > c.Cap {
// 		el := c.List.Back()
// 		c.List.Remove(el)
// 		delete(c.Keys, el.Value.(pair).K)
// 	}
// }
/**
 * Your LRUCache object will be instantiated and called as such:
 * obj := Constructor(capacity);
 * param_1 := obj.Get(key);
 * obj.Put(key,value);
 */