146. LRU Cache - cocoder39/coco39_LC GitHub Wiki

146. LRU Cache

Notes 2022

Option 1

OrderedDict has 2 relevant APIs: move_to_end(key, last=True) and popitem(last=True). The last parameter should be set oppositely in the two calls, so that we move freshly used items to one end of the dict and pop stale items from the other end.

class LRUCache:
    """LRU cache backed by an OrderedDict (insertion order doubles as recency order)."""

    def __init__(self, capacity: int):
        self.capacity = capacity
        self.cache = collections.OrderedDict()

    def get(self, key: int) -> int:
        """Return the value stored under key, refreshing its recency; -1 if absent."""
        if key in self.cache:
            self.cache.move_to_end(key)
            return self.cache[key]
        return -1

    def put(self, key: int, value: int) -> None:
        """Insert or update key as the most recent entry; evict the LRU one on overflow."""
        self.cache[key] = value
        self.cache.move_to_end(key)
        if len(self.cache) > self.capacity:
            # The front of the OrderedDict holds the least recently used entry.
            self.cache.popitem(last=False)

Option 2

class Node():
    """Doubly linked list node holding one (key, value) cache entry."""

    def __init__(self, key=0, value=0):
        self.key = key
        self.value = value
        # Neighbour pointers; wired up by the list operations of the cache.
        self.prev = self.next = None

class LRUCache:
    """LRU cache over a hash map plus a doubly linked list with sentinel nodes.

    The most recently used entry sits right after ``head``; the least
    recently used entry sits right before ``tail``.
    """

    def __init__(self, capacity: int):
        # Sentinels remove the edge cases when linking/unlinking at either end.
        self.head = Node()
        self.tail = Node()
        self.head.next = self.tail
        self.tail.prev = self.head

        self.capacity = capacity
        self.size = 0
        self.hashmap = {}

    def __unlink(self, node: Node) -> None:
        """Detach node from the list."""
        before, after = node.prev, node.next
        before.next = after
        after.prev = before

    def __link_after_head(self, node: Node) -> None:
        """Insert node at the most-recently-used position (right after head)."""
        first = self.head.next
        node.prev = self.head
        node.next = first
        first.prev = node
        self.head.next = node

    def __promote(self, node: Node) -> None:
        """Move an already-linked node to the most-recently-used position."""
        self.__unlink(node)
        self.__link_after_head(node)

    def __evict_lru(self) -> Node:
        """Unlink and return the least-recently-used node (right before tail)."""
        victim = self.tail.prev
        self.__unlink(victim)
        return victim

    def get(self, key: int) -> int:
        """Return the cached value for key, refreshing its recency; -1 if absent."""
        node = self.hashmap.get(key)
        if node is None:
            return -1
        self.__promote(node)
        return node.value

    def put(self, key: int, value: int) -> None:
        """Insert or update key; evict the LRU entry when capacity is exceeded."""
        if key in self.hashmap:
            node = self.hashmap[key]
            node.value = value
            self.__promote(node)
            return

        if self.size == self.capacity:
            victim = self.__evict_lru()
            del self.hashmap[victim.key]
            self.size -= 1

        node = Node(key, value)
        self.hashmap[key] = node
        self.__link_after_head(node)
        self.size += 1

NOTES 2021

from typing import TypeVar, Generic

# Type variables for the generic cache: KT = key type, VT = value type.
KT = TypeVar('KT')
VT = TypeVar('VT')


class Node(Generic[KT, VT]):
    """Doubly linked list node pairing a cache key with its value."""

    def __init__(self, key: KT, val: VT):
        self.key = key
        self.val = val
        # Link pointers; managed entirely by the owning LRUCache.
        self.prev = None
        self.next = None

class LRUCache(Generic[KT, VT]):
    """LRU cache using a dict plus a circular doubly linked list.

    A single sentinel ``head`` serves both ends of the circular list:
    ``head.next`` is the most recently used node and ``head.prev`` is the
    least recently used node.
    """

    def __init__(self, capacity: int):
        # Circular list with one sentinel: head.next == head.prev == head when empty.
        self.head = Node(0, 0)
        self.head.prev = self.head
        self.head.next = self.head
        self.cache = {}
        self.capacity = capacity

    def get(self, key: KT) -> VT:
        """Return the value for key and mark it most recent; -1 if absent."""
        if key not in self.cache:
            return -1

        node = self.cache[key]
        # Unlink and re-link at the front to refresh recency.
        self.remove(node)
        self.add_to_head(node)
        return node.val

    def put(self, key: KT, value: VT) -> None:
        """Insert or update key; evict the least recently used entry on overflow."""
        if key in self.cache:
            # Update in place: the node is already in self.cache, so no
            # re-assignment of self.cache[key] is needed.
            node = self.cache[key]
            node.val = value
            self.remove(node)
            self.add_to_head(node)
            return

        node = Node(key, value)
        self.add_to_head(node)
        self.cache[key] = node
        if len(self.cache) > self.capacity:
            # head.prev is the LRU node in the circular list.
            del_node = self.head.prev
            del self.cache[del_node.key]
            self.remove(del_node)

    def remove(self, node: Node) -> None:
        """Unlink node from the circular list."""
        node.next.prev = node.prev
        node.prev.next = node.next

    def add_to_head(self, node: Node) -> None:
        """Link node right after the sentinel (most-recently-used position)."""
        node.next = self.head.next
        node.prev = self.head
        self.head.next = node
        node.next.prev = node


# Demo: exercise the generic cache with capacity 2.
obj = LRUCache[int, int](2)
obj.put(1, 1)
obj.put(2, 2)
print(obj.get(1))  # 1 (hit; key 1 becomes most recently used)
obj.put(3, 3)      # evicts key 2 (least recently used)
print(obj.get(2))  # -1
obj.put(4, 4)      # evicts key 1
print(obj.get(1))  # -1
print(obj.get(3))  # 3
print(obj.get(4))  # 4

========================================

  1. when accessing an element, we need to put it at the most-recently-used position (front of a sequence) so that we can access it quickly (linked list: O(1), deque: O(1), heap: O(log n)). Also, if we hit an element already within the sequence, we need to update its position (delete it and insert it at the head). Finally we choose a doubly linked list.
  2. when the cache is full, kick out the element at the back of the sequence (the least recently used one); list has a pop_back() API.
  3. on a hit, scanning the sequence to find which element was hit would take O(n) time. We add a hash map to trade memory for speed, bringing lookup down to O(1).
class LRUCache{
public:
    // Fixed-capacity LRU cache. The list keeps (key, value) pairs ordered
    // from most recently used (front) to least recently used (back); the
    // map points each key at its list node for O(1) lookup.
    LRUCache(int capacity) {
        sz = capacity;
    }

    // Return the value for key, refreshing its recency, or -1 if absent.
    int get(int key) {
        auto found = mp.find(key);
        if (found == mp.end()) {
            return -1;
        }
        auto pos = found->second;
        // Re-insert a copy at the front, then drop the stale node.
        // (push_front does not invalidate existing list iterators.)
        l.push_front({pos->first, pos->second});
        l.erase(pos);
        mp[key] = l.begin();
        return l.begin()->second;
    }

    // Insert or update key; evict the back (LRU) entry when over capacity.
    void set(int key, int value) {
        auto found = mp.find(key);
        l.push_front({key, value});
        if (found != mp.end()) {
            // Existing key: remove its old node, then repoint the map entry.
            l.erase(found->second);
            mp[key] = l.begin();
        } else {
            mp[key] = l.begin();
            if (mp.size() > sz) {
                mp.erase(l.rbegin()->first);
                l.pop_back();
            }
        }
    }
private:
    list<pair<int, int>> l;
    unordered_map<int, list<pair<int, int>>::iterator> mp;
    int sz;
};
⚠️ **GitHub.com Fallback** ⚠️