
LRU (Least Recently Used) Cache

Least Recently Used (LRU) is a common caching strategy. It defines the eviction policy: when the cache is full, the item that was accessed least recently is discarded first to make room for new entries. A standard way to get O(1) get and put is to pair a hash map (key → node) with a doubly linked list kept in order from most to least recently used, which is what the implementation below does.

#include <unordered_map>

using std::unordered_map;

class LRUCache {
  // Doubly linked list node; the list is ordered from most recently used
  // (right after head) to least recently used (right before tail).
  struct Node {
    int key{0};
    int val{0};
    Node* prev{nullptr};
    Node* next{nullptr};
  };

  Node* head{new Node{}};              // sentinel at the most-recent end
  Node* tail{new Node{.prev = head}};  // sentinel at the least-recent end
  int cap{0};
  unordered_map<int, Node*> k_n{};     // key -> node, for O(1) lookup

  // Unlink the node from its current position (if any) and splice it in
  // right after head, marking it as the most recently used entry.
  void to_head(Node* node) {
    if (node->prev) node->prev->next = node->next;
    if (node->next) node->next->prev = node->prev;
    node->prev = head;
    node->next = head->next;
    node->prev->next = node;
    node->next->prev = node;
  }

  // Evict the least recently used entry: the node right before tail.
  void trim_tail() {
    Node* died = tail->prev;
    k_n.erase(died->key);
    died->prev->next = tail;
    tail->prev = died->prev;
    delete died;
  }

 public:
  LRUCache(int capacity) : cap{capacity} {
    head->next = tail;  // empty list: head <-> tail
  }

  // Free the sentinels and every node still linked between them.
  ~LRUCache() {
    for (Node* node = head; node != nullptr;) {
      Node* next = node->next;
      delete node;
      node = next;
    }
  }

  int get(int key) {
    if (!k_n.count(key)) return -1;
    to_head(k_n[key]);  // a read refreshes the entry as most recently used
    return k_n[key]->val;
  }

  void put(int key, int value) {
    if (!k_n.count(key)) k_n[key] = new Node{key, value};
    to_head(k_n[key]);  // insert or refresh as most recently used
    k_n[key]->val = value;
    if (static_cast<int>(k_n.size()) > cap) trim_tail();  // over capacity: evict LRU
  }
};
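
As a quick sanity check, a minimal driver like the following (a sketch, assuming the LRUCache class above is in the same translation unit) exercises the eviction order; the comments show the expected results:

#include <iostream>

int main() {
  LRUCache cache(2);                  // capacity for two entries
  cache.put(1, 1);                    // cache: {1=1}
  cache.put(2, 2);                    // cache: {2=2, 1=1}
  std::cout << cache.get(1) << '\n';  // 1; key 1 becomes most recently used
  cache.put(3, 3);                    // evicts key 2, the least recently used
  std::cout << cache.get(2) << '\n';  // -1; key 2 was evicted
  std::cout << cache.get(3) << '\n';  // 3
  return 0;
}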