146. LRU Cache

Design and implement a data structure for Least Recently Used (LRU) cache. It should support the following operations: get and put.

get(key) - Get the value (will always be positive) of the key if the key exists in the cache, otherwise return -1.

put(key, value) - Set or insert the value if the key is not already present. When the cache has reached its capacity, it should invalidate the least recently used item before inserting a new item.

Follow-Up:

Could you do both operations in O(1) time complexity?

Example:

LRUCache cache = new LRUCache( 2 /* capacity */ );

cache.put(1, 1);
cache.put(2, 2);
cache.get(1);       // returns 1
cache.put(3, 3);    // evicts key 2
cache.get(2);       // returns -1 (not found)
cache.put(4, 4);    // evicts key 1
cache.get(1);       // returns -1 (not found)
cache.get(3);       // returns 3
cache.get(4);       // returns 4

Solution: Doubly Linked List

class LinkedNode:
    """A node of a doubly linked list, carrying a key/value pair.

    Designed for use as an LRU-cache entry: the constructor can splice the
    new node between existing neighbours in O(1).
    """

    def __init__(self, key, val, pre=None, nxt=None):
        self.key = key
        self.val = val
        self.pre = pre
        self.nxt = nxt
        # Wire the neighbours back to this node so the list stays doubly linked.
        if pre is not None:
            pre.nxt = self
        if nxt is not None:
            nxt.pre = self

    def __str__(self):
        # Render this node's key, then recurse down the rest of the chain
        # (the final node prints its trailing "None").
        return str(self.key) + ' ' + str(self.nxt)

    def append(self, node):
        """Link ``node`` directly after this node."""
        node.pre = self
        self.nxt = node

    def remove(self):
        """Unlink this node, reconnecting its neighbours to each other."""
        prev_node, next_node = self.pre, self.nxt
        if prev_node is not None:
            prev_node.nxt = next_node
        if next_node is not None:
            next_node.pre = prev_node
        # Fully detach so the removed node holds no stale references.
        self.pre = None
        self.nxt = None
class LRUCache(object):
    """LRU cache: hash map for O(1) lookup + doubly linked list for recency.

    ``self.head`` is a dummy node; the real least-recently-used entry is
    always ``head.nxt``. ``self.tail`` is the most recently used node (the
    dummy head itself when the cache is empty). Both get and put run in O(1).
    """

    def __init__(self, capacity):
        """
        :type capacity: int
        """
        self.capacity = capacity
        # Dummy head so eviction/relinking never special-cases an empty list.
        self.head = LinkedNode(0, 0)
        self.tail = self.head  # most recently used node
        self.cache = dict()    # key -> LinkedNode

    def get(self, key):
        """Return the value stored under ``key``, or -1 if absent.

        A hit also promotes the entry to most recently used.
        :rtype: int
        """
        if key in self.cache:
            node = self.cache[key]
            self.update(node)
            return node.val
        return -1

    def put(self, key, value):
        """Insert or overwrite ``key``; evict the LRU entry when full.

        :type key: int
        :type value: int
        :rtype: nothing
        """
        if key in self.cache:
            node = self.cache[key]
            node.val = value
            self.update(node)
        elif self.capacity > 0:
            if self.is_full():
                lru = self.head.nxt  # real head = least recently used
                # BUG FIX: when the evicted node is also the tail (capacity
                # 1), self.tail would go stale and the list would detach
                # from the dummy head, crashing the next eviction with an
                # AttributeError. Reset the tail before unlinking.
                if lru is self.tail:
                    self.tail = self.head
                lru.remove()
                del self.cache[lru.key]
            # New node is spliced in after the current tail.
            node = LinkedNode(key, value, self.tail)
            self.tail = node
            self.cache[key] = node

    def is_full(self):
        # The hash map mirrors the list length, so its size is the cache size.
        return len(self.cache) == self.capacity

    def update(self, node):
        # Promote node to the tail (most recently used) position.
        if node is not self.tail:
            node.remove()
            self.tail.append(node)
            self.tail = node

Lessons:

  • Use a dummy head, so there is no need to update the head reference when changing the real head.
  • When cache is full, remove the real head instead of dummy head.
  • Update tail reference after appending a new tail.
  • Put node-related operations in node class.

Solution: OrderedDict

import collections


class LRUCache(object):
    """LRU cache built on collections.OrderedDict.

    Insertion order doubles as recency order: the front entry is the least
    recently used and is the one evicted when a new key would exceed the
    capacity. Both operations run in O(1).
    """

    def __init__(self, capacity):
        """
        :type capacity: int
        """
        self.capacity = capacity
        self.cache = collections.OrderedDict()

    def get(self, key):
        """Return the value for ``key`` (refreshing its recency) or -1.

        :rtype: int
        """
        if key not in self.cache:
            return -1
        val = self.cache[key]
        self.update(key, val)
        return val

    def put(self, key, value):
        """Insert or overwrite ``key``, evicting the LRU entry on overflow.

        :type key: int
        :type value: int
        :rtype: nothing
        """
        if key in self.cache:
            self.update(key, value)
            return
        if self.capacity <= 0:
            return  # a zero-capacity cache stores nothing
        if self.is_full():
            # last=False pops from the front, i.e. the LRU entry.
            self.cache.popitem(last=False)
        self.cache[key] = value

    def is_full(self):
        return len(self.cache) == self.capacity

    def update(self, key, val):
        # Re-inserting pushes the key to the back (most recent) position.
        self.cache.pop(key)
        self.cache[key] = val

Lessons:

  • OrderedDict.popitem(last=False), use last=False to pop left (first).

results matching ""

    No results matching ""