| comments | difficulty | edit_url | tags |
| -------- | ---------- | -------- | ---- |
| true     | Medium     |          |      |
Design a data structure that follows the constraints of a Least Recently Used (LRU) cache.

Implement the `LRUCache` class:

- `LRUCache(int capacity)` Initialize the LRU cache with positive size `capacity`.
- `int get(int key)` Return the value of the `key` if the key exists, otherwise return `-1`.
- `void put(int key, int value)` Update the value of the `key` if the `key` exists. Otherwise, add the `key-value` pair to the cache. If the number of keys exceeds the `capacity` from this operation, evict the least recently used key.

The functions `get` and `put` must each run in O(1) average time complexity.
Example 1:

Input
["LRUCache", "put", "put", "get", "put", "get", "put", "get", "get", "get"]
[[2], [1, 1], [2, 2], [1], [3, 3], [2], [4, 4], [1], [3], [4]]

Output
[null, null, null, 1, null, -1, null, -1, 3, 4]

Explanation
LRUCache lRUCache = new LRUCache(2);
lRUCache.put(1, 1); // cache is {1=1}
lRUCache.put(2, 2); // cache is {1=1, 2=2}
lRUCache.get(1);    // return 1
lRUCache.put(3, 3); // LRU key was 2, evicts key 2, cache is {1=1, 3=3}
lRUCache.get(2);    // returns -1 (not found)
lRUCache.put(4, 4); // LRU key was 1, evicts key 1, cache is {4=4, 3=3}
lRUCache.get(1);    // return -1 (not found)
lRUCache.get(3);    // return 3
lRUCache.get(4);    // return 4
Constraints:

- `1 <= capacity <= 3000`
- `0 <= key <= 10^4`
- `0 <= value <= 10^5`
- At most `2 * 10^5` calls will be made to `get` and `put`.
We can implement an LRU (Least Recently Used) cache with a hash table and a doubly linked list.

- Hash table: maps each key to its node in the linked list, giving O(1) lookups.
- Doubly linked list: stores the cached entries ordered by how recently they were accessed.

When accessing a node, if it exists, we unlink it from its current position and reinsert it at the head of the list. This keeps the node at the tail of the list as the least recently used one, so when the number of nodes exceeds the cache capacity, we evict the node at the tail.

When inserting a node, if the key already exists, we unlink its node, update the value, and reinsert it at the head of the list. If the key does not exist, we first check whether the cache is full; if it is, we evict the node at the tail of the list, and then insert the new node at the head of the list.
The time complexity is O(1) for each `get` and `put` operation, and the space complexity is O(capacity).
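As a point of comparison, the same behavior can be sketched on top of Python's `collections.OrderedDict`, whose `move_to_end` and `popitem(last=False)` provide the O(1) recency bookkeeping for free. This is an alternative to the hand-rolled hash table + doubly linked list described above, not the reference solution below; the class name `LRUCacheOD` is only an illustrative label.

```python
from collections import OrderedDict


class LRUCacheOD:
    """Alternative sketch: OrderedDict keeps keys ordered by recency
    when we call move_to_end, standing in for the hand-rolled
    hash table + doubly linked list."""

    def __init__(self, capacity: int):
        self.capacity = capacity
        self.od = OrderedDict()

    def get(self, key: int) -> int:
        if key not in self.od:
            return -1
        self.od.move_to_end(key)  # mark as most recently used
        return self.od[key]

    def put(self, key: int, value: int) -> None:
        if key in self.od:
            self.od.move_to_end(key)
        self.od[key] = value
        if len(self.od) > self.capacity:
            self.od.popitem(last=False)  # evict the least recently used key
```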
class Node:
    def __init__(self, key: int = 0, val: int = 0):
        self.key = key
        self.val = val
        self.prev = None
        self.next = None


class LRUCache:
    def __init__(self, capacity: int):
        self.size = 0
        self.capacity = capacity
        self.cache = {}
        # Dummy head and tail simplify insertions and removals at both ends.
        self.head = Node()
        self.tail = Node()
        self.head.next = self.tail
        self.tail.prev = self.head

    def get(self, key: int) -> int:
        if key not in self.cache:
            return -1
        # Move the accessed node to the head to mark it as most recently used.
        node = self.cache[key]
        self.remove_node(node)
        self.add_to_head(node)
        return node.val

    def put(self, key: int, value: int) -> None:
        if key in self.cache:
            node = self.cache[key]
            self.remove_node(node)
            node.val = value
            self.add_to_head(node)
        else:
            node = Node(key, value)
            self.cache[key] = node
            self.add_to_head(node)
            self.size += 1
            if self.size > self.capacity:
                # Evict the least recently used node, which sits just before the tail.
                node = self.tail.prev
                self.cache.pop(node.key)
                self.remove_node(node)
                self.size -= 1

    def remove_node(self, node):
        # Unlink the node from its current position in the list.
        node.prev.next = node.next
        node.next.prev = node.prev

    def add_to_head(self, node):
        # Insert the node right after the dummy head.
        node.next = self.head.next
        node.prev = self.head
        self.head.next = node
        node.next.prev = node
# Your LRUCache object will be instantiated and called as such:
# obj = LRUCache(capacity)
# param_1 = obj.get(key)
# obj.put(key,value)
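As a quick sanity check, the call sequence from Example 1 can be replayed against the Python implementation above; the printed values match the expected output `[1, -1, -1, 3, 4]`.

```python
cache = LRUCache(2)
cache.put(1, 1)      # cache is {1=1}
cache.put(2, 2)      # cache is {1=1, 2=2}
print(cache.get(1))  # 1
cache.put(3, 3)      # evicts key 2, cache is {1=1, 3=3}
print(cache.get(2))  # -1 (not found)
cache.put(4, 4)      # evicts key 1, cache is {3=3, 4=4}
print(cache.get(1))  # -1 (not found)
print(cache.get(3))  # 3
print(cache.get(4))  # 4
```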
class Node {
    int key, val;
    Node prev, next;

    Node() {
    }

    Node(int key, int val) {
        this.key = key;
        this.val = val;
    }
}

class LRUCache {
    private int size;
    private int capacity;
    private Node head = new Node();
    private Node tail = new Node();
    private Map<Integer, Node> cache = new HashMap<>();

    public LRUCache(int capacity) {
        this.capacity = capacity;
        head.next = tail;
        tail.prev = head;
    }

    public int get(int key) {
        if (!cache.containsKey(key)) {
            return -1;
        }
        Node node = cache.get(key);
        removeNode(node);
        addToHead(node);
        return node.val;
    }

    public void put(int key, int value) {
        if (cache.containsKey(key)) {
            Node node = cache.get(key);
            removeNode(node);
            node.val = value;
            addToHead(node);
        } else {
            Node node = new Node(key, value);
            cache.put(key, node);
            addToHead(node);
            if (++size > capacity) {
                node = tail.prev;
                cache.remove(node.key);
                removeNode(node);
                --size;
            }
        }
    }

    private void removeNode(Node node) {
        node.prev.next = node.next;
        node.next.prev = node.prev;
    }

    private void addToHead(Node node) {
        node.next = head.next;
        node.prev = head;
        head.next = node;
        node.next.prev = node;
    }
}
/**
 * Your LRUCache object will be instantiated and called as such:
 * LRUCache obj = new LRUCache(capacity);
 * int param_1 = obj.get(key);
 * obj.put(key,value);
 */
class LRUCache {
private:
    struct Node {
        int key, val;
        Node* prev;
        Node* next;
        Node(int key, int val)
            : key(key)
            , val(val)
            , prev(nullptr)
            , next(nullptr) {}
    };

    int size;
    int capacity;
    Node* head;
    Node* tail;
    unordered_map<int, Node*> cache;

    void removeNode(Node* node) {
        node->prev->next = node->next;
        node->next->prev = node->prev;
    }

    void addToHead(Node* node) {
        node->next = head->next;
        node->prev = head;
        head->next->prev = node;
        head->next = node;
    }

public:
    LRUCache(int capacity)
        : size(0)
        , capacity(capacity) {
        head = new Node(0, 0);
        tail = new Node(0, 0);
        head->next = tail;
        tail->prev = head;
    }

    int get(int key) {
        if (!cache.contains(key)) {
            return -1;
        }
        Node* node = cache[key];
        removeNode(node);
        addToHead(node);
        return node->val;
    }

    void put(int key, int value) {
        if (cache.contains(key)) {
            Node* node = cache[key];
            removeNode(node);
            node->val = value;
            addToHead(node);
        } else {
            Node* node = new Node(key, value);
            cache[key] = node;
            addToHead(node);
            if (++size > capacity) {
                node = tail->prev;
                cache.erase(node->key);
                removeNode(node);
                --size;
            }
        }
    }
};
/**
 * Your LRUCache object will be instantiated and called as such:
 * LRUCache* obj = new LRUCache(capacity);
 * int param_1 = obj->get(key);
 * obj->put(key,value);
 */
type Node struct {
    key, val   int
    prev, next *Node
}

type LRUCache struct {
    size, capacity int
    head, tail     *Node
    cache          map[int]*Node
}

func Constructor(capacity int) LRUCache {
    head := &Node{}
    tail := &Node{}
    head.next = tail
    tail.prev = head
    return LRUCache{
        capacity: capacity,
        head:     head,
        tail:     tail,
        cache:    make(map[int]*Node),
    }
}

func (this *LRUCache) Get(key int) int {
    if node, exists := this.cache[key]; exists {
        this.removeNode(node)
        this.addToHead(node)
        return node.val
    }
    return -1
}

func (this *LRUCache) Put(key int, value int) {
    if node, exists := this.cache[key]; exists {
        this.removeNode(node)
        node.val = value
        this.addToHead(node)
    } else {
        node := &Node{key: key, val: value}
        this.cache[key] = node
        this.addToHead(node)
        if this.size++; this.size > this.capacity {
            node = this.tail.prev
            delete(this.cache, node.key)
            this.removeNode(node)
            this.size--
        }
    }
}

func (this *LRUCache) removeNode(node *Node) {
    node.prev.next = node.next
    node.next.prev = node.prev
}

func (this *LRUCache) addToHead(node *Node) {
    node.next = this.head.next
    node.prev = this.head
    this.head.next = node
    node.next.prev = node
}
/**
 * Your LRUCache object will be instantiated and called as such:
 * obj := Constructor(capacity);
 * param_1 := obj.Get(key);
 * obj.Put(key,value);
 */
class Node {
    key: number;
    val: number;
    prev: Node | null;
    next: Node | null;

    constructor(key: number, val: number) {
        this.key = key;
        this.val = val;
        this.prev = null;
        this.next = null;
    }
}

class LRUCache {
    private size: number;
    private capacity: number;
    private head: Node;
    private tail: Node;
    private cache: Map<number, Node>;

    constructor(capacity: number) {
        this.size = 0;
        this.capacity = capacity;
        this.head = new Node(0, 0);
        this.tail = new Node(0, 0);
        this.head.next = this.tail;
        this.tail.prev = this.head;
        this.cache = new Map();
    }

    get(key: number): number {
        if (!this.cache.has(key)) {
            return -1;
        }
        const node = this.cache.get(key)!;
        this.removeNode(node);
        this.addToHead(node);
        return node.val;
    }

    put(key: number, value: number): void {
        if (this.cache.has(key)) {
            const node = this.cache.get(key)!;
            this.removeNode(node);
            node.val = value;
            this.addToHead(node);
        } else {
            const node = new Node(key, value);
            this.cache.set(key, node);
            this.addToHead(node);
            if (++this.size > this.capacity) {
                const nodeToRemove = this.tail.prev!;
                this.cache.delete(nodeToRemove.key);
                this.removeNode(nodeToRemove);
                --this.size;
            }
        }
    }

    private removeNode(node: Node): void {
        if (!node) return;
        node.prev!.next = node.next;
        node.next!.prev = node.prev;
    }

    private addToHead(node: Node): void {
        node.next = this.head.next;
        node.prev = this.head;
        this.head.next!.prev = node;
        this.head.next = node;
    }
}
/**
 * Your LRUCache object will be instantiated and called as such:
 * var obj = new LRUCache(capacity)
 * var param_1 = obj.get(key)
 * obj.put(key,value)
 */
use std::cell::RefCell;
use std::collections::HashMap;
use std::rc::Rc;
struct Node {
    key: i32,
    value: i32,
    prev: Option<Rc<RefCell<Node>>>,
    next: Option<Rc<RefCell<Node>>>,
}

impl Node {
    #[inline]
    fn new(key: i32, value: i32) -> Self {
        Self {
            key,
            value,
            prev: None,
            next: None,
        }
    }
}

struct LRUCache {
    capacity: usize,
    cache: HashMap<i32, Rc<RefCell<Node>>>,
    head: Option<Rc<RefCell<Node>>>,
    tail: Option<Rc<RefCell<Node>>>,
}

/**
 * `&self` means the method takes an immutable reference.
 * If you need a mutable reference, change it to `&mut self` instead.
 */
impl LRUCache {
    fn new(capacity: i32) -> Self {
        Self {
            capacity: capacity as usize,
            cache: HashMap::new(),
            head: None,
            tail: None,
        }
    }

    fn get(&mut self, key: i32) -> i32 {
        match self.cache.get(&key) {
            Some(node) => {
                let node = Rc::clone(node);
                self.remove(&node);
                self.push_front(&node);
                let value = node.borrow().value;
                value
            }
            None => -1,
        }
    }

    fn put(&mut self, key: i32, value: i32) {
        match self.cache.get(&key) {
            Some(node) => {
                let node = Rc::clone(node);
                node.borrow_mut().value = value;
                self.remove(&node);
                self.push_front(&node);
            }
            None => {
                let node = Rc::new(RefCell::new(Node::new(key, value)));
                self.cache.insert(key, Rc::clone(&node));
                self.push_front(&node);
                if self.cache.len() > self.capacity {
                    let back_key = self.pop_back().unwrap().borrow().key;
                    self.cache.remove(&back_key);
                }
            }
        };
    }

    fn push_front(&mut self, node: &Rc<RefCell<Node>>) {
        match self.head.take() {
            Some(head) => {
                head.borrow_mut().prev = Some(Rc::clone(node));
                node.borrow_mut().prev = None;
                node.borrow_mut().next = Some(head);
                self.head = Some(Rc::clone(node));
            }
            None => {
                self.head = Some(Rc::clone(node));
                self.tail = Some(Rc::clone(node));
            }
        };
    }

    fn remove(&mut self, node: &Rc<RefCell<Node>>) {
        match (node.borrow().prev.as_ref(), node.borrow().next.as_ref()) {
            (None, None) => {
                self.head = None;
                self.tail = None;
            }
            (None, Some(next)) => {
                self.head = Some(Rc::clone(next));
                next.borrow_mut().prev = None;
            }
            (Some(prev), None) => {
                self.tail = Some(Rc::clone(prev));
                prev.borrow_mut().next = None;
            }
            (Some(prev), Some(next)) => {
                next.borrow_mut().prev = Some(Rc::clone(prev));
                prev.borrow_mut().next = Some(Rc::clone(next));
            }
        };
    }

    fn pop_back(&mut self) -> Option<Rc<RefCell<Node>>> {
        match self.tail.take() {
            Some(tail) => {
                self.remove(&tail);
                Some(tail)
            }
            None => None,
        }
    }
}
/**
 * @param {number} capacity
 */
var LRUCache = function (capacity) {
    this.size = 0;
    this.capacity = capacity;
    this.cache = new Map();
    this.head = new Node(0, 0);
    this.tail = new Node(0, 0);
    this.head.next = this.tail;
    this.tail.prev = this.head;
};

/**
 * @param {number} key
 * @return {number}
 */
LRUCache.prototype.get = function (key) {
    if (!this.cache.has(key)) {
        return -1;
    }
    const node = this.cache.get(key);
    this.removeNode(node);
    this.addToHead(node);
    return node.val;
};

/**
 * @param {number} key
 * @param {number} value
 * @return {void}
 */
LRUCache.prototype.put = function (key, value) {
    if (this.cache.has(key)) {
        const node = this.cache.get(key);
        this.removeNode(node);
        node.val = value;
        this.addToHead(node);
    } else {
        const node = new Node(key, value);
        this.cache.set(key, node);
        this.addToHead(node);
        if (++this.size > this.capacity) {
            const nodeToRemove = this.tail.prev;
            this.cache.delete(nodeToRemove.key);
            this.removeNode(nodeToRemove);
            --this.size;
        }
    }
};

LRUCache.prototype.removeNode = function (node) {
    if (!node) return;
    node.prev.next = node.next;
    node.next.prev = node.prev;
};

LRUCache.prototype.addToHead = function (node) {
    node.next = this.head.next;
    node.prev = this.head;
    this.head.next.prev = node;
    this.head.next = node;
};

/**
 * @constructor
 * @param {number} key
 * @param {number} val
 */
function Node(key, val) {
    this.key = key;
    this.val = val;
    this.prev = null;
    this.next = null;
}
/**
 * Your LRUCache object will be instantiated and called as such:
 * var obj = new LRUCache(capacity)
 * var param_1 = obj.get(key)
 * obj.put(key,value)
 */
public class LRUCache {
    private int size;
    private int capacity;
    private Dictionary<int, Node> cache = new Dictionary<int, Node>();
    private Node head = new Node();
    private Node tail = new Node();

    public LRUCache(int capacity) {
        this.capacity = capacity;
        head.Next = tail;
        tail.Prev = head;
    }

    public int Get(int key) {
        if (!cache.ContainsKey(key)) {
            return -1;
        }
        Node node = cache[key];
        RemoveNode(node);
        AddToHead(node);
        return node.Val;
    }

    public void Put(int key, int value) {
        if (cache.ContainsKey(key)) {
            Node node = cache[key];
            RemoveNode(node);
            node.Val = value;
            AddToHead(node);
        } else {
            Node node = new Node(key, value);
            cache[key] = node;
            AddToHead(node);
            if (++size > capacity) {
                node = tail.Prev;
                cache.Remove(node.Key);
                RemoveNode(node);
                --size;
            }
        }
    }

    private void RemoveNode(Node node) {
        node.Prev.Next = node.Next;
        node.Next.Prev = node.Prev;
    }

    private void AddToHead(Node node) {
        node.Next = head.Next;
        node.Prev = head;
        head.Next = node;
        node.Next.Prev = node;
    }

    // Node class to represent each entry in the cache.
    private class Node {
        public int Key;
        public int Val;
        public Node Prev;
        public Node Next;

        public Node() {}

        public Node(int key, int val) {
            Key = key;
            Val = val;
        }
    }
}
/**
 * Your LRUCache object will be instantiated and called as such:
 * LRUCache obj = new LRUCache(capacity);
 * int param_1 = obj.Get(key);
 * obj.Put(key,value);
 */