LruCache.cs
using System;
using System.Collections.Generic;

namespace DataStructures.Cache;

/// <summary>
/// Least Recently Used (LRU) cache implementation.
/// </summary>
/// <typeparam name="TKey">The type of the key (must be not null).</typeparam>
/// <typeparam name="TValue">The type of the value.</typeparam>
/// <remarks>
/// The cache keeps up to <c>capacity</c> items. When a new item is added and the cache is full,
/// the least recently used item is evicted (i.e. the cache keeps the N items that were most
/// recently accessed via the <c>Get()</c> or <c>Put()</c> methods).
///
/// The cache is built on top of two data structures:
/// - <c>Dictionary</c> - allows items to be looked up by key in O(1) time.
/// - <c>LinkedList</c> - keeps items ordered by last usage, with O(1) node moves and removals.
///
/// Useful links:
/// https://en.wikipedia.org/wiki/Cache_replacement_policies
/// https://www.educative.io/m/implement-least-recently-used-cache
/// https://leetcode.com/problems/lru-cache/
///
/// To turn this into a most recently used (MRU) cache, evict from the other end of the
/// linked list when the cache is full: in the <c>Put</c> method, take <c>lruList.Last</c>
/// instead of <c>lruList.First</c> and call <c>RemoveLast</c> instead of <c>RemoveFirst</c>.
/// </remarks>
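/// <example>
/// A minimal usage sketch (illustrative only; the capacity and keys are arbitrary):
/// <code>
/// var cache = new LruCache&lt;string, int&gt;(capacity: 2);
/// cache.Put("a", 1);
/// cache.Put("b", 2);
/// cache.Get("a");      // "a" becomes the most recently used item.
/// cache.Put("c", 3);   // Cache is full: the least recently used item ("b") is evicted.
/// cache.Contains("b"); // false
/// cache.Contains("a"); // true
/// </code>
/// </example>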
public class LruCache<TKey, TValue> where TKey : notnull
{
    private class CachedItem
    {
        public TKey Key { get; set; } = default!;

        public TValue? Value { get; set; }
    }

    private const int DefaultCapacity = 100;

    private readonly int capacity;

    // The dictionary stores <c>LinkedListNode</c> values so that a node can be removed
    // from the <c>LinkedList</c> in O(1) time when its key is looked up.
    private readonly Dictionary<TKey, LinkedListNode<CachedItem>> cache = new();

    private readonly LinkedList<CachedItem> lruList = new();

    /// <summary>
    /// Initializes a new instance of the <see cref="LruCache{TKey, TValue}"/> class.
    /// </summary>
    /// <param name="capacity">The maximum number of items the cache can store. Must be positive.</param>
    public LruCache(int capacity = DefaultCapacity)
    {
        if (capacity <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(capacity), "Capacity must be a positive number.");
        }

        this.capacity = capacity;
    }

    /// <summary>
    /// Checks whether the item with the given key is in the cache.
    /// </summary>
    /// <param name="key">The key of the cached item.</param>
    /// <remarks> Time complexity: O(1). </remarks>
    public bool Contains(TKey key) => cache.ContainsKey(key);

    /// <summary>
    /// Gets the cached item by key.
    /// </summary>
    /// <param name="key">The key of the cached item.</param>
    /// <returns>The cached item, or <c>default</c> if the item is not found.</returns>
    /// <remarks> Time complexity: O(1). </remarks>
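    /// <example>
    /// An illustrative sketch of the miss behaviour (note that for value types a miss
    /// is indistinguishable from a cached <c>default</c> value):
    /// <code>
    /// var cache = new LruCache&lt;string, int&gt;();
    /// cache.Put("answer", 42);
    /// var hit = cache.Get("answer");   // 42
    /// var miss = cache.Get("unknown"); // default(int), i.e. 0
    /// </code>
    /// </example>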
    public TValue? Get(TKey key)
    {
        if (!cache.TryGetValue(key, out var node))
        {
            return default;
        }

        // Mark the item as the most recently used by moving its node to the end of the list.
        lruList.Remove(node);
        lruList.AddLast(node);

        return node.Value.Value;
    }

    /// <summary>
    /// Adds or updates the value in the cache.
    /// </summary>
    /// <param name="key">The key of the item to cache.</param>
    /// <param name="value">The value to cache.</param>
    /// <remarks>
    /// Time complexity: O(1).
    /// If the value is already cached, it is updated and the item is moved
    /// to the end of the LRU list.
    /// If the cache is full, the least recently used item is evicted.
    /// </remarks>
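    /// <example>
    /// An illustrative sketch of the update path (keys, values and capacity are arbitrary):
    /// <code>
    /// var cache = new LruCache&lt;string, int&gt;(capacity: 2);
    /// cache.Put("a", 1);
    /// cache.Put("b", 2);
    /// cache.Put("a", 10);  // Updates "a" and makes it the most recently used item.
    /// cache.Put("c", 3);   // Evicts "b", not "a".
    /// </code>
    /// </example>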
    public void Put(TKey key, TValue value)
    {
        if (cache.TryGetValue(key, out var existingNode))
        {
            // Update the cached value and mark the item as the most recently used.
            existingNode.Value.Value = value;
            lruList.Remove(existingNode);
            lruList.AddLast(existingNode);
            return;
        }

        if (cache.Count >= capacity)
        {
            // Evict the least recently used item, which sits at the front of the list.
            var first = lruList.First!;
            lruList.RemoveFirst();
            cache.Remove(first.Value.Key);
        }

        var item = new CachedItem { Key = key, Value = value };
        var newNode = lruList.AddLast(item);
        cache.Add(key, newNode);
    }
}