LRU Cache Implementation with LinkedHashMap

LRU stands for Least Recently Used.

The idea behind an LRU cache

  • Fixed cache size: the cache is allocated a fixed capacity.
  • Every read of an entry refreshes its recency, making it the most recently used entry.
  • When the cache is full, the least recently used entry is evicted before the newest entry is added.

LinkedHashMap already keeps its entries in order. By default the order is insertion order, but it can be switched to access order: when the third constructor argument, accessOrder, is passed as true, the map is ordered by access; when it is false (the default), the map is ordered by insertion.
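
A minimal sketch of this behavior (the class name AccessOrderDemo and the sample keys are just for illustration): reading a key from an access-ordered LinkedHashMap moves that entry to the most-recently-used end of the iteration order.

import java.util.LinkedHashMap;
import java.util.Map;

public class AccessOrderDemo {
    public static void main(String[] args) {
        // Third constructor argument accessOrder = true: order by access instead of insertion.
        Map<String, Integer> map = new LinkedHashMap<>(16, 0.75f, true);
        map.put("a", 1);
        map.put("b", 2);
        map.put("c", 3);

        map.get("a"); // accessing "a" makes it the most recently used entry

        // Iteration runs from least to most recently accessed.
        System.out.println(map.keySet()); // prints [b, c, a]
    }
}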

With accessOrder set to true, every access moves the entry to the most-recently-used end of the ordering, so the eldest entry is always the least recently accessed one; this satisfies the second point above. LinkedHashMap also provides a hook, removeEldestEntry, which is consulted after each insertion to decide whether the eldest entry should be removed; its default implementation returns false, i.e. nothing is ever removed. Implementing an LRU cache with LinkedHashMap is therefore just a small extension of the class, and there are two ways to do it. One is inheritance: it is the simpler approach, and because the subclass still implements the Map interface, it can be made thread-safe with Collections.synchronizedMap(). The other is delegation: it is a little more elegant, but since the wrapper does not implement Map, thread synchronization has to be handled by the wrapper itself.

// Delegation: the cache wraps a LinkedHashMap instead of extending it.

import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;

/**
 * LRU (Least Recently Used) cache, backed by a LinkedHashMap in access order.
 *
 * @param <K> key type
 * @param <V> value type
 */
public class LRUCache<K, V> {

    private static final float hashTableLoadFactor = 0.75f;

    private LinkedHashMap<K, V> map;
    private int cacheSize;

    /**
     * Creates the LRUCache; the backing LinkedHashMap is created with accessOrder = true.
     *
     * @param cacheSize the maximum number of entries kept in the cache
     */
    public LRUCache(int cacheSize) {
        this.cacheSize = cacheSize;
        // Capacity chosen so the backing map never needs to be rehashed.
        int hashTableCapacity = (int) Math.ceil(cacheSize / hashTableLoadFactor) + 1;
        map = new LinkedHashMap<K, V>(hashTableCapacity, hashTableLoadFactor, true) {
            private static final long serialVersionUID = 1;

            @Override
            protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
                return size() > LRUCache.this.cacheSize;
            }
        };
    }

    /**
     * Retrieves an entry from the cache.<br>
     * The retrieved entry becomes the MRU (most recently used) entry.
     *
     * @param key the key whose associated value is to be returned.
     * @return the value associated to this key, or null if no value with this
     *         key exists in the cache.
     */
    public synchronized V get(K key) {
        return map.get(key);
    }

    /**
     * Adds an entry to this cache. The new entry becomes the MRU (most recently
     * used) entry. If an entry with the specified key already exists in the
     * cache, it is replaced by the new entry. If the cache is full, the LRU
     * (least recently used) entry is removed from the cache.
     *
     * @param key   the key with which the specified value is to be associated.
     * @param value a value to be associated with the specified key.
     */
    public synchronized void put(K key, V value) {
        map.put(key, value);
    }

    /**
     * Clears the cache.
     */
    public synchronized void clear() {
        map.clear();
    }

    /**
     * Returns the number of used entries in the cache.
     *
     * @return the number of entries currently in the cache.
     */
    public synchronized int usedEntries() {
        return map.size();
    }

    /**
     * Returns a <code>Collection</code> that contains a copy of all cache
     * entries.
     *
     * @return a <code>Collection</code> with a copy of the cache content.
     */
    public synchronized Collection<Map.Entry<K, V>> getAll() {
        return new ArrayList<Map.Entry<K, V>>(map.entrySet());
    }

    public static void main(String[] args) {
        LRUCache<String, String> c = new LRUCache<>(3);
        c.put("1", "one");          // 1
        c.put("2", "two");          // 2 1
        c.put("3", "three");        // 3 2 1
        c.put("4", "four");         // 4 3 2 (1 evicted)

        if (c.get("2") == null) {   // 2 4 3
            throw new Error();
        }
        c.put("5", "five");         // 5 2 4 (3 evicted)
        c.put("4", "second four");  // 4 5 2
        if (c.usedEntries() != 3) {
            throw new Error();
        }
        if (!c.get("4").equals("second four")) {
            throw new Error();
        }
        if (!c.get("5").equals("five")) {
            throw new Error();
        }
        if (!c.get("2").equals("two")) {
            throw new Error();
        }
        for (Map.Entry<String, String> entry : c.getAll()) {
            System.out.println(entry.getKey() + ":" + entry.getValue());
        }
    }
}

// Inheritance: extend LinkedHashMap directly.

import java.util.LinkedHashMap;
import java.util.Map;

public class LRUCache<K, V> extends LinkedHashMap<K, V> {

    private static final long serialVersionUID = 1L;

    private final int cacheSize;

    public LRUCache(int cacheSize) {
        // accessOrder = true: iteration order runs from least to most recently accessed.
        super(16, 0.75f, true);
        this.cacheSize = cacheSize;
    }

    // Called after each insertion; evict the eldest entry once the cache exceeds its capacity.
    @Override
    protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
        return size() > cacheSize;
    }
}
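
As noted above, the inheritance-based cache still implements Map, so it can be wrapped with Collections.synchronizedMap() for multi-threaded use. A minimal sketch, assuming the LRUCache class defined directly above (the capacity of 100 and the demo keys are illustrative):

import java.util.Collections;
import java.util.Map;

public class SynchronizedLRUCacheDemo {
    public static void main(String[] args) {
        // Wrap the LRUCache in a synchronized view; with accessOrder = true even get()
        // mutates the internal linked list, so reads need the lock as well.
        Map<String, String> cache =
                Collections.synchronizedMap(new LRUCache<String, String>(100));

        cache.put("key", "value");
        System.out.println(cache.get("key"));

        // Iteration over a synchronized map must still lock the wrapper explicitly.
        synchronized (cache) {
            for (Map.Entry<String, String> entry : cache.entrySet()) {
                System.out.println(entry.getKey() + "=" + entry.getValue());
            }
        }
    }
}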