Fix potential leak in MimeTypeUtils LRUCache

Prior to this commit, the `MimeTypeUtils` LRUCache would maintain a
queue to track the least recently used cached values. Under concurrent
access, the unconditional remove-then-add of a key could insert
duplicate entries into that unbounded queue, and later removals would
then spend a significant amount of time iterating over an ever-growing
list of elements.
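
The race is easy to reproduce outside the cache. The standalone sketch
below (not part of this commit; the class name and iteration counts are
arbitrary) has several threads perform the old unguarded remove-then-add
on a single key: each `remove` deletes at most one occurrence, but every
thread adds one back, so occurrences of the key accumulate in the queue.

import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CountDownLatch;

// Illustration only: unguarded remove-then-add on a shared queue.
public class QueueDuplicationDemo {

	public static void main(String[] args) throws InterruptedException {
		ConcurrentLinkedQueue<String> queue = new ConcurrentLinkedQueue<>();
		queue.add("key");
		int threads = 8;
		CountDownLatch start = new CountDownLatch(1);
		CountDownLatch done = new CountDownLatch(threads);
		for (int i = 0; i < threads; i++) {
			new Thread(() -> {
				try {
					start.await();
					for (int j = 0; j < 100_000; j++) {
						queue.remove("key");  // may remove nothing if another thread won the race
						queue.add("key");     // ...but every thread adds one back unconditionally
					}
				}
				catch (InterruptedException ex) {
					Thread.currentThread().interrupt();
				}
				finally {
					done.countDown();
				}
			}).start();
		}
		start.countDown();
		done.await();
		// Ideally the size would still be 1; it is often much larger.
		System.out.println("queue size = " + queue.size());
	}
}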

This commit ensures that, in case of concurrent access, a recently used
entry is re-added to the queue only if it was actually removed by the
current thread.
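
For illustration, a minimal sketch of that guarded re-insert in
isolation (hypothetical class, not part of the patch), assuming a
`ConcurrentLinkedDeque`-backed recency queue as in the diff below: a key
is re-queued only by the thread whose `removeLastOccurrence` call
actually removed an entry, so concurrent touches of the same key cannot
increase its number of occurrences.

import java.util.concurrent.ConcurrentLinkedDeque;

// Sketch of a recency queue with the guarded re-insert.
class RecencyQueueSketch<K> {

	private final ConcurrentLinkedDeque<K> queue = new ConcurrentLinkedDeque<>();

	void register(K key) {
		this.queue.offer(key);
	}

	void touch(K key) {
		// Re-queue the key only if this thread removed an occurrence.
		if (this.queue.removeLastOccurrence(key)) {
			this.queue.offer(key);
		}
	}

	K evictLeastRecentlyUsed() {
		return this.queue.poll();
	}
}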

Fixes gh-24886
Brian Clozel 2020-04-09 15:38:01 +02:00
parent de0d06fab5
commit 39536acf9f
1 changed file with 9 additions and 7 deletions

@@ -29,7 +29,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Random;
 import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentLinkedQueue;
+import java.util.concurrent.ConcurrentLinkedDeque;
 import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 import java.util.function.Function;
@@ -420,7 +420,7 @@ public abstract class MimeTypeUtils {
 		private final int maxSize;
 
-		private final ConcurrentLinkedQueue<K> queue = new ConcurrentLinkedQueue<>();
+		private final ConcurrentLinkedDeque<K> queue = new ConcurrentLinkedDeque<>();
 
 		private final ConcurrentHashMap<K, V> cache = new ConcurrentHashMap<>();
@@ -446,8 +446,9 @@ public abstract class MimeTypeUtils {
 				}
 				this.lock.readLock().lock();
 				try {
-					this.queue.remove(key);
-					this.queue.add(key);
+					if (this.queue.removeLastOccurrence(key)) {
+						this.queue.offer(key);
+					}
 					return cached;
 				}
 				finally {
@@ -459,8 +460,9 @@ public abstract class MimeTypeUtils {
 				// Retrying in case of concurrent reads on the same key
 				cached = this.cache.get(key);
 				if (cached != null) {
-					this.queue.remove(key);
-					this.queue.add(key);
+					if (this.queue.removeLastOccurrence(key)) {
+						this.queue.offer(key);
+					}
 					return cached;
 				}
 				// Generate value first, to prevent size inconsistency
@@ -473,7 +475,7 @@ public abstract class MimeTypeUtils {
 						cacheSize--;
 					}
 				}
-				this.queue.add(key);
+				this.queue.offer(key);
 				this.cache.put(key, value);
 				this.size = cacheSize + 1;
 				return value;