17
17
"""A simple least-recently-used (LRU) cache."""
19
from __future__ import absolute_import
19
21
from bzrlib import (
25
28
class _LRUNode(object):
    """This maintains the linked-list which is the lru internals."""

    # NOTE: storing 'next_key' (and looking the next node up in the cache
    # dict) instead of a direct 'next' reference avoids creating reference
    # cycles between nodes.
    __slots__ = ('prev', 'next_key', 'key', 'value')

    def __init__(self, key, value):
        self.prev = None
        self.next_key = _null_key
        self.key = key
        self.value = value

    def __repr__(self):
        if self.prev is None:
            prev_key = None
        else:
            prev_key = self.prev.key
        return '%s(%r n:%r p:%r)' % (self.__class__.__name__, self.key,
                                     self.next_key, prev_key)
61
48
class LRUCache(object):
62
49
"""A class which manages a cache of entries, removing unused ones."""
64
def __init__(self, max_cache=100, after_cleanup_count=None,
65
after_cleanup_size=symbol_versioning.DEPRECATED_PARAMETER):
66
if symbol_versioning.deprecated_passed(after_cleanup_size):
67
symbol_versioning.warn('LRUCache.__init__(after_cleanup_size) was'
68
' deprecated in 1.11. Use'
69
' after_cleanup_count instead.',
71
after_cleanup_count = after_cleanup_size
51
def __init__(self, max_cache=100, after_cleanup_count=None):
73
53
# The "HEAD" of the lru linked list
74
54
self._most_recently_used = None
75
55
# The "TAIL" of the lru linked list
76
self._last_recently_used = None
56
self._least_recently_used = None
77
57
self._update_max_cache(max_cache, after_cleanup_count)
79
59
def __contains__(self, key):
80
60
return key in self._cache
82
62
def __getitem__(self, key):
83
node = self._cache[key]
84
65
# Inlined from _record_access to decrease the overhead of __getitem__
85
66
# We also have more knowledge about structure if __getitem__ is
86
67
# succeeding, then we know that self._most_recently_used must not be
90
71
# Nothing to do, this node is already at the head of the queue
92
elif node is self._last_recently_used:
93
self._last_recently_used = node.prev
94
73
# Remove this node from the old location
95
74
node_prev = node.prev
97
if node_prev is not None:
98
node_prev.next = node_next
99
if node_next is not None:
75
next_key = node.next_key
76
# benchmarking shows that the lookup of _null_key in globals is faster
77
# than the attribute lookup for (node is self._least_recently_used)
78
if next_key is _null_key:
79
# 'node' is the _least_recently_used, because it doesn't have a
80
# 'next' item. So move the current lru to the previous node.
81
self._least_recently_used = node_prev
83
node_next = cache[next_key]
100
84
node_next.prev = node_prev
101
# Insert this node to the front of the list
85
node_prev.next_key = next_key
86
# Insert this node at the front of the list
87
node.next_key = mru.key
104
89
self._most_recently_used = node
108
93
def __len__(self):
109
94
return len(self._cache)
112
"""Walk the LRU list, only meant to be used in tests."""
113
node = self._most_recently_used
115
if node.prev is not None:
116
raise AssertionError('the _most_recently_used entry is not'
117
' supposed to have a previous entry'
119
while node is not None:
120
if node.next is None:
121
if node is not self._last_recently_used:
122
raise AssertionError('only the last node should have'
123
' no next value: %s' % (node,))
125
if node.next.prev is not node:
126
raise AssertionError('inconsistency found, node.next.prev'
127
' != node: %s' % (node,))
128
if node.prev is None:
129
if node is not self._most_recently_used:
130
raise AssertionError('only the _most_recently_used should'
131
' not have a previous node: %s'
134
if node.prev.next is not node:
135
raise AssertionError('inconsistency found, node.prev.next'
136
' != node: %s' % (node,))
96
@symbol_versioning.deprecated_method(
97
symbol_versioning.deprecated_in((2, 5, 0)))
140
98
def add(self, key, value, cleanup=None):
141
"""Add a new value to the cache.
143
Also, if the entry is ever removed from the cache, call
146
:param key: The key to store it under
147
:param value: The object to store
148
:param cleanup: None or a function taking (key, value) to indicate
149
'value' should be cleaned up.
99
if cleanup is not None:
100
raise ValueError("Per-node cleanup functions no longer supported")
101
return self.__setitem__(key, value)
103
def __setitem__(self, key, value):
104
"""Add a new value to the cache"""
106
raise ValueError('cannot use _null_key as a key')
151
107
if key in self._cache:
152
108
node = self._cache[key]
154
109
node.value = value
155
node.cleanup = cleanup
110
self._record_access(node)
157
node = _LRUNode(key, value, cleanup=cleanup)
112
node = _LRUNode(key, value)
158
113
self._cache[key] = node
159
self._record_access(node)
114
self._record_access(node)
161
116
if len(self._cache) > self._max_cache:
162
117
# Trigger the cleanup
185
140
return self._cache.keys()
188
"""Get the key:value pairs as a dict."""
143
"""Get a new dict with the same key:value pairs as the cache"""
189
144
return dict((k, n.value) for k, n in self._cache.iteritems())
146
items = symbol_versioning.deprecated_method(
147
symbol_versioning.deprecated_in((2, 5, 0)))(as_dict)
191
149
def cleanup(self):
192
150
"""Clear the cache until it shrinks to the requested size.
198
156
while len(self._cache) > self._after_cleanup_count:
199
157
self._remove_lru()
201
def __setitem__(self, key, value):
202
"""Add a value to the cache, there will be no cleanup function."""
203
self.add(key, value, cleanup=None)
205
159
def _record_access(self, node):
206
160
"""Record that key was accessed."""
207
161
# Move 'node' to the front of the queue
208
162
if self._most_recently_used is None:
209
163
self._most_recently_used = node
210
self._last_recently_used = node
164
self._least_recently_used = node
212
166
elif node is self._most_recently_used:
213
167
# Nothing to do, this node is already at the head of the queue
215
elif node is self._last_recently_used:
216
self._last_recently_used = node.prev
217
169
# We've taken care of the tail pointer, remove the node, and insert it
172
if node is self._least_recently_used:
173
self._least_recently_used = node.prev
220
174
if node.prev is not None:
221
node.prev.next = node.next
222
if node.next is not None:
223
node.next.prev = node.prev
175
node.prev.next_key = node.next_key
176
if node.next_key is not _null_key:
177
node_next = self._cache[node.next_key]
178
node_next.prev = node.prev
225
node.next = self._most_recently_used
180
node.next_key = self._most_recently_used.key
181
self._most_recently_used.prev = node
226
182
self._most_recently_used = node
227
node.next.prev = node
230
185
def _remove_node(self, node):
231
if node is self._last_recently_used:
232
self._last_recently_used = node.prev
186
if node is self._least_recently_used:
187
self._least_recently_used = node.prev
233
188
self._cache.pop(node.key)
234
189
# If we have removed all entries, remove the head pointer as well
235
if self._last_recently_used is None:
190
if self._least_recently_used is None:
236
191
self._most_recently_used = None
192
if node.prev is not None:
193
node.prev.next_key = node.next_key
194
if node.next_key is not _null_key:
195
node_next = self._cache[node.next_key]
196
node_next.prev = node.prev
197
# And remove this node's pointers
199
node.next_key = _null_key
239
201
def _remove_lru(self):
240
202
"""Remove one entry from the lru, and handle consequences.
297
259
self._update_max_size(max_size, after_cleanup_size=after_cleanup_size)
298
260
LRUCache.__init__(self, max_cache=max(int(max_size/512), 1))
300
def add(self, key, value, cleanup=None):
301
"""Add a new value to the cache.
303
Also, if the entry is ever removed from the cache, call
306
:param key: The key to store it under
307
:param value: The object to store
308
:param cleanup: None or a function taking (key, value) to indicate
309
'value' should be cleaned up.
262
def __setitem__(self, key, value):
263
"""Add a new value to the cache"""
265
raise ValueError('cannot use _null_key as a key')
311
266
node = self._cache.get(key, None)
312
267
value_len = self._compute_size(value)
313
268
if value_len >= self._after_cleanup_size:
320
275
if node is not None:
321
276
# We won't be replacing the old node, so just remove it
322
277
self._remove_node(node)
323
if cleanup is not None:
327
node = _LRUNode(key, value, cleanup=cleanup)
280
node = _LRUNode(key, value)
328
281
self._cache[key] = node
330
self._value_size -= node.size
331
node.size = value_len
283
self._value_size -= self._compute_size(node.value)
332
284
self._value_size += value_len
333
285
self._record_access(node)