# Copyright (C) 2006, 2008 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""A simple least-recently-used (LRU) cache."""

from collections import deque

from bzrlib import symbol_versioning


class LRUCache(object):
    """A class which manages a cache of entries, removing unused ones."""

    def __init__(self, max_cache=100, after_cleanup_count=None,
                 after_cleanup_size=symbol_versioning.DEPRECATED_PARAMETER):
        if symbol_versioning.deprecated_passed(after_cleanup_size):
            symbol_versioning.warn('LRUCache.__init__(after_cleanup_size) was'
                                   ' deprecated in 1.11. Use'
                                   ' after_cleanup_count instead.',
                                   DeprecationWarning)
            after_cleanup_count = after_cleanup_size
        self._cache = {}
        self._cleanup = {}
        self._queue = deque()  # Track when things are accessed
        self._refcount = {}  # number of entries in self._queue for each key
        self._update_max_cache(max_cache, after_cleanup_count)

    def __contains__(self, key):
        return key in self._cache

    def __getitem__(self, key):
        val = self._cache[key]
        self._record_access(key)
        return val

    def __len__(self):
        return len(self._cache)

    def add(self, key, value, cleanup=None):
        """Add a new value to the cache.

        Also, if the entry is ever removed from the queue, call cleanup,
        passing it the key and value being removed.

        :param key: The key to store it under
        :param value: The object to store
        :param cleanup: None or a function taking (key, value) to indicate
                        'value' should be cleaned up.
        """
        if key in self._cache:
            self._remove(key)
        self._cache[key] = value
        if cleanup is not None:
            self._cleanup[key] = cleanup
        self._record_access(key)

        if len(self._cache) > self._max_cache:
            # Trigger the cleanup
            self.cleanup()

    def get(self, key, default=None):
        if key in self._cache:
            return self[key]
        return default

    def keys(self):
        """Get the list of keys currently cached.

        Note that values returned here may not be available by the time you
        request them later. This is simply meant as a peek into the current
        state.

        :return: An unordered list of keys that are currently cached.
        """
        return self._cache.keys()

    def cleanup(self):
        """Clear the cache until it shrinks to the requested size.

        This does not completely wipe the cache, just makes sure it is under
        the after_cleanup_count.
        """
        # Make sure the cache is shrunk to the correct size
        while len(self._cache) > self._after_cleanup_count:
            self._remove_lru()
        # No need to compact the queue at this point, because the code that
        # calls this would have already triggered it based on queue length

    def __setitem__(self, key, value):
        """Add a value to the cache; there will be no cleanup function."""
        self.add(key, value, cleanup=None)

    def _record_access(self, key):
        """Record that key was accessed."""
        self._queue.append(key)
        # Can't use setdefault because you can't += 1 the result
        self._refcount[key] = self._refcount.get(key, 0) + 1

        # If our access queue is too large, clean it up too
        if len(self._queue) > self._compact_queue_length:
            self._compact_queue()

    def _compact_queue(self):
        """Compact the queue, leaving things in sorted last appended order."""
        new_queue = deque()
        for item in self._queue:
            if self._refcount[item] == 1:
                new_queue.append(item)
            else:
                self._refcount[item] -= 1
        self._queue = new_queue
        # All entries should be of the same size. There should be one entry in
        # queue for each entry in cache, and all refcounts should == 1
        if not (len(self._queue) == len(self._cache) ==
                len(self._refcount) == sum(self._refcount.itervalues())):
            raise AssertionError()
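
    # Worked example of the queue bookkeeping above (illustrative values
    # only). After accessing 'a', 'b', 'a', 'c', 'a' the structures hold:
    #     _queue    = deque(['a', 'b', 'a', 'c', 'a'])
    #     _refcount = {'a': 3, 'b': 1, 'c': 1}
    # _compact_queue() keeps only the last occurrence of each key:
    #     _queue    = deque(['b', 'c', 'a'])
    #     _refcount = {'a': 1, 'b': 1, 'c': 1}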

    def _remove(self, key):
        """Remove an entry, making sure to maintain the invariants."""
        cleanup = self._cleanup.pop(key, None)
        val = self._cache.pop(key)
        if cleanup is not None:
            cleanup(key, val)
        return val

    def _remove_lru(self):
        """Remove one entry from the lru, and handle consequences.

        If there are no more references to the lru, then this entry should be
        removed from the cache.
        """
        key = self._queue.popleft()
        self._refcount[key] -= 1
        if not self._refcount[key]:
            del self._refcount[key]
            self._remove(key)

    def clear(self):
        """Clear out all of the cache."""
        # Clean up in LRU order
        while self._cache:
            self._remove_lru()

    def resize(self, max_cache, after_cleanup_count=None):
        """Change the number of entries that will be cached."""
        self._update_max_cache(max_cache,
                               after_cleanup_count=after_cleanup_count)

    def _update_max_cache(self, max_cache, after_cleanup_count=None):
        self._max_cache = max_cache
        if after_cleanup_count is None:
            self._after_cleanup_count = self._max_cache * 8 / 10
        else:
            self._after_cleanup_count = min(after_cleanup_count,
                                            self._max_cache)

        self._compact_queue_length = 4 * self._max_cache
        if len(self._queue) > self._compact_queue_length:
            self._compact_queue()
        self.cleanup()

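# Example usage of LRUCache (an illustrative sketch only; the keys, values
# and the 'evicted' callback are made-up names, not part of bzrlib):
#
#   def evicted(key, value):
#       print 'dropping %r' % (key,)
#
#   cache = LRUCache(max_cache=10, after_cleanup_count=8)
#   cache.add('rev-1', 'some text', cleanup=evicted)
#   cache['rev-2'] = 'more text'     # __setitem__: no cleanup function
#   cache.get('rev-3', 'missing')    # like dict.get(), no KeyError
#   cache.resize(max_cache=5)        # calls cleanup() to enforce the new size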

class LRUSizeCache(LRUCache):
    """An LRUCache that removes things based on the size of the values.

    This differs in that it doesn't care how many actual items there are;
    it just restricts the cache to be cleaned up after so much data is stored.

    The values that are added must support len(value).
    """

    def __init__(self, max_size=1024*1024, after_cleanup_size=None,
                 compute_size=None):
        """Create a new LRUSizeCache.

        :param max_size: The max number of bytes to store before we start
            clearing out entries.
        :param after_cleanup_size: After cleaning up, shrink everything to this
            size.
        :param compute_size: A function to compute the size of the values. We
            use a function here, so that you can pass 'len' if you are just
            using simple strings, or a more complex function if you are using
            something like a list of strings, or even a custom object.
            The function should take the form "compute_size(value) => integer".
            If not supplied, it defaults to 'len()'.
        """
        self._value_size = 0
        self._compute_size = compute_size
        if compute_size is None:
            self._compute_size = len
        # This approximates that texts are > 0.5k in size. It only really
        # matters when we clean up the queue, so we don't want it to be too
        # large.
        self._update_max_size(max_size, after_cleanup_size=after_cleanup_size)
        LRUCache.__init__(self, max_cache=max(int(max_size/512), 1))

    def add(self, key, value, cleanup=None):
        """Add a new value to the cache.

        Also, if the entry is ever removed from the queue, call cleanup,
        passing it the key and value being removed.

        :param key: The key to store it under
        :param value: The object to store
        :param cleanup: None or a function taking (key, value) to indicate
                        'value' should be cleaned up.
        """
        if key in self._cache:
            self._remove(key)
        value_len = self._compute_size(value)
        if value_len >= self._after_cleanup_size:
            return
        self._value_size += value_len
        self._cache[key] = value
        if cleanup is not None:
            self._cleanup[key] = cleanup
        self._record_access(key)

        if self._value_size > self._max_size:
            # Time to cleanup
            self.cleanup()

    def cleanup(self):
        """Clear the cache until it shrinks to the requested size.

        This does not completely wipe the cache, just makes sure it is under
        the after_cleanup_size.
        """
        # Make sure the cache is shrunk to the correct size
        while self._value_size > self._after_cleanup_size:
            self._remove_lru()

    def _remove(self, key):
        """Remove an entry, making sure to maintain the invariants."""
        val = LRUCache._remove(self, key)
        self._value_size -= self._compute_size(val)

    def resize(self, max_size, after_cleanup_size=None):
        """Change the number of bytes that will be cached."""
        self._update_max_size(max_size, after_cleanup_size=after_cleanup_size)
        max_cache = max(int(max_size/512), 1)
        self._update_max_cache(max_cache)

    def _update_max_size(self, max_size, after_cleanup_size=None):
        self._max_size = max_size
        if after_cleanup_size is None:
            self._after_cleanup_size = self._max_size * 8 / 10
        else:
            self._after_cleanup_size = min(after_cleanup_size, self._max_size)
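

# Example usage of LRUSizeCache (an illustrative sketch only; the keys,
# values and sizes are made-up, not part of bzrlib):
#
#   # Cache up to ~1MB of texts, shrinking to ~800KB on cleanup.
#   texts = LRUSizeCache(max_size=1024*1024, after_cleanup_size=800*1024)
#   texts.add('file-id:rev-1', 'the file contents...')
#
#   # Values that are lists of strings need an explicit compute_size:
#   chunked = LRUSizeCache(compute_size=lambda chunks: sum(map(len, chunks)))
#   chunked.add('key', ['first chunk', 'second chunk'])
#
#   # A value at least as large as after_cleanup_size is silently not cached.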