# Copyright (C) 2006, 2008, 2009 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""A simple least-recently-used (LRU) cache."""

from bzrlib import (
    trace,
    )

_null_key = object()
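# A node whose next_key is _null_key is the tail of the LRU list. A private
# sentinel object is used rather than None so that None itself can still be
# used as a cache key.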

class _LRUNode(object):
    """This maintains the linked-list which is the lru internals."""

    __slots__ = ('prev', 'next_key', 'key', 'value', 'cleanup', 'size')

    def __init__(self, key, value, cleanup=None):
        self.prev = None
        self.next_key = _null_key
        self.key = key
        self.value = value
        self.cleanup = cleanup
        # TODO: We could compute this 'on-the-fly' like we used to, and remove
        #       one pointer from this object, we just need to decide if it
        #       actually costs us much of anything in normal usage
        self.size = None

    def __repr__(self):
        if self.prev is None:
            prev_key = None
        else:
            prev_key = self.prev.key
        return '%s(%r n:%r p:%r)' % (self.__class__.__name__, self.key,
                                     self.next_key, prev_key)

    def run_cleanup(self):
        try:
            if self.cleanup is not None:
                self.cleanup(self.key, self.value)
        finally:
            # cleanup might raise an exception, but we want to make sure
            # to break refcycles, etc
            self.cleanup = None
            self.value = None


class LRUCache(object):
    """A class which manages a cache of entries, removing unused ones."""

    def __init__(self, max_cache=100, after_cleanup_count=None):
        self._cache = {}
        # The "HEAD" of the lru linked list
        self._most_recently_used = None
        # The "TAIL" of the lru linked list
        self._least_recently_used = None
        self._update_max_cache(max_cache, after_cleanup_count)
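        # When after_cleanup_count is None, _update_max_cache() defaults it to
        # 80% of max_cache; e.g. the default max_cache of 100 gives an
        # after_cleanup_count of 80.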

    def __contains__(self, key):
        return key in self._cache

    def __getitem__(self, key):
        cache = self._cache
        node = cache[key]
        # Inlined from _record_access to decrease the overhead of __getitem__.
        # We also know more about the structure when __getitem__ succeeds:
        # self._most_recently_used must not be None, etc.
        mru = self._most_recently_used
        if node is mru:
            # Nothing to do, this node is already at the head of the queue
            return node.value
        # Remove this node from the old location
        node_prev = node.prev
        next_key = node.next_key
        # benchmarking shows that the lookup of _null_key in globals is faster
        # than the attribute lookup for (node is self._least_recently_used)
        if next_key is _null_key:
            # 'node' is the _least_recently_used, because it doesn't have a
            # 'next' item. So move the current lru to the previous node.
            self._least_recently_used = node_prev
        else:
            node_next = cache[next_key]
            node_next.prev = node_prev
        node_prev.next_key = next_key
        # Insert this node at the front of the list
        node.next_key = mru.key
        mru.prev = node
        self._most_recently_used = node
        node.prev = None
        return node.value

    def __len__(self):
        return len(self._cache)

    def _walk_lru(self):
        """Walk the LRU list, only meant to be used in tests."""
        node = self._most_recently_used
        if node is not None:
            if node.prev is not None:
                raise AssertionError('the _most_recently_used entry is not'
                                     ' supposed to have a previous entry'
                                     ' %s' % (node,))
        while node is not None:
            if node.next_key is _null_key:
                if node is not self._least_recently_used:
                    raise AssertionError('only the last node should have'
                                         ' no next value: %s' % (node,))
                node_next = None
            else:
                node_next = self._cache[node.next_key]
                if node_next.prev is not node:
                    raise AssertionError('inconsistency found, node.next.prev'
                                         ' != node: %s' % (node,))
            if node.prev is None:
                if node is not self._most_recently_used:
                    raise AssertionError('only the _most_recently_used should'
                                         ' not have a previous node: %s'
                                         % (node,))
            else:
                if node.prev.next_key != node.key:
                    raise AssertionError('inconsistency found, node.prev.next'
                                         ' != node: %s' % (node,))
            yield node
            node = node_next

    def add(self, key, value, cleanup=None):
        """Add a new value to the cache.

        Also, if the entry is ever removed from the cache, call
        cleanup(key, value).

        :param key: The key to store it under
        :param value: The object to store
        :param cleanup: None or a function taking (key, value) to indicate
                        'value' should be cleaned up.
        """
        if key is _null_key:
            raise ValueError('cannot use _null_key as a key')
        if key in self._cache:
            node = self._cache[key]
            try:
                node.run_cleanup()
            finally:
                # Maintain the LRU properties, even if cleanup raises an
                # exception
                node.value = value
                node.cleanup = cleanup
                self._record_access(node)
        else:
            node = _LRUNode(key, value, cleanup=cleanup)
            self._cache[key] = node
            self._record_access(node)

        if len(self._cache) > self._max_cache:
            # Trigger the cleanup
            self.cleanup()
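            # Note: cleanup() keeps evicting until at most
            # self._after_cleanup_count entries remain, so evictions happen
            # in batches rather than one entry per add().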

    def cache_size(self):
        """Get the number of entries we will cache."""
        return self._max_cache

    def get(self, key, default=None):
        node = self._cache.get(key, None)
        if node is None:
            return default
        self._record_access(node)
        return node.value

    def keys(self):
        """Get the list of keys currently cached.

        Note that values returned here may not be available by the time you
        request them later. This is simply meant as a peek into the current
        state.

        :return: An unordered list of keys that are currently cached.
        """
        return self._cache.keys()

    def items(self):
        """Get the key:value pairs as a dict."""
        return dict((k, n.value) for k, n in self._cache.iteritems())

    def cleanup(self):
        """Clear the cache until it shrinks to the requested size.

        This does not completely wipe the cache, just makes sure it is under
        the after_cleanup_count.
        """
        # Make sure the cache is shrunk to the correct size
        while len(self._cache) > self._after_cleanup_count:
            self._remove_lru()

    def __setitem__(self, key, value):
        """Add a value to the cache; there will be no cleanup function."""
        self.add(key, value, cleanup=None)

    def _record_access(self, node):
        """Record that key was accessed."""
        # Move 'node' to the front of the queue
        if self._most_recently_used is None:
            self._most_recently_used = node
            self._least_recently_used = node
            return
        elif node is self._most_recently_used:
            # Nothing to do, this node is already at the head of the queue
            return
        # We've taken care of the tail pointer, remove the node, and insert it
        # at the front
        # REMOVE
        if node is self._least_recently_used:
            self._least_recently_used = node.prev
        if node.prev is not None:
            node.prev.next_key = node.next_key
        if node.next_key is not _null_key:
            node_next = self._cache[node.next_key]
            node_next.prev = node.prev
        # INSERT
        node.next_key = self._most_recently_used.key
        self._most_recently_used.prev = node
        self._most_recently_used = node
        node.prev = None

    def _remove_node(self, node):
        if node is self._least_recently_used:
            self._least_recently_used = node.prev
        self._cache.pop(node.key)
        # If we have removed all entries, remove the head pointer as well
        if self._least_recently_used is None:
            self._most_recently_used = None
        try:
            node.run_cleanup()
        finally:
            # cleanup might raise an exception, but we want to make sure to
            # maintain the linked list
            if node.prev is not None:
                node.prev.next_key = node.next_key
            if node.next_key is not _null_key:
                node_next = self._cache[node.next_key]
                node_next.prev = node.prev
            # And remove this node's pointers
            node.prev = None
            node.next_key = _null_key

    def _remove_lru(self):
        """Remove one entry from the lru, and handle consequences.

        If there are no more references to the lru, then this entry should be
        removed from the cache.
        """
        self._remove_node(self._least_recently_used)

    def clear(self):
        """Clear out all of the cache."""
        # Clean up in LRU order
        while self._cache:
            self._remove_lru()

    def resize(self, max_cache, after_cleanup_count=None):
        """Change the number of entries that will be cached."""
        self._update_max_cache(max_cache,
                               after_cleanup_count=after_cleanup_count)

    def _update_max_cache(self, max_cache, after_cleanup_count=None):
        self._max_cache = max_cache
        if after_cleanup_count is None:
            self._after_cleanup_count = self._max_cache * 8 / 10
        else:
            self._after_cleanup_count = min(after_cleanup_count,
                                            self._max_cache)
        self.cleanup()


class LRUSizeCache(LRUCache):
    """An LRUCache that removes things based on the size of the values.

    This differs in that it doesn't care how many actual items there are;
    it just restricts the cache to be cleaned up after so much data is stored.

    The size of items added will be computed using compute_size(value), which
    defaults to len() if not supplied.
    """

    def __init__(self, max_size=1024*1024, after_cleanup_size=None,
                 compute_size=None):
        """Create a new LRUSizeCache.

        :param max_size: The max number of bytes to store before we start
            clearing out entries.
        :param after_cleanup_size: After cleaning up, shrink everything to this
            size.
        :param compute_size: A function to compute the size of the values. We
            use a function here, so that you can pass 'len' if you are just
            using simple strings, or a more complex function if you are using
            something like a list of strings, or even a custom object.
            The function should take the form "compute_size(value) => integer".
            If not supplied, it defaults to 'len()'.
        """
        self._value_size = 0
        self._compute_size = compute_size
        if compute_size is None:
            self._compute_size = len
        self._update_max_size(max_size, after_cleanup_size=after_cleanup_size)
        LRUCache.__init__(self, max_cache=max(int(max_size/512), 1))
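        # The entry-count limit for the parent class is derived from max_size
        # assuming entries of roughly 512 bytes each; e.g. the default 1MB
        # max_size allows at most 2048 entries.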
2993.1.1 by Robert Collins
* New module ``lru_cache`` providing a cache for use by tasks that need
318
319
    def add(self, key, value, cleanup=None):
320
        """Add a new value to the cache.
321
4178.3.7 by John Arbash Meinel
Review tweaks from Ian.
322
        Also, if the entry is ever removed from the cache, call
323
        cleanup(key, value).
2993.1.1 by Robert Collins
* New module ``lru_cache`` providing a cache for use by tasks that need
324
325
        :param key: The key to store it under
326
        :param value: The object to store
327
        :param cleanup: None or a function taking (key, value) to indicate
4178.3.7 by John Arbash Meinel
Review tweaks from Ian.
328
                        'value' should be cleaned up.
2993.1.1 by Robert Collins
* New module ``lru_cache`` providing a cache for use by tasks that need
329
        """
4287.1.10 by John Arbash Meinel
Restore the ability to handle None as a key.
330
        if key is _null_key:
331
            raise ValueError('cannot use _null_key as a key')
4178.3.3 by John Arbash Meinel
LRUCache is now implemented with a dict to a linked list,
332
        node = self._cache.get(key, None)
2993.1.1 by Robert Collins
* New module ``lru_cache`` providing a cache for use by tasks that need
333
        value_len = self._compute_size(value)
334
        if value_len >= self._after_cleanup_size:
4178.3.7 by John Arbash Meinel
Review tweaks from Ian.
335
            # The new value is 'too big to fit', as it would fill up/overflow
336
            # the cache all by itself
337
            trace.mutter('Adding the key %r to an LRUSizeCache failed.'
                         ' value %d is too big to fit in the cache'
                         ' with size %d %d', key, value_len,
                         self._after_cleanup_size, self._max_size)
            if node is not None:
                # We won't be replacing the old node, so just remove it
                self._remove_node(node)
            if cleanup is not None:
                cleanup(key, value)
            return
        if node is None:
            node = _LRUNode(key, value, cleanup=cleanup)
            self._cache[key] = node
        else:
            self._value_size -= node.size
        node.size = value_len
        self._value_size += value_len
        self._record_access(node)

        if self._value_size > self._max_size:
            # Time to cleanup
            self.cleanup()

    def cleanup(self):
        """Clear the cache until it shrinks to the requested size.

        This does not completely wipe the cache, just makes sure it is under
        the after_cleanup_size.
        """
        # Make sure the cache is shrunk to the correct size
        while self._value_size > self._after_cleanup_size:
            self._remove_lru()

    def _remove_node(self, node):
        self._value_size -= node.size
        LRUCache._remove_node(self, node)

    def resize(self, max_size, after_cleanup_size=None):
        """Change the number of bytes that will be cached."""
        self._update_max_size(max_size, after_cleanup_size=after_cleanup_size)
        max_cache = max(int(max_size/512), 1)
        self._update_max_cache(max_cache)

    def _update_max_size(self, max_size, after_cleanup_size=None):
        self._max_size = max_size
        if after_cleanup_size is None:
            self._after_cleanup_size = self._max_size * 8 / 10
        else:
            self._after_cleanup_size = min(after_cleanup_size, self._max_size)
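        # As in LRUCache, after_cleanup_size defaults to max_size * 8 / 10;
        # e.g. the default max_size of 1024*1024 bytes yields an
        # after_cleanup_size of 838860 bytes.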