# Copyright (C) 2006, 2008 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

"""A simple least-recently-used (LRU) cache."""

from collections import deque

from bzrlib import symbol_versioning

class LRUCache(object):
    """A class which manages a cache of entries, removing unused ones."""

    def __init__(self, max_cache=100, after_cleanup_count=None,
                 after_cleanup_size=symbol_versioning.DEPRECATED_PARAMETER):
        """Create a new LRUCache.

        :param max_cache: The maximum number of entries to hold before old
            entries start being removed.
        :param after_cleanup_count: When cleaning up, shrink the cache down
            to this many entries.
        :param after_cleanup_size: Deprecated alias for after_cleanup_count
            (deprecated in 1.11); still honoured, with a warning.
        """
        if symbol_versioning.deprecated_passed(after_cleanup_size):
            symbol_versioning.warn('LRUCache.__init__(after_cleanup_size) was'
                                   ' deprecated in 1.11. Use'
                                   ' after_cleanup_count instead.',
                                   DeprecationWarning)
            after_cleanup_count = after_cleanup_size
        # key -> cached value; key -> optional cleanup callback
        self._cache = {}
        self._cleanup = {}
        self._queue = deque() # Track when things are accessed
        self._refcount = {} # number of entries in self._queue for each key
        self._update_max_cache(max_cache, after_cleanup_count)
41
40
def __contains__(self, key):
42
41
return key in self._cache
80
"""Get the list of keys currently cached.
82
Note that values returned here may not be available by the time you
83
request them later. This is simply meant as a peak into the current
86
:return: An unordered list of keys that are currently cached.
88
return self._cache.keys()
91
78
"""Clear the cache until it shrinks to the requested size.
93
80
This does not completely wipe the cache, just makes sure it is under
94
the after_cleanup_count.
81
the after_cleanup_size.
96
83
# Make sure the cache is shrunk to the correct size
97
while len(self._cache) > self._after_cleanup_count:
84
while len(self._cache) > self._after_cleanup_size:
99
# No need to compact the queue at this point, because the code that
100
# calls this would have already triggered it based on queue length
102
87
def __setitem__(self, key, value):
103
88
"""Add a value to the cache, there will be no cleanup function."""
124
109
self._queue = new_queue
125
110
# All entries should be of the same size. There should be one entry in
126
111
# queue for each entry in cache, and all refcounts should == 1
127
if not (len(self._queue) == len(self._cache) ==
128
len(self._refcount) == sum(self._refcount.itervalues())):
129
raise AssertionError()
112
assert (len(self._queue) == len(self._cache) ==
113
len(self._refcount) == sum(self._refcount.itervalues()))
131
115
def _remove(self, key):
132
116
"""Remove an entry, making sure to maintain the invariants."""
133
cleanup = self._cleanup.pop(key, None)
117
cleanup = self._cleanup.pop(key)
134
118
val = self._cache.pop(key)
135
119
if cleanup is not None:
136
120
cleanup(key, val)
154
138
while self._cache:
155
139
self._remove_lru()
157
def resize(self, max_cache, after_cleanup_count=None):
158
"""Change the number of entries that will be cached."""
159
self._update_max_cache(max_cache,
160
after_cleanup_count=after_cleanup_count)
162
def _update_max_cache(self, max_cache, after_cleanup_count=None):
163
self._max_cache = max_cache
164
if after_cleanup_count is None:
165
self._after_cleanup_count = self._max_cache * 8 / 10
167
self._after_cleanup_count = min(after_cleanup_count, self._max_cache)
169
self._compact_queue_length = 4*self._max_cache
170
if len(self._queue) > self._compact_queue_length:
171
self._compact_queue()
175
142
class LRUSizeCache(LRUCache):
    """An LRUCache that removes things based on the size of the values.

    Rather than restricting the number of entries, this restricts the total
    size of the values stored, as measured by compute_size().
    """

    def __init__(self, max_size=1024*1024, after_cleanup_size=None,
                 compute_size=None):
        """Create a new LRUSizeCache.

        :param max_size: The max number of bytes to store before we start
            clearing out entries.
        :param after_cleanup_size: After cleaning up, shrink everything to
            this size.
        :param compute_size: A function to compute the size of the values. We
            use a function here, so that you can pass 'len' if you are just
            using simple strings, or a more complex function if you are using
            something like a list of strings, or even a custom object.
            The function should take the form "compute_size(value) => integer".
            If not supplied, it defaults to 'len()'
        """
        self._value_size = 0
        self._compute_size = compute_size
        if compute_size is None:
            self._compute_size = len
        self._update_max_size(max_size, after_cleanup_size=after_cleanup_size)
        # This approximates that texts are > 0.5k in size. It only really
        # effects when we clean up the queue, so we don't want it to be too
        # large. max(..., 1) guards against a zero-entry cache when
        # max_size < 512.
        LRUCache.__init__(self, max_cache=max(int(max_size/512), 1))
209
181
def add(self, key, value, cleanup=None):
210
182
"""Add a new value to the cache.
246
217
"""Remove an entry, making sure to maintain the invariants."""
247
218
val = LRUCache._remove(self, key)
248
219
self._value_size -= self._compute_size(val)
250
def resize(self, max_size, after_cleanup_size=None):
251
"""Change the number of bytes that will be cached."""
252
self._update_max_size(max_size, after_cleanup_size=after_cleanup_size)
253
max_cache = max(int(max_size/512), 1)
254
self._update_max_cache(max_cache)
256
def _update_max_size(self, max_size, after_cleanup_size=None):
257
self._max_size = max_size
258
if after_cleanup_size is None:
259
self._after_cleanup_size = self._max_size * 8 / 10
261
self._after_cleanup_size = min(after_cleanup_size, self._max_size)