# Copyright (C) 2006, 2008 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""Tests for the lru_cache module."""

from bzrlib import (
    lru_cache,
    tests,
    )


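# Overview of the lru_cache surface exercised by these tests: the
# LRUCache(max_cache, after_cleanup_count) constructor, dict-style access,
# add(key, value, cleanup=...), get(), keys(), cache_size(), cleanup(),
# clear() and resize(), plus the private _walk_lru() and
# _after_cleanup_count members used for white-box assertions.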
class TestLRUCache(tests.TestCase):
    """Test that LRU cache properly keeps track of entries."""

    def test_cache_size(self):
        cache = lru_cache.LRUCache(max_cache=10)
        self.assertEqual(10, cache.cache_size())

        cache = lru_cache.LRUCache(max_cache=256)
        self.assertEqual(256, cache.cache_size())

        cache.resize(512)
        self.assertEqual(512, cache.cache_size())

    def test_missing(self):
        cache = lru_cache.LRUCache(max_cache=10)

        self.failIf('foo' in cache)
        self.assertRaises(KeyError, cache.__getitem__, 'foo')

        cache['foo'] = 'bar'
        self.assertEqual('bar', cache['foo'])
        self.failUnless('foo' in cache)
        self.failIf('bar' in cache)

    def test_overflow(self):
        """Adding extra entries will pop out old ones."""
        cache = lru_cache.LRUCache(max_cache=1, after_cleanup_count=1)

        cache['foo'] = 'bar'
        # With a max cache of 1, adding 'baz' should pop out 'foo'
        cache['baz'] = 'biz'

        self.failIf('foo' in cache)
        self.failUnless('baz' in cache)

        self.assertEqual('biz', cache['baz'])

    def test_by_usage(self):
        """Accessing entries bumps them up in priority."""
        cache = lru_cache.LRUCache(max_cache=2)

        cache['baz'] = 'biz'
        cache['foo'] = 'bar'

        self.assertEqual('biz', cache['baz'])

        # This must kick out 'foo' because it was the least recently accessed
        cache['nub'] = 'in'

        self.failIf('foo' in cache)

    def test_cleanup(self):
        """Test that we can use a cleanup function."""
        cleanup_called = []
        def cleanup_func(key, val):
            cleanup_called.append((key, val))

        cache = lru_cache.LRUCache(max_cache=2)

        cache.add('baz', '1', cleanup=cleanup_func)
        cache.add('foo', '2', cleanup=cleanup_func)
        cache.add('biz', '3', cleanup=cleanup_func)

        self.assertEqual([('baz', '1')], cleanup_called)

        # 'foo' is now most recent, so final cleanup will call it last
        cache['foo']
        cache.clear()
        self.assertEqual([('baz', '1'), ('biz', '3'), ('foo', '2')],
                         cleanup_called)

    def test_cleanup_on_replace(self):
        """Replacing an object should cleanup the old value."""
        cleanup_called = []
        def cleanup_func(key, val):
            cleanup_called.append((key, val))

        cache = lru_cache.LRUCache(max_cache=2)
        cache.add(1, 10, cleanup=cleanup_func)
        cache.add(2, 20, cleanup=cleanup_func)
        cache.add(2, 25, cleanup=cleanup_func)

        self.assertEqual([(2, 20)], cleanup_called)
        self.assertEqual(25, cache[2])

        # Even __setitem__ should make sure cleanup() is called
        cache[2] = 26
        self.assertEqual([(2, 20), (2, 25)], cleanup_called)

    def test_len(self):
        cache = lru_cache.LRUCache(max_cache=10, after_cleanup_count=10)

        cache[1] = 10
        cache[2] = 20
        cache[3] = 30
        cache[4] = 40

        self.assertEqual(4, len(cache))

        cache[5] = 50
        cache[6] = 60
        cache[7] = 70
        cache[8] = 80

        self.assertEqual(8, len(cache))

        cache[1] = 15 # replacement

        self.assertEqual(8, len(cache))

        cache[9] = 90
        cache[10] = 100
        cache[11] = 110

        # We hit the max
        self.assertEqual(10, len(cache))
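        # _walk_lru() yields nodes in most-recently-used-first order; key 1
        # sits ahead of the older keys because the replacement above counted
        # as a fresh access.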
        self.assertEqual([11, 10, 9, 1, 8, 7, 6, 5, 4, 3],
                         [n.key for n in cache._walk_lru()])

    def test_cleanup_shrinks_to_after_clean_count(self):
        cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=3)

        cache.add(1, 10)
        cache.add(2, 20)
        cache.add(3, 25)
        cache.add(4, 30)
        cache.add(5, 35)

        self.assertEqual(5, len(cache))
        # This will bump us over the max, which causes us to shrink down to
        # after_cleanup_count size
        cache.add(6, 40)
        self.assertEqual(3, len(cache))

    def test_after_cleanup_larger_than_max(self):
        cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=10)
        self.assertEqual(5, cache._after_cleanup_count)

    def test_after_cleanup_none(self):
        cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=None)
        # By default _after_cleanup_count is 80% of the normal size
        self.assertEqual(4, cache._after_cleanup_count)

    def test_cleanup_explicit(self):
        cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=2)

        # Add these in order
        cache.add(1, 10)
        cache.add(2, 20)
        cache.add(3, 25)
        cache.add(4, 30)
        cache.add(5, 35)

        self.assertEqual(5, len(cache))
        # Force a compaction
        cache.cleanup()
        self.assertEqual(2, len(cache))

    def test_preserve_last_access_order(self):
        cache = lru_cache.LRUCache(max_cache=5)

        # Add these in order
        cache.add(1, 10)
        cache.add(2, 20)
        cache.add(3, 25)
        cache.add(4, 30)
        cache.add(5, 35)

        self.assertEqual([5, 4, 3, 2, 1], [n.key for n in cache._walk_lru()])

        # Now access some randomly
        cache[2]
        cache[5]
        cache[3]
        cache[2]
        self.assertEqual([2, 3, 5, 4, 1], [n.key for n in cache._walk_lru()])

    def test_get(self):
        cache = lru_cache.LRUCache(max_cache=5)

        cache.add(1, 10)
        cache.add(2, 20)
        self.assertEqual(20, cache.get(2))
        self.assertIs(None, cache.get(3))
        obj = object()
        self.assertIs(obj, cache.get(3, obj))
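        # A hit via get() should count as an access, while a miss leaves the
        # LRU order untouched; both are checked below through _walk_lru().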
        self.assertEqual([2, 1], [n.key for n in cache._walk_lru()])
        self.assertEqual(10, cache.get(1))
        self.assertEqual([1, 2], [n.key for n in cache._walk_lru()])

    def test_keys(self):
        cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=5)

        cache[1] = 2
        cache[2] = 3
        cache[3] = 4
        self.assertEqual([1, 2, 3], sorted(cache.keys()))
        cache[4] = 5
        cache[5] = 6
        cache[6] = 7
        self.assertEqual([2, 3, 4, 5, 6], sorted(cache.keys()))

    def test_after_cleanup_size_deprecated(self):
        obj = self.callDeprecated([
            'LRUCache.__init__(after_cleanup_size) was deprecated in 1.11.'
            ' Use after_cleanup_count instead.'],
            lru_cache.LRUCache, 50, after_cleanup_size=25)
        self.assertEqual(obj._after_cleanup_count, 25)

    def test_resize_smaller(self):
        cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=4)
        cache[1] = 2
        cache[2] = 3
        cache[3] = 4
        cache[4] = 5
        cache[5] = 6
        self.assertEqual([1, 2, 3, 4, 5], sorted(cache.keys()))
        cache[6] = 7
        self.assertEqual([3, 4, 5, 6], sorted(cache.keys()))
        # Now resize to something smaller, which triggers a cleanup
        cache.resize(max_cache=3, after_cleanup_count=2)
        self.assertEqual([5, 6], sorted(cache.keys()))
        # Adding something will use the new size
        cache[7] = 8
        self.assertEqual([5, 6, 7], sorted(cache.keys()))
        cache[8] = 9
        self.assertEqual([7, 8], sorted(cache.keys()))

    def test_resize_larger(self):
        cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=4)
        cache[1] = 2
        cache[2] = 3
        cache[3] = 4
        cache[4] = 5
        cache[5] = 6
        self.assertEqual([1, 2, 3, 4, 5], sorted(cache.keys()))
        cache[6] = 7
        self.assertEqual([3, 4, 5, 6], sorted(cache.keys()))
        cache.resize(max_cache=8, after_cleanup_count=6)
        self.assertEqual([3, 4, 5, 6], sorted(cache.keys()))
        cache[7] = 8
        cache[8] = 9
        cache[9] = 10
        cache[10] = 11
        self.assertEqual([3, 4, 5, 6, 7, 8, 9, 10], sorted(cache.keys()))
        cache[11] = 12 # triggers cleanup back to new after_cleanup_count
        self.assertEqual([6, 7, 8, 9, 10, 11], sorted(cache.keys()))


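# LRUSizeCache, as exercised below, limits the cache by the total computed
# size of the stored values rather than by entry count; these tests assume
# the default compute_size is equivalent to len().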
class TestLRUSizeCache(tests.TestCase):

    def test_basic_init(self):
        cache = lru_cache.LRUSizeCache()
        self.assertEqual(2048, cache._max_cache)
        self.assertEqual(int(cache._max_size*0.8), cache._after_cleanup_size)
        self.assertEqual(0, cache._value_size)

    def test_add_tracks_size(self):
        cache = lru_cache.LRUSizeCache()
        self.assertEqual(0, cache._value_size)
        cache.add('my key', 'my value text')
        self.assertEqual(13, cache._value_size)

    def test_remove_tracks_size(self):
        cache = lru_cache.LRUSizeCache()
        self.assertEqual(0, cache._value_size)
        cache.add('my key', 'my value text')
        self.assertEqual(13, cache._value_size)
        node = cache._cache['my key']
        cache._remove_node(node)
        self.assertEqual(0, cache._value_size)

    def test_no_add_over_size(self):
        """Adding a large value may not be cached at all."""
        cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=5)
        self.assertEqual(0, cache._value_size)
        self.assertEqual({}, cache.items())
        cache.add('test', 'key')
        self.assertEqual(3, cache._value_size)
        self.assertEqual({'test': 'key'}, cache.items())
        cache.add('test2', 'key that is too big')
        self.assertEqual(3, cache._value_size)
        self.assertEqual({'test':'key'}, cache.items())
        # If adding a key would only trigger a cleanup that removes every
        # cached entry, then that value should not be stored at all
        cache.add('test3', 'bigkey')
        self.assertEqual(3, cache._value_size)
        self.assertEqual({'test':'key'}, cache.items())

        cache.add('test4', 'bikey')
        self.assertEqual(3, cache._value_size)
        self.assertEqual({'test':'key'}, cache.items())

    def test_no_add_over_size_cleanup(self):
        """If a large value is not cached, we will call cleanup right away."""
        cleanup_calls = []
        def cleanup(key, value):
            cleanup_calls.append((key, value))

        cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=5)
        self.assertEqual(0, cache._value_size)
        self.assertEqual({}, cache.items())
        cache.add('test', 'key that is too big', cleanup=cleanup)
        # key was not added
        self.assertEqual(0, cache._value_size)
        self.assertEqual({}, cache.items())
        # and cleanup was called
        self.assertEqual([('test', 'key that is too big')], cleanup_calls)

    def test_adding_clears_cache_based_on_size(self):
        """The cache is cleared in LRU order until small enough"""
        cache = lru_cache.LRUSizeCache(max_size=20)
        cache.add('key1', 'value') # 5 chars
        cache.add('key2', 'value2') # 6 chars
        cache.add('key3', 'value23') # 7 chars
        self.assertEqual(5+6+7, cache._value_size)
        cache['key2'] # reference key2 so it gets a newer reference time
        cache.add('key4', 'value234') # 8 chars, over limit
        # We have to remove 2 keys to get back under limit
        self.assertEqual(6+8, cache._value_size)
        self.assertEqual({'key2':'value2', 'key4':'value234'},
                         cache.items())

    def test_adding_clears_to_after_cleanup_size(self):
        cache = lru_cache.LRUSizeCache(max_size=20, after_cleanup_size=10)
        cache.add('key1', 'value') # 5 chars
        cache.add('key2', 'value2') # 6 chars
        cache.add('key3', 'value23') # 7 chars
        self.assertEqual(5+6+7, cache._value_size)
        cache['key2'] # reference key2 so it gets a newer reference time
        cache.add('key4', 'value234') # 8 chars, over limit
        # We have to remove 3 keys to get back under limit
        self.assertEqual(8, cache._value_size)
        self.assertEqual({'key4':'value234'}, cache.items())

    def test_custom_sizes(self):
        def size_of_list(lst):
            return sum(len(x) for x in lst)
        cache = lru_cache.LRUSizeCache(max_size=20, after_cleanup_size=10,
                                       compute_size=size_of_list)

        cache.add('key1', ['val', 'ue']) # 5 chars
        cache.add('key2', ['val', 'ue2']) # 6 chars
        cache.add('key3', ['val', 'ue23']) # 7 chars
        self.assertEqual(5+6+7, cache._value_size)
        cache['key2'] # reference key2 so it gets a newer reference time
        cache.add('key4', ['value', '234']) # 8 chars, over limit
        # We have to remove 3 keys to get back under limit
        self.assertEqual(8, cache._value_size)
        self.assertEqual({'key4':['value', '234']}, cache.items())

    def test_cleanup(self):
        cache = lru_cache.LRUSizeCache(max_size=20, after_cleanup_size=10)

        # Add these in order
        cache.add('key1', 'value') # 5 chars
        cache.add('key2', 'value2') # 6 chars
        cache.add('key3', 'value23') # 7 chars
        self.assertEqual(5+6+7, cache._value_size)

        cache.cleanup()
        # Only the most recent fits after cleaning up
        self.assertEqual(7, cache._value_size)

    def test_keys(self):
        cache = lru_cache.LRUSizeCache(max_size=10)

        cache[1] = 'a'
        cache[2] = 'b'
        cache[3] = 'cdef'
        self.assertEqual([1, 2, 3], sorted(cache.keys()))

    def test_resize_smaller(self):
        cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=9)
        cache[1] = 'abc'
        cache[2] = 'def'
        cache[3] = 'ghi'
        cache[4] = 'jkl'
        # Triggers a cleanup
        self.assertEqual([2, 3, 4], sorted(cache.keys()))
        # Resize should also cleanup again
        cache.resize(max_size=6, after_cleanup_size=4)
        self.assertEqual([4], sorted(cache.keys()))
        # Adding should use the new max size
        cache[5] = 'mno'
        self.assertEqual([4, 5], sorted(cache.keys()))
        cache[6] = 'pqr'
        self.assertEqual([6], sorted(cache.keys()))

    def test_resize_larger(self):
        cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=9)
        cache[1] = 'abc'
        cache[2] = 'def'
        cache[3] = 'ghi'
        cache[4] = 'jkl'
        # Triggers a cleanup
        self.assertEqual([2, 3, 4], sorted(cache.keys()))
        cache.resize(max_size=15, after_cleanup_size=12)
        self.assertEqual([2, 3, 4], sorted(cache.keys()))
        cache[5] = 'mno'
        cache[6] = 'pqr'
        self.assertEqual([2, 3, 4, 5, 6], sorted(cache.keys()))
        cache[7] = 'stu'
        self.assertEqual([4, 5, 6, 7], sorted(cache.keys()))