# Copyright (C) 2008, 2009 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""Tests for maps built on a CHK versionedfiles facility."""

from itertools import izip

from bzrlib import (
    chk_map,
    errors,
    groupcompress,
    osutils,
    tests,
    )
from bzrlib.chk_map import (
    CHKMap,
    InternalNode,
    LeafNode,
    Node,
    )
from bzrlib.static_tuple import StaticTuple
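
# The tests below exercise CHKMap, a persistent dict-like structure whose
# nodes live in a content-addressed (CHK) store.  As a rough orientation,
# the API shape used throughout this module is:
#
#   store = self.get_chk_bytes()        # a standalone CHK store (see below)
#   chkmap = CHKMap(store, None)        # a None root key means an empty map
#   chkmap.map(('key',), 'value')       # add or replace an entry
#   root_key = chkmap._save()           # write nodes out; returns the root key
#   chkmap = CHKMap(store, root_key)    # reopen the map from its root key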


class TestNode(tests.TestCase):

    def assertCommonPrefix(self, expected_common, prefix, key):
        common = Node.common_prefix(prefix, key)
        self.assertTrue(len(common) <= len(prefix))
        self.assertTrue(len(common) <= len(key))
        self.assertStartsWith(prefix, common)
        self.assertStartsWith(key, common)
        self.assertEquals(expected_common, common)

    def test_common_prefix(self):
        self.assertCommonPrefix('beg', 'beg', 'begin')

    def test_no_common_prefix(self):
        self.assertCommonPrefix('', 'begin', 'end')

    def test_equal(self):
        self.assertCommonPrefix('begin', 'begin', 'begin')

    def test_not_a_prefix(self):
        self.assertCommonPrefix('b', 'begin', 'b')

    def test_empty(self):
        self.assertCommonPrefix('', '', 'end')
        self.assertCommonPrefix('', 'begin', '')
        self.assertCommonPrefix('', '', '')


class TestCaseWithStore(tests.TestCaseWithMemoryTransport):

    def get_chk_bytes(self):
        # This creates a standalone CHK store.
        factory = groupcompress.make_pack_factory(False, False, 1)
        self.chk_bytes = factory(self.get_transport())
        return self.chk_bytes

    def _get_map(self, a_dict, maximum_size=0, chk_bytes=None, key_width=1,
                 search_key_func=None):
        if chk_bytes is None:
            chk_bytes = self.get_chk_bytes()
        root_key = CHKMap.from_dict(chk_bytes, a_dict,
            maximum_size=maximum_size, key_width=key_width,
            search_key_func=search_key_func)
        root_key2 = CHKMap._create_via_map(chk_bytes, a_dict,
            maximum_size=maximum_size, key_width=key_width,
            search_key_func=search_key_func)
        self.assertEqual(root_key, root_key2, "CHKMap.from_dict() did not"
                         " match CHKMap._create_via_map")
        chkmap = CHKMap(chk_bytes, root_key, search_key_func=search_key_func)
        return chkmap

    def read_bytes(self, chk_bytes, key):
        stream = chk_bytes.get_record_stream([key], 'unordered', True)
        record = stream.next()
        if record.storage_kind == 'absent':
            self.fail('Store does not contain the key %s' % (key,))
        return record.get_bytes_as("fulltext")

    def to_dict(self, node, *args):
        return dict(node.iteritems(*args))


class TestCaseWithExampleMaps(TestCaseWithStore):

    def get_chk_bytes(self):
        if getattr(self, '_chk_bytes', None) is None:
            self._chk_bytes = super(TestCaseWithExampleMaps,
                                    self).get_chk_bytes()
        return self._chk_bytes

    def get_map(self, a_dict, maximum_size=100, search_key_func=None):
        c_map = self._get_map(a_dict, maximum_size=maximum_size,
                              chk_bytes=self.get_chk_bytes(),
                              search_key_func=search_key_func)
        return c_map

    def make_root_only_map(self, search_key_func=None):
        return self.get_map({
            ('aaa',): 'initial aaa content',
            ('abb',): 'initial abb content',
        }, search_key_func=search_key_func)

    def make_root_only_aaa_ddd_map(self, search_key_func=None):
        return self.get_map({
            ('aaa',): 'initial aaa content',
            ('ddd',): 'initial ddd content',
        }, search_key_func=search_key_func)

    def make_one_deep_map(self, search_key_func=None):
        # Same as root_only_map, except it forces an InternalNode at the root
        return self.get_map({
            ('aaa',): 'initial aaa content',
            ('abb',): 'initial abb content',
            ('ccc',): 'initial ccc content',
            ('ddd',): 'initial ddd content',
        }, search_key_func=search_key_func)

    def make_two_deep_map(self, search_key_func=None):
        # Carefully chosen so that it creates a 2-deep map for both
        # _search_key_plain and for _search_key_16
        # Also so that things line up with make_one_deep_two_prefix_map
        return self.get_map({
            ('aaa',): 'initial aaa content',
            ('abb',): 'initial abb content',
            ('acc',): 'initial acc content',
            ('ace',): 'initial ace content',
            ('add',): 'initial add content',
            ('adh',): 'initial adh content',
            ('adl',): 'initial adl content',
            ('ccc',): 'initial ccc content',
            ('ddd',): 'initial ddd content',
        }, search_key_func=search_key_func)

    def make_one_deep_two_prefix_map(self, search_key_func=None):
        """Create a map with one internal node, but references are extra long.

        Otherwise has similar content to make_two_deep_map.
        """
        return self.get_map({
            ('aaa',): 'initial aaa content',
            ('add',): 'initial add content',
            ('adh',): 'initial adh content',
            ('adl',): 'initial adl content',
        }, search_key_func=search_key_func)

    def make_one_deep_one_prefix_map(self, search_key_func=None):
        """Create a map with one internal node, but references are extra long.

        Similar to make_one_deep_two_prefix_map, except the split is at the
        first char, rather than the second.
        """
        return self.get_map({
            ('add',): 'initial add content',
            ('adh',): 'initial adh content',
            ('adl',): 'initial adl content',
            ('bbb',): 'initial bbb content',
        }, search_key_func=search_key_func)
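
# A note on the example-map names above: "root_only" maps are small enough
# that the root stays a single LeafNode; "one_deep" maps force the root to
# become an InternalNode whose children are leaves; "two_deep" maps add a
# second layer of InternalNodes below the root.  The "_one_prefix" and
# "_two_prefix" variants control how long the shared key prefix at the split
# point is.  The class below simply asserts the resulting layouts.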
class TestTestCaseWithExampleMaps(TestCaseWithExampleMaps):
177
"""Actual tests for the provided examples."""
179
def test_root_only_map_plain(self):
180
c_map = self.make_root_only_map()
181
self.assertEqualDiff(
183
" ('aaa',) 'initial aaa content'\n"
184
" ('abb',) 'initial abb content'\n",
187
def test_root_only_map_16(self):
188
c_map = self.make_root_only_map(search_key_func=chk_map._search_key_16)
189
self.assertEqualDiff(
191
" ('aaa',) 'initial aaa content'\n"
192
" ('abb',) 'initial abb content'\n",
195
def test_one_deep_map_plain(self):
196
c_map = self.make_one_deep_map()
197
self.assertEqualDiff(
200
" ('aaa',) 'initial aaa content'\n"
201
" ('abb',) 'initial abb content'\n"
203
" ('ccc',) 'initial ccc content'\n"
205
" ('ddd',) 'initial ddd content'\n",
208
def test_one_deep_map_16(self):
209
c_map = self.make_one_deep_map(search_key_func=chk_map._search_key_16)
210
self.assertEqualDiff(
213
" ('ccc',) 'initial ccc content'\n"
215
" ('abb',) 'initial abb content'\n"
217
" ('aaa',) 'initial aaa content'\n"
218
" ('ddd',) 'initial ddd content'\n",
221
def test_root_only_aaa_ddd_plain(self):
222
c_map = self.make_root_only_aaa_ddd_map()
223
self.assertEqualDiff(
225
" ('aaa',) 'initial aaa content'\n"
226
" ('ddd',) 'initial ddd content'\n",
229
def test_root_only_aaa_ddd_16(self):
c_map = self.make_root_only_aaa_ddd_map(
search_key_func=chk_map._search_key_16)
# We use 'aaa' and 'ddd' because they happen to map to 'F' when using
# _search_key_16
234
self.assertEqualDiff(
236
" ('aaa',) 'initial aaa content'\n"
237
" ('ddd',) 'initial ddd content'\n",
240
def test_two_deep_map_plain(self):
241
c_map = self.make_two_deep_map()
242
self.assertEqualDiff(
244
" 'a' InternalNode\n"
246
" ('aaa',) 'initial aaa content'\n"
248
" ('abb',) 'initial abb content'\n"
250
" ('acc',) 'initial acc content'\n"
251
" ('ace',) 'initial ace content'\n"
253
" ('add',) 'initial add content'\n"
254
" ('adh',) 'initial adh content'\n"
255
" ('adl',) 'initial adl content'\n"
257
" ('ccc',) 'initial ccc content'\n"
259
" ('ddd',) 'initial ddd content'\n",
262
def test_two_deep_map_16(self):
263
c_map = self.make_two_deep_map(search_key_func=chk_map._search_key_16)
264
self.assertEqualDiff(
267
" ('acc',) 'initial acc content'\n"
268
" ('ccc',) 'initial ccc content'\n"
270
" ('abb',) 'initial abb content'\n"
272
" ('ace',) 'initial ace content'\n"
273
" 'F' InternalNode\n"
275
" ('aaa',) 'initial aaa content'\n"
277
" ('adl',) 'initial adl content'\n"
279
" ('adh',) 'initial adh content'\n"
281
" ('ddd',) 'initial ddd content'\n"
283
" ('add',) 'initial add content'\n",
286
def test_one_deep_two_prefix_map_plain(self):
287
c_map = self.make_one_deep_two_prefix_map()
288
self.assertEqualDiff(
291
" ('aaa',) 'initial aaa content'\n"
293
" ('add',) 'initial add content'\n"
294
" ('adh',) 'initial adh content'\n"
295
" ('adl',) 'initial adl content'\n",
298
def test_one_deep_two_prefix_map_16(self):
299
c_map = self.make_one_deep_two_prefix_map(
300
search_key_func=chk_map._search_key_16)
301
self.assertEqualDiff(
304
" ('aaa',) 'initial aaa content'\n"
306
" ('adl',) 'initial adl content'\n"
308
" ('adh',) 'initial adh content'\n"
310
" ('add',) 'initial add content'\n",
313
def test_one_deep_one_prefix_map_plain(self):
314
c_map = self.make_one_deep_one_prefix_map()
315
self.assertEqualDiff(
318
" ('add',) 'initial add content'\n"
319
" ('adh',) 'initial adh content'\n"
320
" ('adl',) 'initial adl content'\n"
322
" ('bbb',) 'initial bbb content'\n",
325
def test_one_deep_one_prefix_map_16(self):
326
c_map = self.make_one_deep_one_prefix_map(
327
search_key_func=chk_map._search_key_16)
328
self.assertEqualDiff(
331
" ('bbb',) 'initial bbb content'\n"
333
" ('add',) 'initial add content'\n"
334
" ('adh',) 'initial adh content'\n"
335
" ('adl',) 'initial adl content'\n",


class TestMap(TestCaseWithStore):

    def assertHasABMap(self, chk_bytes):
        ab_leaf_bytes = 'chkleaf:\n0\n1\n1\na\n\x001\nb\n'
        ab_sha1 = osutils.sha_string(ab_leaf_bytes)
        self.assertEqual('90986195696b177c8895d48fdb4b7f2366f798a0', ab_sha1)
        root_key = ('sha1:' + ab_sha1,)
        self.assertEqual(ab_leaf_bytes, self.read_bytes(chk_bytes, root_key))
        return root_key

    def assertHasEmptyMap(self, chk_bytes):
        empty_leaf_bytes = 'chkleaf:\n0\n1\n0\n\n'
        empty_sha1 = osutils.sha_string(empty_leaf_bytes)
        self.assertEqual('8571e09bf1bcc5b9621ce31b3d4c93d6e9a1ed26', empty_sha1)
        root_key = ('sha1:' + empty_sha1,)
        self.assertEqual(empty_leaf_bytes, self.read_bytes(chk_bytes, root_key))
        return root_key
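
    # An informal reading of the leaf bytes used above, inferred from the
    # literals in this file rather than from the serializer's documentation:
    #   "chkleaf:\n"   format marker
    #   "0\n"          maximum_size recorded for the node
    #   "1\n"          key width (elements per key tuple)
    #   "1\n"          number of items stored in the leaf
    #   "\n"           common serialised prefix of all keys (empty here)
    #   "a\x001\nb\n"  one item: key 'a', one value line, value 'b'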

    def assertMapLayoutEqual(self, map_one, map_two):
        """Assert that the internal structure is identical between the maps."""
        map_one._ensure_root()
        node_one_stack = [map_one._root_node]
        map_two._ensure_root()
        node_two_stack = [map_two._root_node]
        while node_one_stack:
            node_one = node_one_stack.pop()
            node_two = node_two_stack.pop()
            if node_one.__class__ != node_two.__class__:
                self.assertEqualDiff(map_one._dump_tree(include_keys=True),
                                     map_two._dump_tree(include_keys=True))
            self.assertEqual(node_one._search_prefix,
                             node_two._search_prefix)
            if isinstance(node_one, InternalNode):
                # Internal nodes must have identical references
                self.assertEqual(sorted(node_one._items.keys()),
                                 sorted(node_two._items.keys()))
                node_one_stack.extend([n for n, _ in
                                       node_one._iter_nodes(map_one._store)])
                node_two_stack.extend([n for n, _ in
                                       node_two._iter_nodes(map_two._store)])
            else:
                # Leaf nodes must have identical contents
                self.assertEqual(node_one._items, node_two._items)
        self.assertEquals([], node_two_stack)

    def assertCanonicalForm(self, chkmap):
        """Assert that the chkmap is in 'canonical' form.

        We do this by adding all of the key value pairs from scratch, both in
        forward order and reverse order, and assert that the final tree layout
        is identical.
        """
        items = list(chkmap.iteritems())
        map_forward = chk_map.CHKMap(None, None)
        map_forward._root_node.set_maximum_size(chkmap._root_node.maximum_size)
        for key, value in items:
            map_forward.map(key, value)
        self.assertMapLayoutEqual(map_forward, chkmap)
        map_reverse = chk_map.CHKMap(None, None)
        map_reverse._root_node.set_maximum_size(chkmap._root_node.maximum_size)
        for key, value in reversed(items):
            map_reverse.map(key, value)
        self.assertMapLayoutEqual(map_reverse, chkmap)
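
    # Canonical form matters because a CHKMap is addressed purely by content:
    # two maps holding the same key/value pairs should serialise to the same
    # root key regardless of the order the pairs were mapped or unmapped in.
    # test_apply_delta_is_deterministic below exercises exactly that property.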
def test_assert_map_layout_equal(self):
404
store = self.get_chk_bytes()
405
map_one = CHKMap(store, None)
406
map_one._root_node.set_maximum_size(20)
407
map_two = CHKMap(store, None)
408
map_two._root_node.set_maximum_size(20)
409
self.assertMapLayoutEqual(map_one, map_two)
410
map_one.map('aaa', 'value')
411
self.assertRaises(AssertionError,
412
self.assertMapLayoutEqual, map_one, map_two)
413
map_two.map('aaa', 'value')
414
self.assertMapLayoutEqual(map_one, map_two)
415
# Split the tree, so we ensure that internal nodes and leaf nodes are
417
map_one.map('aab', 'value')
418
self.assertIsInstance(map_one._root_node, InternalNode)
419
self.assertRaises(AssertionError,
420
self.assertMapLayoutEqual, map_one, map_two)
421
map_two.map('aab', 'value')
422
self.assertMapLayoutEqual(map_one, map_two)
423
map_one.map('aac', 'value')
424
self.assertRaises(AssertionError,
425
self.assertMapLayoutEqual, map_one, map_two)
426
self.assertCanonicalForm(map_one)
428
def test_from_dict_empty(self):
429
chk_bytes = self.get_chk_bytes()
430
root_key = CHKMap.from_dict(chk_bytes, {})
431
# Check the data was saved and inserted correctly.
432
expected_root_key = self.assertHasEmptyMap(chk_bytes)
433
self.assertEqual(expected_root_key, root_key)
435
def test_from_dict_ab(self):
436
chk_bytes = self.get_chk_bytes()
437
root_key = CHKMap.from_dict(chk_bytes, {"a": "b"})
438
# Check the data was saved and inserted correctly.
439
expected_root_key = self.assertHasABMap(chk_bytes)
440
self.assertEqual(expected_root_key, root_key)
442
def test_apply_empty_ab(self):
443
# applying a delta (None, "a", "b") to an empty chkmap generates the
444
# same map as from_dict_ab.
445
chk_bytes = self.get_chk_bytes()
446
root_key = CHKMap.from_dict(chk_bytes, {})
447
chkmap = CHKMap(chk_bytes, root_key)
448
new_root = chkmap.apply_delta([(None, "a", "b")])
449
# Check the data was saved and inserted correctly.
450
expected_root_key = self.assertHasABMap(chk_bytes)
451
self.assertEqual(expected_root_key, new_root)
452
# The update should have left us with an in memory root node, with an
454
self.assertEqual(new_root, chkmap._root_node._key)
456
def test_apply_ab_empty(self):
457
# applying a delta ("a", None, None) to a map with 'a' in it generates
459
chk_bytes = self.get_chk_bytes()
460
root_key = CHKMap.from_dict(chk_bytes, {("a",):"b"})
461
chkmap = CHKMap(chk_bytes, root_key)
462
new_root = chkmap.apply_delta([(("a",), None, None)])
463
# Check the data was saved and inserted correctly.
464
expected_root_key = self.assertHasEmptyMap(chk_bytes)
465
self.assertEqual(expected_root_key, new_root)
466
# The update should have left us with an in memory root node, with an
468
self.assertEqual(new_root, chkmap._root_node._key)
470
def test_apply_new_keys_must_be_new(self):
471
# applying a delta (None, "a", "b") to a map with 'a' in it generates
473
chk_bytes = self.get_chk_bytes()
474
root_key = CHKMap.from_dict(chk_bytes, {("a",):"b"})
475
chkmap = CHKMap(chk_bytes, root_key)
476
self.assertRaises(errors.InconsistentDelta, chkmap.apply_delta,
477
[(None, ("a",), "b")])
478
# As an error occured, the update should have left us without changing
479
# anything (the root should be unchanged).
480
self.assertEqual(root_key, chkmap._root_node._key)
482
def test_apply_delta_is_deterministic(self):
483
chk_bytes = self.get_chk_bytes()
484
chkmap1 = CHKMap(chk_bytes, None)
485
chkmap1._root_node.set_maximum_size(10)
486
chkmap1.apply_delta([(None, ('aaa',), 'common'),
487
(None, ('bba',), 'target2'),
488
(None, ('bbb',), 'common')])
489
root_key1 = chkmap1._save()
490
self.assertCanonicalForm(chkmap1)
492
chkmap2 = CHKMap(chk_bytes, None)
493
chkmap2._root_node.set_maximum_size(10)
494
chkmap2.apply_delta([(None, ('bbb',), 'common'),
495
(None, ('bba',), 'target2'),
496
(None, ('aaa',), 'common')])
497
root_key2 = chkmap2._save()
498
self.assertEqualDiff(chkmap1._dump_tree(include_keys=True),
499
chkmap2._dump_tree(include_keys=True))
500
self.assertEqual(root_key1, root_key2)
501
self.assertCanonicalForm(chkmap2)
503
def test_stable_splitting(self):
504
store = self.get_chk_bytes()
505
chkmap = CHKMap(store, None)
506
# Should fit 2 keys per LeafNode
507
chkmap._root_node.set_maximum_size(35)
508
chkmap.map(('aaa',), 'v')
509
self.assertEqualDiff("'' LeafNode\n"
512
chkmap.map(('aab',), 'v')
513
self.assertEqualDiff("'' LeafNode\n"
517
self.assertCanonicalForm(chkmap)
519
# Creates a new internal node, and splits the others into leaves
520
chkmap.map(('aac',), 'v')
521
self.assertEqualDiff("'' InternalNode\n"
529
self.assertCanonicalForm(chkmap)
531
# Splits again, because it can't fit in the current structure
532
chkmap.map(('bbb',), 'v')
533
self.assertEqualDiff("'' InternalNode\n"
534
" 'a' InternalNode\n"
544
self.assertCanonicalForm(chkmap)
546
def test_map_splits_with_longer_key(self):
547
store = self.get_chk_bytes()
548
chkmap = CHKMap(store, None)
549
# Should fit 1 key per LeafNode
550
chkmap._root_node.set_maximum_size(10)
551
chkmap.map(('aaa',), 'v')
552
chkmap.map(('aaaa',), 'v')
553
self.assertCanonicalForm(chkmap)
554
self.assertIsInstance(chkmap._root_node, InternalNode)
556
def test_with_linefeed_in_key(self):
557
store = self.get_chk_bytes()
558
chkmap = CHKMap(store, None)
559
# Should fit 1 key per LeafNode
560
chkmap._root_node.set_maximum_size(10)
561
chkmap.map(('a\ra',), 'val1')
562
chkmap.map(('a\rb',), 'val2')
563
chkmap.map(('ac',), 'val3')
564
self.assertCanonicalForm(chkmap)
565
self.assertEqualDiff("'' InternalNode\n"
566
" 'a\\r' InternalNode\n"
567
" 'a\\ra' LeafNode\n"
568
" ('a\\ra',) 'val1'\n"
569
" 'a\\rb' LeafNode\n"
570
" ('a\\rb',) 'val2'\n"
574
# We should also successfully serialise and deserialise these items
575
root_key = chkmap._save()
576
chkmap = CHKMap(store, root_key)
577
self.assertEqualDiff("'' InternalNode\n"
578
" 'a\\r' InternalNode\n"
579
" 'a\\ra' LeafNode\n"
580
" ('a\\ra',) 'val1'\n"
581
" 'a\\rb' LeafNode\n"
582
" ('a\\rb',) 'val2'\n"
587
def test_deep_splitting(self):
588
store = self.get_chk_bytes()
589
chkmap = CHKMap(store, None)
590
# Should fit 2 keys per LeafNode
591
chkmap._root_node.set_maximum_size(40)
592
chkmap.map(('aaaaaaaa',), 'v')
593
chkmap.map(('aaaaabaa',), 'v')
594
self.assertEqualDiff("'' LeafNode\n"
595
" ('aaaaaaaa',) 'v'\n"
596
" ('aaaaabaa',) 'v'\n",
598
chkmap.map(('aaabaaaa',), 'v')
599
chkmap.map(('aaababaa',), 'v')
600
self.assertEqualDiff("'' InternalNode\n"
602
" ('aaaaaaaa',) 'v'\n"
603
" ('aaaaabaa',) 'v'\n"
605
" ('aaabaaaa',) 'v'\n"
606
" ('aaababaa',) 'v'\n",
608
chkmap.map(('aaabacaa',), 'v')
609
chkmap.map(('aaabadaa',), 'v')
610
self.assertEqualDiff("'' InternalNode\n"
612
" ('aaaaaaaa',) 'v'\n"
613
" ('aaaaabaa',) 'v'\n"
614
" 'aaab' InternalNode\n"
615
" 'aaabaa' LeafNode\n"
616
" ('aaabaaaa',) 'v'\n"
617
" 'aaabab' LeafNode\n"
618
" ('aaababaa',) 'v'\n"
619
" 'aaabac' LeafNode\n"
620
" ('aaabacaa',) 'v'\n"
621
" 'aaabad' LeafNode\n"
622
" ('aaabadaa',) 'v'\n",
624
chkmap.map(('aaababba',), 'val')
625
chkmap.map(('aaababca',), 'val')
626
self.assertEqualDiff("'' InternalNode\n"
628
" ('aaaaaaaa',) 'v'\n"
629
" ('aaaaabaa',) 'v'\n"
630
" 'aaab' InternalNode\n"
631
" 'aaabaa' LeafNode\n"
632
" ('aaabaaaa',) 'v'\n"
633
" 'aaabab' InternalNode\n"
634
" 'aaababa' LeafNode\n"
635
" ('aaababaa',) 'v'\n"
636
" 'aaababb' LeafNode\n"
637
" ('aaababba',) 'val'\n"
638
" 'aaababc' LeafNode\n"
639
" ('aaababca',) 'val'\n"
640
" 'aaabac' LeafNode\n"
641
" ('aaabacaa',) 'v'\n"
642
" 'aaabad' LeafNode\n"
643
" ('aaabadaa',) 'v'\n",
645
# Now we add a node that should fit around an existing InternalNode,
646
# but has a slightly different key prefix, which causes a new
648
chkmap.map(('aaabDaaa',), 'v')
649
self.assertEqualDiff("'' InternalNode\n"
651
" ('aaaaaaaa',) 'v'\n"
652
" ('aaaaabaa',) 'v'\n"
653
" 'aaab' InternalNode\n"
654
" 'aaabD' LeafNode\n"
655
" ('aaabDaaa',) 'v'\n"
656
" 'aaaba' InternalNode\n"
657
" 'aaabaa' LeafNode\n"
658
" ('aaabaaaa',) 'v'\n"
659
" 'aaabab' InternalNode\n"
660
" 'aaababa' LeafNode\n"
661
" ('aaababaa',) 'v'\n"
662
" 'aaababb' LeafNode\n"
663
" ('aaababba',) 'val'\n"
664
" 'aaababc' LeafNode\n"
665
" ('aaababca',) 'val'\n"
666
" 'aaabac' LeafNode\n"
667
" ('aaabacaa',) 'v'\n"
668
" 'aaabad' LeafNode\n"
669
" ('aaabadaa',) 'v'\n",
672
def test_map_collapses_if_size_changes(self):
673
store = self.get_chk_bytes()
674
chkmap = CHKMap(store, None)
675
# Should fit 2 keys per LeafNode
676
chkmap._root_node.set_maximum_size(35)
677
chkmap.map(('aaa',), 'v')
678
chkmap.map(('aab',), 'very long value that splits')
679
self.assertEqualDiff("'' InternalNode\n"
683
" ('aab',) 'very long value that splits'\n",
685
self.assertCanonicalForm(chkmap)
686
# Now changing the value to something small should cause a rebuild
687
chkmap.map(('aab',), 'v')
688
self.assertEqualDiff("'' LeafNode\n"
692
self.assertCanonicalForm(chkmap)
694
def test_map_double_deep_collapses(self):
695
store = self.get_chk_bytes()
696
chkmap = CHKMap(store, None)
697
# Should fit 3 small keys per LeafNode
698
chkmap._root_node.set_maximum_size(40)
699
chkmap.map(('aaa',), 'v')
700
chkmap.map(('aab',), 'very long value that splits')
701
chkmap.map(('abc',), 'v')
702
self.assertEqualDiff("'' InternalNode\n"
703
" 'aa' InternalNode\n"
707
" ('aab',) 'very long value that splits'\n"
711
chkmap.map(('aab',), 'v')
712
self.assertCanonicalForm(chkmap)
713
self.assertEqualDiff("'' LeafNode\n"
719
def test_stable_unmap(self):
720
store = self.get_chk_bytes()
721
chkmap = CHKMap(store, None)
722
# Should fit 2 keys per LeafNode
723
chkmap._root_node.set_maximum_size(35)
724
chkmap.map(('aaa',), 'v')
725
chkmap.map(('aab',), 'v')
726
self.assertEqualDiff("'' LeafNode\n"
730
# Creates a new internal node, and splits the others into leaves
731
chkmap.map(('aac',), 'v')
732
self.assertEqualDiff("'' InternalNode\n"
740
self.assertCanonicalForm(chkmap)
741
# Now lets unmap one of the keys, and assert that we collapse the
743
chkmap.unmap(('aac',))
744
self.assertEqualDiff("'' LeafNode\n"
748
self.assertCanonicalForm(chkmap)
750
def test_unmap_double_deep(self):
751
store = self.get_chk_bytes()
752
chkmap = CHKMap(store, None)
753
# Should fit 3 keys per LeafNode
754
chkmap._root_node.set_maximum_size(40)
755
chkmap.map(('aaa',), 'v')
756
chkmap.map(('aaab',), 'v')
757
chkmap.map(('aab',), 'very long value')
758
chkmap.map(('abc',), 'v')
759
self.assertEqualDiff("'' InternalNode\n"
760
" 'aa' InternalNode\n"
765
" ('aab',) 'very long value'\n"
769
# Removing the 'aab' key should cause everything to collapse back to a
771
chkmap.unmap(('aab',))
772
self.assertEqualDiff("'' LeafNode\n"
778
def test_unmap_double_deep_non_empty_leaf(self):
779
store = self.get_chk_bytes()
780
chkmap = CHKMap(store, None)
781
# Should fit 3 keys per LeafNode
782
chkmap._root_node.set_maximum_size(40)
783
chkmap.map(('aaa',), 'v')
784
chkmap.map(('aab',), 'long value')
785
chkmap.map(('aabb',), 'v')
786
chkmap.map(('abc',), 'v')
787
self.assertEqualDiff("'' InternalNode\n"
788
" 'aa' InternalNode\n"
792
" ('aab',) 'long value'\n"
797
# Removing the 'aab' key should cause everything to collapse back to a
799
chkmap.unmap(('aab',))
800
self.assertEqualDiff("'' LeafNode\n"
806
def test_unmap_with_known_internal_node_doesnt_page(self):
807
store = self.get_chk_bytes()
808
chkmap = CHKMap(store, None)
809
# Should fit 3 keys per LeafNode
810
chkmap._root_node.set_maximum_size(30)
811
chkmap.map(('aaa',), 'v')
812
chkmap.map(('aab',), 'v')
813
chkmap.map(('aac',), 'v')
814
chkmap.map(('abc',), 'v')
815
chkmap.map(('acd',), 'v')
816
self.assertEqualDiff("'' InternalNode\n"
817
" 'aa' InternalNode\n"
829
# Save everything to the map, and start over
830
chkmap = CHKMap(store, chkmap._save())
831
# Mapping an 'aa' key loads the internal node, but should not map the
832
# 'ab' and 'ac' nodes
833
chkmap.map(('aad',), 'v')
834
self.assertIsInstance(chkmap._root_node._items['aa'], InternalNode)
835
self.assertIsInstance(chkmap._root_node._items['ab'], StaticTuple)
836
self.assertIsInstance(chkmap._root_node._items['ac'], StaticTuple)
837
# Unmapping 'acd' can notice that 'aa' is an InternalNode and not have
839
chkmap.unmap(('acd',))
840
self.assertIsInstance(chkmap._root_node._items['aa'], InternalNode)
841
self.assertIsInstance(chkmap._root_node._items['ab'], StaticTuple)
843
def test_unmap_without_fitting_doesnt_page_in(self):
844
store = self.get_chk_bytes()
845
chkmap = CHKMap(store, None)
846
# Should fit 2 keys per LeafNode
847
chkmap._root_node.set_maximum_size(20)
848
chkmap.map(('aaa',), 'v')
849
chkmap.map(('aab',), 'v')
850
self.assertEqualDiff("'' InternalNode\n"
856
# Save everything to the map, and start over
857
chkmap = CHKMap(store, chkmap._save())
858
chkmap.map(('aac',), 'v')
859
chkmap.map(('aad',), 'v')
860
chkmap.map(('aae',), 'v')
861
chkmap.map(('aaf',), 'v')
862
# At this point, the previous nodes should not be paged in, but the
863
# newly added nodes would be
864
self.assertIsInstance(chkmap._root_node._items['aaa'], StaticTuple)
865
self.assertIsInstance(chkmap._root_node._items['aab'], StaticTuple)
866
self.assertIsInstance(chkmap._root_node._items['aac'], LeafNode)
867
self.assertIsInstance(chkmap._root_node._items['aad'], LeafNode)
868
self.assertIsInstance(chkmap._root_node._items['aae'], LeafNode)
869
self.assertIsInstance(chkmap._root_node._items['aaf'], LeafNode)
870
# Now unmapping one of the new nodes will use only the already-paged-in
871
# nodes to determine that we don't need to do more.
872
chkmap.unmap(('aaf',))
873
self.assertIsInstance(chkmap._root_node._items['aaa'], StaticTuple)
874
self.assertIsInstance(chkmap._root_node._items['aab'], StaticTuple)
875
self.assertIsInstance(chkmap._root_node._items['aac'], LeafNode)
876
self.assertIsInstance(chkmap._root_node._items['aad'], LeafNode)
877
self.assertIsInstance(chkmap._root_node._items['aae'], LeafNode)
879
def test_unmap_pages_in_if_necessary(self):
880
store = self.get_chk_bytes()
881
chkmap = CHKMap(store, None)
882
# Should fit 2 keys per LeafNode
883
chkmap._root_node.set_maximum_size(30)
884
chkmap.map(('aaa',), 'val')
885
chkmap.map(('aab',), 'val')
886
chkmap.map(('aac',), 'val')
887
self.assertEqualDiff("'' InternalNode\n"
895
root_key = chkmap._save()
896
# Save everything to the map, and start over
897
chkmap = CHKMap(store, root_key)
898
chkmap.map(('aad',), 'v')
899
# At this point, the previous nodes should not be paged in, but the
900
# newly added node would be
901
self.assertIsInstance(chkmap._root_node._items['aaa'], StaticTuple)
902
self.assertIsInstance(chkmap._root_node._items['aab'], StaticTuple)
903
self.assertIsInstance(chkmap._root_node._items['aac'], StaticTuple)
904
self.assertIsInstance(chkmap._root_node._items['aad'], LeafNode)
905
# Unmapping the new node will check the existing nodes to see if they
907
# Clear the page cache so we ensure we have to read all the children
908
chk_map.clear_cache()
909
chkmap.unmap(('aad',))
910
self.assertIsInstance(chkmap._root_node._items['aaa'], LeafNode)
911
self.assertIsInstance(chkmap._root_node._items['aab'], LeafNode)
912
self.assertIsInstance(chkmap._root_node._items['aac'], LeafNode)
914
def test_unmap_pages_in_from_page_cache(self):
915
store = self.get_chk_bytes()
916
chkmap = CHKMap(store, None)
917
# Should fit 2 keys per LeafNode
918
chkmap._root_node.set_maximum_size(30)
919
chkmap.map(('aaa',), 'val')
920
chkmap.map(('aab',), 'val')
921
chkmap.map(('aac',), 'val')
922
root_key = chkmap._save()
923
# Save everything to the map, and start over
924
chkmap = CHKMap(store, root_key)
925
chkmap.map(('aad',), 'val')
926
self.assertEqualDiff("'' InternalNode\n"
936
# Save everything to the map, start over after _dump_tree
937
chkmap = CHKMap(store, root_key)
938
chkmap.map(('aad',), 'v')
939
# At this point, the previous nodes should not be paged in, but the
940
# newly added node would be
941
self.assertIsInstance(chkmap._root_node._items['aaa'], StaticTuple)
942
self.assertIsInstance(chkmap._root_node._items['aab'], StaticTuple)
943
self.assertIsInstance(chkmap._root_node._items['aac'], StaticTuple)
944
self.assertIsInstance(chkmap._root_node._items['aad'], LeafNode)
945
# Now clear the page cache, and only include 2 of the children in the
947
aab_key = chkmap._root_node._items['aab']
948
aab_bytes = chk_map._get_cache()[aab_key]
949
aac_key = chkmap._root_node._items['aac']
950
aac_bytes = chk_map._get_cache()[aac_key]
951
chk_map.clear_cache()
952
chk_map._get_cache()[aab_key] = aab_bytes
953
chk_map._get_cache()[aac_key] = aac_bytes
955
# Unmapping the new node will check the nodes from the page cache
956
# first, and not have to read in 'aaa'
957
chkmap.unmap(('aad',))
958
self.assertIsInstance(chkmap._root_node._items['aaa'], StaticTuple)
959
self.assertIsInstance(chkmap._root_node._items['aab'], LeafNode)
960
self.assertIsInstance(chkmap._root_node._items['aac'], LeafNode)
962
def test_unmap_uses_existing_items(self):
963
store = self.get_chk_bytes()
964
chkmap = CHKMap(store, None)
965
# Should fit 2 keys per LeafNode
966
chkmap._root_node.set_maximum_size(30)
967
chkmap.map(('aaa',), 'val')
968
chkmap.map(('aab',), 'val')
969
chkmap.map(('aac',), 'val')
970
root_key = chkmap._save()
971
# Save everything to the map, and start over
972
chkmap = CHKMap(store, root_key)
973
chkmap.map(('aad',), 'val')
974
chkmap.map(('aae',), 'val')
975
chkmap.map(('aaf',), 'val')
976
# At this point, the previous nodes should not be paged in, but the
977
# newly added node would be
978
self.assertIsInstance(chkmap._root_node._items['aaa'], StaticTuple)
979
self.assertIsInstance(chkmap._root_node._items['aab'], StaticTuple)
980
self.assertIsInstance(chkmap._root_node._items['aac'], StaticTuple)
981
self.assertIsInstance(chkmap._root_node._items['aad'], LeafNode)
982
self.assertIsInstance(chkmap._root_node._items['aae'], LeafNode)
983
self.assertIsInstance(chkmap._root_node._items['aaf'], LeafNode)
985
# Unmapping a new node will see the other nodes that are already in
986
# memory, and not need to page in anything else
987
chkmap.unmap(('aad',))
988
self.assertIsInstance(chkmap._root_node._items['aaa'], StaticTuple)
989
self.assertIsInstance(chkmap._root_node._items['aab'], StaticTuple)
990
self.assertIsInstance(chkmap._root_node._items['aac'], StaticTuple)
991
self.assertIsInstance(chkmap._root_node._items['aae'], LeafNode)
992
self.assertIsInstance(chkmap._root_node._items['aaf'], LeafNode)
994
def test_iter_changes_empty_ab(self):
995
# Asking for changes between an empty dict to a dict with keys returns
997
basis = self._get_map({}, maximum_size=10)
998
target = self._get_map(
999
{('a',): 'content here', ('b',): 'more content'},
1000
chk_bytes=basis._store, maximum_size=10)
1001
self.assertEqual([(('a',), None, 'content here'),
1002
(('b',), None, 'more content')],
1003
sorted(list(target.iter_changes(basis))))
1005
def test_iter_changes_ab_empty(self):
1006
# Asking for changes between a dict with keys to an empty dict returns
1008
basis = self._get_map({('a',): 'content here', ('b',): 'more content'},
1010
target = self._get_map({}, chk_bytes=basis._store, maximum_size=10)
1011
self.assertEqual([(('a',), 'content here', None),
1012
(('b',), 'more content', None)],
1013
sorted(list(target.iter_changes(basis))))
1015
def test_iter_changes_empty_empty_is_empty(self):
1016
basis = self._get_map({}, maximum_size=10)
1017
target = self._get_map({}, chk_bytes=basis._store, maximum_size=10)
1018
self.assertEqual([], sorted(list(target.iter_changes(basis))))
1020
def test_iter_changes_ab_ab_is_empty(self):
1021
basis = self._get_map({('a',): 'content here', ('b',): 'more content'},
1023
target = self._get_map(
1024
{('a',): 'content here', ('b',): 'more content'},
1025
chk_bytes=basis._store, maximum_size=10)
1026
self.assertEqual([], sorted(list(target.iter_changes(basis))))
1028
def test_iter_changes_ab_ab_nodes_not_loaded(self):
1029
basis = self._get_map({('a',): 'content here', ('b',): 'more content'},
1031
target = self._get_map(
1032
{('a',): 'content here', ('b',): 'more content'},
1033
chk_bytes=basis._store, maximum_size=10)
1034
list(target.iter_changes(basis))
1035
self.assertIsInstance(target._root_node, StaticTuple)
1036
self.assertIsInstance(basis._root_node, StaticTuple)
1038
def test_iter_changes_ab_ab_changed_values_shown(self):
1039
basis = self._get_map({('a',): 'content here', ('b',): 'more content'},
1041
target = self._get_map(
1042
{('a',): 'content here', ('b',): 'different content'},
1043
chk_bytes=basis._store, maximum_size=10)
1044
result = sorted(list(target.iter_changes(basis)))
1045
self.assertEqual([(('b',), 'more content', 'different content')],
1048
def test_iter_changes_mixed_node_length(self):
1049
# When one side has different node lengths than the other, common
1050
# but different keys still need to be show, and new-and-old included
1052
# aaa - common unaltered
1053
# aab - common altered
1057
# aaa to be not loaded (later test)
1058
# aab, b, at to be returned.
1059
# basis splits at byte 0,1,2, aaa is commonb is basis only
1060
basis_dict = {('aaa',): 'foo bar',
1061
('aab',): 'common altered a', ('b',): 'foo bar b'}
1062
# target splits at byte 1,2, at is target only
1063
target_dict = {('aaa',): 'foo bar',
1064
('aab',): 'common altered b', ('at',): 'foo bar t'}
1066
(('aab',), 'common altered a', 'common altered b'),
1067
(('at',), None, 'foo bar t'),
1068
(('b',), 'foo bar b', None),
1070
basis = self._get_map(basis_dict, maximum_size=10)
1071
target = self._get_map(target_dict, maximum_size=10,
1072
chk_bytes=basis._store)
1073
self.assertEqual(changes, sorted(list(target.iter_changes(basis))))
1075
def test_iter_changes_common_pages_not_loaded(self):
1076
# aaa - common unaltered
1077
# aab - common altered
1081
# aaa to be not loaded
1082
# aaa not to be in result.
1083
basis_dict = {('aaa',): 'foo bar',
1084
('aab',): 'common altered a', ('b',): 'foo bar b'}
1085
# target splits at byte 1, at is target only
1086
target_dict = {('aaa',): 'foo bar',
1087
('aab',): 'common altered b', ('at',): 'foo bar t'}
1088
basis = self._get_map(basis_dict, maximum_size=10)
1089
target = self._get_map(target_dict, maximum_size=10,
1090
chk_bytes=basis._store)
1091
basis_get = basis._store.get_record_stream
1092
def get_record_stream(keys, order, fulltext):
1093
if ('sha1:1adf7c0d1b9140ab5f33bb64c6275fa78b1580b7',) in keys:
1094
self.fail("'aaa' pointer was followed %r" % keys)
1095
return basis_get(keys, order, fulltext)
1096
basis._store.get_record_stream = get_record_stream
1097
result = sorted(list(target.iter_changes(basis)))
1098
for change in result:
1099
if change[0] == ('aaa',):
1100
self.fail("Found unexpected change: %s" % change)
1102
def test_iter_changes_unchanged_keys_in_multi_key_leafs_ignored(self):
1103
# Within a leaf there are no hash's to exclude keys, make sure multi
1104
# value leaf nodes are handled well.
1105
basis_dict = {('aaa',): 'foo bar',
1106
('aab',): 'common altered a', ('b',): 'foo bar b'}
1107
target_dict = {('aaa',): 'foo bar',
1108
('aab',): 'common altered b', ('at',): 'foo bar t'}
1110
(('aab',), 'common altered a', 'common altered b'),
1111
(('at',), None, 'foo bar t'),
1112
(('b',), 'foo bar b', None),
1114
basis = self._get_map(basis_dict)
1115
target = self._get_map(target_dict, chk_bytes=basis._store)
1116
self.assertEqual(changes, sorted(list(target.iter_changes(basis))))
1118
def test_iteritems_empty(self):
1119
chk_bytes = self.get_chk_bytes()
1120
root_key = CHKMap.from_dict(chk_bytes, {})
1121
chkmap = CHKMap(chk_bytes, root_key)
1122
self.assertEqual([], list(chkmap.iteritems()))
1124
def test_iteritems_two_items(self):
1125
chk_bytes = self.get_chk_bytes()
1126
root_key = CHKMap.from_dict(chk_bytes,
1127
{"a":"content here", "b":"more content"})
1128
chkmap = CHKMap(chk_bytes, root_key)
1129
self.assertEqual([(("a",), "content here"), (("b",), "more content")],
1130
sorted(list(chkmap.iteritems())))
1132
def test_iteritems_selected_one_of_two_items(self):
1133
chkmap = self._get_map( {("a",):"content here", ("b",):"more content"})
1134
self.assertEqual({("a",): "content here"},
1135
self.to_dict(chkmap, [("a",)]))
1137
def test_iteritems_keys_prefixed_by_2_width_nodes(self):
1138
chkmap = self._get_map(
1139
{("a","a"):"content here", ("a", "b",):"more content",
1140
("b", ""): 'boring content'},
1141
maximum_size=10, key_width=2)
1143
{("a", "a"): "content here", ("a", "b"): 'more content'},
1144
self.to_dict(chkmap, [("a",)]))
1146
def test_iteritems_keys_prefixed_by_2_width_nodes_hashed(self):
1147
search_key_func = chk_map.search_key_registry.get('hash-16-way')
1148
self.assertEqual('E8B7BE43\x00E8B7BE43',
1149
search_key_func(StaticTuple('a', 'a')))
1150
self.assertEqual('E8B7BE43\x0071BEEFF9',
1151
search_key_func(StaticTuple('a', 'b')))
1152
self.assertEqual('71BEEFF9\x0000000000',
1153
search_key_func(StaticTuple('b', '')))
1154
chkmap = self._get_map(
1155
{("a","a"):"content here", ("a", "b",):"more content",
1156
("b", ""): 'boring content'},
1157
maximum_size=10, key_width=2, search_key_func=search_key_func)
1159
{("a", "a"): "content here", ("a", "b"): 'more content'},
1160
self.to_dict(chkmap, [("a",)]))
1162
def test_iteritems_keys_prefixed_by_2_width_one_leaf(self):
1163
chkmap = self._get_map(
1164
{("a","a"):"content here", ("a", "b",):"more content",
1165
("b", ""): 'boring content'}, key_width=2)
1167
{("a", "a"): "content here", ("a", "b"): 'more content'},
1168
self.to_dict(chkmap, [("a",)]))
1170
def test___len__empty(self):
1171
chkmap = self._get_map({})
1172
self.assertEqual(0, len(chkmap))
1174
def test___len__2(self):
1175
chkmap = self._get_map({("foo",):"bar", ("gam",):"quux"})
1176
self.assertEqual(2, len(chkmap))
1178
def test_max_size_100_bytes_new(self):
1179
# When there is a 100 byte upper node limit, a tree is formed.
1180
chkmap = self._get_map({("k1"*50,):"v1", ("k2"*50,):"v2"}, maximum_size=100)
1181
# We expect three nodes:
1182
# A root, with two children, and with two key prefixes - k1 to one, and
1183
# k2 to the other as our node splitting is only just being developed.
1184
# The maximum size should be embedded
1185
chkmap._ensure_root()
1186
self.assertEqual(100, chkmap._root_node.maximum_size)
1187
self.assertEqual(1, chkmap._root_node._key_width)
1188
# There should be two child nodes, and prefix of 2(bytes):
1189
self.assertEqual(2, len(chkmap._root_node._items))
1190
self.assertEqual("k", chkmap._root_node._compute_search_prefix())
1191
# The actual nodes pointed at will change as serialisers change; so
1192
# here we test that the key prefix is correct; then load the nodes and
1193
# check they have the right pointed at key; whether they have the
1194
# pointed at value inline or not is also unrelated to this test so we
1195
# don't check that in detail - rather we just check the aggregate
1197
nodes = sorted(chkmap._root_node._items.items())
1200
self.assertEqual('k1', ptr1[0])
1201
self.assertEqual('k2', ptr2[0])
1202
node1 = chk_map._deserialise(chkmap._read_bytes(ptr1[1]), ptr1[1], None)
1203
self.assertIsInstance(node1, LeafNode)
1204
self.assertEqual(1, len(node1))
1205
self.assertEqual({('k1'*50,): 'v1'}, self.to_dict(node1, chkmap._store))
1206
node2 = chk_map._deserialise(chkmap._read_bytes(ptr2[1]), ptr2[1], None)
1207
self.assertIsInstance(node2, LeafNode)
1208
self.assertEqual(1, len(node2))
1209
self.assertEqual({('k2'*50,): 'v2'}, self.to_dict(node2, chkmap._store))
1210
# Having checked we have a good structure, check that the content is
1212
self.assertEqual(2, len(chkmap))
1213
self.assertEqual({("k1"*50,): "v1", ("k2"*50,): "v2"},
1214
self.to_dict(chkmap))
1216
def test_init_root_is_LeafNode_new(self):
1217
chk_bytes = self.get_chk_bytes()
1218
chkmap = CHKMap(chk_bytes, None)
1219
self.assertIsInstance(chkmap._root_node, LeafNode)
1220
self.assertEqual({}, self.to_dict(chkmap))
1221
self.assertEqual(0, len(chkmap))
1223
def test_init_and_save_new(self):
1224
chk_bytes = self.get_chk_bytes()
1225
chkmap = CHKMap(chk_bytes, None)
1226
key = chkmap._save()
1227
leaf_node = LeafNode()
1228
self.assertEqual([key], leaf_node.serialise(chk_bytes))
1230
def test_map_first_item_new(self):
1231
chk_bytes = self.get_chk_bytes()
1232
chkmap = CHKMap(chk_bytes, None)
1233
chkmap.map(("foo,",), "bar")
1234
self.assertEqual({('foo,',): 'bar'}, self.to_dict(chkmap))
1235
self.assertEqual(1, len(chkmap))
1236
key = chkmap._save()
1237
leaf_node = LeafNode()
1238
leaf_node.map(chk_bytes, ("foo,",), "bar")
1239
self.assertEqual([key], leaf_node.serialise(chk_bytes))
1241
def test_unmap_last_item_root_is_leaf_new(self):
1242
chkmap = self._get_map({("k1"*50,): "v1", ("k2"*50,): "v2"})
1243
chkmap.unmap(("k1"*50,))
1244
chkmap.unmap(("k2"*50,))
1245
self.assertEqual(0, len(chkmap))
1246
self.assertEqual({}, self.to_dict(chkmap))
1247
key = chkmap._save()
1248
leaf_node = LeafNode()
1249
self.assertEqual([key], leaf_node.serialise(chkmap._store))
1251
def test__dump_tree(self):
1252
chkmap = self._get_map({("aaa",): "value1", ("aab",): "value2",
1253
("bbb",): "value3",},
1255
self.assertEqualDiff("'' InternalNode\n"
1256
" 'a' InternalNode\n"
1258
" ('aaa',) 'value1'\n"
1260
" ('aab',) 'value2'\n"
1262
" ('bbb',) 'value3'\n",
1263
chkmap._dump_tree())
1264
self.assertEqualDiff("'' InternalNode\n"
1265
" 'a' InternalNode\n"
1267
" ('aaa',) 'value1'\n"
1269
" ('aab',) 'value2'\n"
1271
" ('bbb',) 'value3'\n",
1272
chkmap._dump_tree())
1273
self.assertEqualDiff(
1274
"'' InternalNode sha1:0690d471eb0a624f359797d0ee4672bd68f4e236\n"
1275
" 'a' InternalNode sha1:1514c35503da9418d8fd90c1bed553077cb53673\n"
1276
" 'aaa' LeafNode sha1:4cc5970454d40b4ce297a7f13ddb76f63b88fefb\n"
1277
" ('aaa',) 'value1'\n"
1278
" 'aab' LeafNode sha1:1d68bc90914ef8a3edbcc8bb28b00cb4fea4b5e2\n"
1279
" ('aab',) 'value2'\n"
1280
" 'b' LeafNode sha1:3686831435b5596515353364eab0399dc45d49e7\n"
1281
" ('bbb',) 'value3'\n",
1282
chkmap._dump_tree(include_keys=True))
1284
def test__dump_tree_in_progress(self):
1285
chkmap = self._get_map({("aaa",): "value1", ("aab",): "value2"},
1287
chkmap.map(('bbb',), 'value3')
1288
self.assertEqualDiff("'' InternalNode\n"
1289
" 'a' InternalNode\n"
1291
" ('aaa',) 'value1'\n"
1293
" ('aab',) 'value2'\n"
1295
" ('bbb',) 'value3'\n",
1296
chkmap._dump_tree())
1297
# For things that are updated by adding 'bbb', we don't have a sha key
1298
# for them yet, so they are listed as None
1299
self.assertEqualDiff(
1300
"'' InternalNode None\n"
1301
" 'a' InternalNode sha1:6b0d881dd739a66f733c178b24da64395edfaafd\n"
1302
" 'aaa' LeafNode sha1:40b39a08d895babce17b20ae5f62d187eaa4f63a\n"
1303
" ('aaa',) 'value1'\n"
1304
" 'aab' LeafNode sha1:ad1dc7c4e801302c95bf1ba7b20bc45e548cd51a\n"
1305
" ('aab',) 'value2'\n"
1306
" 'b' LeafNode None\n"
1307
" ('bbb',) 'value3'\n",
1308
chkmap._dump_tree(include_keys=True))


def _search_key_single(key):
    """A search key function that maps all nodes to the same value"""
    # Any constant works here: the point is that every key collides.
    return 'x'


def _test_search_key(key):
    return 'test:' + '\x00'.join(key)
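
# In these tests a "search key function" is any callable that flattens a key
# tuple into the string the map uses to route entries between nodes.  The
# stock behaviours exercised below are the flat default (elements joined with
# NULs) and the hash-based _search_key_16 / _search_key_255.  The two helpers
# above are deliberately minimal stand-ins: one collapses every key to force
# collisions, the other adds a 'test:' prefix so the routing is easy to read
# in _dump_tree() output.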
class TestMapSearchKeys(TestCaseWithStore):
1321
def test_default_chk_map_uses_flat_search_key(self):
1322
chkmap = chk_map.CHKMap(self.get_chk_bytes(), None)
1323
self.assertEqual('1',
1324
chkmap._search_key_func(('1',)))
1325
self.assertEqual('1\x002',
1326
chkmap._search_key_func(('1', '2')))
1327
self.assertEqual('1\x002\x003',
1328
chkmap._search_key_func(('1', '2', '3')))
1330
def test_search_key_is_passed_to_root_node(self):
1331
chkmap = chk_map.CHKMap(self.get_chk_bytes(), None,
1332
search_key_func=_test_search_key)
1333
self.assertIs(_test_search_key, chkmap._search_key_func)
1334
self.assertEqual('test:1\x002\x003',
1335
chkmap._search_key_func(('1', '2', '3')))
1336
self.assertEqual('test:1\x002\x003',
1337
chkmap._root_node._search_key(('1', '2', '3')))
1339
def test_search_key_passed_via__ensure_root(self):
1340
chk_bytes = self.get_chk_bytes()
1341
chkmap = chk_map.CHKMap(chk_bytes, None,
1342
search_key_func=_test_search_key)
1343
root_key = chkmap._save()
1344
chkmap = chk_map.CHKMap(chk_bytes, root_key,
1345
search_key_func=_test_search_key)
1346
chkmap._ensure_root()
1347
self.assertEqual('test:1\x002\x003',
1348
chkmap._root_node._search_key(('1', '2', '3')))
1350
def test_search_key_with_internal_node(self):
1351
chk_bytes = self.get_chk_bytes()
1352
chkmap = chk_map.CHKMap(chk_bytes, None,
1353
search_key_func=_test_search_key)
1354
chkmap._root_node.set_maximum_size(10)
1355
chkmap.map(('1',), 'foo')
1356
chkmap.map(('2',), 'bar')
1357
chkmap.map(('3',), 'baz')
1358
self.assertEqualDiff("'' InternalNode\n"
1359
" 'test:1' LeafNode\n"
1361
" 'test:2' LeafNode\n"
1363
" 'test:3' LeafNode\n"
1365
, chkmap._dump_tree())
1366
root_key = chkmap._save()
1367
chkmap = chk_map.CHKMap(chk_bytes, root_key,
1368
search_key_func=_test_search_key)
1369
self.assertEqualDiff("'' InternalNode\n"
1370
" 'test:1' LeafNode\n"
1372
" 'test:2' LeafNode\n"
1374
" 'test:3' LeafNode\n"
1376
, chkmap._dump_tree())
1378
def test_search_key_16(self):
1379
chk_bytes = self.get_chk_bytes()
1380
chkmap = chk_map.CHKMap(chk_bytes, None,
1381
search_key_func=chk_map._search_key_16)
1382
chkmap._root_node.set_maximum_size(10)
1383
chkmap.map(('1',), 'foo')
1384
chkmap.map(('2',), 'bar')
1385
chkmap.map(('3',), 'baz')
1386
self.assertEqualDiff("'' InternalNode\n"
1393
, chkmap._dump_tree())
1394
root_key = chkmap._save()
1395
chkmap = chk_map.CHKMap(chk_bytes, root_key,
1396
search_key_func=chk_map._search_key_16)
1397
# We can get the values back correctly
1398
self.assertEqual([(('1',), 'foo')],
1399
list(chkmap.iteritems([('1',)])))
1400
self.assertEqualDiff("'' InternalNode\n"
1407
, chkmap._dump_tree())
1409
def test_search_key_255(self):
1410
chk_bytes = self.get_chk_bytes()
1411
chkmap = chk_map.CHKMap(chk_bytes, None,
1412
search_key_func=chk_map._search_key_255)
1413
chkmap._root_node.set_maximum_size(10)
1414
chkmap.map(('1',), 'foo')
1415
chkmap.map(('2',), 'bar')
1416
chkmap.map(('3',), 'baz')
1417
self.assertEqualDiff("'' InternalNode\n"
1418
" '\\x1a' LeafNode\n"
1422
" '\\x83' LeafNode\n"
1424
, chkmap._dump_tree())
1425
root_key = chkmap._save()
1426
chkmap = chk_map.CHKMap(chk_bytes, root_key,
1427
search_key_func=chk_map._search_key_255)
1428
# We can get the values back correctly
1429
self.assertEqual([(('1',), 'foo')],
1430
list(chkmap.iteritems([('1',)])))
1431
self.assertEqualDiff("'' InternalNode\n"
1432
" '\\x1a' LeafNode\n"
1436
" '\\x83' LeafNode\n"
1438
, chkmap._dump_tree())
1440
def test_search_key_collisions(self):
1441
chkmap = chk_map.CHKMap(self.get_chk_bytes(), None,
1442
search_key_func=_search_key_single)
1443
# The node will want to expand, but it cannot, because it knows that
1444
# all the keys must map to this node
1445
chkmap._root_node.set_maximum_size(20)
1446
chkmap.map(('1',), 'foo')
1447
chkmap.map(('2',), 'bar')
1448
chkmap.map(('3',), 'baz')
1449
self.assertEqualDiff("'' LeafNode\n"
1453
, chkmap._dump_tree())


class TestLeafNode(TestCaseWithStore):

    def test_current_size_empty(self):
        node = LeafNode()
        self.assertEqual(16, node._current_size())

    def test_current_size_size_changed(self):
        node = LeafNode()
        node.set_maximum_size(10)
        self.assertEqual(17, node._current_size())

    def test_current_size_width_changed(self):
        node = LeafNode()
        node._key_width = 10
        self.assertEqual(17, node._current_size())

    def test_current_size_items(self):
        node = LeafNode()
        base_size = node._current_size()
        node.map(None, ("foo bar",), "baz")
        self.assertEqual(base_size + 14, node._current_size())
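
    # The numbers above line up with the serialised form: an empty leaf
    # ("chkleaf:\n0\n1\n0\n\n") is 16 bytes, growing maximum_size or
    # _key_width to two digits adds one byte, and the serialised item
    # "foo bar\x001\nbaz\n" adds 14.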
def test_deserialise_empty(self):
1479
node = LeafNode.deserialise("chkleaf:\n10\n1\n0\n\n", ("sha1:1234",))
1480
self.assertEqual(0, len(node))
1481
self.assertEqual(10, node.maximum_size)
1482
self.assertEqual(("sha1:1234",), node.key())
1483
self.assertIs(None, node._search_prefix)
1484
self.assertIs(None, node._common_serialised_prefix)
1486
def test_deserialise_items(self):
1487
node = LeafNode.deserialise(
1488
"chkleaf:\n0\n1\n2\n\nfoo bar\x001\nbaz\nquux\x001\nblarh\n",
1490
self.assertEqual(2, len(node))
1491
self.assertEqual([(("foo bar",), "baz"), (("quux",), "blarh")],
1492
sorted(node.iteritems(None)))
1494
def test_deserialise_item_with_null_width_1(self):
1495
node = LeafNode.deserialise(
1496
"chkleaf:\n0\n1\n2\n\nfoo\x001\nbar\x00baz\nquux\x001\nblarh\n",
1498
self.assertEqual(2, len(node))
1499
self.assertEqual([(("foo",), "bar\x00baz"), (("quux",), "blarh")],
1500
sorted(node.iteritems(None)))
1502
def test_deserialise_item_with_null_width_2(self):
1503
node = LeafNode.deserialise(
1504
"chkleaf:\n0\n2\n2\n\nfoo\x001\x001\nbar\x00baz\n"
1505
"quux\x00\x001\nblarh\n",
1507
self.assertEqual(2, len(node))
1508
self.assertEqual([(("foo", "1"), "bar\x00baz"), (("quux", ""), "blarh")],
1509
sorted(node.iteritems(None)))
1511
def test_iteritems_selected_one_of_two_items(self):
1512
node = LeafNode.deserialise(
1513
"chkleaf:\n0\n1\n2\n\nfoo bar\x001\nbaz\nquux\x001\nblarh\n",
1515
self.assertEqual(2, len(node))
1516
self.assertEqual([(("quux",), "blarh")],
1517
sorted(node.iteritems(None, [("quux",), ("qaz",)])))
1519
def test_deserialise_item_with_common_prefix(self):
1520
node = LeafNode.deserialise(
1521
"chkleaf:\n0\n2\n2\nfoo\x00\n1\x001\nbar\x00baz\n2\x001\nblarh\n",
1523
self.assertEqual(2, len(node))
1524
self.assertEqual([(("foo", "1"), "bar\x00baz"), (("foo", "2"), "blarh")],
1525
sorted(node.iteritems(None)))
1526
self.assertIs(chk_map._unknown, node._search_prefix)
1527
self.assertEqual('foo\x00', node._common_serialised_prefix)
1529
def test_deserialise_multi_line(self):
1530
node = LeafNode.deserialise(
1531
"chkleaf:\n0\n2\n2\nfoo\x00\n1\x002\nbar\nbaz\n2\x002\nblarh\n\n",
1533
self.assertEqual(2, len(node))
1534
self.assertEqual([(("foo", "1"), "bar\nbaz"),
1535
(("foo", "2"), "blarh\n"),
1536
], sorted(node.iteritems(None)))
1537
self.assertIs(chk_map._unknown, node._search_prefix)
1538
self.assertEqual('foo\x00', node._common_serialised_prefix)

    def test_key_new(self):
        node = LeafNode()
        self.assertEqual(None, node.key())
1544
def test_key_after_map(self):
1545
node = LeafNode.deserialise("chkleaf:\n10\n1\n0\n\n", ("sha1:1234",))
1546
node.map(None, ("foo bar",), "baz quux")
1547
self.assertEqual(None, node.key())
1549
def test_key_after_unmap(self):
1550
node = LeafNode.deserialise(
1551
"chkleaf:\n0\n1\n2\n\nfoo bar\x001\nbaz\nquux\x001\nblarh\n",
1553
node.unmap(None, ("foo bar",))
1554
self.assertEqual(None, node.key())

    def test_map_exceeding_max_size_only_entry_new(self):
        node = LeafNode()
        node.set_maximum_size(10)
        result = node.map(None, ("foo bar",), "baz quux")
        self.assertEqual(("foo bar", [("", node)]), result)
        self.assertTrue(10 < node._current_size())
1563
def test_map_exceeding_max_size_second_entry_early_difference_new(self):
1565
node.set_maximum_size(10)
1566
node.map(None, ("foo bar",), "baz quux")
1567
prefix, result = list(node.map(None, ("blue",), "red"))
1568
self.assertEqual("", prefix)
1569
self.assertEqual(2, len(result))
1570
split_chars = set([result[0][0], result[1][0]])
1571
self.assertEqual(set(["f", "b"]), split_chars)
1572
nodes = dict(result)
1574
self.assertEqual({("foo bar",): "baz quux"}, self.to_dict(node, None))
1575
self.assertEqual(10, node.maximum_size)
1576
self.assertEqual(1, node._key_width)
1578
self.assertEqual({("blue",): "red"}, self.to_dict(node, None))
1579
self.assertEqual(10, node.maximum_size)
1580
self.assertEqual(1, node._key_width)
1582
def test_map_first(self):
1584
result = node.map(None, ("foo bar",), "baz quux")
1585
self.assertEqual(("foo bar", [("", node)]), result)
1586
self.assertEqual({("foo bar",):"baz quux"}, self.to_dict(node, None))
1587
self.assertEqual(1, len(node))
1589
def test_map_second(self):
1591
node.map(None, ("foo bar",), "baz quux")
1592
result = node.map(None, ("bingo",), "bango")
1593
self.assertEqual(("", [("", node)]), result)
1594
self.assertEqual({("foo bar",):"baz quux", ("bingo",):"bango"},
1595
self.to_dict(node, None))
1596
self.assertEqual(2, len(node))
1598
def test_map_replacement(self):
1600
node.map(None, ("foo bar",), "baz quux")
1601
result = node.map(None, ("foo bar",), "bango")
1602
self.assertEqual(("foo bar", [("", node)]), result)
1603
self.assertEqual({("foo bar",): "bango"},
1604
self.to_dict(node, None))
1605
self.assertEqual(1, len(node))
1607
def test_serialise_empty(self):
1608
store = self.get_chk_bytes()
1610
node.set_maximum_size(10)
1611
expected_key = ("sha1:f34c3f0634ea3f85953dffa887620c0a5b1f4a51",)
1612
self.assertEqual([expected_key],
1613
list(node.serialise(store)))
1614
self.assertEqual("chkleaf:\n10\n1\n0\n\n", self.read_bytes(store, expected_key))
1615
self.assertEqual(expected_key, node.key())
1617
def test_serialise_items(self):
1618
store = self.get_chk_bytes()
1620
node.set_maximum_size(10)
1621
node.map(None, ("foo bar",), "baz quux")
1622
expected_key = ("sha1:f89fac7edfc6bdb1b1b54a556012ff0c646ef5e0",)
1623
self.assertEqual('foo bar', node._common_serialised_prefix)
1624
self.assertEqual([expected_key],
1625
list(node.serialise(store)))
1626
self.assertEqual("chkleaf:\n10\n1\n1\nfoo bar\n\x001\nbaz quux\n",
1627
self.read_bytes(store, expected_key))
1628
self.assertEqual(expected_key, node.key())
1630
def test_unique_serialised_prefix_empty_new(self):
1632
self.assertIs(None, node._compute_search_prefix())
1634
def test_unique_serialised_prefix_one_item_new(self):
1636
node.map(None, ("foo bar", "baz"), "baz quux")
1637
self.assertEqual("foo bar\x00baz", node._compute_search_prefix())
1639
def test_unmap_missing(self):
1641
self.assertRaises(KeyError, node.unmap, None, ("foo bar",))
1643
def test_unmap_present(self):
1645
node.map(None, ("foo bar",), "baz quux")
1646
result = node.unmap(None, ("foo bar",))
1647
self.assertEqual(node, result)
1648
self.assertEqual({}, self.to_dict(node, None))
1649
self.assertEqual(0, len(node))
1651
def test_map_maintains_common_prefixes(self):
1654
node.map(None, ("foo bar", "baz"), "baz quux")
1655
self.assertEqual('foo bar\x00baz', node._search_prefix)
1656
self.assertEqual('foo bar\x00baz', node._common_serialised_prefix)
1657
node.map(None, ("foo bar", "bing"), "baz quux")
1658
self.assertEqual('foo bar\x00b', node._search_prefix)
1659
self.assertEqual('foo bar\x00b', node._common_serialised_prefix)
1660
node.map(None, ("fool", "baby"), "baz quux")
1661
self.assertEqual('foo', node._search_prefix)
1662
self.assertEqual('foo', node._common_serialised_prefix)
1663
node.map(None, ("foo bar", "baz"), "replaced")
1664
self.assertEqual('foo', node._search_prefix)
1665
self.assertEqual('foo', node._common_serialised_prefix)
1666
node.map(None, ("very", "different"), "value")
1667
self.assertEqual('', node._search_prefix)
1668
self.assertEqual('', node._common_serialised_prefix)
1670
def test_unmap_maintains_common_prefixes(self):
1673
node.map(None, ("foo bar", "baz"), "baz quux")
1674
node.map(None, ("foo bar", "bing"), "baz quux")
1675
node.map(None, ("fool", "baby"), "baz quux")
1676
node.map(None, ("very", "different"), "value")
1677
self.assertEqual('', node._search_prefix)
1678
self.assertEqual('', node._common_serialised_prefix)
1679
node.unmap(None, ("very", "different"))
1680
self.assertEqual("foo", node._search_prefix)
1681
self.assertEqual("foo", node._common_serialised_prefix)
1682
node.unmap(None, ("fool", "baby"))
1683
self.assertEqual('foo bar\x00b', node._search_prefix)
1684
self.assertEqual('foo bar\x00b', node._common_serialised_prefix)
1685
node.unmap(None, ("foo bar", "baz"))
1686
self.assertEqual('foo bar\x00bing', node._search_prefix)
1687
self.assertEqual('foo bar\x00bing', node._common_serialised_prefix)
1688
node.unmap(None, ("foo bar", "bing"))
1689
self.assertEqual(None, node._search_prefix)
1690
self.assertEqual(None, node._common_serialised_prefix)


class TestInternalNode(TestCaseWithStore):

    def test_add_node_empty_new(self):
        node = InternalNode('fo')
        child = LeafNode()
        child.set_maximum_size(100)
1699
child.map(None, ("foo",), "bar")
1700
node.add_node("foo", child)
1701
# Note that node isn't strictly valid now as a tree (only one child),
1702
# but thats ok for this test.
1703
# The first child defines the node's width:
1704
self.assertEqual(3, node._node_width)
1705
# We should be able to iterate over the contents without doing IO.
1706
self.assertEqual({('foo',): 'bar'}, self.to_dict(node, None))
1707
# The length should be known:
1708
self.assertEqual(1, len(node))
1709
# serialising the node should serialise the child and the node.
1710
chk_bytes = self.get_chk_bytes()
1711
keys = list(node.serialise(chk_bytes))
1712
child_key = child.serialise(chk_bytes)[0]
1714
[child_key, ('sha1:cf67e9997d8228a907c1f5bfb25a8bd9cd916fac',)],
1716
# We should be able to access deserialised content.
1717
bytes = self.read_bytes(chk_bytes, keys[1])
1718
node = chk_map._deserialise(bytes, keys[1], None)
1719
self.assertEqual(1, len(node))
1720
self.assertEqual({('foo',): 'bar'}, self.to_dict(node, chk_bytes))
1721
self.assertEqual(3, node._node_width)
1723

    def test_add_node_resets_key_new(self):
        node = InternalNode('fo')
        child = LeafNode()
        child.set_maximum_size(100)
        child.map(None, ("foo",), "bar")
        node.add_node("foo", child)
        chk_bytes = self.get_chk_bytes()
        keys = list(node.serialise(chk_bytes))
        self.assertEqual(keys[1], node._key)
        node.add_node("fos", child)
        self.assertEqual(None, node._key)

    # def test_add_node_empty_oversized_one_ok_new(self):
    # def test_add_node_one_oversized_second_kept_minimum_fan(self):
    # def test_add_node_two_oversized_third_kept_minimum_fan(self):
    # def test_add_node_one_oversized_second_splits_errors(self):

    def test__iter_nodes_no_key_filter(self):
        node = InternalNode('')
        child = LeafNode()
        child.set_maximum_size(100)
        child.map(None, ("foo",), "bar")
        node.add_node("f", child)
        child = LeafNode()
        child.set_maximum_size(100)
        child.map(None, ("bar",), "baz")
        node.add_node("b", child)

        for child, node_key_filter in node._iter_nodes(None, key_filter=None):
            self.assertEqual(None, node_key_filter)

    def test__iter_nodes_splits_key_filter(self):
        node = InternalNode('')
        child = LeafNode()
        child.set_maximum_size(100)
        child.map(None, ("foo",), "bar")
        node.add_node("f", child)
        child = LeafNode()
        child.set_maximum_size(100)
        child.map(None, ("bar",), "baz")
        node.add_node("b", child)

        # foo and bar both match exactly one leaf node, but 'cat' should not
        # match any, and should not be placed in one.
        key_filter = (('foo',), ('bar',), ('cat',))
        for child, node_key_filter in node._iter_nodes(None,
                                                       key_filter=key_filter):
            # each child could only match one key filter, so make sure it was
            # properly filtered
            self.assertEqual(1, len(node_key_filter))

    def test__iter_nodes_with_multiple_matches(self):
        node = InternalNode('')
        child = LeafNode()
        child.set_maximum_size(100)
        child.map(None, ("foo",), "val")
        child.map(None, ("fob",), "val")
        node.add_node("f", child)
        child = LeafNode()
        child.set_maximum_size(100)
        child.map(None, ("bar",), "val")
        child.map(None, ("baz",), "val")
        node.add_node("b", child)

        # Note that 'ram' doesn't match anything, so it should be freely
        # filtered out.
        key_filter = (('foo',), ('fob',), ('bar',), ('baz',), ('ram',))
        for child, node_key_filter in node._iter_nodes(None,
                                                       key_filter=key_filter):
            # each child could match two key filters, so make sure they were
            # both kept
            self.assertEqual(2, len(node_key_filter))

    def make_fo_fa_node(self):
        node = InternalNode('f')
        child = LeafNode()
        child.set_maximum_size(100)
        child.map(None, ("foo",), "val")
        child.map(None, ("fob",), "val")
        node.add_node('fo', child)
        child = LeafNode()
        child.set_maximum_size(100)
        child.map(None, ("far",), "val")
        child.map(None, ("faz",), "val")
        node.add_node("fa", child)
        return node

    def test__iter_nodes_single_entry(self):
        node = self.make_fo_fa_node()
        key_filter = [('foo',)]
        nodes = list(node._iter_nodes(None, key_filter=key_filter))
        self.assertEqual(1, len(nodes))
        self.assertEqual(key_filter, nodes[0][1])

    def test__iter_nodes_single_entry_misses(self):
        node = self.make_fo_fa_node()
        key_filter = [('bar',)]
        nodes = list(node._iter_nodes(None, key_filter=key_filter))
        self.assertEqual(0, len(nodes))

    def test__iter_nodes_mixed_key_width(self):
        node = self.make_fo_fa_node()
        key_filter = [('foo', 'bar'), ('foo',), ('fo',), ('b',)]
        nodes = list(node._iter_nodes(None, key_filter=key_filter))
        self.assertEqual(1, len(nodes))
        matches = key_filter[:]
        matches.remove(('b',))
        self.assertEqual(sorted(matches), sorted(nodes[0][1]))
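
    # In the mixed-width case above, roughly: ('foo', 'bar'), ('foo',) and
    # ('fo',) all resolve to the single 'fo' child of the node built by
    # make_fo_fa_node(), while ('b',) matches no child at all and is dropped,
    # which is why exactly one (node, matches) pair comes back.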

    def test__iter_nodes_match_all(self):
        node = self.make_fo_fa_node()
        key_filter = [('foo', 'bar'), ('foo',), ('fo',), ('f',)]
        nodes = list(node._iter_nodes(None, key_filter=key_filter))
        self.assertEqual(2, len(nodes))

    def test__iter_nodes_fixed_widths_and_misses(self):
        node = self.make_fo_fa_node()
        # foo and faa should both match one child, baz should miss
        key_filter = [('foo',), ('faa',), ('baz',)]
        nodes = list(node._iter_nodes(None, key_filter=key_filter))
        self.assertEqual(2, len(nodes))
        for node, matches in nodes:
            self.assertEqual(1, len(matches))

    def test_iteritems_empty_new(self):
        node = InternalNode()
        self.assertEqual([], sorted(node.iteritems(None)))

    def test_iteritems_two_children(self):
        node = InternalNode()
        leaf1 = LeafNode()
        leaf1.map(None, ('foo bar',), 'quux')
        leaf2 = LeafNode()
        leaf2.map(None, ('strange',), 'beast')
        node.add_node("f", leaf1)
        node.add_node("s", leaf2)
        self.assertEqual([(('foo bar',), 'quux'), (('strange',), 'beast')],
                         sorted(node.iteritems(None)))

    def test_iteritems_two_children_partial(self):
        node = InternalNode()
        leaf1 = LeafNode()
        leaf1.map(None, ('foo bar',), 'quux')
        leaf2 = LeafNode()
        leaf2.map(None, ('strange',), 'beast')
        node.add_node("f", leaf1)
        # This sets up a path that should not be followed - it will error if
        # the code tries to.
        node._items['f'] = None
        node.add_node("s", leaf2)
        self.assertEqual([(('strange',), 'beast')],
                         sorted(node.iteritems(None, [('strange',), ('weird',)])))

    def test_iteritems_two_children_with_hash(self):
        search_key_func = chk_map.search_key_registry.get('hash-255-way')
        node = InternalNode(search_key_func=search_key_func)
        leaf1 = LeafNode(search_key_func=search_key_func)
        leaf1.map(None, StaticTuple('foo bar',), 'quux')
        leaf2 = LeafNode(search_key_func=search_key_func)
        leaf2.map(None, StaticTuple('strange',), 'beast')
        self.assertEqual('\xbeF\x014', search_key_func(StaticTuple('foo bar',)))
        self.assertEqual('\x85\xfa\xf7K', search_key_func(StaticTuple('strange',)))
        node.add_node("\xbe", leaf1)
        # This sets up a path that should not be followed - it will error if
        # the code tries to.
        node._items['\xbe'] = None
        node.add_node("\x85", leaf2)
        self.assertEqual([(('strange',), 'beast')],
                         sorted(node.iteritems(None, [StaticTuple('strange',),
                                                      StaticTuple('weird',)])))

    def test_iteritems_partial_empty(self):
        node = InternalNode()
        self.assertEqual([], sorted(node.iteritems([('missing',)])))

    def test_map_to_new_child_new(self):
        chkmap = self._get_map({('k1',):'foo', ('k2',):'bar'}, maximum_size=10)
        chkmap._ensure_root()
        node = chkmap._root_node
        # Ensure test validity: nothing paged in below the root.
        self.assertEqual(2,
            len([value for value in node._items.values()
                 if type(value) is StaticTuple]))
        # now, mapping to k3 should add a k3 leaf
        prefix, nodes = node.map(None, ('k3',), 'quux')
        self.assertEqual("k", prefix)
        self.assertEqual([("", node)], nodes)
        # check new child details
        child = node._items['k3']
        self.assertIsInstance(child, LeafNode)
        self.assertEqual(1, len(child))
        self.assertEqual({('k3',): 'quux'}, self.to_dict(child, None))
        self.assertEqual(None, child._key)
        self.assertEqual(10, child.maximum_size)
        self.assertEqual(1, child._key_width)
        # Check overall structure:
        self.assertEqual(3, len(chkmap))
        self.assertEqual({('k1',): 'foo', ('k2',): 'bar', ('k3',): 'quux'},
                         self.to_dict(chkmap))
        # serialising should only serialise the new data - k3 and the internal
        # node.
        keys = list(node.serialise(chkmap._store))
        child_key = child.serialise(chkmap._store)[0]
        self.assertEqual([child_key, keys[1]], keys)

    def test_map_to_child_child_splits_new(self):
        chkmap = self._get_map({('k1',):'foo', ('k22',):'bar'}, maximum_size=10)
        # Check for the canonical root value for this tree:
        self.assertEqualDiff("'' InternalNode\n"
                             , chkmap._dump_tree())
        # _dump_tree pages everything in, so reload using just the root
        chkmap = CHKMap(chkmap._store, chkmap._root_node)
        chkmap._ensure_root()
        node = chkmap._root_node
        # Ensure test validity: nothing paged in below the root.
        self.assertEqual(2,
            len([value for value in node._items.values()
                 if type(value) is StaticTuple]))
        # now, mapping to k23 causes k22 ('k2' in node) to split into k22 and
        # k23, which for simplicity in the current implementation generates
        # a new internal node between node, and k22/k23.
        prefix, nodes = node.map(chkmap._store, ('k23',), 'quux')
        self.assertEqual("k", prefix)
        self.assertEqual([("", node)], nodes)
        # check new child details
        child = node._items['k2']
        self.assertIsInstance(child, InternalNode)
        self.assertEqual(2, len(child))
        self.assertEqual({('k22',): 'bar', ('k23',): 'quux'},
                         self.to_dict(child, None))
        self.assertEqual(None, child._key)
        self.assertEqual(10, child.maximum_size)
        self.assertEqual(1, child._key_width)
        self.assertEqual(3, child._node_width)
        # Check overall structure:
        self.assertEqual(3, len(chkmap))
        self.assertEqual({('k1',): 'foo', ('k22',): 'bar', ('k23',): 'quux'},
                         self.to_dict(chkmap))
        # serialising should only serialise the new data - although k22 hasn't
        # changed because it's a special corner case (splitting with only one
        # key leaves one node unaltered), in general k22 is serialised, so we
        # expect k22, k23, the new internal node, and node, to be serialised.
        keys = list(node.serialise(chkmap._store))
        child_key = child._key
        k22_key = child._items['k22']._key
        k23_key = child._items['k23']._key
        self.assertEqual([k22_key, k23_key, child_key, node.key()], keys)
        self.assertEqualDiff("'' InternalNode\n"
                             " 'k2' InternalNode\n"
                             " ('k23',) 'quux'\n"
                             , chkmap._dump_tree())

    def test__search_prefix_filter_with_hash(self):
        search_key_func = chk_map.search_key_registry.get('hash-16-way')
        node = InternalNode(search_key_func=search_key_func)
        node._key_width = 2
        node._node_width = 4
        self.assertEqual('E8B7BE43\x0071BEEFF9', search_key_func(
            StaticTuple('a', 'b')))
        self.assertEqual('E8B7', node._search_prefix_filter(
            StaticTuple('a', 'b')))
        self.assertEqual('E8B7', node._search_prefix_filter(
            StaticTuple('a',)))
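
    # With the 16-way hash search key and a _node_width of 4, only the first
    # four characters of the hashed search key ('E8B7' above) are used as the
    # filter prefix, however many elements the original key has.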

    def test_unmap_k23_from_k1_k22_k23_gives_k1_k22_tree_new(self):
        chkmap = self._get_map(
            {('k1',):'foo', ('k22',):'bar', ('k23',): 'quux'}, maximum_size=10)
        # Check we have the expected tree.
        self.assertEqualDiff("'' InternalNode\n"
                             " 'k2' InternalNode\n"
                             " ('k23',) 'quux'\n"
                             , chkmap._dump_tree())
        chkmap = CHKMap(chkmap._store, chkmap._root_node)
        chkmap._ensure_root()
        node = chkmap._root_node
        # unmapping k23 should give us a root, with k1 and k22 as direct
        # children.
        result = node.unmap(chkmap._store, ('k23',))
        # check the pointed-at object within node - k2 should now point at the
        # k22 leaf (which has been paged in to see if we can collapse the tree)
        child = node._items['k2']
        self.assertIsInstance(child, LeafNode)
        self.assertEqual(1, len(child))
        self.assertEqual({('k22',): 'bar'},
                         self.to_dict(child, None))
        # Check overall structure is intact:
        self.assertEqual(2, len(chkmap))
        self.assertEqual({('k1',): 'foo', ('k22',): 'bar'},
                         self.to_dict(chkmap))
        # serialising should only serialise the new data - the root node.
        keys = list(node.serialise(chkmap._store))
        self.assertEqual([keys[-1]], keys)
        chkmap = CHKMap(chkmap._store, keys[-1])
        self.assertEqualDiff("'' InternalNode\n"
                             , chkmap._dump_tree())

    def test_unmap_k1_from_k1_k22_k23_gives_k22_k23_tree_new(self):
        chkmap = self._get_map(
            {('k1',):'foo', ('k22',):'bar', ('k23',): 'quux'}, maximum_size=10)
        self.assertEqualDiff("'' InternalNode\n"
                             " 'k2' InternalNode\n"
                             " ('k23',) 'quux'\n"
                             , chkmap._dump_tree())
        orig_root = chkmap._root_node
        chkmap = CHKMap(chkmap._store, orig_root)
        chkmap._ensure_root()
        node = chkmap._root_node
        k2_ptr = node._items['k2']
        # unmapping k1 should give us a root, with k22 and k23 as direct
        # children, and should not have needed to page in the subtree.
        result = node.unmap(chkmap._store, ('k1',))
        self.assertEqual(k2_ptr, result)
        chkmap = CHKMap(chkmap._store, orig_root)
        # Unmapping at the CHKMap level should switch to the new root
        chkmap.unmap(('k1',))
        self.assertEqual(k2_ptr, chkmap._root_node)
        self.assertEqualDiff("'' InternalNode\n"
                             " ('k23',) 'quux'\n"
                             , chkmap._dump_tree())

# map -> fits - done
# map -> doesn't fit - shrink from left till fits
# key data to return: the common prefix, new nodes.
# unmap -> how to tell if siblings can be combined.
# combining leaf nodes means expanding the prefix to the left; so gather the size of
# all the leaf nodes addressed by expanding the prefix by 1; if any adjacent node
# is an internal node, we know that that is a dense subtree - can't combine.
# otherwise as soon as the sum of serialised values exceeds the split threshold
# we know we can't combine - stop.
# unmap -> key return data - space in node, common prefix length? and key count
# variable length prefixes? -> later start with fixed width to get something going
# map -> fits - update pointer to leaf
# return [prefix and node] - seems sound.
# map -> doesn't fit - find unique prefix and shift right
# create internal nodes for all the partitions, return list of unique
# prefixes and nodes.
# map -> new prefix - create a leaf
# unmap -> if child key count 0, remove
# unmap -> return space in node, common prefix length? (why?), key count
# map, if 1 node returned, use it, otherwise make an internal and populate.
# map - unmap - if empty, use empty leafnode (avoids special cases in driver
# map inits as empty leafnode.
# AA, AB, AC, AD, BA
# packed internal node - ideal:
# AA, AB, AC, AD, BA
# single byte fanout - A,B, AA,AB,AC,AD, BA
# AB - split, but we want to end up with AB, BA, in one node, with
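
# A small worked illustration of the "doesn't fit - shrink from left" note
# above, matching what TestInternalNode.test_map_to_child_child_splits_new
# asserts: with maximum_size=10 the root already fans out on 'k1'/'k2';
# adding ('k23',) overflows the 'k2' leaf, so that prefix is pushed down into
# a new InternalNode holding separate 'k22' and 'k23' leaves, while the root
# keeps its 'k1' and 'k2' pointers.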


class TestCHKMapDifference(TestCaseWithExampleMaps):

    def get_difference(self, new_roots, old_roots,
                       search_key_func=None):
        if search_key_func is None:
            search_key_func = chk_map._search_key_plain
        return chk_map.CHKMapDifference(self.get_chk_bytes(),
            new_roots, old_roots, search_key_func)

    def test__init__(self):
        c_map = self.make_root_only_map()
        key1 = c_map.key()
        c_map.map(('aaa',), 'new aaa content')
        key2 = c_map._save()
        diff = self.get_difference([key2], [key1])
        self.assertEqual(set([key1]), diff._all_old_chks)
        self.assertEqual([], diff._old_queue)
        self.assertEqual([], diff._new_queue)
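
    # Roughly, the three queues asserted on throughout these tests are:
    # _old_queue holds old page keys that still need to be walked, _new_queue
    # holds new page keys whose contents still need to be examined, and
    # _new_item_queue holds key/value items already known to be new.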

    def help__read_all_roots(self, search_key_func):
        c_map = self.make_root_only_map(search_key_func=search_key_func)
        key1 = c_map.key()
        c_map.map(('aaa',), 'new aaa content')
        key2 = c_map._save()
        diff = self.get_difference([key2], [key1], search_key_func)
        root_results = [record.key for record in diff._read_all_roots()]
        self.assertEqual([key2], root_results)
        # We should have queued up only items that aren't in the old set
        self.assertEqual([(('aaa',), 'new aaa content')],
                         diff._new_item_queue)
        self.assertEqual([], diff._new_queue)
        # And there are no old references, so that queue should be empty
        self.assertEqual([], diff._old_queue)

    def test__read_all_roots_plain(self):
        self.help__read_all_roots(search_key_func=chk_map._search_key_plain)

    def test__read_all_roots_16(self):
        self.help__read_all_roots(search_key_func=chk_map._search_key_16)

    def test__read_all_roots_skips_known_old(self):
        c_map = self.make_one_deep_map(chk_map._search_key_plain)
        key1 = c_map.key()
        c_map2 = self.make_root_only_map(chk_map._search_key_plain)
        key2 = c_map2.key()
        diff = self.get_difference([key2], [key1], chk_map._search_key_plain)
        root_results = [record.key for record in diff._read_all_roots()]
        # We should have no results. key2 is completely contained within key1,
        # and we should have seen that in the first pass
        self.assertEqual([], root_results)

    def test__read_all_roots_prepares_queues(self):
        c_map = self.make_one_deep_map(chk_map._search_key_plain)
        key1 = c_map.key()
        c_map._dump_tree() # load everything
        key1_a = c_map._root_node._items['a'].key()
        c_map.map(('abb',), 'new abb content')
        key2 = c_map._save()
        key2_a = c_map._root_node._items['a'].key()
        diff = self.get_difference([key2], [key1], chk_map._search_key_plain)
        root_results = [record.key for record in diff._read_all_roots()]
        self.assertEqual([key2], root_results)
        # At this point, we should have queued up only the 'a' Leaf on both
        # sides, both 'c' and 'd' are known not to have changed on both sides
        self.assertEqual([key2_a], diff._new_queue)
        self.assertEqual([], diff._new_item_queue)
        self.assertEqual([key1_a], diff._old_queue)

    def test__read_all_roots_multi_new_prepares_queues(self):
        c_map = self.make_one_deep_map(chk_map._search_key_plain)
        key1 = c_map.key()
        c_map._dump_tree() # load everything
        key1_a = c_map._root_node._items['a'].key()
        key1_c = c_map._root_node._items['c'].key()
        c_map.map(('abb',), 'new abb content')
        key2 = c_map._save()
        key2_a = c_map._root_node._items['a'].key()
        key2_c = c_map._root_node._items['c'].key()
        c_map = chk_map.CHKMap(self.get_chk_bytes(), key1,
                               chk_map._search_key_plain)
        c_map.map(('ccc',), 'new ccc content')
        key3 = c_map._save()
        key3_a = c_map._root_node._items['a'].key()
        key3_c = c_map._root_node._items['c'].key()
        diff = self.get_difference([key2, key3], [key1],
                                   chk_map._search_key_plain)
        root_results = [record.key for record in diff._read_all_roots()]
        self.assertEqual(sorted([key2, key3]), sorted(root_results))
        # We should have queued up key2_a, and key3_c, but not key2_c or key3_a
        self.assertEqual([key2_a, key3_c], diff._new_queue)
        self.assertEqual([], diff._new_item_queue)
        # And we should have queued up both a and c for the old set
        self.assertEqual([key1_a, key1_c], diff._old_queue)

    def test__read_all_roots_different_depths(self):
        c_map = self.make_two_deep_map(chk_map._search_key_plain)
        c_map._dump_tree() # load everything
        key1 = c_map.key()
        key1_a = c_map._root_node._items['a'].key()
        key1_c = c_map._root_node._items['c'].key()
        key1_d = c_map._root_node._items['d'].key()

        c_map2 = self.make_one_deep_two_prefix_map(chk_map._search_key_plain)
        c_map2._dump_tree() # load everything
        key2 = c_map2.key()
        key2_aa = c_map2._root_node._items['aa'].key()
        key2_ad = c_map2._root_node._items['ad'].key()

        diff = self.get_difference([key2], [key1], chk_map._search_key_plain)
        root_results = [record.key for record in diff._read_all_roots()]
        self.assertEqual([key2], root_results)
        # Only the 'a' subset should be queued up, since 'c' and 'd' cannot be
        # present in key2
        self.assertEqual([key1_a], diff._old_queue)
        self.assertEqual([key2_aa, key2_ad], diff._new_queue)
        self.assertEqual([], diff._new_item_queue)

        diff = self.get_difference([key1], [key2], chk_map._search_key_plain)
        root_results = [record.key for record in diff._read_all_roots()]
        self.assertEqual([key1], root_results)

        self.assertEqual([key2_aa, key2_ad], diff._old_queue)
        self.assertEqual([key1_a, key1_c, key1_d], diff._new_queue)
        self.assertEqual([], diff._new_item_queue)

    def test__read_all_roots_different_depths_16(self):
        c_map = self.make_two_deep_map(chk_map._search_key_16)
        c_map._dump_tree() # load everything
        key1 = c_map.key()
        key1_2 = c_map._root_node._items['2'].key()
        key1_4 = c_map._root_node._items['4'].key()
        key1_C = c_map._root_node._items['C'].key()
        key1_F = c_map._root_node._items['F'].key()

        c_map2 = self.make_one_deep_two_prefix_map(chk_map._search_key_16)
        c_map2._dump_tree() # load everything
        key2 = c_map2.key()
        key2_F0 = c_map2._root_node._items['F0'].key()
        key2_F3 = c_map2._root_node._items['F3'].key()
        key2_F4 = c_map2._root_node._items['F4'].key()
        key2_FD = c_map2._root_node._items['FD'].key()

        diff = self.get_difference([key2], [key1], chk_map._search_key_16)
        root_results = [record.key for record in diff._read_all_roots()]
        self.assertEqual([key2], root_results)
        # Only the subset of keys that may be present should be queued up.
        self.assertEqual([key1_F], diff._old_queue)
        self.assertEqual(sorted([key2_F0, key2_F3, key2_F4, key2_FD]),
                         sorted(diff._new_queue))
        self.assertEqual([], diff._new_item_queue)

        diff = self.get_difference([key1], [key2], chk_map._search_key_16)
        root_results = [record.key for record in diff._read_all_roots()]
        self.assertEqual([key1], root_results)

        self.assertEqual(sorted([key2_F0, key2_F3, key2_F4, key2_FD]),
                         sorted(diff._old_queue))
        self.assertEqual(sorted([key1_2, key1_4, key1_C, key1_F]),
                         sorted(diff._new_queue))
        self.assertEqual([], diff._new_item_queue)

    def test__read_all_roots_mixed_depth(self):
        c_map = self.make_one_deep_two_prefix_map(chk_map._search_key_plain)
        c_map._dump_tree() # load everything
        key1 = c_map.key()
        key1_aa = c_map._root_node._items['aa'].key()
        key1_ad = c_map._root_node._items['ad'].key()

        c_map2 = self.make_one_deep_one_prefix_map(chk_map._search_key_plain)
        c_map2._dump_tree() # load everything
        key2 = c_map2.key()
        key2_a = c_map2._root_node._items['a'].key()
        key2_b = c_map2._root_node._items['b'].key()

        diff = self.get_difference([key2], [key1], chk_map._search_key_plain)
        root_results = [record.key for record in diff._read_all_roots()]
        self.assertEqual([key2], root_results)
        # 'ad' matches exactly 'a' on the other side, so it should be removed,
        # and neither side should have it queued for walking
        self.assertEqual([], diff._old_queue)
        self.assertEqual([key2_b], diff._new_queue)
        self.assertEqual([], diff._new_item_queue)

        diff = self.get_difference([key1], [key2], chk_map._search_key_plain)
        root_results = [record.key for record in diff._read_all_roots()]
        self.assertEqual([key1], root_results)
        # Note: This is technically not the 'true minimal' set that we could
        # use. The reason is that 'a' was matched exactly to 'ad' (by sha
        # sum). However, the code gets complicated in the case of more
        # than one interesting key, so for now, we live with this.
        # Consider revising, though benchmarking showing it to be a
        # real-world issue should be done
        self.assertEqual([key2_a], diff._old_queue)
        # self.assertEqual([], diff._old_queue)
        self.assertEqual([key1_aa], diff._new_queue)
        self.assertEqual([], diff._new_item_queue)

    def test__read_all_roots_yields_extra_deep_records(self):
        # This is slightly controversial, as we will yield a chk page that we
        # might later on find out could be filtered out. (If a root node is
        # referenced deeper in the old set.)
        # However, even with stacking, we always have all chk pages that we
        # will need. So as long as we filter out the referenced keys, we'll
        # never run into problems.
        # This allows us to yield a root node record immediately, without any
        # buffering.
        c_map = self.make_two_deep_map(chk_map._search_key_plain)
        c_map._dump_tree() # load all keys
        key1 = c_map.key()
        key1_a = c_map._root_node._items['a'].key()
        c_map2 = self.get_map({
            ('acc',): 'initial acc content',
            ('ace',): 'initial ace content',
        }, maximum_size=100)
        self.assertEqualDiff(
            " ('acc',) 'initial acc content'\n"
            " ('ace',) 'initial ace content'\n",
            c_map2._dump_tree())
        key2 = c_map2.key()
        diff = self.get_difference([key2], [key1], chk_map._search_key_plain)
        root_results = [record.key for record in diff._read_all_roots()]
        self.assertEqual([key2], root_results)
        # However, even though we have yielded the root node to be fetched,
        # we should have enqueued all of the chk pages to be walked, so that we
        # can find the keys if they are present
        self.assertEqual([key1_a], diff._old_queue)
        self.assertEqual([(('acc',), 'initial acc content'),
                          (('ace',), 'initial ace content'),
                         ], diff._new_item_queue)

    def test__read_all_roots_multiple_targets(self):
        c_map = self.make_root_only_map()
        key1 = c_map.key()
        c_map = self.make_one_deep_map()
        key2 = c_map.key()
        c_map._dump_tree() # load everything
        key2_c = c_map._root_node._items['c'].key()
        key2_d = c_map._root_node._items['d'].key()
        c_map.map(('ccc',), 'new ccc value')
        key3 = c_map._save()
        key3_c = c_map._root_node._items['c'].key()
        diff = self.get_difference([key2, key3], [key1],
                                   chk_map._search_key_plain)
        root_results = [record.key for record in diff._read_all_roots()]
        self.assertEqual(sorted([key2, key3]), sorted(root_results))
        self.assertEqual([], diff._old_queue)
        # the key 'd' is interesting from key2 and key3, but should only be
        # entered into the queue 1 time
        self.assertEqual(sorted([key2_c, key3_c, key2_d]),
                         sorted(diff._new_queue))
        self.assertEqual([], diff._new_item_queue)

    def test__read_all_roots_no_old(self):
        # This is the 'initial branch' case. With nothing in the old
        # set, we can just queue up all root nodes into the interesting queue,
        # and then have them fast-path flushed via _flush_new_queue
        c_map = self.make_two_deep_map()
        key1 = c_map.key()
        diff = self.get_difference([key1], [], chk_map._search_key_plain)
        root_results = [record.key for record in diff._read_all_roots()]
        self.assertEqual([], root_results)
        self.assertEqual([], diff._old_queue)
        self.assertEqual([key1], diff._new_queue)
        self.assertEqual([], diff._new_item_queue)

        c_map2 = self.make_one_deep_map()
        key2 = c_map2.key()
        diff = self.get_difference([key1, key2], [], chk_map._search_key_plain)
        root_results = [record.key for record in diff._read_all_roots()]
        self.assertEqual([], root_results)
        self.assertEqual([], diff._old_queue)
        self.assertEqual(sorted([key1, key2]), sorted(diff._new_queue))
        self.assertEqual([], diff._new_item_queue)

    def test__read_all_roots_no_old_16(self):
        c_map = self.make_two_deep_map(chk_map._search_key_16)
        key1 = c_map.key()
        diff = self.get_difference([key1], [], chk_map._search_key_16)
        root_results = [record.key for record in diff._read_all_roots()]
        self.assertEqual([], root_results)
        self.assertEqual([], diff._old_queue)
        self.assertEqual([key1], diff._new_queue)
        self.assertEqual([], diff._new_item_queue)

        c_map2 = self.make_one_deep_map(chk_map._search_key_16)
        key2 = c_map2.key()
        diff = self.get_difference([key1, key2], [],
                                   chk_map._search_key_16)
        root_results = [record.key for record in diff._read_all_roots()]
        self.assertEqual([], root_results)
        self.assertEqual([], diff._old_queue)
        self.assertEqual(sorted([key1, key2]),
                         sorted(diff._new_queue))
        self.assertEqual([], diff._new_item_queue)

    def test__read_all_roots_multiple_old(self):
        c_map = self.make_two_deep_map()
        key1 = c_map.key()
        c_map._dump_tree() # load everything
        key1_a = c_map._root_node._items['a'].key()
        c_map.map(('ccc',), 'new ccc value')
        key2 = c_map._save()
        key2_a = c_map._root_node._items['a'].key()
        c_map.map(('add',), 'new add value')
        key3 = c_map._save()
        key3_a = c_map._root_node._items['a'].key()
        diff = self.get_difference([key3], [key1, key2],
                                   chk_map._search_key_plain)
        root_results = [record.key for record in diff._read_all_roots()]
        self.assertEqual([key3], root_results)
        # the 'a' keys should not be queued up 2 times, since they are
        # identical
        self.assertEqual([key1_a], diff._old_queue)
        self.assertEqual([key3_a], diff._new_queue)
        self.assertEqual([], diff._new_item_queue)

    def test__process_next_old_batched_no_dupes(self):
        c_map = self.make_two_deep_map()
        key1 = c_map.key()
        c_map._dump_tree() # load everything
        key1_a = c_map._root_node._items['a'].key()
        key1_aa = c_map._root_node._items['a']._items['aa'].key()
        key1_ab = c_map._root_node._items['a']._items['ab'].key()
        key1_ac = c_map._root_node._items['a']._items['ac'].key()
        key1_ad = c_map._root_node._items['a']._items['ad'].key()
        c_map.map(('aaa',), 'new aaa value')
        key2 = c_map._save()
        key2_a = c_map._root_node._items['a'].key()
        key2_aa = c_map._root_node._items['a']._items['aa'].key()
        c_map.map(('acc',), 'new acc content')
        key3 = c_map._save()
        key3_a = c_map._root_node._items['a'].key()
        key3_ac = c_map._root_node._items['a']._items['ac'].key()
        diff = self.get_difference([key3], [key1, key2],
                                   chk_map._search_key_plain)
        root_results = [record.key for record in diff._read_all_roots()]
        self.assertEqual([key3], root_results)
        self.assertEqual(sorted([key1_a, key2_a]),
                         sorted(diff._old_queue))
        self.assertEqual([key3_a], diff._new_queue)
        self.assertEqual([], diff._new_item_queue)
        diff._process_next_old()
        # All of the old records should be brought in and queued up,
        # but we should not have any duplicates
        self.assertEqual(sorted([key1_aa, key1_ab, key1_ac, key1_ad, key2_aa]),
                         sorted(diff._old_queue))


class TestIterInterestingNodes(TestCaseWithExampleMaps):

    def get_map_key(self, a_dict, maximum_size=10):
        c_map = self.get_map(a_dict, maximum_size=maximum_size)
        return c_map.key()

    def assertIterInteresting(self, records, items, interesting_keys,
                              old_keys):
        """Check the result of iter_interesting_nodes.

        Note that we no longer care how many steps are taken, etc, just that
        the right contents are returned.

        :param records: A list of record keys that should be yielded
        :param items: A list of items (key,value) that should be yielded.
        """
        store = self.get_chk_bytes()
        store._search_key_func = chk_map._search_key_plain
        iter_nodes = chk_map.iter_interesting_nodes(store, interesting_keys,
                                                    old_keys)
        record_keys = []
        all_items = []
        for record, new_items in iter_nodes:
            if record is not None:
                record_keys.append(record.key)
            if new_items:
                all_items.extend(new_items)
        self.assertEqual(sorted(records), sorted(record_keys))
        self.assertEqual(sorted(items), sorted(all_items))
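
    # Typical call shape, mirroring test_empty_to_one_keys below: the first
    # list is every chk record key we expect to be yielded (roots plus any
    # changed pages), the second is the new key/value items, then the
    # interesting roots and the old roots:
    #   self.assertIterInteresting([target], [(('a',), 'content')],
    #                              [target], [])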

    def test_empty_to_one_keys(self):
        target = self.get_map_key({('a',): 'content'})
        self.assertIterInteresting([target],
                                   [(('a',), 'content')],
                                   [target], [])

    def test_none_to_one_key(self):
        basis = self.get_map_key({})
        target = self.get_map_key({('a',): 'content'})
        self.assertIterInteresting([target],
                                   [(('a',), 'content')],
                                   [target], [basis])

    def test_one_to_none_key(self):
        basis = self.get_map_key({('a',): 'content'})
        target = self.get_map_key({})
        self.assertIterInteresting([target],
                                   [],
                                   [target], [basis])

    def test_common_pages(self):
        basis = self.get_map_key({('a',): 'content',
                                  ('b',): 'content',
                                  ('c',): 'content',
                                  })
        target = self.get_map_key({('a',): 'content',
                                   ('b',): 'other content',
                                   ('c',): 'content',
                                   })
        target_map = CHKMap(self.get_chk_bytes(), target)
        self.assertEqualDiff(
            " ('a',) 'content'\n"
            " ('b',) 'other content'\n"
            " ('c',) 'content'\n",
            target_map._dump_tree())
        b_key = target_map._root_node._items['b'].key()
        # This should return the root node, and the node for the 'b' key
        self.assertIterInteresting([target, b_key],
                                   [(('b',), 'other content')],
                                   [target], [basis])

    def test_common_sub_page(self):
        basis = self.get_map_key({('aaa',): 'common',
                                  ('c',): 'common',
                                  })
        target = self.get_map_key({('aaa',): 'common',
                                   ('aab',): 'new',
                                   ('c',): 'common',
                                   })
        target_map = CHKMap(self.get_chk_bytes(), target)
        self.assertEqualDiff(
            " 'a' InternalNode\n"
            " ('aaa',) 'common'\n"
            " ('c',) 'common'\n",
            target_map._dump_tree())
        # The key for the internal aa node
        a_key = target_map._root_node._items['a'].key()
        # The key for the leaf aab node
        # aaa_key = target_map._root_node._items['a']._items['aaa'].key()
        aab_key = target_map._root_node._items['a']._items['aab'].key()
        self.assertIterInteresting([target, a_key, aab_key],
                                   [(('aab',), 'new')],
                                   [target], [basis])

    def test_common_leaf(self):
        basis = self.get_map_key({})
        target1 = self.get_map_key({('aaa',): 'common'})
        target2 = self.get_map_key({('aaa',): 'common',
                                    ('bbb',): 'new',
                                    })
        target3 = self.get_map_key({('aaa',): 'common',
                                    ('aac',): 'other',
                                    ('bbb',): 'new',
                                    })
        # The LeafNode containing 'aaa': 'common' occurs at 3 different levels.
        # Once as a root node, once as a second layer, and once as a third
        # layer. It should only be returned one time regardless.
        target1_map = CHKMap(self.get_chk_bytes(), target1)
        self.assertEqualDiff(
            " ('aaa',) 'common'\n",
            target1_map._dump_tree())
        target2_map = CHKMap(self.get_chk_bytes(), target2)
        self.assertEqualDiff(
            " ('aaa',) 'common'\n"
            " ('bbb',) 'new'\n",
            target2_map._dump_tree())
        target3_map = CHKMap(self.get_chk_bytes(), target3)
        self.assertEqualDiff(
            " 'a' InternalNode\n"
            " ('aaa',) 'common'\n"
            " ('aac',) 'other'\n"
            " ('bbb',) 'new'\n",
            target3_map._dump_tree())
        aaa_key = target1_map._root_node.key()
        b_key = target2_map._root_node._items['b'].key()
        a_key = target3_map._root_node._items['a'].key()
        aac_key = target3_map._root_node._items['a']._items['aac'].key()
        self.assertIterInteresting(
            [target1, target2, target3, a_key, aac_key, b_key],
            [(('aaa',), 'common'), (('bbb',), 'new'), (('aac',), 'other')],
            [target1, target2, target3], [basis])

        self.assertIterInteresting(
            [target2, target3, a_key, aac_key, b_key],
            [(('bbb',), 'new'), (('aac',), 'other')],
            [target2, target3], [target1])

        # Technically, target1 could be filtered out, but since it is a root
        # node, we yield it immediately, rather than waiting to find out much
        # later.
        self.assertIterInteresting(
            [target1],
            [],
            [target1], [target3])

    def test_multiple_maps(self):
        basis1 = self.get_map_key({('aaa',): 'common',
        basis2 = self.get_map_key({('bbb',): 'common',
        target1 = self.get_map_key({('aaa',): 'common',
                                    ('aac',): 'target1',
                                    ('bbb',): 'common',
                                    })
        target2 = self.get_map_key({('aaa',): 'common',
                                    ('bba',): 'target2',
                                    ('bbb',): 'common',
                                    })
        target1_map = CHKMap(self.get_chk_bytes(), target1)
        self.assertEqualDiff(
            " 'a' InternalNode\n"
            " ('aaa',) 'common'\n"
            " ('aac',) 'target1'\n"
            " ('bbb',) 'common'\n",
            target1_map._dump_tree())
        # The key for the target1 internal a node
        a_key = target1_map._root_node._items['a'].key()
        # The key for the leaf aac node
        aac_key = target1_map._root_node._items['a']._items['aac'].key()

        target2_map = CHKMap(self.get_chk_bytes(), target2)
        self.assertEqualDiff(
            " ('aaa',) 'common'\n"
            " 'b' InternalNode\n"
            " ('bba',) 'target2'\n"
            " ('bbb',) 'common'\n",
            target2_map._dump_tree())
        # The key for the target2 internal bb node
        b_key = target2_map._root_node._items['b'].key()
        # The key for the leaf bba node
        bba_key = target2_map._root_node._items['b']._items['bba'].key()
        self.assertIterInteresting(
            [target1, target2, a_key, aac_key, b_key, bba_key],
            [(('aac',), 'target1'), (('bba',), 'target2')],
            [target1, target2], [basis1, basis2])

    def test_multiple_maps_overlapping_common_new(self):
        # Test that when a node is found through the interesting_keys
        # iteration for *some roots* and also via the old keys iteration, it
        # is still scanned for old refs and items, because it's not truly new.
        # This requires 2 levels of InternalNodes to expose, because of the
        # way the bootstrap in _find_children_info works.
        # This suggests that the code is probably amenable to/benefit from
        # How does this test work?
        # 1) We need a second level InternalNode present in a basis tree.
        # 2) We need a left side new tree that uses that InternalNode
        # 3) We need a right side new tree that does not use that InternalNode
        #    at all but that has an unchanged *value* that was reachable inside
        #    that InternalNode
        basis = self.get_map_key({
            # InternalNode, unchanged in left:
            ('aaa',): 'left',
            ('abb',): 'right',
            # Forces an internalNode at 'a'
            ('ccc',): 'common',
            })
        left = self.get_map_key({
            # All of basis unchanged
            ('aaa',): 'left',
            ('abb',): 'right',
            ('ccc',): 'common',
            # And a new top level node so the root key is different
            ('ddd',): 'change',
            })
        right = self.get_map_key({
            # A value that is unchanged from basis and thus should be filtered
            # out.
            ('abb',): 'right',
            })
        basis_map = CHKMap(self.get_chk_bytes(), basis)
        self.assertEqualDiff(
            " 'a' InternalNode\n"
            " ('aaa',) 'left'\n"
            " ('abb',) 'right'\n"
            " ('ccc',) 'common'\n",
            basis_map._dump_tree())
        # Get left expected data
        left_map = CHKMap(self.get_chk_bytes(), left)
        self.assertEqualDiff(
            " 'a' InternalNode\n"
            " ('aaa',) 'left'\n"
            " ('abb',) 'right'\n"
            " ('ccc',) 'common'\n"
            " ('ddd',) 'change'\n",
            left_map._dump_tree())
        # Keys from left side target
        l_d_key = left_map._root_node._items['d'].key()
        # Get right expected data
        right_map = CHKMap(self.get_chk_bytes(), right)
        self.assertEqualDiff(
            " ('abb',) 'right'\n",
            right_map._dump_tree())
        # Keys from the right side target - none, the root is enough.
        self.assertIterInteresting(
            [right, left, l_d_key],
            [(('ddd',), 'change')],
            [left, right], [basis])

    def test_multiple_maps_similar(self):
        # We want to have a depth=2 tree, with multiple entries in each leaf
        # node
        basis = self.get_map_key({
            ('aaa',): 'unchanged',
            ('abb',): 'will change left',
            ('caa',): 'unchanged',
            ('cbb',): 'will change right',
        left = self.get_map_key({
            ('aaa',): 'unchanged',
            ('abb',): 'changed left',
            ('caa',): 'unchanged',
            ('cbb',): 'will change right',
        right = self.get_map_key({
            ('aaa',): 'unchanged',
            ('abb',): 'will change left',
            ('caa',): 'unchanged',
            ('cbb',): 'changed right',
        basis_map = CHKMap(self.get_chk_bytes(), basis)
        self.assertEqualDiff(
            " ('aaa',) 'unchanged'\n"
            " ('abb',) 'will change left'\n"
            " ('caa',) 'unchanged'\n"
            " ('cbb',) 'will change right'\n",
            basis_map._dump_tree())
        # Get left expected data
        left_map = CHKMap(self.get_chk_bytes(), left)
        self.assertEqualDiff(
            " ('aaa',) 'unchanged'\n"
            " ('abb',) 'changed left'\n"
            " ('caa',) 'unchanged'\n"
            " ('cbb',) 'will change right'\n",
            left_map._dump_tree())
        # Keys from left side target
        l_a_key = left_map._root_node._items['a'].key()
        l_c_key = left_map._root_node._items['c'].key()
        # Get right expected data
        right_map = CHKMap(self.get_chk_bytes(), right)
        self.assertEqualDiff(
            " ('aaa',) 'unchanged'\n"
            " ('abb',) 'will change left'\n"
            " ('caa',) 'unchanged'\n"
            " ('cbb',) 'changed right'\n",
            right_map._dump_tree())
        r_a_key = right_map._root_node._items['a'].key()
        r_c_key = right_map._root_node._items['c'].key()
        self.assertIterInteresting(
            [right, left, l_a_key, r_c_key],
            [(('abb',), 'changed left'), (('cbb',), 'changed right')],
            [left, right], [basis])