class TestMap(TestCaseWithStore):

    def assertHasABMap(self, chk_bytes):
        root_key = ('sha1:f14dd34def95036bc06bb5c0ed95437d7383a04a',)
        self.assertEqual(
            'chkleaf:\n0\n1\n1\na\x00b\n',
            self.read_bytes(chk_bytes, root_key))
        return root_key

    def assertHasEmptyMap(self, chk_bytes):
        root_key = ('sha1:4e6482a3a5cb2d61699971ac77befe11a0ec5779',)
        self.assertEqual("chkleaf:\n0\n1\n0\n", self.read_bytes(chk_bytes, root_key))
        return root_key

    def test_from_dict_empty(self):
        chk_bytes = self.get_chk_bytes()
        root_key = CHKMap.from_dict(chk_bytes, {})
        # Check the data was saved and inserted correctly.
        expected_root_key = self.assertHasEmptyMap(chk_bytes)
        self.assertEqual(expected_root_key, root_key)

    def test_from_dict_ab(self):
        chk_bytes = self.get_chk_bytes()
        root_key = CHKMap.from_dict(chk_bytes, {"a":"b"})
        # Check the data was saved and inserted correctly.
        expected_root_key = self.assertHasABMap(chk_bytes)
        self.assertEqual(expected_root_key, root_key)

    def test_apply_empty_ab(self):
        # applying a delta (None, "a", "b") to an empty chkmap generates the
        # same map as from_dict_ab.
        chk_bytes = self.get_chk_bytes()
        root_key = CHKMap.from_dict(chk_bytes, {})
        chkmap = CHKMap(chk_bytes, root_key)
        new_root = chkmap.apply_delta([(None, ("a",), "b")])
        # Check the data was saved and inserted correctly.
        expected_root_key = self.assertHasABMap(chk_bytes)
        self.assertEqual(expected_root_key, new_root)

    def test_apply_ab_empty(self):
        # applying a delta ("a", None, None) to a map with "a" in it generates
        # an empty map.
        chk_bytes = self.get_chk_bytes()
        root_key = CHKMap.from_dict(chk_bytes, {("a",):"b"})
        chkmap = CHKMap(chk_bytes, root_key)
        new_root = chkmap.apply_delta([(("a",), None, None)])
        # Check the data was saved and inserted correctly.
        expected_root_key = self.assertHasEmptyMap(chk_bytes)
        self.assertEqual(expected_root_key, new_root)
        # The update should have left us with an in memory root node, with an
        # updated key.
        self.assertEqual(new_root, chkmap._root_node._key)
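        # (In the deltas above, each entry is an (old_key, new_key, new_value)
        # tuple: (None, key, value) inserts a key and (key, None, None)
        # removes one.)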

    def test_iteritems_basic(self):
        chk_bytes = self.get_chk_bytes()
        root_key = CHKMap.from_dict(chk_bytes,
            {"a":"content here", "b":"more content"})
        chkmap = CHKMap(chk_bytes, root_key)
        self.assertEqual([(("a",), "content here"), (("b",), "more content")],
            sorted(list(chkmap.iteritems())))

    def test_iteritems_selected_one_of_two_items(self):
        chkmap = self._get_map({("a",):"content here", ("b",):"more content"})
        self.assertEqual({("a",): "content here"},
            self.to_dict(chkmap, [("a",)]))

    def test___len__empty(self):
        chkmap = self._get_map({})
        self.assertEqual(0, len(chkmap))


class TestRootNode(TestCaseWithTransport):

    def test__current_size(self):
        node = RootNode()
        self.assertEqual(15, node._current_size())
        node.add_child("cd", ("sha1:12345",))
        self.assertEqual(29, node._current_size())
        self.assertEqual(29, len(node.serialise()))
        node.add_child("cd", ("sha1:123456",))
        self.assertEqual(30, node._current_size())
        self.assertEqual(30, len(node.serialise()))
        node.remove_child("cd")
        self.assertEqual(15, node._current_size())
        self.assertEqual(15, len(node.serialise()))
        node.set_maximum_size(100)
        self.assertEqual(17, node._current_size())
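        # (These sizes follow from the serialised form: the empty header
        # "chkroot:\n0\n0\n0\n" is 15 bytes, each child line adds
        # len(key) + 1 + len(CHK) + 1 bytes, and a three-digit maximum_size
        # widens its field from "0\n" to "100\n", two bytes more.)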

    def test_serialise_empty(self):
        node = RootNode()
        bytes = node.serialise()
        self.assertEqual("chkroot:\n0\n0\n0\n", bytes)

    def test_add_child_over_limit(self):
        node = RootNode()
        node.set_maximum_size(20)
        node.add_child("abcdef", ("sha1:12345",))
        size = node._current_size()
        self.assertTrue(20 < size)
        self.assertEqual(False, node.add_child("12345", ("sha1:34",)))
        # Nothing should have changed
        self.assertEqual(size, node._current_size())
        self.assertEqual(1, len(node))

    def test_add_child_resets_key(self):
        node = RootNode()
        node._key = ("something",)
        node.add_child("c", ("sha1:1234",))
        self.assertEqual(None, node._key)

    def test_add_child_returns_True(self):
        node = RootNode()
        node._key = ("something",)
        self.assertEqual(True, node.add_child("c", ("sha1:1234",)))

    def test_add_child_increases_len(self):
        node = RootNode()
        node._key = ("something",)
        node.add_child("c", ("sha1:1234",))
        self.assertEqual(1, len(node))

    def test_remove_child_decreases_len(self):
        node = RootNode()
        node.add_child("c", ("sha1:1234",))
        node._key = ("something",)
        node.remove_child("c")
        self.assertEqual(0, len(node))

    def test_remove_child_removes_child(self):
        node = RootNode()
        node.add_child("a", ("sha1:4321",))
        node.add_child("c", ("sha1:1234",))
        node._key = ("something",)
        node.remove_child("a")
        self.assertEqual({"c":("sha1:1234",)}, node._nodes)

    def test_remove_child_resets_key(self):
        node = RootNode()
        node.add_child("c", ("sha1:1234",))
        node._key = ("something",)
        node.remove_child("c")
        self.assertEqual(None, node._key)

    def test_deserialise(self):
        # deserialising from a bytestring & key sets the nodes and the known
        # key.
        node = RootNode()
        node.deserialise("chkroot:\n0\n0\n1\nc\x00sha1:1234\n", ("foo",))
        self.assertEqual({"c": ("sha1:1234",)}, node._nodes)
        self.assertEqual(("foo",), node._key)
        self.assertEqual(1, len(node))
        self.assertEqual(0, node.maximum_size)

    def test_serialise_with_child(self):
        node = RootNode()
        node.add_child("c", ("sha1:1234",))
        bytes = node.serialise()
        # format: type, maximum size, prefix width, length, then key\x00CHK lines
        self.assertEqual("chkroot:\n0\n0\n1\nc\x00sha1:1234\n", bytes)

    def test_deserialise_max_size(self):
        node = RootNode()
        node.deserialise("chkroot:\n100\n0\n1\nc\x00sha1:1234\n", ("foo",))
        self.assertEqual(100, node.maximum_size)

    def test_deserialise_key_prefix(self):
        node = RootNode()
        node.deserialise("chkroot:\n100\n10\n1\nc\x00sha1:1234\n", ("foo",))
        self.assertEqual(10, node.prefix_width)


class TestValueNode(TestCaseWithTransport):

    def test_deserialise(self):
        node = ValueNode.deserialise("chkvalue:\nfoo bar baz\n")
        self.assertEqual("foo bar baz\n", node.value)

    def test_serialise(self):
        node = ValueNode("b")
        bytes = node.serialise()
        self.assertEqual("chkvalue:\nb", bytes)


class TestLeafNode(TestCaseWithStore):

    def test_current_size_empty(self):

    def test_iteritems_two_items(self):
        node = LeafNode.deserialise(
            "chkleaf:\n0\n1\n2\nfoo bar\x00baz\nquux\x00blarh\n", ("sha1:1234",))
        self.assertEqual(2, len(node))
        self.assertEqual([(("foo bar",), "baz"), (("quux",), "blarh")],
            sorted(node.iteritems(None)))

    def test_iteritems_selected_one_of_two_items(self):
        node = LeafNode.deserialise(
            "chkleaf:\n0\n1\n2\nfoo bar\x00baz\nquux\x00blarh\n", ("sha1:1234",))
        self.assertEqual(2, len(node))
        self.assertEqual([(("quux",), "blarh")],
            sorted(node.iteritems(None, [("quux",), ("qaz",)])))
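        # (Keys in the filter that are not present in the node, like
        # ("qaz",), are simply ignored rather than raising an error.)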

    def test_key_new(self):
        node = LeafNode()
        self.assertEqual(None, node.key())

    def test_key_after_map(self):
        node = LeafNode.deserialise("chkleaf:\n10\n1\n0\n", ("sha1:1234",))
        node.map(None, ("foo bar",), "baz quux")
        self.assertEqual(None, node.key())
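        # (These LeafNode tests pass None for the store argument; the
        # operations exercised here apparently never need to read other
        # nodes back from the store.)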

    def test_key_after_unmap(self):
        node = LeafNode.deserialise(
            "chkleaf:\n0\n1\n2\nfoo bar\x00baz\nquux\x00blarh\n", ("sha1:1234",))
        node.unmap(None, ("foo bar",))
        self.assertEqual(None, node.key())

    def test_map_exceeding_max_size_only_entry_new(self):
        self.assertEqual(10, node.maximum_size)
        self.assertEqual(1, node._key_width)

    def test_map_exceeding_max_size_second_entry_last_octect_changed(self):
        node = LeafNode()
        node.set_maximum_size(10)
        node = node.map(None, ("foo bar",), "baz quux")
        result = node.map(None, ("foo baz",), "red")
        self.assertIsInstance(result, InternalNode)
        # should have copied the data in:
        self.assertEqual(2, len(result))
        self.assertEqual({('foo baz',): 'red', ('foo bar',): 'baz quux'},
            self.to_dict(result))
        self.assertEqual(10, result.maximum_size)
        self.assertEqual(1, result._key_width)

    def test_map_first(self):
        node = LeafNode()
        result = node.map(None, ("foo bar",), "baz quux")
        self.assertEqual(("foo bar", [("", node)]), result)
        self.assertEqual({("foo bar",):"baz quux"}, self.to_dict(node, None))
        self.assertEqual(1, len(node))
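        # (map() reports its outcome as (unique serialised prefix,
        # [(prefix, node), ...]) - the node or nodes now holding the data;
        # here a single leaf under an empty sub-prefix.)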

    def test_map_second(self):
        node = LeafNode()
        node.map(None, ("foo bar",), "baz quux")
        result = node.map(None, ("bingo",), "bango")
        self.assertEqual(("", [("", node)]), result)
        self.assertEqual({("foo bar",):"baz quux", ("bingo",):"bango"},
            self.to_dict(node, None))
        self.assertEqual(2, len(node))

    def test_map_replacement(self):
        node = LeafNode()
        node.map(None, ("foo bar",), "baz quux")
        result = node.map(None, ("foo bar",), "bango")
        self.assertEqual(("foo bar", [("", node)]), result)
        self.assertEqual({("foo bar",): "bango"},
            self.to_dict(node, None))
        self.assertEqual(1, len(node))

    def test_serialise_empty(self):

    def test_unique_serialised_prefix_empty_new(self):
        node = LeafNode()
        self.assertEqual("", node.unique_serialised_prefix())

    def test_unique_serialised_prefix_one_item_new(self):
        node = LeafNode()
        node.map(None, ("foo bar", "baz"), "baz quux")
        self.assertEqual("foo bar\x00baz", node.unique_serialised_prefix())

    def test_unmap_missing(self):
        node = LeafNode()
        self.assertRaises(KeyError, node.unmap, None, ("foo bar",))

    def test_unmap_present(self):
        node = LeafNode()
        node.map(None, ("foo bar",), "baz quux")
        result = node.unmap(None, ("foo bar",))
        self.assertEqual(node, result)
        self.assertEqual({}, self.to_dict(node, None))
        self.assertEqual(0, len(node))

    # def test_add_node_two_oversized_third_kept_minimum_fan(self):
    # def test_add_node_one_oversized_second_splits_errors(self):

    def test_add_node_empty_oversized_no_common_sets_prefix(self):
        # adding a node with two children that is oversized will generate two
        # new leaf nodes, and a prefix width that cuts one byte off the longest
        # key (because that is sufficient to guarantee a split).
        overpacked = LeafNode()
        overpacked.set_maximum_size(10)
        overpacked.map(None, ("foo bar",), "baz")
        overpacked.map(None, ("strange thing",), "it is")
        # at this point, map returned a new internal node that is already
        # packed, but that should have preserved the old node due to the
        # functional idioms. check to be sure:
        self.assertTrue(overpacked.maximum_size < overpacked._current_size())
        node = InternalNode()
        # We're not testing that the internal node rebalances yet
        node.set_maximum_size(0)
        node._add_node(overpacked)
        # 13 is the length of "strange thing" serialised; as there is no node
        # size set, we pack the internal node as densely as possible.
        self.assertEqual(13, node._node_width)
        self.assertEqual(set(["strange thing", "foo bar\x00\x00\x00\x00\x00\x00"]),
            set(node._items.keys()))
        self.assertEqual(2, len(node))
        self.assertEqual({('strange thing',): 'it is'},
            self.to_dict(node._items["strange thing"]))
        self.assertEqual({('foo bar',): 'baz'},
            self.to_dict(node._items["foo bar\x00\x00\x00\x00\x00\x00"]))
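        # ("strange thing" is 13 bytes when serialised; the shorter "foo bar"
        # is padded with trailing NUL bytes to the same node width.)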

    def test_iteritems_empty_new(self):
        node = InternalNode()
        self.assertEqual([], sorted(node.iteritems(None)))

    def test_iteritems_two_children(self):
        node = InternalNode()
        leaf1 = LeafNode()
        leaf1.map(None, ('foo bar',), 'quux')
        leaf2 = LeafNode()
        leaf2.map(None, ('strange',), 'beast')
        node.add_node("f", leaf1)
        node.add_node("s", leaf2)
        self.assertEqual([(('foo bar',), 'quux'), (('strange',), 'beast')],
            sorted(node.iteritems(None)))

    def test_iteritems_two_children_partial(self):
        node = InternalNode()
        leaf1 = LeafNode()
        leaf1.map(None, ('foo bar',), 'quux')
        leaf2 = LeafNode()
        leaf2.map(None, ('strange',), 'beast')
        node.add_node("f", leaf1)
        # This sets up a path that should not be followed - it will error if
        # the code tries to.
        node._items['f'] = None
        node.add_node("s", leaf2)
        self.assertEqual([(('strange',), 'beast')],
            sorted(node.iteritems(None, [('strange',), ('weird',)])))

    def test_iteritems_partial_empty(self):
        node = InternalNode()
        self.assertEqual([], sorted(node.iteritems([('missing',)])))

    def test_map_to_existing_child(self):
        # mapping a new key which is in a child of an internal node maps
        # recursively.
        overpacked = LeafNode()
        overpacked.set_maximum_size(10)
        overpacked.map(None, ("foo bar",), "baz")
        node = overpacked.map(None, ("foo baz",), "it is")
        self.assertIsInstance(node, InternalNode)
        # Now, increase the maximum size limit on the subnode for foo bar
        child = node._items[node._serialised_key(("foo bar",))]
        child.set_maximum_size(200)
        # And map a new key into node, which will land in the same child node
        result = node.map(None, ("foo bar baz",), "new value")
        self.assertTrue(result is node)
        self.assertEqual(3, len(result))
        self.assertEqual(2, len(child))
        self.assertEqual({('foo bar',): 'baz',
            ('foo bar baz',): 'new value', ('foo baz',): 'it is'},
            self.to_dict(node))

    def test_map_to_existing_child_exceed_child_size_not_internal_size(self):
        # mapping a new key which is in a child of an internal node maps
        # recursively, and when the child splits that is accommodated within
        # the internal node if there is room for another child pointer.
        overpacked = LeafNode()
        # 3 pointers, 7 bytes offset, 45 byte pointers, + prelude.
        overpacked.set_maximum_size(180)
        overpacked.map(None, ("foo bar",), "baz " * 40)
        node = overpacked.map(None, ("foo baz",), "itis" * 40)
        self.assertIsInstance(node, InternalNode)
        # And map a new key into node, which will land in the same child path
        # within node, but trigger a spill event on the child, and should end
        # up with 3 pointers in node (as the pointers can fit in the node
        # space).
        result = node.map(None, ("foo bar baz",), "new " * 60)
        self.assertTrue(result is node)
        self.assertEqual(3, len(result))
        # We should have one child for foo bar
        child = node._items[node._serialised_key(("foo bar\x00",))]
        self.assertIsInstance(child, LeafNode)
        self.assertEqual(1, len(child))
        # And one for 'foo bar '
        child = node._items[node._serialised_key(("foo bar ",))]
        self.assertIsInstance(child, LeafNode)
        self.assertEqual(1, len(child))
        self.assertEqual({('foo bar',): 'baz ' * 40,
            ('foo bar baz',): 'new ' * 60,
            ('foo baz',): 'itis' * 40},
            self.to_dict(node))

    def test_map_to_new_child_new(self):
        chkmap = self._get_map({('k1',):'foo', ('k2',):'bar'}, maximum_size=10)
        chkmap._ensure_root()