~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/tests/test_btree_index.py

  • Committer: Vincent Ladeuil
  • Date: 2010-04-23 08:51:52 UTC
  • mfrom: (5131.2.6 support_OO_flag)
  • mto: This revision was merged to the branch mainline in revision 5179.
  • Revision ID: v.ladeuil+lp@free.fr-20100423085152-uoewc1vnkwqhw0pj
Manually assign docstrings to command objects, so that they work with python -OO

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
 
# Copyright (C) 2008 Canonical Ltd
 
1
# Copyright (C) 2008, 2009, 2010 Canonical Ltd
2
2
#
3
3
# This program is free software; you can redistribute it and/or modify
4
4
# it under the terms of the GNU General Public License as published by
23
23
from bzrlib import (
24
24
    btree_index,
25
25
    errors,
 
26
    fifo_cache,
 
27
    lru_cache,
 
28
    osutils,
26
29
    tests,
27
30
    )
28
31
from bzrlib.tests import (
40
43
        condition_isinstance(TestBTreeNodes))
41
44
    import bzrlib._btree_serializer_py as py_module
42
45
    scenarios = [('python', {'parse_btree': py_module})]
43
 
    if CompiledBtreeParserFeature.available():
44
 
        # Is there a way to do this that gets missing feature failures rather
45
 
        # than no indication to the user?
46
 
        import bzrlib._btree_serializer_c as c_module
47
 
        scenarios.append(('C', {'parse_btree': c_module}))
 
46
    if compiled_btreeparser_feature.available():
 
47
        scenarios.append(('C', {'parse_btree':
 
48
                                compiled_btreeparser_feature.module}))
48
49
    return multiply_tests(node_tests, scenarios, others)
49
50
 
50
51
 
51
 
class _CompiledBtreeParserFeature(tests.Feature):
52
 
    def _probe(self):
53
 
        try:
54
 
            import bzrlib._btree_serializer_c
55
 
        except ImportError:
56
 
            return False
57
 
        return True
58
 
 
59
 
    def feature_name(self):
60
 
        return 'bzrlib._btree_serializer_c'
61
 
 
62
 
CompiledBtreeParserFeature = _CompiledBtreeParserFeature()
 
52
compiled_btreeparser_feature = tests.ModuleAvailableFeature(
 
53
                                'bzrlib._btree_serializer_pyx')
63
54
 
64
55
 
65
56
class BTreeTestCase(TestCaseWithTransport):
68
59
 
69
60
    def setUp(self):
70
61
        TestCaseWithTransport.setUp(self)
71
 
        self._original_header = btree_index._RESERVED_HEADER_BYTES
72
 
        def restore():
73
 
            btree_index._RESERVED_HEADER_BYTES = self._original_header
74
 
        self.addCleanup(restore)
75
 
        btree_index._RESERVED_HEADER_BYTES = 100
 
62
        self.overrideAttr(btree_index, '_RESERVED_HEADER_BYTES', 100)
76
63
 
77
64
    def make_nodes(self, count, key_elements, reference_lists):
78
65
        """Generate count*key_elements sample nodes."""
112
99
 
113
100
    def shrink_page_size(self):
114
101
        """Shrink the default page size so that less fits in a page."""
115
 
        old_page_size = btree_index._PAGE_SIZE
116
 
        def cleanup():
117
 
            btree_index._PAGE_SIZE = old_page_size
118
 
        self.addCleanup(cleanup)
 
102
        self.overrideAttr(btree_index, '_PAGE_SIZE')
119
103
        btree_index._PAGE_SIZE = 2048
120
104
 
121
105
 
122
106
class TestBTreeBuilder(BTreeTestCase):
123
107
 
 
108
    def test_clear_cache(self):
 
109
        builder = btree_index.BTreeBuilder(reference_lists=0, key_elements=1)
 
110
        # This is a no-op, but we need the api to be consistent with other
 
111
        # BTreeGraphIndex apis.
 
112
        builder.clear_cache()
 
113
 
124
114
    def test_empty_1_0(self):
125
115
        builder = btree_index.BTreeBuilder(key_elements=1, reference_lists=0)
126
116
        # NamedTemporaryFile dies on builder.finish().read(). weird.
152
142
        temp_file = builder.finish()
153
143
        content = temp_file.read()
154
144
        del temp_file
155
 
        self.assertEqual(158, len(content))
 
145
        self.assertEqual(131, len(content))
156
146
        self.assertEqual(
157
147
            "B+Tree Graph Index 2\nnode_ref_lists=0\nkey_elements=1\nlen=5\n"
158
148
            "row_lengths=1\n",
176
166
        temp_file = builder.finish()
177
167
        content = temp_file.read()
178
168
        del temp_file
179
 
        self.assertEqual(264, len(content))
 
169
        self.assertEqual(238, len(content))
180
170
        self.assertEqual(
181
171
            "B+Tree Graph Index 2\nnode_ref_lists=2\nkey_elements=2\nlen=10\n"
182
172
            "row_lengths=1\n",
242
232
        temp_file = builder.finish()
243
233
        content = temp_file.read()
244
234
        del temp_file
245
 
        self.assertEqual(181, len(content))
 
235
        self.assertEqual(155, len(content))
246
236
        self.assertEqual(
247
237
            "B+Tree Graph Index 2\nnode_ref_lists=0\nkey_elements=1\nlen=10\n"
248
238
            "row_lengths=1\n",
350
340
        # Test the parts of the index that take up memory are doing so
351
341
        # predictably.
352
342
        self.assertEqual(1, len(builder._nodes))
353
 
        self.assertEqual(1, len(builder._keys))
354
343
        self.assertIs(None, builder._nodes_by_key)
355
344
        builder.add_node(*nodes[1])
356
345
        self.assertEqual(0, len(builder._nodes))
357
 
        self.assertEqual(0, len(builder._keys))
358
346
        self.assertIs(None, builder._nodes_by_key)
359
347
        self.assertEqual(1, len(builder._backing_indices))
360
348
        self.assertEqual(2, builder._backing_indices[0].key_count())
361
349
        # now back to memory
362
350
        builder.add_node(*nodes[2])
363
351
        self.assertEqual(1, len(builder._nodes))
364
 
        self.assertEqual(1, len(builder._keys))
365
352
        self.assertIs(None, builder._nodes_by_key)
366
353
        # And spills to a second backing index combining all
367
354
        builder.add_node(*nodes[3])
368
355
        self.assertEqual(0, len(builder._nodes))
369
 
        self.assertEqual(0, len(builder._keys))
370
356
        self.assertIs(None, builder._nodes_by_key)
371
357
        self.assertEqual(2, len(builder._backing_indices))
372
358
        self.assertEqual(None, builder._backing_indices[0])
375
361
        builder.add_node(*nodes[4])
376
362
        builder.add_node(*nodes[5])
377
363
        self.assertEqual(0, len(builder._nodes))
378
 
        self.assertEqual(0, len(builder._keys))
379
364
        self.assertIs(None, builder._nodes_by_key)
380
365
        self.assertEqual(2, len(builder._backing_indices))
381
366
        self.assertEqual(2, builder._backing_indices[0].key_count())
439
424
        # Test the parts of the index that take up memory are doing so
440
425
        # predictably.
441
426
        self.assertEqual(1, len(builder._nodes))
442
 
        self.assertEqual(1, len(builder._keys))
443
427
        self.assertIs(None, builder._nodes_by_key)
444
428
        builder.add_node(*nodes[1])
445
429
        self.assertEqual(0, len(builder._nodes))
446
 
        self.assertEqual(0, len(builder._keys))
447
430
        self.assertIs(None, builder._nodes_by_key)
448
431
        self.assertEqual(1, len(builder._backing_indices))
449
432
        self.assertEqual(2, builder._backing_indices[0].key_count())
450
433
        # now back to memory
451
434
        builder.add_node(*nodes[2])
452
435
        self.assertEqual(1, len(builder._nodes))
453
 
        self.assertEqual(1, len(builder._keys))
454
436
        self.assertIs(None, builder._nodes_by_key)
455
437
        # And spills to a second backing index but doesn't combine
456
438
        builder.add_node(*nodes[3])
457
439
        self.assertEqual(0, len(builder._nodes))
458
 
        self.assertEqual(0, len(builder._keys))
459
440
        self.assertIs(None, builder._nodes_by_key)
460
441
        self.assertEqual(2, len(builder._backing_indices))
461
442
        for backing_index in builder._backing_indices:
464
445
        builder.add_node(*nodes[4])
465
446
        builder.add_node(*nodes[5])
466
447
        self.assertEqual(0, len(builder._nodes))
467
 
        self.assertEqual(0, len(builder._keys))
468
448
        self.assertIs(None, builder._nodes_by_key)
469
449
        self.assertEqual(3, len(builder._backing_indices))
470
450
        for backing_index in builder._backing_indices:
529
509
        builder.add_node(*nodes[0])
530
510
        # Test the parts of the index that take up memory are doing so
531
511
        # predictably.
532
 
        self.assertEqual(1, len(builder._keys))
533
512
        self.assertEqual(1, len(builder._nodes))
534
513
        self.assertIs(None, builder._nodes_by_key)
535
514
        builder.add_node(*nodes[1])
536
 
        self.assertEqual(0, len(builder._keys))
537
515
        self.assertEqual(0, len(builder._nodes))
538
516
        self.assertIs(None, builder._nodes_by_key)
539
517
        self.assertEqual(1, len(builder._backing_indices))
542
520
        old = dict(builder._get_nodes_by_key()) #Build up the nodes by key dict
543
521
        builder.add_node(*nodes[2])
544
522
        self.assertEqual(1, len(builder._nodes))
545
 
        self.assertEqual(1, len(builder._keys))
546
523
        self.assertIsNot(None, builder._nodes_by_key)
547
524
        self.assertNotEqual({}, builder._nodes_by_key)
548
525
        # We should have a new entry
550
527
        # And spills to a second backing index combining all
551
528
        builder.add_node(*nodes[3])
552
529
        self.assertEqual(0, len(builder._nodes))
553
 
        self.assertEqual(0, len(builder._keys))
554
530
        self.assertIs(None, builder._nodes_by_key)
555
531
        self.assertEqual(2, len(builder._backing_indices))
556
532
        self.assertEqual(None, builder._backing_indices[0])
559
535
        builder.add_node(*nodes[4])
560
536
        builder.add_node(*nodes[5])
561
537
        self.assertEqual(0, len(builder._nodes))
562
 
        self.assertEqual(0, len(builder._keys))
563
538
        self.assertIs(None, builder._nodes_by_key)
564
539
        self.assertEqual(2, len(builder._backing_indices))
565
540
        self.assertEqual(2, builder._backing_indices[0].key_count())
636
611
        size = trans.put_file('index', stream)
637
612
        return btree_index.BTreeGraphIndex(trans, 'index', size)
638
613
 
 
614
    def make_index_with_offset(self, ref_lists=1, key_elements=1, nodes=[],
 
615
                               offset=0):
 
616
        builder = btree_index.BTreeBuilder(key_elements=key_elements,
 
617
                                           reference_lists=ref_lists)
 
618
        builder.add_nodes(nodes)
 
619
        transport = self.get_transport('')
 
620
        # NamedTemporaryFile dies on builder.finish().read(). weird.
 
621
        temp_file = builder.finish()
 
622
        content = temp_file.read()
 
623
        del temp_file
 
624
        size = len(content)
 
625
        transport.put_bytes('index', (' '*offset)+content)
 
626
        return btree_index.BTreeGraphIndex(transport, 'index', size=size,
 
627
                                           offset=offset)
 
628
 
 
629
    def test_clear_cache(self):
 
630
        nodes = self.make_nodes(160, 2, 2)
 
631
        index = self.make_index(ref_lists=2, key_elements=2, nodes=nodes)
 
632
        self.assertEqual(1, len(list(index.iter_entries([nodes[30][0]]))))
 
633
        self.assertEqual([1, 4], index._row_lengths)
 
634
        self.assertIsNot(None, index._root_node)
 
635
        internal_node_pre_clear = index._internal_node_cache.keys()
 
636
        self.assertTrue(len(index._leaf_node_cache) > 0)
 
637
        index.clear_cache()
 
638
        # We don't touch _root_node or _internal_node_cache, both should be
 
639
        # small, and can save a round trip or two
 
640
        self.assertIsNot(None, index._root_node)
 
641
        # NOTE: We don't want to affect the _internal_node_cache, as we expect
 
642
        #       it will be small, and if we ever do touch this index again, it
 
643
        #       will save round-trips.  This assertion isn't very strong,
 
644
        #       because without a 3-level index, we don't have any internal
 
645
        #       nodes cached.
 
646
        self.assertEqual(internal_node_pre_clear,
 
647
                         index._internal_node_cache.keys())
 
648
        self.assertEqual(0, len(index._leaf_node_cache))
 
649
 
639
650
    def test_trivial_constructor(self):
640
651
        transport = get_transport('trace+' + self.get_url(''))
641
652
        index = btree_index.BTreeGraphIndex(transport, 'index', None)
688
699
        # The entire index should have been read, as it is one page long.
689
700
        self.assertEqual([('readv', 'index', [(0, size)], False, None)],
690
701
            transport._activity)
691
 
        self.assertEqual(1199, size)
 
702
        self.assertEqual(1173, size)
 
703
 
 
704
    def test_with_offset_no_size(self):
 
705
        index = self.make_index_with_offset(key_elements=1, ref_lists=1,
 
706
                                            offset=1234,
 
707
                                            nodes=self.make_nodes(200, 1, 1))
 
708
        index._size = None # throw away the size info
 
709
        self.assertEqual(200, index.key_count())
 
710
 
 
711
    def test_with_small_offset(self):
 
712
        index = self.make_index_with_offset(key_elements=1, ref_lists=1,
 
713
                                            offset=1234,
 
714
                                            nodes=self.make_nodes(200, 1, 1))
 
715
        self.assertEqual(200, index.key_count())
 
716
 
 
717
    def test_with_large_offset(self):
 
718
        index = self.make_index_with_offset(key_elements=1, ref_lists=1,
 
719
                                            offset=123456,
 
720
                                            nodes=self.make_nodes(200, 1, 1))
 
721
        self.assertEqual(200, index.key_count())
692
722
 
693
723
    def test__read_nodes_no_size_one_page_reads_once(self):
694
724
        self.make_index(nodes=[(('key',), 'value', ())])
742
772
        # The entire index should have been read linearly.
743
773
        self.assertEqual([('readv', 'index', [(0, size)], False, None)],
744
774
            transport._activity)
745
 
        self.assertEqual(1514, size)
 
775
        self.assertEqual(1488, size)
746
776
 
747
777
    def test_validate_two_pages(self):
748
778
        builder = btree_index.BTreeBuilder(key_elements=2, reference_lists=2)
980
1010
            ])
981
1011
        self.assertEqual(set([]), index.external_references(0))
982
1012
 
 
1013
    def test__find_ancestors_one_page(self):
 
1014
        key1 = ('key-1',)
 
1015
        key2 = ('key-2',)
 
1016
        index = self.make_index(ref_lists=1, key_elements=1, nodes=[
 
1017
            (key1, 'value', ([key2],)),
 
1018
            (key2, 'value', ([],)),
 
1019
            ])
 
1020
        parent_map = {}
 
1021
        missing_keys = set()
 
1022
        search_keys = index._find_ancestors([key1], 0, parent_map, missing_keys)
 
1023
        self.assertEqual({key1: (key2,), key2: ()}, parent_map)
 
1024
        self.assertEqual(set(), missing_keys)
 
1025
        self.assertEqual(set(), search_keys)
 
1026
 
 
1027
    def test__find_ancestors_one_page_w_missing(self):
 
1028
        key1 = ('key-1',)
 
1029
        key2 = ('key-2',)
 
1030
        key3 = ('key-3',)
 
1031
        index = self.make_index(ref_lists=1, key_elements=1, nodes=[
 
1032
            (key1, 'value', ([key2],)),
 
1033
            (key2, 'value', ([],)),
 
1034
            ])
 
1035
        parent_map = {}
 
1036
        missing_keys = set()
 
1037
        search_keys = index._find_ancestors([key2, key3], 0, parent_map,
 
1038
                                            missing_keys)
 
1039
        self.assertEqual({key2: ()}, parent_map)
 
1040
        # we know that key3 is missing because we read the page that it would
 
1041
        # otherwise be on
 
1042
        self.assertEqual(set([key3]), missing_keys)
 
1043
        self.assertEqual(set(), search_keys)
 
1044
 
 
1045
    def test__find_ancestors_one_parent_missing(self):
 
1046
        key1 = ('key-1',)
 
1047
        key2 = ('key-2',)
 
1048
        key3 = ('key-3',)
 
1049
        index = self.make_index(ref_lists=1, key_elements=1, nodes=[
 
1050
            (key1, 'value', ([key2],)),
 
1051
            (key2, 'value', ([key3],)),
 
1052
            ])
 
1053
        parent_map = {}
 
1054
        missing_keys = set()
 
1055
        search_keys = index._find_ancestors([key1], 0, parent_map,
 
1056
                                            missing_keys)
 
1057
        self.assertEqual({key1: (key2,), key2: (key3,)}, parent_map)
 
1058
        self.assertEqual(set(), missing_keys)
 
1059
        # all we know is that key3 wasn't present on the page we were reading
 
1060
        # but if you look, the last key is key2 which comes before key3, so we
 
1061
        # don't know whether key3 would land on this page or not.
 
1062
        self.assertEqual(set([key3]), search_keys)
 
1063
        search_keys = index._find_ancestors(search_keys, 0, parent_map,
 
1064
                                            missing_keys)
 
1065
        # passing it back in, we are sure it is 'missing'
 
1066
        self.assertEqual({key1: (key2,), key2: (key3,)}, parent_map)
 
1067
        self.assertEqual(set([key3]), missing_keys)
 
1068
        self.assertEqual(set([]), search_keys)
 
1069
 
 
1070
    def test__find_ancestors_dont_search_known(self):
 
1071
        key1 = ('key-1',)
 
1072
        key2 = ('key-2',)
 
1073
        key3 = ('key-3',)
 
1074
        index = self.make_index(ref_lists=1, key_elements=1, nodes=[
 
1075
            (key1, 'value', ([key2],)),
 
1076
            (key2, 'value', ([key3],)),
 
1077
            (key3, 'value', ([],)),
 
1078
            ])
 
1079
        # We already know about key2, so we won't try to search for key3
 
1080
        parent_map = {key2: (key3,)}
 
1081
        missing_keys = set()
 
1082
        search_keys = index._find_ancestors([key1], 0, parent_map,
 
1083
                                            missing_keys)
 
1084
        self.assertEqual({key1: (key2,), key2: (key3,)}, parent_map)
 
1085
        self.assertEqual(set(), missing_keys)
 
1086
        self.assertEqual(set(), search_keys)
 
1087
 
 
1088
    def test__find_ancestors_multiple_pages(self):
 
1089
        # We need to use enough keys that we actually cause a split
 
1090
        start_time = 1249671539
 
1091
        email = "joebob@example.com"
 
1092
        nodes = []
 
1093
        ref_lists = ((),)
 
1094
        rev_keys = []
 
1095
        for i in xrange(400):
 
1096
            rev_id = '%s-%s-%s' % (email,
 
1097
                                   osutils.compact_date(start_time + i),
 
1098
                                   osutils.rand_chars(16))
 
1099
            rev_key = (rev_id,)
 
1100
            nodes.append((rev_key, 'value', ref_lists))
 
1101
            # We have a ref 'list' of length 1, with a list of parents, with 1
 
1102
            # parent which is a key
 
1103
            ref_lists = ((rev_key,),)
 
1104
            rev_keys.append(rev_key)
 
1105
        index = self.make_index(ref_lists=1, key_elements=1, nodes=nodes)
 
1106
        self.assertEqual(400, index.key_count())
 
1107
        self.assertEqual(3, len(index._row_offsets))
 
1108
        nodes = dict(index._read_nodes([1, 2]))
 
1109
        l1 = nodes[1]
 
1110
        l2 = nodes[2]
 
1111
        min_l2_key = l2.min_key
 
1112
        max_l1_key = l1.max_key
 
1113
        self.assertTrue(max_l1_key < min_l2_key)
 
1114
        parents_min_l2_key = l2.keys[min_l2_key][1][0]
 
1115
        self.assertEqual((l1.max_key,), parents_min_l2_key)
 
1116
        # Now, whatever key we select that would fall on the second page,
 
1117
        # should give us all the parents until the page break
 
1118
        key_idx = rev_keys.index(min_l2_key)
 
1119
        next_key = rev_keys[key_idx+1]
 
1120
        # So now when we get the parent map, we should get the key we are
 
1121
        # looking for, min_l2_key, and then a reference to go look for the
 
1122
        # parent of that key
 
1123
        parent_map = {}
 
1124
        missing_keys = set()
 
1125
        search_keys = index._find_ancestors([next_key], 0, parent_map,
 
1126
                                            missing_keys)
 
1127
        self.assertEqual([min_l2_key, next_key], sorted(parent_map))
 
1128
        self.assertEqual(set(), missing_keys)
 
1129
        self.assertEqual(set([max_l1_key]), search_keys)
 
1130
        parent_map = {}
 
1131
        search_keys = index._find_ancestors([max_l1_key], 0, parent_map,
 
1132
                                            missing_keys)
 
1133
        self.assertEqual(sorted(l1.keys), sorted(parent_map))
 
1134
        self.assertEqual(set(), missing_keys)
 
1135
        self.assertEqual(set(), search_keys)
 
1136
 
 
1137
    def test__find_ancestors_empty_index(self):
 
1138
        index = self.make_index(ref_lists=1, key_elements=1, nodes=[])
 
1139
        parent_map = {}
 
1140
        missing_keys = set()
 
1141
        search_keys = index._find_ancestors([('one',), ('two',)], 0, parent_map,
 
1142
                                            missing_keys)
 
1143
        self.assertEqual(set(), search_keys)
 
1144
        self.assertEqual({}, parent_map)
 
1145
        self.assertEqual(set([('one',), ('two',)]), missing_keys)
 
1146
 
 
1147
    def test_supports_unlimited_cache(self):
 
1148
        builder = btree_index.BTreeBuilder(reference_lists=0, key_elements=1)
 
1149
        # We need enough nodes to cause a page split (so we have both an
 
1150
        # internal node and a couple leaf nodes. 500 seems to be enough.)
 
1151
        nodes = self.make_nodes(500, 1, 0)
 
1152
        for node in nodes:
 
1153
            builder.add_node(*node)
 
1154
        stream = builder.finish()
 
1155
        trans = get_transport(self.get_url())
 
1156
        size = trans.put_file('index', stream)
 
1157
        index = btree_index.BTreeGraphIndex(trans, 'index', size)
 
1158
        self.assertEqual(500, index.key_count())
 
1159
        # We have an internal node
 
1160
        self.assertEqual(2, len(index._row_lengths))
 
1161
        # We have at least 2 leaf nodes
 
1162
        self.assertTrue(index._row_lengths[-1] >= 2)
 
1163
        self.assertIsInstance(index._leaf_node_cache, lru_cache.LRUCache)
 
1164
        self.assertEqual(btree_index._NODE_CACHE_SIZE,
 
1165
                         index._leaf_node_cache._max_cache)
 
1166
        self.assertIsInstance(index._internal_node_cache, fifo_cache.FIFOCache)
 
1167
        self.assertEqual(100, index._internal_node_cache._max_cache)
 
1168
        # No change if unlimited_cache=False is passed
 
1169
        index = btree_index.BTreeGraphIndex(trans, 'index', size,
 
1170
                                            unlimited_cache=False)
 
1171
        self.assertIsInstance(index._leaf_node_cache, lru_cache.LRUCache)
 
1172
        self.assertEqual(btree_index._NODE_CACHE_SIZE,
 
1173
                         index._leaf_node_cache._max_cache)
 
1174
        self.assertIsInstance(index._internal_node_cache, fifo_cache.FIFOCache)
 
1175
        self.assertEqual(100, index._internal_node_cache._max_cache)
 
1176
        index = btree_index.BTreeGraphIndex(trans, 'index', size,
 
1177
                                            unlimited_cache=True)
 
1178
        self.assertIsInstance(index._leaf_node_cache, dict)
 
1179
        self.assertIs(type(index._internal_node_cache), dict)
 
1180
        # Exercise the lookup code
 
1181
        entries = set(index.iter_entries([n[0] for n in nodes]))
 
1182
        self.assertEqual(500, len(entries))
 
1183
 
983
1184
 
984
1185
class TestBTreeNodes(BTreeTestCase):
985
1186
 
986
 
    def restore_parser(self):
987
 
        btree_index._btree_serializer = self.saved_parser
988
 
 
989
1187
    def setUp(self):
990
1188
        BTreeTestCase.setUp(self)
991
 
        self.saved_parser = btree_index._btree_serializer
992
 
        self.addCleanup(self.restore_parser)
993
 
        btree_index._btree_serializer = self.parse_btree
 
1189
        self.overrideAttr(btree_index, '_btree_serializer', self.parse_btree)
994
1190
 
995
1191
    def test_LeafNode_1_0(self):
996
1192
        node_bytes = ("type=leaf\n"
1107
1303
    def test_exists(self):
1108
1304
        # This is just to let the user know if they don't have the feature
1109
1305
        # available
1110
 
        self.requireFeature(CompiledBtreeParserFeature)
 
1306
        self.requireFeature(compiled_btreeparser_feature)
1111
1307
 
1112
1308
 
1113
1309
class TestMultiBisectRight(tests.TestCase):