~bzr-pqm/bzr/bzr.dev


Viewing changes to bzrlib/tests/test_groupcompress.py

  • Committer: Patch Queue Manager
  • Date: 2016-04-21 05:06:57 UTC
  • mfrom: (6603.4.1 bzr)
  • Revision ID: pqm@pqm.ubuntu.com-20160421050657-ygnzfybewvudf1j9
(richard-wilbur) Use initial_comment as commit_message for lp_propose. (Shawn Wang)

@@ -1,4 +1,4 @@
-# Copyright (C) 2008, 2009, 2010 Canonical Ltd
+# Copyright (C) 2008-2011 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -20,6 +20,7 @@
 
 from bzrlib import (
     btree_index,
+    config,
     groupcompress,
     errors,
     index as _mod_index,
@@ -30,19 +31,20 @@
     )
 from bzrlib.osutils import sha_string
 from bzrlib.tests.test__groupcompress import compiled_groupcompress_feature
-
-
-def load_tests(standard_tests, module, loader):
-    """Parameterize tests for all versions of groupcompress."""
-    to_adapt, result = tests.split_suite_by_condition(
-        standard_tests, tests.condition_isinstance(TestAllGroupCompressors))
+from bzrlib.tests.scenarios import load_tests_apply_scenarios
+
+
+def group_compress_implementation_scenarios():
     scenarios = [
         ('python', {'compressor': groupcompress.PythonGroupCompressor}),
         ]
     if compiled_groupcompress_feature.available():
         scenarios.append(('C',
             {'compressor': groupcompress.PyrexGroupCompressor}))
-    return tests.multiply_tests(to_adapt, scenarios, result)
+    return scenarios
+
+
+load_tests = load_tests_apply_scenarios
 
 
 class TestGroupCompressor(tests.TestCase):
@@ -66,7 +68,8 @@
 class TestAllGroupCompressors(TestGroupCompressor):
     """Tests for GroupCompressor"""
 
-    compressor = None # Set by multiply_tests
+    scenarios = group_compress_implementation_scenarios()
+    compressor = None # Set by scenario
 
     def test_empty_delta(self):
         compressor = self.compressor()
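For context on the hunk above: load_tests_apply_scenarios multiplies every test in a class by the (name, attribute_dict) pairs listed in its scenarios attribute, applying each dict as instance attributes on a per-scenario copy, which is why compressor is now "Set by scenario". A minimal sketch of that multiplication pattern, using plain unittest and hypothetical names rather than bzrlib's actual loader code:

    import copy
    import unittest

    def multiply_by_scenarios(test_case_class, scenarios):
        # One copy of every test method per scenario, with the scenario's
        # attributes (e.g. 'compressor') applied to that copy.
        suite = unittest.TestSuite()
        loader = unittest.TestLoader()
        for name, attrs in scenarios:
            for test in loader.loadTestsFromTestCase(test_case_class):
                test = copy.copy(test)
                for attr, value in attrs.items():
                    setattr(test, attr, value)
                suite.addTest(test)
        return suite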
@@ -347,6 +350,30 @@
         self.assertEqual(z_content, block._z_content)
         self.assertEqual(content, block._content)
 
+    def test_to_chunks(self):
+        content_chunks = ['this is some content\n',
+                          'this content will be compressed\n']
+        content_len = sum(map(len, content_chunks))
+        content = ''.join(content_chunks)
+        gcb = groupcompress.GroupCompressBlock()
+        gcb.set_chunked_content(content_chunks, content_len)
+        total_len, block_chunks = gcb.to_chunks()
+        block_bytes = ''.join(block_chunks)
+        self.assertEqual(gcb._z_content_length, len(gcb._z_content))
+        self.assertEqual(total_len, len(block_bytes))
+        self.assertEqual(gcb._content_length, content_len)
+        expected_header =('gcb1z\n' # group compress block v1 zlib
+                          '%d\n' # Length of compressed content
+                          '%d\n' # Length of uncompressed content
+                         ) % (gcb._z_content_length, gcb._content_length)
+        # The first chunk should be the header chunk. It is small, fixed size,
+        # and there is no compelling reason to split it up
+        self.assertEqual(expected_header, block_chunks[0])
+        self.assertStartsWith(block_bytes, expected_header)
+        remaining_bytes = block_bytes[len(expected_header):]
+        raw_bytes = zlib.decompress(remaining_bytes)
+        self.assertEqual(content, raw_bytes)
+
     def test_to_bytes(self):
         content = ('this is some content\n'
                    'this content will be compressed\n')
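The new test_to_chunks pins down the serialized block layout: an ASCII header of 'gcb1z\n' plus the compressed and uncompressed lengths, followed by a single zlib stream. A standalone sketch of reading that layout with plain zlib (Python 2 str semantics, matching the surrounding tests; not using GroupCompressBlock itself):

    import zlib

    content = 'this is some content\nthis content will be compressed\n'
    z_content = zlib.compress(content)
    block = 'gcb1z\n%d\n%d\n%s' % (len(z_content), len(content), z_content)

    # The first three newlines terminate the header fields; the rest is the
    # zlib body, exactly what test_to_chunks asserts about block_chunks[0].
    magic, z_len, c_len, body = block.split('\n', 3)
    assert magic == 'gcb1z'
    assert int(z_len) == len(body)
    assert int(c_len) == len(content)
    assert zlib.decompress(body) == content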
@@ -389,7 +416,7 @@
         z_content = zlib.compress(content)
         self.assertEqual(57182, len(z_content))
         block = groupcompress.GroupCompressBlock()
-        block._z_content = z_content
+        block._z_content_chunks = (z_content,)
         block._z_content_length = len(z_content)
         block._compressor_name = 'zlib'
         block._content_length = 158634
@@ -434,7 +461,7 @@
         z_content = zlib.compress(content)
         self.assertEqual(57182, len(z_content))
         block = groupcompress.GroupCompressBlock()
-        block._z_content = z_content
+        block._z_content_chunks = (z_content,)
         block._z_content_length = len(z_content)
         block._compressor_name = 'zlib'
         block._content_length = 158634
@@ -526,6 +553,23 @@
                     'as-requested', False)]
         self.assertEqual([('b',), ('a',), ('d',), ('c',)], keys)
 
+    def test_get_record_stream_max_bytes_to_index_default(self):
+        vf = self.make_test_vf(True, dir='source')
+        vf.add_lines(('a',), (), ['lines\n'])
+        vf.writer.end()
+        record = vf.get_record_stream([('a',)], 'unordered', True).next()
+        self.assertEqual(vf._DEFAULT_COMPRESSOR_SETTINGS,
+                         record._manager._get_compressor_settings())
+
+    def test_get_record_stream_accesses_compressor_settings(self):
+        vf = self.make_test_vf(True, dir='source')
+        vf.add_lines(('a',), (), ['lines\n'])
+        vf.writer.end()
+        vf._max_bytes_to_index = 1234
+        record = vf.get_record_stream([('a',)], 'unordered', True).next()
+        self.assertEqual(dict(max_bytes_to_index=1234),
+                         record._manager._get_compressor_settings())
+
     def test_insert_record_stream_reuses_blocks(self):
         vf = self.make_test_vf(True, dir='source')
         def grouped_stream(revision_ids, first_parents=()):
@@ -744,6 +788,48 @@
         self.assertEqual(0, len(vf._group_cache))
 
 
+class TestGroupCompressConfig(tests.TestCaseWithTransport):
+
+    def make_test_vf(self):
+        t = self.get_transport('.')
+        t.ensure_base()
+        factory = groupcompress.make_pack_factory(graph=True,
+            delta=False, keylength=1, inconsistency_fatal=True)
+        vf = factory(t)
+        self.addCleanup(groupcompress.cleanup_pack_group, vf)
+        return vf
+
+    def test_max_bytes_to_index_default(self):
+        vf = self.make_test_vf()
+        gc = vf._make_group_compressor()
+        self.assertEqual(vf._DEFAULT_MAX_BYTES_TO_INDEX,
+                         vf._max_bytes_to_index)
+        if isinstance(gc, groupcompress.PyrexGroupCompressor):
+            self.assertEqual(vf._DEFAULT_MAX_BYTES_TO_INDEX,
+                             gc._delta_index._max_bytes_to_index)
+
+    def test_max_bytes_to_index_in_config(self):
+        c = config.GlobalConfig()
+        c.set_user_option('bzr.groupcompress.max_bytes_to_index', '10000')
+        vf = self.make_test_vf()
+        gc = vf._make_group_compressor()
+        self.assertEqual(10000, vf._max_bytes_to_index)
+        if isinstance(gc, groupcompress.PyrexGroupCompressor):
+            self.assertEqual(10000, gc._delta_index._max_bytes_to_index)
+
+    def test_max_bytes_to_index_bad_config(self):
+        c = config.GlobalConfig()
+        c.set_user_option('bzr.groupcompress.max_bytes_to_index', 'boogah')
+        vf = self.make_test_vf()
+        # TODO: This is triggering a warning, we might want to trap and make
+        #       sure it is readable.
+        gc = vf._make_group_compressor()
+        self.assertEqual(vf._DEFAULT_MAX_BYTES_TO_INDEX,
+                         vf._max_bytes_to_index)
+        if isinstance(gc, groupcompress.PyrexGroupCompressor):
+            self.assertEqual(vf._DEFAULT_MAX_BYTES_TO_INDEX,
+                             gc._delta_index._max_bytes_to_index)
+
 
 class StubGCVF(object):
     def __init__(self, canned_get_blocks=None):
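For completeness, the configuration knob these new tests exercise is an ordinary bzr user option; outside the test suite it is set through the same GlobalConfig call the tests use (option name and value taken directly from test_max_bytes_to_index_in_config above):

    from bzrlib import config

    c = config.GlobalConfig()
    # Non-integer values fall back to the built-in default, as
    # test_max_bytes_to_index_bad_config verifies (with a warning).
    c.set_user_option('bzr.groupcompress.max_bytes_to_index', '10000')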
@@ -1020,6 +1106,54 @@
             self.assertEqual(self._texts[record.key],
                              record.get_bytes_as('fulltext'))
 
+    def test_manager_default_compressor_settings(self):
+        locations, old_block = self.make_block(self._texts)
+        manager = groupcompress._LazyGroupContentManager(old_block)
+        gcvf = groupcompress.GroupCompressVersionedFiles
+        # It doesn't greedily evaluate _max_bytes_to_index
+        self.assertIs(None, manager._compressor_settings)
+        self.assertEqual(gcvf._DEFAULT_COMPRESSOR_SETTINGS,
+                         manager._get_compressor_settings())
+
+    def test_manager_custom_compressor_settings(self):
+        locations, old_block = self.make_block(self._texts)
+        called = []
+        def compressor_settings():
+            called.append('called')
+            return (10,)
+        manager = groupcompress._LazyGroupContentManager(old_block,
+            get_compressor_settings=compressor_settings)
+        gcvf = groupcompress.GroupCompressVersionedFiles
+        # It doesn't greedily evaluate compressor_settings
+        self.assertIs(None, manager._compressor_settings)
+        self.assertEqual((10,), manager._get_compressor_settings())
+        self.assertEqual((10,), manager._get_compressor_settings())
+        self.assertEqual((10,), manager._compressor_settings)
+        # Only called 1 time
+        self.assertEqual(['called'], called)
+
+    def test__rebuild_handles_compressor_settings(self):
+        if not isinstance(groupcompress.GroupCompressor,
+                          groupcompress.PyrexGroupCompressor):
+            raise tests.TestNotApplicable('pure-python compressor'
+                ' does not handle compressor_settings')
+        locations, old_block = self.make_block(self._texts)
+        manager = groupcompress._LazyGroupContentManager(old_block,
+            get_compressor_settings=lambda: dict(max_bytes_to_index=32))
+        gc = manager._make_group_compressor()
+        self.assertEqual(32, gc._delta_index._max_bytes_to_index)
+        self.add_key_to_manager(('key3',), locations, old_block, manager)
+        self.add_key_to_manager(('key4',), locations, old_block, manager)
+        action, last_byte, total_bytes = manager._check_rebuild_action()
+        self.assertEqual('rebuild', action)
+        manager._rebuild_block()
+        new_block = manager._block
+        self.assertIsNot(old_block, new_block)
+        # Because of the new max_bytes_to_index, we do a poor job of
+        # rebuilding. This is a side-effect of the change, but at least it does
+        # show the setting had an effect.
+        self.assertTrue(old_block._content_length < new_block._content_length)
+
     def test_check_is_well_utilized_all_keys(self):
         block, manager = self.make_block_and_full_manager(self._texts)
         self.assertFalse(manager.check_is_well_utilized())
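The behaviour these manager tests pin down — the settings callable is not invoked at construction, is called at most once, and its result is cached — is a plain memoized-callable pattern. A rough sketch of that pattern under hypothetical names (not _LazyGroupContentManager's actual code):

    class LazyCompressorSettings(object):
        """Defer a possibly expensive settings lookup until first use."""

        DEFAULT_SETTINGS = (10 * 1024 * 1024,)  # placeholder default, assumed

        def __init__(self, get_settings=None):
            self._get_settings = get_settings
            self._settings = None            # nothing evaluated yet

        def settings(self):
            if self._settings is None:
                if self._get_settings is None:
                    self._settings = self.DEFAULT_SETTINGS
                else:
                    self._settings = self._get_settings()  # called at most once
            return self._settings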
@@ -1066,3 +1200,24 @@
         # consumption
         self.add_key_to_manager(('key4',), locations, block, manager)
         self.assertTrue(manager.check_is_well_utilized())
+
+
+class Test_GCBuildDetails(tests.TestCase):
+
+    def test_acts_like_tuple(self):
+        # _GCBuildDetails inlines some of the data that used to be spread out
+        # across a bunch of tuples
+        bd = groupcompress._GCBuildDetails((('parent1',), ('parent2',)),
+            ('INDEX', 10, 20, 0, 5))
+        self.assertEqual(4, len(bd))
+        self.assertEqual(('INDEX', 10, 20, 0, 5), bd[0])
+        self.assertEqual(None, bd[1]) # Compression Parent is always None
+        self.assertEqual((('parent1',), ('parent2',)), bd[2])
+        self.assertEqual(('group', None), bd[3]) # Record details
+
+    def test__repr__(self):
+        bd = groupcompress._GCBuildDetails((('parent1',), ('parent2',)),
+            ('INDEX', 10, 20, 0, 5))
+        self.assertEqual("_GCBuildDetails(('INDEX', 10, 20, 0, 5),"
+                         " (('parent1',), ('parent2',)))",
+                         repr(bd))
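The "acts like a tuple" behaviour asserted by Test_GCBuildDetails is the usual pattern of implementing __len__ and __getitem__ so a compact object can stand in for the older 4-tuple of build details. A rough sketch of that pattern (illustrative only, not the real _GCBuildDetails implementation):

    class TupleLikeBuildDetails(object):
        """Compact object exposing (index_memo, None, parents, ('group', None))."""

        __slots__ = ('_parents', '_index_memo')

        def __init__(self, parents, index_memo):
            self._parents = parents
            self._index_memo = index_memo

        def __len__(self):
            return 4

        def __getitem__(self, offset):
            # Positional layout matching the assertions in test_acts_like_tuple.
            return (self._index_memo, None, self._parents, ('group', None))[offset]

        def __repr__(self):
            return '%s(%s, %s)' % (self.__class__.__name__,
                                   self._index_memo, self._parents)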