~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/tests/test_repository.py

  • Committer: John Arbash Meinel
  • Date: 2009-07-29 21:35:05 UTC
  • mfrom: (4576 +trunk)
  • mto: This revision was merged to the branch mainline in revision 4577.
  • Revision ID: john@arbash-meinel.com-20090729213505-tkqsvy1zfpocu75w
Merge bzr.dev 4576 in prep for NEWS

@@ -31,7 +31,10 @@
                            UnknownFormatError,
                            UnsupportedFormatError,
                            )
-from bzrlib import graph
+from bzrlib import (
+    graph,
+    tests,
+    )
 from bzrlib.branchbuilder import BranchBuilder
 from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
 from bzrlib.index import GraphIndex, InMemoryGraphIndex
@@ -670,10 +673,14 @@
         self.assertFalse(repo._format.supports_external_lookups)
 
 
-class TestDevelopment6(TestCaseWithTransport):
+class Test2a(TestCaseWithTransport):
+
+    def test_format_pack_compresses_True(self):
+        repo = self.make_repository('repo', format='2a')
+        self.assertTrue(repo._format.pack_compresses)
 
     def test_inventories_use_chk_map_with_parent_base_dict(self):
-        tree = self.make_branch_and_tree('repo', format="development6-rich-root")
+        tree = self.make_branch_and_tree('repo', format="2a")
         revid = tree.commit("foo")
         tree.lock_read()
         self.addCleanup(tree.unlock)
@@ -685,6 +692,182 @@
         self.assertEqual(65536,
             inv.parent_id_basename_to_file_id._root_node.maximum_size)
 
+    def test_autopack_unchanged_chk_nodes(self):
+        # at 20 unchanged commits, chk pages are packed that are split into
+        # two groups such that the new pack being made doesn't have all its
+        # pages in the source packs (though they are in the repository).
+        tree = self.make_branch_and_tree('tree', format='2a')
+        for pos in range(20):
+            tree.commit(str(pos))
+
+    def test_pack_with_hint(self):
+        tree = self.make_branch_and_tree('tree', format='2a')
+        # 1 commit to leave untouched
+        tree.commit('1')
+        to_keep = tree.branch.repository._pack_collection.names()
+        # 2 to combine
+        tree.commit('2')
+        tree.commit('3')
+        all = tree.branch.repository._pack_collection.names()
+        combine = list(set(all) - set(to_keep))
+        self.assertLength(3, all)
+        self.assertLength(2, combine)
+        tree.branch.repository.pack(hint=combine)
+        final = tree.branch.repository._pack_collection.names()
+        self.assertLength(2, final)
+        self.assertFalse(combine[0] in final)
+        self.assertFalse(combine[1] in final)
+        self.assertSubset(to_keep, final)
+
+    def test_stream_source_to_gc(self):
+        source = self.make_repository('source', format='2a')
+        target = self.make_repository('target', format='2a')
+        stream = source._get_source(target._format)
+        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)
+
+    def test_stream_source_to_non_gc(self):
+        source = self.make_repository('source', format='2a')
+        target = self.make_repository('target', format='rich-root-pack')
+        stream = source._get_source(target._format)
+        # We don't want the child GroupCHKStreamSource
+        self.assertIs(type(stream), repository.StreamSource)
+
+    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
+        source_builder = self.make_branch_builder('source',
+                            format='2a')
+        # We have to build a fairly large tree, so that we are sure the chk
+        # pages will have split into multiple pages.
+        entries = [('add', ('', 'a-root-id', 'directory', None))]
+        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
+            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
+                fname = i + j
+                fid = fname + '-id'
+                content = 'content for %s\n' % (fname,)
+                entries.append(('add', (fname, fid, 'file', content)))
+        source_builder.start_series()
+        source_builder.build_snapshot('rev-1', None, entries)
+        # Now change a few of them, so we get a few new pages for the second
+        # revision
+        source_builder.build_snapshot('rev-2', ['rev-1'], [
+            ('modify', ('aa-id', 'new content for aa-id\n')),
+            ('modify', ('cc-id', 'new content for cc-id\n')),
+            ('modify', ('zz-id', 'new content for zz-id\n')),
+            ])
+        source_builder.finish_series()
+        source_branch = source_builder.get_branch()
+        source_branch.lock_read()
+        self.addCleanup(source_branch.unlock)
+        target = self.make_repository('target', format='2a')
+        source = source_branch.repository._get_source(target._format)
+        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)
+
+        # On a regular pass, getting the inventories and chk pages for rev-2
+        # would only get the newly created chk pages
+        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
+                                    set(['rev-2']))
+        simple_chk_records = []
+        for vf_name, substream in source.get_stream(search):
+            if vf_name == 'chk_bytes':
+                for record in substream:
+                    simple_chk_records.append(record.key)
+            else:
+                for _ in substream:
+                    continue
+        # 3 pages, the root (InternalNode), + 2 pages which actually changed
+        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
+                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
+                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
+                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
+                         simple_chk_records)
+        # Now, when we do a similar call using 'get_stream_for_missing_keys'
+        # we should get a much larger set of pages.
+        missing = [('inventories', 'rev-2')]
+        full_chk_records = []
+        for vf_name, substream in source.get_stream_for_missing_keys(missing):
+            if vf_name == 'inventories':
+                for record in substream:
+                    self.assertEqual(('rev-2',), record.key)
+            elif vf_name == 'chk_bytes':
+                for record in substream:
+                    full_chk_records.append(record.key)
+            else:
+                self.fail('Should not be getting a stream of %s' % (vf_name,))
+        # We have 257 records now. This is because we have 1 root page, and 256
+        # leaf pages in a complete listing.
+        self.assertEqual(257, len(full_chk_records))
+        self.assertSubset(simple_chk_records, full_chk_records)
+
+    def test_inconsistency_fatal(self):
+        repo = self.make_repository('repo', format='2a')
+        self.assertTrue(repo.revisions._index._inconsistency_fatal)
+        self.assertFalse(repo.texts._index._inconsistency_fatal)
+        self.assertFalse(repo.inventories._index._inconsistency_fatal)
+        self.assertFalse(repo.signatures._index._inconsistency_fatal)
+        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
+
+
+class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):
+
+    def test_source_to_exact_pack_092(self):
+        source = self.make_repository('source', format='pack-0.92')
+        target = self.make_repository('target', format='pack-0.92')
+        stream_source = source._get_source(target._format)
+        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+
+    def test_source_to_exact_pack_rich_root_pack(self):
+        source = self.make_repository('source', format='rich-root-pack')
+        target = self.make_repository('target', format='rich-root-pack')
+        stream_source = source._get_source(target._format)
+        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+
+    def test_source_to_exact_pack_19(self):
+        source = self.make_repository('source', format='1.9')
+        target = self.make_repository('target', format='1.9')
+        stream_source = source._get_source(target._format)
+        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+
+    def test_source_to_exact_pack_19_rich_root(self):
+        source = self.make_repository('source', format='1.9-rich-root')
+        target = self.make_repository('target', format='1.9-rich-root')
+        stream_source = source._get_source(target._format)
+        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+
+    def test_source_to_remote_exact_pack_19(self):
+        trans = self.make_smart_server('target')
+        trans.ensure_base()
+        source = self.make_repository('source', format='1.9')
+        target = self.make_repository('target', format='1.9')
+        target = repository.Repository.open(trans.base)
+        stream_source = source._get_source(target._format)
+        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+
+    def test_stream_source_to_non_exact(self):
+        source = self.make_repository('source', format='pack-0.92')
+        target = self.make_repository('target', format='1.9')
+        stream = source._get_source(target._format)
+        self.assertIs(type(stream), repository.StreamSource)
+
+    def test_stream_source_to_non_exact_rich_root(self):
+        source = self.make_repository('source', format='1.9')
+        target = self.make_repository('target', format='1.9-rich-root')
+        stream = source._get_source(target._format)
+        self.assertIs(type(stream), repository.StreamSource)
+
+    def test_source_to_remote_non_exact_pack_19(self):
+        trans = self.make_smart_server('target')
+        trans.ensure_base()
+        source = self.make_repository('source', format='1.9')
+        target = self.make_repository('target', format='1.6')
+        target = repository.Repository.open(trans.base)
+        stream_source = source._get_source(target._format)
+        self.assertIs(type(stream_source), repository.StreamSource)
+
+    def test_stream_source_to_knit(self):
+        source = self.make_repository('source', format='pack-0.92')
+        target = self.make_repository('target', format='dirstate')
+        stream = source._get_source(target._format)
+        self.assertIs(type(stream), repository.StreamSource)
+
 
 
 class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
     """Tests for _find_parent_ids_of_revisions."""
@@ -825,6 +1008,12 @@
         """
         broken_repo = self.make_broken_repository()
         empty_repo = self.make_repository('empty-repo')
+        # See bug https://bugs.launchpad.net/bzr/+bug/389141 for information
+        # about why this was turned into expectFailure
+        self.expectFailure('new Stream fetch fills in missing compression'
+           ' parents (bug #389141)',
+           self.assertRaises, (errors.RevisionNotPresent, errors.BzrCheckError),
+                              empty_repo.fetch, broken_repo)
         self.assertRaises((errors.RevisionNotPresent, errors.BzrCheckError),
                           empty_repo.fetch, broken_repo)
 
@@ -1206,82 +1395,81 @@
         self.assertTrue(new_pack.signature_index._optimize_for_size)
 
 
-class TestGCCHKPackCollection(TestCaseWithTransport):
-
-    def test_stream_source_to_gc(self):
-        source = self.make_repository('source', format='development6-rich-root')
-        target = self.make_repository('target', format='development6-rich-root')
-        stream = source._get_source(target._format)
-        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)
-
-    def test_stream_source_to_non_gc(self):
-        source = self.make_repository('source', format='development6-rich-root')
-        target = self.make_repository('target', format='rich-root-pack')
-        stream = source._get_source(target._format)
-        # We don't want the child GroupCHKStreamSource
-        self.assertIs(type(stream), repository.StreamSource)
-
-    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
-        source_builder = self.make_branch_builder('source',
-                            format='development6-rich-root')
-        # We have to build a fairly large tree, so that we are sure the chk
-        # pages will have split into multiple pages.
-        entries = [('add', ('', 'a-root-id', 'directory', None))]
-        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
-            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
-                fname = i + j
-                fid = fname + '-id'
-                content = 'content for %s\n' % (fname,)
-                entries.append(('add', (fname, fid, 'file', content)))
-        source_builder.start_series()
-        source_builder.build_snapshot('rev-1', None, entries)
-        # Now change a few of them, so we get a few new pages for the second
-        # revision
-        source_builder.build_snapshot('rev-2', ['rev-1'], [
-            ('modify', ('aa-id', 'new content for aa-id\n')),
-            ('modify', ('cc-id', 'new content for cc-id\n')),
-            ('modify', ('zz-id', 'new content for zz-id\n')),
-            ])
-        source_builder.finish_series()
-        source_branch = source_builder.get_branch()
-        source_branch.lock_read()
-        self.addCleanup(source_branch.unlock)
-        target = self.make_repository('target', format='development6-rich-root')
-        source = source_branch.repository._get_source(target._format)
-        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)
-
-        # On a regular pass, getting the inventories and chk pages for rev-2
-        # would only get the newly created chk pages
-        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
-                                    set(['rev-2']))
-        simple_chk_records = []
-        for vf_name, substream in source.get_stream(search):
-            if vf_name == 'chk_bytes':
-                for record in substream:
-                    simple_chk_records.append(record.key)
-            else:
-                for _ in substream:
-                    continue
-        # 3 pages, the root (InternalNode), + 2 pages which actually changed
-        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
-                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
-                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
-                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
-                         simple_chk_records)
-        # Now, when we do a similar call using 'get_stream_for_missing_keys'
-        # we should get a much larger set of pages.
-        missing = [('inventories', 'rev-2')]
-        full_chk_records = []
-        for vf_name, substream in source.get_stream_for_missing_keys(missing):
-            if vf_name == 'inventories':
-                for record in substream:
-                    self.assertEqual(('rev-2',), record.key)
-            elif vf_name == 'chk_bytes':
-                for record in substream:
-                    full_chk_records.append(record.key)
-            else:
-                self.fail('Should not be getting a stream of %s' % (vf_name,))
-        # We have 257 records now. This is because we have 1 root page, and 256
-        # leaf pages in a complete listing.
-        self.assertEqual(257, len(full_chk_records))
-        self.assertSubset(simple_chk_records, full_chk_records)
+class TestCrossFormatPacks(TestCaseWithTransport):
+
+    def log_pack(self, hint=None):
+        self.calls.append(('pack', hint))
+        self.orig_pack(hint=hint)
+        if self.expect_hint:
+            self.assertTrue(hint)
+
+    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
+        self.expect_hint = expect_pack_called
+        self.calls = []
+        source_tree = self.make_branch_and_tree('src', format=src_fmt)
+        source_tree.lock_write()
+        self.addCleanup(source_tree.unlock)
+        tip = source_tree.commit('foo')
+        target = self.make_repository('target', format=target_fmt)
+        target.lock_write()
+        self.addCleanup(target.unlock)
+        source = source_tree.branch.repository._get_source(target._format)
+        self.orig_pack = target.pack
+        target.pack = self.log_pack
+        search = target.search_missing_revision_ids(
+            source_tree.branch.repository, tip)
+        stream = source.get_stream(search)
+        from_format = source_tree.branch.repository._format
+        sink = target._get_sink()
+        sink.insert_stream(stream, from_format, [])
+        if expect_pack_called:
+            self.assertLength(1, self.calls)
+        else:
+            self.assertLength(0, self.calls)
+
+    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
+        self.expect_hint = expect_pack_called
+        self.calls = []
+        source_tree = self.make_branch_and_tree('src', format=src_fmt)
+        source_tree.lock_write()
+        self.addCleanup(source_tree.unlock)
+        tip = source_tree.commit('foo')
+        target = self.make_repository('target', format=target_fmt)
+        target.lock_write()
+        self.addCleanup(target.unlock)
+        source = source_tree.branch.repository
+        self.orig_pack = target.pack
+        target.pack = self.log_pack
+        target.fetch(source)
+        if expect_pack_called:
+            self.assertLength(1, self.calls)
+        else:
+            self.assertLength(0, self.calls)
+
+    def test_sink_format_hint_no(self):
+        # When the target format says packing makes no difference, pack is not
+        # called.
+        self.run_stream('1.9', 'rich-root-pack', False)
+
+    def test_sink_format_hint_yes(self):
+        # When the target format says packing makes a difference, pack is
+        # called.
+        self.run_stream('1.9', '2a', True)
+
+    def test_sink_format_same_no(self):
+        # When the formats are the same, pack is not called.
+        self.run_stream('2a', '2a', False)
+
+    def test_IDS_format_hint_no(self):
+        # When the target format says packing makes no difference, pack is not
+        # called.
+        self.run_fetch('1.9', 'rich-root-pack', False)
+
+    def test_IDS_format_hint_yes(self):
+        # When the target format says packing makes a difference, pack is
+        # called.
+        self.run_fetch('1.9', '2a', True)
+
+    def test_IDS_format_same_no(self):
+        # When the formats are the same, pack is not called.
+        self.run_fetch('2a', '2a', False)
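
The TestCrossFormatPacks helpers added above detect whether pack() was called during a stream insert or fetch by swapping target.pack for a logging wrapper. The same instance-attribute-shadowing trick can be sketched without bzrlib at all; Repo, calls and log_pack below are illustrative names, not bzrlib API.

# Stand-alone sketch of the call-logging pattern used by run_stream/run_fetch.
class Repo(object):
    def pack(self, hint=None):
        return 'packed with hint=%r' % (hint,)

repo = Repo()
calls = []
orig_pack = repo.pack                 # keep the bound method to delegate to

def log_pack(hint=None):
    calls.append(('pack', hint))      # record the call, like self.calls
    return orig_pack(hint=hint)

repo.pack = log_pack                  # instance attribute shadows the method
repo.pack(hint=['pack-2', 'pack-3'])  # goes through the logging wrapper
assert calls == [('pack', ['pack-2', 'pack-3'])]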