~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/tests/test_repository.py

  • Committer: Canonical.com Patch Queue Manager
  • Date: 2009-06-18 19:13:45 UTC
  • mfrom: (4360.4.17 1.15-pack-source)
  • Revision ID: pqm@pqm.ubuntu.com-20090618191345-vgsr5zv78uesqsdg
(jam) Get rid of InterPackRepository in favor of PackStreamSource.

Show diffs side-by-side

added added

removed removed

Lines of Context:
31
31
                           UnknownFormatError,
32
32
                           UnsupportedFormatError,
33
33
                           )
34
 
from bzrlib import graph
 
34
from bzrlib import (
 
35
    graph,
 
36
    tests,
 
37
    )
35
38
from bzrlib.branchbuilder import BranchBuilder
36
39
from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
37
40
from bzrlib.index import GraphIndex, InMemoryGraphIndex
685
688
        self.assertEqual(65536,
686
689
            inv.parent_id_basename_to_file_id._root_node.maximum_size)
687
690
 
 
691
    def test_stream_source_to_gc(self):
 
692
        source = self.make_repository('source', format='development6-rich-root')
 
693
        target = self.make_repository('target', format='development6-rich-root')
 
694
        stream = source._get_source(target._format)
 
695
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)
 
696
 
 
697
    def test_stream_source_to_non_gc(self):
 
698
        source = self.make_repository('source', format='development6-rich-root')
 
699
        target = self.make_repository('target', format='rich-root-pack')
 
700
        stream = source._get_source(target._format)
 
701
        # We don't want the child GroupCHKStreamSource
 
702
        self.assertIs(type(stream), repository.StreamSource)
 
703
 
 
704
    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
 
705
        source_builder = self.make_branch_builder('source',
 
706
                            format='development6-rich-root')
 
707
        # We have to build a fairly large tree, so that we are sure the chk
 
708
        # pages will have split into multiple pages.
 
709
        entries = [('add', ('', 'a-root-id', 'directory', None))]
 
710
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
 
711
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
 
712
                fname = i + j
 
713
                fid = fname + '-id'
 
714
                content = 'content for %s\n' % (fname,)
 
715
                entries.append(('add', (fname, fid, 'file', content)))
 
716
        source_builder.start_series()
 
717
        source_builder.build_snapshot('rev-1', None, entries)
 
718
        # Now change a few of them, so we get a few new pages for the second
 
719
        # revision
 
720
        source_builder.build_snapshot('rev-2', ['rev-1'], [
 
721
            ('modify', ('aa-id', 'new content for aa-id\n')),
 
722
            ('modify', ('cc-id', 'new content for cc-id\n')),
 
723
            ('modify', ('zz-id', 'new content for zz-id\n')),
 
724
            ])
 
725
        source_builder.finish_series()
 
726
        source_branch = source_builder.get_branch()
 
727
        source_branch.lock_read()
 
728
        self.addCleanup(source_branch.unlock)
 
729
        target = self.make_repository('target', format='development6-rich-root')
 
730
        source = source_branch.repository._get_source(target._format)
 
731
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)
 
732
 
 
733
        # On a regular pass, getting the inventories and chk pages for rev-2
 
734
        # would only get the newly created chk pages
 
735
        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
 
736
                                    set(['rev-2']))
 
737
        simple_chk_records = []
 
738
        for vf_name, substream in source.get_stream(search):
 
739
            if vf_name == 'chk_bytes':
 
740
                for record in substream:
 
741
                    simple_chk_records.append(record.key)
 
742
            else:
 
743
                for _ in substream:
 
744
                    continue
 
745
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
 
746
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
 
747
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
 
748
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
 
749
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
 
750
                         simple_chk_records)
 
751
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
 
752
        # we should get a much larger set of pages.
 
753
        missing = [('inventories', 'rev-2')]
 
754
        full_chk_records = []
 
755
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
 
756
            if vf_name == 'inventories':
 
757
                for record in substream:
 
758
                    self.assertEqual(('rev-2',), record.key)
 
759
            elif vf_name == 'chk_bytes':
 
760
                for record in substream:
 
761
                    full_chk_records.append(record.key)
 
762
            else:
 
763
                self.fail('Should not be getting a stream of %s' % (vf_name,))
 
764
        # We have 257 records now. This is because we have 1 root page, and 256
 
765
        # leaf pages in a complete listing.
 
766
        self.assertEqual(257, len(full_chk_records))
 
767
        self.assertSubset(simple_chk_records, full_chk_records)
 
768
 
 
769
 
 
770
class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):
    """Tests that _get_source picks KnitPackStreamSource only for
    exactly-matching pack formats, and the generic StreamSource otherwise."""

    def test_source_to_exact_pack_092(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='pack-0.92')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_rich_root_pack(self):
        source = self.make_repository('source', format='rich-root-pack')
        target = self.make_repository('target', format='rich-root-pack')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19_rich_root(self):
        source = self.make_repository('source', format='1.9-rich-root')
        target = self.make_repository('target', format='1.9-rich-root')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_remote_exact_pack_19(self):
        # An exact-format match still holds when the target is reached via a
        # smart server.
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_stream_source_to_non_exact(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='1.9')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), repository.StreamSource)

    def test_stream_source_to_non_exact_rich_root(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9-rich-root')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), repository.StreamSource)

    def test_source_to_remote_non_exact_pack_19(self):
        # Mismatched formats over a smart server get the generic source.
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.6')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIs(type(stream_source), repository.StreamSource)

    def test_stream_source_to_knit(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='dirstate')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), repository.StreamSource)
688
832
 
689
833
class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
690
834
    """Tests for _find_parent_ids_of_revisions."""
825
969
        """
826
970
        broken_repo = self.make_broken_repository()
827
971
        empty_repo = self.make_repository('empty-repo')
 
972
        # See bug https://bugs.launchpad.net/bzr/+bug/389141 for information
 
973
        # about why this was turned into expectFailure
 
974
        self.expectFailure('new Stream fetch fills in missing compression'
 
975
           ' parents (bug #389141)',
 
976
           self.assertRaises, (errors.RevisionNotPresent, errors.BzrCheckError),
 
977
                              empty_repo.fetch, broken_repo)
828
978
        self.assertRaises((errors.RevisionNotPresent, errors.BzrCheckError),
829
979
                          empty_repo.fetch, broken_repo)
830
980
 
1204
1354
        self.assertTrue(new_pack.inventory_index._optimize_for_size)
1205
1355
        self.assertTrue(new_pack.text_index._optimize_for_size)
1206
1356
        self.assertTrue(new_pack.signature_index._optimize_for_size)
1207
 
 
1208
 
 
1209
 
class TestGCCHKPackCollection(TestCaseWithTransport):
    """Tests for stream-source selection and chk-page streaming on gc
    (groupcompress/CHK) repositories."""

    def test_stream_source_to_gc(self):
        """Streaming between two gc repos selects GroupCHKStreamSource."""
        source = self.make_repository('source', format='development6-rich-root')
        target = self.make_repository('target', format='development6-rich-root')
        stream = source._get_source(target._format)
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)

    def test_stream_source_to_non_gc(self):
        """Streaming from gc to non-gc falls back to the generic source."""
        source = self.make_repository('source', format='development6-rich-root')
        target = self.make_repository('target', format='rich-root-pack')
        stream = source._get_source(target._format)
        # We don't want the child GroupCHKStreamSource
        self.assertIs(type(stream), repository.StreamSource)

    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
        """get_stream_for_missing_keys must send every chk page referenced by
        a missing inventory, not just the pages new in that revision."""
        source_builder = self.make_branch_builder('source',
                            format='development6-rich-root')
        # We have to build a fairly large tree, so that we are sure the chk
        # pages will have split into multiple pages.
        entries = [('add', ('', 'a-root-id', 'directory', None))]
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                fname = i + j
                fid = fname + '-id'
                content = 'content for %s\n' % (fname,)
                entries.append(('add', (fname, fid, 'file', content)))
        source_builder.start_series()
        source_builder.build_snapshot('rev-1', None, entries)
        # Now change a few of them, so we get a few new pages for the second
        # revision
        source_builder.build_snapshot('rev-2', ['rev-1'], [
            ('modify', ('aa-id', 'new content for aa-id\n')),
            ('modify', ('cc-id', 'new content for cc-id\n')),
            ('modify', ('zz-id', 'new content for zz-id\n')),
            ])
        source_builder.finish_series()
        source_branch = source_builder.get_branch()
        source_branch.lock_read()
        self.addCleanup(source_branch.unlock)
        target = self.make_repository('target', format='development6-rich-root')
        source = source_branch.repository._get_source(target._format)
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)

        # On a regular pass, getting the inventories and chk pages for rev-2
        # would only get the newly created chk pages
        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
                                    set(['rev-2']))
        simple_chk_records = []
        for vf_name, substream in source.get_stream(search):
            if vf_name == 'chk_bytes':
                for record in substream:
                    simple_chk_records.append(record.key)
            else:
                # Drain the other substreams; only chk_bytes is of interest.
                for _ in substream:
                    continue
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
                         simple_chk_records)
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
        # we should get a much larger set of pages.
        missing = [('inventories', 'rev-2')]
        full_chk_records = []
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
            if vf_name == 'inventories':
                for record in substream:
                    self.assertEqual(('rev-2',), record.key)
            elif vf_name == 'chk_bytes':
                for record in substream:
                    full_chk_records.append(record.key)
            else:
                self.fail('Should not be getting a stream of %s' % (vf_name,))
        # We have 257 records now. This is because we have 1 root page, and 256
        # leaf pages in a complete listing.
        self.assertEqual(257, len(full_chk_records))
        self.assertSubset(simple_chk_records, full_chk_records)