~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/tests/test_repository.py

--- bzrlib/tests/test_repository.py
+++ bzrlib/tests/test_repository.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2006, 2007, 2008, 2009 Canonical Ltd
+# Copyright (C) 2006-2010 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -23,42 +23,36 @@
 """
 
 from stat import S_ISDIR
-from StringIO import StringIO
+import sys
 
 import bzrlib
-from bzrlib.errors import (NotBranchError,
-                           NoSuchFile,
+from bzrlib.errors import (NoSuchFile,
                            UnknownFormatError,
                            UnsupportedFormatError,
                            )
-from bzrlib import graph
-from bzrlib.branchbuilder import BranchBuilder
+from bzrlib import (
+    graph,
+    tests,
+    )
 from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
-from bzrlib.index import GraphIndex, InMemoryGraphIndex
+from bzrlib.index import GraphIndex
 from bzrlib.repository import RepositoryFormat
-from bzrlib.smart import server
 from bzrlib.tests import (
     TestCase,
     TestCaseWithTransport,
-    TestSkipped,
-    test_knit,
     )
 from bzrlib.transport import (
-    fakenfs,
     get_transport,
     )
-from bzrlib.transport.memory import MemoryServer
-from bzrlib.util import bencode
 from bzrlib import (
     bzrdir,
     errors,
     inventory,
     osutils,
-    progress,
     repository,
     revision as _mod_revision,
-    symbol_versioning,
     upgrade,
+    versionedfile,
     workingtree,
     )
 from bzrlib.repofmt import (
@@ -249,7 +243,14 @@
         tree = control.create_workingtree()
         tree.add(['foo'], ['Foo:Bar'], ['file'])
         tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
-        tree.commit('first post', rev_id='first')
+        try:
+            tree.commit('first post', rev_id='first')
+        except errors.IllegalPath:
+            if sys.platform != 'win32':
+                raise
+            self.knownFailure('Foo:Bar cannot be used as a file-id on windows'
+                              ' in repo format 7')
+            return
         self.assertEqualDiff(
             '# bzr weave file v5\n'
             'i\n'
@@ -453,7 +454,7 @@
         repo = self.make_repository('.',
                 format=bzrdir.format_registry.get('knit')())
         inv_xml = '<inventory format="5">\n</inventory>\n'
-        inv = repo.deserialise_inventory('test-rev-id', inv_xml)
+        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
         self.assertEqual('test-rev-id', inv.root.revision)
 
     def test_deserialise_uses_global_revision_id(self):
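
The hunk above and the next one track a single API change: Repository.deserialise_inventory became the private Repository._deserialise_inventory. A minimal sketch of the usual migration pattern, assuming the old public name is kept as a deprecated alias (illustrative only, not necessarily the exact bzrlib shim):

    import warnings

    class Repository(object):
        def _deserialise_inventory(self, revision_id, xml):
            # Parse `xml` into an Inventory, defaulting its revision_id
            # to the one supplied by the caller.
            raise NotImplementedError

        def deserialise_inventory(self, revision_id, xml):
            # Old public entry point, retained only as a deprecated shim.
            warnings.warn('deserialise_inventory is deprecated; use '
                          '_deserialise_inventory instead',
                          DeprecationWarning, stacklevel=2)
            return self._deserialise_inventory(revision_id, xml)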
@@ -465,9 +466,9 @@
         # Arguably, the deserialise_inventory should detect a mismatch, and
         # raise an error, rather than silently using one revision_id over the
         # other.
-        self.assertRaises(AssertionError, repo.deserialise_inventory,
+        self.assertRaises(AssertionError, repo._deserialise_inventory,
             'test-rev-id', inv_xml)
-        inv = repo.deserialise_inventory('other-rev-id', inv_xml)
+        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
         self.assertEqual('other-rev-id', inv.root.revision)
 
     def test_supports_external_lookups(self):
@@ -483,6 +484,8 @@
     _serializer = None
 
     def supports_rich_root(self):
+        if self._format is not None:
+            return self._format.rich_root_data
         return False
 
     def get_graph(self):
@@ -539,11 +542,17 @@
         # pair that it returns true on for the is_compatible static method
         # check
         dummy_a = DummyRepository()
+        dummy_a._format = RepositoryFormat()
         dummy_b = DummyRepository()
+        dummy_b._format = RepositoryFormat()
         repo = self.make_repository('.')
         # hack dummies to look like repo somewhat.
         dummy_a._serializer = repo._serializer
+        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
+        dummy_a._format.rich_root_data = repo._format.rich_root_data
         dummy_b._serializer = repo._serializer
+        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
+        dummy_b._format.rich_root_data = repo._format.rich_root_data
         repository.InterRepository.register_optimiser(InterDummy)
         try:
             # we should get the default for something InterDummy returns False
@@ -670,11 +679,84 @@
         self.assertFalse(repo._format.supports_external_lookups)
 
 
-class TestDevelopment6(TestCaseWithTransport):
+class Test2a(tests.TestCaseWithMemoryTransport):
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot('1', None, [
+            ('add', ('', 'root-id', 'directory', '')),
+            ('add', ('file', 'file-id', 'file', 'content\n'))])
+        builder.build_snapshot('2', ['1'], [
+            ('modify', ('file-id', 'content-2\n'))])
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [('file-id', '1',), ('file-id', '2',)])
+        file_1_details = details[('file-id', '1')]
+        file_2_details = details[('file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot('1', None, [
+            ('add', ('', 'root-id', 'directory', '')),
+            ('add', ('file', 'file-id', 'file', 'content\n'))])
+        builder.build_snapshot('2', ['1'], [
+            ('modify', ('file-id', 'content-2\n'))])
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [('file-id', '1',), ('file-id', '2',)])
+        file_1_details = details[('file-id', '1')]
+        file_2_details = details[('file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot('1', None, [
+            ('add', ('', 'root-id', 'directory', '')),
+            ('add', ('file', 'file-id', 'file', 'content\n'))])
+        builder.build_snapshot('2', ['1'], [
+            ('modify', ('file-id', 'content-2\n'))])
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [('file-id', '1',), ('file-id', '2',)])
+        file_1_details = details[('file-id', '1')]
+        file_2_details = details[('file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_format_pack_compresses_True(self):
+        repo = self.make_repository('repo', format='2a')
+        self.assertTrue(repo._format.pack_compresses)
 
     def test_inventories_use_chk_map_with_parent_base_dict(self):
-        tree = self.make_branch_and_tree('repo', format="development6-rich-root")
+        tree = self.make_branch_and_memory_tree('repo', format="2a")
+        tree.lock_write()
+        tree.add([''], ['TREE_ROOT'])
         revid = tree.commit("foo")
+        tree.unlock()
         tree.lock_read()
         self.addCleanup(tree.unlock)
         inv = tree.branch.repository.get_inventory(revid)
@@ -685,12 +767,195 @@
         self.assertEqual(65536,
             inv.parent_id_basename_to_file_id._root_node.maximum_size)
 
-
-class TestDevelopment6FindRevisionOutsideSet(TestCaseWithTransport):
-    """Tests for _find_revision_outside_set."""
+    def test_autopack_unchanged_chk_nodes(self):
+        # at 20 unchanged commits, chk pages are packed that are split into
+        # two groups such that the new pack being made doesn't have all its
+        # pages in the source packs (though they are in the repository).
+        # Use a memory backed repository, we don't need to hit disk for this
+        tree = self.make_branch_and_memory_tree('tree', format='2a')
+        tree.lock_write()
+        self.addCleanup(tree.unlock)
+        tree.add([''], ['TREE_ROOT'])
+        for pos in range(20):
+            tree.commit(str(pos))
+
+    def test_pack_with_hint(self):
+        tree = self.make_branch_and_memory_tree('tree', format='2a')
+        tree.lock_write()
+        self.addCleanup(tree.unlock)
+        tree.add([''], ['TREE_ROOT'])
+        # 1 commit to leave untouched
+        tree.commit('1')
+        to_keep = tree.branch.repository._pack_collection.names()
+        # 2 to combine
+        tree.commit('2')
+        tree.commit('3')
+        all = tree.branch.repository._pack_collection.names()
+        combine = list(set(all) - set(to_keep))
+        self.assertLength(3, all)
+        self.assertLength(2, combine)
+        tree.branch.repository.pack(hint=combine)
+        final = tree.branch.repository._pack_collection.names()
+        self.assertLength(2, final)
+        self.assertFalse(combine[0] in final)
+        self.assertFalse(combine[1] in final)
+        self.assertSubset(to_keep, final)
+
+    def test_stream_source_to_gc(self):
+        source = self.make_repository('source', format='2a')
+        target = self.make_repository('target', format='2a')
+        stream = source._get_source(target._format)
+        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)
+
+    def test_stream_source_to_non_gc(self):
+        source = self.make_repository('source', format='2a')
+        target = self.make_repository('target', format='rich-root-pack')
+        stream = source._get_source(target._format)
+        # We don't want the child GroupCHKStreamSource
+        self.assertIs(type(stream), repository.StreamSource)
+
+    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
+        source_builder = self.make_branch_builder('source',
+                            format='2a')
+        # We have to build a fairly large tree, so that we are sure the chk
+        # pages will have split into multiple pages.
+        entries = [('add', ('', 'a-root-id', 'directory', None))]
+        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
+            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
+                fname = i + j
+                fid = fname + '-id'
+                content = 'content for %s\n' % (fname,)
+                entries.append(('add', (fname, fid, 'file', content)))
+        source_builder.start_series()
+        source_builder.build_snapshot('rev-1', None, entries)
+        # Now change a few of them, so we get a few new pages for the second
+        # revision
+        source_builder.build_snapshot('rev-2', ['rev-1'], [
+            ('modify', ('aa-id', 'new content for aa-id\n')),
+            ('modify', ('cc-id', 'new content for cc-id\n')),
+            ('modify', ('zz-id', 'new content for zz-id\n')),
+            ])
+        source_builder.finish_series()
+        source_branch = source_builder.get_branch()
+        source_branch.lock_read()
+        self.addCleanup(source_branch.unlock)
+        target = self.make_repository('target', format='2a')
+        source = source_branch.repository._get_source(target._format)
+        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)
+
+        # On a regular pass, getting the inventories and chk pages for rev-2
+        # would only get the newly created chk pages
+        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
+                                    set(['rev-2']))
+        simple_chk_records = []
+        for vf_name, substream in source.get_stream(search):
+            if vf_name == 'chk_bytes':
+                for record in substream:
+                    simple_chk_records.append(record.key)
+            else:
+                for _ in substream:
+                    continue
+        # 3 pages, the root (InternalNode), + 2 pages which actually changed
+        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
+                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
+                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
+                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
+                         simple_chk_records)
+        # Now, when we do a similar call using 'get_stream_for_missing_keys'
+        # we should get a much larger set of pages.
+        missing = [('inventories', 'rev-2')]
+        full_chk_records = []
+        for vf_name, substream in source.get_stream_for_missing_keys(missing):
+            if vf_name == 'inventories':
+                for record in substream:
+                    self.assertEqual(('rev-2',), record.key)
+            elif vf_name == 'chk_bytes':
+                for record in substream:
+                    full_chk_records.append(record.key)
+            else:
+                self.fail('Should not be getting a stream of %s' % (vf_name,))
+        # We have 257 records now. This is because we have 1 root page, and 256
+        # leaf pages in a complete listing.
+        self.assertEqual(257, len(full_chk_records))
+        self.assertSubset(simple_chk_records, full_chk_records)
+
+    def test_inconsistency_fatal(self):
+        repo = self.make_repository('repo', format='2a')
+        self.assertTrue(repo.revisions._index._inconsistency_fatal)
+        self.assertFalse(repo.texts._index._inconsistency_fatal)
+        self.assertFalse(repo.inventories._index._inconsistency_fatal)
+        self.assertFalse(repo.signatures._index._inconsistency_fatal)
+        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
+
+
+class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):
+
+    def test_source_to_exact_pack_092(self):
+        source = self.make_repository('source', format='pack-0.92')
+        target = self.make_repository('target', format='pack-0.92')
+        stream_source = source._get_source(target._format)
+        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+
+    def test_source_to_exact_pack_rich_root_pack(self):
+        source = self.make_repository('source', format='rich-root-pack')
+        target = self.make_repository('target', format='rich-root-pack')
+        stream_source = source._get_source(target._format)
+        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+
+    def test_source_to_exact_pack_19(self):
+        source = self.make_repository('source', format='1.9')
+        target = self.make_repository('target', format='1.9')
+        stream_source = source._get_source(target._format)
+        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+
+    def test_source_to_exact_pack_19_rich_root(self):
+        source = self.make_repository('source', format='1.9-rich-root')
+        target = self.make_repository('target', format='1.9-rich-root')
+        stream_source = source._get_source(target._format)
+        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+
+    def test_source_to_remote_exact_pack_19(self):
+        trans = self.make_smart_server('target')
+        trans.ensure_base()
+        source = self.make_repository('source', format='1.9')
+        target = self.make_repository('target', format='1.9')
+        target = repository.Repository.open(trans.base)
+        stream_source = source._get_source(target._format)
+        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+
+    def test_stream_source_to_non_exact(self):
+        source = self.make_repository('source', format='pack-0.92')
+        target = self.make_repository('target', format='1.9')
+        stream = source._get_source(target._format)
+        self.assertIs(type(stream), repository.StreamSource)
+
+    def test_stream_source_to_non_exact_rich_root(self):
+        source = self.make_repository('source', format='1.9')
+        target = self.make_repository('target', format='1.9-rich-root')
+        stream = source._get_source(target._format)
+        self.assertIs(type(stream), repository.StreamSource)
+
+    def test_source_to_remote_non_exact_pack_19(self):
+        trans = self.make_smart_server('target')
+        trans.ensure_base()
+        source = self.make_repository('source', format='1.9')
+        target = self.make_repository('target', format='1.6')
+        target = repository.Repository.open(trans.base)
+        stream_source = source._get_source(target._format)
+        self.assertIs(type(stream_source), repository.StreamSource)
+
+    def test_stream_source_to_knit(self):
+        source = self.make_repository('source', format='pack-0.92')
+        target = self.make_repository('target', format='dirstate')
+        stream = source._get_source(target._format)
+        self.assertIs(type(stream), repository.StreamSource)
+
+
+class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
+    """Tests for _find_parent_ids_of_revisions."""
 
     def setUp(self):
-        super(TestDevelopment6FindRevisionOutsideSet, self).setUp()
+        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
         self.builder = self.make_branch_builder('source',
             format='development6-rich-root')
         self.builder.start_series()
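
test_pack_with_hint above pins down the observable contract of Repository.pack(hint=...): only the packs named in the hint are combined, and every other pack is left untouched. A behavioural sketch of that contract with hypothetical names (not bzrlib's implementation, which works on real pack files):

    def pack_with_hint(pack_names, hint):
        """Combine only the packs named in `hint`; leave the rest alone."""
        combined = [name for name in pack_names if name in hint]
        kept = [name for name in pack_names if name not in hint]
        if not combined:
            return list(pack_names)
        # Stand-in for writing one new pack holding the combined content.
        return kept + ['combined-' + '-'.join(sorted(combined))]

    # Mirrors the test: 3 packs, keep 1, combine 2 -> 2 packs remain.
    assert len(pack_with_hint(['p1', 'p2', 'p3'], hint=['p2', 'p3'])) == 2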
@@ -699,50 +964,50 @@
         self.repo = self.builder.get_branch().repository
         self.addCleanup(self.builder.finish_series)
 
-    def assertRevisionOutsideSet(self, expected_result, rev_set):
-        self.assertEqual(
-            expected_result, self.repo._find_revision_outside_set(rev_set))
+    def assertParentIds(self, expected_result, rev_set):
+        self.assertEqual(sorted(expected_result),
+            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))
 
     def test_simple(self):
         self.builder.build_snapshot('revid1', None, [])
-        self.builder.build_snapshot('revid2', None, [])
+        self.builder.build_snapshot('revid2', ['revid1'], [])
         rev_set = ['revid2']
-        self.assertRevisionOutsideSet('revid1', rev_set)
+        self.assertParentIds(['revid1'], rev_set)
 
     def test_not_first_parent(self):
         self.builder.build_snapshot('revid1', None, [])
-        self.builder.build_snapshot('revid2', None, [])
-        self.builder.build_snapshot('revid3', None, [])
+        self.builder.build_snapshot('revid2', ['revid1'], [])
+        self.builder.build_snapshot('revid3', ['revid2'], [])
         rev_set = ['revid3', 'revid2']
-        self.assertRevisionOutsideSet('revid1', rev_set)
+        self.assertParentIds(['revid1'], rev_set)
 
     def test_not_null(self):
         rev_set = ['initial']
-        self.assertRevisionOutsideSet(_mod_revision.NULL_REVISION, rev_set)
+        self.assertParentIds([], rev_set)
 
     def test_not_null_set(self):
         self.builder.build_snapshot('revid1', None, [])
         rev_set = [_mod_revision.NULL_REVISION]
-        self.assertRevisionOutsideSet(_mod_revision.NULL_REVISION, rev_set)
+        self.assertParentIds([], rev_set)
 
     def test_ghost(self):
         self.builder.build_snapshot('revid1', None, [])
         rev_set = ['ghost', 'revid1']
-        self.assertRevisionOutsideSet('initial', rev_set)
+        self.assertParentIds(['initial'], rev_set)
 
     def test_ghost_parent(self):
         self.builder.build_snapshot('revid1', None, [])
         self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
         rev_set = ['revid2', 'revid1']
-        self.assertRevisionOutsideSet('initial', rev_set)
+        self.assertParentIds(['ghost', 'initial'], rev_set)
 
     def test_righthand_parent(self):
         self.builder.build_snapshot('revid1', None, [])
         self.builder.build_snapshot('revid2a', ['revid1'], [])
         self.builder.build_snapshot('revid2b', ['revid1'], [])
         self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
         rev_set = ['revid3', 'revid2a']
-        self.assertRevisionOutsideSet('revid2b', rev_set)
+        self.assertParentIds(['revid1', 'revid2b'], rev_set)
 
 
 class TestWithBrokenRepo(TestCaseWithTransport):
@@ -763,6 +1028,7 @@
             inv = inventory.Inventory(revision_id='rev1a')
             inv.root.revision = 'rev1a'
             self.add_file(repo, inv, 'file1', 'rev1a', [])
+            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
             repo.add_inventory('rev1a', inv, [])
             revision = _mod_revision.Revision('rev1a',
                 committer='jrandom@example.com', timestamp=0,
@@ -803,6 +1069,7 @@
     def add_revision(self, repo, revision_id, inv, parent_ids):
         inv.revision_id = revision_id
         inv.root.revision = revision_id
+        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
         repo.add_inventory(revision_id, inv, parent_ids)
         revision = _mod_revision.Revision(revision_id,
             committer='jrandom@example.com', timestamp=0, inventory_sha1='',
@@ -825,8 +1092,17 @@
         """
         broken_repo = self.make_broken_repository()
         empty_repo = self.make_repository('empty-repo')
-        self.assertRaises((errors.RevisionNotPresent, errors.BzrCheckError),
-                          empty_repo.fetch, broken_repo)
+        try:
+            empty_repo.fetch(broken_repo)
+        except (errors.RevisionNotPresent, errors.BzrCheckError):
+            # Test successful: compression parent not being copied leads to
+            # error.
+            return
+        empty_repo.lock_read()
+        self.addCleanup(empty_repo.unlock)
+        text = empty_repo.texts.get_record_stream(
+            [('file2-id', 'rev3')], 'topological', True).next()
+        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
 
 
 class TestRepositoryPackCollection(TestCaseWithTransport):
@@ -841,7 +1117,7 @@
 
     def make_packs_and_alt_repo(self, write_lock=False):
         """Create a pack repo with 3 packs, and access it via a second repo."""
-        tree = self.make_branch_and_tree('.')
+        tree = self.make_branch_and_tree('.', format=self.get_format())
         tree.lock_write()
         self.addCleanup(tree.unlock)
         rev1 = tree.commit('one')
@@ -857,6 +1133,31 @@
         packs.ensure_loaded()
         return tree, r, packs, [rev1, rev2, rev3]
 
+    def test__clear_obsolete_packs(self):
+        packs = self.get_packs()
+        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
+        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
+        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
+        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
+        res = packs._clear_obsolete_packs()
+        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
+        self.assertEqual([], obsolete_pack_trans.list_dir('.'))
+
+    def test__clear_obsolete_packs_preserve(self):
+        packs = self.get_packs()
+        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
+        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
+        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
+        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
+        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
+        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
+        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
+                         sorted(obsolete_pack_trans.list_dir('.')))
+
     def test__max_pack_count(self):
         """The maximum pack count is a function of the number of revisions."""
         # no revisions - one pack, so that we can have a revision free repo
1183
        # check some arbitrary big numbers
883
1184
        self.assertEqual(25, packs._max_pack_count(112894))
884
1185
 
 
1186
    def test_repr(self):
 
1187
        packs = self.get_packs()
 
1188
        self.assertContainsRe(repr(packs),
 
1189
            'RepositoryPackCollection(.*Repository(.*))')
 
1190
 
 
1191
    def test__obsolete_packs(self):
 
1192
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1193
        names = packs.names()
 
1194
        pack = packs.get_pack_by_name(names[0])
 
1195
        # Schedule this one for removal
 
1196
        packs._remove_pack_from_memory(pack)
 
1197
        # Simulate a concurrent update by renaming the .pack file and one of
 
1198
        # the indices
 
1199
        packs.transport.rename('packs/%s.pack' % (names[0],),
 
1200
                               'obsolete_packs/%s.pack' % (names[0],))
 
1201
        packs.transport.rename('indices/%s.iix' % (names[0],),
 
1202
                               'obsolete_packs/%s.iix' % (names[0],))
 
1203
        # Now trigger the obsoletion, and ensure that all the remaining files
 
1204
        # are still renamed
 
1205
        packs._obsolete_packs([pack])
 
1206
        self.assertEqual([n + '.pack' for n in names[1:]],
 
1207
                         sorted(packs._pack_transport.list_dir('.')))
 
1208
        # names[0] should not be present in the index anymore
 
1209
        self.assertEqual(names[1:],
 
1210
            sorted(set([osutils.splitext(n)[0] for n in
 
1211
                        packs._index_transport.list_dir('.')])))
 
1212
 
885
1213
    def test_pack_distribution_zero(self):
886
1214
        packs = self.get_packs()
887
1215
        self.assertEqual([0], packs.pack_distribution(0))
@@ -1055,6 +1383,60 @@
         self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
         self.assertFalse(packs.reload_pack_names())
 
+    def test_reload_pack_names_preserves_pending(self):
+        # TODO: Update this to also test for pending-deleted names
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        # We will add one pack (via start_write_group + insert_record_stream),
+        # and remove another pack (via _remove_pack_from_memory)
+        orig_names = packs.names()
+        orig_at_load = packs._packs_at_load
+        to_remove_name = iter(orig_names).next()
+        r.start_write_group()
+        self.addCleanup(r.abort_write_group)
+        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
+            ('text', 'rev'), (), None, 'content\n')])
+        new_pack = packs._new_pack
+        self.assertTrue(new_pack.data_inserted())
+        new_pack.finish()
+        packs.allocate(new_pack)
+        packs._new_pack = None
+        removed_pack = packs.get_pack_by_name(to_remove_name)
+        packs._remove_pack_from_memory(removed_pack)
+        names = packs.names()
+        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
+        new_names = set([x[0][0] for x in new_nodes])
+        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
+        self.assertEqual(set(names) - set(orig_names), new_names)
+        self.assertEqual(set([new_pack.name]), new_names)
+        self.assertEqual([to_remove_name],
+                         sorted([x[0][0] for x in deleted_nodes]))
+        packs.reload_pack_names()
+        reloaded_names = packs.names()
+        self.assertEqual(orig_at_load, packs._packs_at_load)
+        self.assertEqual(names, reloaded_names)
+        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
+        new_names = set([x[0][0] for x in new_nodes])
+        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
+        self.assertEqual(set(names) - set(orig_names), new_names)
+        self.assertEqual(set([new_pack.name]), new_names)
+        self.assertEqual([to_remove_name],
+                         sorted([x[0][0] for x in deleted_nodes]))
+
+    def test_autopack_obsoletes_new_pack(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        packs._max_pack_count = lambda x: 1
+        packs.pack_distribution = lambda x: [10]
+        r.start_write_group()
+        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
+            ('bogus-rev',), (), None, 'bogus-content\n')])
+        # This should trigger an autopack, which will combine everything into a
+        # single pack file.
+        new_names = r.commit_write_group()
+        names = packs.names()
+        self.assertEqual(1, len(names))
+        self.assertEqual([names[0] + '.pack'],
+                         packs._pack_transport.list_dir('.'))
+
     def test_autopack_reloads_and_stops(self):
         tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
         # After we have determined what needs to be autopacked, trigger a
@@ -1072,6 +1454,38 @@
         self.assertEqual(tree.branch.repository._pack_collection.names(),
                          packs.names())
 
+    def test__save_pack_names(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        names = packs.names()
+        pack = packs.get_pack_by_name(names[0])
+        packs._remove_pack_from_memory(pack)
+        packs._save_pack_names(obsolete_packs=[pack])
+        cur_packs = packs._pack_transport.list_dir('.')
+        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
+        # obsolete_packs will also have stuff like .rix and .iix present.
+        obsolete_packs = packs.transport.list_dir('obsolete_packs')
+        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
+        self.assertEqual([pack.name], sorted(obsolete_names))
+
+    def test__save_pack_names_already_obsoleted(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        names = packs.names()
+        pack = packs.get_pack_by_name(names[0])
+        packs._remove_pack_from_memory(pack)
+        # We are going to simulate a concurrent autopack by manually obsoleting
+        # the pack directly.
+        packs._obsolete_packs([pack])
+        packs._save_pack_names(clear_obsolete_packs=True,
+                               obsolete_packs=[pack])
+        cur_packs = packs._pack_transport.list_dir('.')
+        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
+        # Note that while we set clear_obsolete_packs=True, it should not
+        # delete a pack file that we have also scheduled for obsoletion.
+        obsolete_packs = packs.transport.list_dir('obsolete_packs')
+        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
+        self.assertEqual([pack.name], sorted(obsolete_names))
+
+
 
 class TestPack(TestCaseWithTransport):
     """Tests for the Pack object."""
@@ -1141,6 +1555,7 @@
             index_class=BTreeGraphIndex,
             use_chk_index=False)
         pack = pack_repo.NewPack(collection)
+        self.addCleanup(pack.abort) # Make sure the write stream gets closed
         self.assertIsInstance(pack.revision_index, BTreeBuilder)
         self.assertIsInstance(pack.inventory_index, BTreeBuilder)
         self.assertIsInstance(pack._hash, type(osutils.md5()))
@@ -1157,7 +1572,7 @@
     """Tests for the packs repository Packer class."""
 
     def test_pack_optimizes_pack_order(self):
-        builder = self.make_branch_builder('.')
+        builder = self.make_branch_builder('.', format="1.9")
         builder.start_series()
         builder.build_snapshot('A', None, [
             ('add', ('', 'root-id', 'directory', None)),
@@ -1199,24 +1614,89 @@
         packer = pack_repo.OptimisingPacker(self.get_pack_collection(),
                                             [], '.test')
         new_pack = packer.open_pack()
+        self.addCleanup(new_pack.abort) # ensure cleanup
         self.assertIsInstance(new_pack, pack_repo.NewPack)
         self.assertTrue(new_pack.revision_index._optimize_for_size)
         self.assertTrue(new_pack.inventory_index._optimize_for_size)
         self.assertTrue(new_pack.text_index._optimize_for_size)
         self.assertTrue(new_pack.signature_index._optimize_for_size)
 
 
-class TestGCCHKPackCollection(TestCaseWithTransport):
-
-    def test_stream_source_to_gc(self):
-        source = self.make_repository('source', format='development6-rich-root')
-        target = self.make_repository('target', format='development6-rich-root')
-        stream = source._get_source(target._format)
-        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)
-
-    def test_stream_source_to_non_gc(self):
-        source = self.make_repository('source', format='development6-rich-root')
-        target = self.make_repository('target', format='rich-root-pack')
-        stream = source._get_source(target._format)
-        # We don't want the child GroupCHKStreamSource
-        self.assertIs(type(stream), repository.StreamSource)
+class TestCrossFormatPacks(TestCaseWithTransport):
+
+    def log_pack(self, hint=None):
+        self.calls.append(('pack', hint))
+        self.orig_pack(hint=hint)
+        if self.expect_hint:
+            self.assertTrue(hint)
+
+    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
+        self.expect_hint = expect_pack_called
+        self.calls = []
+        source_tree = self.make_branch_and_tree('src', format=src_fmt)
+        source_tree.lock_write()
+        self.addCleanup(source_tree.unlock)
+        tip = source_tree.commit('foo')
+        target = self.make_repository('target', format=target_fmt)
+        target.lock_write()
+        self.addCleanup(target.unlock)
+        source = source_tree.branch.repository._get_source(target._format)
+        self.orig_pack = target.pack
+        target.pack = self.log_pack
+        search = target.search_missing_revision_ids(
+            source_tree.branch.repository, tip)
+        stream = source.get_stream(search)
+        from_format = source_tree.branch.repository._format
+        sink = target._get_sink()
+        sink.insert_stream(stream, from_format, [])
+        if expect_pack_called:
+            self.assertLength(1, self.calls)
+        else:
+            self.assertLength(0, self.calls)
+
+    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
+        self.expect_hint = expect_pack_called
+        self.calls = []
+        source_tree = self.make_branch_and_tree('src', format=src_fmt)
+        source_tree.lock_write()
+        self.addCleanup(source_tree.unlock)
+        tip = source_tree.commit('foo')
+        target = self.make_repository('target', format=target_fmt)
+        target.lock_write()
+        self.addCleanup(target.unlock)
+        source = source_tree.branch.repository
+        self.orig_pack = target.pack
+        target.pack = self.log_pack
+        target.fetch(source)
+        if expect_pack_called:
+            self.assertLength(1, self.calls)
+        else:
+            self.assertLength(0, self.calls)
+
+    def test_sink_format_hint_no(self):
+        # When the target format says packing makes no difference, pack is not
+        # called.
+        self.run_stream('1.9', 'rich-root-pack', False)
+
+    def test_sink_format_hint_yes(self):
+        # When the target format says packing makes a difference, pack is
+        # called.
+        self.run_stream('1.9', '2a', True)
+
+    def test_sink_format_same_no(self):
+        # When the formats are the same, pack is not called.
+        self.run_stream('2a', '2a', False)
+
+    def test_IDS_format_hint_no(self):
+        # When the target format says packing makes no difference, pack is not
+        # called.
+        self.run_fetch('1.9', 'rich-root-pack', False)
+
+    def test_IDS_format_hint_yes(self):
+        # When the target format says packing makes a difference, pack is
+        # called.
+        self.run_fetch('1.9', '2a', True)
+
+    def test_IDS_format_same_no(self):
+        # When the formats are the same, pack is not called.
+        self.run_fetch('2a', '2a', False)
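
TestCrossFormatPacks above relies on a small method-swapping spy: target.pack is replaced by a wrapper that records the call (and its hint) before delegating to the saved original. The same trick in isolation, with hypothetical names:

    def install_pack_spy(target, calls):
        """Replace target.pack with a recording wrapper."""
        orig_pack = target.pack
        def log_pack(hint=None):
            calls.append(('pack', hint))
            return orig_pack(hint=hint)
        target.pack = log_pack

    class FakeRepo(object):
        def pack(self, hint=None):
            pass

    calls = []
    repo = FakeRepo()
    install_pack_spy(repo, calls)
    repo.pack(hint=['some-pack'])
    assert calls == [('pack', ['some-pack'])]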