~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/tests/test_repository.py

  • Committer: Vincent Ladeuil
  • Date: 2009-04-27 16:10:10 UTC
  • mto: (4310.1.1 integration)
  • mto: This revision was merged to the branch mainline in revision 4311.
  • Revision ID: v.ladeuil+lp@free.fr-20090427161010-7swfzeagf63cpixd
Fix bug #367726 by reverting some default user handling introduced
while fixing bug #256612.

* bzrlib/transport/ssh.py:
(_paramiko_auth): Explicitly use getpass.getuser() as default
user.

* bzrlib/transport/ftp/_gssapi.py:
(GSSAPIFtpTransport._create_connection): Explicitly use
getpass.getuser() as default user.

* bzrlib/transport/ftp/__init__.py:
(FtpTransport._create_connection): Explicitly use
getpass.getuser() as default user.

* bzrlib/tests/test_sftp_transport.py:
(TestUsesAuthConfig.test_sftp_is_none_if_no_config)
(TestUsesAuthConfig.test_sftp_doesnt_prompt_username): Revert to
None as the default user.

* bzrlib/tests/test_remote.py:
(TestRemoteSSHTransportAuthentication): The really offending one:
revert to None as the default user.

* bzrlib/tests/test_config.py:
(TestAuthenticationConfig.test_username_default_no_prompt): Update
test (and some PEP8).

* bzrlib/smtp_connection.py:
(SMTPConnection._authenticate): Revert to None as the default
user.

* bzrlib/plugins/launchpad/account.py:
(_get_auth_user): Revert default value handling.

* bzrlib/config.py:
(AuthenticationConfig.get_user): Fix doc-string. Leave default
value handling to callers.
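
For readers skimming the change: the division of labour described above can be
sketched as follows. This is a minimal illustrative sketch, not the bzrlib code
itself; the class and function below (AuthenticationConfig, create_connection)
are stand-ins for the real APIs, and only the split of responsibilities --
get_user() returning None when nothing is configured, callers falling back to
getpass.getuser() explicitly -- is taken from the change description above.

    # Illustrative sketch only; names are stand-ins for the bzrlib APIs touched above.
    import getpass

    class AuthenticationConfig(object):
        """Stand-in for bzrlib.config.AuthenticationConfig."""

        def __init__(self, users=None):
            self._users = users or {}

        def get_user(self, scheme, host):
            # After this fix: return None when no username is configured,
            # leaving default value handling to the callers.
            return self._users.get((scheme, host))

    def create_connection(auth_config, host):
        """Stand-in for a transport's _create_connection."""
        user = auth_config.get_user('ftp', host)
        if user is None:
            # Callers that need a concrete login name fall back explicitly.
            user = getpass.getuser()
        return user

    # With nothing configured, the process owner's login name is used:
    print(create_connection(AuthenticationConfig(), 'example.com'))

Keeping the fallback in the callers (rather than inside get_user) is what lets
tests and callers that genuinely want "no user configured" see None again.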

--- bzrlib/tests/test_repository.py
+++ bzrlib/tests/test_repository.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2006-2010 Canonical Ltd
+# Copyright (C) 2006, 2007, 2008, 2009 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -23,37 +23,42 @@
 """
 
 from stat import S_ISDIR
-import sys
+from StringIO import StringIO
 
 import bzrlib
-from bzrlib.errors import (NoSuchFile,
+from bzrlib.errors import (NotBranchError,
+                           NoSuchFile,
                            UnknownFormatError,
                            UnsupportedFormatError,
                            )
-from bzrlib import (
-    btree_index,
-    graph,
-    tests,
-    )
+from bzrlib import graph
+from bzrlib.branchbuilder import BranchBuilder
 from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
-from bzrlib.index import GraphIndex
+from bzrlib.index import GraphIndex, InMemoryGraphIndex
 from bzrlib.repository import RepositoryFormat
+from bzrlib.smart import server
 from bzrlib.tests import (
     TestCase,
     TestCaseWithTransport,
+    TestSkipped,
+    test_knit,
     )
 from bzrlib.transport import (
+    fakenfs,
     get_transport,
     )
+from bzrlib.transport.memory import MemoryServer
+from bzrlib.util import bencode
 from bzrlib import (
     bzrdir,
     errors,
     inventory,
     osutils,
+    progress,
     repository,
     revision as _mod_revision,
+    symbol_versioning,
     upgrade,
-    versionedfile,
     workingtree,
     )
 from bzrlib.repofmt import (
@@ -244,14 +249,7 @@
         tree = control.create_workingtree()
         tree.add(['foo'], ['Foo:Bar'], ['file'])
         tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
-        try:
-            tree.commit('first post', rev_id='first')
-        except errors.IllegalPath:
-            if sys.platform != 'win32':
-                raise
-            self.knownFailure('Foo:Bar cannot be used as a file-id on windows'
-                              ' in repo format 7')
-            return
+        tree.commit('first post', rev_id='first')
         self.assertEqualDiff(
             '# bzr weave file v5\n'
             'i\n'
@@ -455,7 +453,7 @@
         repo = self.make_repository('.',
                 format=bzrdir.format_registry.get('knit')())
         inv_xml = '<inventory format="5">\n</inventory>\n'
-        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
+        inv = repo.deserialise_inventory('test-rev-id', inv_xml)
         self.assertEqual('test-rev-id', inv.root.revision)
 
     def test_deserialise_uses_global_revision_id(self):
@@ -467,9 +465,9 @@
         # Arguably, the deserialise_inventory should detect a mismatch, and
         # raise an error, rather than silently using one revision_id over the
         # other.
-        self.assertRaises(AssertionError, repo._deserialise_inventory,
+        self.assertRaises(AssertionError, repo.deserialise_inventory,
             'test-rev-id', inv_xml)
-        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
+        inv = repo.deserialise_inventory('other-rev-id', inv_xml)
         self.assertEqual('other-rev-id', inv.root.revision)
 
     def test_supports_external_lookups(self):
@@ -485,8 +483,6 @@
     _serializer = None
 
     def supports_rich_root(self):
-        if self._format is not None:
-            return self._format.rich_root_data
         return False
 
     def get_graph(self):
@@ -543,17 +539,11 @@
         # pair that it returns true on for the is_compatible static method
         # check
         dummy_a = DummyRepository()
-        dummy_a._format = RepositoryFormat()
         dummy_b = DummyRepository()
-        dummy_b._format = RepositoryFormat()
         repo = self.make_repository('.')
         # hack dummies to look like repo somewhat.
         dummy_a._serializer = repo._serializer
-        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
-        dummy_a._format.rich_root_data = repo._format.rich_root_data
         dummy_b._serializer = repo._serializer
-        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
-        dummy_b._format.rich_root_data = repo._format.rich_root_data
         repository.InterRepository.register_optimiser(InterDummy)
         try:
             # we should get the default for something InterDummy returns False
@@ -680,99 +670,11 @@
         self.assertFalse(repo._format.supports_external_lookups)
 
 
-class Test2a(tests.TestCaseWithMemoryTransport):
-
-    def test_chk_bytes_uses_custom_btree_parser(self):
-        mt = self.make_branch_and_memory_tree('test', format='2a')
-        mt.lock_write()
-        self.addCleanup(mt.unlock)
-        mt.add([''], ['root-id'])
-        mt.commit('first')
-        index = mt.branch.repository.chk_bytes._index._graph_index._indices[0]
-        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
-        # It should also work if we re-open the repo
-        repo = mt.branch.repository.bzrdir.open_repository()
-        repo.lock_read()
-        self.addCleanup(repo.unlock)
-        index = repo.chk_bytes._index._graph_index._indices[0]
-        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
-
-    def test_fetch_combines_groups(self):
-        builder = self.make_branch_builder('source', format='2a')
-        builder.start_series()
-        builder.build_snapshot('1', None, [
-            ('add', ('', 'root-id', 'directory', '')),
-            ('add', ('file', 'file-id', 'file', 'content\n'))])
-        builder.build_snapshot('2', ['1'], [
-            ('modify', ('file-id', 'content-2\n'))])
-        builder.finish_series()
-        source = builder.get_branch()
-        target = self.make_repository('target', format='2a')
-        target.fetch(source.repository)
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        details = target.texts._index.get_build_details(
-            [('file-id', '1',), ('file-id', '2',)])
-        file_1_details = details[('file-id', '1')]
-        file_2_details = details[('file-id', '2')]
-        # The index, and what to read off disk, should be the same for both
-        # versions of the file.
-        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
-
-    def test_fetch_combines_groups(self):
-        builder = self.make_branch_builder('source', format='2a')
-        builder.start_series()
-        builder.build_snapshot('1', None, [
-            ('add', ('', 'root-id', 'directory', '')),
-            ('add', ('file', 'file-id', 'file', 'content\n'))])
-        builder.build_snapshot('2', ['1'], [
-            ('modify', ('file-id', 'content-2\n'))])
-        builder.finish_series()
-        source = builder.get_branch()
-        target = self.make_repository('target', format='2a')
-        target.fetch(source.repository)
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        details = target.texts._index.get_build_details(
-            [('file-id', '1',), ('file-id', '2',)])
-        file_1_details = details[('file-id', '1')]
-        file_2_details = details[('file-id', '2')]
-        # The index, and what to read off disk, should be the same for both
-        # versions of the file.
-        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
-
-    def test_fetch_combines_groups(self):
-        builder = self.make_branch_builder('source', format='2a')
-        builder.start_series()
-        builder.build_snapshot('1', None, [
-            ('add', ('', 'root-id', 'directory', '')),
-            ('add', ('file', 'file-id', 'file', 'content\n'))])
-        builder.build_snapshot('2', ['1'], [
-            ('modify', ('file-id', 'content-2\n'))])
-        builder.finish_series()
-        source = builder.get_branch()
-        target = self.make_repository('target', format='2a')
-        target.fetch(source.repository)
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        details = target.texts._index.get_build_details(
-            [('file-id', '1',), ('file-id', '2',)])
-        file_1_details = details[('file-id', '1')]
-        file_2_details = details[('file-id', '2')]
-        # The index, and what to read off disk, should be the same for both
-        # versions of the file.
-        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
-
-    def test_format_pack_compresses_True(self):
-        repo = self.make_repository('repo', format='2a')
-        self.assertTrue(repo._format.pack_compresses)
+class TestDevelopment6(TestCaseWithTransport):
 
     def test_inventories_use_chk_map_with_parent_base_dict(self):
-        tree = self.make_branch_and_memory_tree('repo', format="2a")
-        tree.lock_write()
-        tree.add([''], ['TREE_ROOT'])
+        tree = self.make_branch_and_tree('repo', format="development6-rich-root")
        revid = tree.commit("foo")
-        tree.unlock()
         tree.lock_read()
         self.addCleanup(tree.unlock)
         inv = tree.branch.repository.get_inventory(revid)
@@ -783,195 +685,12 @@
         self.assertEqual(65536,
             inv.parent_id_basename_to_file_id._root_node.maximum_size)
 
-    def test_autopack_unchanged_chk_nodes(self):
-        # at 20 unchanged commits, chk pages are packed that are split into
-        # two groups such that the new pack being made doesn't have all its
-        # pages in the source packs (though they are in the repository).
-        # Use a memory backed repository, we don't need to hit disk for this
-        tree = self.make_branch_and_memory_tree('tree', format='2a')
-        tree.lock_write()
-        self.addCleanup(tree.unlock)
-        tree.add([''], ['TREE_ROOT'])
-        for pos in range(20):
-            tree.commit(str(pos))
-
-    def test_pack_with_hint(self):
-        tree = self.make_branch_and_memory_tree('tree', format='2a')
-        tree.lock_write()
-        self.addCleanup(tree.unlock)
-        tree.add([''], ['TREE_ROOT'])
-        # 1 commit to leave untouched
-        tree.commit('1')
-        to_keep = tree.branch.repository._pack_collection.names()
-        # 2 to combine
-        tree.commit('2')
-        tree.commit('3')
-        all = tree.branch.repository._pack_collection.names()
-        combine = list(set(all) - set(to_keep))
-        self.assertLength(3, all)
-        self.assertLength(2, combine)
-        tree.branch.repository.pack(hint=combine)
-        final = tree.branch.repository._pack_collection.names()
-        self.assertLength(2, final)
-        self.assertFalse(combine[0] in final)
-        self.assertFalse(combine[1] in final)
-        self.assertSubset(to_keep, final)
-
-    def test_stream_source_to_gc(self):
-        source = self.make_repository('source', format='2a')
-        target = self.make_repository('target', format='2a')
-        stream = source._get_source(target._format)
-        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)
-
-    def test_stream_source_to_non_gc(self):
-        source = self.make_repository('source', format='2a')
-        target = self.make_repository('target', format='rich-root-pack')
-        stream = source._get_source(target._format)
-        # We don't want the child GroupCHKStreamSource
-        self.assertIs(type(stream), repository.StreamSource)
-
-    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
-        source_builder = self.make_branch_builder('source',
-                            format='2a')
-        # We have to build a fairly large tree, so that we are sure the chk
-        # pages will have split into multiple pages.
-        entries = [('add', ('', 'a-root-id', 'directory', None))]
-        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
-            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
-                fname = i + j
-                fid = fname + '-id'
-                content = 'content for %s\n' % (fname,)
-                entries.append(('add', (fname, fid, 'file', content)))
-        source_builder.start_series()
-        source_builder.build_snapshot('rev-1', None, entries)
-        # Now change a few of them, so we get a few new pages for the second
-        # revision
-        source_builder.build_snapshot('rev-2', ['rev-1'], [
-            ('modify', ('aa-id', 'new content for aa-id\n')),
-            ('modify', ('cc-id', 'new content for cc-id\n')),
-            ('modify', ('zz-id', 'new content for zz-id\n')),
-            ])
-        source_builder.finish_series()
-        source_branch = source_builder.get_branch()
-        source_branch.lock_read()
-        self.addCleanup(source_branch.unlock)
-        target = self.make_repository('target', format='2a')
-        source = source_branch.repository._get_source(target._format)
-        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)
-
-        # On a regular pass, getting the inventories and chk pages for rev-2
-        # would only get the newly created chk pages
-        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
-                                    set(['rev-2']))
-        simple_chk_records = []
-        for vf_name, substream in source.get_stream(search):
-            if vf_name == 'chk_bytes':
-                for record in substream:
-                    simple_chk_records.append(record.key)
-            else:
-                for _ in substream:
-                    continue
-        # 3 pages, the root (InternalNode), + 2 pages which actually changed
-        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
-                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
-                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
-                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
-                         simple_chk_records)
-        # Now, when we do a similar call using 'get_stream_for_missing_keys'
-        # we should get a much larger set of pages.
-        missing = [('inventories', 'rev-2')]
-        full_chk_records = []
-        for vf_name, substream in source.get_stream_for_missing_keys(missing):
-            if vf_name == 'inventories':
-                for record in substream:
-                    self.assertEqual(('rev-2',), record.key)
-            elif vf_name == 'chk_bytes':
-                for record in substream:
-                    full_chk_records.append(record.key)
-            else:
-                self.fail('Should not be getting a stream of %s' % (vf_name,))
-        # We have 257 records now. This is because we have 1 root page, and 256
-        # leaf pages in a complete listing.
-        self.assertEqual(257, len(full_chk_records))
-        self.assertSubset(simple_chk_records, full_chk_records)
-
-    def test_inconsistency_fatal(self):
-        repo = self.make_repository('repo', format='2a')
-        self.assertTrue(repo.revisions._index._inconsistency_fatal)
-        self.assertFalse(repo.texts._index._inconsistency_fatal)
-        self.assertFalse(repo.inventories._index._inconsistency_fatal)
-        self.assertFalse(repo.signatures._index._inconsistency_fatal)
-        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
-
-
-class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):
-
-    def test_source_to_exact_pack_092(self):
-        source = self.make_repository('source', format='pack-0.92')
-        target = self.make_repository('target', format='pack-0.92')
-        stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
-
-    def test_source_to_exact_pack_rich_root_pack(self):
-        source = self.make_repository('source', format='rich-root-pack')
-        target = self.make_repository('target', format='rich-root-pack')
-        stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
-
-    def test_source_to_exact_pack_19(self):
-        source = self.make_repository('source', format='1.9')
-        target = self.make_repository('target', format='1.9')
-        stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
-
-    def test_source_to_exact_pack_19_rich_root(self):
-        source = self.make_repository('source', format='1.9-rich-root')
-        target = self.make_repository('target', format='1.9-rich-root')
-        stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
-
-    def test_source_to_remote_exact_pack_19(self):
-        trans = self.make_smart_server('target')
-        trans.ensure_base()
-        source = self.make_repository('source', format='1.9')
-        target = self.make_repository('target', format='1.9')
-        target = repository.Repository.open(trans.base)
-        stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
-
-    def test_stream_source_to_non_exact(self):
-        source = self.make_repository('source', format='pack-0.92')
-        target = self.make_repository('target', format='1.9')
-        stream = source._get_source(target._format)
-        self.assertIs(type(stream), repository.StreamSource)
-
-    def test_stream_source_to_non_exact_rich_root(self):
-        source = self.make_repository('source', format='1.9')
-        target = self.make_repository('target', format='1.9-rich-root')
-        stream = source._get_source(target._format)
-        self.assertIs(type(stream), repository.StreamSource)
-
-    def test_source_to_remote_non_exact_pack_19(self):
-        trans = self.make_smart_server('target')
-        trans.ensure_base()
-        source = self.make_repository('source', format='1.9')
-        target = self.make_repository('target', format='1.6')
-        target = repository.Repository.open(trans.base)
-        stream_source = source._get_source(target._format)
-        self.assertIs(type(stream_source), repository.StreamSource)
-
-    def test_stream_source_to_knit(self):
-        source = self.make_repository('source', format='pack-0.92')
-        target = self.make_repository('target', format='dirstate')
-        stream = source._get_source(target._format)
-        self.assertIs(type(stream), repository.StreamSource)
-
-
-class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
-    """Tests for _find_parent_ids_of_revisions."""
+
+class TestDevelopment6FindRevisionOutsideSet(TestCaseWithTransport):
+    """Tests for _find_revision_outside_set."""
 
     def setUp(self):
-        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
+        super(TestDevelopment6FindRevisionOutsideSet, self).setUp()
         self.builder = self.make_branch_builder('source',
             format='development6-rich-root')
         self.builder.start_series()
@@ -980,42 +699,42 @@
         self.repo = self.builder.get_branch().repository
         self.addCleanup(self.builder.finish_series)
 
-    def assertParentIds(self, expected_result, rev_set):
-        self.assertEqual(sorted(expected_result),
-            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))
+    def assertRevisionOutsideSet(self, expected_result, rev_set):
+        self.assertEqual(
+            expected_result, self.repo._find_revision_outside_set(rev_set))
 
     def test_simple(self):
         self.builder.build_snapshot('revid1', None, [])
-        self.builder.build_snapshot('revid2', ['revid1'], [])
+        self.builder.build_snapshot('revid2', None, [])
         rev_set = ['revid2']
-        self.assertParentIds(['revid1'], rev_set)
+        self.assertRevisionOutsideSet('revid1', rev_set)
 
     def test_not_first_parent(self):
         self.builder.build_snapshot('revid1', None, [])
-        self.builder.build_snapshot('revid2', ['revid1'], [])
-        self.builder.build_snapshot('revid3', ['revid2'], [])
+        self.builder.build_snapshot('revid2', None, [])
+        self.builder.build_snapshot('revid3', None, [])
         rev_set = ['revid3', 'revid2']
-        self.assertParentIds(['revid1'], rev_set)
+        self.assertRevisionOutsideSet('revid1', rev_set)
 
     def test_not_null(self):
         rev_set = ['initial']
-        self.assertParentIds([], rev_set)
+        self.assertRevisionOutsideSet(_mod_revision.NULL_REVISION, rev_set)
 
     def test_not_null_set(self):
         self.builder.build_snapshot('revid1', None, [])
         rev_set = [_mod_revision.NULL_REVISION]
-        self.assertParentIds([], rev_set)
+        self.assertRevisionOutsideSet(_mod_revision.NULL_REVISION, rev_set)
 
     def test_ghost(self):
         self.builder.build_snapshot('revid1', None, [])
         rev_set = ['ghost', 'revid1']
-        self.assertParentIds(['initial'], rev_set)
+        self.assertRevisionOutsideSet('initial', rev_set)
 
     def test_ghost_parent(self):
         self.builder.build_snapshot('revid1', None, [])
         self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
         rev_set = ['revid2', 'revid1']
-        self.assertParentIds(['ghost', 'initial'], rev_set)
+        self.assertRevisionOutsideSet('initial', rev_set)
 
     def test_righthand_parent(self):
         self.builder.build_snapshot('revid1', None, [])
@@ -1023,7 +742,7 @@
         self.builder.build_snapshot('revid2b', ['revid1'], [])
         self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
         rev_set = ['revid3', 'revid2a']
-        self.assertParentIds(['revid1', 'revid2b'], rev_set)
+        self.assertRevisionOutsideSet('revid2b', rev_set)
 
 
 class TestWithBrokenRepo(TestCaseWithTransport):
@@ -1044,7 +763,6 @@
             inv = inventory.Inventory(revision_id='rev1a')
             inv.root.revision = 'rev1a'
             self.add_file(repo, inv, 'file1', 'rev1a', [])
-            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
             repo.add_inventory('rev1a', inv, [])
             revision = _mod_revision.Revision('rev1a',
                 committer='jrandom@example.com', timestamp=0,
@@ -1085,7 +803,6 @@
     def add_revision(self, repo, revision_id, inv, parent_ids):
         inv.revision_id = revision_id
         inv.root.revision = revision_id
-        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
         repo.add_inventory(revision_id, inv, parent_ids)
         revision = _mod_revision.Revision(revision_id,
             committer='jrandom@example.com', timestamp=0, inventory_sha1='',
@@ -1108,17 +825,8 @@
         """
         broken_repo = self.make_broken_repository()
         empty_repo = self.make_repository('empty-repo')
-        try:
-            empty_repo.fetch(broken_repo)
-        except (errors.RevisionNotPresent, errors.BzrCheckError):
-            # Test successful: compression parent not being copied leads to
-            # error.
-            return
-        empty_repo.lock_read()
-        self.addCleanup(empty_repo.unlock)
-        text = empty_repo.texts.get_record_stream(
-            [('file2-id', 'rev3')], 'topological', True).next()
-        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
+        self.assertRaises((errors.RevisionNotPresent, errors.BzrCheckError),
+                          empty_repo.fetch, broken_repo)
 
 
 class TestRepositoryPackCollection(TestCaseWithTransport):
@@ -1133,7 +841,7 @@
 
     def make_packs_and_alt_repo(self, write_lock=False):
         """Create a pack repo with 3 packs, and access it via a second repo."""
-        tree = self.make_branch_and_tree('.', format=self.get_format())
+        tree = self.make_branch_and_tree('.')
         tree.lock_write()
         self.addCleanup(tree.unlock)
         rev1 = tree.commit('one')
@@ -1149,31 +857,6 @@
         packs.ensure_loaded()
         return tree, r, packs, [rev1, rev2, rev3]
 
-    def test__clear_obsolete_packs(self):
-        packs = self.get_packs()
-        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
-        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
-        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
-        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
-        res = packs._clear_obsolete_packs()
-        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
-        self.assertEqual([], obsolete_pack_trans.list_dir('.'))
-
-    def test__clear_obsolete_packs_preserve(self):
-        packs = self.get_packs()
-        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
-        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
-        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
-        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
-        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
-        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
-        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
-                         sorted(obsolete_pack_trans.list_dir('.')))
-
     def test__max_pack_count(self):
         """The maximum pack count is a function of the number of revisions."""
         # no revisions - one pack, so that we can have a revision free repo
@@ -1199,33 +882,6 @@
         # check some arbitrary big numbers
         self.assertEqual(25, packs._max_pack_count(112894))
 
-    def test_repr(self):
-        packs = self.get_packs()
-        self.assertContainsRe(repr(packs),
-            'RepositoryPackCollection(.*Repository(.*))')
-
-    def test__obsolete_packs(self):
-        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
-        names = packs.names()
-        pack = packs.get_pack_by_name(names[0])
-        # Schedule this one for removal
-        packs._remove_pack_from_memory(pack)
-        # Simulate a concurrent update by renaming the .pack file and one of
-        # the indices
-        packs.transport.rename('packs/%s.pack' % (names[0],),
-                               'obsolete_packs/%s.pack' % (names[0],))
-        packs.transport.rename('indices/%s.iix' % (names[0],),
-                               'obsolete_packs/%s.iix' % (names[0],))
-        # Now trigger the obsoletion, and ensure that all the remaining files
-        # are still renamed
-        packs._obsolete_packs([pack])
-        self.assertEqual([n + '.pack' for n in names[1:]],
-                         sorted(packs._pack_transport.list_dir('.')))
-        # names[0] should not be present in the index anymore
-        self.assertEqual(names[1:],
-            sorted(set([osutils.splitext(n)[0] for n in
-                        packs._index_transport.list_dir('.')])))
-
     def test_pack_distribution_zero(self):
         packs = self.get_packs()
         self.assertEqual([0], packs.pack_distribution(0))
@@ -1399,60 +1055,6 @@
         self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
         self.assertFalse(packs.reload_pack_names())
 
-    def test_reload_pack_names_preserves_pending(self):
-        # TODO: Update this to also test for pending-deleted names
-        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
-        # We will add one pack (via start_write_group + insert_record_stream),
-        # and remove another pack (via _remove_pack_from_memory)
-        orig_names = packs.names()
-        orig_at_load = packs._packs_at_load
-        to_remove_name = iter(orig_names).next()
-        r.start_write_group()
-        self.addCleanup(r.abort_write_group)
-        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
-            ('text', 'rev'), (), None, 'content\n')])
-        new_pack = packs._new_pack
-        self.assertTrue(new_pack.data_inserted())
-        new_pack.finish()
-        packs.allocate(new_pack)
-        packs._new_pack = None
-        removed_pack = packs.get_pack_by_name(to_remove_name)
-        packs._remove_pack_from_memory(removed_pack)
-        names = packs.names()
-        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
-        new_names = set([x[0][0] for x in new_nodes])
-        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
-        self.assertEqual(set(names) - set(orig_names), new_names)
-        self.assertEqual(set([new_pack.name]), new_names)
-        self.assertEqual([to_remove_name],
-                         sorted([x[0][0] for x in deleted_nodes]))
-        packs.reload_pack_names()
-        reloaded_names = packs.names()
-        self.assertEqual(orig_at_load, packs._packs_at_load)
-        self.assertEqual(names, reloaded_names)
-        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
-        new_names = set([x[0][0] for x in new_nodes])
-        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
-        self.assertEqual(set(names) - set(orig_names), new_names)
-        self.assertEqual(set([new_pack.name]), new_names)
-        self.assertEqual([to_remove_name],
-                         sorted([x[0][0] for x in deleted_nodes]))
-
-    def test_autopack_obsoletes_new_pack(self):
-        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
-        packs._max_pack_count = lambda x: 1
-        packs.pack_distribution = lambda x: [10]
-        r.start_write_group()
-        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
-            ('bogus-rev',), (), None, 'bogus-content\n')])
-        # This should trigger an autopack, which will combine everything into a
-        # single pack file.
-        new_names = r.commit_write_group()
-        names = packs.names()
-        self.assertEqual(1, len(names))
-        self.assertEqual([names[0] + '.pack'],
-                         packs._pack_transport.list_dir('.'))
-
     def test_autopack_reloads_and_stops(self):
         tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
         # After we have determined what needs to be autopacked, trigger a
@@ -1470,38 +1072,6 @@
         self.assertEqual(tree.branch.repository._pack_collection.names(),
                          packs.names())
 
-    def test__save_pack_names(self):
-        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
-        names = packs.names()
-        pack = packs.get_pack_by_name(names[0])
-        packs._remove_pack_from_memory(pack)
-        packs._save_pack_names(obsolete_packs=[pack])
-        cur_packs = packs._pack_transport.list_dir('.')
-        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
-        # obsolete_packs will also have stuff like .rix and .iix present.
-        obsolete_packs = packs.transport.list_dir('obsolete_packs')
-        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
-        self.assertEqual([pack.name], sorted(obsolete_names))
-
-    def test__save_pack_names_already_obsoleted(self):
-        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
-        names = packs.names()
-        pack = packs.get_pack_by_name(names[0])
-        packs._remove_pack_from_memory(pack)
-        # We are going to simulate a concurrent autopack by manually obsoleting
-        # the pack directly.
-        packs._obsolete_packs([pack])
-        packs._save_pack_names(clear_obsolete_packs=True,
-                               obsolete_packs=[pack])
-        cur_packs = packs._pack_transport.list_dir('.')
-        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
-        # Note that while we set clear_obsolete_packs=True, it should not
-        # delete a pack file that we have also scheduled for obsoletion.
-        obsolete_packs = packs.transport.list_dir('obsolete_packs')
-        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
-        self.assertEqual([pack.name], sorted(obsolete_names))
-
-
 
 class TestPack(TestCaseWithTransport):
     """Tests for the Pack object."""
@@ -1571,7 +1141,6 @@
             index_class=BTreeGraphIndex,
             use_chk_index=False)
         pack = pack_repo.NewPack(collection)
-        self.addCleanup(pack.abort) # Make sure the write stream gets closed
         self.assertIsInstance(pack.revision_index, BTreeBuilder)
         self.assertIsInstance(pack.inventory_index, BTreeBuilder)
         self.assertIsInstance(pack._hash, type(osutils.md5()))
@@ -1588,7 +1157,7 @@
     """Tests for the packs repository Packer class."""
 
     def test_pack_optimizes_pack_order(self):
-        builder = self.make_branch_builder('.', format="1.9")
+        builder = self.make_branch_builder('.')
         builder.start_series()
         builder.build_snapshot('A', None, [
             ('add', ('', 'root-id', 'directory', None)),
@@ -1630,7 +1199,6 @@
         packer = pack_repo.OptimisingPacker(self.get_pack_collection(),
                                             [], '.test')
         new_pack = packer.open_pack()
-        self.addCleanup(new_pack.abort) # ensure cleanup
        self.assertIsInstance(new_pack, pack_repo.NewPack)
         self.assertTrue(new_pack.revision_index._optimize_for_size)
         self.assertTrue(new_pack.inventory_index._optimize_for_size)
@@ -1638,81 +1206,17 @@
         self.assertTrue(new_pack.signature_index._optimize_for_size)
 
 
-class TestCrossFormatPacks(TestCaseWithTransport):
-
-    def log_pack(self, hint=None):
-        self.calls.append(('pack', hint))
-        self.orig_pack(hint=hint)
-        if self.expect_hint:
-            self.assertTrue(hint)
-
-    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
-        self.expect_hint = expect_pack_called
-        self.calls = []
-        source_tree = self.make_branch_and_tree('src', format=src_fmt)
-        source_tree.lock_write()
-        self.addCleanup(source_tree.unlock)
-        tip = source_tree.commit('foo')
-        target = self.make_repository('target', format=target_fmt)
-        target.lock_write()
-        self.addCleanup(target.unlock)
-        source = source_tree.branch.repository._get_source(target._format)
-        self.orig_pack = target.pack
-        target.pack = self.log_pack
-        search = target.search_missing_revision_ids(
-            source_tree.branch.repository, tip)
-        stream = source.get_stream(search)
-        from_format = source_tree.branch.repository._format
-        sink = target._get_sink()
-        sink.insert_stream(stream, from_format, [])
-        if expect_pack_called:
-            self.assertLength(1, self.calls)
-        else:
-            self.assertLength(0, self.calls)
-
-    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
-        self.expect_hint = expect_pack_called
-        self.calls = []
-        source_tree = self.make_branch_and_tree('src', format=src_fmt)
-        source_tree.lock_write()
-        self.addCleanup(source_tree.unlock)
-        tip = source_tree.commit('foo')
-        target = self.make_repository('target', format=target_fmt)
-        target.lock_write()
-        self.addCleanup(target.unlock)
-        source = source_tree.branch.repository
-        self.orig_pack = target.pack
-        target.pack = self.log_pack
-        target.fetch(source)
-        if expect_pack_called:
-            self.assertLength(1, self.calls)
-        else:
-            self.assertLength(0, self.calls)
-
-    def test_sink_format_hint_no(self):
-        # When the target format says packing makes no difference, pack is not
-        # called.
-        self.run_stream('1.9', 'rich-root-pack', False)
-
-    def test_sink_format_hint_yes(self):
-        # When the target format says packing makes a difference, pack is
-        # called.
-        self.run_stream('1.9', '2a', True)
-
-    def test_sink_format_same_no(self):
-        # When the formats are the same, pack is not called.
-        self.run_stream('2a', '2a', False)
-
-    def test_IDS_format_hint_no(self):
-        # When the target format says packing makes no difference, pack is not
-        # called.
-        self.run_fetch('1.9', 'rich-root-pack', False)
-
-    def test_IDS_format_hint_yes(self):
-        # When the target format says packing makes a difference, pack is
-        # called.
-        self.run_fetch('1.9', '2a', True)
-
-    def test_IDS_format_same_no(self):
-        # When the formats are the same, pack is not called.
-        self.run_fetch('2a', '2a', False)
+class TestGCCHKPackCollection(TestCaseWithTransport):
+
+    def test_stream_source_to_gc(self):
+        source = self.make_repository('source', format='development6-rich-root')
+        target = self.make_repository('target', format='development6-rich-root')
+        stream = source._get_source(target._format)
+        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)
+
+    def test_stream_source_to_non_gc(self):
+        source = self.make_repository('source', format='development6-rich-root')
+        target = self.make_repository('target', format='rich-root-pack')
+        stream = source._get_source(target._format)
+        # We don't want the child GroupCHKStreamSource
+        self.assertIs(type(stream), repository.StreamSource)