~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/tests/test_repository.py

  • Committer: Mark Hammond
  • Date: 2009-01-12 01:55:34 UTC
  • mto: (3995.8.2 prepare-1.12)
  • mto: This revision was merged to the branch mainline in revision 4007.
  • Revision ID: mhammond@skippinet.com.au-20090112015534-yfxg50p7mpds9j4v
Include all .html files from the tortoise doc directory.

=== modified file 'bzrlib/tests/test_repository.py'
--- bzrlib/tests/test_repository.py
+++ bzrlib/tests/test_repository.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2006, 2007, 2008 Canonical Ltd
+# Copyright (C) 2006, 2007, 2008, 2009 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -12,7 +12,7 @@
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
 """Tests for the Repository facility that are not interface tests.
 
@@ -24,6 +24,7 @@
 
 from stat import S_ISDIR
 from StringIO import StringIO
+import sys
 
 import bzrlib
 from bzrlib.errors import (NotBranchError,
@@ -31,7 +32,11 @@
                            UnknownFormatError,
                            UnsupportedFormatError,
                            )
-from bzrlib import graph
+from bzrlib import (
+    graph,
+    tests,
+    )
+from bzrlib.branchbuilder import BranchBuilder
 from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
 from bzrlib.index import GraphIndex, InMemoryGraphIndex
 from bzrlib.repository import RepositoryFormat
@@ -47,8 +52,8 @@
     get_transport,
     )
 from bzrlib.transport.memory import MemoryServer
-from bzrlib.util import bencode
 from bzrlib import (
+    bencode,
     bzrdir,
     errors,
     inventory,
@@ -60,7 +65,12 @@
     upgrade,
     workingtree,
     )
-from bzrlib.repofmt import knitrepo, weaverepo, pack_repo
+from bzrlib.repofmt import (
+    groupcompress_repo,
+    knitrepo,
+    pack_repo,
+    weaverepo,
+    )
 
 
 class TestDefaultFormat(TestCase):
@@ -95,7 +105,7 @@
 class SampleRepositoryFormat(repository.RepositoryFormat):
     """A sample format
 
-    this format is initializable, unsupported to aid in testing the 
+    this format is initializable, unsupported to aid in testing the
     open and open(unsupported=True) routines.
     """
 
@@ -122,7 +132,7 @@
     def test_find_format(self):
         # is the right format object found for a repository?
         # create a branch with a few known format objects.
-        # this is not quite the same as 
+        # this is not quite the same as
         self.build_tree(["foo/", "bar/"])
         def check_format(format, url):
             dir = format._matchingbzrdir.initialize(url)
@@ -131,7 +141,7 @@
             found_format = repository.RepositoryFormat.find_format(dir)
             self.failUnless(isinstance(found_format, format.__class__))
         check_format(weaverepo.RepositoryFormat7(), "bar")
-        
+
     def test_find_format_no_repository(self):
         dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
         self.assertRaises(errors.NoRepositoryPresent,
@@ -167,19 +177,19 @@
         """Weaves need topological data insertion."""
         control = bzrdir.BzrDirFormat6().initialize(self.get_url())
         repo = weaverepo.RepositoryFormat6().initialize(control)
-        self.assertEqual('topological', repo._fetch_order)
+        self.assertEqual('topological', repo._format._fetch_order)
 
     def test_attribute__fetch_uses_deltas(self):
         """Weaves do not reuse deltas."""
         control = bzrdir.BzrDirFormat6().initialize(self.get_url())
         repo = weaverepo.RepositoryFormat6().initialize(control)
-        self.assertEqual(False, repo._fetch_uses_deltas)
+        self.assertEqual(False, repo._format._fetch_uses_deltas)
 
     def test_attribute__fetch_reconcile(self):
         """Weave repositories need a reconcile after fetch."""
         control = bzrdir.BzrDirFormat6().initialize(self.get_url())
         repo = weaverepo.RepositoryFormat6().initialize(control)
-        self.assertEqual(True, repo._fetch_reconcile)
+        self.assertEqual(True, repo._format._fetch_reconcile)
 
     def test_no_ancestry_weave(self):
         control = bzrdir.BzrDirFormat6().initialize(self.get_url())
@@ -202,19 +212,19 @@
         """Weaves need topological data insertion."""
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
         repo = weaverepo.RepositoryFormat7().initialize(control)
-        self.assertEqual('topological', repo._fetch_order)
+        self.assertEqual('topological', repo._format._fetch_order)
 
     def test_attribute__fetch_uses_deltas(self):
         """Weaves do not reuse deltas."""
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
         repo = weaverepo.RepositoryFormat7().initialize(control)
-        self.assertEqual(False, repo._fetch_uses_deltas)
+        self.assertEqual(False, repo._format._fetch_uses_deltas)
 
     def test_attribute__fetch_reconcile(self):
         """Weave repositories need a reconcile after fetch."""
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
         repo = weaverepo.RepositoryFormat7().initialize(control)
-        self.assertEqual(True, repo._fetch_reconcile)
+        self.assertEqual(True, repo._format._fetch_reconcile)
 
     def test_disk_layout(self):
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
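The assertions in the two hunks above (and the matching knit and pack variants later in this diff) move the fetch-behaviour attributes from the repository object to its format object: repo._fetch_order becomes repo._format._fetch_order, and likewise for _fetch_uses_deltas and _fetch_reconcile. A minimal sketch of the shape being exercised; the class names here are invented for illustration and are not bzrlib's real hierarchy:

    class IllustrativeFormat(object):
        # per-format fetch policy, as asserted by the weave tests above
        _fetch_order = 'topological'
        _fetch_uses_deltas = False
        _fetch_reconcile = True

    class IllustrativeRepository(object):
        def __init__(self):
            self._format = IllustrativeFormat()

    repo = IllustrativeRepository()
    assert repo._format._fetch_order == 'topological'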
@@ -243,7 +253,14 @@
         tree = control.create_workingtree()
         tree.add(['foo'], ['Foo:Bar'], ['file'])
         tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
-        tree.commit('first post', rev_id='first')
+        try:
+            tree.commit('first post', rev_id='first')
+        except errors.IllegalPath:
+            if sys.platform != 'win32':
+                raise
+            self.knownFailure('Foo:Bar cannot be used as a file-id on windows'
+                              ' in repo format 7')
+            return
         self.assertEqualDiff(
             '# bzr weave file v5\n'
             'i\n'
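'Foo:Bar' is a deliberately awkward file-id: a colon is not a legal path character on Windows, and weave format 7 stores texts under the file-id, so the commit can raise errors.IllegalPath there. The try/except added above turns that case into a known failure on win32 only and re-raises everywhere else. The guard pattern, reduced to its essentials (do_platform_sensitive_thing and SomePlatformError are placeholders, not real bzrlib names):

    import sys

    try:
        do_platform_sensitive_thing()
    except SomePlatformError:
        if sys.platform != 'win32':
            raise                      # unexpected anywhere else
        self.knownFailure('not supported on windows')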
@@ -284,7 +301,7 @@
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
         repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
         t = control.get_repository_transport(None)
-        # TODO: Should check there is a 'lock' toplevel directory, 
+        # TODO: Should check there is a 'lock' toplevel directory,
         # regardless of contents
         self.assertFalse(t.has('lock/held/info'))
         repo.lock_write()
@@ -343,18 +360,18 @@
 
 
 class TestFormatKnit1(TestCaseWithTransport):
-    
+
     def test_attribute__fetch_order(self):
         """Knits need topological data insertion."""
         repo = self.make_repository('.',
                 format=bzrdir.format_registry.get('knit')())
-        self.assertEqual('topological', repo._fetch_order)
+        self.assertEqual('topological', repo._format._fetch_order)
 
     def test_attribute__fetch_uses_deltas(self):
         """Knits reuse deltas."""
         repo = self.make_repository('.',
                 format=bzrdir.format_registry.get('knit')())
-        self.assertEqual(True, repo._fetch_uses_deltas)
+        self.assertEqual(True, repo._format._fetch_uses_deltas)
 
     def test_disk_layout(self):
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
@@ -477,6 +494,8 @@
     _serializer = None
 
     def supports_rich_root(self):
+        if self._format is not None:
+            return self._format.rich_root_data
         return False
 
     def get_graph(self):
@@ -497,7 +516,7 @@
     @staticmethod
     def is_compatible(repo_source, repo_target):
         """InterDummy is compatible with DummyRepository."""
-        return (isinstance(repo_source, DummyRepository) and 
+        return (isinstance(repo_source, DummyRepository) and
             isinstance(repo_target, DummyRepository))
 
 
@@ -516,7 +535,7 @@
 
     def assertGetsDefaultInterRepository(self, repo_a, repo_b):
         """Asserts that InterRepository.get(repo_a, repo_b) -> the default.
-        
+
         The effective default is now InterSameDataRepository because there is
         no actual sane default in the presence of incompatible data models.
         """
@@ -533,11 +552,17 @@
         # pair that it returns true on for the is_compatible static method
         # check
         dummy_a = DummyRepository()
+        dummy_a._format = RepositoryFormat()
         dummy_b = DummyRepository()
+        dummy_b._format = RepositoryFormat()
         repo = self.make_repository('.')
         # hack dummies to look like repo somewhat.
         dummy_a._serializer = repo._serializer
+        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
+        dummy_a._format.rich_root_data = repo._format.rich_root_data
         dummy_b._serializer = repo._serializer
+        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
+        dummy_b._format.rich_root_data = repo._format.rich_root_data
         repository.InterRepository.register_optimiser(InterDummy)
         try:
             # we should get the default for something InterDummy returns False
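DummyRepository.supports_rich_root() now defers to its attached format (see the supports_rich_root hunk above), and InterRepository compatibility checks consult format-level flags, so the dummies in this test are given a real RepositoryFormat whose rich_root_data and supports_tree_reference are copied from an actual repository. A reduced sketch of the wiring the test sets up (illustrative only; real_repo is a placeholder for the repository created by make_repository):

    dummy = DummyRepository()
    dummy._format = RepositoryFormat()
    dummy._serializer = real_repo._serializer
    dummy._format.rich_root_data = real_repo._format.rich_root_data
    dummy._format.supports_tree_reference = \
        real_repo._format.supports_tree_reference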
@@ -606,10 +631,10 @@
 
 
 class TestMisc(TestCase):
-    
+
     def test_unescape_xml(self):
         """We get some kind of error when malformed entities are passed"""
-        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;') 
+        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;')
 
 
 class TestRepositoryFormatKnit3(TestCaseWithTransport):
@@ -619,14 +644,14 @@
         format = bzrdir.BzrDirMetaFormat1()
         format.repository_format = knitrepo.RepositoryFormatKnit3()
         repo = self.make_repository('.', format=format)
-        self.assertEqual('topological', repo._fetch_order)
+        self.assertEqual('topological', repo._format._fetch_order)
 
     def test_attribute__fetch_uses_deltas(self):
         """Knits reuse deltas."""
         format = bzrdir.BzrDirMetaFormat1()
         format.repository_format = knitrepo.RepositoryFormatKnit3()
         repo = self.make_repository('.', format=format)
-        self.assertEqual(True, repo._fetch_uses_deltas)
+        self.assertEqual(True, repo._format._fetch_uses_deltas)
 
     def test_convert(self):
         """Ensure the upgrade adds weaves for roots"""
@@ -664,6 +689,337 @@
         self.assertFalse(repo._format.supports_external_lookups)
 
 
+class Test2a(tests.TestCaseWithMemoryTransport):
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot('1', None, [
+            ('add', ('', 'root-id', 'directory', '')),
+            ('add', ('file', 'file-id', 'file', 'content\n'))])
+        builder.build_snapshot('2', ['1'], [
+            ('modify', ('file-id', 'content-2\n'))])
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [('file-id', '1',), ('file-id', '2',)])
+        file_1_details = details[('file-id', '1')]
+        file_2_details = details[('file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot('1', None, [
+            ('add', ('', 'root-id', 'directory', '')),
+            ('add', ('file', 'file-id', 'file', 'content\n'))])
+        builder.build_snapshot('2', ['1'], [
+            ('modify', ('file-id', 'content-2\n'))])
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [('file-id', '1',), ('file-id', '2',)])
+        file_1_details = details[('file-id', '1')]
+        file_2_details = details[('file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot('1', None, [
+            ('add', ('', 'root-id', 'directory', '')),
+            ('add', ('file', 'file-id', 'file', 'content\n'))])
+        builder.build_snapshot('2', ['1'], [
+            ('modify', ('file-id', 'content-2\n'))])
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [('file-id', '1',), ('file-id', '2',)])
+        file_1_details = details[('file-id', '1')]
+        file_2_details = details[('file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_format_pack_compresses_True(self):
+        repo = self.make_repository('repo', format='2a')
+        self.assertTrue(repo._format.pack_compresses)
+
+    def test_inventories_use_chk_map_with_parent_base_dict(self):
+        tree = self.make_branch_and_memory_tree('repo', format="2a")
+        tree.lock_write()
+        tree.add([''], ['TREE_ROOT'])
+        revid = tree.commit("foo")
+        tree.unlock()
+        tree.lock_read()
+        self.addCleanup(tree.unlock)
+        inv = tree.branch.repository.get_inventory(revid)
+        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
+        inv.parent_id_basename_to_file_id._ensure_root()
+        inv.id_to_entry._ensure_root()
+        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
+        self.assertEqual(65536,
+            inv.parent_id_basename_to_file_id._root_node.maximum_size)
+
+    def test_autopack_unchanged_chk_nodes(self):
+        # at 20 unchanged commits, chk pages are packed that are split into
+        # two groups such that the new pack being made doesn't have all its
+        # pages in the source packs (though they are in the repository).
+        # Use a memory backed repository, we don't need to hit disk for this
+        tree = self.make_branch_and_memory_tree('tree', format='2a')
+        tree.lock_write()
+        self.addCleanup(tree.unlock)
+        tree.add([''], ['TREE_ROOT'])
+        for pos in range(20):
+            tree.commit(str(pos))
+
+    def test_pack_with_hint(self):
+        tree = self.make_branch_and_memory_tree('tree', format='2a')
+        tree.lock_write()
+        self.addCleanup(tree.unlock)
+        tree.add([''], ['TREE_ROOT'])
+        # 1 commit to leave untouched
+        tree.commit('1')
+        to_keep = tree.branch.repository._pack_collection.names()
+        # 2 to combine
+        tree.commit('2')
+        tree.commit('3')
+        all = tree.branch.repository._pack_collection.names()
+        combine = list(set(all) - set(to_keep))
+        self.assertLength(3, all)
+        self.assertLength(2, combine)
+        tree.branch.repository.pack(hint=combine)
+        final = tree.branch.repository._pack_collection.names()
+        self.assertLength(2, final)
+        self.assertFalse(combine[0] in final)
+        self.assertFalse(combine[1] in final)
+        self.assertSubset(to_keep, final)
+
+    def test_stream_source_to_gc(self):
+        source = self.make_repository('source', format='2a')
+        target = self.make_repository('target', format='2a')
+        stream = source._get_source(target._format)
+        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)
+
+    def test_stream_source_to_non_gc(self):
+        source = self.make_repository('source', format='2a')
+        target = self.make_repository('target', format='rich-root-pack')
+        stream = source._get_source(target._format)
+        # We don't want the child GroupCHKStreamSource
+        self.assertIs(type(stream), repository.StreamSource)
+
+    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
+        source_builder = self.make_branch_builder('source',
+                            format='2a')
+        # We have to build a fairly large tree, so that we are sure the chk
+        # pages will have split into multiple pages.
+        entries = [('add', ('', 'a-root-id', 'directory', None))]
+        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
+            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
+                fname = i + j
+                fid = fname + '-id'
+                content = 'content for %s\n' % (fname,)
+                entries.append(('add', (fname, fid, 'file', content)))
+        source_builder.start_series()
+        source_builder.build_snapshot('rev-1', None, entries)
+        # Now change a few of them, so we get a few new pages for the second
+        # revision
+        source_builder.build_snapshot('rev-2', ['rev-1'], [
+            ('modify', ('aa-id', 'new content for aa-id\n')),
+            ('modify', ('cc-id', 'new content for cc-id\n')),
+            ('modify', ('zz-id', 'new content for zz-id\n')),
+            ])
+        source_builder.finish_series()
+        source_branch = source_builder.get_branch()
+        source_branch.lock_read()
+        self.addCleanup(source_branch.unlock)
+        target = self.make_repository('target', format='2a')
+        source = source_branch.repository._get_source(target._format)
+        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)
+
+        # On a regular pass, getting the inventories and chk pages for rev-2
+        # would only get the newly created chk pages
+        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
+                                    set(['rev-2']))
+        simple_chk_records = []
+        for vf_name, substream in source.get_stream(search):
+            if vf_name == 'chk_bytes':
+                for record in substream:
+                    simple_chk_records.append(record.key)
+            else:
+                for _ in substream:
+                    continue
+        # 3 pages, the root (InternalNode), + 2 pages which actually changed
+        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
+                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
+                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
+                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
+                         simple_chk_records)
+        # Now, when we do a similar call using 'get_stream_for_missing_keys'
+        # we should get a much larger set of pages.
+        missing = [('inventories', 'rev-2')]
+        full_chk_records = []
+        for vf_name, substream in source.get_stream_for_missing_keys(missing):
+            if vf_name == 'inventories':
+                for record in substream:
+                    self.assertEqual(('rev-2',), record.key)
+            elif vf_name == 'chk_bytes':
+                for record in substream:
+                    full_chk_records.append(record.key)
+            else:
+                self.fail('Should not be getting a stream of %s' % (vf_name,))
+        # We have 257 records now. This is because we have 1 root page, and 256
+        # leaf pages in a complete listing.
+        self.assertEqual(257, len(full_chk_records))
+        self.assertSubset(simple_chk_records, full_chk_records)
+
+    def test_inconsistency_fatal(self):
+        repo = self.make_repository('repo', format='2a')
+        self.assertTrue(repo.revisions._index._inconsistency_fatal)
+        self.assertFalse(repo.texts._index._inconsistency_fatal)
+        self.assertFalse(repo.inventories._index._inconsistency_fatal)
+        self.assertFalse(repo.signatures._index._inconsistency_fatal)
+        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
+
+
+class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):
+
+    def test_source_to_exact_pack_092(self):
+        source = self.make_repository('source', format='pack-0.92')
+        target = self.make_repository('target', format='pack-0.92')
+        stream_source = source._get_source(target._format)
+        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+
+    def test_source_to_exact_pack_rich_root_pack(self):
+        source = self.make_repository('source', format='rich-root-pack')
+        target = self.make_repository('target', format='rich-root-pack')
+        stream_source = source._get_source(target._format)
+        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+
+    def test_source_to_exact_pack_19(self):
+        source = self.make_repository('source', format='1.9')
+        target = self.make_repository('target', format='1.9')
+        stream_source = source._get_source(target._format)
+        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+
+    def test_source_to_exact_pack_19_rich_root(self):
+        source = self.make_repository('source', format='1.9-rich-root')
+        target = self.make_repository('target', format='1.9-rich-root')
+        stream_source = source._get_source(target._format)
+        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+
+    def test_source_to_remote_exact_pack_19(self):
+        trans = self.make_smart_server('target')
+        trans.ensure_base()
+        source = self.make_repository('source', format='1.9')
+        target = self.make_repository('target', format='1.9')
+        target = repository.Repository.open(trans.base)
+        stream_source = source._get_source(target._format)
+        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+
+    def test_stream_source_to_non_exact(self):
+        source = self.make_repository('source', format='pack-0.92')
+        target = self.make_repository('target', format='1.9')
+        stream = source._get_source(target._format)
+        self.assertIs(type(stream), repository.StreamSource)
+
+    def test_stream_source_to_non_exact_rich_root(self):
+        source = self.make_repository('source', format='1.9')
+        target = self.make_repository('target', format='1.9-rich-root')
+        stream = source._get_source(target._format)
+        self.assertIs(type(stream), repository.StreamSource)
+
+    def test_source_to_remote_non_exact_pack_19(self):
+        trans = self.make_smart_server('target')
+        trans.ensure_base()
+        source = self.make_repository('source', format='1.9')
+        target = self.make_repository('target', format='1.6')
+        target = repository.Repository.open(trans.base)
+        stream_source = source._get_source(target._format)
+        self.assertIs(type(stream_source), repository.StreamSource)
+
+    def test_stream_source_to_knit(self):
+        source = self.make_repository('source', format='pack-0.92')
+        target = self.make_repository('target', format='dirstate')
+        stream = source._get_source(target._format)
+        self.assertIs(type(stream), repository.StreamSource)
+
+
+class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
+    """Tests for _find_parent_ids_of_revisions."""
+
+    def setUp(self):
+        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
+        self.builder = self.make_branch_builder('source',
+            format='development6-rich-root')
+        self.builder.start_series()
+        self.builder.build_snapshot('initial', None,
+            [('add', ('', 'tree-root', 'directory', None))])
+        self.repo = self.builder.get_branch().repository
+        self.addCleanup(self.builder.finish_series)
+
+    def assertParentIds(self, expected_result, rev_set):
+        self.assertEqual(sorted(expected_result),
+            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))
+
+    def test_simple(self):
+        self.builder.build_snapshot('revid1', None, [])
+        self.builder.build_snapshot('revid2', ['revid1'], [])
+        rev_set = ['revid2']
+        self.assertParentIds(['revid1'], rev_set)
+
+    def test_not_first_parent(self):
+        self.builder.build_snapshot('revid1', None, [])
+        self.builder.build_snapshot('revid2', ['revid1'], [])
+        self.builder.build_snapshot('revid3', ['revid2'], [])
+        rev_set = ['revid3', 'revid2']
+        self.assertParentIds(['revid1'], rev_set)
+
+    def test_not_null(self):
+        rev_set = ['initial']
+        self.assertParentIds([], rev_set)
+
+    def test_not_null_set(self):
+        self.builder.build_snapshot('revid1', None, [])
+        rev_set = [_mod_revision.NULL_REVISION]
+        self.assertParentIds([], rev_set)
+
+    def test_ghost(self):
+        self.builder.build_snapshot('revid1', None, [])
+        rev_set = ['ghost', 'revid1']
+        self.assertParentIds(['initial'], rev_set)
+
+    def test_ghost_parent(self):
+        self.builder.build_snapshot('revid1', None, [])
+        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
+        rev_set = ['revid2', 'revid1']
+        self.assertParentIds(['ghost', 'initial'], rev_set)
+
+    def test_righthand_parent(self):
+        self.builder.build_snapshot('revid1', None, [])
+        self.builder.build_snapshot('revid2a', ['revid1'], [])
+        self.builder.build_snapshot('revid2b', ['revid1'], [])
+        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
+        rev_set = ['revid3', 'revid2a']
+        self.assertParentIds(['revid1', 'revid2b'], rev_set)
+
+
 class TestWithBrokenRepo(TestCaseWithTransport):
     """These tests seem to be more appropriate as interface tests?"""
 
@@ -682,6 +1038,7 @@
             inv = inventory.Inventory(revision_id='rev1a')
             inv.root.revision = 'rev1a'
             self.add_file(repo, inv, 'file1', 'rev1a', [])
+            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
             repo.add_inventory('rev1a', inv, [])
             revision = _mod_revision.Revision('rev1a',
                 committer='jrandom@example.com', timestamp=0,
@@ -722,6 +1079,7 @@
     def add_revision(self, repo, revision_id, inv, parent_ids):
         inv.revision_id = revision_id
         inv.root.revision = revision_id
+        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
         repo.add_inventory(revision_id, inv, parent_ids)
         revision = _mod_revision.Revision(revision_id,
             committer='jrandom@example.com', timestamp=0, inventory_sha1='',
@@ -744,8 +1102,17 @@
         """
         broken_repo = self.make_broken_repository()
         empty_repo = self.make_repository('empty-repo')
-        self.assertRaises((errors.RevisionNotPresent, errors.BzrCheckError),
-                          empty_repo.fetch, broken_repo)
+        try:
+            empty_repo.fetch(broken_repo)
+        except (errors.RevisionNotPresent, errors.BzrCheckError):
+            # Test successful: compression parent not being copied leads to
+            # error.
+            return
+        empty_repo.lock_read()
+        self.addCleanup(empty_repo.unlock)
+        text = empty_repo.texts.get_record_stream(
+            [('file2-id', 'rev3')], 'topological', True).next()
+        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
 
 
 class TestRepositoryPackCollection(TestCaseWithTransport):
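The hunk above relaxes the expectation when fetching from a deliberately broken repository: the old test required fetch to raise, while the new version also accepts a successful fetch, provided the text whose compression parent was in question ('file2-id', 'rev3') is reconstructed to the expected fulltext. The general shape of that "either reject or verify" pattern (function names here are placeholders, not bzrlib APIs):

    try:
        do_fetch()
    except ExpectedDataError:
        pass                                  # refusing the broken data is fine
    else:
        verify_copied_texts_are_consistent()  # silently copying garbage is not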
@@ -760,7 +1127,7 @@
 
     def make_packs_and_alt_repo(self, write_lock=False):
         """Create a pack repo with 3 packs, and access it via a second repo."""
-        tree = self.make_branch_and_tree('.')
+        tree = self.make_branch_and_tree('.', format=self.get_format())
         tree.lock_write()
         self.addCleanup(tree.unlock)
         rev1 = tree.commit('one')
@@ -801,6 +1168,11 @@
         # check some arbitrary big numbers
         self.assertEqual(25, packs._max_pack_count(112894))
 
+    def test_repr(self):
+        packs = self.get_packs()
+        self.assertContainsRe(repr(packs),
+            'RepositoryPackCollection(.*Repository(.*))')
+
     def test_pack_distribution_zero(self):
         packs = self.get_packs()
         self.assertEqual([0], packs.pack_distribution(0))
@@ -929,6 +1301,7 @@
         tree.lock_read()
         self.addCleanup(tree.unlock)
         packs = tree.branch.repository._pack_collection
+        packs.reset()
         packs.ensure_loaded()
         name = packs.names()[0]
         pack_1 = packs.get_pack_by_name(name)
@@ -1049,14 +1422,17 @@
         pack_transport = self.get_transport('pack')
         index_transport = self.get_transport('index')
         upload_transport.mkdir('.')
-        collection = pack_repo.RepositoryPackCollection(repo=None,
+        collection = pack_repo.RepositoryPackCollection(
+            repo=None,
             transport=self.get_transport('.'),
             index_transport=index_transport,
             upload_transport=upload_transport,
             pack_transport=pack_transport,
             index_builder_class=BTreeBuilder,
-            index_class=BTreeGraphIndex)
+            index_class=BTreeGraphIndex,
+            use_chk_index=False)
         pack = pack_repo.NewPack(collection)
+        self.addCleanup(pack.abort) # Make sure the write stream gets closed
         self.assertIsInstance(pack.revision_index, BTreeBuilder)
         self.assertIsInstance(pack.inventory_index, BTreeBuilder)
         self.assertIsInstance(pack._hash, type(osutils.md5()))
@@ -1073,7 +1449,7 @@
     """Tests for the packs repository Packer class."""
 
     def test_pack_optimizes_pack_order(self):
-        builder = self.make_branch_builder('.')
+        builder = self.make_branch_builder('.', format="1.9")
         builder.start_series()
         builder.build_snapshot('A', None, [
             ('add', ('', 'root-id', 'directory', None)),
@@ -1115,6 +1491,7 @@
         packer = pack_repo.OptimisingPacker(self.get_pack_collection(),
                                             [], '.test')
         new_pack = packer.open_pack()
+        self.addCleanup(new_pack.abort) # ensure cleanup
         self.assertIsInstance(new_pack, pack_repo.NewPack)
         self.assertTrue(new_pack.revision_index._optimize_for_size)
         self.assertTrue(new_pack.inventory_index._optimize_for_size)
@@ -1122,22 +1499,81 @@
         self.assertTrue(new_pack.signature_index._optimize_for_size)
 
 
-class TestInterDifferingSerializer(TestCaseWithTransport):
-
-    def test_progress_bar(self):
-        tree = self.make_branch_and_tree('tree')
-        tree.commit('rev1', rev_id='rev-1')
-        tree.commit('rev2', rev_id='rev-2')
-        tree.commit('rev3', rev_id='rev-3')
-        repo = self.make_repository('repo')
-        inter_repo = repository.InterDifferingSerializer(
-            tree.branch.repository, repo)
-        pb = progress.InstrumentedProgress(to_file=StringIO())
-        pb.never_throttle = True
-        inter_repo.fetch('rev-1', pb)
-        self.assertEqual('Transferring revisions', pb.last_msg)
-        self.assertEqual(1, pb.last_cnt)
-        self.assertEqual(1, pb.last_total)
-        inter_repo.fetch('rev-3', pb)
-        self.assertEqual(2, pb.last_cnt)
-        self.assertEqual(2, pb.last_total)
+class TestCrossFormatPacks(TestCaseWithTransport):
+
+    def log_pack(self, hint=None):
+        self.calls.append(('pack', hint))
+        self.orig_pack(hint=hint)
+        if self.expect_hint:
+            self.assertTrue(hint)
+
+    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
+        self.expect_hint = expect_pack_called
+        self.calls = []
+        source_tree = self.make_branch_and_tree('src', format=src_fmt)
+        source_tree.lock_write()
+        self.addCleanup(source_tree.unlock)
+        tip = source_tree.commit('foo')
+        target = self.make_repository('target', format=target_fmt)
+        target.lock_write()
+        self.addCleanup(target.unlock)
+        source = source_tree.branch.repository._get_source(target._format)
+        self.orig_pack = target.pack
+        target.pack = self.log_pack
+        search = target.search_missing_revision_ids(
+            source_tree.branch.repository, tip)
+        stream = source.get_stream(search)
+        from_format = source_tree.branch.repository._format
+        sink = target._get_sink()
+        sink.insert_stream(stream, from_format, [])
+        if expect_pack_called:
+            self.assertLength(1, self.calls)
+        else:
+            self.assertLength(0, self.calls)
+
+    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
+        self.expect_hint = expect_pack_called
+        self.calls = []
+        source_tree = self.make_branch_and_tree('src', format=src_fmt)
+        source_tree.lock_write()
+        self.addCleanup(source_tree.unlock)
+        tip = source_tree.commit('foo')
+        target = self.make_repository('target', format=target_fmt)
+        target.lock_write()
+        self.addCleanup(target.unlock)
+        source = source_tree.branch.repository
+        self.orig_pack = target.pack
+        target.pack = self.log_pack
+        target.fetch(source)
+        if expect_pack_called:
+            self.assertLength(1, self.calls)
+        else:
+            self.assertLength(0, self.calls)
+
+    def test_sink_format_hint_no(self):
+        # When the target format says packing makes no difference, pack is not
+        # called.
+        self.run_stream('1.9', 'rich-root-pack', False)
+
+    def test_sink_format_hint_yes(self):
+        # When the target format says packing makes a difference, pack is
+        # called.
+        self.run_stream('1.9', '2a', True)
+
+    def test_sink_format_same_no(self):
+        # When the formats are the same, pack is not called.
+        self.run_stream('2a', '2a', False)
+
+    def test_IDS_format_hint_no(self):
+        # When the target format says packing makes no difference, pack is not
+        # called.
+        self.run_fetch('1.9', 'rich-root-pack', False)
+
+    def test_IDS_format_hint_yes(self):
+        # When the target format says packing makes a difference, pack is
+        # called.
+        self.run_fetch('1.9', '2a', True)
+
+    def test_IDS_format_same_no(self):
+        # When the formats are the same, pack is not called.
+        self.run_fetch('2a', '2a', False)
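The removed TestInterDifferingSerializer.test_progress_bar is replaced by the TestCrossFormatPacks tests added above. They stub the target repository's pack method with log_pack so that every pack call and its hint is recorded, then drive either a raw stream insert (run_stream) or a plain fetch (run_fetch) and assert whether packing was triggered for same-format and cross-format combinations. The stub-and-record pattern they rely on, reduced to its essentials (illustrative only):

    calls = []
    orig_pack = target.pack

    def logging_pack(hint=None):
        calls.append(('pack', hint))
        return orig_pack(hint=hint)

    target.pack = logging_pack
    # ... exercise target.fetch(...) or sink.insert_stream(...),
    # then assert on the recorded calls ...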