~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/tests/test_repository.py

  • Committer: Vincent Ladeuil
  • Date: 2010-02-10 15:46:03 UTC
  • mfrom: (4985.3.21 update)
  • mto: This revision was merged to the branch mainline in revision 5021.
  • Revision ID: v.ladeuil+lp@free.fr-20100210154603-k4no1gvfuqpzrw7p

Update performs two merges in a more logical order but stops on conflicts

=== modified file 'bzrlib/tests/test_repository.py'
@@ -1,4 +1,4 @@
-# Copyright (C) 2006, 2007, 2008 Canonical Ltd
+# Copyright (C) 2006-2010 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -12,7 +12,7 @@
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
 """Tests for the Repository facility that are not interface tests.
 
@@ -24,6 +24,7 @@
 
 from stat import S_ISDIR
 from StringIO import StringIO
+import sys
 
 import bzrlib
 from bzrlib.errors import (NotBranchError,
@@ -31,7 +32,11 @@
                            UnknownFormatError,
                            UnsupportedFormatError,
                            )
-from bzrlib import graph
+from bzrlib import (
+    graph,
+    tests,
+    )
+from bzrlib.branchbuilder import BranchBuilder
 from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
 from bzrlib.index import GraphIndex, InMemoryGraphIndex
 from bzrlib.repository import RepositoryFormat
@@ -47,8 +52,8 @@
     get_transport,
     )
 from bzrlib.transport.memory import MemoryServer
-from bzrlib.util import bencode
 from bzrlib import (
+    bencode,
     bzrdir,
     errors,
     inventory,
@@ -58,9 +63,15 @@
     revision as _mod_revision,
     symbol_versioning,
     upgrade,
+    versionedfile,
     workingtree,
     )
-from bzrlib.repofmt import knitrepo, weaverepo, pack_repo
+from bzrlib.repofmt import (
+    groupcompress_repo,
+    knitrepo,
+    pack_repo,
+    weaverepo,
+    )
 
 
 class TestDefaultFormat(TestCase):
@@ -95,7 +106,7 @@
 class SampleRepositoryFormat(repository.RepositoryFormat):
     """A sample format
 
-    this format is initializable, unsupported to aid in testing the 
+    this format is initializable, unsupported to aid in testing the
     open and open(unsupported=True) routines.
     """
 
@@ -122,7 +133,7 @@
     def test_find_format(self):
         # is the right format object found for a repository?
         # create a branch with a few known format objects.
-        # this is not quite the same as 
+        # this is not quite the same as
         self.build_tree(["foo/", "bar/"])
         def check_format(format, url):
             dir = format._matchingbzrdir.initialize(url)
@@ -131,7 +142,7 @@
             found_format = repository.RepositoryFormat.find_format(dir)
             self.failUnless(isinstance(found_format, format.__class__))
         check_format(weaverepo.RepositoryFormat7(), "bar")
-        
+
     def test_find_format_no_repository(self):
         dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
         self.assertRaises(errors.NoRepositoryPresent,
@@ -167,19 +178,19 @@
         """Weaves need topological data insertion."""
         control = bzrdir.BzrDirFormat6().initialize(self.get_url())
         repo = weaverepo.RepositoryFormat6().initialize(control)
-        self.assertEqual('topological', repo._fetch_order)
+        self.assertEqual('topological', repo._format._fetch_order)
 
     def test_attribute__fetch_uses_deltas(self):
         """Weaves do not reuse deltas."""
         control = bzrdir.BzrDirFormat6().initialize(self.get_url())
         repo = weaverepo.RepositoryFormat6().initialize(control)
-        self.assertEqual(False, repo._fetch_uses_deltas)
+        self.assertEqual(False, repo._format._fetch_uses_deltas)
 
     def test_attribute__fetch_reconcile(self):
         """Weave repositories need a reconcile after fetch."""
         control = bzrdir.BzrDirFormat6().initialize(self.get_url())
         repo = weaverepo.RepositoryFormat6().initialize(control)
-        self.assertEqual(True, repo._fetch_reconcile)
+        self.assertEqual(True, repo._format._fetch_reconcile)
 
     def test_no_ancestry_weave(self):
         control = bzrdir.BzrDirFormat6().initialize(self.get_url())
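
The change repeated through this class (and again in TestFormatKnit1 and TestRepositoryFormatKnit3 below) moves the fetch-policy flags off Repository instances and onto their RepositoryFormat, so the tests now read them through repo._format. A minimal sketch of the new layout, with illustrative class bodies rather than the real bzrlib definitions:

    class RepositoryFormat(object):
        # Fetch policy now lives on the format object, shared by every
        # repository opened with that format.
        _fetch_order = 'topological'
        _fetch_uses_deltas = False
        _fetch_reconcile = True

    class Repository(object):
        def __init__(self, a_format):
            self._format = a_format

    repo = Repository(RepositoryFormat())
    assert repo._format._fetch_order == 'topological'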
@@ -202,19 +213,19 @@
         """Weaves need topological data insertion."""
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
         repo = weaverepo.RepositoryFormat7().initialize(control)
-        self.assertEqual('topological', repo._fetch_order)
+        self.assertEqual('topological', repo._format._fetch_order)
 
     def test_attribute__fetch_uses_deltas(self):
         """Weaves do not reuse deltas."""
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
         repo = weaverepo.RepositoryFormat7().initialize(control)
-        self.assertEqual(False, repo._fetch_uses_deltas)
+        self.assertEqual(False, repo._format._fetch_uses_deltas)
 
     def test_attribute__fetch_reconcile(self):
         """Weave repositories need a reconcile after fetch."""
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
         repo = weaverepo.RepositoryFormat7().initialize(control)
-        self.assertEqual(True, repo._fetch_reconcile)
+        self.assertEqual(True, repo._format._fetch_reconcile)
 
     def test_disk_layout(self):
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
@@ -243,7 +254,14 @@
         tree = control.create_workingtree()
         tree.add(['foo'], ['Foo:Bar'], ['file'])
         tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
-        tree.commit('first post', rev_id='first')
+        try:
+            tree.commit('first post', rev_id='first')
+        except errors.IllegalPath:
+            if sys.platform != 'win32':
+                raise
+            self.knownFailure('Foo:Bar cannot be used as a file-id on windows'
+                              ' in repo format 7')
+            return
         self.assertEqualDiff(
             '# bzr weave file v5\n'
             'i\n'
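
The try/except added here lets the test degrade gracefully on Windows, where a colon is illegal in file names: the IllegalPath error becomes a known failure on win32 and is re-raised everywhere else (which is why `import sys` was added above). The same guard pattern as a self-contained sketch, using stdlib unittest's skip in place of bzrlib's knownFailure:

    import os
    import sys
    import tempfile
    import unittest

    class PlatformDependentCase(unittest.TestCase):

        def test_colon_in_file_name(self):
            path = os.path.join(tempfile.mkdtemp(), 'Foo:Bar')
            try:
                open(path, 'w').close()
            except (IOError, OSError):
                if sys.platform != 'win32':
                    raise  # unexpected elsewhere: let the test fail
                self.skipTest('colons are illegal in win32 file names')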
@@ -284,7 +302,7 @@
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
         repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
         t = control.get_repository_transport(None)
-        # TODO: Should check there is a 'lock' toplevel directory, 
+        # TODO: Should check there is a 'lock' toplevel directory,
         # regardless of contents
         self.assertFalse(t.has('lock/held/info'))
         repo.lock_write()
@@ -343,18 +361,18 @@
 
 
 class TestFormatKnit1(TestCaseWithTransport):
-    
+
     def test_attribute__fetch_order(self):
         """Knits need topological data insertion."""
         repo = self.make_repository('.',
                 format=bzrdir.format_registry.get('knit')())
-        self.assertEqual('topological', repo._fetch_order)
+        self.assertEqual('topological', repo._format._fetch_order)
 
     def test_attribute__fetch_uses_deltas(self):
         """Knits reuse deltas."""
         repo = self.make_repository('.',
                 format=bzrdir.format_registry.get('knit')())
-        self.assertEqual(True, repo._fetch_uses_deltas)
+        self.assertEqual(True, repo._format._fetch_uses_deltas)
 
     def test_disk_layout(self):
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
@@ -447,7 +465,7 @@
         repo = self.make_repository('.',
                 format=bzrdir.format_registry.get('knit')())
         inv_xml = '<inventory format="5">\n</inventory>\n'
-        inv = repo.deserialise_inventory('test-rev-id', inv_xml)
+        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
         self.assertEqual('test-rev-id', inv.root.revision)
 
     def test_deserialise_uses_global_revision_id(self):
@@ -459,9 +477,9 @@
         # Arguably, the deserialise_inventory should detect a mismatch, and
         # raise an error, rather than silently using one revision_id over the
         # other.
-        self.assertRaises(AssertionError, repo.deserialise_inventory,
+        self.assertRaises(AssertionError, repo._deserialise_inventory,
             'test-rev-id', inv_xml)
-        inv = repo.deserialise_inventory('other-rev-id', inv_xml)
+        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
         self.assertEqual('other-rev-id', inv.root.revision)
 
     def test_supports_external_lookups(self):
@@ -477,6 +495,8 @@
     _serializer = None
 
     def supports_rich_root(self):
+        if self._format is not None:
+            return self._format.rich_root_data
         return False
 
     def get_graph(self):
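
DummyRepository.supports_rich_root now defers to an attached format when one is present, matching how a real Repository answers from its format. The delegation in isolation (a sketch; DummyRepository is the test double defined in this file, DummyFormat is an illustrative stand-in):

    class DummyFormat(object):
        rich_root_data = True

    class DummyRepository(object):
        _format = None

        def supports_rich_root(self):
            # Ask the format once one has been attached; otherwise keep
            # the old hard-coded answer.
            if self._format is not None:
                return self._format.rich_root_data
            return False

    repo = DummyRepository()
    assert repo.supports_rich_root() is False
    repo._format = DummyFormat()
    assert repo.supports_rich_root() is True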
@@ -497,7 +517,7 @@
     @staticmethod
     def is_compatible(repo_source, repo_target):
         """InterDummy is compatible with DummyRepository."""
-        return (isinstance(repo_source, DummyRepository) and 
+        return (isinstance(repo_source, DummyRepository) and
             isinstance(repo_target, DummyRepository))
 
 
@@ -516,7 +536,7 @@
 
     def assertGetsDefaultInterRepository(self, repo_a, repo_b):
         """Asserts that InterRepository.get(repo_a, repo_b) -> the default.
-        
+
         The effective default is now InterSameDataRepository because there is
         no actual sane default in the presence of incompatible data models.
         """
@@ -533,11 +553,17 @@
         # pair that it returns true on for the is_compatible static method
         # check
         dummy_a = DummyRepository()
+        dummy_a._format = RepositoryFormat()
         dummy_b = DummyRepository()
+        dummy_b._format = RepositoryFormat()
         repo = self.make_repository('.')
         # hack dummies to look like repo somewhat.
         dummy_a._serializer = repo._serializer
+        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
+        dummy_a._format.rich_root_data = repo._format.rich_root_data
         dummy_b._serializer = repo._serializer
+        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
+        dummy_b._format.rich_root_data = repo._format.rich_root_data
         repository.InterRepository.register_optimiser(InterDummy)
         try:
             # we should get the default for something InterDummy returns False
@@ -606,10 +632,10 @@
 
 
 class TestMisc(TestCase):
-    
+
     def test_unescape_xml(self):
         """We get some kind of error when malformed entities are passed"""
-        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;') 
+        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;')
 
 
 class TestRepositoryFormatKnit3(TestCaseWithTransport):
@@ -619,14 +645,14 @@
         format = bzrdir.BzrDirMetaFormat1()
         format.repository_format = knitrepo.RepositoryFormatKnit3()
         repo = self.make_repository('.', format=format)
-        self.assertEqual('topological', repo._fetch_order)
+        self.assertEqual('topological', repo._format._fetch_order)
 
     def test_attribute__fetch_uses_deltas(self):
         """Knits reuse deltas."""
         format = bzrdir.BzrDirMetaFormat1()
         format.repository_format = knitrepo.RepositoryFormatKnit3()
         repo = self.make_repository('.', format=format)
-        self.assertEqual(True, repo._fetch_uses_deltas)
+        self.assertEqual(True, repo._format._fetch_uses_deltas)
 
     def test_convert(self):
         """Ensure the upgrade adds weaves for roots"""
@@ -664,6 +690,337 @@
         self.assertFalse(repo._format.supports_external_lookups)
 
 
+class Test2a(tests.TestCaseWithMemoryTransport):
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot('1', None, [
+            ('add', ('', 'root-id', 'directory', '')),
+            ('add', ('file', 'file-id', 'file', 'content\n'))])
+        builder.build_snapshot('2', ['1'], [
+            ('modify', ('file-id', 'content-2\n'))])
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [('file-id', '1',), ('file-id', '2',)])
+        file_1_details = details[('file-id', '1')]
+        file_2_details = details[('file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot('1', None, [
+            ('add', ('', 'root-id', 'directory', '')),
+            ('add', ('file', 'file-id', 'file', 'content\n'))])
+        builder.build_snapshot('2', ['1'], [
+            ('modify', ('file-id', 'content-2\n'))])
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [('file-id', '1',), ('file-id', '2',)])
+        file_1_details = details[('file-id', '1')]
+        file_2_details = details[('file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot('1', None, [
+            ('add', ('', 'root-id', 'directory', '')),
+            ('add', ('file', 'file-id', 'file', 'content\n'))])
+        builder.build_snapshot('2', ['1'], [
+            ('modify', ('file-id', 'content-2\n'))])
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [('file-id', '1',), ('file-id', '2',)])
+        file_1_details = details[('file-id', '1')]
+        file_2_details = details[('file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_format_pack_compresses_True(self):
+        repo = self.make_repository('repo', format='2a')
+        self.assertTrue(repo._format.pack_compresses)
+
+    def test_inventories_use_chk_map_with_parent_base_dict(self):
+        tree = self.make_branch_and_memory_tree('repo', format="2a")
+        tree.lock_write()
+        tree.add([''], ['TREE_ROOT'])
+        revid = tree.commit("foo")
+        tree.unlock()
+        tree.lock_read()
+        self.addCleanup(tree.unlock)
+        inv = tree.branch.repository.get_inventory(revid)
+        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
+        inv.parent_id_basename_to_file_id._ensure_root()
+        inv.id_to_entry._ensure_root()
+        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
+        self.assertEqual(65536,
+            inv.parent_id_basename_to_file_id._root_node.maximum_size)
+
+    def test_autopack_unchanged_chk_nodes(self):
+        # at 20 unchanged commits, chk pages are packed that are split into
+        # two groups such that the new pack being made doesn't have all its
+        # pages in the source packs (though they are in the repository).
+        # Use a memory backed repository, we don't need to hit disk for this
+        tree = self.make_branch_and_memory_tree('tree', format='2a')
+        tree.lock_write()
+        self.addCleanup(tree.unlock)
+        tree.add([''], ['TREE_ROOT'])
+        for pos in range(20):
+            tree.commit(str(pos))
+
+    def test_pack_with_hint(self):
+        tree = self.make_branch_and_memory_tree('tree', format='2a')
+        tree.lock_write()
+        self.addCleanup(tree.unlock)
+        tree.add([''], ['TREE_ROOT'])
+        # 1 commit to leave untouched
+        tree.commit('1')
+        to_keep = tree.branch.repository._pack_collection.names()
+        # 2 to combine
+        tree.commit('2')
+        tree.commit('3')
+        all = tree.branch.repository._pack_collection.names()
+        combine = list(set(all) - set(to_keep))
+        self.assertLength(3, all)
+        self.assertLength(2, combine)
+        tree.branch.repository.pack(hint=combine)
+        final = tree.branch.repository._pack_collection.names()
+        self.assertLength(2, final)
+        self.assertFalse(combine[0] in final)
+        self.assertFalse(combine[1] in final)
+        self.assertSubset(to_keep, final)
+
+    def test_stream_source_to_gc(self):
+        source = self.make_repository('source', format='2a')
+        target = self.make_repository('target', format='2a')
+        stream = source._get_source(target._format)
+        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)
+
+    def test_stream_source_to_non_gc(self):
+        source = self.make_repository('source', format='2a')
+        target = self.make_repository('target', format='rich-root-pack')
+        stream = source._get_source(target._format)
+        # We don't want the child GroupCHKStreamSource
+        self.assertIs(type(stream), repository.StreamSource)
+
+    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
+        source_builder = self.make_branch_builder('source',
+                            format='2a')
+        # We have to build a fairly large tree, so that we are sure the chk
+        # pages will have split into multiple pages.
+        entries = [('add', ('', 'a-root-id', 'directory', None))]
+        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
+            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
+                fname = i + j
+                fid = fname + '-id'
+                content = 'content for %s\n' % (fname,)
+                entries.append(('add', (fname, fid, 'file', content)))
+        source_builder.start_series()
+        source_builder.build_snapshot('rev-1', None, entries)
+        # Now change a few of them, so we get a few new pages for the second
+        # revision
+        source_builder.build_snapshot('rev-2', ['rev-1'], [
+            ('modify', ('aa-id', 'new content for aa-id\n')),
+            ('modify', ('cc-id', 'new content for cc-id\n')),
+            ('modify', ('zz-id', 'new content for zz-id\n')),
+            ])
+        source_builder.finish_series()
+        source_branch = source_builder.get_branch()
+        source_branch.lock_read()
+        self.addCleanup(source_branch.unlock)
+        target = self.make_repository('target', format='2a')
+        source = source_branch.repository._get_source(target._format)
+        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)
+
+        # On a regular pass, getting the inventories and chk pages for rev-2
+        # would only get the newly created chk pages
+        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
+                                    set(['rev-2']))
+        simple_chk_records = []
+        for vf_name, substream in source.get_stream(search):
+            if vf_name == 'chk_bytes':
+                for record in substream:
+                    simple_chk_records.append(record.key)
+            else:
+                for _ in substream:
+                    continue
+        # 3 pages, the root (InternalNode), + 2 pages which actually changed
+        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
+                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
+                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
+                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
+                         simple_chk_records)
+        # Now, when we do a similar call using 'get_stream_for_missing_keys'
+        # we should get a much larger set of pages.
+        missing = [('inventories', 'rev-2')]
+        full_chk_records = []
+        for vf_name, substream in source.get_stream_for_missing_keys(missing):
+            if vf_name == 'inventories':
+                for record in substream:
+                    self.assertEqual(('rev-2',), record.key)
+            elif vf_name == 'chk_bytes':
+                for record in substream:
+                    full_chk_records.append(record.key)
+            else:
+                self.fail('Should not be getting a stream of %s' % (vf_name,))
+        # We have 257 records now. This is because we have 1 root page, and 256
+        # leaf pages in a complete listing.
+        self.assertEqual(257, len(full_chk_records))
+        self.assertSubset(simple_chk_records, full_chk_records)
+
+    def test_inconsistency_fatal(self):
+        repo = self.make_repository('repo', format='2a')
+        self.assertTrue(repo.revisions._index._inconsistency_fatal)
+        self.assertFalse(repo.texts._index._inconsistency_fatal)
+        self.assertFalse(repo.inventories._index._inconsistency_fatal)
+        self.assertFalse(repo.signatures._index._inconsistency_fatal)
+        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
+
+
+class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):
+
+    def test_source_to_exact_pack_092(self):
+        source = self.make_repository('source', format='pack-0.92')
+        target = self.make_repository('target', format='pack-0.92')
+        stream_source = source._get_source(target._format)
+        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+
+    def test_source_to_exact_pack_rich_root_pack(self):
+        source = self.make_repository('source', format='rich-root-pack')
+        target = self.make_repository('target', format='rich-root-pack')
+        stream_source = source._get_source(target._format)
+        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+
+    def test_source_to_exact_pack_19(self):
+        source = self.make_repository('source', format='1.9')
+        target = self.make_repository('target', format='1.9')
+        stream_source = source._get_source(target._format)
+        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+
+    def test_source_to_exact_pack_19_rich_root(self):
+        source = self.make_repository('source', format='1.9-rich-root')
+        target = self.make_repository('target', format='1.9-rich-root')
+        stream_source = source._get_source(target._format)
+        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+
+    def test_source_to_remote_exact_pack_19(self):
+        trans = self.make_smart_server('target')
+        trans.ensure_base()
+        source = self.make_repository('source', format='1.9')
+        target = self.make_repository('target', format='1.9')
+        target = repository.Repository.open(trans.base)
+        stream_source = source._get_source(target._format)
+        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
+
+    def test_stream_source_to_non_exact(self):
+        source = self.make_repository('source', format='pack-0.92')
+        target = self.make_repository('target', format='1.9')
+        stream = source._get_source(target._format)
+        self.assertIs(type(stream), repository.StreamSource)
+
+    def test_stream_source_to_non_exact_rich_root(self):
+        source = self.make_repository('source', format='1.9')
+        target = self.make_repository('target', format='1.9-rich-root')
+        stream = source._get_source(target._format)
+        self.assertIs(type(stream), repository.StreamSource)
+
+    def test_source_to_remote_non_exact_pack_19(self):
+        trans = self.make_smart_server('target')
+        trans.ensure_base()
+        source = self.make_repository('source', format='1.9')
+        target = self.make_repository('target', format='1.6')
+        target = repository.Repository.open(trans.base)
+        stream_source = source._get_source(target._format)
+        self.assertIs(type(stream_source), repository.StreamSource)
+
+    def test_stream_source_to_knit(self):
+        source = self.make_repository('source', format='pack-0.92')
+        target = self.make_repository('target', format='dirstate')
+        stream = source._get_source(target._format)
+        self.assertIs(type(stream), repository.StreamSource)
+
+
+class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
+    """Tests for _find_parent_ids_of_revisions."""
+
+    def setUp(self):
+        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
+        self.builder = self.make_branch_builder('source',
+            format='development6-rich-root')
+        self.builder.start_series()
+        self.builder.build_snapshot('initial', None,
+            [('add', ('', 'tree-root', 'directory', None))])
+        self.repo = self.builder.get_branch().repository
+        self.addCleanup(self.builder.finish_series)
+
+    def assertParentIds(self, expected_result, rev_set):
+        self.assertEqual(sorted(expected_result),
+            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))
+
+    def test_simple(self):
+        self.builder.build_snapshot('revid1', None, [])
+        self.builder.build_snapshot('revid2', ['revid1'], [])
+        rev_set = ['revid2']
+        self.assertParentIds(['revid1'], rev_set)
+
+    def test_not_first_parent(self):
+        self.builder.build_snapshot('revid1', None, [])
+        self.builder.build_snapshot('revid2', ['revid1'], [])
+        self.builder.build_snapshot('revid3', ['revid2'], [])
+        rev_set = ['revid3', 'revid2']
+        self.assertParentIds(['revid1'], rev_set)
+
+    def test_not_null(self):
+        rev_set = ['initial']
+        self.assertParentIds([], rev_set)
+
+    def test_not_null_set(self):
+        self.builder.build_snapshot('revid1', None, [])
+        rev_set = [_mod_revision.NULL_REVISION]
+        self.assertParentIds([], rev_set)
+
+    def test_ghost(self):
+        self.builder.build_snapshot('revid1', None, [])
+        rev_set = ['ghost', 'revid1']
+        self.assertParentIds(['initial'], rev_set)
+
+    def test_ghost_parent(self):
+        self.builder.build_snapshot('revid1', None, [])
+        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
+        rev_set = ['revid2', 'revid1']
+        self.assertParentIds(['ghost', 'initial'], rev_set)
+
+    def test_righthand_parent(self):
+        self.builder.build_snapshot('revid1', None, [])
+        self.builder.build_snapshot('revid2a', ['revid1'], [])
+        self.builder.build_snapshot('revid2b', ['revid1'], [])
+        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
+        rev_set = ['revid3', 'revid2a']
+        self.assertParentIds(['revid1', 'revid2b'], rev_set)
+
+
 class TestWithBrokenRepo(TestCaseWithTransport):
     """These tests seem to be more appropriate as interface tests?"""
 
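In test_fetch_combines_groups above, get_build_details returns a tuple per key whose first element is the read memo describing where the compressed bytes live; comparing the first three fields of the two memos asserts that both versions of the file ended up in the same group-compress block after the fetch. A sketch of that comparison, using the names from the test and assuming only the memo shape implied by its assertions:

    details = target.texts._index.get_build_details(
        [('file-id', '1'), ('file-id', '2')])
    memo_1 = details[('file-id', '1')][0]
    memo_2 = details[('file-id', '2')][0]
    # Identical first three fields => one shared group read off disk.
    assert memo_1[:3] == memo_2[:3]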
@@ -682,6 +1039,7 @@
             inv = inventory.Inventory(revision_id='rev1a')
             inv.root.revision = 'rev1a'
             self.add_file(repo, inv, 'file1', 'rev1a', [])
+            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
             repo.add_inventory('rev1a', inv, [])
             revision = _mod_revision.Revision('rev1a',
                 committer='jrandom@example.com', timestamp=0,
@@ -722,6 +1080,7 @@
     def add_revision(self, repo, revision_id, inv, parent_ids):
         inv.revision_id = revision_id
         inv.root.revision = revision_id
+        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
         repo.add_inventory(revision_id, inv, parent_ids)
         revision = _mod_revision.Revision(revision_id,
             committer='jrandom@example.com', timestamp=0, inventory_sha1='',
@@ -744,8 +1103,17 @@
         """
         broken_repo = self.make_broken_repository()
         empty_repo = self.make_repository('empty-repo')
-        self.assertRaises((errors.RevisionNotPresent, errors.BzrCheckError),
-                          empty_repo.fetch, broken_repo)
+        try:
+            empty_repo.fetch(broken_repo)
+        except (errors.RevisionNotPresent, errors.BzrCheckError):
+            # Test successful: compression parent not being copied leads to
+            # error.
+            return
+        empty_repo.lock_read()
+        self.addCleanup(empty_repo.unlock)
+        text = empty_repo.texts.get_record_stream(
+            [('file2-id', 'rev3')], 'topological', True).next()
+        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
 
 
 class TestRepositoryPackCollection(TestCaseWithTransport):
@@ -760,7 +1128,7 @@
 
     def make_packs_and_alt_repo(self, write_lock=False):
         """Create a pack repo with 3 packs, and access it via a second repo."""
-        tree = self.make_branch_and_tree('.')
+        tree = self.make_branch_and_tree('.', format=self.get_format())
         tree.lock_write()
         self.addCleanup(tree.unlock)
         rev1 = tree.commit('one')
@@ -776,6 +1144,31 @@
         packs.ensure_loaded()
         return tree, r, packs, [rev1, rev2, rev3]
 
+    def test__clear_obsolete_packs(self):
+        packs = self.get_packs()
+        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
+        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
+        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
+        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
+        res = packs._clear_obsolete_packs()
+        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
+        self.assertEqual([], obsolete_pack_trans.list_dir('.'))
+
+    def test__clear_obsolete_packs_preserve(self):
+        packs = self.get_packs()
+        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
+        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
+        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
+        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
+        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
+        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
+        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
+                         sorted(obsolete_pack_trans.list_dir('.')))
+
     def test__max_pack_count(self):
         """The maximum pack count is a function of the number of revisions."""
         # no revisions - one pack, so that we can have a revision free repo
@@ -801,6 +1194,33 @@
         # check some arbitrary big numbers
         self.assertEqual(25, packs._max_pack_count(112894))
 
+    def test_repr(self):
+        packs = self.get_packs()
+        self.assertContainsRe(repr(packs),
+            'RepositoryPackCollection(.*Repository(.*))')
+
+    def test__obsolete_packs(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        names = packs.names()
+        pack = packs.get_pack_by_name(names[0])
+        # Schedule this one for removal
+        packs._remove_pack_from_memory(pack)
+        # Simulate a concurrent update by renaming the .pack file and one of
+        # the indices
+        packs.transport.rename('packs/%s.pack' % (names[0],),
+                               'obsolete_packs/%s.pack' % (names[0],))
+        packs.transport.rename('indices/%s.iix' % (names[0],),
+                               'obsolete_packs/%s.iix' % (names[0],))
+        # Now trigger the obsoletion, and ensure that all the remaining files
+        # are still renamed
+        packs._obsolete_packs([pack])
+        self.assertEqual([n + '.pack' for n in names[1:]],
+                         sorted(packs._pack_transport.list_dir('.')))
+        # names[0] should not be present in the index anymore
+        self.assertEqual(names[1:],
+            sorted(set([osutils.splitext(n)[0] for n in
+                        packs._index_transport.list_dir('.')])))
+
     def test_pack_distribution_zero(self):
         packs = self.get_packs()
         self.assertEqual([0], packs.pack_distribution(0))
@@ -929,6 +1349,7 @@
         tree.lock_read()
         self.addCleanup(tree.unlock)
         packs = tree.branch.repository._pack_collection
+        packs.reset()
         packs.ensure_loaded()
         name = packs.names()[0]
         pack_1 = packs.get_pack_by_name(name)
@@ -973,6 +1394,60 @@
         self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
         self.assertFalse(packs.reload_pack_names())
 
+    def test_reload_pack_names_preserves_pending(self):
+        # TODO: Update this to also test for pending-deleted names
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        # We will add one pack (via start_write_group + insert_record_stream),
+        # and remove another pack (via _remove_pack_from_memory)
+        orig_names = packs.names()
+        orig_at_load = packs._packs_at_load
+        to_remove_name = iter(orig_names).next()
+        r.start_write_group()
+        self.addCleanup(r.abort_write_group)
+        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
+            ('text', 'rev'), (), None, 'content\n')])
+        new_pack = packs._new_pack
+        self.assertTrue(new_pack.data_inserted())
+        new_pack.finish()
+        packs.allocate(new_pack)
+        packs._new_pack = None
+        removed_pack = packs.get_pack_by_name(to_remove_name)
+        packs._remove_pack_from_memory(removed_pack)
+        names = packs.names()
+        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
+        new_names = set([x[0][0] for x in new_nodes])
+        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
+        self.assertEqual(set(names) - set(orig_names), new_names)
+        self.assertEqual(set([new_pack.name]), new_names)
+        self.assertEqual([to_remove_name],
+                         sorted([x[0][0] for x in deleted_nodes]))
+        packs.reload_pack_names()
+        reloaded_names = packs.names()
+        self.assertEqual(orig_at_load, packs._packs_at_load)
+        self.assertEqual(names, reloaded_names)
+        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
+        new_names = set([x[0][0] for x in new_nodes])
+        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
+        self.assertEqual(set(names) - set(orig_names), new_names)
+        self.assertEqual(set([new_pack.name]), new_names)
+        self.assertEqual([to_remove_name],
+                         sorted([x[0][0] for x in deleted_nodes]))
+
+    def test_autopack_obsoletes_new_pack(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        packs._max_pack_count = lambda x: 1
+        packs.pack_distribution = lambda x: [10]
+        r.start_write_group()
+        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
+            ('bogus-rev',), (), None, 'bogus-content\n')])
+        # This should trigger an autopack, which will combine everything into a
+        # single pack file.
+        new_names = r.commit_write_group()
+        names = packs.names()
+        self.assertEqual(1, len(names))
+        self.assertEqual([names[0] + '.pack'],
+                         packs._pack_transport.list_dir('.'))
+
     def test_autopack_reloads_and_stops(self):
         tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
         # After we have determined what needs to be autopacked, trigger a
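
The x[0][0] unpacking in test_reload_pack_names_preserves_pending reflects the GraphIndex node shape: _diff_pack_names yields nodes whose first element is the key tuple, and the pack name is that key's first component. A sketch of the unpacking, using the names from the test above and assuming only that node shape:

    all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
    # node == ((pack_name,), value, ...); pull the name back out of the key.
    current_names = sorted(node[0][0] for node in all_nodes)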
@@ -990,6 +1465,38 @@
         self.assertEqual(tree.branch.repository._pack_collection.names(),
                          packs.names())
 
+    def test__save_pack_names(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        names = packs.names()
+        pack = packs.get_pack_by_name(names[0])
+        packs._remove_pack_from_memory(pack)
+        packs._save_pack_names(obsolete_packs=[pack])
+        cur_packs = packs._pack_transport.list_dir('.')
+        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
+        # obsolete_packs will also have stuff like .rix and .iix present.
+        obsolete_packs = packs.transport.list_dir('obsolete_packs')
+        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
+        self.assertEqual([pack.name], sorted(obsolete_names))
+
+    def test__save_pack_names_already_obsoleted(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        names = packs.names()
+        pack = packs.get_pack_by_name(names[0])
+        packs._remove_pack_from_memory(pack)
+        # We are going to simulate a concurrent autopack by manually obsoleting
+        # the pack directly.
+        packs._obsolete_packs([pack])
+        packs._save_pack_names(clear_obsolete_packs=True,
+                               obsolete_packs=[pack])
+        cur_packs = packs._pack_transport.list_dir('.')
+        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
+        # Note that while we set clear_obsolete_packs=True, it should not
+        # delete a pack file that we have also scheduled for obsoletion.
+        obsolete_packs = packs.transport.list_dir('obsolete_packs')
+        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
+        self.assertEqual([pack.name], sorted(obsolete_names))
+
+
 
 class TestPack(TestCaseWithTransport):
     """Tests for the Pack object."""
@@ -1049,14 +1556,17 @@
         pack_transport = self.get_transport('pack')
         index_transport = self.get_transport('index')
         upload_transport.mkdir('.')
-        collection = pack_repo.RepositoryPackCollection(repo=None,
+        collection = pack_repo.RepositoryPackCollection(
+            repo=None,
             transport=self.get_transport('.'),
             index_transport=index_transport,
             upload_transport=upload_transport,
             pack_transport=pack_transport,
             index_builder_class=BTreeBuilder,
-            index_class=BTreeGraphIndex)
+            index_class=BTreeGraphIndex,
+            use_chk_index=False)
         pack = pack_repo.NewPack(collection)
+        self.addCleanup(pack.abort) # Make sure the write stream gets closed
         self.assertIsInstance(pack.revision_index, BTreeBuilder)
         self.assertIsInstance(pack.inventory_index, BTreeBuilder)
         self.assertIsInstance(pack._hash, type(osutils.md5()))
@@ -1073,7 +1583,7 @@
     """Tests for the packs repository Packer class."""
 
     def test_pack_optimizes_pack_order(self):
-        builder = self.make_branch_builder('.')
+        builder = self.make_branch_builder('.', format="1.9")
         builder.start_series()
         builder.build_snapshot('A', None, [
             ('add', ('', 'root-id', 'directory', None)),
@@ -1115,6 +1625,7 @@
         packer = pack_repo.OptimisingPacker(self.get_pack_collection(),
                                             [], '.test')
         new_pack = packer.open_pack()
+        self.addCleanup(new_pack.abort) # ensure cleanup
         self.assertIsInstance(new_pack, pack_repo.NewPack)
         self.assertTrue(new_pack.revision_index._optimize_for_size)
         self.assertTrue(new_pack.inventory_index._optimize_for_size)
1633
        self.assertTrue(new_pack.signature_index._optimize_for_size)
1123
1634
 
1124
1635
 
1125
 
class TestInterDifferingSerializer(TestCaseWithTransport):
1126
 
 
1127
 
    def test_progress_bar(self):
1128
 
        tree = self.make_branch_and_tree('tree')
1129
 
        tree.commit('rev1', rev_id='rev-1')
1130
 
        tree.commit('rev2', rev_id='rev-2')
1131
 
        tree.commit('rev3', rev_id='rev-3')
1132
 
        repo = self.make_repository('repo')
1133
 
        inter_repo = repository.InterDifferingSerializer(
1134
 
            tree.branch.repository, repo)
1135
 
        pb = progress.InstrumentedProgress(to_file=StringIO())
1136
 
        pb.never_throttle = True
1137
 
        inter_repo.fetch('rev-1', pb)
1138
 
        self.assertEqual('Transferring revisions', pb.last_msg)
1139
 
        self.assertEqual(1, pb.last_cnt)
1140
 
        self.assertEqual(1, pb.last_total)
1141
 
        inter_repo.fetch('rev-3', pb)
1142
 
        self.assertEqual(2, pb.last_cnt)
1143
 
        self.assertEqual(2, pb.last_total)
 
1636
class TestCrossFormatPacks(TestCaseWithTransport):
 
1637
 
 
1638
    def log_pack(self, hint=None):
 
1639
        self.calls.append(('pack', hint))
 
1640
        self.orig_pack(hint=hint)
 
1641
        if self.expect_hint:
 
1642
            self.assertTrue(hint)
 
1643
 
 
1644
    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
 
1645
        self.expect_hint = expect_pack_called
 
1646
        self.calls = []
 
1647
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
 
1648
        source_tree.lock_write()
 
1649
        self.addCleanup(source_tree.unlock)
 
1650
        tip = source_tree.commit('foo')
 
1651
        target = self.make_repository('target', format=target_fmt)
 
1652
        target.lock_write()
 
1653
        self.addCleanup(target.unlock)
 
1654
        source = source_tree.branch.repository._get_source(target._format)
 
1655
        self.orig_pack = target.pack
 
1656
        target.pack = self.log_pack
 
1657
        search = target.search_missing_revision_ids(
 
1658
            source_tree.branch.repository, tip)
 
1659
        stream = source.get_stream(search)
 
1660
        from_format = source_tree.branch.repository._format
 
1661
        sink = target._get_sink()
 
1662
        sink.insert_stream(stream, from_format, [])
 
1663
        if expect_pack_called:
 
1664
            self.assertLength(1, self.calls)
 
1665
        else:
 
1666
            self.assertLength(0, self.calls)
 
1667
 
 
1668
    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
 
1669
        self.expect_hint = expect_pack_called
 
1670
        self.calls = []
 
1671
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
 
1672
        source_tree.lock_write()
 
1673
        self.addCleanup(source_tree.unlock)
 
1674
        tip = source_tree.commit('foo')
 
1675
        target = self.make_repository('target', format=target_fmt)
 
1676
        target.lock_write()
 
1677
        self.addCleanup(target.unlock)
 
1678
        source = source_tree.branch.repository
 
1679
        self.orig_pack = target.pack
 
1680
        target.pack = self.log_pack
 
1681
        target.fetch(source)
 
1682
        if expect_pack_called:
 
1683
            self.assertLength(1, self.calls)
 
1684
        else:
 
1685
            self.assertLength(0, self.calls)
 
1686
 
 
1687
    def test_sink_format_hint_no(self):
 
1688
        # When the target format says packing makes no difference, pack is not
 
1689
        # called.
 
1690
        self.run_stream('1.9', 'rich-root-pack', False)
 
1691
 
 
1692
    def test_sink_format_hint_yes(self):
 
1693
        # When the target format says packing makes a difference, pack is
 
1694
        # called.
 
1695
        self.run_stream('1.9', '2a', True)
 
1696
 
 
1697
    def test_sink_format_same_no(self):
 
1698
        # When the formats are the same, pack is not called.
 
1699
        self.run_stream('2a', '2a', False)
 
1700
 
 
1701
    def test_IDS_format_hint_no(self):
 
1702
        # When the target format says packing makes no difference, pack is not
 
1703
        # called.
 
1704
        self.run_fetch('1.9', 'rich-root-pack', False)
 
1705
 
 
1706
    def test_IDS_format_hint_yes(self):
 
1707
        # When the target format says packing makes a difference, pack is
 
1708
        # called.
 
1709
        self.run_fetch('1.9', '2a', True)
 
1710
 
 
1711
    def test_IDS_format_same_no(self):
 
1712
        # When the formats are the same, pack is not called.
 
1713
        self.run_fetch('2a', '2a', False)
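
Both run_stream and run_fetch work by swapping target.pack for log_pack, so the test can observe whether the insertion path requested a pack and with what hint. The interception idiom in isolation, as a generic sketch rather than the bzrlib API:

    class PackCallLogger(object):
        # Wrap an object's pack() callable and record each invocation,
        # forwarding to the original so behaviour is unchanged.

        def __init__(self, target):
            self.calls = []
            self._orig_pack = target.pack
            target.pack = self._logging_pack

        def _logging_pack(self, hint=None):
            self.calls.append(('pack', hint))
            return self._orig_pack(hint=hint)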