~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/tests/test_repository.py

  • Committer: Robert Collins
  • Date: 2007-07-15 15:40:37 UTC
  • mto: (2592.3.33 repository)
  • mto: This revision was merged to the branch mainline in revision 2624.
  • Revision ID: robertc@robertcollins.net-20070715154037-3ar8g89decddc9su
Make GraphIndex accept nodes as key, value, references, so that the method
signature is closer to what a simple key->value index delivers. Also
change the behaviour when the reference list count is zero to accept
key, value as nodes, and emit key, value to make it identical in that case
to a simple key->value index. This may not be a good idea, but for now it
seems ok.

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
 
# Copyright (C) 2006, 2007, 2008, 2009 Canonical Ltd
 
1
# Copyright (C) 2006, 2007 Canonical Ltd
2
2
#
3
3
# This program is free software; you can redistribute it and/or modify
4
4
# it under the terms of the GNU General Public License as published by
12
12
#
13
13
# You should have received a copy of the GNU General Public License
14
14
# along with this program; if not, write to the Free Software
15
 
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
15
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
16
16
 
17
17
"""Tests for the Repository facility that are not interface tests.
18
18
 
19
 
For interface tests see tests/per_repository/*.py.
 
19
For interface tests see tests/repository_implementations/*.py.
20
20
 
21
21
For concrete class tests see this file, and for storage formats tests
22
22
also see this file.
25
25
from stat import S_ISDIR
26
26
from StringIO import StringIO
27
27
 
 
28
from bzrlib import symbol_versioning
28
29
import bzrlib
 
30
import bzrlib.bzrdir as bzrdir
 
31
import bzrlib.errors as errors
29
32
from bzrlib.errors import (NotBranchError,
30
33
                           NoSuchFile,
31
34
                           UnknownFormatError,
32
35
                           UnsupportedFormatError,
33
36
                           )
34
 
from bzrlib import (
35
 
    graph,
36
 
    tests,
37
 
    )
38
 
from bzrlib.branchbuilder import BranchBuilder
39
 
from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
40
 
from bzrlib.index import GraphIndex, InMemoryGraphIndex
41
37
from bzrlib.repository import RepositoryFormat
42
 
from bzrlib.smart import server
43
 
from bzrlib.tests import (
44
 
    TestCase,
45
 
    TestCaseWithTransport,
46
 
    TestSkipped,
47
 
    test_knit,
48
 
    )
49
 
from bzrlib.transport import (
50
 
    fakenfs,
51
 
    get_transport,
52
 
    )
 
38
from bzrlib.tests import TestCase, TestCaseWithTransport
 
39
from bzrlib.transport import get_transport
53
40
from bzrlib.transport.memory import MemoryServer
54
41
from bzrlib import (
55
 
    bencode,
56
 
    bzrdir,
57
 
    errors,
58
 
    inventory,
59
 
    osutils,
60
 
    progress,
61
42
    repository,
62
 
    revision as _mod_revision,
63
 
    symbol_versioning,
64
43
    upgrade,
65
44
    workingtree,
66
45
    )
67
 
from bzrlib.repofmt import (
68
 
    groupcompress_repo,
69
 
    knitrepo,
70
 
    pack_repo,
71
 
    weaverepo,
72
 
    )
 
46
from bzrlib.repofmt import knitrepo, weaverepo
73
47
 
74
48
 
75
49
class TestDefaultFormat(TestCase):
104
78
class SampleRepositoryFormat(repository.RepositoryFormat):
105
79
    """A sample format
106
80
 
107
 
    this format is initializable, unsupported to aid in testing the
 
81
    this format is initializable, unsupported to aid in testing the 
108
82
    open and open(unsupported=True) routines.
109
83
    """
110
84
 
131
105
    def test_find_format(self):
132
106
        # is the right format object found for a repository?
133
107
        # create a branch with a few known format objects.
134
 
        # this is not quite the same as
 
108
        # this is not quite the same as 
135
109
        self.build_tree(["foo/", "bar/"])
136
110
        def check_format(format, url):
137
111
            dir = format._matchingbzrdir.initialize(url)
140
114
            found_format = repository.RepositoryFormat.find_format(dir)
141
115
            self.failUnless(isinstance(found_format, format.__class__))
142
116
        check_format(weaverepo.RepositoryFormat7(), "bar")
143
 
 
 
117
        
144
118
    def test_find_format_no_repository(self):
145
119
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
146
120
        self.assertRaises(errors.NoRepositoryPresent,
172
146
 
173
147
class TestFormat6(TestCaseWithTransport):
174
148
 
175
 
    def test_attribute__fetch_order(self):
176
 
        """Weaves need topological data insertion."""
177
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
178
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
179
 
        self.assertEqual('topological', repo._format._fetch_order)
180
 
 
181
 
    def test_attribute__fetch_uses_deltas(self):
182
 
        """Weaves do not reuse deltas."""
183
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
184
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
185
 
        self.assertEqual(False, repo._format._fetch_uses_deltas)
186
 
 
187
 
    def test_attribute__fetch_reconcile(self):
188
 
        """Weave repositories need a reconcile after fetch."""
189
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
190
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
191
 
        self.assertEqual(True, repo._format._fetch_reconcile)
192
 
 
193
149
    def test_no_ancestry_weave(self):
194
150
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
195
151
        repo = weaverepo.RepositoryFormat6().initialize(control)
199
155
                          control.transport.get,
200
156
                          'ancestry.weave')
201
157
 
202
 
    def test_supports_external_lookups(self):
203
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
204
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
205
 
        self.assertFalse(repo._format.supports_external_lookups)
206
 
 
207
158
 
208
159
class TestFormat7(TestCaseWithTransport):
209
 
 
210
 
    def test_attribute__fetch_order(self):
211
 
        """Weaves need topological data insertion."""
212
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
213
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
214
 
        self.assertEqual('topological', repo._format._fetch_order)
215
 
 
216
 
    def test_attribute__fetch_uses_deltas(self):
217
 
        """Weaves do not reuse deltas."""
218
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
219
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
220
 
        self.assertEqual(False, repo._format._fetch_uses_deltas)
221
 
 
222
 
    def test_attribute__fetch_reconcile(self):
223
 
        """Weave repositories need a reconcile after fetch."""
224
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
225
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
226
 
        self.assertEqual(True, repo._format._fetch_reconcile)
227
 
 
 
160
    
228
161
    def test_disk_layout(self):
229
162
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
230
163
        repo = weaverepo.RepositoryFormat7().initialize(control)
246
179
                             'w\n'
247
180
                             'W\n',
248
181
                             t.get('inventory.weave').read())
249
 
        # Creating a file with id Foo:Bar results in a non-escaped file name on
250
 
        # disk.
251
 
        control.create_branch()
252
 
        tree = control.create_workingtree()
253
 
        tree.add(['foo'], ['Foo:Bar'], ['file'])
254
 
        tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
255
 
        tree.commit('first post', rev_id='first')
256
 
        self.assertEqualDiff(
257
 
            '# bzr weave file v5\n'
258
 
            'i\n'
259
 
            '1 7fe70820e08a1aac0ef224d9c66ab66831cc4ab1\n'
260
 
            'n first\n'
261
 
            '\n'
262
 
            'w\n'
263
 
            '{ 0\n'
264
 
            '. content\n'
265
 
            '}\n'
266
 
            'W\n',
267
 
            t.get('weaves/74/Foo%3ABar.weave').read())
268
182
 
269
183
    def test_shared_disk_layout(self):
270
184
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
293
207
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
294
208
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
295
209
        t = control.get_repository_transport(None)
296
 
        # TODO: Should check there is a 'lock' toplevel directory,
 
210
        # TODO: Should check there is a 'lock' toplevel directory, 
297
211
        # regardless of contents
298
212
        self.assertFalse(t.has('lock/held/info'))
299
213
        repo.lock_write()
345
259
                             'W\n',
346
260
                             t.get('inventory.weave').read())
347
261
 
348
 
    def test_supports_external_lookups(self):
349
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
350
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
351
 
        self.assertFalse(repo._format.supports_external_lookups)
352
 
 
353
262
 
354
263
class TestFormatKnit1(TestCaseWithTransport):
355
 
 
356
 
    def test_attribute__fetch_order(self):
357
 
        """Knits need topological data insertion."""
358
 
        repo = self.make_repository('.',
359
 
                format=bzrdir.format_registry.get('knit')())
360
 
        self.assertEqual('topological', repo._format._fetch_order)
361
 
 
362
 
    def test_attribute__fetch_uses_deltas(self):
363
 
        """Knits reuse deltas."""
364
 
        repo = self.make_repository('.',
365
 
                format=bzrdir.format_registry.get('knit')())
366
 
        self.assertEqual(True, repo._format._fetch_uses_deltas)
367
 
 
 
264
    
368
265
    def test_disk_layout(self):
369
266
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
370
267
        repo = knitrepo.RepositoryFormatKnit1().initialize(control)
384
281
        # self.assertEqualDiff('', t.get('lock').read())
385
282
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
386
283
        self.check_knits(t)
387
 
        # Check per-file knits.
388
 
        branch = control.create_branch()
389
 
        tree = control.create_workingtree()
390
 
        tree.add(['foo'], ['Nasty-IdC:'], ['file'])
391
 
        tree.put_file_bytes_non_atomic('Nasty-IdC:', '')
392
 
        tree.commit('1st post', rev_id='foo')
393
 
        self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
394
 
            '\nfoo fulltext 0 81  :')
395
284
 
396
 
    def assertHasKnit(self, t, knit_name, extra_content=''):
 
285
    def assertHasKnit(self, t, knit_name):
397
286
        """Assert that knit_name exists on t."""
398
 
        self.assertEqualDiff('# bzr knit index 8\n' + extra_content,
 
287
        self.assertEqualDiff('# bzr knit index 8\n',
399
288
                             t.get(knit_name + '.kndx').read())
 
289
        # no default content
 
290
        self.assertTrue(t.has(knit_name + '.knit'))
400
291
 
401
292
    def check_knits(self, t):
402
293
        """check knit content for a repository."""
446
337
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
447
338
        self.check_knits(t)
448
339
 
449
 
    def test_deserialise_sets_root_revision(self):
450
 
        """We must have a inventory.root.revision
451
 
 
452
 
        Old versions of the XML5 serializer did not set the revision_id for
453
 
        the whole inventory. So we grab the one from the expected text. Which
454
 
        is valid when the api is not being abused.
455
 
        """
456
 
        repo = self.make_repository('.',
457
 
                format=bzrdir.format_registry.get('knit')())
458
 
        inv_xml = '<inventory format="5">\n</inventory>\n'
459
 
        inv = repo.deserialise_inventory('test-rev-id', inv_xml)
460
 
        self.assertEqual('test-rev-id', inv.root.revision)
461
 
 
462
 
    def test_deserialise_uses_global_revision_id(self):
463
 
        """If it is set, then we re-use the global revision id"""
464
 
        repo = self.make_repository('.',
465
 
                format=bzrdir.format_registry.get('knit')())
466
 
        inv_xml = ('<inventory format="5" revision_id="other-rev-id">\n'
467
 
                   '</inventory>\n')
468
 
        # Arguably, the deserialise_inventory should detect a mismatch, and
469
 
        # raise an error, rather than silently using one revision_id over the
470
 
        # other.
471
 
        self.assertRaises(AssertionError, repo.deserialise_inventory,
472
 
            'test-rev-id', inv_xml)
473
 
        inv = repo.deserialise_inventory('other-rev-id', inv_xml)
474
 
        self.assertEqual('other-rev-id', inv.root.revision)
475
 
 
476
 
    def test_supports_external_lookups(self):
477
 
        repo = self.make_repository('.',
478
 
                format=bzrdir.format_registry.get('knit')())
479
 
        self.assertFalse(repo._format.supports_external_lookups)
480
 
 
481
340
 
482
341
class DummyRepository(object):
483
342
    """A dummy repository for testing."""
484
343
 
485
 
    _format = None
486
344
    _serializer = None
487
345
 
488
346
    def supports_rich_root(self):
489
347
        return False
490
348
 
491
 
    def get_graph(self):
492
 
        raise NotImplementedError
493
 
 
494
 
    def get_parent_map(self, revision_ids):
495
 
        raise NotImplementedError
496
 
 
497
349
 
498
350
class InterDummy(repository.InterRepository):
499
351
    """An inter-repository optimised code path for DummyRepository.
500
352
 
501
353
    This is for use during testing where we use DummyRepository as repositories
502
354
    so that none of the default regsitered inter-repository classes will
503
 
    MATCH.
 
355
    match.
504
356
    """
505
357
 
506
358
    @staticmethod
507
359
    def is_compatible(repo_source, repo_target):
508
360
        """InterDummy is compatible with DummyRepository."""
509
 
        return (isinstance(repo_source, DummyRepository) and
 
361
        return (isinstance(repo_source, DummyRepository) and 
510
362
            isinstance(repo_target, DummyRepository))
511
363
 
512
364
 
525
377
 
526
378
    def assertGetsDefaultInterRepository(self, repo_a, repo_b):
527
379
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default.
528
 
 
 
380
        
529
381
        The effective default is now InterSameDataRepository because there is
530
382
        no actual sane default in the presence of incompatible data models.
531
383
        """
615
467
 
616
468
 
617
469
class TestMisc(TestCase):
618
 
 
 
470
    
619
471
    def test_unescape_xml(self):
620
472
        """We get some kind of error when malformed entities are passed"""
621
 
        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;')
 
473
        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;') 
622
474
 
623
475
 
624
476
class TestRepositoryFormatKnit3(TestCaseWithTransport):
625
477
 
626
 
    def test_attribute__fetch_order(self):
627
 
        """Knits need topological data insertion."""
628
 
        format = bzrdir.BzrDirMetaFormat1()
629
 
        format.repository_format = knitrepo.RepositoryFormatKnit3()
630
 
        repo = self.make_repository('.', format=format)
631
 
        self.assertEqual('topological', repo._format._fetch_order)
632
 
 
633
 
    def test_attribute__fetch_uses_deltas(self):
634
 
        """Knits reuse deltas."""
635
 
        format = bzrdir.BzrDirMetaFormat1()
636
 
        format.repository_format = knitrepo.RepositoryFormatKnit3()
637
 
        repo = self.make_repository('.', format=format)
638
 
        self.assertEqual(True, repo._format._fetch_uses_deltas)
639
 
 
640
478
    def test_convert(self):
641
479
        """Ensure the upgrade adds weaves for roots"""
642
480
        format = bzrdir.BzrDirMetaFormat1()
644
482
        tree = self.make_branch_and_tree('.', format)
645
483
        tree.commit("Dull commit", rev_id="dull")
646
484
        revision_tree = tree.branch.repository.revision_tree('dull')
647
 
        revision_tree.lock_read()
648
 
        try:
649
 
            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
650
 
                revision_tree.inventory.root.file_id)
651
 
        finally:
652
 
            revision_tree.unlock()
 
485
        self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
 
486
            revision_tree.inventory.root.file_id)
653
487
        format = bzrdir.BzrDirMetaFormat1()
654
488
        format.repository_format = knitrepo.RepositoryFormatKnit3()
655
489
        upgrade.Convert('.', format)
656
490
        tree = workingtree.WorkingTree.open('.')
657
491
        revision_tree = tree.branch.repository.revision_tree('dull')
658
 
        revision_tree.lock_read()
659
 
        try:
660
 
            revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
661
 
        finally:
662
 
            revision_tree.unlock()
 
492
        revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
663
493
        tree.commit("Another dull commit", rev_id='dull2')
664
494
        revision_tree = tree.branch.repository.revision_tree('dull2')
665
 
        revision_tree.lock_read()
666
 
        self.addCleanup(revision_tree.unlock)
667
495
        self.assertEqual('dull', revision_tree.inventory.root.revision)
668
496
 
669
 
    def test_supports_external_lookups(self):
670
 
        format = bzrdir.BzrDirMetaFormat1()
671
 
        format.repository_format = knitrepo.RepositoryFormatKnit3()
672
 
        repo = self.make_repository('.', format=format)
673
 
        self.assertFalse(repo._format.supports_external_lookups)
674
 
 
675
 
 
676
 
class Test2a(TestCaseWithTransport):
677
 
 
678
 
    def test_format_pack_compresses_True(self):
679
 
        repo = self.make_repository('repo', format='2a')
680
 
        self.assertTrue(repo._format.pack_compresses)
681
 
 
682
 
    def test_inventories_use_chk_map_with_parent_base_dict(self):
683
 
        tree = self.make_branch_and_tree('repo', format="2a")
684
 
        revid = tree.commit("foo")
685
 
        tree.lock_read()
686
 
        self.addCleanup(tree.unlock)
687
 
        inv = tree.branch.repository.get_inventory(revid)
688
 
        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
689
 
        inv.parent_id_basename_to_file_id._ensure_root()
690
 
        inv.id_to_entry._ensure_root()
691
 
        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
692
 
        self.assertEqual(65536,
693
 
            inv.parent_id_basename_to_file_id._root_node.maximum_size)
694
 
 
695
 
    def test_autopack_unchanged_chk_nodes(self):
696
 
        # at 20 unchanged commits, chk pages are packed that are split into
697
 
        # two groups such that the new pack being made doesn't have all its
698
 
        # pages in the source packs (though they are in the repository).
699
 
        tree = self.make_branch_and_tree('tree', format='2a')
700
 
        for pos in range(20):
701
 
            tree.commit(str(pos))
702
 
 
703
 
    def test_pack_with_hint(self):
704
 
        tree = self.make_branch_and_tree('tree', format='2a')
705
 
        # 1 commit to leave untouched
706
 
        tree.commit('1')
707
 
        to_keep = tree.branch.repository._pack_collection.names()
708
 
        # 2 to combine
709
 
        tree.commit('2')
710
 
        tree.commit('3')
711
 
        all = tree.branch.repository._pack_collection.names()
712
 
        combine = list(set(all) - set(to_keep))
713
 
        self.assertLength(3, all)
714
 
        self.assertLength(2, combine)
715
 
        tree.branch.repository.pack(hint=combine)
716
 
        final = tree.branch.repository._pack_collection.names()
717
 
        self.assertLength(2, final)
718
 
        self.assertFalse(combine[0] in final)
719
 
        self.assertFalse(combine[1] in final)
720
 
        self.assertSubset(to_keep, final)
721
 
 
722
 
    def test_stream_source_to_gc(self):
723
 
        source = self.make_repository('source', format='2a')
724
 
        target = self.make_repository('target', format='2a')
725
 
        stream = source._get_source(target._format)
726
 
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)
727
 
 
728
 
    def test_stream_source_to_non_gc(self):
729
 
        source = self.make_repository('source', format='2a')
730
 
        target = self.make_repository('target', format='rich-root-pack')
731
 
        stream = source._get_source(target._format)
732
 
        # We don't want the child GroupCHKStreamSource
733
 
        self.assertIs(type(stream), repository.StreamSource)
734
 
 
735
 
    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
736
 
        source_builder = self.make_branch_builder('source',
737
 
                            format='2a')
738
 
        # We have to build a fairly large tree, so that we are sure the chk
739
 
        # pages will have split into multiple pages.
740
 
        entries = [('add', ('', 'a-root-id', 'directory', None))]
741
 
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
742
 
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
743
 
                fname = i + j
744
 
                fid = fname + '-id'
745
 
                content = 'content for %s\n' % (fname,)
746
 
                entries.append(('add', (fname, fid, 'file', content)))
747
 
        source_builder.start_series()
748
 
        source_builder.build_snapshot('rev-1', None, entries)
749
 
        # Now change a few of them, so we get a few new pages for the second
750
 
        # revision
751
 
        source_builder.build_snapshot('rev-2', ['rev-1'], [
752
 
            ('modify', ('aa-id', 'new content for aa-id\n')),
753
 
            ('modify', ('cc-id', 'new content for cc-id\n')),
754
 
            ('modify', ('zz-id', 'new content for zz-id\n')),
755
 
            ])
756
 
        source_builder.finish_series()
757
 
        source_branch = source_builder.get_branch()
758
 
        source_branch.lock_read()
759
 
        self.addCleanup(source_branch.unlock)
760
 
        target = self.make_repository('target', format='2a')
761
 
        source = source_branch.repository._get_source(target._format)
762
 
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)
763
 
 
764
 
        # On a regular pass, getting the inventories and chk pages for rev-2
765
 
        # would only get the newly created chk pages
766
 
        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
767
 
                                    set(['rev-2']))
768
 
        simple_chk_records = []
769
 
        for vf_name, substream in source.get_stream(search):
770
 
            if vf_name == 'chk_bytes':
771
 
                for record in substream:
772
 
                    simple_chk_records.append(record.key)
773
 
            else:
774
 
                for _ in substream:
775
 
                    continue
776
 
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
777
 
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
778
 
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
779
 
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
780
 
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
781
 
                         simple_chk_records)
782
 
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
783
 
        # we should get a much larger set of pages.
784
 
        missing = [('inventories', 'rev-2')]
785
 
        full_chk_records = []
786
 
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
787
 
            if vf_name == 'inventories':
788
 
                for record in substream:
789
 
                    self.assertEqual(('rev-2',), record.key)
790
 
            elif vf_name == 'chk_bytes':
791
 
                for record in substream:
792
 
                    full_chk_records.append(record.key)
793
 
            else:
794
 
                self.fail('Should not be getting a stream of %s' % (vf_name,))
795
 
        # We have 257 records now. This is because we have 1 root page, and 256
796
 
        # leaf pages in a complete listing.
797
 
        self.assertEqual(257, len(full_chk_records))
798
 
        self.assertSubset(simple_chk_records, full_chk_records)
799
 
 
800
 
    def test_inconsistency_fatal(self):
801
 
        repo = self.make_repository('repo', format='2a')
802
 
        self.assertTrue(repo.revisions._index._inconsistency_fatal)
803
 
        self.assertFalse(repo.texts._index._inconsistency_fatal)
804
 
        self.assertFalse(repo.inventories._index._inconsistency_fatal)
805
 
        self.assertFalse(repo.signatures._index._inconsistency_fatal)
806
 
        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
807
 
 
808
 
 
809
 
class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):
810
 
 
811
 
    def test_source_to_exact_pack_092(self):
812
 
        source = self.make_repository('source', format='pack-0.92')
813
 
        target = self.make_repository('target', format='pack-0.92')
814
 
        stream_source = source._get_source(target._format)
815
 
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
816
 
 
817
 
    def test_source_to_exact_pack_rich_root_pack(self):
818
 
        source = self.make_repository('source', format='rich-root-pack')
819
 
        target = self.make_repository('target', format='rich-root-pack')
820
 
        stream_source = source._get_source(target._format)
821
 
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
822
 
 
823
 
    def test_source_to_exact_pack_19(self):
824
 
        source = self.make_repository('source', format='1.9')
825
 
        target = self.make_repository('target', format='1.9')
826
 
        stream_source = source._get_source(target._format)
827
 
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
828
 
 
829
 
    def test_source_to_exact_pack_19_rich_root(self):
830
 
        source = self.make_repository('source', format='1.9-rich-root')
831
 
        target = self.make_repository('target', format='1.9-rich-root')
832
 
        stream_source = source._get_source(target._format)
833
 
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
834
 
 
835
 
    def test_source_to_remote_exact_pack_19(self):
836
 
        trans = self.make_smart_server('target')
837
 
        trans.ensure_base()
838
 
        source = self.make_repository('source', format='1.9')
839
 
        target = self.make_repository('target', format='1.9')
840
 
        target = repository.Repository.open(trans.base)
841
 
        stream_source = source._get_source(target._format)
842
 
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
843
 
 
844
 
    def test_stream_source_to_non_exact(self):
845
 
        source = self.make_repository('source', format='pack-0.92')
846
 
        target = self.make_repository('target', format='1.9')
847
 
        stream = source._get_source(target._format)
848
 
        self.assertIs(type(stream), repository.StreamSource)
849
 
 
850
 
    def test_stream_source_to_non_exact_rich_root(self):
851
 
        source = self.make_repository('source', format='1.9')
852
 
        target = self.make_repository('target', format='1.9-rich-root')
853
 
        stream = source._get_source(target._format)
854
 
        self.assertIs(type(stream), repository.StreamSource)
855
 
 
856
 
    def test_source_to_remote_non_exact_pack_19(self):
857
 
        trans = self.make_smart_server('target')
858
 
        trans.ensure_base()
859
 
        source = self.make_repository('source', format='1.9')
860
 
        target = self.make_repository('target', format='1.6')
861
 
        target = repository.Repository.open(trans.base)
862
 
        stream_source = source._get_source(target._format)
863
 
        self.assertIs(type(stream_source), repository.StreamSource)
864
 
 
865
 
    def test_stream_source_to_knit(self):
866
 
        source = self.make_repository('source', format='pack-0.92')
867
 
        target = self.make_repository('target', format='dirstate')
868
 
        stream = source._get_source(target._format)
869
 
        self.assertIs(type(stream), repository.StreamSource)
870
 
 
871
 
 
872
 
class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
873
 
    """Tests for _find_parent_ids_of_revisions."""
874
 
 
875
 
    def setUp(self):
876
 
        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
877
 
        self.builder = self.make_branch_builder('source',
878
 
            format='development6-rich-root')
879
 
        self.builder.start_series()
880
 
        self.builder.build_snapshot('initial', None,
881
 
            [('add', ('', 'tree-root', 'directory', None))])
882
 
        self.repo = self.builder.get_branch().repository
883
 
        self.addCleanup(self.builder.finish_series)
884
 
 
885
 
    def assertParentIds(self, expected_result, rev_set):
886
 
        self.assertEqual(sorted(expected_result),
887
 
            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))
888
 
 
889
 
    def test_simple(self):
890
 
        self.builder.build_snapshot('revid1', None, [])
891
 
        self.builder.build_snapshot('revid2', ['revid1'], [])
892
 
        rev_set = ['revid2']
893
 
        self.assertParentIds(['revid1'], rev_set)
894
 
 
895
 
    def test_not_first_parent(self):
896
 
        self.builder.build_snapshot('revid1', None, [])
897
 
        self.builder.build_snapshot('revid2', ['revid1'], [])
898
 
        self.builder.build_snapshot('revid3', ['revid2'], [])
899
 
        rev_set = ['revid3', 'revid2']
900
 
        self.assertParentIds(['revid1'], rev_set)
901
 
 
902
 
    def test_not_null(self):
903
 
        rev_set = ['initial']
904
 
        self.assertParentIds([], rev_set)
905
 
 
906
 
    def test_not_null_set(self):
907
 
        self.builder.build_snapshot('revid1', None, [])
908
 
        rev_set = [_mod_revision.NULL_REVISION]
909
 
        self.assertParentIds([], rev_set)
910
 
 
911
 
    def test_ghost(self):
912
 
        self.builder.build_snapshot('revid1', None, [])
913
 
        rev_set = ['ghost', 'revid1']
914
 
        self.assertParentIds(['initial'], rev_set)
915
 
 
916
 
    def test_ghost_parent(self):
917
 
        self.builder.build_snapshot('revid1', None, [])
918
 
        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
919
 
        rev_set = ['revid2', 'revid1']
920
 
        self.assertParentIds(['ghost', 'initial'], rev_set)
921
 
 
922
 
    def test_righthand_parent(self):
923
 
        self.builder.build_snapshot('revid1', None, [])
924
 
        self.builder.build_snapshot('revid2a', ['revid1'], [])
925
 
        self.builder.build_snapshot('revid2b', ['revid1'], [])
926
 
        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
927
 
        rev_set = ['revid3', 'revid2a']
928
 
        self.assertParentIds(['revid1', 'revid2b'], rev_set)
929
 
 
930
 
 
931
 
class TestWithBrokenRepo(TestCaseWithTransport):
    """These tests seem to be more appropriate as interface tests?"""

    def make_broken_repository(self):
        """Return a repository deliberately populated with broken data.

        The repository contains:
          * rev1a: a well-formed revision containing 'file1'.
          * rev1b: an inventory and file text but no Revision object.
          * rev2: its 'file1' text claims rev1b as an ancestor, although
            rev1b is not in rev2's revision ancestry.
          * rev1c: only a file text is added — it stays a ghost.
          * rev3: its 'file2' text claims the ghost rev1c as an ancestor.
        """
        # XXX: This function is borrowed from Aaron's "Reconcile can fix bad
        # parent references" branch which is due to land in bzr.dev soon.  Once
        # it does, this duplication should be removed.
        repo = self.make_repository('broken-repo')
        cleanups = []
        try:
            repo.lock_write()
            cleanups.append(repo.unlock)
            repo.start_write_group()
            cleanups.append(repo.commit_write_group)
            # make rev1a: A well-formed revision, containing 'file1'
            inv = inventory.Inventory(revision_id='rev1a')
            inv.root.revision = 'rev1a'
            self.add_file(repo, inv, 'file1', 'rev1a', [])
            repo.add_inventory('rev1a', inv, [])
            revision = _mod_revision.Revision('rev1a',
                committer='jrandom@example.com', timestamp=0,
                inventory_sha1='', timezone=0, message='foo', parent_ids=[])
            repo.add_revision('rev1a',revision, inv)

            # make rev1b, which has no Revision, but has an Inventory, and
            # file1
            inv = inventory.Inventory(revision_id='rev1b')
            inv.root.revision = 'rev1b'
            self.add_file(repo, inv, 'file1', 'rev1b', [])
            repo.add_inventory('rev1b', inv, [])

            # make rev2, with file1 and file2
            # file2 is sane
            # file1 has 'rev1b' as an ancestor, even though this is not
            # mentioned by 'rev1a', making it an unreferenced ancestor
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file1', 'rev2', ['rev1a', 'rev1b'])
            self.add_file(repo, inv, 'file2', 'rev2', [])
            self.add_revision(repo, 'rev2', inv, ['rev1a'])

            # make ghost revision rev1c
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev1c', [])

            # make rev3 with file2
            # file2 refers to 'rev1c', which is a ghost in this repository, so
            # file2 cannot have rev1c as its ancestor.
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev3', ['rev1c'])
            self.add_revision(repo, 'rev3', inv, ['rev1c'])
            return repo
        finally:
            # Unwind in reverse registration order: commit the write group
            # before releasing the repository lock.
            for cleanup in reversed(cleanups):
                cleanup()

    def add_revision(self, repo, revision_id, inv, parent_ids):
        # Helper: store ``inv`` under ``revision_id`` and add a matching
        # Revision object to ``repo``.
        inv.revision_id = revision_id
        inv.root.revision = revision_id
        repo.add_inventory(revision_id, inv, parent_ids)
        revision = _mod_revision.Revision(revision_id,
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
            timezone=0, message='foo', parent_ids=parent_ids)
        repo.add_revision(revision_id,revision, inv)

    def add_file(self, repo, inv, filename, revision, parents):
        # Helper: add a one-line file text to the repository and a matching
        # InventoryFile entry to ``inv``.  The file id is derived from the
        # file name.
        file_id = filename + '-id'
        entry = inventory.InventoryFile(file_id, filename, 'TREE_ROOT')
        entry.revision = revision
        entry.text_size = 0
        inv.add(entry)
        text_key = (file_id, revision)
        parent_keys = [(file_id, parent) for parent in parents]
        repo.texts.add_lines(text_key, parent_keys, ['line\n'])

    def test_insert_from_broken_repo(self):
        """Inserting a data stream from a broken repository won't silently
        corrupt the target repository.
        """
        broken_repo = self.make_broken_repository()
        empty_repo = self.make_repository('empty-repo')
        # See bug https://bugs.launchpad.net/bzr/+bug/389141 for information
        # about why this was turned into expectFailure
        self.expectFailure('new Stream fetch fills in missing compression'
           ' parents (bug #389141)',
           self.assertRaises, (errors.RevisionNotPresent, errors.BzrCheckError),
                              empty_repo.fetch, broken_repo)
        self.assertRaises((errors.RevisionNotPresent, errors.BzrCheckError),
                          empty_repo.fetch, broken_repo)
1019
 
 
1020
 
 
1021
 
class TestRepositoryPackCollection(TestCaseWithTransport):
    # Tests for RepositoryPackCollection: the pack sizing/distribution
    # arithmetic, pack discovery by name, and reloading pack names when a
    # second collection modifies the repository underneath this one.

    def get_format(self):
        """Return the bzrdir format used by these tests ('pack-0.92')."""
        return bzrdir.format_registry.make_bzrdir('pack-0.92')

    def get_packs(self):
        """Create a pack repository at '.' and return its pack collection."""
        format = self.get_format()
        repo = self.make_repository('.', format=format)
        return repo._pack_collection

    def make_packs_and_alt_repo(self, write_lock=False):
        """Create a pack repo with 3 packs, and access it via a second repo."""
        tree = self.make_branch_and_tree('.')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        rev1 = tree.commit('one')
        rev2 = tree.commit('two')
        rev3 = tree.commit('three')
        # Open a second Repository object on the same location so the two
        # pack collections can drift out of sync with each other.
        r = repository.Repository.open('.')
        if write_lock:
            r.lock_write()
        else:
            r.lock_read()
        self.addCleanup(r.unlock)
        packs = r._pack_collection
        packs.ensure_loaded()
        return tree, r, packs, [rev1, rev2, rev3]

    def test__max_pack_count(self):
        """The maximum pack count is a function of the number of revisions."""
        # no revisions - one pack, so that we can have a revision free repo
        # without it blowing up
        packs = self.get_packs()
        self.assertEqual(1, packs._max_pack_count(0))
        # after that the sum of the digits, - check the first 1-9
        self.assertEqual(1, packs._max_pack_count(1))
        self.assertEqual(2, packs._max_pack_count(2))
        self.assertEqual(3, packs._max_pack_count(3))
        self.assertEqual(4, packs._max_pack_count(4))
        self.assertEqual(5, packs._max_pack_count(5))
        self.assertEqual(6, packs._max_pack_count(6))
        self.assertEqual(7, packs._max_pack_count(7))
        self.assertEqual(8, packs._max_pack_count(8))
        self.assertEqual(9, packs._max_pack_count(9))
        # check the boundary cases with two digits for the next decade
        self.assertEqual(1, packs._max_pack_count(10))
        self.assertEqual(2, packs._max_pack_count(11))
        self.assertEqual(10, packs._max_pack_count(19))
        self.assertEqual(2, packs._max_pack_count(20))
        self.assertEqual(3, packs._max_pack_count(21))
        # check some arbitrary big numbers
        self.assertEqual(25, packs._max_pack_count(112894))

    def test_pack_distribution_zero(self):
        """Zero revisions distribute as a single zero-sized bucket."""
        packs = self.get_packs()
        self.assertEqual([0], packs.pack_distribution(0))

    def test_ensure_loaded_unlocked(self):
        """ensure_loaded() requires the repository to be locked."""
        packs = self.get_packs()
        self.assertRaises(errors.ObjectNotLocked,
                          packs.ensure_loaded)

    def test_pack_distribution_one_to_nine(self):
        """1-9 revisions distribute as that many single-revision packs."""
        packs = self.get_packs()
        self.assertEqual([1],
            packs.pack_distribution(1))
        self.assertEqual([1, 1],
            packs.pack_distribution(2))
        self.assertEqual([1, 1, 1],
            packs.pack_distribution(3))
        self.assertEqual([1, 1, 1, 1],
            packs.pack_distribution(4))
        self.assertEqual([1, 1, 1, 1, 1],
            packs.pack_distribution(5))
        self.assertEqual([1, 1, 1, 1, 1, 1],
            packs.pack_distribution(6))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(7))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(8))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(9))

    def test_pack_distribution_stable_at_boundaries(self):
        """When there are multi-rev packs the counts are stable."""
        packs = self.get_packs()
        # in 10s:
        self.assertEqual([10], packs.pack_distribution(10))
        self.assertEqual([10, 1], packs.pack_distribution(11))
        self.assertEqual([10, 10], packs.pack_distribution(20))
        self.assertEqual([10, 10, 1], packs.pack_distribution(21))
        # 100s
        self.assertEqual([100], packs.pack_distribution(100))
        self.assertEqual([100, 1], packs.pack_distribution(101))
        self.assertEqual([100, 10, 1], packs.pack_distribution(111))
        self.assertEqual([100, 100], packs.pack_distribution(200))
        self.assertEqual([100, 100, 1], packs.pack_distribution(201))
        self.assertEqual([100, 100, 10, 1], packs.pack_distribution(211))

    def test_plan_pack_operations_2009_revisions_skip_all_packs(self):
        """No combining when the existing packs already fit the plan."""
        packs = self.get_packs()
        existing_packs = [(2000, "big"), (9, "medium")]
        # rev count - 2009 -> 2x1000 + 9x1
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 1, 1, 1, 1, 1, 1, 1, 1, 1])
        self.assertEqual([], pack_operations)

    def test_plan_pack_operations_2010_revisions_skip_all_packs(self):
        """No combining when pack sizes already match the distribution."""
        packs = self.get_packs()
        existing_packs = [(2000, "big"), (9, "medium"), (1, "single")]
        # rev count - 2010 -> 2x1000 + 1x10
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 10])
        self.assertEqual([], pack_operations)

    def test_plan_pack_operations_2010_combines_smallest_two(self):
        """Two undersized packs are planned into a single combine op."""
        packs = self.get_packs()
        existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"),
            (1, "single1")]
        # rev count - 2010 -> 2x1000 + 1x10 (3)
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 10])
        self.assertEqual([[2, ["single2", "single1"]]], pack_operations)

    def test_plan_pack_operations_creates_a_single_op(self):
        """Packs that all need rewriting are merged into one operation."""
        packs = self.get_packs()
        existing_packs = [(50, 'a'), (40, 'b'), (30, 'c'), (10, 'd'),
                          (10, 'e'), (6, 'f'), (4, 'g')]
        # rev count 150 -> 1x100 and 5x10
        # The two size 10 packs do not need to be touched. The 50, 40, 30 would
        # be combined into a single 120 size pack, and the 6 & 4 would
        # becombined into a size 10 pack. However, if we have to rewrite them,
        # we save a pack file with no increased I/O by putting them into the
        # same file.
        distribution = packs.pack_distribution(150)
        pack_operations = packs.plan_autopack_combinations(existing_packs,
                                                           distribution)
        self.assertEqual([[130, ['a', 'b', 'c', 'f', 'g']]], pack_operations)

    def test_all_packs_none(self):
        """A repository with no commits has no packs."""
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([], packs.all_packs())

    def test_all_packs_one(self):
        """One commit produces exactly one pack."""
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([
            packs.get_pack_by_name(packs.names()[0])],
            packs.all_packs())

    def test_all_packs_two(self):
        """Two commits produce two packs, both visible via all_packs()."""
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.commit('continue')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([
            packs.get_pack_by_name(packs.names()[0]),
            packs.get_pack_by_name(packs.names()[1]),
            ], packs.all_packs())

    def test_get_pack_by_name(self):
        """get_pack_by_name returns a fully initialised, cached pack."""
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.reset()
        packs.ensure_loaded()
        name = packs.names()[0]
        pack_1 = packs.get_pack_by_name(name)
        # the pack should be correctly initialised
        sizes = packs._names[name]
        rev_index = GraphIndex(packs._index_transport, name + '.rix', sizes[0])
        inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1])
        txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2])
        sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3])
        self.assertEqual(pack_repo.ExistingPack(packs._pack_transport,
            name, rev_index, inv_index, txt_index, sig_index), pack_1)
        # and the same instance should be returned on successive calls.
        self.assertTrue(pack_1 is packs.get_pack_by_name(name))

    def test_reload_pack_names_new_entry(self):
        # A pack added via another collection is only visible here after
        # reload_pack_names(), which reports whether anything changed.
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Add a new pack file into the repository
        rev4 = tree.commit('four')
        new_names = tree.branch.repository._pack_collection.names()
        new_name = set(new_names).difference(names)
        self.assertEqual(1, len(new_name))
        new_name = new_name.pop()
        # The old collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        # And the repository can access the new revision
        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
        self.assertFalse(packs.reload_pack_names())

    def test_reload_pack_names_added_and_removed(self):
        # A full repack via the other repo both removes old packs and adds
        # a new one; reload_pack_names() picks up both changes at once.
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Now repack the whole thing
        tree.branch.repository.pack()
        new_names = tree.branch.repository._pack_collection.names()
        # The other collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
        self.assertFalse(packs.reload_pack_names())

    def test_autopack_reloads_and_stops(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # After we have determined what needs to be autopacked, trigger a
        # full-pack via the other repo which will cause us to re-evaluate and
        # decide we don't need to do anything
        orig_execute = packs._execute_pack_operations
        def _munged_execute_pack_ops(*args, **kwargs):
            # Repack through the other repository object just before the
            # planned operations would run.
            tree.branch.repository.pack()
            return orig_execute(*args, **kwargs)
        packs._execute_pack_operations = _munged_execute_pack_ops
        # Force the planner to always want a single pack so autopack would
        # normally have work to do.
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        self.assertFalse(packs.autopack())
        self.assertEqual(1, len(packs.names()))
        self.assertEqual(tree.branch.repository._pack_collection.names(),
                         packs.names())
1263
 
 
1264
 
 
1265
 
class TestPack(TestCaseWithTransport):
    """Tests for the Pack object."""

    def assertCurrentlyEqual(self, left, right):
        # Equality must hold symmetrically, for both == and !=.
        for a, b in ((left, right), (right, left)):
            self.assertTrue(a == b)
            self.assertFalse(a != b)

    def assertCurrentlyNotEqual(self, left, right):
        # Inequality must hold symmetrically, for both == and !=.
        for a, b in ((left, right), (right, left)):
            self.assertFalse(a == b)
            self.assertTrue(a != b)

    def test___eq____ne__(self):
        """Equality of ExistingPack tracks every attribute."""
        left = pack_repo.ExistingPack('', '', '', '', '', '')
        right = pack_repo.ExistingPack('', '', '', '', '', '')
        self.assertCurrentlyEqual(left, right)
        # change all attributes and ensure equality changes as we do.
        for attr in ('revision_index', 'inventory_index', 'text_index',
                     'signature_index', 'name', 'transport'):
            setattr(left, attr, 'a')
            self.assertCurrentlyNotEqual(left, right)
            setattr(right, attr, 'a')
            self.assertCurrentlyEqual(left, right)

    def test_file_name(self):
        """file_name() is the pack's name plus a '.pack' suffix."""
        pack = pack_repo.ExistingPack('', 'a_name', '', '', '', '')
        self.assertEqual('a_name.pack', pack.file_name())
1313
 
 
1314
 
 
1315
 
class TestNewPack(TestCaseWithTransport):
    """Tests for pack_repo.NewPack."""

    def test_new_instance_attributes(self):
        # A freshly constructed NewPack exposes the collection's transports,
        # uses the collection's index builder class for its indices, and has
        # its bookkeeping attributes (hash, name, timing) initialised.
        upload_transport = self.get_transport('upload')
        pack_transport = self.get_transport('pack')
        index_transport = self.get_transport('index')
        upload_transport.mkdir('.')
        collection = pack_repo.RepositoryPackCollection(
            repo=None,
            transport=self.get_transport('.'),
            index_transport=index_transport,
            upload_transport=upload_transport,
            pack_transport=pack_transport,
            index_builder_class=BTreeBuilder,
            index_class=BTreeGraphIndex,
            use_chk_index=False)
        pack = pack_repo.NewPack(collection)
        self.assertIsInstance(pack.revision_index, BTreeBuilder)
        self.assertIsInstance(pack.inventory_index, BTreeBuilder)
        # _hash is an md5 object (compared by type, not value).
        self.assertIsInstance(pack._hash, type(osutils.md5()))
        self.assertTrue(pack.upload_transport is upload_transport)
        self.assertTrue(pack.index_transport is index_transport)
        self.assertTrue(pack.pack_transport is pack_transport)
        self.assertEqual(None, pack.index_sizes)
        # The pack gets a 20-character random string name.
        self.assertEqual(20, len(pack.random_name))
        self.assertIsInstance(pack.random_name, str)
        self.assertIsInstance(pack.start_time, float)
1343
 
 
1344
 
 
1345
 
class TestPacker(TestCaseWithTransport):
    """Tests for the packs repository Packer class."""

    def test_pack_optimizes_pack_order(self):
        """Packing a subset of revisions moves their source packs to the
        front of the packer's pack list, leaving the rest in order.
        """
        builder = self.make_branch_builder('.')
        builder.start_series()
        # Four commits, one pack file each: A (root + file f), then three
        # modifications B, C, D.
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('f', 'f-id', 'file', 'content\n'))])
        builder.build_snapshot('B', ['A'],
            [('modify', ('f-id', 'new-content\n'))])
        builder.build_snapshot('C', ['B'],
            [('modify', ('f-id', 'third-content\n'))])
        builder.build_snapshot('D', ['C'],
            [('modify', ('f-id', 'fourth-content\n'))])
        b = builder.get_branch()
        b.lock_read()
        builder.finish_series()
        self.addCleanup(b.unlock)
        # At this point, we should have 4 pack files available
        # Because of how they were built, they correspond to
        # ['D', 'C', 'B', 'A']
        packs = b.repository._pack_collection.packs
        packer = pack_repo.Packer(b.repository._pack_collection,
                                  packs, 'testing',
                                  revision_ids=['B', 'C'])
        # Now, when we are copying the B & C revisions, their pack files should
        # be moved to the front of the stack
        # The new ordering moves B & C to the front of the .packs attribute,
        # and leaves the others in the original order.
        new_packs = [packs[1], packs[2], packs[0], packs[3]]
        # pack()'s return value is not needed here; we only care about its
        # reordering side effect on packer.packs.
        packer.pack()
        self.assertEqual(new_packs, packer.packs)
1378
 
 
1379
 
 
1380
 
class TestOptimisingPacker(TestCaseWithTransport):
    """Tests for the OptimisingPacker class."""

    def get_pack_collection(self):
        """Return the pack collection of a fresh repository."""
        return self.make_repository('.')._pack_collection

    def test_open_pack_will_optimise(self):
        """open_pack() marks every index builder as size-optimised."""
        collection = self.get_pack_collection()
        packer = pack_repo.OptimisingPacker(collection, [], '.test')
        new_pack = packer.open_pack()
        self.assertIsInstance(new_pack, pack_repo.NewPack)
        for index in (new_pack.revision_index, new_pack.inventory_index,
                      new_pack.text_index, new_pack.signature_index):
            self.assertTrue(index._optimize_for_size)
1396
 
 
1397
 
 
1398
 
class TestCrossFormatPacks(TestCaseWithTransport):
    # Fetching/streaming between repositories of different formats should
    # trigger a pack of the target exactly when the target format wants one.
    # ``target.pack`` is monkey-patched with log_pack to record calls.

    def log_pack(self, hint=None):
        # Replacement for target.pack: record the call, delegate to the
        # original, and (when expected) check that a hint was supplied.
        self.calls.append(('pack', hint))
        self.orig_pack(hint=hint)
        if self.expect_hint:
            self.assertTrue(hint)

    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
        """Stream one commit from src_fmt to target_fmt via the sink API
        and assert whether target.pack was called.
        """
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository._get_source(target._format)
        # Intercept pack() so we can observe whether insert_stream packs.
        self.orig_pack = target.pack
        target.pack = self.log_pack
        search = target.search_missing_revision_ids(
            source_tree.branch.repository, tip)
        stream = source.get_stream(search)
        from_format = source_tree.branch.repository._format
        sink = target._get_sink()
        sink.insert_stream(stream, from_format, [])
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
        """Fetch one commit from src_fmt into target_fmt via fetch() and
        assert whether target.pack was called.
        """
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository
        # Intercept pack() so we can observe whether fetch packs.
        self.orig_pack = target.pack
        target.pack = self.log_pack
        target.fetch(source)
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def test_sink_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_stream('1.9', 'rich-root-pack', False)

    def test_sink_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_stream('1.9', '2a', True)

    def test_sink_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_stream('2a', '2a', False)

    def test_IDS_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_fetch('1.9', 'rich-root-pack', False)

    def test_IDS_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_fetch('1.9', '2a', True)

    def test_IDS_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_fetch('2a', '2a', False)