~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/tests/test_repository.py

'bzr selftest' now shows a progress bar with the number of tests, and 
progress made. 'make check' shows tests in -v mode, to be more useful
for the PQM status window. (Robert Collins).

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
 
# Copyright (C) 2006, 2007, 2008, 2009 Canonical Ltd
2
 
#
 
1
# (C) 2006 Canonical Ltd
 
2
 
3
3
# This program is free software; you can redistribute it and/or modify
4
4
# it under the terms of the GNU General Public License as published by
5
5
# the Free Software Foundation; either version 2 of the License, or
6
6
# (at your option) any later version.
7
 
#
 
7
 
8
8
# This program is distributed in the hope that it will be useful,
9
9
# but WITHOUT ANY WARRANTY; without even the implied warranty of
10
10
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
11
11
# GNU General Public License for more details.
12
 
#
 
12
 
13
13
# You should have received a copy of the GNU General Public License
14
14
# along with this program; if not, write to the Free Software
15
 
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
15
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
16
16
 
17
17
"""Tests for the Repository facility that are not interface tests.
18
18
 
19
 
For interface tests see tests/per_repository/*.py.
 
19
For interface tests see tests/repository_implementations/*.py.
20
20
 
21
21
For concrete class tests see this file, and for storage formats tests
22
22
also see this file.
23
23
"""
24
24
 
25
 
from stat import S_ISDIR
 
25
from stat import *
26
26
from StringIO import StringIO
27
 
import sys
28
27
 
29
28
import bzrlib
 
29
import bzrlib.bzrdir as bzrdir
 
30
import bzrlib.errors as errors
30
31
from bzrlib.errors import (NotBranchError,
31
32
                           NoSuchFile,
32
33
                           UnknownFormatError,
33
34
                           UnsupportedFormatError,
34
35
                           )
35
 
from bzrlib import (
36
 
    graph,
37
 
    tests,
38
 
    )
39
 
from bzrlib.branchbuilder import BranchBuilder
40
 
from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
41
 
from bzrlib.index import GraphIndex, InMemoryGraphIndex
42
 
from bzrlib.repository import RepositoryFormat
43
 
from bzrlib.smart import server
44
 
from bzrlib.tests import (
45
 
    TestCase,
46
 
    TestCaseWithTransport,
47
 
    TestSkipped,
48
 
    test_knit,
49
 
    )
50
 
from bzrlib.transport import (
51
 
    fakenfs,
52
 
    get_transport,
53
 
    )
 
36
import bzrlib.repository as repository
 
37
from bzrlib.tests import TestCase, TestCaseWithTransport
 
38
from bzrlib.transport import get_transport
 
39
from bzrlib.transport.http import HttpServer
54
40
from bzrlib.transport.memory import MemoryServer
55
 
from bzrlib import (
56
 
    bencode,
57
 
    bzrdir,
58
 
    errors,
59
 
    inventory,
60
 
    osutils,
61
 
    progress,
62
 
    repository,
63
 
    revision as _mod_revision,
64
 
    symbol_versioning,
65
 
    upgrade,
66
 
    workingtree,
67
 
    )
68
 
from bzrlib.repofmt import (
69
 
    groupcompress_repo,
70
 
    knitrepo,
71
 
    pack_repo,
72
 
    weaverepo,
73
 
    )
74
41
 
75
42
 
76
43
class TestDefaultFormat(TestCase):
77
44
 
78
45
    def test_get_set_default_format(self):
79
 
        old_default = bzrdir.format_registry.get('default')
80
 
        private_default = old_default().repository_format.__class__
81
46
        old_format = repository.RepositoryFormat.get_default_format()
82
 
        self.assertTrue(isinstance(old_format, private_default))
83
 
        def make_sample_bzrdir():
84
 
            my_bzrdir = bzrdir.BzrDirMetaFormat1()
85
 
            my_bzrdir.repository_format = SampleRepositoryFormat()
86
 
            return my_bzrdir
87
 
        bzrdir.format_registry.remove('default')
88
 
        bzrdir.format_registry.register('sample', make_sample_bzrdir, '')
89
 
        bzrdir.format_registry.set_default('sample')
 
47
        self.assertTrue(isinstance(old_format, repository.RepositoryFormatKnit1))
 
48
        repository.RepositoryFormat.set_default_format(SampleRepositoryFormat())
90
49
        # creating a repository should now create an instrumented dir.
91
50
        try:
92
51
            # the default branch format is used by the meta dir format
93
52
            # which is not the default bzrdir format at this point
94
 
            dir = bzrdir.BzrDirMetaFormat1().initialize('memory:///')
 
53
            dir = bzrdir.BzrDirMetaFormat1().initialize('memory:/')
95
54
            result = dir.create_repository()
96
55
            self.assertEqual(result, 'A bzr repository dir')
97
56
        finally:
98
 
            bzrdir.format_registry.remove('default')
99
 
            bzrdir.format_registry.remove('sample')
100
 
            bzrdir.format_registry.register('default', old_default, '')
101
 
        self.assertIsInstance(repository.RepositoryFormat.get_default_format(),
102
 
                              old_format.__class__)
 
57
            repository.RepositoryFormat.set_default_format(old_format)
 
58
        self.assertEqual(old_format, repository.RepositoryFormat.get_default_format())
103
59
 
104
60
 
105
61
class SampleRepositoryFormat(repository.RepositoryFormat):
106
62
    """A sample format
107
63
 
108
 
    this format is initializable, unsupported to aid in testing the
 
64
    this format is initializable, unsupported to aid in testing the 
109
65
    open and open(unsupported=True) routines.
110
66
    """
111
67
 
116
72
    def initialize(self, a_bzrdir, shared=False):
117
73
        """Initialize a repository in a BzrDir"""
118
74
        t = a_bzrdir.get_repository_transport(self)
119
 
        t.put_bytes('format', self.get_format_string())
 
75
        t.put('format', StringIO(self.get_format_string()))
120
76
        return 'A bzr repository dir'
121
77
 
122
78
    def is_supported(self):
132
88
    def test_find_format(self):
133
89
        # is the right format object found for a repository?
134
90
        # create a branch with a few known format objects.
135
 
        # this is not quite the same as
 
91
        # this is not quite the same as 
136
92
        self.build_tree(["foo/", "bar/"])
137
93
        def check_format(format, url):
138
94
            dir = format._matchingbzrdir.initialize(url)
140
96
            t = get_transport(url)
141
97
            found_format = repository.RepositoryFormat.find_format(dir)
142
98
            self.failUnless(isinstance(found_format, format.__class__))
143
 
        check_format(weaverepo.RepositoryFormat7(), "bar")
144
 
 
 
99
        check_format(repository.RepositoryFormat7(), "bar")
 
100
        
145
101
    def test_find_format_no_repository(self):
146
102
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
147
103
        self.assertRaises(errors.NoRepositoryPresent,
173
129
 
174
130
class TestFormat6(TestCaseWithTransport):
175
131
 
176
 
    def test_attribute__fetch_order(self):
177
 
        """Weaves need topological data insertion."""
178
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
179
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
180
 
        self.assertEqual('topological', repo._format._fetch_order)
181
 
 
182
 
    def test_attribute__fetch_uses_deltas(self):
183
 
        """Weaves do not reuse deltas."""
184
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
185
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
186
 
        self.assertEqual(False, repo._format._fetch_uses_deltas)
187
 
 
188
 
    def test_attribute__fetch_reconcile(self):
189
 
        """Weave repositories need a reconcile after fetch."""
190
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
191
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
192
 
        self.assertEqual(True, repo._format._fetch_reconcile)
193
 
 
194
132
    def test_no_ancestry_weave(self):
195
133
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
196
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
134
        repo = repository.RepositoryFormat6().initialize(control)
197
135
        # We no longer need to create the ancestry.weave file
198
136
        # since it is *never* used.
199
137
        self.assertRaises(NoSuchFile,
200
138
                          control.transport.get,
201
139
                          'ancestry.weave')
202
140
 
203
 
    def test_supports_external_lookups(self):
204
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
205
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
206
 
        self.assertFalse(repo._format.supports_external_lookups)
207
 
 
208
141
 
209
142
class TestFormat7(TestCaseWithTransport):
210
 
 
211
 
    def test_attribute__fetch_order(self):
212
 
        """Weaves need topological data insertion."""
213
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
214
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
215
 
        self.assertEqual('topological', repo._format._fetch_order)
216
 
 
217
 
    def test_attribute__fetch_uses_deltas(self):
218
 
        """Weaves do not reuse deltas."""
219
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
220
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
221
 
        self.assertEqual(False, repo._format._fetch_uses_deltas)
222
 
 
223
 
    def test_attribute__fetch_reconcile(self):
224
 
        """Weave repositories need a reconcile after fetch."""
225
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
226
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
227
 
        self.assertEqual(True, repo._format._fetch_reconcile)
228
 
 
 
143
    
229
144
    def test_disk_layout(self):
230
145
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
231
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
146
        repo = repository.RepositoryFormat7().initialize(control)
232
147
        # in case of side effects of locking.
233
148
        repo.lock_write()
234
149
        repo.unlock()
247
162
                             'w\n'
248
163
                             'W\n',
249
164
                             t.get('inventory.weave').read())
250
 
        # Creating a file with id Foo:Bar results in a non-escaped file name on
251
 
        # disk.
252
 
        control.create_branch()
253
 
        tree = control.create_workingtree()
254
 
        tree.add(['foo'], ['Foo:Bar'], ['file'])
255
 
        tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
256
 
        try:
257
 
            tree.commit('first post', rev_id='first')
258
 
        except errors.IllegalPath:
259
 
            if sys.platform != 'win32':
260
 
                raise
261
 
            self.knownFailure('Foo:Bar cannot be used as a file-id on windows'
262
 
                              ' in repo format 7')
263
 
            return
264
 
        self.assertEqualDiff(
265
 
            '# bzr weave file v5\n'
266
 
            'i\n'
267
 
            '1 7fe70820e08a1aac0ef224d9c66ab66831cc4ab1\n'
268
 
            'n first\n'
269
 
            '\n'
270
 
            'w\n'
271
 
            '{ 0\n'
272
 
            '. content\n'
273
 
            '}\n'
274
 
            'W\n',
275
 
            t.get('weaves/74/Foo%3ABar.weave').read())
276
165
 
277
166
    def test_shared_disk_layout(self):
278
167
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
279
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
 
168
        repo = repository.RepositoryFormat7().initialize(control, shared=True)
280
169
        # we want:
281
170
        # format 'Bazaar-NG Repository format 7'
282
171
        # inventory.weave == empty_weave
299
188
    def test_creates_lockdir(self):
300
189
        """Make sure it appears to be controlled by a LockDir existence"""
301
190
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
302
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
 
191
        repo = repository.RepositoryFormat7().initialize(control, shared=True)
303
192
        t = control.get_repository_transport(None)
304
 
        # TODO: Should check there is a 'lock' toplevel directory,
 
193
        # TODO: Should check there is a 'lock' toplevel directory, 
305
194
        # regardless of contents
306
195
        self.assertFalse(t.has('lock/held/info'))
307
196
        repo.lock_write()
315
204
        """repo format 7 actually locks on lockdir"""
316
205
        base_url = self.get_url()
317
206
        control = bzrdir.BzrDirMetaFormat1().initialize(base_url)
318
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
 
207
        repo = repository.RepositoryFormat7().initialize(control, shared=True)
319
208
        t = control.get_repository_transport(None)
320
209
        repo.lock_write()
321
210
        repo.unlock()
329
218
 
330
219
    def test_shared_no_tree_disk_layout(self):
331
220
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
332
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
 
221
        repo = repository.RepositoryFormat7().initialize(control, shared=True)
333
222
        repo.set_make_working_trees(False)
334
223
        # we want:
335
224
        # format 'Bazaar-NG Repository format 7'
353
242
                             'W\n',
354
243
                             t.get('inventory.weave').read())
355
244
 
356
 
    def test_supports_external_lookups(self):
357
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
358
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
359
 
        self.assertFalse(repo._format.supports_external_lookups)
360
 
 
361
245
 
362
246
class TestFormatKnit1(TestCaseWithTransport):
363
 
 
364
 
    def test_attribute__fetch_order(self):
365
 
        """Knits need topological data insertion."""
366
 
        repo = self.make_repository('.',
367
 
                format=bzrdir.format_registry.get('knit')())
368
 
        self.assertEqual('topological', repo._format._fetch_order)
369
 
 
370
 
    def test_attribute__fetch_uses_deltas(self):
371
 
        """Knits reuse deltas."""
372
 
        repo = self.make_repository('.',
373
 
                format=bzrdir.format_registry.get('knit')())
374
 
        self.assertEqual(True, repo._format._fetch_uses_deltas)
375
 
 
 
247
    
376
248
    def test_disk_layout(self):
377
249
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
378
 
        repo = knitrepo.RepositoryFormatKnit1().initialize(control)
 
250
        repo = repository.RepositoryFormatKnit1().initialize(control)
379
251
        # in case of side effects of locking.
380
252
        repo.lock_write()
381
253
        repo.unlock()
392
264
        # self.assertEqualDiff('', t.get('lock').read())
393
265
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
394
266
        self.check_knits(t)
395
 
        # Check per-file knits.
396
 
        branch = control.create_branch()
397
 
        tree = control.create_workingtree()
398
 
        tree.add(['foo'], ['Nasty-IdC:'], ['file'])
399
 
        tree.put_file_bytes_non_atomic('Nasty-IdC:', '')
400
 
        tree.commit('1st post', rev_id='foo')
401
 
        self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
402
 
            '\nfoo fulltext 0 81  :')
403
267
 
404
 
    def assertHasKnit(self, t, knit_name, extra_content=''):
 
268
    def assertHasKnit(self, t, knit_name):
405
269
        """Assert that knit_name exists on t."""
406
 
        self.assertEqualDiff('# bzr knit index 8\n' + extra_content,
 
270
        self.assertEqualDiff('# bzr knit index 8\n',
407
271
                             t.get(knit_name + '.kndx').read())
 
272
        # no default content
 
273
        self.assertTrue(t.has(knit_name + '.knit'))
408
274
 
409
275
    def check_knits(self, t):
410
276
        """check knit content for a repository."""
414
280
 
415
281
    def test_shared_disk_layout(self):
416
282
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
417
 
        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
 
283
        repo = repository.RepositoryFormatKnit1().initialize(control, shared=True)
418
284
        # we want:
419
285
        # format 'Bazaar-NG Knit Repository Format 1'
420
286
        # lock: is a directory
433
299
 
434
300
    def test_shared_no_tree_disk_layout(self):
435
301
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
436
 
        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
 
302
        repo = repository.RepositoryFormatKnit1().initialize(control, shared=True)
437
303
        repo.set_make_working_trees(False)
438
304
        # we want:
439
305
        # format 'Bazaar-NG Knit Repository Format 1'
454
320
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
455
321
        self.check_knits(t)
456
322
 
457
 
    def test_deserialise_sets_root_revision(self):
458
 
        """We must have a inventory.root.revision
459
 
 
460
 
        Old versions of the XML5 serializer did not set the revision_id for
461
 
        the whole inventory. So we grab the one from the expected text. Which
462
 
        is valid when the api is not being abused.
463
 
        """
464
 
        repo = self.make_repository('.',
465
 
                format=bzrdir.format_registry.get('knit')())
466
 
        inv_xml = '<inventory format="5">\n</inventory>\n'
467
 
        inv = repo.deserialise_inventory('test-rev-id', inv_xml)
468
 
        self.assertEqual('test-rev-id', inv.root.revision)
469
 
 
470
 
    def test_deserialise_uses_global_revision_id(self):
471
 
        """If it is set, then we re-use the global revision id"""
472
 
        repo = self.make_repository('.',
473
 
                format=bzrdir.format_registry.get('knit')())
474
 
        inv_xml = ('<inventory format="5" revision_id="other-rev-id">\n'
475
 
                   '</inventory>\n')
476
 
        # Arguably, the deserialise_inventory should detect a mismatch, and
477
 
        # raise an error, rather than silently using one revision_id over the
478
 
        # other.
479
 
        self.assertRaises(AssertionError, repo.deserialise_inventory,
480
 
            'test-rev-id', inv_xml)
481
 
        inv = repo.deserialise_inventory('other-rev-id', inv_xml)
482
 
        self.assertEqual('other-rev-id', inv.root.revision)
483
 
 
484
 
    def test_supports_external_lookups(self):
485
 
        repo = self.make_repository('.',
486
 
                format=bzrdir.format_registry.get('knit')())
487
 
        self.assertFalse(repo._format.supports_external_lookups)
488
 
 
489
 
 
490
 
class DummyRepository(object):
491
 
    """A dummy repository for testing."""
492
 
 
493
 
    _format = None
494
 
    _serializer = None
495
 
 
496
 
    def supports_rich_root(self):
497
 
        if self._format is not None:
498
 
            return self._format.rich_root_data
499
 
        return False
500
 
 
501
 
    def get_graph(self):
502
 
        raise NotImplementedError
503
 
 
504
 
    def get_parent_map(self, revision_ids):
505
 
        raise NotImplementedError
506
 
 
507
 
 
508
 
class InterDummy(repository.InterRepository):
509
 
    """An inter-repository optimised code path for DummyRepository.
510
 
 
511
 
    This is for use during testing where we use DummyRepository as repositories
 
323
 
 
324
class InterString(repository.InterRepository):
 
325
    """An inter-repository optimised code path for strings.
 
326
 
 
327
    This is for use during testing where we use strings as repositories
512
328
    so that none of the default regsitered inter-repository classes will
513
 
    MATCH.
 
329
    match.
514
330
    """
515
331
 
516
332
    @staticmethod
517
333
    def is_compatible(repo_source, repo_target):
518
 
        """InterDummy is compatible with DummyRepository."""
519
 
        return (isinstance(repo_source, DummyRepository) and
520
 
            isinstance(repo_target, DummyRepository))
 
334
        """InterString is compatible with strings-as-repos."""
 
335
        return isinstance(repo_source, str) and isinstance(repo_target, str)
521
336
 
522
337
 
523
338
class TestInterRepository(TestCaseWithTransport):
529
344
        # This also tests that the default registered optimised interrepository
530
345
        # classes do not barf inappropriately when a surprising repository type
531
346
        # is handed to them.
532
 
        dummy_a = DummyRepository()
533
 
        dummy_b = DummyRepository()
 
347
        dummy_a = "Repository 1."
 
348
        dummy_b = "Repository 2."
534
349
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
535
350
 
536
351
    def assertGetsDefaultInterRepository(self, repo_a, repo_b):
537
 
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default.
538
 
 
539
 
        The effective default is now InterSameDataRepository because there is
540
 
        no actual sane default in the presence of incompatible data models.
541
 
        """
 
352
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default."""
542
353
        inter_repo = repository.InterRepository.get(repo_a, repo_b)
543
 
        self.assertEqual(repository.InterSameDataRepository,
 
354
        self.assertEqual(repository.InterRepository,
544
355
                         inter_repo.__class__)
545
356
        self.assertEqual(repo_a, inter_repo.source)
546
357
        self.assertEqual(repo_b, inter_repo.target)
551
362
        # and that it is correctly selected when given a repository
552
363
        # pair that it returns true on for the is_compatible static method
553
364
        # check
554
 
        dummy_a = DummyRepository()
555
 
        dummy_a._format = RepositoryFormat()
556
 
        dummy_b = DummyRepository()
557
 
        dummy_b._format = RepositoryFormat()
558
 
        repo = self.make_repository('.')
559
 
        # hack dummies to look like repo somewhat.
560
 
        dummy_a._serializer = repo._serializer
561
 
        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
562
 
        dummy_a._format.rich_root_data = repo._format.rich_root_data
563
 
        dummy_b._serializer = repo._serializer
564
 
        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
565
 
        dummy_b._format.rich_root_data = repo._format.rich_root_data
566
 
        repository.InterRepository.register_optimiser(InterDummy)
 
365
        dummy_a = "Repository 1."
 
366
        dummy_b = "Repository 2."
 
367
        repository.InterRepository.register_optimiser(InterString)
567
368
        try:
568
 
            # we should get the default for something InterDummy returns False
 
369
            # we should get the default for something InterString returns False
569
370
            # to
570
 
            self.assertFalse(InterDummy.is_compatible(dummy_a, repo))
571
 
            self.assertGetsDefaultInterRepository(dummy_a, repo)
572
 
            # and we should get an InterDummy for a pair it 'likes'
573
 
            self.assertTrue(InterDummy.is_compatible(dummy_a, dummy_b))
 
371
            self.assertFalse(InterString.is_compatible(dummy_a, None))
 
372
            self.assertGetsDefaultInterRepository(dummy_a, None)
 
373
            # and we should get an InterString for a pair it 'likes'
 
374
            self.assertTrue(InterString.is_compatible(dummy_a, dummy_b))
574
375
            inter_repo = repository.InterRepository.get(dummy_a, dummy_b)
575
 
            self.assertEqual(InterDummy, inter_repo.__class__)
 
376
            self.assertEqual(InterString, inter_repo.__class__)
576
377
            self.assertEqual(dummy_a, inter_repo.source)
577
378
            self.assertEqual(dummy_b, inter_repo.target)
578
379
        finally:
579
 
            repository.InterRepository.unregister_optimiser(InterDummy)
 
380
            repository.InterRepository.unregister_optimiser(InterString)
580
381
        # now we should get the default InterRepository object again.
581
382
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
582
383
 
586
387
    def test_is_compatible_and_registered(self):
587
388
        # InterWeaveRepo is compatible when either side
588
389
        # is a format 5/6/7 branch
589
 
        from bzrlib.repofmt import knitrepo, weaverepo
590
 
        formats = [weaverepo.RepositoryFormat5(),
591
 
                   weaverepo.RepositoryFormat6(),
592
 
                   weaverepo.RepositoryFormat7()]
593
 
        incompatible_formats = [weaverepo.RepositoryFormat4(),
594
 
                                knitrepo.RepositoryFormatKnit1(),
 
390
        formats = [repository.RepositoryFormat5(),
 
391
                   repository.RepositoryFormat6(),
 
392
                   repository.RepositoryFormat7()]
 
393
        incompatible_formats = [repository.RepositoryFormat4(),
 
394
                                repository.RepositoryFormatKnit1(),
595
395
                                ]
596
396
        repo_a = self.make_repository('a')
597
397
        repo_b = self.make_repository('b')
618
418
        t = get_transport(self.get_url('.'))
619
419
        t.mkdir('repository')
620
420
        repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
621
 
        repo = weaverepo.RepositoryFormat7().initialize(repo_dir)
622
 
        target_format = knitrepo.RepositoryFormatKnit1()
 
421
        repo = repository.RepositoryFormat7().initialize(repo_dir)
 
422
        target_format = repository.RepositoryFormatKnit1()
623
423
        converter = repository.CopyConverter(target_format)
624
424
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
625
425
        try:
628
428
            pb.finished()
629
429
        repo = repo_dir.open_repository()
630
430
        self.assertTrue(isinstance(target_format, repo._format.__class__))
631
 
 
632
 
 
633
 
class TestMisc(TestCase):
634
 
 
635
 
    def test_unescape_xml(self):
636
 
        """We get some kind of error when malformed entities are passed"""
637
 
        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;')
638
 
 
639
 
 
640
 
class TestRepositoryFormatKnit3(TestCaseWithTransport):
641
 
 
642
 
    def test_attribute__fetch_order(self):
643
 
        """Knits need topological data insertion."""
644
 
        format = bzrdir.BzrDirMetaFormat1()
645
 
        format.repository_format = knitrepo.RepositoryFormatKnit3()
646
 
        repo = self.make_repository('.', format=format)
647
 
        self.assertEqual('topological', repo._format._fetch_order)
648
 
 
649
 
    def test_attribute__fetch_uses_deltas(self):
650
 
        """Knits reuse deltas."""
651
 
        format = bzrdir.BzrDirMetaFormat1()
652
 
        format.repository_format = knitrepo.RepositoryFormatKnit3()
653
 
        repo = self.make_repository('.', format=format)
654
 
        self.assertEqual(True, repo._format._fetch_uses_deltas)
655
 
 
656
 
    def test_convert(self):
657
 
        """Ensure the upgrade adds weaves for roots"""
658
 
        format = bzrdir.BzrDirMetaFormat1()
659
 
        format.repository_format = knitrepo.RepositoryFormatKnit1()
660
 
        tree = self.make_branch_and_tree('.', format)
661
 
        tree.commit("Dull commit", rev_id="dull")
662
 
        revision_tree = tree.branch.repository.revision_tree('dull')
663
 
        revision_tree.lock_read()
664
 
        try:
665
 
            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
666
 
                revision_tree.inventory.root.file_id)
667
 
        finally:
668
 
            revision_tree.unlock()
669
 
        format = bzrdir.BzrDirMetaFormat1()
670
 
        format.repository_format = knitrepo.RepositoryFormatKnit3()
671
 
        upgrade.Convert('.', format)
672
 
        tree = workingtree.WorkingTree.open('.')
673
 
        revision_tree = tree.branch.repository.revision_tree('dull')
674
 
        revision_tree.lock_read()
675
 
        try:
676
 
            revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
677
 
        finally:
678
 
            revision_tree.unlock()
679
 
        tree.commit("Another dull commit", rev_id='dull2')
680
 
        revision_tree = tree.branch.repository.revision_tree('dull2')
681
 
        revision_tree.lock_read()
682
 
        self.addCleanup(revision_tree.unlock)
683
 
        self.assertEqual('dull', revision_tree.inventory.root.revision)
684
 
 
685
 
    def test_supports_external_lookups(self):
686
 
        format = bzrdir.BzrDirMetaFormat1()
687
 
        format.repository_format = knitrepo.RepositoryFormatKnit3()
688
 
        repo = self.make_repository('.', format=format)
689
 
        self.assertFalse(repo._format.supports_external_lookups)
690
 
 
691
 
 
692
 
class Test2a(tests.TestCaseWithMemoryTransport):
693
 
 
694
 
    def test_fetch_combines_groups(self):
695
 
        builder = self.make_branch_builder('source', format='2a')
696
 
        builder.start_series()
697
 
        builder.build_snapshot('1', None, [
698
 
            ('add', ('', 'root-id', 'directory', '')),
699
 
            ('add', ('file', 'file-id', 'file', 'content\n'))])
700
 
        builder.build_snapshot('2', ['1'], [
701
 
            ('modify', ('file-id', 'content-2\n'))])
702
 
        builder.finish_series()
703
 
        source = builder.get_branch()
704
 
        target = self.make_repository('target', format='2a')
705
 
        target.fetch(source.repository)
706
 
        target.lock_read()
707
 
        self.addCleanup(target.unlock)
708
 
        details = target.texts._index.get_build_details(
709
 
            [('file-id', '1',), ('file-id', '2',)])
710
 
        file_1_details = details[('file-id', '1')]
711
 
        file_2_details = details[('file-id', '2')]
712
 
        # The index, and what to read off disk, should be the same for both
713
 
        # versions of the file.
714
 
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
715
 
 
716
 
    def test_fetch_combines_groups(self):
717
 
        builder = self.make_branch_builder('source', format='2a')
718
 
        builder.start_series()
719
 
        builder.build_snapshot('1', None, [
720
 
            ('add', ('', 'root-id', 'directory', '')),
721
 
            ('add', ('file', 'file-id', 'file', 'content\n'))])
722
 
        builder.build_snapshot('2', ['1'], [
723
 
            ('modify', ('file-id', 'content-2\n'))])
724
 
        builder.finish_series()
725
 
        source = builder.get_branch()
726
 
        target = self.make_repository('target', format='2a')
727
 
        target.fetch(source.repository)
728
 
        target.lock_read()
729
 
        self.addCleanup(target.unlock)
730
 
        details = target.texts._index.get_build_details(
731
 
            [('file-id', '1',), ('file-id', '2',)])
732
 
        file_1_details = details[('file-id', '1')]
733
 
        file_2_details = details[('file-id', '2')]
734
 
        # The index, and what to read off disk, should be the same for both
735
 
        # versions of the file.
736
 
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
737
 
 
738
 
    def test_fetch_combines_groups(self):
        """Fetching between 2a repositories yields identical build details
        for both versions of a file.

        NOTE(review): duplicate definition of this test within the class —
        only the last definition is bound, the others are dead code.
        """
        builder = self.make_branch_builder('source', format='2a')
        builder.start_series()
        builder.build_snapshot('1', None, [
            ('add', ('', 'root-id', 'directory', '')),
            ('add', ('file', 'file-id', 'file', 'content\n'))])
        builder.build_snapshot('2', ['1'], [
            ('modify', ('file-id', 'content-2\n'))])
        builder.finish_series()
        source = builder.get_branch()
        target = self.make_repository('target', format='2a')
        target.fetch(source.repository)
        target.lock_read()
        self.addCleanup(target.unlock)
        details = target.texts._index.get_build_details(
            [('file-id', '1',), ('file-id', '2',)])
        file_1_details = details[('file-id', '1')]
        file_2_details = details[('file-id', '2')]
        # The index, and what to read off disk, should be the same for both
        # versions of the file.
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
759
 
 
760
 
    def test_format_pack_compresses_True(self):
761
 
        repo = self.make_repository('repo', format='2a')
762
 
        self.assertTrue(repo._format.pack_compresses)
763
 
 
764
 
    def test_inventories_use_chk_map_with_parent_base_dict(self):
        """A committed 2a inventory exposes chk maps with 64KiB page size.

        Checks both the id_to_entry map and the parent_id,basename->file_id
        map on the inventory read back from the repository.
        """
        tree = self.make_branch_and_memory_tree('repo', format="2a")
        tree.lock_write()
        tree.add([''], ['TREE_ROOT'])
        revid = tree.commit("foo")
        # Drop the write lock before re-locking for read; the inventory is
        # inspected through a read-only view.
        tree.unlock()
        tree.lock_read()
        self.addCleanup(tree.unlock)
        inv = tree.branch.repository.get_inventory(revid)
        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
        # Force the root nodes into memory so maximum_size is populated.
        inv.parent_id_basename_to_file_id._ensure_root()
        inv.id_to_entry._ensure_root()
        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
        self.assertEqual(65536,
            inv.parent_id_basename_to_file_id._root_node.maximum_size)
779
 
 
780
 
    def test_autopack_unchanged_chk_nodes(self):
781
 
        # at 20 unchanged commits, chk pages are packed that are split into
782
 
        # two groups such that the new pack being made doesn't have all its
783
 
        # pages in the source packs (though they are in the repository).
784
 
        # Use a memory backed repository, we don't need to hit disk for this
785
 
        tree = self.make_branch_and_memory_tree('tree', format='2a')
786
 
        tree.lock_write()
787
 
        self.addCleanup(tree.unlock)
788
 
        tree.add([''], ['TREE_ROOT'])
789
 
        for pos in range(20):
790
 
            tree.commit(str(pos))
791
 
 
792
 
    def test_pack_with_hint(self):
793
 
        tree = self.make_branch_and_memory_tree('tree', format='2a')
794
 
        tree.lock_write()
795
 
        self.addCleanup(tree.unlock)
796
 
        tree.add([''], ['TREE_ROOT'])
797
 
        # 1 commit to leave untouched
798
 
        tree.commit('1')
799
 
        to_keep = tree.branch.repository._pack_collection.names()
800
 
        # 2 to combine
801
 
        tree.commit('2')
802
 
        tree.commit('3')
803
 
        all = tree.branch.repository._pack_collection.names()
804
 
        combine = list(set(all) - set(to_keep))
805
 
        self.assertLength(3, all)
806
 
        self.assertLength(2, combine)
807
 
        tree.branch.repository.pack(hint=combine)
808
 
        final = tree.branch.repository._pack_collection.names()
809
 
        self.assertLength(2, final)
810
 
        self.assertFalse(combine[0] in final)
811
 
        self.assertFalse(combine[1] in final)
812
 
        self.assertSubset(to_keep, final)
813
 
 
814
 
    def test_stream_source_to_gc(self):
815
 
        source = self.make_repository('source', format='2a')
816
 
        target = self.make_repository('target', format='2a')
817
 
        stream = source._get_source(target._format)
818
 
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)
819
 
 
820
 
    def test_stream_source_to_non_gc(self):
821
 
        source = self.make_repository('source', format='2a')
822
 
        target = self.make_repository('target', format='rich-root-pack')
823
 
        stream = source._get_source(target._format)
824
 
        # We don't want the child GroupCHKStreamSource
825
 
        self.assertIs(type(stream), repository.StreamSource)
826
 
 
827
 
    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
        """get_stream_for_missing_keys streams every chk page an inventory
        references, not only the pages that are new in that revision.
        """
        source_builder = self.make_branch_builder('source',
                            format='2a')
        # We have to build a fairly large tree, so that we are sure the chk
        # pages will have split into multiple pages.
        entries = [('add', ('', 'a-root-id', 'directory', None))]
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                fname = i + j
                fid = fname + '-id'
                content = 'content for %s\n' % (fname,)
                entries.append(('add', (fname, fid, 'file', content)))
        source_builder.start_series()
        source_builder.build_snapshot('rev-1', None, entries)
        # Now change a few of them, so we get a few new pages for the second
        # revision
        source_builder.build_snapshot('rev-2', ['rev-1'], [
            ('modify', ('aa-id', 'new content for aa-id\n')),
            ('modify', ('cc-id', 'new content for cc-id\n')),
            ('modify', ('zz-id', 'new content for zz-id\n')),
            ])
        source_builder.finish_series()
        source_branch = source_builder.get_branch()
        source_branch.lock_read()
        self.addCleanup(source_branch.unlock)
        target = self.make_repository('target', format='2a')
        source = source_branch.repository._get_source(target._format)
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)

        # On a regular pass, getting the inventories and chk pages for rev-2
        # would only get the newly created chk pages
        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
                                    set(['rev-2']))
        simple_chk_records = []
        for vf_name, substream in source.get_stream(search):
            if vf_name == 'chk_bytes':
                for record in substream:
                    simple_chk_records.append(record.key)
            else:
                # Other substreams must still be drained so the stream
                # advances, even though we don't inspect their records.
                for _ in substream:
                    continue
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
        # NOTE(review): the comment above says 3 pages but 4 keys are
        # expected below — confirm which is stale.
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
                         simple_chk_records)
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
        # we should get a much larger set of pages.
        missing = [('inventories', 'rev-2')]
        full_chk_records = []
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
            if vf_name == 'inventories':
                for record in substream:
                    self.assertEqual(('rev-2',), record.key)
            elif vf_name == 'chk_bytes':
                for record in substream:
                    full_chk_records.append(record.key)
            else:
                self.fail('Should not be getting a stream of %s' % (vf_name,))
        # We have 257 records now. This is because we have 1 root page, and 256
        # leaf pages in a complete listing.
        self.assertEqual(257, len(full_chk_records))
        self.assertSubset(simple_chk_records, full_chk_records)
891
 
 
892
 
    def test_inconsistency_fatal(self):
893
 
        repo = self.make_repository('repo', format='2a')
894
 
        self.assertTrue(repo.revisions._index._inconsistency_fatal)
895
 
        self.assertFalse(repo.texts._index._inconsistency_fatal)
896
 
        self.assertFalse(repo.inventories._index._inconsistency_fatal)
897
 
        self.assertFalse(repo.signatures._index._inconsistency_fatal)
898
 
        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
899
 
 
900
 
 
901
 
class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):
    """Tests for which stream source is chosen between pack repositories.

    An exact format match should select the optimised
    ``pack_repo.KnitPackStreamSource``; any mismatch must fall back to the
    generic ``repository.StreamSource``.
    """

    def _assertStreamSourceType(self, source_format, target_format,
                                expect_exact):
        """Make local source/target repos and check the stream source type.

        :param expect_exact: if True, expect KnitPackStreamSource;
            otherwise expect exactly repository.StreamSource.
        """
        source = self.make_repository('source', format=source_format)
        target = self.make_repository('target', format=target_format)
        stream_source = source._get_source(target._format)
        if expect_exact:
            self.assertIsInstance(stream_source,
                                  pack_repo.KnitPackStreamSource)
        else:
            self.assertIs(type(stream_source), repository.StreamSource)

    def test_source_to_exact_pack_092(self):
        self._assertStreamSourceType('pack-0.92', 'pack-0.92', True)

    def test_source_to_exact_pack_rich_root_pack(self):
        self._assertStreamSourceType('rich-root-pack', 'rich-root-pack', True)

    def test_source_to_exact_pack_19(self):
        self._assertStreamSourceType('1.9', '1.9', True)

    def test_source_to_exact_pack_19_rich_root(self):
        self._assertStreamSourceType('1.9-rich-root', '1.9-rich-root', True)

    def test_source_to_remote_exact_pack_19(self):
        # The target is re-opened over a smart server; an exact match must
        # still pick the optimised source.
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_stream_source_to_non_exact(self):
        self._assertStreamSourceType('pack-0.92', '1.9', False)

    def test_stream_source_to_non_exact_rich_root(self):
        self._assertStreamSourceType('1.9', '1.9-rich-root', False)

    def test_source_to_remote_non_exact_pack_19(self):
        # Format mismatch over a smart server falls back to StreamSource.
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.6')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIs(type(stream_source), repository.StreamSource)

    def test_stream_source_to_knit(self):
        self._assertStreamSourceType('pack-0.92', 'dirstate', False)
962
 
 
963
 
 
964
 
class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
    """Tests for _find_parent_ids_of_revisions.

    Each test builds a small revision graph on top of the 'initial'
    snapshot created in setUp, then checks which revision ids are parents
    of the queried set but outside it.
    """

    def setUp(self):
        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
        # self.builder: used by tests to extend the graph beyond 'initial'.
        self.builder = self.make_branch_builder('source',
            format='development6-rich-root')
        self.builder.start_series()
        self.builder.build_snapshot('initial', None,
            [('add', ('', 'tree-root', 'directory', None))])
        # self.repo: the repository whose _find_parent_ids_of_revisions
        # is under test.
        self.repo = self.builder.get_branch().repository
        self.addCleanup(self.builder.finish_series)

    def assertParentIds(self, expected_result, rev_set):
        """Assert the parent ids of rev_set, ignoring ordering."""
        self.assertEqual(sorted(expected_result),
            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))

    def test_simple(self):
        # Parent of the single queried revision, outside the set.
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        rev_set = ['revid2']
        self.assertParentIds(['revid1'], rev_set)

    def test_not_first_parent(self):
        # revid2 is inside the set, so only revid1 is an external parent.
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2'], [])
        rev_set = ['revid3', 'revid2']
        self.assertParentIds(['revid1'], rev_set)

    def test_not_null(self):
        # 'initial' has no parents, so the result is empty (no NULL_REVISION).
        rev_set = ['initial']
        self.assertParentIds([], rev_set)

    def test_not_null_set(self):
        # NULL_REVISION itself contributes no parent ids.
        self.builder.build_snapshot('revid1', None, [])
        rev_set = [_mod_revision.NULL_REVISION]
        self.assertParentIds([], rev_set)

    def test_ghost(self):
        # A ghost id in the queried set is ignored; revid1's parent remains.
        self.builder.build_snapshot('revid1', None, [])
        rev_set = ['ghost', 'revid1']
        self.assertParentIds(['initial'], rev_set)

    def test_ghost_parent(self):
        # A ghost parent referenced by a queried revision IS reported.
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
        rev_set = ['revid2', 'revid1']
        self.assertParentIds(['ghost', 'initial'], rev_set)

    def test_righthand_parent(self):
        # The merge's right-hand parent revid2b is outside the queried set.
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2a', ['revid1'], [])
        self.builder.build_snapshot('revid2b', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
        rev_set = ['revid3', 'revid2a']
        self.assertParentIds(['revid1', 'revid2b'], rev_set)
1021
 
 
1022
 
 
1023
 
class TestWithBrokenRepo(TestCaseWithTransport):
    """These tests seem to be more appropriate as interface tests?"""

    def make_broken_repository(self):
        """Build a repository with deliberately inconsistent data.

        The result contains: rev1a (well formed), rev1b (inventory but no
        revision object), rev2 (file1 claims the unreferenced rev1b as an
        ancestor), rev1c (a ghost — text only, no inventory/revision), and
        rev3 (file2 claims the ghost rev1c as an ancestor).
        """
        # XXX: This function is borrowed from Aaron's "Reconcile can fix bad
        # parent references" branch which is due to land in bzr.dev soon.  Once
        # it does, this duplication should be removed.
        repo = self.make_repository('broken-repo')
        cleanups = []
        try:
            repo.lock_write()
            cleanups.append(repo.unlock)
            repo.start_write_group()
            # Committing the write group happens before unlocking (cleanups
            # run in reverse order below).
            cleanups.append(repo.commit_write_group)
            # make rev1a: A well-formed revision, containing 'file1'
            inv = inventory.Inventory(revision_id='rev1a')
            inv.root.revision = 'rev1a'
            self.add_file(repo, inv, 'file1', 'rev1a', [])
            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
            repo.add_inventory('rev1a', inv, [])
            revision = _mod_revision.Revision('rev1a',
                committer='jrandom@example.com', timestamp=0,
                inventory_sha1='', timezone=0, message='foo', parent_ids=[])
            repo.add_revision('rev1a',revision, inv)

            # make rev1b, which has no Revision, but has an Inventory, and
            # file1
            inv = inventory.Inventory(revision_id='rev1b')
            inv.root.revision = 'rev1b'
            self.add_file(repo, inv, 'file1', 'rev1b', [])
            repo.add_inventory('rev1b', inv, [])

            # make rev2, with file1 and file2
            # file2 is sane
            # file1 has 'rev1b' as an ancestor, even though this is not
            # mentioned by 'rev1a', making it an unreferenced ancestor
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file1', 'rev2', ['rev1a', 'rev1b'])
            self.add_file(repo, inv, 'file2', 'rev2', [])
            self.add_revision(repo, 'rev2', inv, ['rev1a'])

            # make ghost revision rev1c
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev1c', [])

            # make rev3 with file2
            # file2 refers to 'rev1c', which is a ghost in this repository, so
            # file2 cannot have rev1c as its ancestor.
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev3', ['rev1c'])
            self.add_revision(repo, 'rev3', inv, ['rev1c'])
            return repo
        finally:
            # Run cleanups LIFO: commit the write group, then unlock.
            for cleanup in reversed(cleanups):
                cleanup()

    def add_revision(self, repo, revision_id, inv, parent_ids):
        """Add a revision with the given inventory and parents to repo."""
        inv.revision_id = revision_id
        inv.root.revision = revision_id
        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
        repo.add_inventory(revision_id, inv, parent_ids)
        revision = _mod_revision.Revision(revision_id,
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
            timezone=0, message='foo', parent_ids=parent_ids)
        repo.add_revision(revision_id,revision, inv)

    def add_file(self, repo, inv, filename, revision, parents):
        """Add a one-line file entry to inv and its text to repo.texts."""
        file_id = filename + '-id'
        entry = inventory.InventoryFile(file_id, filename, 'TREE_ROOT')
        entry.revision = revision
        entry.text_size = 0
        inv.add(entry)
        text_key = (file_id, revision)
        parent_keys = [(file_id, parent) for parent in parents]
        repo.texts.add_lines(text_key, parent_keys, ['line\n'])

    def test_insert_from_broken_repo(self):
        """Inserting a data stream from a broken repository won't silently
        corrupt the target repository.
        """
        broken_repo = self.make_broken_repository()
        empty_repo = self.make_repository('empty-repo')
        try:
            empty_repo.fetch(broken_repo)
        except (errors.RevisionNotPresent, errors.BzrCheckError):
            # Test successful: compression parent not being copied leads to
            # error.
            return
        # If the fetch did not error, the copied text must still be intact.
        empty_repo.lock_read()
        self.addCleanup(empty_repo.unlock)
        text = empty_repo.texts.get_record_stream(
            [('file2-id', 'rev3')], 'topological', True).next()
        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
1116
 
 
1117
 
 
1118
 
class TestRepositoryPackCollection(TestCaseWithTransport):
    """Tests for RepositoryPackCollection on a pack-0.92 repository."""

    def get_format(self):
        """Return the bzrdir format used by all tests in this class."""
        return bzrdir.format_registry.make_bzrdir('pack-0.92')

    def get_packs(self):
        """Create a fresh repository and return its pack collection."""
        format = self.get_format()
        repo = self.make_repository('.', format=format)
        return repo._pack_collection

    def make_packs_and_alt_repo(self, write_lock=False):
        """Create a pack repo with 3 packs, and access it via a second repo."""
        tree = self.make_branch_and_tree('.', format=self.get_format())
        tree.lock_write()
        self.addCleanup(tree.unlock)
        rev1 = tree.commit('one')
        rev2 = tree.commit('two')
        rev3 = tree.commit('three')
        # Open a second handle on the same repository so tests can observe
        # changes made through one handle from the other.
        r = repository.Repository.open('.')
        if write_lock:
            r.lock_write()
        else:
            r.lock_read()
        self.addCleanup(r.unlock)
        packs = r._pack_collection
        packs.ensure_loaded()
        return tree, r, packs, [rev1, rev2, rev3]

    def test__max_pack_count(self):
        """The maximum pack count is a function of the number of revisions."""
        # no revisions - one pack, so that we can have a revision free repo
        # without it blowing up
        packs = self.get_packs()
        self.assertEqual(1, packs._max_pack_count(0))
        # after that the sum of the digits, - check the first 1-9
        self.assertEqual(1, packs._max_pack_count(1))
        self.assertEqual(2, packs._max_pack_count(2))
        self.assertEqual(3, packs._max_pack_count(3))
        self.assertEqual(4, packs._max_pack_count(4))
        self.assertEqual(5, packs._max_pack_count(5))
        self.assertEqual(6, packs._max_pack_count(6))
        self.assertEqual(7, packs._max_pack_count(7))
        self.assertEqual(8, packs._max_pack_count(8))
        self.assertEqual(9, packs._max_pack_count(9))
        # check the boundary cases with two digits for the next decade
        self.assertEqual(1, packs._max_pack_count(10))
        self.assertEqual(2, packs._max_pack_count(11))
        self.assertEqual(10, packs._max_pack_count(19))
        self.assertEqual(2, packs._max_pack_count(20))
        self.assertEqual(3, packs._max_pack_count(21))
        # check some arbitrary big numbers
        self.assertEqual(25, packs._max_pack_count(112894))

    def test_repr(self):
        packs = self.get_packs()
        # NOTE(review): the parentheses below are unescaped, so the regex
        # treats them as groups rather than literal '(' ')' — it matches
        # more loosely than the author probably intended; confirm and
        # escape if the literal parens matter.
        self.assertContainsRe(repr(packs),
            'RepositoryPackCollection(.*Repository(.*))')

    def test_pack_distribution_zero(self):
        packs = self.get_packs()
        self.assertEqual([0], packs.pack_distribution(0))

    def test_ensure_loaded_unlocked(self):
        # ensure_loaded requires a lock; without one it must raise.
        packs = self.get_packs()
        self.assertRaises(errors.ObjectNotLocked,
                          packs.ensure_loaded)

    def test_pack_distribution_one_to_nine(self):
        # Below ten revisions each revision gets its own pack.
        packs = self.get_packs()
        self.assertEqual([1],
            packs.pack_distribution(1))
        self.assertEqual([1, 1],
            packs.pack_distribution(2))
        self.assertEqual([1, 1, 1],
            packs.pack_distribution(3))
        self.assertEqual([1, 1, 1, 1],
            packs.pack_distribution(4))
        self.assertEqual([1, 1, 1, 1, 1],
            packs.pack_distribution(5))
        self.assertEqual([1, 1, 1, 1, 1, 1],
            packs.pack_distribution(6))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(7))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(8))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(9))

    def test_pack_distribution_stable_at_boundaries(self):
        """When there are multi-rev packs the counts are stable."""
        packs = self.get_packs()
        # in 10s:
        self.assertEqual([10], packs.pack_distribution(10))
        self.assertEqual([10, 1], packs.pack_distribution(11))
        self.assertEqual([10, 10], packs.pack_distribution(20))
        self.assertEqual([10, 10, 1], packs.pack_distribution(21))
        # 100s
        self.assertEqual([100], packs.pack_distribution(100))
        self.assertEqual([100, 1], packs.pack_distribution(101))
        self.assertEqual([100, 10, 1], packs.pack_distribution(111))
        self.assertEqual([100, 100], packs.pack_distribution(200))
        self.assertEqual([100, 100, 1], packs.pack_distribution(201))
        self.assertEqual([100, 100, 10, 1], packs.pack_distribution(211))

    def test_plan_pack_operations_2009_revisions_skip_all_packs(self):
        packs = self.get_packs()
        existing_packs = [(2000, "big"), (9, "medium")]
        # rev count - 2009 -> 2x1000 + 9x1
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 1, 1, 1, 1, 1, 1, 1, 1, 1])
        self.assertEqual([], pack_operations)

    def test_plan_pack_operations_2010_revisions_skip_all_packs(self):
        packs = self.get_packs()
        existing_packs = [(2000, "big"), (9, "medium"), (1, "single")]
        # rev count - 2010 -> 2x1000 + 1x10
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 10])
        self.assertEqual([], pack_operations)

    def test_plan_pack_operations_2010_combines_smallest_two(self):
        packs = self.get_packs()
        existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"),
            (1, "single1")]
        # rev count - 2010 -> 2x1000 + 1x10 (3)
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 10])
        self.assertEqual([[2, ["single2", "single1"]]], pack_operations)

    def test_plan_pack_operations_creates_a_single_op(self):
        packs = self.get_packs()
        existing_packs = [(50, 'a'), (40, 'b'), (30, 'c'), (10, 'd'),
                          (10, 'e'), (6, 'f'), (4, 'g')]
        # rev count 150 -> 1x100 and 5x10
        # The two size 10 packs do not need to be touched. The 50, 40, 30 would
        # be combined into a single 120 size pack, and the 6 & 4 would
        # becombined into a size 10 pack. However, if we have to rewrite them,
        # we save a pack file with no increased I/O by putting them into the
        # same file.
        distribution = packs.pack_distribution(150)
        pack_operations = packs.plan_autopack_combinations(existing_packs,
                                                           distribution)
        self.assertEqual([[130, ['a', 'b', 'c', 'f', 'g']]], pack_operations)

    def test_all_packs_none(self):
        # A repository with no commits has no packs.
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([], packs.all_packs())

    def test_all_packs_one(self):
        # One commit produces exactly one pack.
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([
            packs.get_pack_by_name(packs.names()[0])],
            packs.all_packs())

    def test_all_packs_two(self):
        # Two commits produce two packs (below the autopack threshold).
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.commit('continue')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([
            packs.get_pack_by_name(packs.names()[0]),
            packs.get_pack_by_name(packs.names()[1]),
            ], packs.all_packs())

    def test_get_pack_by_name(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.reset()
        packs.ensure_loaded()
        name = packs.names()[0]
        pack_1 = packs.get_pack_by_name(name)
        # the pack should be correctly initialised
        sizes = packs._names[name]
        rev_index = GraphIndex(packs._index_transport, name + '.rix', sizes[0])
        inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1])
        txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2])
        sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3])
        self.assertEqual(pack_repo.ExistingPack(packs._pack_transport,
            name, rev_index, inv_index, txt_index, sig_index), pack_1)
        # and the same instance should be returned on successive calls.
        self.assertTrue(pack_1 is packs.get_pack_by_name(name))

    def test_reload_pack_names_new_entry(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Add a new pack file into the repository
        rev4 = tree.commit('four')
        new_names = tree.branch.repository._pack_collection.names()
        new_name = set(new_names).difference(names)
        self.assertEqual(1, len(new_name))
        new_name = new_name.pop()
        # The old collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        # And the repository can access the new revision
        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
        # A second reload with nothing changed reports no change.
        self.assertFalse(packs.reload_pack_names())

    def test_reload_pack_names_added_and_removed(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Now repack the whole thing
        tree.branch.repository.pack()
        new_names = tree.branch.repository._pack_collection.names()
        # The other collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
        self.assertFalse(packs.reload_pack_names())

    def test_autopack_reloads_and_stops(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # After we have determined what needs to be autopacked, trigger a
        # full-pack via the other repo which will cause us to re-evaluate and
        # decide we don't need to do anything
        orig_execute = packs._execute_pack_operations
        def _munged_execute_pack_ops(*args, **kwargs):
            tree.branch.repository.pack()
            return orig_execute(*args, **kwargs)
        packs._execute_pack_operations = _munged_execute_pack_ops
        # Force autopack to believe everything should be combined.
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        self.assertFalse(packs.autopack())
        self.assertEqual(1, len(packs.names()))
        self.assertEqual(tree.branch.repository._pack_collection.names(),
                         packs.names())
1365
 
 
1366
 
 
1367
 
class TestPack(TestCaseWithTransport):
    """Tests for the Pack object."""

    def assertCurrentlyEqual(self, left, right):
        """Assert left and right compare equal in both directions."""
        self.assertTrue(left == right)
        self.assertTrue(right == left)
        self.assertFalse(left != right)
        self.assertFalse(right != left)

    def assertCurrentlyNotEqual(self, left, right):
        """Assert left and right compare unequal in both directions."""
        self.assertFalse(left == right)
        self.assertFalse(right == left)
        self.assertTrue(left != right)
        self.assertTrue(right != left)

    def test___eq____ne__(self):
        left = pack_repo.ExistingPack('', '', '', '', '', '')
        right = pack_repo.ExistingPack('', '', '', '', '', '')
        self.assertCurrentlyEqual(left, right)
        # Perturb each attribute in turn on one side, checking that equality
        # breaks, then set the same value on the other side and check that
        # equality is restored.
        for attribute in ('revision_index', 'inventory_index', 'text_index',
                          'signature_index', 'name', 'transport'):
            setattr(left, attribute, 'a')
            self.assertCurrentlyNotEqual(left, right)
            setattr(right, attribute, 'a')
            self.assertCurrentlyEqual(left, right)

    def test_file_name(self):
        # The on-disk file name is the pack's name plus a '.pack' suffix.
        pack = pack_repo.ExistingPack('', 'a_name', '', '', '', '')
        self.assertEqual('a_name.pack', pack.file_name())
1415
 
 
1416
 
 
1417
 
class TestNewPack(TestCaseWithTransport):
    """Tests for pack_repo.NewPack."""

    def test_new_instance_attributes(self):
        # Build a minimal pack collection backed by separate index, upload
        # and pack transports.
        index_t = self.get_transport('index')
        upload_t = self.get_transport('upload')
        pack_t = self.get_transport('pack')
        upload_t.mkdir('.')
        collection = pack_repo.RepositoryPackCollection(
            repo=None,
            transport=self.get_transport('.'),
            index_transport=index_t,
            upload_transport=upload_t,
            pack_transport=pack_t,
            index_builder_class=BTreeBuilder,
            index_class=BTreeGraphIndex,
            use_chk_index=False)
        pack = pack_repo.NewPack(collection)
        # Abort the pack on teardown so its write stream gets closed.
        self.addCleanup(pack.abort)
        # Index builders come from the configured builder class.
        self.assertIsInstance(pack.revision_index, BTreeBuilder)
        self.assertIsInstance(pack.inventory_index, BTreeBuilder)
        self.assertIsInstance(pack._hash, type(osutils.md5()))
        # The transports are the very objects given to the collection.
        self.assertTrue(pack.upload_transport is upload_t)
        self.assertTrue(pack.index_transport is index_t)
        self.assertTrue(pack.pack_transport is pack_t)
        self.assertEqual(None, pack.index_sizes)
        # A fresh pack gets a 20-character random name and a float start time.
        self.assertEqual(20, len(pack.random_name))
        self.assertIsInstance(pack.random_name, str)
        self.assertIsInstance(pack.start_time, float)
1446
 
 
1447
 
 
1448
 
class TestPacker(TestCaseWithTransport):
    """Tests for the packs repository Packer class."""

    def test_pack_optimizes_pack_order(self):
        """Packing a subset of revisions reorders their packs to the front.

        When a Packer copies only some revisions, the pack files that hold
        those revisions should be moved to the front of the packer's pack
        list; the remaining packs keep their original relative order.
        """
        builder = self.make_branch_builder('.', format="1.9")
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('f', 'f-id', 'file', 'content\n'))])
        builder.build_snapshot('B', ['A'],
            [('modify', ('f-id', 'new-content\n'))])
        builder.build_snapshot('C', ['B'],
            [('modify', ('f-id', 'third-content\n'))])
        builder.build_snapshot('D', ['C'],
            [('modify', ('f-id', 'fourth-content\n'))])
        b = builder.get_branch()
        b.lock_read()
        builder.finish_series()
        self.addCleanup(b.unlock)
        # At this point, we should have 4 pack files available
        # Because of how they were built, they correspond to
        # ['D', 'C', 'B', 'A']
        packs = b.repository._pack_collection.packs
        packer = pack_repo.Packer(b.repository._pack_collection,
                                  packs, 'testing',
                                  revision_ids=['B', 'C'])
        # When copying the B & C revisions, their pack files should be moved
        # to the front of the .packs attribute, leaving the others in their
        # original order.
        expected_order = [packs[1], packs[2], packs[0], packs[3]]
        # The returned new pack is irrelevant here; we only care about the
        # reordering side effect on packer.packs (the original code bound it
        # to an unused local).
        packer.pack()
        self.assertEqual(expected_order, packer.packs)
1481
 
 
1482
 
 
1483
 
class TestOptimisingPacker(TestCaseWithTransport):
    """Tests for the OptimisingPacker class."""

    def get_pack_collection(self):
        """Return the pack collection of a freshly created repository."""
        repo = self.make_repository('.')
        return repo._pack_collection

    def test_open_pack_will_optimise(self):
        # An OptimisingPacker opens NewPacks whose index builders are all
        # flagged to optimise for size.
        packer = pack_repo.OptimisingPacker(self.get_pack_collection(),
                                            [], '.test')
        new_pack = packer.open_pack()
        # Abort the pack on teardown so it is cleaned up.
        self.addCleanup(new_pack.abort)
        self.assertIsInstance(new_pack, pack_repo.NewPack)
        for index in (new_pack.revision_index, new_pack.inventory_index,
                      new_pack.text_index, new_pack.signature_index):
            self.assertTrue(index._optimize_for_size)
1500
 
 
1501
 
 
1502
 
class TestCrossFormatPacks(TestCaseWithTransport):
1503
 
 
1504
 
    def log_pack(self, hint=None):
1505
 
        self.calls.append(('pack', hint))
1506
 
        self.orig_pack(hint=hint)
1507
 
        if self.expect_hint:
1508
 
            self.assertTrue(hint)
1509
 
 
1510
 
    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
1511
 
        self.expect_hint = expect_pack_called
1512
 
        self.calls = []
1513
 
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
1514
 
        source_tree.lock_write()
1515
 
        self.addCleanup(source_tree.unlock)
1516
 
        tip = source_tree.commit('foo')
1517
 
        target = self.make_repository('target', format=target_fmt)
1518
 
        target.lock_write()
1519
 
        self.addCleanup(target.unlock)
1520
 
        source = source_tree.branch.repository._get_source(target._format)
1521
 
        self.orig_pack = target.pack
1522
 
        target.pack = self.log_pack
1523
 
        search = target.search_missing_revision_ids(
1524
 
            source_tree.branch.repository, tip)
1525
 
        stream = source.get_stream(search)
1526
 
        from_format = source_tree.branch.repository._format
1527
 
        sink = target._get_sink()
1528
 
        sink.insert_stream(stream, from_format, [])
1529
 
        if expect_pack_called:
1530
 
            self.assertLength(1, self.calls)
1531
 
        else:
1532
 
            self.assertLength(0, self.calls)
1533
 
 
1534
 
    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
1535
 
        self.expect_hint = expect_pack_called
1536
 
        self.calls = []
1537
 
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
1538
 
        source_tree.lock_write()
1539
 
        self.addCleanup(source_tree.unlock)
1540
 
        tip = source_tree.commit('foo')
1541
 
        target = self.make_repository('target', format=target_fmt)
1542
 
        target.lock_write()
1543
 
        self.addCleanup(target.unlock)
1544
 
        source = source_tree.branch.repository
1545
 
        self.orig_pack = target.pack
1546
 
        target.pack = self.log_pack
1547
 
        target.fetch(source)
1548
 
        if expect_pack_called:
1549
 
            self.assertLength(1, self.calls)
1550
 
        else:
1551
 
            self.assertLength(0, self.calls)
1552
 
 
1553
 
    def test_sink_format_hint_no(self):
1554
 
        # When the target format says packing makes no difference, pack is not
1555
 
        # called.
1556
 
        self.run_stream('1.9', 'rich-root-pack', False)
1557
 
 
1558
 
    def test_sink_format_hint_yes(self):
1559
 
        # When the target format says packing makes a difference, pack is
1560
 
        # called.
1561
 
        self.run_stream('1.9', '2a', True)
1562
 
 
1563
 
    def test_sink_format_same_no(self):
1564
 
        # When the formats are the same, pack is not called.
1565
 
        self.run_stream('2a', '2a', False)
1566
 
 
1567
 
    def test_IDS_format_hint_no(self):
1568
 
        # When the target format says packing makes no difference, pack is not
1569
 
        # called.
1570
 
        self.run_fetch('1.9', 'rich-root-pack', False)
1571
 
 
1572
 
    def test_IDS_format_hint_yes(self):
1573
 
        # When the target format says packing makes a difference, pack is
1574
 
        # called.
1575
 
        self.run_fetch('1.9', '2a', True)
1576
 
 
1577
 
    def test_IDS_format_same_no(self):
1578
 
        # When the formats are the same, pack is not called.
1579
 
        self.run_fetch('2a', '2a', False)