~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/tests/test_repository.py

  • Committer: Canonical.com Patch Queue Manager
  • Date: 2011-08-17 18:13:57 UTC
  • mfrom: (5268.7.29 transport-segments)
  • Revision ID: pqm@pqm.ubuntu.com-20110817181357-y5q5eth1hk8bl3om
(jelmer) Allow specifying the colocated branch to use in the branch URL,
 and retrieving the branch name using ControlDir._get_selected_branch.
 (Jelmer Vernooij)

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
 
# Copyright (C) 2006, 2007, 2008 Canonical Ltd
 
1
# Copyright (C) 2006-2011 Canonical Ltd
2
2
#
3
3
# This program is free software; you can redistribute it and/or modify
4
4
# it under the terms of the GNU General Public License as published by
12
12
#
13
13
# You should have received a copy of the GNU General Public License
14
14
# along with this program; if not, write to the Free Software
15
 
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 
15
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16
16
 
17
17
"""Tests for the Repository facility that are not interface tests.
18
18
 
23
23
"""
24
24
 
25
25
from stat import S_ISDIR
26
 
from StringIO import StringIO
27
26
 
28
27
import bzrlib
29
 
from bzrlib.errors import (NotBranchError,
30
 
                           NoSuchFile,
31
 
                           UnknownFormatError,
32
 
                           UnsupportedFormatError,
33
 
                           )
34
 
from bzrlib import graph
 
28
from bzrlib.errors import (
 
29
    UnknownFormatError,
 
30
    UnsupportedFormatError,
 
31
    )
 
32
from bzrlib import (
 
33
    btree_index,
 
34
    graph,
 
35
    symbol_versioning,
 
36
    tests,
 
37
    transport,
 
38
    )
35
39
from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
36
 
from bzrlib.index import GraphIndex, InMemoryGraphIndex
 
40
from bzrlib.index import GraphIndex
37
41
from bzrlib.repository import RepositoryFormat
38
 
from bzrlib.smart import server
39
42
from bzrlib.tests import (
40
43
    TestCase,
41
44
    TestCaseWithTransport,
42
 
    TestSkipped,
43
 
    test_knit,
44
 
    )
45
 
from bzrlib.transport import (
46
 
    fakenfs,
47
 
    get_transport,
48
 
    )
49
 
from bzrlib.transport.memory import MemoryServer
50
 
from bzrlib.util import bencode
 
45
    )
51
46
from bzrlib import (
52
47
    bzrdir,
53
48
    errors,
54
49
    inventory,
55
50
    osutils,
56
 
    progress,
57
51
    repository,
58
52
    revision as _mod_revision,
59
 
    symbol_versioning,
60
53
    upgrade,
 
54
    versionedfile,
 
55
    vf_repository,
61
56
    workingtree,
62
57
    )
63
 
from bzrlib.repofmt import knitrepo, weaverepo, pack_repo
 
58
from bzrlib.repofmt import (
 
59
    groupcompress_repo,
 
60
    knitrepo,
 
61
    knitpack_repo,
 
62
    pack_repo,
 
63
    )
64
64
 
65
65
 
66
66
class TestDefaultFormat(TestCase):
68
68
    def test_get_set_default_format(self):
69
69
        old_default = bzrdir.format_registry.get('default')
70
70
        private_default = old_default().repository_format.__class__
71
 
        old_format = repository.RepositoryFormat.get_default_format()
 
71
        old_format = repository.format_registry.get_default()
72
72
        self.assertTrue(isinstance(old_format, private_default))
73
73
        def make_sample_bzrdir():
74
74
            my_bzrdir = bzrdir.BzrDirMetaFormat1()
88
88
            bzrdir.format_registry.remove('default')
89
89
            bzrdir.format_registry.remove('sample')
90
90
            bzrdir.format_registry.register('default', old_default, '')
91
 
        self.assertIsInstance(repository.RepositoryFormat.get_default_format(),
 
91
        self.assertIsInstance(repository.format_registry.get_default(),
92
92
                              old_format.__class__)
93
93
 
94
94
 
95
95
class SampleRepositoryFormat(repository.RepositoryFormat):
96
96
    """A sample format
97
97
 
98
 
    this format is initializable, unsupported to aid in testing the 
 
98
    this format is initializable, unsupported to aid in testing the
99
99
    open and open(unsupported=True) routines.
100
100
    """
101
101
 
116
116
        return "opened repository."
117
117
 
118
118
 
 
119
class SampleExtraRepositoryFormat(repository.RepositoryFormat):
 
120
    """A sample format that can not be used in a metadir
 
121
 
 
122
    """
 
123
 
 
124
    def get_format_string(self):
 
125
        raise NotImplementedError
 
126
 
 
127
 
119
128
class TestRepositoryFormat(TestCaseWithTransport):
120
129
    """Tests for the Repository format detection used by the bzr meta dir facility.BzrBranchFormat facility."""
121
130
 
122
131
    def test_find_format(self):
123
132
        # is the right format object found for a repository?
124
133
        # create a branch with a few known format objects.
125
 
        # this is not quite the same as 
 
134
        # this is not quite the same as
126
135
        self.build_tree(["foo/", "bar/"])
127
136
        def check_format(format, url):
128
137
            dir = format._matchingbzrdir.initialize(url)
129
138
            format.initialize(dir)
130
 
            t = get_transport(url)
 
139
            t = transport.get_transport(url)
131
140
            found_format = repository.RepositoryFormat.find_format(dir)
132
 
            self.failUnless(isinstance(found_format, format.__class__))
133
 
        check_format(weaverepo.RepositoryFormat7(), "bar")
134
 
        
 
141
            self.assertIsInstance(found_format, format.__class__)
 
142
        check_format(repository.format_registry.get_default(), "bar")
 
143
 
135
144
    def test_find_format_no_repository(self):
136
145
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
137
146
        self.assertRaises(errors.NoRepositoryPresent,
146
155
                          dir)
147
156
 
148
157
    def test_register_unregister_format(self):
 
158
        # Test deprecated format registration functions
149
159
        format = SampleRepositoryFormat()
150
160
        # make a control dir
151
161
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
152
162
        # make a repo
153
163
        format.initialize(dir)
154
164
        # register a format for it.
155
 
        repository.RepositoryFormat.register_format(format)
 
165
        self.applyDeprecated(symbol_versioning.deprecated_in((2, 4, 0)),
 
166
            repository.RepositoryFormat.register_format, format)
156
167
        # which repository.Open will refuse (not supported)
157
 
        self.assertRaises(UnsupportedFormatError, repository.Repository.open, self.get_url())
 
168
        self.assertRaises(UnsupportedFormatError, repository.Repository.open,
 
169
            self.get_url())
158
170
        # but open(unsupported) will work
159
171
        self.assertEqual(format.open(dir), "opened repository.")
160
172
        # unregister the format
161
 
        repository.RepositoryFormat.unregister_format(format)
162
 
 
163
 
 
164
 
class TestFormat6(TestCaseWithTransport):
165
 
 
166
 
    def test_attribute__fetch_order(self):
167
 
        """Weaves need topological data insertion."""
168
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
169
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
170
 
        self.assertEqual('topological', repo._fetch_order)
171
 
 
172
 
    def test_attribute__fetch_uses_deltas(self):
173
 
        """Weaves do not reuse deltas."""
174
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
175
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
176
 
        self.assertEqual(False, repo._fetch_uses_deltas)
177
 
 
178
 
    def test_attribute__fetch_reconcile(self):
179
 
        """Weave repositories need a reconcile after fetch."""
180
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
181
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
182
 
        self.assertEqual(True, repo._fetch_reconcile)
183
 
 
184
 
    def test_no_ancestry_weave(self):
185
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
186
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
187
 
        # We no longer need to create the ancestry.weave file
188
 
        # since it is *never* used.
189
 
        self.assertRaises(NoSuchFile,
190
 
                          control.transport.get,
191
 
                          'ancestry.weave')
192
 
 
193
 
    def test_supports_external_lookups(self):
194
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
195
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
196
 
        self.assertFalse(repo._format.supports_external_lookups)
197
 
 
198
 
 
199
 
class TestFormat7(TestCaseWithTransport):
200
 
 
201
 
    def test_attribute__fetch_order(self):
202
 
        """Weaves need topological data insertion."""
203
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
204
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
205
 
        self.assertEqual('topological', repo._fetch_order)
206
 
 
207
 
    def test_attribute__fetch_uses_deltas(self):
208
 
        """Weaves do not reuse deltas."""
209
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
210
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
211
 
        self.assertEqual(False, repo._fetch_uses_deltas)
212
 
 
213
 
    def test_attribute__fetch_reconcile(self):
214
 
        """Weave repositories need a reconcile after fetch."""
215
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
216
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
217
 
        self.assertEqual(True, repo._fetch_reconcile)
218
 
 
219
 
    def test_disk_layout(self):
220
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
221
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
222
 
        # in case of side effects of locking.
223
 
        repo.lock_write()
224
 
        repo.unlock()
225
 
        # we want:
226
 
        # format 'Bazaar-NG Repository format 7'
227
 
        # lock ''
228
 
        # inventory.weave == empty_weave
229
 
        # empty revision-store directory
230
 
        # empty weaves directory
231
 
        t = control.get_repository_transport(None)
232
 
        self.assertEqualDiff('Bazaar-NG Repository format 7',
233
 
                             t.get('format').read())
234
 
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
235
 
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
236
 
        self.assertEqualDiff('# bzr weave file v5\n'
237
 
                             'w\n'
238
 
                             'W\n',
239
 
                             t.get('inventory.weave').read())
240
 
        # Creating a file with id Foo:Bar results in a non-escaped file name on
241
 
        # disk.
242
 
        control.create_branch()
243
 
        tree = control.create_workingtree()
244
 
        tree.add(['foo'], ['Foo:Bar'], ['file'])
245
 
        tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
246
 
        tree.commit('first post', rev_id='first')
247
 
        self.assertEqualDiff(
248
 
            '# bzr weave file v5\n'
249
 
            'i\n'
250
 
            '1 7fe70820e08a1aac0ef224d9c66ab66831cc4ab1\n'
251
 
            'n first\n'
252
 
            '\n'
253
 
            'w\n'
254
 
            '{ 0\n'
255
 
            '. content\n'
256
 
            '}\n'
257
 
            'W\n',
258
 
            t.get('weaves/74/Foo%3ABar.weave').read())
259
 
 
260
 
    def test_shared_disk_layout(self):
261
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
262
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
263
 
        # we want:
264
 
        # format 'Bazaar-NG Repository format 7'
265
 
        # inventory.weave == empty_weave
266
 
        # empty revision-store directory
267
 
        # empty weaves directory
268
 
        # a 'shared-storage' marker file.
269
 
        # lock is not present when unlocked
270
 
        t = control.get_repository_transport(None)
271
 
        self.assertEqualDiff('Bazaar-NG Repository format 7',
272
 
                             t.get('format').read())
273
 
        self.assertEqualDiff('', t.get('shared-storage').read())
274
 
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
275
 
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
276
 
        self.assertEqualDiff('# bzr weave file v5\n'
277
 
                             'w\n'
278
 
                             'W\n',
279
 
                             t.get('inventory.weave').read())
280
 
        self.assertFalse(t.has('branch-lock'))
281
 
 
282
 
    def test_creates_lockdir(self):
283
 
        """Make sure it appears to be controlled by a LockDir existence"""
284
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
285
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
286
 
        t = control.get_repository_transport(None)
287
 
        # TODO: Should check there is a 'lock' toplevel directory, 
288
 
        # regardless of contents
289
 
        self.assertFalse(t.has('lock/held/info'))
290
 
        repo.lock_write()
291
 
        try:
292
 
            self.assertTrue(t.has('lock/held/info'))
293
 
        finally:
294
 
            # unlock so we don't get a warning about failing to do so
295
 
            repo.unlock()
296
 
 
297
 
    def test_uses_lockdir(self):
298
 
        """repo format 7 actually locks on lockdir"""
299
 
        base_url = self.get_url()
300
 
        control = bzrdir.BzrDirMetaFormat1().initialize(base_url)
301
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
302
 
        t = control.get_repository_transport(None)
303
 
        repo.lock_write()
304
 
        repo.unlock()
305
 
        del repo
306
 
        # make sure the same lock is created by opening it
307
 
        repo = repository.Repository.open(base_url)
308
 
        repo.lock_write()
309
 
        self.assertTrue(t.has('lock/held/info'))
310
 
        repo.unlock()
311
 
        self.assertFalse(t.has('lock/held/info'))
312
 
 
313
 
    def test_shared_no_tree_disk_layout(self):
314
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
315
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
316
 
        repo.set_make_working_trees(False)
317
 
        # we want:
318
 
        # format 'Bazaar-NG Repository format 7'
319
 
        # lock ''
320
 
        # inventory.weave == empty_weave
321
 
        # empty revision-store directory
322
 
        # empty weaves directory
323
 
        # a 'shared-storage' marker file.
324
 
        t = control.get_repository_transport(None)
325
 
        self.assertEqualDiff('Bazaar-NG Repository format 7',
326
 
                             t.get('format').read())
327
 
        ## self.assertEqualDiff('', t.get('lock').read())
328
 
        self.assertEqualDiff('', t.get('shared-storage').read())
329
 
        self.assertEqualDiff('', t.get('no-working-trees').read())
330
 
        repo.set_make_working_trees(True)
331
 
        self.assertFalse(t.has('no-working-trees'))
332
 
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
333
 
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
334
 
        self.assertEqualDiff('# bzr weave file v5\n'
335
 
                             'w\n'
336
 
                             'W\n',
337
 
                             t.get('inventory.weave').read())
338
 
 
339
 
    def test_supports_external_lookups(self):
340
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
341
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
342
 
        self.assertFalse(repo._format.supports_external_lookups)
 
173
        self.applyDeprecated(symbol_versioning.deprecated_in((2, 4, 0)),
 
174
            repository.RepositoryFormat.unregister_format, format)
 
175
 
 
176
 
 
177
class TestRepositoryFormatRegistry(TestCase):
 
178
 
 
179
    def setUp(self):
 
180
        super(TestRepositoryFormatRegistry, self).setUp()
 
181
        self.registry = repository.RepositoryFormatRegistry()
 
182
 
 
183
    def test_register_unregister_format(self):
 
184
        format = SampleRepositoryFormat()
 
185
        self.registry.register(format)
 
186
        self.assertEquals(format, self.registry.get("Sample .bzr repository format."))
 
187
        self.registry.remove(format)
 
188
        self.assertRaises(KeyError, self.registry.get, "Sample .bzr repository format.")
 
189
 
 
190
    def test_get_all(self):
 
191
        format = SampleRepositoryFormat()
 
192
        self.assertEquals([], self.registry._get_all())
 
193
        self.registry.register(format)
 
194
        self.assertEquals([format], self.registry._get_all())
 
195
 
 
196
    def test_register_extra(self):
 
197
        format = SampleExtraRepositoryFormat()
 
198
        self.assertEquals([], self.registry._get_all())
 
199
        self.registry.register_extra(format)
 
200
        self.assertEquals([format], self.registry._get_all())
 
201
 
 
202
    def test_register_extra_lazy(self):
 
203
        self.assertEquals([], self.registry._get_all())
 
204
        self.registry.register_extra_lazy("bzrlib.tests.test_repository",
 
205
            "SampleExtraRepositoryFormat")
 
206
        formats = self.registry._get_all()
 
207
        self.assertEquals(1, len(formats))
 
208
        self.assertIsInstance(formats[0], SampleExtraRepositoryFormat)
343
209
 
344
210
 
345
211
class TestFormatKnit1(TestCaseWithTransport):
346
 
    
 
212
 
347
213
    def test_attribute__fetch_order(self):
348
214
        """Knits need topological data insertion."""
349
215
        repo = self.make_repository('.',
350
216
                format=bzrdir.format_registry.get('knit')())
351
 
        self.assertEqual('topological', repo._fetch_order)
 
217
        self.assertEqual('topological', repo._format._fetch_order)
352
218
 
353
219
    def test_attribute__fetch_uses_deltas(self):
354
220
        """Knits reuse deltas."""
355
221
        repo = self.make_repository('.',
356
222
                format=bzrdir.format_registry.get('knit')())
357
 
        self.assertEqual(True, repo._fetch_uses_deltas)
 
223
        self.assertEqual(True, repo._format._fetch_uses_deltas)
358
224
 
359
225
    def test_disk_layout(self):
360
226
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
447
313
        repo = self.make_repository('.',
448
314
                format=bzrdir.format_registry.get('knit')())
449
315
        inv_xml = '<inventory format="5">\n</inventory>\n'
450
 
        inv = repo.deserialise_inventory('test-rev-id', inv_xml)
 
316
        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
451
317
        self.assertEqual('test-rev-id', inv.root.revision)
452
318
 
453
319
    def test_deserialise_uses_global_revision_id(self):
459
325
        # Arguably, the deserialise_inventory should detect a mismatch, and
460
326
        # raise an error, rather than silently using one revision_id over the
461
327
        # other.
462
 
        self.assertRaises(AssertionError, repo.deserialise_inventory,
 
328
        self.assertRaises(AssertionError, repo._deserialise_inventory,
463
329
            'test-rev-id', inv_xml)
464
 
        inv = repo.deserialise_inventory('other-rev-id', inv_xml)
 
330
        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
465
331
        self.assertEqual('other-rev-id', inv.root.revision)
466
332
 
467
333
    def test_supports_external_lookups(self):
473
339
class DummyRepository(object):
474
340
    """A dummy repository for testing."""
475
341
 
 
342
    _format = None
476
343
    _serializer = None
477
344
 
478
345
    def supports_rich_root(self):
 
346
        if self._format is not None:
 
347
            return self._format.rich_root_data
479
348
        return False
480
349
 
 
350
    def get_graph(self):
 
351
        raise NotImplementedError
 
352
 
 
353
    def get_parent_map(self, revision_ids):
 
354
        raise NotImplementedError
 
355
 
481
356
 
482
357
class InterDummy(repository.InterRepository):
483
358
    """An inter-repository optimised code path for DummyRepository.
490
365
    @staticmethod
491
366
    def is_compatible(repo_source, repo_target):
492
367
        """InterDummy is compatible with DummyRepository."""
493
 
        return (isinstance(repo_source, DummyRepository) and 
 
368
        return (isinstance(repo_source, DummyRepository) and
494
369
            isinstance(repo_target, DummyRepository))
495
370
 
496
371
 
504
379
        # classes do not barf inappropriately when a surprising repository type
505
380
        # is handed to them.
506
381
        dummy_a = DummyRepository()
 
382
        dummy_a._format = RepositoryFormat()
 
383
        dummy_a._format.supports_full_versioned_files = True
507
384
        dummy_b = DummyRepository()
 
385
        dummy_b._format = RepositoryFormat()
 
386
        dummy_b._format.supports_full_versioned_files = True
508
387
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
509
388
 
510
389
    def assertGetsDefaultInterRepository(self, repo_a, repo_b):
511
390
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default.
512
 
        
 
391
 
513
392
        The effective default is now InterSameDataRepository because there is
514
393
        no actual sane default in the presence of incompatible data models.
515
394
        """
516
395
        inter_repo = repository.InterRepository.get(repo_a, repo_b)
517
 
        self.assertEqual(repository.InterSameDataRepository,
 
396
        self.assertEqual(vf_repository.InterSameDataRepository,
518
397
                         inter_repo.__class__)
519
398
        self.assertEqual(repo_a, inter_repo.source)
520
399
        self.assertEqual(repo_b, inter_repo.target)
526
405
        # pair that it returns true on for the is_compatible static method
527
406
        # check
528
407
        dummy_a = DummyRepository()
 
408
        dummy_a._format = RepositoryFormat()
529
409
        dummy_b = DummyRepository()
 
410
        dummy_b._format = RepositoryFormat()
530
411
        repo = self.make_repository('.')
531
412
        # hack dummies to look like repo somewhat.
532
413
        dummy_a._serializer = repo._serializer
 
414
        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
 
415
        dummy_a._format.rich_root_data = repo._format.rich_root_data
 
416
        dummy_a._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
533
417
        dummy_b._serializer = repo._serializer
 
418
        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
 
419
        dummy_b._format.rich_root_data = repo._format.rich_root_data
 
420
        dummy_b._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
534
421
        repository.InterRepository.register_optimiser(InterDummy)
535
422
        try:
536
423
            # we should get the default for something InterDummy returns False
549
436
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
550
437
 
551
438
 
552
 
class TestInterWeaveRepo(TestCaseWithTransport):
553
 
 
554
 
    def test_is_compatible_and_registered(self):
555
 
        # InterWeaveRepo is compatible when either side
556
 
        # is a format 5/6/7 branch
557
 
        from bzrlib.repofmt import knitrepo, weaverepo
558
 
        formats = [weaverepo.RepositoryFormat5(),
559
 
                   weaverepo.RepositoryFormat6(),
560
 
                   weaverepo.RepositoryFormat7()]
561
 
        incompatible_formats = [weaverepo.RepositoryFormat4(),
562
 
                                knitrepo.RepositoryFormatKnit1(),
563
 
                                ]
564
 
        repo_a = self.make_repository('a')
565
 
        repo_b = self.make_repository('b')
566
 
        is_compatible = repository.InterWeaveRepo.is_compatible
567
 
        for source in incompatible_formats:
568
 
            # force incompatible left then right
569
 
            repo_a._format = source
570
 
            repo_b._format = formats[0]
571
 
            self.assertFalse(is_compatible(repo_a, repo_b))
572
 
            self.assertFalse(is_compatible(repo_b, repo_a))
573
 
        for source in formats:
574
 
            repo_a._format = source
575
 
            for target in formats:
576
 
                repo_b._format = target
577
 
                self.assertTrue(is_compatible(repo_a, repo_b))
578
 
        self.assertEqual(repository.InterWeaveRepo,
579
 
                         repository.InterRepository.get(repo_a,
580
 
                                                        repo_b).__class__)
 
439
class TestRepositoryFormat1(knitrepo.RepositoryFormatKnit1):
 
440
 
 
441
    def get_format_string(self):
 
442
        return "Test Format 1"
 
443
 
 
444
 
 
445
class TestRepositoryFormat2(knitrepo.RepositoryFormatKnit1):
 
446
 
 
447
    def get_format_string(self):
 
448
        return "Test Format 2"
581
449
 
582
450
 
583
451
class TestRepositoryConverter(TestCaseWithTransport):
584
452
 
585
453
    def test_convert_empty(self):
586
 
        t = get_transport(self.get_url('.'))
 
454
        source_format = TestRepositoryFormat1()
 
455
        target_format = TestRepositoryFormat2()
 
456
        repository.format_registry.register(source_format)
 
457
        self.addCleanup(repository.format_registry.remove,
 
458
            source_format)
 
459
        repository.format_registry.register(target_format)
 
460
        self.addCleanup(repository.format_registry.remove,
 
461
            target_format)
 
462
        t = self.get_transport()
587
463
        t.mkdir('repository')
588
464
        repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
589
 
        repo = weaverepo.RepositoryFormat7().initialize(repo_dir)
590
 
        target_format = knitrepo.RepositoryFormatKnit1()
 
465
        repo = TestRepositoryFormat1().initialize(repo_dir)
591
466
        converter = repository.CopyConverter(target_format)
592
467
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
593
468
        try:
598
473
        self.assertTrue(isinstance(target_format, repo._format.__class__))
599
474
 
600
475
 
601
 
class TestMisc(TestCase):
602
 
    
603
 
    def test_unescape_xml(self):
604
 
        """We get some kind of error when malformed entities are passed"""
605
 
        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;') 
606
 
 
607
 
 
608
476
class TestRepositoryFormatKnit3(TestCaseWithTransport):
609
477
 
610
478
    def test_attribute__fetch_order(self):
612
480
        format = bzrdir.BzrDirMetaFormat1()
613
481
        format.repository_format = knitrepo.RepositoryFormatKnit3()
614
482
        repo = self.make_repository('.', format=format)
615
 
        self.assertEqual('topological', repo._fetch_order)
 
483
        self.assertEqual('topological', repo._format._fetch_order)
616
484
 
617
485
    def test_attribute__fetch_uses_deltas(self):
618
486
        """Knits reuse deltas."""
619
487
        format = bzrdir.BzrDirMetaFormat1()
620
488
        format.repository_format = knitrepo.RepositoryFormatKnit3()
621
489
        repo = self.make_repository('.', format=format)
622
 
        self.assertEqual(True, repo._fetch_uses_deltas)
 
490
        self.assertEqual(True, repo._format._fetch_uses_deltas)
623
491
 
624
492
    def test_convert(self):
625
493
        """Ensure the upgrade adds weaves for roots"""
657
525
        self.assertFalse(repo._format.supports_external_lookups)
658
526
 
659
527
 
 
528
class Test2a(tests.TestCaseWithMemoryTransport):
 
529
 
 
530
    def test_chk_bytes_uses_custom_btree_parser(self):
 
531
        mt = self.make_branch_and_memory_tree('test', format='2a')
 
532
        mt.lock_write()
 
533
        self.addCleanup(mt.unlock)
 
534
        mt.add([''], ['root-id'])
 
535
        mt.commit('first')
 
536
        index = mt.branch.repository.chk_bytes._index._graph_index._indices[0]
 
537
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
 
538
        # It should also work if we re-open the repo
 
539
        repo = mt.branch.repository.bzrdir.open_repository()
 
540
        repo.lock_read()
 
541
        self.addCleanup(repo.unlock)
 
542
        index = repo.chk_bytes._index._graph_index._indices[0]
 
543
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
 
544
 
 
545
    def test_fetch_combines_groups(self):
 
546
        builder = self.make_branch_builder('source', format='2a')
 
547
        builder.start_series()
 
548
        builder.build_snapshot('1', None, [
 
549
            ('add', ('', 'root-id', 'directory', '')),
 
550
            ('add', ('file', 'file-id', 'file', 'content\n'))])
 
551
        builder.build_snapshot('2', ['1'], [
 
552
            ('modify', ('file-id', 'content-2\n'))])
 
553
        builder.finish_series()
 
554
        source = builder.get_branch()
 
555
        target = self.make_repository('target', format='2a')
 
556
        target.fetch(source.repository)
 
557
        target.lock_read()
 
558
        self.addCleanup(target.unlock)
 
559
        details = target.texts._index.get_build_details(
 
560
            [('file-id', '1',), ('file-id', '2',)])
 
561
        file_1_details = details[('file-id', '1')]
 
562
        file_2_details = details[('file-id', '2')]
 
563
        # The index, and what to read off disk, should be the same for both
 
564
        # versions of the file.
 
565
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
 
566
 
 
567
    def test_fetch_combines_groups(self):
 
568
        builder = self.make_branch_builder('source', format='2a')
 
569
        builder.start_series()
 
570
        builder.build_snapshot('1', None, [
 
571
            ('add', ('', 'root-id', 'directory', '')),
 
572
            ('add', ('file', 'file-id', 'file', 'content\n'))])
 
573
        builder.build_snapshot('2', ['1'], [
 
574
            ('modify', ('file-id', 'content-2\n'))])
 
575
        builder.finish_series()
 
576
        source = builder.get_branch()
 
577
        target = self.make_repository('target', format='2a')
 
578
        target.fetch(source.repository)
 
579
        target.lock_read()
 
580
        self.addCleanup(target.unlock)
 
581
        details = target.texts._index.get_build_details(
 
582
            [('file-id', '1',), ('file-id', '2',)])
 
583
        file_1_details = details[('file-id', '1')]
 
584
        file_2_details = details[('file-id', '2')]
 
585
        # The index, and what to read off disk, should be the same for both
 
586
        # versions of the file.
 
587
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
 
588
 
 
589
    def test_fetch_combines_groups(self):
 
590
        builder = self.make_branch_builder('source', format='2a')
 
591
        builder.start_series()
 
592
        builder.build_snapshot('1', None, [
 
593
            ('add', ('', 'root-id', 'directory', '')),
 
594
            ('add', ('file', 'file-id', 'file', 'content\n'))])
 
595
        builder.build_snapshot('2', ['1'], [
 
596
            ('modify', ('file-id', 'content-2\n'))])
 
597
        builder.finish_series()
 
598
        source = builder.get_branch()
 
599
        target = self.make_repository('target', format='2a')
 
600
        target.fetch(source.repository)
 
601
        target.lock_read()
 
602
        self.addCleanup(target.unlock)
 
603
        details = target.texts._index.get_build_details(
 
604
            [('file-id', '1',), ('file-id', '2',)])
 
605
        file_1_details = details[('file-id', '1')]
 
606
        file_2_details = details[('file-id', '2')]
 
607
        # The index, and what to read off disk, should be the same for both
 
608
        # versions of the file.
 
609
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
 
610
 
 
611
    def test_format_pack_compresses_True(self):
 
612
        repo = self.make_repository('repo', format='2a')
 
613
        self.assertTrue(repo._format.pack_compresses)
 
614
 
 
615
    def test_inventories_use_chk_map_with_parent_base_dict(self):
 
616
        tree = self.make_branch_and_memory_tree('repo', format="2a")
 
617
        tree.lock_write()
 
618
        tree.add([''], ['TREE_ROOT'])
 
619
        revid = tree.commit("foo")
 
620
        tree.unlock()
 
621
        tree.lock_read()
 
622
        self.addCleanup(tree.unlock)
 
623
        inv = tree.branch.repository.get_inventory(revid)
 
624
        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
 
625
        inv.parent_id_basename_to_file_id._ensure_root()
 
626
        inv.id_to_entry._ensure_root()
 
627
        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
 
628
        self.assertEqual(65536,
 
629
            inv.parent_id_basename_to_file_id._root_node.maximum_size)
 
630
 
 
631
    def test_autopack_unchanged_chk_nodes(self):
        # at 20 unchanged commits, chk pages are packed that are split into
        # two groups such that the new pack being made doesn't have all its
        # pages in the source packs (though they are in the repository).
        # Use a memory backed repository, we don't need to hit disk for this
        # NOTE(review): there is no explicit assertion here -- presumably
        # the test passes if the autopack triggered by the 20th commit does
        # not raise while relocating the unchanged chk pages; confirm intent.
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        for pos in range(20):
            tree.commit(str(pos))
 
642
 
 
643
    def test_pack_with_hint(self):
 
644
        tree = self.make_branch_and_memory_tree('tree', format='2a')
 
645
        tree.lock_write()
 
646
        self.addCleanup(tree.unlock)
 
647
        tree.add([''], ['TREE_ROOT'])
 
648
        # 1 commit to leave untouched
 
649
        tree.commit('1')
 
650
        to_keep = tree.branch.repository._pack_collection.names()
 
651
        # 2 to combine
 
652
        tree.commit('2')
 
653
        tree.commit('3')
 
654
        all = tree.branch.repository._pack_collection.names()
 
655
        combine = list(set(all) - set(to_keep))
 
656
        self.assertLength(3, all)
 
657
        self.assertLength(2, combine)
 
658
        tree.branch.repository.pack(hint=combine)
 
659
        final = tree.branch.repository._pack_collection.names()
 
660
        self.assertLength(2, final)
 
661
        self.assertFalse(combine[0] in final)
 
662
        self.assertFalse(combine[1] in final)
 
663
        self.assertSubset(to_keep, final)
 
664
 
 
665
    def test_stream_source_to_gc(self):
 
666
        source = self.make_repository('source', format='2a')
 
667
        target = self.make_repository('target', format='2a')
 
668
        stream = source._get_source(target._format)
 
669
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)
 
670
 
 
671
    def test_stream_source_to_non_gc(self):
 
672
        source = self.make_repository('source', format='2a')
 
673
        target = self.make_repository('target', format='rich-root-pack')
 
674
        stream = source._get_source(target._format)
 
675
        # We don't want the child GroupCHKStreamSource
 
676
        self.assertIs(type(stream), vf_repository.StreamSource)
 
677
 
 
678
    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
        """get_stream_for_missing_keys must send every chk page a missing
        inventory references, not only the pages new in that revision."""
        source_builder = self.make_branch_builder('source',
                            format='2a')
        # We have to build a fairly large tree, so that we are sure the chk
        # pages will have split into multiple pages.
        entries = [('add', ('', 'a-root-id', 'directory', None))]
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                fname = i + j
                fid = fname + '-id'
                content = 'content for %s\n' % (fname,)
                entries.append(('add', (fname, fid, 'file', content)))
        source_builder.start_series()
        source_builder.build_snapshot('rev-1', None, entries)
        # Now change a few of them, so we get a few new pages for the second
        # revision
        source_builder.build_snapshot('rev-2', ['rev-1'], [
            ('modify', ('aa-id', 'new content for aa-id\n')),
            ('modify', ('cc-id', 'new content for cc-id\n')),
            ('modify', ('zz-id', 'new content for zz-id\n')),
            ])
        source_builder.finish_series()
        source_branch = source_builder.get_branch()
        source_branch.lock_read()
        self.addCleanup(source_branch.unlock)
        target = self.make_repository('target', format='2a')
        source = source_branch.repository._get_source(target._format)
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)

        # On a regular pass, getting the inventories and chk pages for rev-2
        # would only get the newly created chk pages
        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
                                    set(['rev-2']))
        simple_chk_records = []
        for vf_name, substream in source.get_stream(search):
            if vf_name == 'chk_bytes':
                for record in substream:
                    simple_chk_records.append(record.key)
            else:
                # Other substreams still have to be drained to advance the
                # overall stream.
                for _ in substream:
                    continue
        # The root page plus the pages that actually changed.
        # NOTE(review): the expected list holds 4 keys, so presumably root +
        # the leaf pages of the 3 modified files; the historical comment
        # here ("3 pages ... + 2 pages") did not match -- confirm counts.
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
                         simple_chk_records)
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
        # we should get a much larger set of pages.
        missing = [('inventories', 'rev-2')]
        full_chk_records = []
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
            if vf_name == 'inventories':
                for record in substream:
                    self.assertEqual(('rev-2',), record.key)
            elif vf_name == 'chk_bytes':
                for record in substream:
                    full_chk_records.append(record.key)
            else:
                self.fail('Should not be getting a stream of %s' % (vf_name,))
        # We have 257 records now. This is because we have 1 root page, and 256
        # leaf pages in a complete listing.
        self.assertEqual(257, len(full_chk_records))
        self.assertSubset(simple_chk_records, full_chk_records)
 
742
 
 
743
    def test_inconsistency_fatal(self):
 
744
        repo = self.make_repository('repo', format='2a')
 
745
        self.assertTrue(repo.revisions._index._inconsistency_fatal)
 
746
        self.assertFalse(repo.texts._index._inconsistency_fatal)
 
747
        self.assertFalse(repo.inventories._index._inconsistency_fatal)
 
748
        self.assertFalse(repo.signatures._index._inconsistency_fatal)
 
749
        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
 
750
 
 
751
 
 
752
class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):
    """Check which stream source is chosen between knit-pack repositories.

    An exact format match selects the optimised KnitPackStreamSource;
    any mismatch falls back to the generic vf_repository.StreamSource.
    """

    def _stream_source_for(self, source_format, target_format):
        # Build a local source/target repository pair and return the stream
        # source the fetch machinery selects for them.
        source = self.make_repository('source', format=source_format)
        target = self.make_repository('target', format=target_format)
        return source._get_source(target._format)

    def test_source_to_exact_pack_092(self):
        stream_source = self._stream_source_for('pack-0.92', 'pack-0.92')
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_rich_root_pack(self):
        stream_source = self._stream_source_for('rich-root-pack',
                                                'rich-root-pack')
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19(self):
        stream_source = self._stream_source_for('1.9', '1.9')
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19_rich_root(self):
        stream_source = self._stream_source_for('1.9-rich-root',
                                                '1.9-rich-root')
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_remote_exact_pack_19(self):
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        self.make_repository('target', format='1.9')
        # Re-open the target over the smart server so the source sees a
        # remote repository of the exact same format.
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_stream_source_to_non_exact(self):
        stream = self._stream_source_for('pack-0.92', '1.9')
        self.assertIs(type(stream), vf_repository.StreamSource)

    def test_stream_source_to_non_exact_rich_root(self):
        stream = self._stream_source_for('1.9', '1.9-rich-root')
        self.assertIs(type(stream), vf_repository.StreamSource)

    def test_source_to_remote_non_exact_pack_19(self):
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        self.make_repository('target', format='1.6')
        # The remote target has a different format, so the generic source
        # must be selected.
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIs(type(stream_source), vf_repository.StreamSource)

    def test_stream_source_to_knit(self):
        stream = self._stream_source_for('pack-0.92', 'dirstate')
        self.assertIs(type(stream), vf_repository.StreamSource)
 
813
 
 
814
 
 
815
class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
    """Tests for _find_parent_ids_of_revisions."""

    def setUp(self):
        # Every test starts from a branch holding a single 'initial'
        # root-only revision.
        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
        self.builder = self.make_branch_builder('source')
        self.builder.start_series()
        self.builder.build_snapshot('initial', None,
            [('add', ('', 'tree-root', 'directory', None))])
        self.repo = self.builder.get_branch().repository
        self.addCleanup(self.builder.finish_series)

    def assertParentIds(self, expected_result, rev_set):
        """Assert _find_parent_ids_of_revisions(rev_set) returns exactly
        expected_result, ignoring ordering."""
        self.assertEqual(sorted(expected_result),
            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))

    def test_simple(self):
        # The parent of the set's only member, being outside the set, is
        # returned.
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        rev_set = ['revid2']
        self.assertParentIds(['revid1'], rev_set)

    def test_not_first_parent(self):
        # Parents that are themselves inside the set are not reported.
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2'], [])
        rev_set = ['revid3', 'revid2']
        self.assertParentIds(['revid1'], rev_set)

    def test_not_null(self):
        # The root revision has no parent ids to report.
        rev_set = ['initial']
        self.assertParentIds([], rev_set)

    def test_not_null_set(self):
        # A set containing only NULL_REVISION yields no parents.
        self.builder.build_snapshot('revid1', None, [])
        rev_set = [_mod_revision.NULL_REVISION]
        self.assertParentIds([], rev_set)

    def test_ghost(self):
        # A ghost id appearing in the requested set itself contributes no
        # parents; only revid1's parent ('initial') is reported.
        self.builder.build_snapshot('revid1', None, [])
        rev_set = ['ghost', 'revid1']
        self.assertParentIds(['initial'], rev_set)

    def test_ghost_parent(self):
        # A ghost that is a *parent* of a set member is reported.
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
        rev_set = ['revid2', 'revid1']
        self.assertParentIds(['ghost', 'initial'], rev_set)

    def test_righthand_parent(self):
        # Right-hand (merge) parents outside the set are reported too.
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2a', ['revid1'], [])
        self.builder.build_snapshot('revid2b', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
        rev_set = ['revid3', 'revid2a']
        self.assertParentIds(['revid1', 'revid2b'], rev_set)
 
871
 
 
872
 
660
873
class TestWithBrokenRepo(TestCaseWithTransport):
661
874
    """These tests seem to be more appropriate as interface tests?"""
662
875
 
675
888
            inv = inventory.Inventory(revision_id='rev1a')
676
889
            inv.root.revision = 'rev1a'
677
890
            self.add_file(repo, inv, 'file1', 'rev1a', [])
 
891
            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
678
892
            repo.add_inventory('rev1a', inv, [])
679
893
            revision = _mod_revision.Revision('rev1a',
680
894
                committer='jrandom@example.com', timestamp=0,
715
929
    def add_revision(self, repo, revision_id, inv, parent_ids):
716
930
        inv.revision_id = revision_id
717
931
        inv.root.revision = revision_id
 
932
        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
718
933
        repo.add_inventory(revision_id, inv, parent_ids)
719
934
        revision = _mod_revision.Revision(revision_id,
720
935
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
737
952
        """
738
953
        broken_repo = self.make_broken_repository()
739
954
        empty_repo = self.make_repository('empty-repo')
740
 
        self.assertRaises(errors.RevisionNotPresent, empty_repo.fetch, broken_repo)
 
955
        try:
 
956
            empty_repo.fetch(broken_repo)
 
957
        except (errors.RevisionNotPresent, errors.BzrCheckError):
 
958
            # Test successful: compression parent not being copied leads to
 
959
            # error.
 
960
            return
 
961
        empty_repo.lock_read()
 
962
        self.addCleanup(empty_repo.unlock)
 
963
        text = empty_repo.texts.get_record_stream(
 
964
            [('file2-id', 'rev3')], 'topological', True).next()
 
965
        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
741
966
 
742
967
 
743
968
class TestRepositoryPackCollection(TestCaseWithTransport):
750
975
        repo = self.make_repository('.', format=format)
751
976
        return repo._pack_collection
752
977
 
 
978
    def make_packs_and_alt_repo(self, write_lock=False):
 
979
        """Create a pack repo with 3 packs, and access it via a second repo."""
 
980
        tree = self.make_branch_and_tree('.', format=self.get_format())
 
981
        tree.lock_write()
 
982
        self.addCleanup(tree.unlock)
 
983
        rev1 = tree.commit('one')
 
984
        rev2 = tree.commit('two')
 
985
        rev3 = tree.commit('three')
 
986
        r = repository.Repository.open('.')
 
987
        if write_lock:
 
988
            r.lock_write()
 
989
        else:
 
990
            r.lock_read()
 
991
        self.addCleanup(r.unlock)
 
992
        packs = r._pack_collection
 
993
        packs.ensure_loaded()
 
994
        return tree, r, packs, [rev1, rev2, rev3]
 
995
 
 
996
    def test__clear_obsolete_packs(self):
 
997
        packs = self.get_packs()
 
998
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
 
999
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
 
1000
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
 
1001
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
 
1002
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
 
1003
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
 
1004
        res = packs._clear_obsolete_packs()
 
1005
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
 
1006
        self.assertEqual([], obsolete_pack_trans.list_dir('.'))
 
1007
 
 
1008
    def test__clear_obsolete_packs_preserve(self):
 
1009
        packs = self.get_packs()
 
1010
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
 
1011
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
 
1012
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
 
1013
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
 
1014
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
 
1015
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
 
1016
        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
 
1017
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
 
1018
        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
 
1019
                         sorted(obsolete_pack_trans.list_dir('.')))
 
1020
 
753
1021
    def test__max_pack_count(self):
754
1022
        """The maximum pack count is a function of the number of revisions."""
755
1023
        # no revisions - one pack, so that we can have a revision free repo
775
1043
        # check some arbitrary big numbers
776
1044
        self.assertEqual(25, packs._max_pack_count(112894))
777
1045
 
 
1046
    def test_repr(self):
        # NOTE(review): the parentheses in this pattern are unescaped, so
        # the regex engine treats them as groups rather than literal
        # parens -- the check is effectively just that the two class names
        # appear in order.  Escaping them would make the assertion
        # stricter; confirm the intended strictness before changing it.
        packs = self.get_packs()
        self.assertContainsRe(repr(packs),
            'RepositoryPackCollection(.*Repository(.*))')
 
1050
 
 
1051
    def test__obsolete_packs(self):
        """_obsolete_packs moves a pack's remaining files aside even when a
        concurrent process already renamed some of them."""
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        # Schedule this one for removal
        packs._remove_pack_from_memory(pack)
        # Simulate a concurrent update by renaming the .pack file and one of
        # the indices
        packs.transport.rename('packs/%s.pack' % (names[0],),
                               'obsolete_packs/%s.pack' % (names[0],))
        packs.transport.rename('indices/%s.iix' % (names[0],),
                               'obsolete_packs/%s.iix' % (names[0],))
        # Now trigger the obsoletion, and ensure that all the remaining files
        # are still renamed
        packs._obsolete_packs([pack])
        self.assertEqual([n + '.pack' for n in names[1:]],
                         sorted(packs._pack_transport.list_dir('.')))
        # names[0] should not be present in the index anymore
        self.assertEqual(names[1:],
            sorted(set([osutils.splitext(n)[0] for n in
                        packs._index_transport.list_dir('.')])))
 
1072
 
778
1073
    def test_pack_distribution_zero(self):
779
1074
        packs = self.get_packs()
780
1075
        self.assertEqual([0], packs.pack_distribution(0))
903
1198
        tree.lock_read()
904
1199
        self.addCleanup(tree.unlock)
905
1200
        packs = tree.branch.repository._pack_collection
 
1201
        packs.reset()
906
1202
        packs.ensure_loaded()
907
1203
        name = packs.names()[0]
908
1204
        pack_1 = packs.get_pack_by_name(name)
917
1213
        # and the same instance should be returned on successive calls.
918
1214
        self.assertTrue(pack_1 is packs.get_pack_by_name(name))
919
1215
 
 
1216
    def test_reload_pack_names_new_entry(self):
 
1217
        tree, r, packs, revs = self.make_packs_and_alt_repo()
 
1218
        names = packs.names()
 
1219
        # Add a new pack file into the repository
 
1220
        rev4 = tree.commit('four')
 
1221
        new_names = tree.branch.repository._pack_collection.names()
 
1222
        new_name = set(new_names).difference(names)
 
1223
        self.assertEqual(1, len(new_name))
 
1224
        new_name = new_name.pop()
 
1225
        # The old collection hasn't noticed yet
 
1226
        self.assertEqual(names, packs.names())
 
1227
        self.assertTrue(packs.reload_pack_names())
 
1228
        self.assertEqual(new_names, packs.names())
 
1229
        # And the repository can access the new revision
 
1230
        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
 
1231
        self.assertFalse(packs.reload_pack_names())
 
1232
 
 
1233
    def test_reload_pack_names_added_and_removed(self):
 
1234
        tree, r, packs, revs = self.make_packs_and_alt_repo()
 
1235
        names = packs.names()
 
1236
        # Now repack the whole thing
 
1237
        tree.branch.repository.pack()
 
1238
        new_names = tree.branch.repository._pack_collection.names()
 
1239
        # The other collection hasn't noticed yet
 
1240
        self.assertEqual(names, packs.names())
 
1241
        self.assertTrue(packs.reload_pack_names())
 
1242
        self.assertEqual(new_names, packs.names())
 
1243
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
 
1244
        self.assertFalse(packs.reload_pack_names())
 
1245
 
 
1246
    def test_reload_pack_names_preserves_pending(self):
        """Reloading pack names keeps this collection's own uncommitted
        additions and removals pending rather than discarding them."""
        # TODO: Update this to also test for pending-deleted names
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # We will add one pack (via start_write_group + insert_record_stream),
        # and remove another pack (via _remove_pack_from_memory)
        orig_names = packs.names()
        orig_at_load = packs._packs_at_load
        # Python 2 iterator protocol: take an arbitrary existing pack name.
        to_remove_name = iter(orig_names).next()
        r.start_write_group()
        self.addCleanup(r.abort_write_group)
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
            ('text', 'rev'), (), None, 'content\n')])
        new_pack = packs._new_pack
        self.assertTrue(new_pack.data_inserted())
        new_pack.finish()
        packs.allocate(new_pack)
        packs._new_pack = None
        removed_pack = packs.get_pack_by_name(to_remove_name)
        packs._remove_pack_from_memory(removed_pack)
        names = packs.names()
        # Before the reload: the diff must show exactly our one addition
        # and one removal.
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))
        packs.reload_pack_names()
        reloaded_names = packs.names()
        self.assertEqual(orig_at_load, packs._packs_at_load)
        self.assertEqual(names, reloaded_names)
        # After the reload: the very same pending changes must still be
        # reported by _diff_pack_names.
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))
 
1284
 
 
1285
    def test_autopack_obsoletes_new_pack(self):
 
1286
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1287
        packs._max_pack_count = lambda x: 1
 
1288
        packs.pack_distribution = lambda x: [10]
 
1289
        r.start_write_group()
 
1290
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
 
1291
            ('bogus-rev',), (), None, 'bogus-content\n')])
 
1292
        # This should trigger an autopack, which will combine everything into a
 
1293
        # single pack file.
 
1294
        new_names = r.commit_write_group()
 
1295
        names = packs.names()
 
1296
        self.assertEqual(1, len(names))
 
1297
        self.assertEqual([names[0] + '.pack'],
 
1298
                         packs._pack_transport.list_dir('.'))
 
1299
 
 
1300
    def test_autopack_reloads_and_stops(self):
 
1301
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1302
        # After we have determined what needs to be autopacked, trigger a
 
1303
        # full-pack via the other repo which will cause us to re-evaluate and
 
1304
        # decide we don't need to do anything
 
1305
        orig_execute = packs._execute_pack_operations
 
1306
        def _munged_execute_pack_ops(*args, **kwargs):
 
1307
            tree.branch.repository.pack()
 
1308
            return orig_execute(*args, **kwargs)
 
1309
        packs._execute_pack_operations = _munged_execute_pack_ops
 
1310
        packs._max_pack_count = lambda x: 1
 
1311
        packs.pack_distribution = lambda x: [10]
 
1312
        self.assertFalse(packs.autopack())
 
1313
        self.assertEqual(1, len(packs.names()))
 
1314
        self.assertEqual(tree.branch.repository._pack_collection.names(),
 
1315
                         packs.names())
 
1316
 
 
1317
    def test__save_pack_names(self):
 
1318
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1319
        names = packs.names()
 
1320
        pack = packs.get_pack_by_name(names[0])
 
1321
        packs._remove_pack_from_memory(pack)
 
1322
        packs._save_pack_names(obsolete_packs=[pack])
 
1323
        cur_packs = packs._pack_transport.list_dir('.')
 
1324
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
 
1325
        # obsolete_packs will also have stuff like .rix and .iix present.
 
1326
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
 
1327
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
 
1328
        self.assertEqual([pack.name], sorted(obsolete_names))
 
1329
 
 
1330
    def test__save_pack_names_already_obsoleted(self):
 
1331
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1332
        names = packs.names()
 
1333
        pack = packs.get_pack_by_name(names[0])
 
1334
        packs._remove_pack_from_memory(pack)
 
1335
        # We are going to simulate a concurrent autopack by manually obsoleting
 
1336
        # the pack directly.
 
1337
        packs._obsolete_packs([pack])
 
1338
        packs._save_pack_names(clear_obsolete_packs=True,
 
1339
                               obsolete_packs=[pack])
 
1340
        cur_packs = packs._pack_transport.list_dir('.')
 
1341
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
 
1342
        # Note that while we set clear_obsolete_packs=True, it should not
 
1343
        # delete a pack file that we have also scheduled for obsoletion.
 
1344
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
 
1345
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
 
1346
        self.assertEqual([pack.name], sorted(obsolete_names))
 
1347
 
 
1348
 
920
1349
 
921
1350
class TestPack(TestCaseWithTransport):
922
1351
    """Tests for the Pack object."""
976
1405
        pack_transport = self.get_transport('pack')
977
1406
        index_transport = self.get_transport('index')
978
1407
        upload_transport.mkdir('.')
979
 
        pack = pack_repo.NewPack(upload_transport, index_transport,
980
 
            pack_transport, index_builder_class=BTreeBuilder,
981
 
            index_class=BTreeGraphIndex)
 
1408
        collection = pack_repo.RepositoryPackCollection(
 
1409
            repo=None,
 
1410
            transport=self.get_transport('.'),
 
1411
            index_transport=index_transport,
 
1412
            upload_transport=upload_transport,
 
1413
            pack_transport=pack_transport,
 
1414
            index_builder_class=BTreeBuilder,
 
1415
            index_class=BTreeGraphIndex,
 
1416
            use_chk_index=False)
 
1417
        pack = pack_repo.NewPack(collection)
 
1418
        self.addCleanup(pack.abort) # Make sure the write stream gets closed
982
1419
        self.assertIsInstance(pack.revision_index, BTreeBuilder)
983
1420
        self.assertIsInstance(pack.inventory_index, BTreeBuilder)
984
1421
        self.assertIsInstance(pack._hash, type(osutils.md5()))
994
1431
class TestPacker(TestCaseWithTransport):
995
1432
    """Tests for the packs repository Packer class."""
996
1433
 
997
 
    # To date, this class has been factored out and nothing new added to it;
998
 
    # thus there are not yet any tests.
999
 
 
1000
 
 
1001
 
class TestInterDifferingSerializer(TestCaseWithTransport):
1002
 
 
1003
 
    def test_progress_bar(self):
1004
 
        tree = self.make_branch_and_tree('tree')
1005
 
        tree.commit('rev1', rev_id='rev-1')
1006
 
        tree.commit('rev2', rev_id='rev-2')
1007
 
        tree.commit('rev3', rev_id='rev-3')
1008
 
        repo = self.make_repository('repo')
1009
 
        inter_repo = repository.InterDifferingSerializer(
1010
 
            tree.branch.repository, repo)
1011
 
        pb = progress.InstrumentedProgress(to_file=StringIO())
1012
 
        pb.never_throttle = True
1013
 
        inter_repo.fetch('rev-1', pb)
1014
 
        self.assertEqual('Transferring revisions', pb.last_msg)
1015
 
        self.assertEqual(1, pb.last_cnt)
1016
 
        self.assertEqual(1, pb.last_total)
1017
 
        inter_repo.fetch('rev-3', pb)
1018
 
        self.assertEqual(2, pb.last_cnt)
1019
 
        self.assertEqual(2, pb.last_total)
 
1434
    def test_pack_optimizes_pack_order(self):
 
1435
        builder = self.make_branch_builder('.', format="1.9")
 
1436
        builder.start_series()
 
1437
        builder.build_snapshot('A', None, [
 
1438
            ('add', ('', 'root-id', 'directory', None)),
 
1439
            ('add', ('f', 'f-id', 'file', 'content\n'))])
 
1440
        builder.build_snapshot('B', ['A'],
 
1441
            [('modify', ('f-id', 'new-content\n'))])
 
1442
        builder.build_snapshot('C', ['B'],
 
1443
            [('modify', ('f-id', 'third-content\n'))])
 
1444
        builder.build_snapshot('D', ['C'],
 
1445
            [('modify', ('f-id', 'fourth-content\n'))])
 
1446
        b = builder.get_branch()
 
1447
        b.lock_read()
 
1448
        builder.finish_series()
 
1449
        self.addCleanup(b.unlock)
 
1450
        # At this point, we should have 4 pack files available
 
1451
        # Because of how they were built, they correspond to
 
1452
        # ['D', 'C', 'B', 'A']
 
1453
        packs = b.repository._pack_collection.packs
 
1454
        packer = knitpack_repo.KnitPacker(b.repository._pack_collection,
 
1455
                                  packs, 'testing',
 
1456
                                  revision_ids=['B', 'C'])
 
1457
        # Now, when we are copying the B & C revisions, their pack files should
 
1458
        # be moved to the front of the stack
 
1459
        # The new ordering moves B & C to the front of the .packs attribute,
 
1460
        # and leaves the others in the original order.
 
1461
        new_packs = [packs[1], packs[2], packs[0], packs[3]]
 
1462
        new_pack = packer.pack()
 
1463
        self.assertEqual(new_packs, packer.packs)
 
1464
 
 
1465
 
 
1466
class TestOptimisingPacker(TestCaseWithTransport):
    """Tests for the OptimisingPacker class."""

    def get_pack_collection(self):
        """Return the pack collection of a freshly created repository."""
        return self.make_repository('.')._pack_collection

    def test_open_pack_will_optimise(self):
        """Packs opened by an optimising packer have size-optimised indices."""
        packer = knitpack_repo.OptimisingKnitPacker(self.get_pack_collection(),
                                                    [], '.test')
        new_pack = packer.open_pack()
        # Abort on teardown so no stray pack file is left behind.
        self.addCleanup(new_pack.abort)
        self.assertIsInstance(new_pack, pack_repo.NewPack)
        for index in (new_pack.revision_index, new_pack.inventory_index,
                      new_pack.text_index, new_pack.signature_index):
            self.assertTrue(index._optimize_for_size)
 
1483
 
 
1484
 
 
1485
class TestGCCHKPacker(TestCaseWithTransport):
    """Tests for the groupcompress (2a format) repository packer."""

    def make_abc_branch(self):
        """Return a branch with three linear revisions A -> B -> C."""
        bb = self.make_branch_builder('source')
        bb.start_series()
        bb.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('file', 'file-id', 'file', 'content\n')),
            ])
        bb.build_snapshot('B', ['A'], [
            ('add', ('dir', 'dir-id', 'directory', None))])
        bb.build_snapshot('C', ['B'], [
            ('modify', ('file-id', 'new content\n'))])
        bb.finish_series()
        return bb.get_branch()

    def make_branch_with_disjoint_inventory_and_revision(self):
        """Make a repo where rev A's Revision and Inventory texts are in
        separate pack files.

        :return: (repository,
                  pack_name_with_rev_A_Revision,
                  pack_name_with_rev_A_Inventory,
                  pack_name_with_rev_C_content)
        """
        source_b = self.make_abc_branch()
        base_b = source_b.bzrdir.sprout('base', revision_id='A').open_branch()
        stacked_b = base_b.bzrdir.sprout('stacked',
                                         stacked=True).open_branch()
        stacked_b.lock_write()
        self.addCleanup(stacked_b.unlock)
        stacked_b.fetch(source_b, 'B')
        # Re-open the stacked repo directly (without fallbacks) so that we
        # can fill in the A rev ourselves.
        repo = stacked_b.bzrdir.open_repository()
        repo.lock_write()
        self.addCleanup(repo.unlock)
        # The stacked fetch brought in A's inventory, but not its Revision.
        self.assertEqual([('A',), ('B',)], sorted(repo.inventories.keys()))
        self.assertEqual([('B',)], sorted(repo.revisions.keys()))
        initial_pack_names = repo._pack_collection.names()
        # Several packs exist; locate the one holding A's inventory.
        for candidate in initial_pack_names:
            pack = repo._pack_collection.get_pack_by_name(candidate)
            inv_keys = [entry[1]
                        for entry in pack.inventory_index.iter_all_entries()]
            if ('A',) in inv_keys:
                inv_a_pack_name = candidate
                break
        else:
            self.fail('Could not find pack containing A\'s inventory')
        # Fetching A now creates exactly one new pack with A's Revision.
        repo.fetch(source_b.repository, 'A')
        self.assertEqual([('A',), ('B',)], sorted(repo.revisions.keys()))
        after_a_names = set(repo._pack_collection.names())
        only_a = after_a_names.difference(initial_pack_names)
        self.assertEqual(1, len(only_a))
        rev_a_pack_name = list(only_a)[0]
        # Fetch C as well, so there is more than one pack file to join.
        repo.fetch(source_b.repository, 'C')
        only_c = set(repo._pack_collection.names()).difference(after_a_names)
        self.assertEqual(1, len(only_c))
        rev_c_pack_name = list(only_c)[0]
        return (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name)

    def test_pack_with_distant_inventories(self):
        """Repacking copes with an inventory in a pack apart from its rev.

        See https://bugs.launchpad.net/bzr/+bug/437003: when repacking, an
        inventory may live in a different pack file than its associated
        revision; an autopack could miss that inventory and complain.
        """
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        a_pack = repo._pack_collection.get_pack_by_name(rev_a_pack_name)
        c_pack = repo._pack_collection.get_pack_by_name(rev_c_pack_name)
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
                                                [a_pack, c_pack],
                                                '.test-pack')
        # Bug #437003 made this raise ValueError; once fixed it must
        # succeed even though A's inventory is in an uncombined pack.
        packer.pack()

    def test_pack_with_missing_inventory(self):
        """A genuinely absent inventory is reported as an error."""
        # Like test_pack_with_distant_inventories, but this time force the
        # A inventory to actually be gone from the repository.
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        inv_a_pack = repo._pack_collection.get_pack_by_name(inv_a_pack_name)
        repo._pack_collection._remove_pack_from_memory(inv_a_pack)
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
            repo._pack_collection.all_packs(), '.test-pack')
        err = self.assertRaises(ValueError, packer.pack)
        packer.new_pack.abort()
        self.assertContainsRe(str(err),
            r"We are missing inventories for revisions: .*'A'")
 
1583
 
 
1584
 
 
1585
class TestCrossFormatPacks(TestCaseWithTransport):
    """Cross-format fetches pack the target only when its format says
    packing makes a difference."""

    def log_pack(self, hint=None):
        """Instrumented stand-in for Repository.pack.

        Records the call, forwards to the real implementation, and checks
        the hint when one is expected.
        """
        self.calls.append(('pack', hint))
        self.orig_pack(hint=hint)
        if self.expect_hint:
            self.assertTrue(hint)

    def _make_instrumented_pair(self, src_fmt, target_fmt,
                                expect_pack_called):
        """Build a one-commit source tree and a locked target repository
        whose pack() is instrumented via log_pack.

        :return: (source_tree, tip_revision_id, target_repository)
        """
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        self.orig_pack = target.pack
        self.overrideAttr(target, "pack", self.log_pack)
        return source_tree, tip, target

    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
        """Insert a stream from src_fmt into target_fmt; check pack calls."""
        source_tree, tip, target = self._make_instrumented_pair(
            src_fmt, target_fmt, expect_pack_called)
        source = source_tree.branch.repository._get_source(target._format)
        search = target.search_missing_revision_ids(
            source_tree.branch.repository, revision_ids=[tip])
        stream = source.get_stream(search)
        from_format = source_tree.branch.repository._format
        target._get_sink().insert_stream(stream, from_format, [])
        self.assertLength(1 if expect_pack_called else 0, self.calls)

    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
        """Fetch from src_fmt into target_fmt (IDS path); check pack calls."""
        source_tree, tip, target = self._make_instrumented_pair(
            src_fmt, target_fmt, expect_pack_called)
        target.fetch(source_tree.branch.repository)
        self.assertLength(1 if expect_pack_called else 0, self.calls)

    def test_sink_format_hint_no(self):
        # Target format says packing makes no difference: pack not called.
        self.run_stream('1.9', 'rich-root-pack', False)

    def test_sink_format_hint_yes(self):
        # Target format says packing makes a difference: pack is called.
        self.run_stream('1.9', '2a', True)

    def test_sink_format_same_no(self):
        # Identical formats: pack is not called.
        self.run_stream('2a', '2a', False)

    def test_IDS_format_hint_no(self):
        # Target format says packing makes no difference: pack not called.
        self.run_fetch('1.9', 'rich-root-pack', False)

    def test_IDS_format_hint_yes(self):
        # Target format says packing makes a difference: pack is called.
        self.run_fetch('1.9', '2a', True)

    def test_IDS_format_same_no(self):
        # Identical formats: pack is not called.
        self.run_fetch('2a', '2a', False)