~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/tests/test_repository.py

  • Committer: Vincent Ladeuil
  • Date: 2008-09-11 19:36:38 UTC
  • mfrom: (3703 +trunk)
  • mto: (3705.1.1 trunk2)
  • mto: This revision was merged to the branch mainline in revision 3708.
  • Revision ID: v.ladeuil+lp@free.fr-20080911193638-wtjyc1kcmacc6t1f
merge bzr.dev

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
 
# Copyright (C) 2006-2011 Canonical Ltd
 
1
# Copyright (C) 2006, 2007, 2008 Canonical Ltd
2
2
#
3
3
# This program is free software; you can redistribute it and/or modify
4
4
# it under the terms of the GNU General Public License as published by
12
12
#
13
13
# You should have received a copy of the GNU General Public License
14
14
# along with this program; if not, write to the Free Software
15
 
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
15
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
16
16
 
17
17
"""Tests for the Repository facility that are not interface tests.
18
18
 
22
22
also see this file.
23
23
"""
24
24
 
 
25
import md5
25
26
from stat import S_ISDIR
 
27
from StringIO import StringIO
26
28
 
27
29
import bzrlib
28
 
from bzrlib.errors import (
29
 
    UnknownFormatError,
30
 
    UnsupportedFormatError,
31
 
    )
32
 
from bzrlib import (
33
 
    btree_index,
34
 
    graph,
35
 
    symbol_versioning,
36
 
    tests,
37
 
    transport,
38
 
    )
39
 
from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
40
 
from bzrlib.index import GraphIndex
 
30
from bzrlib.errors import (NotBranchError,
 
31
                           NoSuchFile,
 
32
                           UnknownFormatError,
 
33
                           UnsupportedFormatError,
 
34
                           )
 
35
from bzrlib import graph
 
36
from bzrlib.index import GraphIndex, InMemoryGraphIndex
41
37
from bzrlib.repository import RepositoryFormat
 
38
from bzrlib.smart import server
42
39
from bzrlib.tests import (
43
40
    TestCase,
44
41
    TestCaseWithTransport,
45
 
    )
 
42
    TestSkipped,
 
43
    test_knit,
 
44
    )
 
45
from bzrlib.transport import (
 
46
    fakenfs,
 
47
    get_transport,
 
48
    )
 
49
from bzrlib.transport.memory import MemoryServer
 
50
from bzrlib.util import bencode
46
51
from bzrlib import (
47
52
    bzrdir,
48
53
    errors,
49
54
    inventory,
50
 
    osutils,
 
55
    progress,
51
56
    repository,
52
57
    revision as _mod_revision,
 
58
    symbol_versioning,
53
59
    upgrade,
54
 
    versionedfile,
55
 
    vf_repository,
56
60
    workingtree,
57
61
    )
58
 
from bzrlib.repofmt import (
59
 
    groupcompress_repo,
60
 
    knitrepo,
61
 
    knitpack_repo,
62
 
    pack_repo,
63
 
    )
 
62
from bzrlib.repofmt import knitrepo, weaverepo, pack_repo
64
63
 
65
64
 
66
65
class TestDefaultFormat(TestCase):
68
67
    def test_get_set_default_format(self):
69
68
        old_default = bzrdir.format_registry.get('default')
70
69
        private_default = old_default().repository_format.__class__
71
 
        old_format = repository.format_registry.get_default()
 
70
        old_format = repository.RepositoryFormat.get_default_format()
72
71
        self.assertTrue(isinstance(old_format, private_default))
73
72
        def make_sample_bzrdir():
74
73
            my_bzrdir = bzrdir.BzrDirMetaFormat1()
88
87
            bzrdir.format_registry.remove('default')
89
88
            bzrdir.format_registry.remove('sample')
90
89
            bzrdir.format_registry.register('default', old_default, '')
91
 
        self.assertIsInstance(repository.format_registry.get_default(),
 
90
        self.assertIsInstance(repository.RepositoryFormat.get_default_format(),
92
91
                              old_format.__class__)
93
92
 
94
93
 
95
94
class SampleRepositoryFormat(repository.RepositoryFormat):
96
95
    """A sample format
97
96
 
98
 
    this format is initializable, unsupported to aid in testing the
 
97
    this format is initializable, unsupported to aid in testing the 
99
98
    open and open(unsupported=True) routines.
100
99
    """
101
100
 
116
115
        return "opened repository."
117
116
 
118
117
 
119
 
class SampleExtraRepositoryFormat(repository.RepositoryFormat):
120
 
    """A sample format that can not be used in a metadir
121
 
 
122
 
    """
123
 
 
124
 
    def get_format_string(self):
125
 
        raise NotImplementedError
126
 
 
127
 
 
128
118
class TestRepositoryFormat(TestCaseWithTransport):
129
119
    """Tests for the Repository format detection used by the bzr meta dir facility.BzrBranchFormat facility."""
130
120
 
131
121
    def test_find_format(self):
132
122
        # is the right format object found for a repository?
133
123
        # create a branch with a few known format objects.
134
 
        # this is not quite the same as
 
124
        # this is not quite the same as 
135
125
        self.build_tree(["foo/", "bar/"])
136
126
        def check_format(format, url):
137
127
            dir = format._matchingbzrdir.initialize(url)
138
128
            format.initialize(dir)
139
 
            t = transport.get_transport(url)
 
129
            t = get_transport(url)
140
130
            found_format = repository.RepositoryFormat.find_format(dir)
141
 
            self.assertIsInstance(found_format, format.__class__)
142
 
        check_format(repository.format_registry.get_default(), "bar")
143
 
 
 
131
            self.failUnless(isinstance(found_format, format.__class__))
 
132
        check_format(weaverepo.RepositoryFormat7(), "bar")
 
133
        
144
134
    def test_find_format_no_repository(self):
145
135
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
146
136
        self.assertRaises(errors.NoRepositoryPresent,
155
145
                          dir)
156
146
 
157
147
    def test_register_unregister_format(self):
158
 
        # Test deprecated format registration functions
159
148
        format = SampleRepositoryFormat()
160
149
        # make a control dir
161
150
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
162
151
        # make a repo
163
152
        format.initialize(dir)
164
153
        # register a format for it.
165
 
        self.applyDeprecated(symbol_versioning.deprecated_in((2, 4, 0)),
166
 
            repository.RepositoryFormat.register_format, format)
 
154
        repository.RepositoryFormat.register_format(format)
167
155
        # which repository.Open will refuse (not supported)
168
 
        self.assertRaises(UnsupportedFormatError, repository.Repository.open,
169
 
            self.get_url())
 
156
        self.assertRaises(UnsupportedFormatError, repository.Repository.open, self.get_url())
170
157
        # but open(unsupported) will work
171
158
        self.assertEqual(format.open(dir), "opened repository.")
172
159
        # unregister the format
173
 
        self.applyDeprecated(symbol_versioning.deprecated_in((2, 4, 0)),
174
 
            repository.RepositoryFormat.unregister_format, format)
175
 
 
176
 
 
177
 
class TestRepositoryFormatRegistry(TestCase):
178
 
 
179
 
    def setUp(self):
180
 
        super(TestRepositoryFormatRegistry, self).setUp()
181
 
        self.registry = repository.RepositoryFormatRegistry()
182
 
 
183
 
    def test_register_unregister_format(self):
184
 
        format = SampleRepositoryFormat()
185
 
        self.registry.register(format)
186
 
        self.assertEquals(format, self.registry.get("Sample .bzr repository format."))
187
 
        self.registry.remove(format)
188
 
        self.assertRaises(KeyError, self.registry.get, "Sample .bzr repository format.")
189
 
 
190
 
    def test_get_all(self):
191
 
        format = SampleRepositoryFormat()
192
 
        self.assertEquals([], self.registry._get_all())
193
 
        self.registry.register(format)
194
 
        self.assertEquals([format], self.registry._get_all())
195
 
 
196
 
    def test_register_extra(self):
197
 
        format = SampleExtraRepositoryFormat()
198
 
        self.assertEquals([], self.registry._get_all())
199
 
        self.registry.register_extra(format)
200
 
        self.assertEquals([format], self.registry._get_all())
201
 
 
202
 
    def test_register_extra_lazy(self):
203
 
        self.assertEquals([], self.registry._get_all())
204
 
        self.registry.register_extra_lazy("bzrlib.tests.test_repository",
205
 
            "SampleExtraRepositoryFormat")
206
 
        formats = self.registry._get_all()
207
 
        self.assertEquals(1, len(formats))
208
 
        self.assertIsInstance(formats[0], SampleExtraRepositoryFormat)
 
160
        repository.RepositoryFormat.unregister_format(format)
 
161
 
 
162
 
 
163
class TestFormat6(TestCaseWithTransport):
 
164
 
 
165
    def test_attribute__fetch_order(self):
 
166
        """Weaves need topological data insertion."""
 
167
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
 
168
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
169
        self.assertEqual('topological', repo._fetch_order)
 
170
 
 
171
    def test_attribute__fetch_uses_deltas(self):
 
172
        """Weaves do not reuse deltas."""
 
173
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
 
174
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
175
        self.assertEqual(False, repo._fetch_uses_deltas)
 
176
 
 
177
    def test_attribute__fetch_reconcile(self):
 
178
        """Weave repositories need a reconcile after fetch."""
 
179
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
 
180
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
181
        self.assertEqual(True, repo._fetch_reconcile)
 
182
 
 
183
    def test_no_ancestry_weave(self):
 
184
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
 
185
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
186
        # We no longer need to create the ancestry.weave file
 
187
        # since it is *never* used.
 
188
        self.assertRaises(NoSuchFile,
 
189
                          control.transport.get,
 
190
                          'ancestry.weave')
 
191
 
 
192
    def test_supports_external_lookups(self):
 
193
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
 
194
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
195
        self.assertFalse(repo._format.supports_external_lookups)
 
196
 
 
197
 
 
198
class TestFormat7(TestCaseWithTransport):
 
199
 
 
200
    def test_attribute__fetch_order(self):
 
201
        """Weaves need topological data insertion."""
 
202
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
203
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
204
        self.assertEqual('topological', repo._fetch_order)
 
205
 
 
206
    def test_attribute__fetch_uses_deltas(self):
 
207
        """Weaves do not reuse deltas."""
 
208
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
209
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
210
        self.assertEqual(False, repo._fetch_uses_deltas)
 
211
 
 
212
    def test_attribute__fetch_reconcile(self):
 
213
        """Weave repositories need a reconcile after fetch."""
 
214
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
215
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
216
        self.assertEqual(True, repo._fetch_reconcile)
 
217
 
 
218
    def test_disk_layout(self):
 
219
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
220
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
221
        # in case of side effects of locking.
 
222
        repo.lock_write()
 
223
        repo.unlock()
 
224
        # we want:
 
225
        # format 'Bazaar-NG Repository format 7'
 
226
        # lock ''
 
227
        # inventory.weave == empty_weave
 
228
        # empty revision-store directory
 
229
        # empty weaves directory
 
230
        t = control.get_repository_transport(None)
 
231
        self.assertEqualDiff('Bazaar-NG Repository format 7',
 
232
                             t.get('format').read())
 
233
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
 
234
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
 
235
        self.assertEqualDiff('# bzr weave file v5\n'
 
236
                             'w\n'
 
237
                             'W\n',
 
238
                             t.get('inventory.weave').read())
 
239
        # Creating a file with id Foo:Bar results in a non-escaped file name on
 
240
        # disk.
 
241
        control.create_branch()
 
242
        tree = control.create_workingtree()
 
243
        tree.add(['foo'], ['Foo:Bar'], ['file'])
 
244
        tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
 
245
        tree.commit('first post', rev_id='first')
 
246
        self.assertEqualDiff(
 
247
            '# bzr weave file v5\n'
 
248
            'i\n'
 
249
            '1 7fe70820e08a1aac0ef224d9c66ab66831cc4ab1\n'
 
250
            'n first\n'
 
251
            '\n'
 
252
            'w\n'
 
253
            '{ 0\n'
 
254
            '. content\n'
 
255
            '}\n'
 
256
            'W\n',
 
257
            t.get('weaves/74/Foo%3ABar.weave').read())
 
258
 
 
259
    def test_shared_disk_layout(self):
 
260
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
261
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
 
262
        # we want:
 
263
        # format 'Bazaar-NG Repository format 7'
 
264
        # inventory.weave == empty_weave
 
265
        # empty revision-store directory
 
266
        # empty weaves directory
 
267
        # a 'shared-storage' marker file.
 
268
        # lock is not present when unlocked
 
269
        t = control.get_repository_transport(None)
 
270
        self.assertEqualDiff('Bazaar-NG Repository format 7',
 
271
                             t.get('format').read())
 
272
        self.assertEqualDiff('', t.get('shared-storage').read())
 
273
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
 
274
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
 
275
        self.assertEqualDiff('# bzr weave file v5\n'
 
276
                             'w\n'
 
277
                             'W\n',
 
278
                             t.get('inventory.weave').read())
 
279
        self.assertFalse(t.has('branch-lock'))
 
280
 
 
281
    def test_creates_lockdir(self):
 
282
        """Make sure it appears to be controlled by a LockDir existence"""
 
283
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
284
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
 
285
        t = control.get_repository_transport(None)
 
286
        # TODO: Should check there is a 'lock' toplevel directory, 
 
287
        # regardless of contents
 
288
        self.assertFalse(t.has('lock/held/info'))
 
289
        repo.lock_write()
 
290
        try:
 
291
            self.assertTrue(t.has('lock/held/info'))
 
292
        finally:
 
293
            # unlock so we don't get a warning about failing to do so
 
294
            repo.unlock()
 
295
 
 
296
    def test_uses_lockdir(self):
 
297
        """repo format 7 actually locks on lockdir"""
 
298
        base_url = self.get_url()
 
299
        control = bzrdir.BzrDirMetaFormat1().initialize(base_url)
 
300
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
 
301
        t = control.get_repository_transport(None)
 
302
        repo.lock_write()
 
303
        repo.unlock()
 
304
        del repo
 
305
        # make sure the same lock is created by opening it
 
306
        repo = repository.Repository.open(base_url)
 
307
        repo.lock_write()
 
308
        self.assertTrue(t.has('lock/held/info'))
 
309
        repo.unlock()
 
310
        self.assertFalse(t.has('lock/held/info'))
 
311
 
 
312
    def test_shared_no_tree_disk_layout(self):
 
313
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
314
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
 
315
        repo.set_make_working_trees(False)
 
316
        # we want:
 
317
        # format 'Bazaar-NG Repository format 7'
 
318
        # lock ''
 
319
        # inventory.weave == empty_weave
 
320
        # empty revision-store directory
 
321
        # empty weaves directory
 
322
        # a 'shared-storage' marker file.
 
323
        t = control.get_repository_transport(None)
 
324
        self.assertEqualDiff('Bazaar-NG Repository format 7',
 
325
                             t.get('format').read())
 
326
        ## self.assertEqualDiff('', t.get('lock').read())
 
327
        self.assertEqualDiff('', t.get('shared-storage').read())
 
328
        self.assertEqualDiff('', t.get('no-working-trees').read())
 
329
        repo.set_make_working_trees(True)
 
330
        self.assertFalse(t.has('no-working-trees'))
 
331
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
 
332
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
 
333
        self.assertEqualDiff('# bzr weave file v5\n'
 
334
                             'w\n'
 
335
                             'W\n',
 
336
                             t.get('inventory.weave').read())
 
337
 
 
338
    def test_supports_external_lookups(self):
 
339
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
340
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
341
        self.assertFalse(repo._format.supports_external_lookups)
209
342
 
210
343
 
211
344
class TestFormatKnit1(TestCaseWithTransport):
212
 
 
 
345
    
213
346
    def test_attribute__fetch_order(self):
214
347
        """Knits need topological data insertion."""
215
348
        repo = self.make_repository('.',
216
349
                format=bzrdir.format_registry.get('knit')())
217
 
        self.assertEqual('topological', repo._format._fetch_order)
 
350
        self.assertEqual('topological', repo._fetch_order)
218
351
 
219
352
    def test_attribute__fetch_uses_deltas(self):
220
353
        """Knits reuse deltas."""
221
354
        repo = self.make_repository('.',
222
355
                format=bzrdir.format_registry.get('knit')())
223
 
        self.assertEqual(True, repo._format._fetch_uses_deltas)
 
356
        self.assertEqual(True, repo._fetch_uses_deltas)
224
357
 
225
358
    def test_disk_layout(self):
226
359
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
313
446
        repo = self.make_repository('.',
314
447
                format=bzrdir.format_registry.get('knit')())
315
448
        inv_xml = '<inventory format="5">\n</inventory>\n'
316
 
        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
 
449
        inv = repo.deserialise_inventory('test-rev-id', inv_xml)
317
450
        self.assertEqual('test-rev-id', inv.root.revision)
318
451
 
319
452
    def test_deserialise_uses_global_revision_id(self):
325
458
        # Arguably, the deserialise_inventory should detect a mismatch, and
326
459
        # raise an error, rather than silently using one revision_id over the
327
460
        # other.
328
 
        self.assertRaises(AssertionError, repo._deserialise_inventory,
 
461
        self.assertRaises(AssertionError, repo.deserialise_inventory,
329
462
            'test-rev-id', inv_xml)
330
 
        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
 
463
        inv = repo.deserialise_inventory('other-rev-id', inv_xml)
331
464
        self.assertEqual('other-rev-id', inv.root.revision)
332
465
 
333
466
    def test_supports_external_lookups(self):
339
472
class DummyRepository(object):
340
473
    """A dummy repository for testing."""
341
474
 
342
 
    _format = None
343
475
    _serializer = None
344
476
 
345
477
    def supports_rich_root(self):
346
 
        if self._format is not None:
347
 
            return self._format.rich_root_data
348
478
        return False
349
479
 
350
 
    def get_graph(self):
351
 
        raise NotImplementedError
352
 
 
353
 
    def get_parent_map(self, revision_ids):
354
 
        raise NotImplementedError
355
 
 
356
480
 
357
481
class InterDummy(repository.InterRepository):
358
482
    """An inter-repository optimised code path for DummyRepository.
365
489
    @staticmethod
366
490
    def is_compatible(repo_source, repo_target):
367
491
        """InterDummy is compatible with DummyRepository."""
368
 
        return (isinstance(repo_source, DummyRepository) and
 
492
        return (isinstance(repo_source, DummyRepository) and 
369
493
            isinstance(repo_target, DummyRepository))
370
494
 
371
495
 
379
503
        # classes do not barf inappropriately when a surprising repository type
380
504
        # is handed to them.
381
505
        dummy_a = DummyRepository()
382
 
        dummy_a._format = RepositoryFormat()
383
 
        dummy_a._format.supports_full_versioned_files = True
384
506
        dummy_b = DummyRepository()
385
 
        dummy_b._format = RepositoryFormat()
386
 
        dummy_b._format.supports_full_versioned_files = True
387
507
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
388
508
 
389
509
    def assertGetsDefaultInterRepository(self, repo_a, repo_b):
390
510
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default.
391
 
 
 
511
        
392
512
        The effective default is now InterSameDataRepository because there is
393
513
        no actual sane default in the presence of incompatible data models.
394
514
        """
395
515
        inter_repo = repository.InterRepository.get(repo_a, repo_b)
396
 
        self.assertEqual(vf_repository.InterSameDataRepository,
 
516
        self.assertEqual(repository.InterSameDataRepository,
397
517
                         inter_repo.__class__)
398
518
        self.assertEqual(repo_a, inter_repo.source)
399
519
        self.assertEqual(repo_b, inter_repo.target)
405
525
        # pair that it returns true on for the is_compatible static method
406
526
        # check
407
527
        dummy_a = DummyRepository()
408
 
        dummy_a._format = RepositoryFormat()
409
528
        dummy_b = DummyRepository()
410
 
        dummy_b._format = RepositoryFormat()
411
529
        repo = self.make_repository('.')
412
530
        # hack dummies to look like repo somewhat.
413
531
        dummy_a._serializer = repo._serializer
414
 
        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
415
 
        dummy_a._format.rich_root_data = repo._format.rich_root_data
416
 
        dummy_a._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
417
532
        dummy_b._serializer = repo._serializer
418
 
        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
419
 
        dummy_b._format.rich_root_data = repo._format.rich_root_data
420
 
        dummy_b._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
421
533
        repository.InterRepository.register_optimiser(InterDummy)
422
534
        try:
423
535
            # we should get the default for something InterDummy returns False
436
548
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
437
549
 
438
550
 
439
 
class TestRepositoryFormat1(knitrepo.RepositoryFormatKnit1):
440
 
 
441
 
    def get_format_string(self):
442
 
        return "Test Format 1"
443
 
 
444
 
 
445
 
class TestRepositoryFormat2(knitrepo.RepositoryFormatKnit1):
446
 
 
447
 
    def get_format_string(self):
448
 
        return "Test Format 2"
 
551
class TestInterWeaveRepo(TestCaseWithTransport):
 
552
 
 
553
    def test_is_compatible_and_registered(self):
 
554
        # InterWeaveRepo is compatible when either side
 
555
        # is a format 5/6/7 branch
 
556
        from bzrlib.repofmt import knitrepo, weaverepo
 
557
        formats = [weaverepo.RepositoryFormat5(),
 
558
                   weaverepo.RepositoryFormat6(),
 
559
                   weaverepo.RepositoryFormat7()]
 
560
        incompatible_formats = [weaverepo.RepositoryFormat4(),
 
561
                                knitrepo.RepositoryFormatKnit1(),
 
562
                                ]
 
563
        repo_a = self.make_repository('a')
 
564
        repo_b = self.make_repository('b')
 
565
        is_compatible = repository.InterWeaveRepo.is_compatible
 
566
        for source in incompatible_formats:
 
567
            # force incompatible left then right
 
568
            repo_a._format = source
 
569
            repo_b._format = formats[0]
 
570
            self.assertFalse(is_compatible(repo_a, repo_b))
 
571
            self.assertFalse(is_compatible(repo_b, repo_a))
 
572
        for source in formats:
 
573
            repo_a._format = source
 
574
            for target in formats:
 
575
                repo_b._format = target
 
576
                self.assertTrue(is_compatible(repo_a, repo_b))
 
577
        self.assertEqual(repository.InterWeaveRepo,
 
578
                         repository.InterRepository.get(repo_a,
 
579
                                                        repo_b).__class__)
449
580
 
450
581
 
451
582
class TestRepositoryConverter(TestCaseWithTransport):
452
583
 
453
584
    def test_convert_empty(self):
454
 
        source_format = TestRepositoryFormat1()
455
 
        target_format = TestRepositoryFormat2()
456
 
        repository.format_registry.register(source_format)
457
 
        self.addCleanup(repository.format_registry.remove,
458
 
            source_format)
459
 
        repository.format_registry.register(target_format)
460
 
        self.addCleanup(repository.format_registry.remove,
461
 
            target_format)
462
 
        t = self.get_transport()
 
585
        t = get_transport(self.get_url('.'))
463
586
        t.mkdir('repository')
464
587
        repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
465
 
        repo = TestRepositoryFormat1().initialize(repo_dir)
 
588
        repo = weaverepo.RepositoryFormat7().initialize(repo_dir)
 
589
        target_format = knitrepo.RepositoryFormatKnit1()
466
590
        converter = repository.CopyConverter(target_format)
467
591
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
468
592
        try:
473
597
        self.assertTrue(isinstance(target_format, repo._format.__class__))
474
598
 
475
599
 
 
600
class TestMisc(TestCase):
 
601
    
 
602
    def test_unescape_xml(self):
 
603
        """We get some kind of error when malformed entities are passed"""
 
604
        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;') 
 
605
 
 
606
 
476
607
class TestRepositoryFormatKnit3(TestCaseWithTransport):
477
608
 
478
609
    def test_attribute__fetch_order(self):
480
611
        format = bzrdir.BzrDirMetaFormat1()
481
612
        format.repository_format = knitrepo.RepositoryFormatKnit3()
482
613
        repo = self.make_repository('.', format=format)
483
 
        self.assertEqual('topological', repo._format._fetch_order)
 
614
        self.assertEqual('topological', repo._fetch_order)
484
615
 
485
616
    def test_attribute__fetch_uses_deltas(self):
486
617
        """Knits reuse deltas."""
487
618
        format = bzrdir.BzrDirMetaFormat1()
488
619
        format.repository_format = knitrepo.RepositoryFormatKnit3()
489
620
        repo = self.make_repository('.', format=format)
490
 
        self.assertEqual(True, repo._format._fetch_uses_deltas)
 
621
        self.assertEqual(True, repo._fetch_uses_deltas)
491
622
 
492
623
    def test_convert(self):
493
624
        """Ensure the upgrade adds weaves for roots"""
525
656
        self.assertFalse(repo._format.supports_external_lookups)
526
657
 
527
658
 
528
 
class Test2a(tests.TestCaseWithMemoryTransport):
529
 
 
530
 
    def test_chk_bytes_uses_custom_btree_parser(self):
531
 
        mt = self.make_branch_and_memory_tree('test', format='2a')
532
 
        mt.lock_write()
533
 
        self.addCleanup(mt.unlock)
534
 
        mt.add([''], ['root-id'])
535
 
        mt.commit('first')
536
 
        index = mt.branch.repository.chk_bytes._index._graph_index._indices[0]
537
 
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
538
 
        # It should also work if we re-open the repo
539
 
        repo = mt.branch.repository.bzrdir.open_repository()
540
 
        repo.lock_read()
541
 
        self.addCleanup(repo.unlock)
542
 
        index = repo.chk_bytes._index._graph_index._indices[0]
543
 
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
544
 
 
545
 
    def test_fetch_combines_groups(self):
546
 
        builder = self.make_branch_builder('source', format='2a')
547
 
        builder.start_series()
548
 
        builder.build_snapshot('1', None, [
549
 
            ('add', ('', 'root-id', 'directory', '')),
550
 
            ('add', ('file', 'file-id', 'file', 'content\n'))])
551
 
        builder.build_snapshot('2', ['1'], [
552
 
            ('modify', ('file-id', 'content-2\n'))])
553
 
        builder.finish_series()
554
 
        source = builder.get_branch()
555
 
        target = self.make_repository('target', format='2a')
556
 
        target.fetch(source.repository)
557
 
        target.lock_read()
558
 
        self.addCleanup(target.unlock)
559
 
        details = target.texts._index.get_build_details(
560
 
            [('file-id', '1',), ('file-id', '2',)])
561
 
        file_1_details = details[('file-id', '1')]
562
 
        file_2_details = details[('file-id', '2')]
563
 
        # The index, and what to read off disk, should be the same for both
564
 
        # versions of the file.
565
 
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
566
 
 
567
 
    def test_fetch_combines_groups(self):
568
 
        builder = self.make_branch_builder('source', format='2a')
569
 
        builder.start_series()
570
 
        builder.build_snapshot('1', None, [
571
 
            ('add', ('', 'root-id', 'directory', '')),
572
 
            ('add', ('file', 'file-id', 'file', 'content\n'))])
573
 
        builder.build_snapshot('2', ['1'], [
574
 
            ('modify', ('file-id', 'content-2\n'))])
575
 
        builder.finish_series()
576
 
        source = builder.get_branch()
577
 
        target = self.make_repository('target', format='2a')
578
 
        target.fetch(source.repository)
579
 
        target.lock_read()
580
 
        self.addCleanup(target.unlock)
581
 
        details = target.texts._index.get_build_details(
582
 
            [('file-id', '1',), ('file-id', '2',)])
583
 
        file_1_details = details[('file-id', '1')]
584
 
        file_2_details = details[('file-id', '2')]
585
 
        # The index, and what to read off disk, should be the same for both
586
 
        # versions of the file.
587
 
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
588
 
 
589
 
    def test_fetch_combines_groups(self):
590
 
        builder = self.make_branch_builder('source', format='2a')
591
 
        builder.start_series()
592
 
        builder.build_snapshot('1', None, [
593
 
            ('add', ('', 'root-id', 'directory', '')),
594
 
            ('add', ('file', 'file-id', 'file', 'content\n'))])
595
 
        builder.build_snapshot('2', ['1'], [
596
 
            ('modify', ('file-id', 'content-2\n'))])
597
 
        builder.finish_series()
598
 
        source = builder.get_branch()
599
 
        target = self.make_repository('target', format='2a')
600
 
        target.fetch(source.repository)
601
 
        target.lock_read()
602
 
        self.addCleanup(target.unlock)
603
 
        details = target.texts._index.get_build_details(
604
 
            [('file-id', '1',), ('file-id', '2',)])
605
 
        file_1_details = details[('file-id', '1')]
606
 
        file_2_details = details[('file-id', '2')]
607
 
        # The index, and what to read off disk, should be the same for both
608
 
        # versions of the file.
609
 
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
610
 
 
611
 
    def test_format_pack_compresses_True(self):
612
 
        repo = self.make_repository('repo', format='2a')
613
 
        self.assertTrue(repo._format.pack_compresses)
614
 
 
615
 
    def test_inventories_use_chk_map_with_parent_base_dict(self):
616
 
        tree = self.make_branch_and_memory_tree('repo', format="2a")
617
 
        tree.lock_write()
618
 
        tree.add([''], ['TREE_ROOT'])
619
 
        revid = tree.commit("foo")
620
 
        tree.unlock()
621
 
        tree.lock_read()
622
 
        self.addCleanup(tree.unlock)
623
 
        inv = tree.branch.repository.get_inventory(revid)
624
 
        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
625
 
        inv.parent_id_basename_to_file_id._ensure_root()
626
 
        inv.id_to_entry._ensure_root()
627
 
        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
628
 
        self.assertEqual(65536,
629
 
            inv.parent_id_basename_to_file_id._root_node.maximum_size)
630
 
 
631
 
    def test_autopack_unchanged_chk_nodes(self):
632
 
        # at 20 unchanged commits, chk pages are packed that are split into
633
 
        # two groups such that the new pack being made doesn't have all its
634
 
        # pages in the source packs (though they are in the repository).
635
 
        # Use a memory backed repository, we don't need to hit disk for this
636
 
        tree = self.make_branch_and_memory_tree('tree', format='2a')
637
 
        tree.lock_write()
638
 
        self.addCleanup(tree.unlock)
639
 
        tree.add([''], ['TREE_ROOT'])
640
 
        for pos in range(20):
641
 
            tree.commit(str(pos))
642
 
 
643
 
    def test_pack_with_hint(self):
644
 
        tree = self.make_branch_and_memory_tree('tree', format='2a')
645
 
        tree.lock_write()
646
 
        self.addCleanup(tree.unlock)
647
 
        tree.add([''], ['TREE_ROOT'])
648
 
        # 1 commit to leave untouched
649
 
        tree.commit('1')
650
 
        to_keep = tree.branch.repository._pack_collection.names()
651
 
        # 2 to combine
652
 
        tree.commit('2')
653
 
        tree.commit('3')
654
 
        all = tree.branch.repository._pack_collection.names()
655
 
        combine = list(set(all) - set(to_keep))
656
 
        self.assertLength(3, all)
657
 
        self.assertLength(2, combine)
658
 
        tree.branch.repository.pack(hint=combine)
659
 
        final = tree.branch.repository._pack_collection.names()
660
 
        self.assertLength(2, final)
661
 
        self.assertFalse(combine[0] in final)
662
 
        self.assertFalse(combine[1] in final)
663
 
        self.assertSubset(to_keep, final)
664
 
 
665
 
    def test_stream_source_to_gc(self):
666
 
        source = self.make_repository('source', format='2a')
667
 
        target = self.make_repository('target', format='2a')
668
 
        stream = source._get_source(target._format)
669
 
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)
670
 
 
671
 
    def test_stream_source_to_non_gc(self):
672
 
        source = self.make_repository('source', format='2a')
673
 
        target = self.make_repository('target', format='rich-root-pack')
674
 
        stream = source._get_source(target._format)
675
 
        # We don't want the child GroupCHKStreamSource
676
 
        self.assertIs(type(stream), vf_repository.StreamSource)
677
 
 
678
 
    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
679
 
        source_builder = self.make_branch_builder('source',
680
 
                            format='2a')
681
 
        # We have to build a fairly large tree, so that we are sure the chk
682
 
        # pages will have split into multiple pages.
683
 
        entries = [('add', ('', 'a-root-id', 'directory', None))]
684
 
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
685
 
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
686
 
                fname = i + j
687
 
                fid = fname + '-id'
688
 
                content = 'content for %s\n' % (fname,)
689
 
                entries.append(('add', (fname, fid, 'file', content)))
690
 
        source_builder.start_series()
691
 
        source_builder.build_snapshot('rev-1', None, entries)
692
 
        # Now change a few of them, so we get a few new pages for the second
693
 
        # revision
694
 
        source_builder.build_snapshot('rev-2', ['rev-1'], [
695
 
            ('modify', ('aa-id', 'new content for aa-id\n')),
696
 
            ('modify', ('cc-id', 'new content for cc-id\n')),
697
 
            ('modify', ('zz-id', 'new content for zz-id\n')),
698
 
            ])
699
 
        source_builder.finish_series()
700
 
        source_branch = source_builder.get_branch()
701
 
        source_branch.lock_read()
702
 
        self.addCleanup(source_branch.unlock)
703
 
        target = self.make_repository('target', format='2a')
704
 
        source = source_branch.repository._get_source(target._format)
705
 
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)
706
 
 
707
 
        # On a regular pass, getting the inventories and chk pages for rev-2
708
 
        # would only get the newly created chk pages
709
 
        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
710
 
                                    set(['rev-2']))
711
 
        simple_chk_records = []
712
 
        for vf_name, substream in source.get_stream(search):
713
 
            if vf_name == 'chk_bytes':
714
 
                for record in substream:
715
 
                    simple_chk_records.append(record.key)
716
 
            else:
717
 
                for _ in substream:
718
 
                    continue
719
 
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
720
 
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
721
 
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
722
 
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
723
 
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
724
 
                         simple_chk_records)
725
 
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
726
 
        # we should get a much larger set of pages.
727
 
        missing = [('inventories', 'rev-2')]
728
 
        full_chk_records = []
729
 
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
730
 
            if vf_name == 'inventories':
731
 
                for record in substream:
732
 
                    self.assertEqual(('rev-2',), record.key)
733
 
            elif vf_name == 'chk_bytes':
734
 
                for record in substream:
735
 
                    full_chk_records.append(record.key)
736
 
            else:
737
 
                self.fail('Should not be getting a stream of %s' % (vf_name,))
738
 
        # We have 257 records now. This is because we have 1 root page, and 256
739
 
        # leaf pages in a complete listing.
740
 
        self.assertEqual(257, len(full_chk_records))
741
 
        self.assertSubset(simple_chk_records, full_chk_records)
742
 
 
743
 
    def test_inconsistency_fatal(self):
744
 
        repo = self.make_repository('repo', format='2a')
745
 
        self.assertTrue(repo.revisions._index._inconsistency_fatal)
746
 
        self.assertFalse(repo.texts._index._inconsistency_fatal)
747
 
        self.assertFalse(repo.inventories._index._inconsistency_fatal)
748
 
        self.assertFalse(repo.signatures._index._inconsistency_fatal)
749
 
        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
750
 
 
751
 
 
752
 
class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):
753
 
 
754
 
    def test_source_to_exact_pack_092(self):
755
 
        source = self.make_repository('source', format='pack-0.92')
756
 
        target = self.make_repository('target', format='pack-0.92')
757
 
        stream_source = source._get_source(target._format)
758
 
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
759
 
 
760
 
    def test_source_to_exact_pack_rich_root_pack(self):
761
 
        source = self.make_repository('source', format='rich-root-pack')
762
 
        target = self.make_repository('target', format='rich-root-pack')
763
 
        stream_source = source._get_source(target._format)
764
 
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
765
 
 
766
 
    def test_source_to_exact_pack_19(self):
767
 
        source = self.make_repository('source', format='1.9')
768
 
        target = self.make_repository('target', format='1.9')
769
 
        stream_source = source._get_source(target._format)
770
 
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
771
 
 
772
 
    def test_source_to_exact_pack_19_rich_root(self):
773
 
        source = self.make_repository('source', format='1.9-rich-root')
774
 
        target = self.make_repository('target', format='1.9-rich-root')
775
 
        stream_source = source._get_source(target._format)
776
 
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
777
 
 
778
 
    def test_source_to_remote_exact_pack_19(self):
779
 
        trans = self.make_smart_server('target')
780
 
        trans.ensure_base()
781
 
        source = self.make_repository('source', format='1.9')
782
 
        target = self.make_repository('target', format='1.9')
783
 
        target = repository.Repository.open(trans.base)
784
 
        stream_source = source._get_source(target._format)
785
 
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
786
 
 
787
 
    def test_stream_source_to_non_exact(self):
788
 
        source = self.make_repository('source', format='pack-0.92')
789
 
        target = self.make_repository('target', format='1.9')
790
 
        stream = source._get_source(target._format)
791
 
        self.assertIs(type(stream), vf_repository.StreamSource)
792
 
 
793
 
    def test_stream_source_to_non_exact_rich_root(self):
794
 
        source = self.make_repository('source', format='1.9')
795
 
        target = self.make_repository('target', format='1.9-rich-root')
796
 
        stream = source._get_source(target._format)
797
 
        self.assertIs(type(stream), vf_repository.StreamSource)
798
 
 
799
 
    def test_source_to_remote_non_exact_pack_19(self):
800
 
        trans = self.make_smart_server('target')
801
 
        trans.ensure_base()
802
 
        source = self.make_repository('source', format='1.9')
803
 
        target = self.make_repository('target', format='1.6')
804
 
        target = repository.Repository.open(trans.base)
805
 
        stream_source = source._get_source(target._format)
806
 
        self.assertIs(type(stream_source), vf_repository.StreamSource)
807
 
 
808
 
    def test_stream_source_to_knit(self):
809
 
        source = self.make_repository('source', format='pack-0.92')
810
 
        target = self.make_repository('target', format='dirstate')
811
 
        stream = source._get_source(target._format)
812
 
        self.assertIs(type(stream), vf_repository.StreamSource)
813
 
 
814
 
 
815
 
class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
816
 
    """Tests for _find_parent_ids_of_revisions."""
817
 
 
818
 
    def setUp(self):
819
 
        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
820
 
        self.builder = self.make_branch_builder('source')
821
 
        self.builder.start_series()
822
 
        self.builder.build_snapshot('initial', None,
823
 
            [('add', ('', 'tree-root', 'directory', None))])
824
 
        self.repo = self.builder.get_branch().repository
825
 
        self.addCleanup(self.builder.finish_series)
826
 
 
827
 
    def assertParentIds(self, expected_result, rev_set):
828
 
        self.assertEqual(sorted(expected_result),
829
 
            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))
830
 
 
831
 
    def test_simple(self):
832
 
        self.builder.build_snapshot('revid1', None, [])
833
 
        self.builder.build_snapshot('revid2', ['revid1'], [])
834
 
        rev_set = ['revid2']
835
 
        self.assertParentIds(['revid1'], rev_set)
836
 
 
837
 
    def test_not_first_parent(self):
838
 
        self.builder.build_snapshot('revid1', None, [])
839
 
        self.builder.build_snapshot('revid2', ['revid1'], [])
840
 
        self.builder.build_snapshot('revid3', ['revid2'], [])
841
 
        rev_set = ['revid3', 'revid2']
842
 
        self.assertParentIds(['revid1'], rev_set)
843
 
 
844
 
    def test_not_null(self):
845
 
        rev_set = ['initial']
846
 
        self.assertParentIds([], rev_set)
847
 
 
848
 
    def test_not_null_set(self):
849
 
        self.builder.build_snapshot('revid1', None, [])
850
 
        rev_set = [_mod_revision.NULL_REVISION]
851
 
        self.assertParentIds([], rev_set)
852
 
 
853
 
    def test_ghost(self):
854
 
        self.builder.build_snapshot('revid1', None, [])
855
 
        rev_set = ['ghost', 'revid1']
856
 
        self.assertParentIds(['initial'], rev_set)
857
 
 
858
 
    def test_ghost_parent(self):
859
 
        self.builder.build_snapshot('revid1', None, [])
860
 
        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
861
 
        rev_set = ['revid2', 'revid1']
862
 
        self.assertParentIds(['ghost', 'initial'], rev_set)
863
 
 
864
 
    def test_righthand_parent(self):
865
 
        self.builder.build_snapshot('revid1', None, [])
866
 
        self.builder.build_snapshot('revid2a', ['revid1'], [])
867
 
        self.builder.build_snapshot('revid2b', ['revid1'], [])
868
 
        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
869
 
        rev_set = ['revid3', 'revid2a']
870
 
        self.assertParentIds(['revid1', 'revid2b'], rev_set)
871
 
 
872
 
 
873
659
class TestWithBrokenRepo(TestCaseWithTransport):
874
660
    """These tests seem to be more appropriate as interface tests?"""
875
661
 
888
674
            inv = inventory.Inventory(revision_id='rev1a')
889
675
            inv.root.revision = 'rev1a'
890
676
            self.add_file(repo, inv, 'file1', 'rev1a', [])
891
 
            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
892
677
            repo.add_inventory('rev1a', inv, [])
893
678
            revision = _mod_revision.Revision('rev1a',
894
679
                committer='jrandom@example.com', timestamp=0,
929
714
    def add_revision(self, repo, revision_id, inv, parent_ids):
930
715
        inv.revision_id = revision_id
931
716
        inv.root.revision = revision_id
932
 
        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
933
717
        repo.add_inventory(revision_id, inv, parent_ids)
934
718
        revision = _mod_revision.Revision(revision_id,
935
719
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
952
736
        """
953
737
        broken_repo = self.make_broken_repository()
954
738
        empty_repo = self.make_repository('empty-repo')
955
 
        try:
956
 
            empty_repo.fetch(broken_repo)
957
 
        except (errors.RevisionNotPresent, errors.BzrCheckError):
958
 
            # Test successful: compression parent not being copied leads to
959
 
            # error.
960
 
            return
961
 
        empty_repo.lock_read()
962
 
        self.addCleanup(empty_repo.unlock)
963
 
        text = empty_repo.texts.get_record_stream(
964
 
            [('file2-id', 'rev3')], 'topological', True).next()
965
 
        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
 
739
        self.assertRaises(errors.RevisionNotPresent, empty_repo.fetch, broken_repo)
966
740
 
967
741
 
968
742
class TestRepositoryPackCollection(TestCaseWithTransport):
970
744
    def get_format(self):
971
745
        return bzrdir.format_registry.make_bzrdir('pack-0.92')
972
746
 
973
 
    def get_packs(self):
974
 
        format = self.get_format()
975
 
        repo = self.make_repository('.', format=format)
976
 
        return repo._pack_collection
977
 
 
978
 
    def make_packs_and_alt_repo(self, write_lock=False):
979
 
        """Create a pack repo with 3 packs, and access it via a second repo."""
980
 
        tree = self.make_branch_and_tree('.', format=self.get_format())
981
 
        tree.lock_write()
982
 
        self.addCleanup(tree.unlock)
983
 
        rev1 = tree.commit('one')
984
 
        rev2 = tree.commit('two')
985
 
        rev3 = tree.commit('three')
986
 
        r = repository.Repository.open('.')
987
 
        if write_lock:
988
 
            r.lock_write()
989
 
        else:
990
 
            r.lock_read()
991
 
        self.addCleanup(r.unlock)
992
 
        packs = r._pack_collection
993
 
        packs.ensure_loaded()
994
 
        return tree, r, packs, [rev1, rev2, rev3]
995
 
 
996
 
    def test__clear_obsolete_packs(self):
997
 
        packs = self.get_packs()
998
 
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
999
 
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
1000
 
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
1001
 
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
1002
 
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
1003
 
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
1004
 
        res = packs._clear_obsolete_packs()
1005
 
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
1006
 
        self.assertEqual([], obsolete_pack_trans.list_dir('.'))
1007
 
 
1008
 
    def test__clear_obsolete_packs_preserve(self):
1009
 
        packs = self.get_packs()
1010
 
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
1011
 
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
1012
 
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
1013
 
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
1014
 
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
1015
 
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
1016
 
        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
1017
 
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
1018
 
        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
1019
 
                         sorted(obsolete_pack_trans.list_dir('.')))
1020
 
 
1021
747
    def test__max_pack_count(self):
1022
748
        """The maximum pack count is a function of the number of revisions."""
 
749
        format = self.get_format()
 
750
        repo = self.make_repository('.', format=format)
 
751
        packs = repo._pack_collection
1023
752
        # no revisions - one pack, so that we can have a revision free repo
1024
753
        # without it blowing up
1025
 
        packs = self.get_packs()
1026
754
        self.assertEqual(1, packs._max_pack_count(0))
1027
755
        # after that the sum of the digits, - check the first 1-9
1028
756
        self.assertEqual(1, packs._max_pack_count(1))
1043
771
        # check some arbitrary big numbers
1044
772
        self.assertEqual(25, packs._max_pack_count(112894))
1045
773
 
1046
 
    def test_repr(self):
1047
 
        packs = self.get_packs()
1048
 
        self.assertContainsRe(repr(packs),
1049
 
            'RepositoryPackCollection(.*Repository(.*))')
1050
 
 
1051
 
    def test__obsolete_packs(self):
1052
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1053
 
        names = packs.names()
1054
 
        pack = packs.get_pack_by_name(names[0])
1055
 
        # Schedule this one for removal
1056
 
        packs._remove_pack_from_memory(pack)
1057
 
        # Simulate a concurrent update by renaming the .pack file and one of
1058
 
        # the indices
1059
 
        packs.transport.rename('packs/%s.pack' % (names[0],),
1060
 
                               'obsolete_packs/%s.pack' % (names[0],))
1061
 
        packs.transport.rename('indices/%s.iix' % (names[0],),
1062
 
                               'obsolete_packs/%s.iix' % (names[0],))
1063
 
        # Now trigger the obsoletion, and ensure that all the remaining files
1064
 
        # are still renamed
1065
 
        packs._obsolete_packs([pack])
1066
 
        self.assertEqual([n + '.pack' for n in names[1:]],
1067
 
                         sorted(packs._pack_transport.list_dir('.')))
1068
 
        # names[0] should not be present in the index anymore
1069
 
        self.assertEqual(names[1:],
1070
 
            sorted(set([osutils.splitext(n)[0] for n in
1071
 
                        packs._index_transport.list_dir('.')])))
1072
 
 
1073
774
    def test_pack_distribution_zero(self):
1074
 
        packs = self.get_packs()
 
775
        format = self.get_format()
 
776
        repo = self.make_repository('.', format=format)
 
777
        packs = repo._pack_collection
1075
778
        self.assertEqual([0], packs.pack_distribution(0))
1076
779
 
1077
780
    def test_ensure_loaded_unlocked(self):
1078
 
        packs = self.get_packs()
 
781
        format = self.get_format()
 
782
        repo = self.make_repository('.', format=format)
1079
783
        self.assertRaises(errors.ObjectNotLocked,
1080
 
                          packs.ensure_loaded)
 
784
                          repo._pack_collection.ensure_loaded)
1081
785
 
1082
786
    def test_pack_distribution_one_to_nine(self):
1083
 
        packs = self.get_packs()
 
787
        format = self.get_format()
 
788
        repo = self.make_repository('.', format=format)
 
789
        packs = repo._pack_collection
1084
790
        self.assertEqual([1],
1085
791
            packs.pack_distribution(1))
1086
792
        self.assertEqual([1, 1],
1102
808
 
1103
809
    def test_pack_distribution_stable_at_boundaries(self):
1104
810
        """When there are multi-rev packs the counts are stable."""
1105
 
        packs = self.get_packs()
 
811
        format = self.get_format()
 
812
        repo = self.make_repository('.', format=format)
 
813
        packs = repo._pack_collection
1106
814
        # in 10s:
1107
815
        self.assertEqual([10], packs.pack_distribution(10))
1108
816
        self.assertEqual([10, 1], packs.pack_distribution(11))
1117
825
        self.assertEqual([100, 100, 10, 1], packs.pack_distribution(211))
1118
826
 
1119
827
    def test_plan_pack_operations_2009_revisions_skip_all_packs(self):
1120
 
        packs = self.get_packs()
 
828
        format = self.get_format()
 
829
        repo = self.make_repository('.', format=format)
 
830
        packs = repo._pack_collection
1121
831
        existing_packs = [(2000, "big"), (9, "medium")]
1122
832
        # rev count - 2009 -> 2x1000 + 9x1
1123
833
        pack_operations = packs.plan_autopack_combinations(
1125
835
        self.assertEqual([], pack_operations)
1126
836
 
1127
837
    def test_plan_pack_operations_2010_revisions_skip_all_packs(self):
1128
 
        packs = self.get_packs()
 
838
        format = self.get_format()
 
839
        repo = self.make_repository('.', format=format)
 
840
        packs = repo._pack_collection
1129
841
        existing_packs = [(2000, "big"), (9, "medium"), (1, "single")]
1130
842
        # rev count - 2010 -> 2x1000 + 1x10
1131
843
        pack_operations = packs.plan_autopack_combinations(
1133
845
        self.assertEqual([], pack_operations)
1134
846
 
1135
847
    def test_plan_pack_operations_2010_combines_smallest_two(self):
1136
 
        packs = self.get_packs()
 
848
        format = self.get_format()
 
849
        repo = self.make_repository('.', format=format)
 
850
        packs = repo._pack_collection
1137
851
        existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"),
1138
852
            (1, "single1")]
1139
853
        # rev count - 2010 -> 2x1000 + 1x10 (3)
1140
854
        pack_operations = packs.plan_autopack_combinations(
1141
855
            existing_packs, [1000, 1000, 10])
1142
 
        self.assertEqual([[2, ["single2", "single1"]]], pack_operations)
1143
 
 
1144
 
    def test_plan_pack_operations_creates_a_single_op(self):
1145
 
        packs = self.get_packs()
1146
 
        existing_packs = [(50, 'a'), (40, 'b'), (30, 'c'), (10, 'd'),
1147
 
                          (10, 'e'), (6, 'f'), (4, 'g')]
1148
 
        # rev count 150 -> 1x100 and 5x10
1149
 
        # The two size 10 packs do not need to be touched. The 50, 40, 30 would
1150
 
        # be combined into a single 120 size pack, and the 6 & 4 would
1151
 
        # becombined into a size 10 pack. However, if we have to rewrite them,
1152
 
        # we save a pack file with no increased I/O by putting them into the
1153
 
        # same file.
1154
 
        distribution = packs.pack_distribution(150)
1155
 
        pack_operations = packs.plan_autopack_combinations(existing_packs,
1156
 
                                                           distribution)
1157
 
        self.assertEqual([[130, ['a', 'b', 'c', 'f', 'g']]], pack_operations)
 
856
        self.assertEqual([[2, ["single2", "single1"]], [0, []]], pack_operations)
1158
857
 
1159
858
    def test_all_packs_none(self):
1160
859
        format = self.get_format()
1198
897
        tree.lock_read()
1199
898
        self.addCleanup(tree.unlock)
1200
899
        packs = tree.branch.repository._pack_collection
1201
 
        packs.reset()
1202
900
        packs.ensure_loaded()
1203
901
        name = packs.names()[0]
1204
902
        pack_1 = packs.get_pack_by_name(name)
1213
911
        # and the same instance should be returned on successive calls.
1214
912
        self.assertTrue(pack_1 is packs.get_pack_by_name(name))
1215
913
 
1216
 
    def test_reload_pack_names_new_entry(self):
1217
 
        tree, r, packs, revs = self.make_packs_and_alt_repo()
1218
 
        names = packs.names()
1219
 
        # Add a new pack file into the repository
1220
 
        rev4 = tree.commit('four')
1221
 
        new_names = tree.branch.repository._pack_collection.names()
1222
 
        new_name = set(new_names).difference(names)
1223
 
        self.assertEqual(1, len(new_name))
1224
 
        new_name = new_name.pop()
1225
 
        # The old collection hasn't noticed yet
1226
 
        self.assertEqual(names, packs.names())
1227
 
        self.assertTrue(packs.reload_pack_names())
1228
 
        self.assertEqual(new_names, packs.names())
1229
 
        # And the repository can access the new revision
1230
 
        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
1231
 
        self.assertFalse(packs.reload_pack_names())
1232
 
 
1233
 
    def test_reload_pack_names_added_and_removed(self):
1234
 
        tree, r, packs, revs = self.make_packs_and_alt_repo()
1235
 
        names = packs.names()
1236
 
        # Now repack the whole thing
1237
 
        tree.branch.repository.pack()
1238
 
        new_names = tree.branch.repository._pack_collection.names()
1239
 
        # The other collection hasn't noticed yet
1240
 
        self.assertEqual(names, packs.names())
1241
 
        self.assertTrue(packs.reload_pack_names())
1242
 
        self.assertEqual(new_names, packs.names())
1243
 
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
1244
 
        self.assertFalse(packs.reload_pack_names())
1245
 
 
1246
 
    def test_reload_pack_names_preserves_pending(self):
1247
 
        # TODO: Update this to also test for pending-deleted names
1248
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1249
 
        # We will add one pack (via start_write_group + insert_record_stream),
1250
 
        # and remove another pack (via _remove_pack_from_memory)
1251
 
        orig_names = packs.names()
1252
 
        orig_at_load = packs._packs_at_load
1253
 
        to_remove_name = iter(orig_names).next()
1254
 
        r.start_write_group()
1255
 
        self.addCleanup(r.abort_write_group)
1256
 
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
1257
 
            ('text', 'rev'), (), None, 'content\n')])
1258
 
        new_pack = packs._new_pack
1259
 
        self.assertTrue(new_pack.data_inserted())
1260
 
        new_pack.finish()
1261
 
        packs.allocate(new_pack)
1262
 
        packs._new_pack = None
1263
 
        removed_pack = packs.get_pack_by_name(to_remove_name)
1264
 
        packs._remove_pack_from_memory(removed_pack)
1265
 
        names = packs.names()
1266
 
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
1267
 
        new_names = set([x[0][0] for x in new_nodes])
1268
 
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
1269
 
        self.assertEqual(set(names) - set(orig_names), new_names)
1270
 
        self.assertEqual(set([new_pack.name]), new_names)
1271
 
        self.assertEqual([to_remove_name],
1272
 
                         sorted([x[0][0] for x in deleted_nodes]))
1273
 
        packs.reload_pack_names()
1274
 
        reloaded_names = packs.names()
1275
 
        self.assertEqual(orig_at_load, packs._packs_at_load)
1276
 
        self.assertEqual(names, reloaded_names)
1277
 
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
1278
 
        new_names = set([x[0][0] for x in new_nodes])
1279
 
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
1280
 
        self.assertEqual(set(names) - set(orig_names), new_names)
1281
 
        self.assertEqual(set([new_pack.name]), new_names)
1282
 
        self.assertEqual([to_remove_name],
1283
 
                         sorted([x[0][0] for x in deleted_nodes]))
1284
 
 
1285
 
    def test_autopack_obsoletes_new_pack(self):
1286
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1287
 
        packs._max_pack_count = lambda x: 1
1288
 
        packs.pack_distribution = lambda x: [10]
1289
 
        r.start_write_group()
1290
 
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
1291
 
            ('bogus-rev',), (), None, 'bogus-content\n')])
1292
 
        # This should trigger an autopack, which will combine everything into a
1293
 
        # single pack file.
1294
 
        new_names = r.commit_write_group()
1295
 
        names = packs.names()
1296
 
        self.assertEqual(1, len(names))
1297
 
        self.assertEqual([names[0] + '.pack'],
1298
 
                         packs._pack_transport.list_dir('.'))
1299
 
 
1300
 
    def test_autopack_reloads_and_stops(self):
1301
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1302
 
        # After we have determined what needs to be autopacked, trigger a
1303
 
        # full-pack via the other repo which will cause us to re-evaluate and
1304
 
        # decide we don't need to do anything
1305
 
        orig_execute = packs._execute_pack_operations
1306
 
        def _munged_execute_pack_ops(*args, **kwargs):
1307
 
            tree.branch.repository.pack()
1308
 
            return orig_execute(*args, **kwargs)
1309
 
        packs._execute_pack_operations = _munged_execute_pack_ops
1310
 
        packs._max_pack_count = lambda x: 1
1311
 
        packs.pack_distribution = lambda x: [10]
1312
 
        self.assertFalse(packs.autopack())
1313
 
        self.assertEqual(1, len(packs.names()))
1314
 
        self.assertEqual(tree.branch.repository._pack_collection.names(),
1315
 
                         packs.names())
1316
 
 
1317
 
    def test__save_pack_names(self):
1318
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1319
 
        names = packs.names()
1320
 
        pack = packs.get_pack_by_name(names[0])
1321
 
        packs._remove_pack_from_memory(pack)
1322
 
        packs._save_pack_names(obsolete_packs=[pack])
1323
 
        cur_packs = packs._pack_transport.list_dir('.')
1324
 
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
1325
 
        # obsolete_packs will also have stuff like .rix and .iix present.
1326
 
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
1327
 
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
1328
 
        self.assertEqual([pack.name], sorted(obsolete_names))
1329
 
 
1330
 
    def test__save_pack_names_already_obsoleted(self):
1331
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1332
 
        names = packs.names()
1333
 
        pack = packs.get_pack_by_name(names[0])
1334
 
        packs._remove_pack_from_memory(pack)
1335
 
        # We are going to simulate a concurrent autopack by manually obsoleting
1336
 
        # the pack directly.
1337
 
        packs._obsolete_packs([pack])
1338
 
        packs._save_pack_names(clear_obsolete_packs=True,
1339
 
                               obsolete_packs=[pack])
1340
 
        cur_packs = packs._pack_transport.list_dir('.')
1341
 
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
1342
 
        # Note that while we set clear_obsolete_packs=True, it should not
1343
 
        # delete a pack file that we have also scheduled for obsoletion.
1344
 
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
1345
 
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
1346
 
        self.assertEqual([pack.name], sorted(obsolete_names))
1347
 
 
1348
 
 
1349
914
 
1350
915
class TestPack(TestCaseWithTransport):
1351
916
    """Tests for the Pack object."""
1405
970
        pack_transport = self.get_transport('pack')
1406
971
        index_transport = self.get_transport('index')
1407
972
        upload_transport.mkdir('.')
1408
 
        collection = pack_repo.RepositoryPackCollection(
1409
 
            repo=None,
1410
 
            transport=self.get_transport('.'),
1411
 
            index_transport=index_transport,
1412
 
            upload_transport=upload_transport,
1413
 
            pack_transport=pack_transport,
1414
 
            index_builder_class=BTreeBuilder,
1415
 
            index_class=BTreeGraphIndex,
1416
 
            use_chk_index=False)
1417
 
        pack = pack_repo.NewPack(collection)
1418
 
        self.addCleanup(pack.abort) # Make sure the write stream gets closed
1419
 
        self.assertIsInstance(pack.revision_index, BTreeBuilder)
1420
 
        self.assertIsInstance(pack.inventory_index, BTreeBuilder)
1421
 
        self.assertIsInstance(pack._hash, type(osutils.md5()))
 
973
        pack = pack_repo.NewPack(upload_transport, index_transport,
 
974
            pack_transport)
 
975
        self.assertIsInstance(pack.revision_index, InMemoryGraphIndex)
 
976
        self.assertIsInstance(pack.inventory_index, InMemoryGraphIndex)
 
977
        self.assertIsInstance(pack._hash, type(md5.new()))
1422
978
        self.assertTrue(pack.upload_transport is upload_transport)
1423
979
        self.assertTrue(pack.index_transport is index_transport)
1424
980
        self.assertTrue(pack.pack_transport is pack_transport)
1431
987
class TestPacker(TestCaseWithTransport):
    """Tests for the packs repository Packer class."""

    def test_pack_optimizes_pack_order(self):
        """Packing selected revisions moves their packs to the front.

        Builds four revisions (one pack file each, since every commit makes a
        new pack), then asks a KnitPacker to repack only B and C and checks
        that the packer reordered its ``packs`` attribute so the packs holding
        B and C come first.
        """
        builder = self.make_branch_builder('.', format="1.9")
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('f', 'f-id', 'file', 'content\n'))])
        builder.build_snapshot('B', ['A'],
            [('modify', ('f-id', 'new-content\n'))])
        builder.build_snapshot('C', ['B'],
            [('modify', ('f-id', 'third-content\n'))])
        builder.build_snapshot('D', ['C'],
            [('modify', ('f-id', 'fourth-content\n'))])
        b = builder.get_branch()
        b.lock_read()
        builder.finish_series()
        self.addCleanup(b.unlock)
        # At this point, we should have 4 pack files available
        # Because of how they were built, they correspond to
        # ['D', 'C', 'B', 'A']
        packs = b.repository._pack_collection.packs
        packer = knitpack_repo.KnitPacker(b.repository._pack_collection,
                                  packs, 'testing',
                                  revision_ids=['B', 'C'])
        # Now, when we are copying the B & C revisions, their pack files should
        # be moved to the front of the stack
        # The new ordering moves B & C to the front of the .packs attribute,
        # and leaves the others in the original order.
        new_packs = [packs[1], packs[2], packs[0], packs[3]]
        new_pack = packer.pack()
        self.assertEqual(new_packs, packer.packs)
1464
 
 
1465
 
 
1466
 
class TestOptimisingPacker(TestCaseWithTransport):
    """Tests for the OptimisingPacker class."""

    def get_pack_collection(self):
        """Return the pack collection of a freshly created repository."""
        return self.make_repository('.')._pack_collection

    def test_open_pack_will_optimise(self):
        """Packs opened by an optimising packer request size-optimised indices."""
        packer = knitpack_repo.OptimisingKnitPacker(self.get_pack_collection(),
                                            [], '.test')
        opened_pack = packer.open_pack()
        self.addCleanup(opened_pack.abort)  # ensure cleanup
        self.assertIsInstance(opened_pack, pack_repo.NewPack)
        # Every index builder on the new pack must be flagged for size
        # optimisation.
        for index in (opened_pack.revision_index,
                      opened_pack.inventory_index,
                      opened_pack.text_index,
                      opened_pack.signature_index):
            self.assertTrue(index._optimize_for_size)
1483
 
 
1484
 
 
1485
 
class TestGCCHKPacker(TestCaseWithTransport):
    """Tests for the groupcompress/CHK (2a-style) Packer."""

    def make_abc_branch(self):
        # Build three linear revisions A -> B -> C: A adds a file, B adds a
        # directory, C modifies the file.
        builder = self.make_branch_builder('source')
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('file', 'file-id', 'file', 'content\n')),
            ])
        builder.build_snapshot('B', ['A'], [
            ('add', ('dir', 'dir-id', 'directory', None))])
        builder.build_snapshot('C', ['B'], [
            ('modify', ('file-id', 'new content\n'))])
        builder.finish_series()
        return builder.get_branch()

    def make_branch_with_disjoint_inventory_and_revision(self):
        """a repo with separate packs for a revisions Revision and Inventory.

        There will be one pack file that holds the Revision content, and one
        for the Inventory content.

        :return: (repository,
                  pack_name_with_rev_A_Revision,
                  pack_name_with_rev_A_Inventory,
                  pack_name_with_rev_C_content)
        """
        b_source = self.make_abc_branch()
        b_base = b_source.bzrdir.sprout('base', revision_id='A').open_branch()
        b_stacked = b_base.bzrdir.sprout('stacked', stacked=True).open_branch()
        b_stacked.lock_write()
        self.addCleanup(b_stacked.unlock)
        # Fetching 'B' into the stacked branch brings B's revision but only
        # references A's inventory via the fallback, splitting them apart.
        b_stacked.fetch(b_source, 'B')
        # Now re-open the stacked repo directly (no fallbacks) so that we can
        # fill in the A rev.
        repo_not_stacked = b_stacked.bzrdir.open_repository()
        repo_not_stacked.lock_write()
        self.addCleanup(repo_not_stacked.unlock)
        # Now we should have a pack file with A's inventory, but not its
        # Revision
        self.assertEqual([('A',), ('B',)],
                         sorted(repo_not_stacked.inventories.keys()))
        self.assertEqual([('B',)],
                         sorted(repo_not_stacked.revisions.keys()))
        stacked_pack_names = repo_not_stacked._pack_collection.names()
        # We have a couple names here, figure out which has A's inventory
        for name in stacked_pack_names:
            pack = repo_not_stacked._pack_collection.get_pack_by_name(name)
            keys = [n[1] for n in pack.inventory_index.iter_all_entries()]
            if ('A',) in keys:
                inv_a_pack_name = name
                break
        else:
            self.fail('Could not find pack containing A\'s inventory')
        # Fetching A now creates exactly one new pack holding A's Revision.
        repo_not_stacked.fetch(b_source.repository, 'A')
        self.assertEqual([('A',), ('B',)],
                         sorted(repo_not_stacked.revisions.keys()))
        new_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_a_pack_names = new_pack_names.difference(stacked_pack_names)
        self.assertEqual(1, len(rev_a_pack_names))
        rev_a_pack_name = list(rev_a_pack_names)[0]
        # Now fetch 'C', so we have a couple pack files to join
        repo_not_stacked.fetch(b_source.repository, 'C')
        rev_c_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_c_pack_names = rev_c_pack_names.difference(new_pack_names)
        self.assertEqual(1, len(rev_c_pack_names))
        rev_c_pack_name = list(rev_c_pack_names)[0]
        return (repo_not_stacked, rev_a_pack_name, inv_a_pack_name,
                rev_c_pack_name)

    def test_pack_with_distant_inventories(self):
        # See https://bugs.launchpad.net/bzr/+bug/437003
        # When repacking, it is possible to have an inventory in a different
        # pack file than the associated revision. An autopack can then come
        # along, and miss that inventory, and complain.
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        a_pack = repo._pack_collection.get_pack_by_name(rev_a_pack_name)
        c_pack = repo._pack_collection.get_pack_by_name(rev_c_pack_name)
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
                    [a_pack, c_pack], '.test-pack')
        # This would raise ValueError in bug #437003, but should not raise an
        # error once fixed.
        packer.pack()

    def test_pack_with_missing_inventory(self):
        # Similar to test_pack_with_distant_inventories, but this time, we
        # force the A inventory to actually be gone from the repository.
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        inv_a_pack = repo._pack_collection.get_pack_by_name(inv_a_pack_name)
        repo._pack_collection._remove_pack_from_memory(inv_a_pack)
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
            repo._pack_collection.all_packs(), '.test-pack')
        # With the inventory truly missing, packing must fail loudly rather
        # than silently produce a broken pack.
        e = self.assertRaises(ValueError, packer.pack)
        packer.new_pack.abort()
        self.assertContainsRe(str(e),
            r"We are missing inventories for revisions: .*'A'")
1583
 
 
1584
 
 
1585
 
class TestCrossFormatPacks(TestCaseWithTransport):
    """Check when a cross-format fetch triggers a pack() of the target."""

    def log_pack(self, hint=None):
        # Stand-in for Repository.pack: records each call (and its hint) in
        # self.calls before delegating to the real implementation saved in
        # self.orig_pack.
        self.calls.append(('pack', hint))
        self.orig_pack(hint=hint)
        if self.expect_hint:
            self.assertTrue(hint)

    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
        """Fetch via the stream sink; assert whether pack() was invoked.

        :param src_fmt: format string for the source branch.
        :param target_fmt: format string for the target repository.
        :param expect_pack_called: whether inserting the stream is expected to
            trigger exactly one pack() call on the target.
        """
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository._get_source(target._format)
        # Intercept pack() so we can count invocations.
        self.orig_pack = target.pack
        self.overrideAttr(target, "pack", self.log_pack)
        search = target.search_missing_revision_ids(
            source_tree.branch.repository, revision_ids=[tip])
        stream = source.get_stream(search)
        from_format = source_tree.branch.repository._format
        sink = target._get_sink()
        sink.insert_stream(stream, from_format, [])
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
        """Fetch via Repository.fetch; assert whether pack() was invoked.

        Same parameters as run_stream, but exercises the higher-level
        InterRepository fetch path instead of the raw stream sink.
        """
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository
        # Intercept pack() so we can count invocations.
        self.orig_pack = target.pack
        self.overrideAttr(target, "pack", self.log_pack)
        target.fetch(source)
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def test_sink_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_stream('1.9', 'rich-root-pack', False)

    def test_sink_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_stream('1.9', '2a', True)

    def test_sink_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_stream('2a', '2a', False)

    def test_IDS_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_fetch('1.9', 'rich-root-pack', False)

    def test_IDS_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_fetch('1.9', '2a', True)

    def test_IDS_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_fetch('2a', '2a', False)
 
990
    # To date, this class has been factored out and nothing new added to it;
 
991
    # thus there are not yet any tests.
 
992
 
 
993
 
 
994
class TestInterDifferingSerializer(TestCaseWithTransport):
    """Tests for the InterDifferingSerializer fetch path."""

    def test_progress_bar(self):
        # Fetching via InterDifferingSerializer should report progress as
        # 'Transferring revisions' with count/total matching the number of
        # revisions actually copied.
        tree = self.make_branch_and_tree('tree')
        tree.commit('rev1', rev_id='rev-1')
        tree.commit('rev2', rev_id='rev-2')
        tree.commit('rev3', rev_id='rev-3')
        repo = self.make_repository('repo')
        inter_repo = repository.InterDifferingSerializer(
            tree.branch.repository, repo)
        pb = progress.InstrumentedProgress(to_file=StringIO())
        # never_throttle ensures every update is recorded, not rate-limited.
        pb.never_throttle = True
        inter_repo.fetch('rev-1', pb)
        self.assertEqual('Transferring revisions', pb.last_msg)
        self.assertEqual(1, pb.last_cnt)
        self.assertEqual(1, pb.last_total)
        # rev-2 and rev-3 are still missing, so this fetch copies two.
        inter_repo.fetch('rev-3', pb)
        self.assertEqual(2, pb.last_cnt)
        self.assertEqual(2, pb.last_total)