~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/tests/test_repository.py

  • Committer: Aaron Bentley
  • Date: 2009-06-19 21:16:31 UTC
  • mto: This revision was merged to the branch mainline in revision 4481.
  • Revision ID: aaron@aaronbentley.com-20090619211631-4fnkv2uui98xj7ux
Provide control over switch and shelver messaging.
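(A hedged aside, not part of the original page: assuming a local branch of bzr.dev that contains this revision, the change it makes to this file can be inspected with bzr's own diff command, using the revision id from the header above:

    bzr diff -c revid:aaron@aaronbentley.com-20090619211631-4fnkv2uui98xj7ux bzrlib/tests/test_repository.py
)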

=== modified file 'bzrlib/tests/test_repository.py'
@@ -1 +1 @@
-# Copyright (C) 2006-2011 Canonical Ltd
+# Copyright (C) 2006, 2007, 2008, 2009 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -23 +23 @@
 """
 
 from stat import S_ISDIR
+from StringIO import StringIO
 
 import bzrlib
-from bzrlib.errors import (
-    UnknownFormatError,
-    UnsupportedFormatError,
-    )
-from bzrlib import (
-    btree_index,
-    graph,
-    symbol_versioning,
-    tests,
-    transport,
-    )
+from bzrlib.errors import (NotBranchError,
+                           NoSuchFile,
+                           UnknownFormatError,
+                           UnsupportedFormatError,
+                           )
+from bzrlib import graph
+from bzrlib.branchbuilder import BranchBuilder
 from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
-from bzrlib.index import GraphIndex
+from bzrlib.index import GraphIndex, InMemoryGraphIndex
 from bzrlib.repository import RepositoryFormat
+from bzrlib.smart import server
 from bzrlib.tests import (
     TestCase,
     TestCaseWithTransport,
-    )
+    TestSkipped,
+    test_knit,
+    )
+from bzrlib.transport import (
+    fakenfs,
+    get_transport,
+    )
+from bzrlib.transport.memory import MemoryServer
 from bzrlib import (
+    bencode,
     bzrdir,
     errors,
     inventory,
     osutils,
+    progress,
     repository,
     revision as _mod_revision,
+    symbol_versioning,
     upgrade,
-    versionedfile,
-    vf_repository,
     workingtree,
     )
 from bzrlib.repofmt import (
     groupcompress_repo,
     knitrepo,
-    knitpack_repo,
     pack_repo,
+    weaverepo,
     )
 
 
@@ -68 +74 @@
     def test_get_set_default_format(self):
         old_default = bzrdir.format_registry.get('default')
         private_default = old_default().repository_format.__class__
-        old_format = repository.format_registry.get_default()
+        old_format = repository.RepositoryFormat.get_default_format()
         self.assertTrue(isinstance(old_format, private_default))
         def make_sample_bzrdir():
             my_bzrdir = bzrdir.BzrDirMetaFormat1()
@@ -88 +94 @@
             bzrdir.format_registry.remove('default')
             bzrdir.format_registry.remove('sample')
             bzrdir.format_registry.register('default', old_default, '')
-        self.assertIsInstance(repository.format_registry.get_default(),
+        self.assertIsInstance(repository.RepositoryFormat.get_default_format(),
                               old_format.__class__)
 
 
@@ -116 +122 @@
         return "opened repository."
 
 
-class SampleExtraRepositoryFormat(repository.RepositoryFormat):
-    """A sample format that can not be used in a metadir
-
-    """
-
-    def get_format_string(self):
-        raise NotImplementedError
-
-
 class TestRepositoryFormat(TestCaseWithTransport):
     """Tests for the Repository format detection used by the bzr meta dir facility.BzrBranchFormat facility."""
 
@@ -136 +133 @@
         def check_format(format, url):
             dir = format._matchingbzrdir.initialize(url)
             format.initialize(dir)
-            t = transport.get_transport(url)
+            t = get_transport(url)
             found_format = repository.RepositoryFormat.find_format(dir)
-            self.assertIsInstance(found_format, format.__class__)
-        check_format(repository.format_registry.get_default(), "bar")
+            self.failUnless(isinstance(found_format, format.__class__))
+        check_format(weaverepo.RepositoryFormat7(), "bar")
 
     def test_find_format_no_repository(self):
         dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
@@ -155 +152 @@
                           dir)
 
     def test_register_unregister_format(self):
-        # Test deprecated format registration functions
         format = SampleRepositoryFormat()
         # make a control dir
         dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
         # make a repo
         format.initialize(dir)
         # register a format for it.
-        self.applyDeprecated(symbol_versioning.deprecated_in((2, 4, 0)),
-            repository.RepositoryFormat.register_format, format)
+        repository.RepositoryFormat.register_format(format)
         # which repository.Open will refuse (not supported)
-        self.assertRaises(UnsupportedFormatError, repository.Repository.open,
-            self.get_url())
+        self.assertRaises(UnsupportedFormatError, repository.Repository.open, self.get_url())
         # but open(unsupported) will work
         self.assertEqual(format.open(dir), "opened repository.")
         # unregister the format
-        self.applyDeprecated(symbol_versioning.deprecated_in((2, 4, 0)),
-            repository.RepositoryFormat.unregister_format, format)
-
-
-class TestRepositoryFormatRegistry(TestCase):
-
-    def setUp(self):
-        super(TestRepositoryFormatRegistry, self).setUp()
-        self.registry = repository.RepositoryFormatRegistry()
-
-    def test_register_unregister_format(self):
-        format = SampleRepositoryFormat()
-        self.registry.register(format)
-        self.assertEquals(format, self.registry.get("Sample .bzr repository format."))
-        self.registry.remove(format)
-        self.assertRaises(KeyError, self.registry.get, "Sample .bzr repository format.")
-
-    def test_get_all(self):
-        format = SampleRepositoryFormat()
-        self.assertEquals([], self.registry._get_all())
-        self.registry.register(format)
-        self.assertEquals([format], self.registry._get_all())
-
-    def test_register_extra(self):
-        format = SampleExtraRepositoryFormat()
-        self.assertEquals([], self.registry._get_all())
-        self.registry.register_extra(format)
-        self.assertEquals([format], self.registry._get_all())
-
-    def test_register_extra_lazy(self):
-        self.assertEquals([], self.registry._get_all())
-        self.registry.register_extra_lazy("bzrlib.tests.test_repository",
-            "SampleExtraRepositoryFormat")
-        formats = self.registry._get_all()
-        self.assertEquals(1, len(formats))
-        self.assertIsInstance(formats[0], SampleExtraRepositoryFormat)
+        repository.RepositoryFormat.unregister_format(format)
+
+
+class TestFormat6(TestCaseWithTransport):
+
+    def test_attribute__fetch_order(self):
+        """Weaves need topological data insertion."""
+        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
+        repo = weaverepo.RepositoryFormat6().initialize(control)
+        self.assertEqual('topological', repo._format._fetch_order)
+
+    def test_attribute__fetch_uses_deltas(self):
+        """Weaves do not reuse deltas."""
+        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
+        repo = weaverepo.RepositoryFormat6().initialize(control)
+        self.assertEqual(False, repo._format._fetch_uses_deltas)
+
+    def test_attribute__fetch_reconcile(self):
+        """Weave repositories need a reconcile after fetch."""
+        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
+        repo = weaverepo.RepositoryFormat6().initialize(control)
+        self.assertEqual(True, repo._format._fetch_reconcile)
+
+    def test_no_ancestry_weave(self):
+        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
+        repo = weaverepo.RepositoryFormat6().initialize(control)
+        # We no longer need to create the ancestry.weave file
+        # since it is *never* used.
+        self.assertRaises(NoSuchFile,
+                          control.transport.get,
+                          'ancestry.weave')
+
+    def test_supports_external_lookups(self):
+        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
+        repo = weaverepo.RepositoryFormat6().initialize(control)
+        self.assertFalse(repo._format.supports_external_lookups)
+
+
+class TestFormat7(TestCaseWithTransport):
+
+    def test_attribute__fetch_order(self):
+        """Weaves need topological data insertion."""
+        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
+        repo = weaverepo.RepositoryFormat7().initialize(control)
+        self.assertEqual('topological', repo._format._fetch_order)
+
+    def test_attribute__fetch_uses_deltas(self):
+        """Weaves do not reuse deltas."""
+        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
+        repo = weaverepo.RepositoryFormat7().initialize(control)
+        self.assertEqual(False, repo._format._fetch_uses_deltas)
+
+    def test_attribute__fetch_reconcile(self):
+        """Weave repositories need a reconcile after fetch."""
+        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
+        repo = weaverepo.RepositoryFormat7().initialize(control)
+        self.assertEqual(True, repo._format._fetch_reconcile)
+
+    def test_disk_layout(self):
+        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
+        repo = weaverepo.RepositoryFormat7().initialize(control)
+        # in case of side effects of locking.
+        repo.lock_write()
+        repo.unlock()
+        # we want:
+        # format 'Bazaar-NG Repository format 7'
+        # lock ''
+        # inventory.weave == empty_weave
+        # empty revision-store directory
+        # empty weaves directory
+        t = control.get_repository_transport(None)
+        self.assertEqualDiff('Bazaar-NG Repository format 7',
+                             t.get('format').read())
+        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
+        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
+        self.assertEqualDiff('# bzr weave file v5\n'
+                             'w\n'
+                             'W\n',
+                             t.get('inventory.weave').read())
+        # Creating a file with id Foo:Bar results in a non-escaped file name on
+        # disk.
+        control.create_branch()
+        tree = control.create_workingtree()
+        tree.add(['foo'], ['Foo:Bar'], ['file'])
+        tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
+        tree.commit('first post', rev_id='first')
+        self.assertEqualDiff(
+            '# bzr weave file v5\n'
+            'i\n'
+            '1 7fe70820e08a1aac0ef224d9c66ab66831cc4ab1\n'
+            'n first\n'
+            '\n'
+            'w\n'
+            '{ 0\n'
+            '. content\n'
+            '}\n'
+            'W\n',
+            t.get('weaves/74/Foo%3ABar.weave').read())
+
+    def test_shared_disk_layout(self):
+        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
+        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
+        # we want:
+        # format 'Bazaar-NG Repository format 7'
+        # inventory.weave == empty_weave
+        # empty revision-store directory
+        # empty weaves directory
+        # a 'shared-storage' marker file.
+        # lock is not present when unlocked
+        t = control.get_repository_transport(None)
+        self.assertEqualDiff('Bazaar-NG Repository format 7',
+                             t.get('format').read())
+        self.assertEqualDiff('', t.get('shared-storage').read())
+        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
+        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
+        self.assertEqualDiff('# bzr weave file v5\n'
+                             'w\n'
+                             'W\n',
+                             t.get('inventory.weave').read())
+        self.assertFalse(t.has('branch-lock'))
+
+    def test_creates_lockdir(self):
+        """Make sure it appears to be controlled by a LockDir existence"""
+        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
+        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
+        t = control.get_repository_transport(None)
+        # TODO: Should check there is a 'lock' toplevel directory,
+        # regardless of contents
+        self.assertFalse(t.has('lock/held/info'))
+        repo.lock_write()
+        try:
+            self.assertTrue(t.has('lock/held/info'))
+        finally:
+            # unlock so we don't get a warning about failing to do so
+            repo.unlock()
+
+    def test_uses_lockdir(self):
+        """repo format 7 actually locks on lockdir"""
+        base_url = self.get_url()
+        control = bzrdir.BzrDirMetaFormat1().initialize(base_url)
+        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
+        t = control.get_repository_transport(None)
+        repo.lock_write()
+        repo.unlock()
+        del repo
+        # make sure the same lock is created by opening it
+        repo = repository.Repository.open(base_url)
+        repo.lock_write()
+        self.assertTrue(t.has('lock/held/info'))
+        repo.unlock()
+        self.assertFalse(t.has('lock/held/info'))
+
+    def test_shared_no_tree_disk_layout(self):
+        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
+        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
+        repo.set_make_working_trees(False)
+        # we want:
+        # format 'Bazaar-NG Repository format 7'
+        # lock ''
+        # inventory.weave == empty_weave
+        # empty revision-store directory
+        # empty weaves directory
+        # a 'shared-storage' marker file.
+        t = control.get_repository_transport(None)
+        self.assertEqualDiff('Bazaar-NG Repository format 7',
+                             t.get('format').read())
+        ## self.assertEqualDiff('', t.get('lock').read())
+        self.assertEqualDiff('', t.get('shared-storage').read())
+        self.assertEqualDiff('', t.get('no-working-trees').read())
+        repo.set_make_working_trees(True)
+        self.assertFalse(t.has('no-working-trees'))
+        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
+        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
+        self.assertEqualDiff('# bzr weave file v5\n'
+                             'w\n'
+                             'W\n',
+                             t.get('inventory.weave').read())
+
+    def test_supports_external_lookups(self):
+        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
+        repo = weaverepo.RepositoryFormat7().initialize(control)
+        self.assertFalse(repo._format.supports_external_lookups)
 
 
 class TestFormatKnit1(TestCaseWithTransport):
@@ -313 +453 @@
         repo = self.make_repository('.',
                 format=bzrdir.format_registry.get('knit')())
         inv_xml = '<inventory format="5">\n</inventory>\n'
-        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
+        inv = repo.deserialise_inventory('test-rev-id', inv_xml)
         self.assertEqual('test-rev-id', inv.root.revision)
 
     def test_deserialise_uses_global_revision_id(self):
@@ -325 +465 @@
         # Arguably, the deserialise_inventory should detect a mismatch, and
         # raise an error, rather than silently using one revision_id over the
         # other.
-        self.assertRaises(AssertionError, repo._deserialise_inventory,
+        self.assertRaises(AssertionError, repo.deserialise_inventory,
             'test-rev-id', inv_xml)
-        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
+        inv = repo.deserialise_inventory('other-rev-id', inv_xml)
         self.assertEqual('other-rev-id', inv.root.revision)
 
     def test_supports_external_lookups(self):
@@ -343 +483 @@
     _serializer = None
 
     def supports_rich_root(self):
-        if self._format is not None:
-            return self._format.rich_root_data
         return False
 
     def get_graph(self):
@@ -379 +517 @@
         # classes do not barf inappropriately when a surprising repository type
         # is handed to them.
         dummy_a = DummyRepository()
-        dummy_a._format = RepositoryFormat()
-        dummy_a._format.supports_full_versioned_files = True
         dummy_b = DummyRepository()
-        dummy_b._format = RepositoryFormat()
-        dummy_b._format.supports_full_versioned_files = True
         self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
 
     def assertGetsDefaultInterRepository(self, repo_a, repo_b):
@@ -393 +527 @@
         no actual sane default in the presence of incompatible data models.
         """
         inter_repo = repository.InterRepository.get(repo_a, repo_b)
-        self.assertEqual(vf_repository.InterSameDataRepository,
+        self.assertEqual(repository.InterSameDataRepository,
                         inter_repo.__class__)
         self.assertEqual(repo_a, inter_repo.source)
         self.assertEqual(repo_b, inter_repo.target)
@@ -405 +539 @@
         # pair that it returns true on for the is_compatible static method
         # check
         dummy_a = DummyRepository()
-        dummy_a._format = RepositoryFormat()
         dummy_b = DummyRepository()
-        dummy_b._format = RepositoryFormat()
         repo = self.make_repository('.')
         # hack dummies to look like repo somewhat.
         dummy_a._serializer = repo._serializer
-        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
-        dummy_a._format.rich_root_data = repo._format.rich_root_data
-        dummy_a._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
         dummy_b._serializer = repo._serializer
-        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
-        dummy_b._format.rich_root_data = repo._format.rich_root_data
-        dummy_b._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
         repository.InterRepository.register_optimiser(InterDummy)
         try:
             # we should get the default for something InterDummy returns False
@@ -436 +562 @@
         self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
 
 
-class TestRepositoryFormat1(knitrepo.RepositoryFormatKnit1):
-
-    def get_format_string(self):
-        return "Test Format 1"
-
-
-class TestRepositoryFormat2(knitrepo.RepositoryFormatKnit1):
-
-    def get_format_string(self):
-        return "Test Format 2"
+class TestInterWeaveRepo(TestCaseWithTransport):
+
+    def test_is_compatible_and_registered(self):
+        # InterWeaveRepo is compatible when either side
+        # is a format 5/6/7 branch
+        from bzrlib.repofmt import knitrepo, weaverepo
+        formats = [weaverepo.RepositoryFormat5(),
+                   weaverepo.RepositoryFormat6(),
+                   weaverepo.RepositoryFormat7()]
+        incompatible_formats = [weaverepo.RepositoryFormat4(),
+                                knitrepo.RepositoryFormatKnit1(),
+                                ]
+        repo_a = self.make_repository('a')
+        repo_b = self.make_repository('b')
+        is_compatible = repository.InterWeaveRepo.is_compatible
+        for source in incompatible_formats:
+            # force incompatible left then right
+            repo_a._format = source
+            repo_b._format = formats[0]
+            self.assertFalse(is_compatible(repo_a, repo_b))
+            self.assertFalse(is_compatible(repo_b, repo_a))
+        for source in formats:
+            repo_a._format = source
+            for target in formats:
+                repo_b._format = target
+                self.assertTrue(is_compatible(repo_a, repo_b))
+        self.assertEqual(repository.InterWeaveRepo,
+                         repository.InterRepository.get(repo_a,
+                                                        repo_b).__class__)
 
 
 class TestRepositoryConverter(TestCaseWithTransport):
 
     def test_convert_empty(self):
-        source_format = TestRepositoryFormat1()
-        target_format = TestRepositoryFormat2()
-        repository.format_registry.register(source_format)
-        self.addCleanup(repository.format_registry.remove,
-            source_format)
-        repository.format_registry.register(target_format)
-        self.addCleanup(repository.format_registry.remove,
-            target_format)
-        t = self.get_transport()
+        t = get_transport(self.get_url('.'))
         t.mkdir('repository')
         repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
-        repo = TestRepositoryFormat1().initialize(repo_dir)
+        repo = weaverepo.RepositoryFormat7().initialize(repo_dir)
+        target_format = knitrepo.RepositoryFormatKnit1()
         converter = repository.CopyConverter(target_format)
         pb = bzrlib.ui.ui_factory.nested_progress_bar()
         try:
@@ -473 +611 @@
         self.assertTrue(isinstance(target_format, repo._format.__class__))
 
 
+class TestMisc(TestCase):
+
+    def test_unescape_xml(self):
+        """We get some kind of error when malformed entities are passed"""
+        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;')
+
+
 class TestRepositoryFormatKnit3(TestCaseWithTransport):
 
     def test_attribute__fetch_order(self):
@@ -525 +670 @@
         self.assertFalse(repo._format.supports_external_lookups)
 
 
-class Test2a(tests.TestCaseWithMemoryTransport):
-
-    def test_chk_bytes_uses_custom_btree_parser(self):
-        mt = self.make_branch_and_memory_tree('test', format='2a')
-        mt.lock_write()
-        self.addCleanup(mt.unlock)
-        mt.add([''], ['root-id'])
-        mt.commit('first')
-        index = mt.branch.repository.chk_bytes._index._graph_index._indices[0]
-        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
-        # It should also work if we re-open the repo
-        repo = mt.branch.repository.bzrdir.open_repository()
-        repo.lock_read()
-        self.addCleanup(repo.unlock)
-        index = repo.chk_bytes._index._graph_index._indices[0]
-        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
-
-    def test_fetch_combines_groups(self):
-        builder = self.make_branch_builder('source', format='2a')
-        builder.start_series()
-        builder.build_snapshot('1', None, [
-            ('add', ('', 'root-id', 'directory', '')),
-            ('add', ('file', 'file-id', 'file', 'content\n'))])
-        builder.build_snapshot('2', ['1'], [
-            ('modify', ('file-id', 'content-2\n'))])
-        builder.finish_series()
-        source = builder.get_branch()
-        target = self.make_repository('target', format='2a')
-        target.fetch(source.repository)
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        details = target.texts._index.get_build_details(
-            [('file-id', '1',), ('file-id', '2',)])
-        file_1_details = details[('file-id', '1')]
-        file_2_details = details[('file-id', '2')]
-        # The index, and what to read off disk, should be the same for both
-        # versions of the file.
-        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
-
-    def test_fetch_combines_groups(self):
-        builder = self.make_branch_builder('source', format='2a')
-        builder.start_series()
-        builder.build_snapshot('1', None, [
-            ('add', ('', 'root-id', 'directory', '')),
-            ('add', ('file', 'file-id', 'file', 'content\n'))])
-        builder.build_snapshot('2', ['1'], [
-            ('modify', ('file-id', 'content-2\n'))])
-        builder.finish_series()
-        source = builder.get_branch()
-        target = self.make_repository('target', format='2a')
-        target.fetch(source.repository)
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        details = target.texts._index.get_build_details(
-            [('file-id', '1',), ('file-id', '2',)])
-        file_1_details = details[('file-id', '1')]
-        file_2_details = details[('file-id', '2')]
-        # The index, and what to read off disk, should be the same for both
-        # versions of the file.
-        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
-
-    def test_fetch_combines_groups(self):
-        builder = self.make_branch_builder('source', format='2a')
-        builder.start_series()
-        builder.build_snapshot('1', None, [
-            ('add', ('', 'root-id', 'directory', '')),
-            ('add', ('file', 'file-id', 'file', 'content\n'))])
-        builder.build_snapshot('2', ['1'], [
-            ('modify', ('file-id', 'content-2\n'))])
-        builder.finish_series()
-        source = builder.get_branch()
-        target = self.make_repository('target', format='2a')
-        target.fetch(source.repository)
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        details = target.texts._index.get_build_details(
-            [('file-id', '1',), ('file-id', '2',)])
-        file_1_details = details[('file-id', '1')]
-        file_2_details = details[('file-id', '2')]
-        # The index, and what to read off disk, should be the same for both
-        # versions of the file.
-        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
-
-    def test_format_pack_compresses_True(self):
-        repo = self.make_repository('repo', format='2a')
-        self.assertTrue(repo._format.pack_compresses)
+class TestDevelopment6(TestCaseWithTransport):
 
     def test_inventories_use_chk_map_with_parent_base_dict(self):
-        tree = self.make_branch_and_memory_tree('repo', format="2a")
-        tree.lock_write()
-        tree.add([''], ['TREE_ROOT'])
+        tree = self.make_branch_and_tree('repo', format="development6-rich-root")
         revid = tree.commit("foo")
-        tree.unlock()
         tree.lock_read()
         self.addCleanup(tree.unlock)
         inv = tree.branch.repository.get_inventory(revid)
@@ -628 +685 @@
         self.assertEqual(65536,
             inv.parent_id_basename_to_file_id._root_node.maximum_size)
 
-    def test_autopack_unchanged_chk_nodes(self):
-        # at 20 unchanged commits, chk pages are packed that are split into
-        # two groups such that the new pack being made doesn't have all its
-        # pages in the source packs (though they are in the repository).
-        # Use a memory backed repository, we don't need to hit disk for this
-        tree = self.make_branch_and_memory_tree('tree', format='2a')
-        tree.lock_write()
-        self.addCleanup(tree.unlock)
-        tree.add([''], ['TREE_ROOT'])
-        for pos in range(20):
-            tree.commit(str(pos))
-
-    def test_pack_with_hint(self):
-        tree = self.make_branch_and_memory_tree('tree', format='2a')
-        tree.lock_write()
-        self.addCleanup(tree.unlock)
-        tree.add([''], ['TREE_ROOT'])
-        # 1 commit to leave untouched
-        tree.commit('1')
-        to_keep = tree.branch.repository._pack_collection.names()
-        # 2 to combine
-        tree.commit('2')
-        tree.commit('3')
-        all = tree.branch.repository._pack_collection.names()
-        combine = list(set(all) - set(to_keep))
-        self.assertLength(3, all)
-        self.assertLength(2, combine)
-        tree.branch.repository.pack(hint=combine)
-        final = tree.branch.repository._pack_collection.names()
-        self.assertLength(2, final)
-        self.assertFalse(combine[0] in final)
-        self.assertFalse(combine[1] in final)
-        self.assertSubset(to_keep, final)
-
-    def test_stream_source_to_gc(self):
-        source = self.make_repository('source', format='2a')
-        target = self.make_repository('target', format='2a')
-        stream = source._get_source(target._format)
-        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)
-
-    def test_stream_source_to_non_gc(self):
-        source = self.make_repository('source', format='2a')
-        target = self.make_repository('target', format='rich-root-pack')
-        stream = source._get_source(target._format)
-        # We don't want the child GroupCHKStreamSource
-        self.assertIs(type(stream), vf_repository.StreamSource)
-
-    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
-        source_builder = self.make_branch_builder('source',
-                            format='2a')
-        # We have to build a fairly large tree, so that we are sure the chk
-        # pages will have split into multiple pages.
-        entries = [('add', ('', 'a-root-id', 'directory', None))]
-        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
-            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
-                fname = i + j
-                fid = fname + '-id'
-                content = 'content for %s\n' % (fname,)
-                entries.append(('add', (fname, fid, 'file', content)))
-        source_builder.start_series()
-        source_builder.build_snapshot('rev-1', None, entries)
-        # Now change a few of them, so we get a few new pages for the second
-        # revision
-        source_builder.build_snapshot('rev-2', ['rev-1'], [
-            ('modify', ('aa-id', 'new content for aa-id\n')),
-            ('modify', ('cc-id', 'new content for cc-id\n')),
-            ('modify', ('zz-id', 'new content for zz-id\n')),
-            ])
-        source_builder.finish_series()
-        source_branch = source_builder.get_branch()
-        source_branch.lock_read()
-        self.addCleanup(source_branch.unlock)
-        target = self.make_repository('target', format='2a')
-        source = source_branch.repository._get_source(target._format)
-        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)
-
-        # On a regular pass, getting the inventories and chk pages for rev-2
-        # would only get the newly created chk pages
-        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
-                                    set(['rev-2']))
-        simple_chk_records = []
-        for vf_name, substream in source.get_stream(search):
-            if vf_name == 'chk_bytes':
-                for record in substream:
-                    simple_chk_records.append(record.key)
-            else:
-                for _ in substream:
-                    continue
-        # 3 pages, the root (InternalNode), + 2 pages which actually changed
-        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
-                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
-                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
-                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
-                         simple_chk_records)
-        # Now, when we do a similar call using 'get_stream_for_missing_keys'
-        # we should get a much larger set of pages.
-        missing = [('inventories', 'rev-2')]
-        full_chk_records = []
-        for vf_name, substream in source.get_stream_for_missing_keys(missing):
-            if vf_name == 'inventories':
-                for record in substream:
-                    self.assertEqual(('rev-2',), record.key)
-            elif vf_name == 'chk_bytes':
-                for record in substream:
-                    full_chk_records.append(record.key)
-            else:
-                self.fail('Should not be getting a stream of %s' % (vf_name,))
-        # We have 257 records now. This is because we have 1 root page, and 256
-        # leaf pages in a complete listing.
-        self.assertEqual(257, len(full_chk_records))
-        self.assertSubset(simple_chk_records, full_chk_records)
-
-    def test_inconsistency_fatal(self):
-        repo = self.make_repository('repo', format='2a')
-        self.assertTrue(repo.revisions._index._inconsistency_fatal)
-        self.assertFalse(repo.texts._index._inconsistency_fatal)
-        self.assertFalse(repo.inventories._index._inconsistency_fatal)
-        self.assertFalse(repo.signatures._index._inconsistency_fatal)
-        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
-
-
-class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):
-
-    def test_source_to_exact_pack_092(self):
-        source = self.make_repository('source', format='pack-0.92')
-        target = self.make_repository('target', format='pack-0.92')
-        stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
-
-    def test_source_to_exact_pack_rich_root_pack(self):
-        source = self.make_repository('source', format='rich-root-pack')
-        target = self.make_repository('target', format='rich-root-pack')
-        stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
-
-    def test_source_to_exact_pack_19(self):
-        source = self.make_repository('source', format='1.9')
-        target = self.make_repository('target', format='1.9')
-        stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
-
-    def test_source_to_exact_pack_19_rich_root(self):
-        source = self.make_repository('source', format='1.9-rich-root')
-        target = self.make_repository('target', format='1.9-rich-root')
-        stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
-
-    def test_source_to_remote_exact_pack_19(self):
-        trans = self.make_smart_server('target')
-        trans.ensure_base()
-        source = self.make_repository('source', format='1.9')
-        target = self.make_repository('target', format='1.9')
-        target = repository.Repository.open(trans.base)
-        stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)
-
-    def test_stream_source_to_non_exact(self):
-        source = self.make_repository('source', format='pack-0.92')
-        target = self.make_repository('target', format='1.9')
-        stream = source._get_source(target._format)
-        self.assertIs(type(stream), vf_repository.StreamSource)
-
-    def test_stream_source_to_non_exact_rich_root(self):
-        source = self.make_repository('source', format='1.9')
-        target = self.make_repository('target', format='1.9-rich-root')
-        stream = source._get_source(target._format)
-        self.assertIs(type(stream), vf_repository.StreamSource)
-
-    def test_source_to_remote_non_exact_pack_19(self):
-        trans = self.make_smart_server('target')
-        trans.ensure_base()
-        source = self.make_repository('source', format='1.9')
-        target = self.make_repository('target', format='1.6')
-        target = repository.Repository.open(trans.base)
-        stream_source = source._get_source(target._format)
-        self.assertIs(type(stream_source), vf_repository.StreamSource)
-
-    def test_stream_source_to_knit(self):
-        source = self.make_repository('source', format='pack-0.92')
-        target = self.make_repository('target', format='dirstate')
-        stream = source._get_source(target._format)
-        self.assertIs(type(stream), vf_repository.StreamSource)
-
 
 class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
     """Tests for _find_parent_ids_of_revisions."""
 
     def setUp(self):
         super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
-        self.builder = self.make_branch_builder('source')
+        self.builder = self.make_branch_builder('source',
+            format='development6-rich-root')
         self.builder.start_series()
         self.builder.build_snapshot('initial', None,
             [('add', ('', 'tree-root', 'directory', None))])
763
            inv = inventory.Inventory(revision_id='rev1a')
889
764
            inv.root.revision = 'rev1a'
890
765
            self.add_file(repo, inv, 'file1', 'rev1a', [])
891
 
            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
892
766
            repo.add_inventory('rev1a', inv, [])
893
767
            revision = _mod_revision.Revision('rev1a',
894
768
                committer='jrandom@example.com', timestamp=0,
929
803
    def add_revision(self, repo, revision_id, inv, parent_ids):
930
804
        inv.revision_id = revision_id
931
805
        inv.root.revision = revision_id
932
 
        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
933
806
        repo.add_inventory(revision_id, inv, parent_ids)
934
807
        revision = _mod_revision.Revision(revision_id,
935
808
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
952
825
        """
953
826
        broken_repo = self.make_broken_repository()
954
827
        empty_repo = self.make_repository('empty-repo')
955
 
        try:
956
 
            empty_repo.fetch(broken_repo)
957
 
        except (errors.RevisionNotPresent, errors.BzrCheckError):
958
 
            # Test successful: compression parent not being copied leads to
959
 
            # error.
960
 
            return
961
 
        empty_repo.lock_read()
962
 
        self.addCleanup(empty_repo.unlock)
963
 
        text = empty_repo.texts.get_record_stream(
964
 
            [('file2-id', 'rev3')], 'topological', True).next()
965
 
        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
 
828
        self.assertRaises((errors.RevisionNotPresent, errors.BzrCheckError),
 
829
                          empty_repo.fetch, broken_repo)
966
830
 
967
831
 
968
832
class TestRepositoryPackCollection(TestCaseWithTransport):
977
841
 
978
842
    def make_packs_and_alt_repo(self, write_lock=False):
979
843
        """Create a pack repo with 3 packs, and access it via a second repo."""
980
 
        tree = self.make_branch_and_tree('.', format=self.get_format())
 
844
        tree = self.make_branch_and_tree('.')
981
845
        tree.lock_write()
982
846
        self.addCleanup(tree.unlock)
983
847
        rev1 = tree.commit('one')
993
857
        packs.ensure_loaded()
994
858
        return tree, r, packs, [rev1, rev2, rev3]
995
859
 
996
 
    def test__clear_obsolete_packs(self):
997
 
        packs = self.get_packs()
998
 
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
999
 
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
1000
 
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
1001
 
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
1002
 
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
1003
 
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
1004
 
        res = packs._clear_obsolete_packs()
1005
 
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
1006
 
        self.assertEqual([], obsolete_pack_trans.list_dir('.'))
1007
 
 
1008
 
    def test__clear_obsolete_packs_preserve(self):
1009
 
        packs = self.get_packs()
1010
 
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
1011
 
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
1012
 
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
1013
 
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
1014
 
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
1015
 
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
1016
 
        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
1017
 
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
1018
 
        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
1019
 
                         sorted(obsolete_pack_trans.list_dir('.')))
1020
 
 
1021
860
    def test__max_pack_count(self):
1022
861
        """The maximum pack count is a function of the number of revisions."""
1023
862
        # no revisions - one pack, so that we can have a revision free repo
1043
882
        # check some arbitrary big numbers
1044
883
        self.assertEqual(25, packs._max_pack_count(112894))
1045
884
 
1046
 
    def test_repr(self):
1047
 
        packs = self.get_packs()
1048
 
        self.assertContainsRe(repr(packs),
1049
 
            'RepositoryPackCollection(.*Repository(.*))')
1050
 
 
1051
 
    def test__obsolete_packs(self):
1052
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1053
 
        names = packs.names()
1054
 
        pack = packs.get_pack_by_name(names[0])
1055
 
        # Schedule this one for removal
1056
 
        packs._remove_pack_from_memory(pack)
1057
 
        # Simulate a concurrent update by renaming the .pack file and one of
1058
 
        # the indices
1059
 
        packs.transport.rename('packs/%s.pack' % (names[0],),
1060
 
                               'obsolete_packs/%s.pack' % (names[0],))
1061
 
        packs.transport.rename('indices/%s.iix' % (names[0],),
1062
 
                               'obsolete_packs/%s.iix' % (names[0],))
1063
 
        # Now trigger the obsoletion, and ensure that all the remaining files
1064
 
        # are still renamed
1065
 
        packs._obsolete_packs([pack])
1066
 
        self.assertEqual([n + '.pack' for n in names[1:]],
1067
 
                         sorted(packs._pack_transport.list_dir('.')))
1068
 
        # names[0] should not be present in the index anymore
1069
 
        self.assertEqual(names[1:],
1070
 
            sorted(set([osutils.splitext(n)[0] for n in
1071
 
                        packs._index_transport.list_dir('.')])))
1072
 
 
1073
885
    def test_pack_distribution_zero(self):
1074
886
        packs = self.get_packs()
1075
887
        self.assertEqual([0], packs.pack_distribution(0))
1243
1055
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
1244
1056
        self.assertFalse(packs.reload_pack_names())
1245
1057
 
1246
 
    def test_reload_pack_names_preserves_pending(self):
1247
 
        # TODO: Update this to also test for pending-deleted names
1248
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1249
 
        # We will add one pack (via start_write_group + insert_record_stream),
1250
 
        # and remove another pack (via _remove_pack_from_memory)
1251
 
        orig_names = packs.names()
1252
 
        orig_at_load = packs._packs_at_load
1253
 
        to_remove_name = iter(orig_names).next()
1254
 
        r.start_write_group()
1255
 
        self.addCleanup(r.abort_write_group)
1256
 
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
1257
 
            ('text', 'rev'), (), None, 'content\n')])
1258
 
        new_pack = packs._new_pack
1259
 
        self.assertTrue(new_pack.data_inserted())
1260
 
        new_pack.finish()
1261
 
        packs.allocate(new_pack)
1262
 
        packs._new_pack = None
1263
 
        removed_pack = packs.get_pack_by_name(to_remove_name)
1264
 
        packs._remove_pack_from_memory(removed_pack)
1265
 
        names = packs.names()
1266
 
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
1267
 
        new_names = set([x[0][0] for x in new_nodes])
1268
 
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
1269
 
        self.assertEqual(set(names) - set(orig_names), new_names)
1270
 
        self.assertEqual(set([new_pack.name]), new_names)
1271
 
        self.assertEqual([to_remove_name],
1272
 
                         sorted([x[0][0] for x in deleted_nodes]))
1273
 
        packs.reload_pack_names()
1274
 
        reloaded_names = packs.names()
1275
 
        self.assertEqual(orig_at_load, packs._packs_at_load)
1276
 
        self.assertEqual(names, reloaded_names)
1277
 
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
1278
 
        new_names = set([x[0][0] for x in new_nodes])
1279
 
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
1280
 
        self.assertEqual(set(names) - set(orig_names), new_names)
1281
 
        self.assertEqual(set([new_pack.name]), new_names)
1282
 
        self.assertEqual([to_remove_name],
1283
 
                         sorted([x[0][0] for x in deleted_nodes]))
1284
 
 
1285
 
    def test_autopack_obsoletes_new_pack(self):
1286
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1287
 
        packs._max_pack_count = lambda x: 1
1288
 
        packs.pack_distribution = lambda x: [10]
1289
 
        r.start_write_group()
1290
 
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
1291
 
            ('bogus-rev',), (), None, 'bogus-content\n')])
1292
 
        # This should trigger an autopack, which will combine everything into a
1293
 
        # single pack file.
1294
 
        new_names = r.commit_write_group()
1295
 
        names = packs.names()
1296
 
        self.assertEqual(1, len(names))
1297
 
        self.assertEqual([names[0] + '.pack'],
1298
 
                         packs._pack_transport.list_dir('.'))
1299
 
 
1300
1058
    def test_autopack_reloads_and_stops(self):
1301
1059
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1302
1060
        # After we have determined what needs to be autopacked, trigger a
@@ -1314 +1072 @@
         self.assertEqual(tree.branch.repository._pack_collection.names(),
                          packs.names())
 
-    def test__save_pack_names(self):
-        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
-        names = packs.names()
-        pack = packs.get_pack_by_name(names[0])
-        packs._remove_pack_from_memory(pack)
-        packs._save_pack_names(obsolete_packs=[pack])
-        cur_packs = packs._pack_transport.list_dir('.')
-        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
-        # obsolete_packs will also have stuff like .rix and .iix present.
-        obsolete_packs = packs.transport.list_dir('obsolete_packs')
-        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
-        self.assertEqual([pack.name], sorted(obsolete_names))
-
-    def test__save_pack_names_already_obsoleted(self):
-        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
-        names = packs.names()
-        pack = packs.get_pack_by_name(names[0])
-        packs._remove_pack_from_memory(pack)
-        # We are going to simulate a concurrent autopack by manually obsoleting
-        # the pack directly.
-        packs._obsolete_packs([pack])
-        packs._save_pack_names(clear_obsolete_packs=True,
-                               obsolete_packs=[pack])
-        cur_packs = packs._pack_transport.list_dir('.')
-        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
-        # Note that while we set clear_obsolete_packs=True, it should not
-        # delete a pack file that we have also scheduled for obsoletion.
-        obsolete_packs = packs.transport.list_dir('obsolete_packs')
-        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
-        self.assertEqual([pack.name], sorted(obsolete_names))
-
-
-
 
 class TestPack(TestCaseWithTransport):
     """Tests for the Pack object."""
1416
1142
            use_chk_index=False)
1417
1143
        pack = pack_repo.NewPack(collection)
1418
 
        self.addCleanup(pack.abort) # Make sure the write stream gets closed
1419
1144
        self.assertIsInstance(pack.revision_index, BTreeBuilder)
1420
1145
        self.assertIsInstance(pack.inventory_index, BTreeBuilder)
1421
1146
        self.assertIsInstance(pack._hash, type(osutils.md5()))
1432
1157
    """Tests for the packs repository Packer class."""
1433
1158
 
1434
1159
    def test_pack_optimizes_pack_order(self):
1435
 
        builder = self.make_branch_builder('.', format="1.9")
 
1160
        builder = self.make_branch_builder('.')
1436
1161
        builder.start_series()
1437
1162
        builder.build_snapshot('A', None, [
1438
1163
            ('add', ('', 'root-id', 'directory', None)),
1451
1176
        # Because of how they were built, they correspond to
1452
1177
        # ['D', 'C', 'B', 'A']
1453
1178
        packs = b.repository._pack_collection.packs
1454
 
        packer = knitpack_repo.KnitPacker(b.repository._pack_collection,
 
1179
        packer = pack_repo.Packer(b.repository._pack_collection,
1455
1180
                                  packs, 'testing',
1456
1181
                                  revision_ids=['B', 'C'])
1457
1182
        # Now, when we are copying the B & C revisions, their pack files should
1471
1196
        return repo._pack_collection
1472
1197
 
1473
1198
    def test_open_pack_will_optimise(self):
1474
 
        packer = knitpack_repo.OptimisingKnitPacker(self.get_pack_collection(),
 
1199
        packer = pack_repo.OptimisingPacker(self.get_pack_collection(),
1475
1200
                                            [], '.test')
1476
1201
        new_pack = packer.open_pack()
1477
 
        self.addCleanup(new_pack.abort) # ensure cleanup
1478
1202
        self.assertIsInstance(new_pack, pack_repo.NewPack)
1479
1203
        self.assertTrue(new_pack.revision_index._optimize_for_size)
1480
1204
        self.assertTrue(new_pack.inventory_index._optimize_for_size)
1482
1206
        self.assertTrue(new_pack.signature_index._optimize_for_size)
1483
1207
 
1484
1208
 
1485
 
class TestGCCHKPacker(TestCaseWithTransport):
1486
 
 
1487
 
    def make_abc_branch(self):
1488
 
        builder = self.make_branch_builder('source')
1489
 
        builder.start_series()
1490
 
        builder.build_snapshot('A', None, [
1491
 
            ('add', ('', 'root-id', 'directory', None)),
1492
 
            ('add', ('file', 'file-id', 'file', 'content\n')),
 
1209
class TestGCCHKPackCollection(TestCaseWithTransport):
 
1210
 
 
1211
    def test_stream_source_to_gc(self):
 
1212
        source = self.make_repository('source', format='development6-rich-root')
 
1213
        target = self.make_repository('target', format='development6-rich-root')
 
1214
        stream = source._get_source(target._format)
 
1215
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)
 
1216
 
 
1217
    def test_stream_source_to_non_gc(self):
 
1218
        source = self.make_repository('source', format='development6-rich-root')
 
1219
        target = self.make_repository('target', format='rich-root-pack')
 
1220
        stream = source._get_source(target._format)
 
1221
        # We don't want the child GroupCHKStreamSource
 
1222
        self.assertIs(type(stream), repository.StreamSource)
 
1223
 
 
1224
    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
 
1225
        source_builder = self.make_branch_builder('source',
 
1226
                            format='development6-rich-root')
 
1227
        # We have to build a fairly large tree, so that we are sure the chk
 
1228
        # pages will have split into multiple pages.
 
1229
        entries = [('add', ('', 'a-root-id', 'directory', None))]
 
1230
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
 
1231
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
 
1232
                fname = i + j
 
1233
                fid = fname + '-id'
 
1234
                content = 'content for %s\n' % (fname,)
 
1235
                entries.append(('add', (fname, fid, 'file', content)))
 
1236
        source_builder.start_series()
 
1237
        source_builder.build_snapshot('rev-1', None, entries)
 
1238
        # Now change a few of them, so we get a few new pages for the second
 
1239
        # revision
 
1240
        source_builder.build_snapshot('rev-2', ['rev-1'], [
 
1241
            ('modify', ('aa-id', 'new content for aa-id\n')),
 
1242
            ('modify', ('cc-id', 'new content for cc-id\n')),
 
1243
            ('modify', ('zz-id', 'new content for zz-id\n')),
1493
1244
            ])
-        builder.build_snapshot('B', ['A'], [
-            ('add', ('dir', 'dir-id', 'directory', None))])
-        builder.build_snapshot('C', ['B'], [
-            ('modify', ('file-id', 'new content\n'))])
-        builder.finish_series()
-        return builder.get_branch()
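# Note: make_abc_branch builds a minimal linear history A -> B -> C (A adds a
# root and one file, B adds a directory, C modifies the file), so the tests
# below have distinct pieces of content to spread across separate pack files.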
-
-    def make_branch_with_disjoint_inventory_and_revision(self):
-        """a repo with separate packs for a revision's Revision and Inventory.
-
-        There will be one pack file that holds the Revision content, and one
-        for the Inventory content.
-
-        :return: (repository,
-                  pack_name_with_rev_A_Revision,
-                  pack_name_with_rev_A_Inventory,
-                  pack_name_with_rev_C_content)
-        """
-        b_source = self.make_abc_branch()
-        b_base = b_source.bzrdir.sprout('base', revision_id='A').open_branch()
-        b_stacked = b_base.bzrdir.sprout('stacked', stacked=True).open_branch()
-        b_stacked.lock_write()
-        self.addCleanup(b_stacked.unlock)
-        b_stacked.fetch(b_source, 'B')
-        # Now re-open the stacked repo directly (no fallbacks) so that we can
-        # fill in the A rev.
-        repo_not_stacked = b_stacked.bzrdir.open_repository()
-        repo_not_stacked.lock_write()
-        self.addCleanup(repo_not_stacked.unlock)
-        # Now we should have a pack file with A's inventory, but not its
-        # Revision
-        self.assertEqual([('A',), ('B',)],
-                         sorted(repo_not_stacked.inventories.keys()))
-        self.assertEqual([('B',)],
-                         sorted(repo_not_stacked.revisions.keys()))
-        stacked_pack_names = repo_not_stacked._pack_collection.names()
-        # We have a couple names here, figure out which has A's inventory
-        for name in stacked_pack_names:
-            pack = repo_not_stacked._pack_collection.get_pack_by_name(name)
-            keys = [n[1] for n in pack.inventory_index.iter_all_entries()]
-            if ('A',) in keys:
-                inv_a_pack_name = name
-                break
-        else:
-            self.fail('Could not find pack containing A\'s inventory')
-        repo_not_stacked.fetch(b_source.repository, 'A')
-        self.assertEqual([('A',), ('B',)],
-                         sorted(repo_not_stacked.revisions.keys()))
-        new_pack_names = set(repo_not_stacked._pack_collection.names())
-        rev_a_pack_names = new_pack_names.difference(stacked_pack_names)
-        self.assertEqual(1, len(rev_a_pack_names))
-        rev_a_pack_name = list(rev_a_pack_names)[0]
-        # Now fetch 'C', so we have a couple pack files to join
-        repo_not_stacked.fetch(b_source.repository, 'C')
-        rev_c_pack_names = set(repo_not_stacked._pack_collection.names())
-        rev_c_pack_names = rev_c_pack_names.difference(new_pack_names)
-        self.assertEqual(1, len(rev_c_pack_names))
-        rev_c_pack_name = list(rev_c_pack_names)[0]
-        return (repo_not_stacked, rev_a_pack_name, inv_a_pack_name,
-                rev_c_pack_name)
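# Note: the trick above is that fetching 'B' into a stacked branch also pulls
# in A's *inventory* (presumably so deltas at the stacking boundary can be
# resolved locally) but not A's *Revision*; the later direct fetches of 'A'
# and 'C' then each land in a pack of their own, which is what leaves the
# Revision, Inventory and later content in deliberately disjoint pack files.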
-
-    def test_pack_with_distant_inventories(self):
-        # See https://bugs.launchpad.net/bzr/+bug/437003
-        # When repacking, it is possible to have an inventory in a different
-        # pack file than the associated revision. An autopack can then come
-        # along, miss that inventory, and complain.
-        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
-         ) = self.make_branch_with_disjoint_inventory_and_revision()
-        a_pack = repo._pack_collection.get_pack_by_name(rev_a_pack_name)
-        c_pack = repo._pack_collection.get_pack_by_name(rev_c_pack_name)
-        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
-                    [a_pack, c_pack], '.test-pack')
-        # This raised ValueError under bug #437003; once fixed, it should
-        # complete without error.
-        packer.pack()
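# Note: the ValueError in bug #437003 appears to have come from the packer
# cross-checking that every revision it copies has a matching inventory among
# its input packs; A's inventory lives outside [a_pack, c_pack], so the check
# has to consult the whole collection (the next test pins down the error text).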
-
-    def test_pack_with_missing_inventory(self):
-        # Similar to test_pack_with_distant_inventories, but this time we
-        # force the A inventory to actually be gone from the repository.
-        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
-         ) = self.make_branch_with_disjoint_inventory_and_revision()
-        inv_a_pack = repo._pack_collection.get_pack_by_name(inv_a_pack_name)
-        repo._pack_collection._remove_pack_from_memory(inv_a_pack)
-        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
-            repo._pack_collection.all_packs(), '.test-pack')
-        e = self.assertRaises(ValueError, packer.pack)
-        packer.new_pack.abort()
-        self.assertContainsRe(str(e),
-            r"We are missing inventories for revisions: .*'A'")
-
-
-class TestCrossFormatPacks(TestCaseWithTransport):
-
-    def log_pack(self, hint=None):
-        self.calls.append(('pack', hint))
-        self.orig_pack(hint=hint)
-        if self.expect_hint:
-            self.assertTrue(hint)
-
-    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
-        self.expect_hint = expect_pack_called
-        self.calls = []
-        source_tree = self.make_branch_and_tree('src', format=src_fmt)
-        source_tree.lock_write()
-        self.addCleanup(source_tree.unlock)
-        tip = source_tree.commit('foo')
-        target = self.make_repository('target', format=target_fmt)
-        target.lock_write()
-        self.addCleanup(target.unlock)
-        source = source_tree.branch.repository._get_source(target._format)
-        self.orig_pack = target.pack
-        self.overrideAttr(target, "pack", self.log_pack)
-        search = target.search_missing_revision_ids(
-            source_tree.branch.repository, revision_ids=[tip])
-        stream = source.get_stream(search)
-        from_format = source_tree.branch.repository._format
-        sink = target._get_sink()
-        sink.insert_stream(stream, from_format, [])
-        if expect_pack_called:
-            self.assertLength(1, self.calls)
-        else:
-            self.assertLength(0, self.calls)
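# Note: run_stream drives the low-level sink API directly (get_stream /
# insert_stream), while run_fetch below goes through Repository.fetch; the
# 'IDS' in the later test names presumably stands for InterDifferingSerializer,
# the fetch path used between differing formats. Either way, target.pack()
# should only be invoked when the sink's format reports that packing after
# insertion makes a difference, and then with a usable hint.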
-
-    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
-        self.expect_hint = expect_pack_called
-        self.calls = []
-        source_tree = self.make_branch_and_tree('src', format=src_fmt)
-        source_tree.lock_write()
-        self.addCleanup(source_tree.unlock)
-        tip = source_tree.commit('foo')
-        target = self.make_repository('target', format=target_fmt)
-        target.lock_write()
-        self.addCleanup(target.unlock)
-        source = source_tree.branch.repository
-        self.orig_pack = target.pack
-        self.overrideAttr(target, "pack", self.log_pack)
-        target.fetch(source)
-        if expect_pack_called:
-            self.assertLength(1, self.calls)
-        else:
-            self.assertLength(0, self.calls)
-
-    def test_sink_format_hint_no(self):
-        # When the target format says packing makes no difference, pack is not
-        # called.
-        self.run_stream('1.9', 'rich-root-pack', False)
-
-    def test_sink_format_hint_yes(self):
-        # When the target format says packing makes a difference, pack is
-        # called.
-        self.run_stream('1.9', '2a', True)
-
-    def test_sink_format_same_no(self):
-        # When the formats are the same, pack is not called.
-        self.run_stream('2a', '2a', False)
-
-    def test_IDS_format_hint_no(self):
-        # When the target format says packing makes no difference, pack is not
-        # called.
-        self.run_fetch('1.9', 'rich-root-pack', False)
-
-    def test_IDS_format_hint_yes(self):
-        # When the target format says packing makes a difference, pack is
-        # called.
-        self.run_fetch('1.9', '2a', True)
-
-    def test_IDS_format_same_no(self):
-        # When the formats are the same, pack is not called.
-        self.run_fetch('2a', '2a', False)
 
+        source_builder.finish_series()
+        source_branch = source_builder.get_branch()
+        source_branch.lock_read()
+        self.addCleanup(source_branch.unlock)
+        target = self.make_repository('target', format='development6-rich-root')
+        source = source_branch.repository._get_source(target._format)
+        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)
+
+        # On a regular pass, getting the inventories and chk pages for rev-2
+        # would only get the newly created chk pages
+        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
+                                    set(['rev-2']))
+        simple_chk_records = []
+        for vf_name, substream in source.get_stream(search):
+            if vf_name == 'chk_bytes':
+                for record in substream:
+                    simple_chk_records.append(record.key)
+            else:
+                for _ in substream:
+                    continue
+        # 4 pages: the root (InternalNode), + the 3 leaf pages which actually
+        # changed
+        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
+                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
+                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
+                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
+                         simple_chk_records)
+        # Now, when we do a similar call using 'get_stream_for_missing_keys'
+        # we should get a much larger set of pages.
+        missing = [('inventories', 'rev-2')]
+        full_chk_records = []
+        for vf_name, substream in source.get_stream_for_missing_keys(missing):
+            if vf_name == 'inventories':
+                for record in substream:
+                    self.assertEqual(('rev-2',), record.key)
+            elif vf_name == 'chk_bytes':
+                for record in substream:
+                    full_chk_records.append(record.key)
+            else:
+                self.fail('Should not be getting a stream of %s' % (vf_name,))
+        # We have 257 records now. This is because we have 1 root page, and 256
+        # leaf pages in a complete listing.
+        self.assertEqual(257, len(full_chk_records))
+        self.assertSubset(simple_chk_records, full_chk_records)
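# Note: the arithmetic behind the assertions above: the tree holds 35 * 35 =
# 1225 files (two-character names over a 35-character alphabet), whose chk
# entries land in 256 leaf pages; a complete listing therefore streams 1 root
# + 256 leaves = 257 records, and the few pages from the incremental pass must
# be a subset of that full set.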