~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/tests/test_repository.py

  • Committer: Jelmer Vernooij
  • Date: 2010-12-20 11:57:14 UTC
  • mto: This revision was merged to the branch mainline in revision 5577.
  • Revision ID: jelmer@samba.org-20101220115714-2ru3hfappjweeg7q
Don't use no-plugins.

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
 
# Copyright (C) 2006, 2007 Canonical Ltd
 
1
# Copyright (C) 2006-2010 Canonical Ltd
2
2
#
3
3
# This program is free software; you can redistribute it and/or modify
4
4
# it under the terms of the GNU General Public License as published by
12
12
#
13
13
# You should have received a copy of the GNU General Public License
14
14
# along with this program; if not, write to the Free Software
15
 
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 
15
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16
16
 
17
17
"""Tests for the Repository facility that are not interface tests.
18
18
 
19
 
For interface tests see tests/repository_implementations/*.py.
 
19
For interface tests see tests/per_repository/*.py.
20
20
 
21
21
For concrete class tests see this file, and for storage formats tests
22
22
also see this file.
23
23
"""
24
24
 
25
 
import md5
26
25
from stat import S_ISDIR
27
 
from StringIO import StringIO
 
26
import sys
28
27
 
29
28
import bzrlib
30
 
from bzrlib.errors import (NotBranchError,
31
 
                           NoSuchFile,
 
29
from bzrlib.errors import (NoSuchFile,
32
30
                           UnknownFormatError,
33
31
                           UnsupportedFormatError,
34
32
                           )
35
 
from bzrlib.index import GraphIndex, InMemoryGraphIndex
 
33
from bzrlib import (
 
34
    btree_index,
 
35
    graph,
 
36
    tests,
 
37
    )
 
38
from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
 
39
from bzrlib.index import GraphIndex
36
40
from bzrlib.repository import RepositoryFormat
37
 
from bzrlib.smart import server
38
41
from bzrlib.tests import (
39
42
    TestCase,
40
43
    TestCaseWithTransport,
41
 
    test_knit,
42
 
    )
43
 
from bzrlib.transport import get_transport
44
 
from bzrlib.transport.memory import MemoryServer
45
 
from bzrlib.util import bencode
 
44
    )
 
45
from bzrlib.transport import (
 
46
    get_transport,
 
47
    )
46
48
from bzrlib import (
47
49
    bzrdir,
48
50
    errors,
49
51
    inventory,
 
52
    osutils,
50
53
    repository,
51
54
    revision as _mod_revision,
52
 
    symbol_versioning,
53
55
    upgrade,
 
56
    versionedfile,
54
57
    workingtree,
55
58
    )
56
 
from bzrlib.repofmt import knitrepo, weaverepo, pack_repo
 
59
from bzrlib.repofmt import (
 
60
    groupcompress_repo,
 
61
    knitrepo,
 
62
    pack_repo,
 
63
    weaverepo,
 
64
    )
57
65
 
58
66
 
59
67
class TestDefaultFormat(TestCase):
88
96
class SampleRepositoryFormat(repository.RepositoryFormat):
89
97
    """A sample format
90
98
 
91
 
    this format is initializable, unsupported to aid in testing the 
 
99
    this format is initializable, unsupported to aid in testing the
92
100
    open and open(unsupported=True) routines.
93
101
    """
94
102
 
115
123
    def test_find_format(self):
116
124
        # is the right format object found for a repository?
117
125
        # create a branch with a few known format objects.
118
 
        # this is not quite the same as 
 
126
        # this is not quite the same as
119
127
        self.build_tree(["foo/", "bar/"])
120
128
        def check_format(format, url):
121
129
            dir = format._matchingbzrdir.initialize(url)
124
132
            found_format = repository.RepositoryFormat.find_format(dir)
125
133
            self.failUnless(isinstance(found_format, format.__class__))
126
134
        check_format(weaverepo.RepositoryFormat7(), "bar")
127
 
        
 
135
 
128
136
    def test_find_format_no_repository(self):
129
137
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
130
138
        self.assertRaises(errors.NoRepositoryPresent,
156
164
 
157
165
class TestFormat6(TestCaseWithTransport):
158
166
 
 
167
    def test_attribute__fetch_order(self):
 
168
        """Weaves need topological data insertion."""
 
169
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
 
170
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
171
        self.assertEqual('topological', repo._format._fetch_order)
 
172
 
 
173
    def test_attribute__fetch_uses_deltas(self):
 
174
        """Weaves do not reuse deltas."""
 
175
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
 
176
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
177
        self.assertEqual(False, repo._format._fetch_uses_deltas)
 
178
 
 
179
    def test_attribute__fetch_reconcile(self):
 
180
        """Weave repositories need a reconcile after fetch."""
 
181
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
 
182
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
183
        self.assertEqual(True, repo._format._fetch_reconcile)
 
184
 
159
185
    def test_no_ancestry_weave(self):
160
186
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
161
187
        repo = weaverepo.RepositoryFormat6().initialize(control)
165
191
                          control.transport.get,
166
192
                          'ancestry.weave')
167
193
 
168
 
    def test_exposed_versioned_files_are_marked_dirty(self):
 
194
    def test_supports_external_lookups(self):
169
195
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
170
196
        repo = weaverepo.RepositoryFormat6().initialize(control)
171
 
        repo.lock_write()
172
 
        inv = repo.get_inventory_weave()
173
 
        repo.unlock()
174
 
        self.assertRaises(errors.OutSideTransaction,
175
 
            inv.add_lines, 'foo', [], [])
 
197
        self.assertFalse(repo._format.supports_external_lookups)
176
198
 
177
199
 
178
200
class TestFormat7(TestCaseWithTransport):
179
 
    
 
201
 
 
202
    def test_attribute__fetch_order(self):
 
203
        """Weaves need topological data insertion."""
 
204
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
205
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
206
        self.assertEqual('topological', repo._format._fetch_order)
 
207
 
 
208
    def test_attribute__fetch_uses_deltas(self):
 
209
        """Weaves do not reuse deltas."""
 
210
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
211
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
212
        self.assertEqual(False, repo._format._fetch_uses_deltas)
 
213
 
 
214
    def test_attribute__fetch_reconcile(self):
 
215
        """Weave repositories need a reconcile after fetch."""
 
216
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
217
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
218
        self.assertEqual(True, repo._format._fetch_reconcile)
 
219
 
180
220
    def test_disk_layout(self):
181
221
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
182
222
        repo = weaverepo.RepositoryFormat7().initialize(control)
198
238
                             'w\n'
199
239
                             'W\n',
200
240
                             t.get('inventory.weave').read())
 
241
        # Creating a file with id Foo:Bar results in a non-escaped file name on
 
242
        # disk.
 
243
        control.create_branch()
 
244
        tree = control.create_workingtree()
 
245
        tree.add(['foo'], ['Foo:Bar'], ['file'])
 
246
        tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
 
247
        try:
 
248
            tree.commit('first post', rev_id='first')
 
249
        except errors.IllegalPath:
 
250
            if sys.platform != 'win32':
 
251
                raise
 
252
            self.knownFailure('Foo:Bar cannot be used as a file-id on windows'
 
253
                              ' in repo format 7')
 
254
            return
 
255
        self.assertEqualDiff(
 
256
            '# bzr weave file v5\n'
 
257
            'i\n'
 
258
            '1 7fe70820e08a1aac0ef224d9c66ab66831cc4ab1\n'
 
259
            'n first\n'
 
260
            '\n'
 
261
            'w\n'
 
262
            '{ 0\n'
 
263
            '. content\n'
 
264
            '}\n'
 
265
            'W\n',
 
266
            t.get('weaves/74/Foo%3ABar.weave').read())
201
267
 
202
268
    def test_shared_disk_layout(self):
203
269
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
226
292
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
227
293
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
228
294
        t = control.get_repository_transport(None)
229
 
        # TODO: Should check there is a 'lock' toplevel directory, 
 
295
        # TODO: Should check there is a 'lock' toplevel directory,
230
296
        # regardless of contents
231
297
        self.assertFalse(t.has('lock/held/info'))
232
298
        repo.lock_write()
278
344
                             'W\n',
279
345
                             t.get('inventory.weave').read())
280
346
 
281
 
    def test_exposed_versioned_files_are_marked_dirty(self):
 
347
    def test_supports_external_lookups(self):
282
348
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
283
349
        repo = weaverepo.RepositoryFormat7().initialize(control)
284
 
        repo.lock_write()
285
 
        inv = repo.get_inventory_weave()
286
 
        repo.unlock()
287
 
        self.assertRaises(errors.OutSideTransaction,
288
 
            inv.add_lines, 'foo', [], [])
 
350
        self.assertFalse(repo._format.supports_external_lookups)
289
351
 
290
352
 
291
353
class TestFormatKnit1(TestCaseWithTransport):
292
 
    
 
354
 
 
355
    def test_attribute__fetch_order(self):
 
356
        """Knits need topological data insertion."""
 
357
        repo = self.make_repository('.',
 
358
                format=bzrdir.format_registry.get('knit')())
 
359
        self.assertEqual('topological', repo._format._fetch_order)
 
360
 
 
361
    def test_attribute__fetch_uses_deltas(self):
 
362
        """Knits reuse deltas."""
 
363
        repo = self.make_repository('.',
 
364
                format=bzrdir.format_registry.get('knit')())
 
365
        self.assertEqual(True, repo._format._fetch_uses_deltas)
 
366
 
293
367
    def test_disk_layout(self):
294
368
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
295
369
        repo = knitrepo.RepositoryFormatKnit1().initialize(control)
309
383
        # self.assertEqualDiff('', t.get('lock').read())
310
384
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
311
385
        self.check_knits(t)
 
386
        # Check per-file knits.
 
387
        branch = control.create_branch()
 
388
        tree = control.create_workingtree()
 
389
        tree.add(['foo'], ['Nasty-IdC:'], ['file'])
 
390
        tree.put_file_bytes_non_atomic('Nasty-IdC:', '')
 
391
        tree.commit('1st post', rev_id='foo')
 
392
        self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
 
393
            '\nfoo fulltext 0 81  :')
312
394
 
313
 
    def assertHasKnit(self, t, knit_name):
 
395
    def assertHasKnit(self, t, knit_name, extra_content=''):
314
396
        """Assert that knit_name exists on t."""
315
 
        self.assertEqualDiff('# bzr knit index 8\n',
 
397
        self.assertEqualDiff('# bzr knit index 8\n' + extra_content,
316
398
                             t.get(knit_name + '.kndx').read())
317
 
        # no default content
318
 
        self.assertTrue(t.has(knit_name + '.knit'))
319
399
 
320
400
    def check_knits(self, t):
321
401
        """check knit content for a repository."""
365
445
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
366
446
        self.check_knits(t)
367
447
 
368
 
    def test_exposed_versioned_files_are_marked_dirty(self):
369
 
        format = bzrdir.BzrDirMetaFormat1()
370
 
        format.repository_format = knitrepo.RepositoryFormatKnit1()
371
 
        repo = self.make_repository('.', format=format)
372
 
        repo.lock_write()
373
 
        inv = repo.get_inventory_weave()
374
 
        repo.unlock()
375
 
        self.assertRaises(errors.OutSideTransaction,
376
 
            inv.add_lines, 'foo', [], [])
377
 
 
378
448
    def test_deserialise_sets_root_revision(self):
379
449
        """We must have a inventory.root.revision
380
450
 
385
455
        repo = self.make_repository('.',
386
456
                format=bzrdir.format_registry.get('knit')())
387
457
        inv_xml = '<inventory format="5">\n</inventory>\n'
388
 
        inv = repo.deserialise_inventory('test-rev-id', inv_xml)
 
458
        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
389
459
        self.assertEqual('test-rev-id', inv.root.revision)
390
460
 
391
461
    def test_deserialise_uses_global_revision_id(self):
397
467
        # Arguably, the deserialise_inventory should detect a mismatch, and
398
468
        # raise an error, rather than silently using one revision_id over the
399
469
        # other.
400
 
        inv = repo.deserialise_inventory('test-rev-id', inv_xml)
 
470
        self.assertRaises(AssertionError, repo._deserialise_inventory,
 
471
            'test-rev-id', inv_xml)
 
472
        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
401
473
        self.assertEqual('other-rev-id', inv.root.revision)
402
474
 
403
 
 
404
 
class KnitRepositoryStreamTests(test_knit.KnitTests):
405
 
    """Tests for knitrepo._get_stream_as_bytes."""
406
 
 
407
 
    def test_get_stream_as_bytes(self):
408
 
        # Make a simple knit
409
 
        k1 = self.make_test_knit()
410
 
        k1.add_lines('text-a', [], test_knit.split_lines(test_knit.TEXT_1))
411
 
        
412
 
        # Serialise it, check the output.
413
 
        bytes = knitrepo._get_stream_as_bytes(k1, ['text-a'])
414
 
        data = bencode.bdecode(bytes)
415
 
        format, record = data
416
 
        self.assertEqual('knit-plain', format)
417
 
        self.assertEqual(['text-a', ['fulltext'], []], record[:3])
418
 
        self.assertRecordContentEqual(k1, 'text-a', record[3])
419
 
 
420
 
    def test_get_stream_as_bytes_all(self):
421
 
        """Get a serialised data stream for all the records in a knit.
422
 
 
423
 
        Much like test_get_stream_all, except for get_stream_as_bytes.
424
 
        """
425
 
        k1 = self.make_test_knit()
426
 
        # Insert the same data as BasicKnitTests.test_knit_join, as they seem
427
 
        # to cover a range of cases (no parents, one parent, multiple parents).
428
 
        test_data = [
429
 
            ('text-a', [], test_knit.TEXT_1),
430
 
            ('text-b', ['text-a'], test_knit.TEXT_1),
431
 
            ('text-c', [], test_knit.TEXT_1),
432
 
            ('text-d', ['text-c'], test_knit.TEXT_1),
433
 
            ('text-m', ['text-b', 'text-d'], test_knit.TEXT_1),
434
 
           ]
435
 
        expected_data_list = [
436
 
            # version, options, parents
437
 
            ('text-a', ['fulltext'], []),
438
 
            ('text-b', ['line-delta'], ['text-a']),
439
 
            ('text-c', ['fulltext'], []),
440
 
            ('text-d', ['line-delta'], ['text-c']),
441
 
            ('text-m', ['line-delta'], ['text-b', 'text-d']),
442
 
            ]
443
 
        for version_id, parents, lines in test_data:
444
 
            k1.add_lines(version_id, parents, test_knit.split_lines(lines))
445
 
 
446
 
        bytes = knitrepo._get_stream_as_bytes(
447
 
            k1, ['text-a', 'text-b', 'text-c', 'text-d', 'text-m'])
448
 
 
449
 
        data = bencode.bdecode(bytes)
450
 
        format = data.pop(0)
451
 
        self.assertEqual('knit-plain', format)
452
 
 
453
 
        for expected, actual in zip(expected_data_list, data):
454
 
            expected_version = expected[0]
455
 
            expected_options = expected[1]
456
 
            expected_parents = expected[2]
457
 
            version, options, parents, bytes = actual
458
 
            self.assertEqual(expected_version, version)
459
 
            self.assertEqual(expected_options, options)
460
 
            self.assertEqual(expected_parents, parents)
461
 
            self.assertRecordContentEqual(k1, version, bytes)
 
475
    def test_supports_external_lookups(self):
 
476
        repo = self.make_repository('.',
 
477
                format=bzrdir.format_registry.get('knit')())
 
478
        self.assertFalse(repo._format.supports_external_lookups)
462
479
 
463
480
 
464
481
class DummyRepository(object):
465
482
    """A dummy repository for testing."""
466
483
 
 
484
    _format = None
467
485
    _serializer = None
468
486
 
469
487
    def supports_rich_root(self):
 
488
        if self._format is not None:
 
489
            return self._format.rich_root_data
470
490
        return False
471
491
 
 
492
    def get_graph(self):
 
493
        raise NotImplementedError
 
494
 
 
495
    def get_parent_map(self, revision_ids):
 
496
        raise NotImplementedError
 
497
 
472
498
 
473
499
class InterDummy(repository.InterRepository):
474
500
    """An inter-repository optimised code path for DummyRepository.
481
507
    @staticmethod
482
508
    def is_compatible(repo_source, repo_target):
483
509
        """InterDummy is compatible with DummyRepository."""
484
 
        return (isinstance(repo_source, DummyRepository) and 
 
510
        return (isinstance(repo_source, DummyRepository) and
485
511
            isinstance(repo_target, DummyRepository))
486
512
 
487
513
 
500
526
 
501
527
    def assertGetsDefaultInterRepository(self, repo_a, repo_b):
502
528
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default.
503
 
        
 
529
 
504
530
        The effective default is now InterSameDataRepository because there is
505
531
        no actual sane default in the presence of incompatible data models.
506
532
        """
517
543
        # pair that it returns true on for the is_compatible static method
518
544
        # check
519
545
        dummy_a = DummyRepository()
 
546
        dummy_a._format = RepositoryFormat()
520
547
        dummy_b = DummyRepository()
 
548
        dummy_b._format = RepositoryFormat()
521
549
        repo = self.make_repository('.')
522
550
        # hack dummies to look like repo somewhat.
523
551
        dummy_a._serializer = repo._serializer
 
552
        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
 
553
        dummy_a._format.rich_root_data = repo._format.rich_root_data
524
554
        dummy_b._serializer = repo._serializer
 
555
        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
 
556
        dummy_b._format.rich_root_data = repo._format.rich_root_data
525
557
        repository.InterRepository.register_optimiser(InterDummy)
526
558
        try:
527
559
            # we should get the default for something InterDummy returns False
554
586
                                ]
555
587
        repo_a = self.make_repository('a')
556
588
        repo_b = self.make_repository('b')
557
 
        is_compatible = repository.InterWeaveRepo.is_compatible
 
589
        is_compatible = weaverepo.InterWeaveRepo.is_compatible
558
590
        for source in incompatible_formats:
559
591
            # force incompatible left then right
560
592
            repo_a._format = source
566
598
            for target in formats:
567
599
                repo_b._format = target
568
600
                self.assertTrue(is_compatible(repo_a, repo_b))
569
 
        self.assertEqual(repository.InterWeaveRepo,
 
601
        self.assertEqual(weaverepo.InterWeaveRepo,
570
602
                         repository.InterRepository.get(repo_a,
571
603
                                                        repo_b).__class__)
572
604
 
573
605
 
574
 
class TestInterRemoteToOther(TestCaseWithTransport):
575
 
 
576
 
    def make_remote_repository(self, path, backing_format=None):
577
 
        """Make a RemoteRepository object backed by a real repository that will
578
 
        be created at the given path."""
579
 
        self.make_repository(path, format=backing_format)
580
 
        smart_server = server.SmartTCPServer_for_testing()
581
 
        smart_server.setUp()
582
 
        remote_transport = get_transport(smart_server.get_url()).clone(path)
583
 
        self.addCleanup(smart_server.tearDown)
584
 
        remote_bzrdir = bzrdir.BzrDir.open_from_transport(remote_transport)
585
 
        remote_repo = remote_bzrdir.open_repository()
586
 
        return remote_repo
587
 
 
588
 
    def test_is_compatible_same_format(self):
589
 
        """InterRemoteToOther is compatible with a remote repository and a
590
 
        second repository that have the same format."""
591
 
        local_repo = self.make_repository('local')
592
 
        remote_repo = self.make_remote_repository('remote')
593
 
        is_compatible = repository.InterRemoteToOther.is_compatible
594
 
        self.assertTrue(
595
 
            is_compatible(remote_repo, local_repo),
596
 
            "InterRemoteToOther(%r, %r) is false" % (remote_repo, local_repo))
597
 
          
598
 
    def test_is_incompatible_different_format(self):
599
 
        local_repo = self.make_repository('local', 'dirstate')
600
 
        remote_repo = self.make_remote_repository('a', 'dirstate-with-subtree')
601
 
        is_compatible = repository.InterRemoteToOther.is_compatible
602
 
        self.assertFalse(
603
 
            is_compatible(remote_repo, local_repo),
604
 
            "InterRemoteToOther(%r, %r) is true" % (local_repo, remote_repo))
605
 
 
606
 
    def test_is_incompatible_different_format_both_remote(self):
607
 
        remote_repo_a = self.make_remote_repository(
608
 
            'a', 'dirstate-with-subtree')
609
 
        remote_repo_b = self.make_remote_repository('b', 'dirstate')
610
 
        is_compatible = repository.InterRemoteToOther.is_compatible
611
 
        self.assertFalse(
612
 
            is_compatible(remote_repo_a, remote_repo_b),
613
 
            "InterRemoteToOther(%r, %r) is true"
614
 
            % (remote_repo_a, remote_repo_b))
615
 
 
616
 
 
617
606
class TestRepositoryConverter(TestCaseWithTransport):
618
607
 
619
608
    def test_convert_empty(self):
633
622
 
634
623
 
635
624
class TestMisc(TestCase):
636
 
    
 
625
 
637
626
    def test_unescape_xml(self):
638
627
        """We get some kind of error when malformed entities are passed"""
639
 
        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;') 
 
628
        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;')
640
629
 
641
630
 
642
631
class TestRepositoryFormatKnit3(TestCaseWithTransport):
643
632
 
 
633
    def test_attribute__fetch_order(self):
 
634
        """Knits need topological data insertion."""
 
635
        format = bzrdir.BzrDirMetaFormat1()
 
636
        format.repository_format = knitrepo.RepositoryFormatKnit3()
 
637
        repo = self.make_repository('.', format=format)
 
638
        self.assertEqual('topological', repo._format._fetch_order)
 
639
 
 
640
    def test_attribute__fetch_uses_deltas(self):
 
641
        """Knits reuse deltas."""
 
642
        format = bzrdir.BzrDirMetaFormat1()
 
643
        format.repository_format = knitrepo.RepositoryFormatKnit3()
 
644
        repo = self.make_repository('.', format=format)
 
645
        self.assertEqual(True, repo._format._fetch_uses_deltas)
 
646
 
644
647
    def test_convert(self):
645
648
        """Ensure the upgrade adds weaves for roots"""
646
649
        format = bzrdir.BzrDirMetaFormat1()
648
651
        tree = self.make_branch_and_tree('.', format)
649
652
        tree.commit("Dull commit", rev_id="dull")
650
653
        revision_tree = tree.branch.repository.revision_tree('dull')
651
 
        self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
652
 
            revision_tree.inventory.root.file_id)
 
654
        revision_tree.lock_read()
 
655
        try:
 
656
            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
 
657
                revision_tree.inventory.root.file_id)
 
658
        finally:
 
659
            revision_tree.unlock()
653
660
        format = bzrdir.BzrDirMetaFormat1()
654
661
        format.repository_format = knitrepo.RepositoryFormatKnit3()
655
662
        upgrade.Convert('.', format)
656
663
        tree = workingtree.WorkingTree.open('.')
657
664
        revision_tree = tree.branch.repository.revision_tree('dull')
658
 
        revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
 
665
        revision_tree.lock_read()
 
666
        try:
 
667
            revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
 
668
        finally:
 
669
            revision_tree.unlock()
659
670
        tree.commit("Another dull commit", rev_id='dull2')
660
671
        revision_tree = tree.branch.repository.revision_tree('dull2')
 
672
        revision_tree.lock_read()
 
673
        self.addCleanup(revision_tree.unlock)
661
674
        self.assertEqual('dull', revision_tree.inventory.root.revision)
662
675
 
663
 
    def test_exposed_versioned_files_are_marked_dirty(self):
 
676
    def test_supports_external_lookups(self):
664
677
        format = bzrdir.BzrDirMetaFormat1()
665
678
        format.repository_format = knitrepo.RepositoryFormatKnit3()
666
679
        repo = self.make_repository('.', format=format)
667
 
        repo.lock_write()
668
 
        inv = repo.get_inventory_weave()
669
 
        repo.unlock()
670
 
        self.assertRaises(errors.OutSideTransaction,
671
 
            inv.add_lines, 'foo', [], [])
 
680
        self.assertFalse(repo._format.supports_external_lookups)
 
681
 
 
682
 
 
683
class Test2a(tests.TestCaseWithMemoryTransport):
 
684
 
 
685
    def test_chk_bytes_uses_custom_btree_parser(self):
 
686
        mt = self.make_branch_and_memory_tree('test', format='2a')
 
687
        mt.lock_write()
 
688
        self.addCleanup(mt.unlock)
 
689
        mt.add([''], ['root-id'])
 
690
        mt.commit('first')
 
691
        index = mt.branch.repository.chk_bytes._index._graph_index._indices[0]
 
692
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
 
693
        # It should also work if we re-open the repo
 
694
        repo = mt.branch.repository.bzrdir.open_repository()
 
695
        repo.lock_read()
 
696
        self.addCleanup(repo.unlock)
 
697
        index = repo.chk_bytes._index._graph_index._indices[0]
 
698
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
 
699
 
 
700
    def test_fetch_combines_groups(self):
 
701
        builder = self.make_branch_builder('source', format='2a')
 
702
        builder.start_series()
 
703
        builder.build_snapshot('1', None, [
 
704
            ('add', ('', 'root-id', 'directory', '')),
 
705
            ('add', ('file', 'file-id', 'file', 'content\n'))])
 
706
        builder.build_snapshot('2', ['1'], [
 
707
            ('modify', ('file-id', 'content-2\n'))])
 
708
        builder.finish_series()
 
709
        source = builder.get_branch()
 
710
        target = self.make_repository('target', format='2a')
 
711
        target.fetch(source.repository)
 
712
        target.lock_read()
 
713
        self.addCleanup(target.unlock)
 
714
        details = target.texts._index.get_build_details(
 
715
            [('file-id', '1',), ('file-id', '2',)])
 
716
        file_1_details = details[('file-id', '1')]
 
717
        file_2_details = details[('file-id', '2')]
 
718
        # The index, and what to read off disk, should be the same for both
 
719
        # versions of the file.
 
720
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
 
721
 
 
722
    def test_fetch_combines_groups(self):
 
723
        builder = self.make_branch_builder('source', format='2a')
 
724
        builder.start_series()
 
725
        builder.build_snapshot('1', None, [
 
726
            ('add', ('', 'root-id', 'directory', '')),
 
727
            ('add', ('file', 'file-id', 'file', 'content\n'))])
 
728
        builder.build_snapshot('2', ['1'], [
 
729
            ('modify', ('file-id', 'content-2\n'))])
 
730
        builder.finish_series()
 
731
        source = builder.get_branch()
 
732
        target = self.make_repository('target', format='2a')
 
733
        target.fetch(source.repository)
 
734
        target.lock_read()
 
735
        self.addCleanup(target.unlock)
 
736
        details = target.texts._index.get_build_details(
 
737
            [('file-id', '1',), ('file-id', '2',)])
 
738
        file_1_details = details[('file-id', '1')]
 
739
        file_2_details = details[('file-id', '2')]
 
740
        # The index, and what to read off disk, should be the same for both
 
741
        # versions of the file.
 
742
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
 
743
 
 
744
    def test_fetch_combines_groups(self):
 
745
        builder = self.make_branch_builder('source', format='2a')
 
746
        builder.start_series()
 
747
        builder.build_snapshot('1', None, [
 
748
            ('add', ('', 'root-id', 'directory', '')),
 
749
            ('add', ('file', 'file-id', 'file', 'content\n'))])
 
750
        builder.build_snapshot('2', ['1'], [
 
751
            ('modify', ('file-id', 'content-2\n'))])
 
752
        builder.finish_series()
 
753
        source = builder.get_branch()
 
754
        target = self.make_repository('target', format='2a')
 
755
        target.fetch(source.repository)
 
756
        target.lock_read()
 
757
        self.addCleanup(target.unlock)
 
758
        details = target.texts._index.get_build_details(
 
759
            [('file-id', '1',), ('file-id', '2',)])
 
760
        file_1_details = details[('file-id', '1')]
 
761
        file_2_details = details[('file-id', '2')]
 
762
        # The index, and what to read off disk, should be the same for both
 
763
        # versions of the file.
 
764
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
 
765
 
 
766
    def test_format_pack_compresses_True(self):
 
767
        repo = self.make_repository('repo', format='2a')
 
768
        self.assertTrue(repo._format.pack_compresses)
 
769
 
 
770
    def test_inventories_use_chk_map_with_parent_base_dict(self):
 
771
        tree = self.make_branch_and_memory_tree('repo', format="2a")
 
772
        tree.lock_write()
 
773
        tree.add([''], ['TREE_ROOT'])
 
774
        revid = tree.commit("foo")
 
775
        tree.unlock()
 
776
        tree.lock_read()
 
777
        self.addCleanup(tree.unlock)
 
778
        inv = tree.branch.repository.get_inventory(revid)
 
779
        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
 
780
        inv.parent_id_basename_to_file_id._ensure_root()
 
781
        inv.id_to_entry._ensure_root()
 
782
        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
 
783
        self.assertEqual(65536,
 
784
            inv.parent_id_basename_to_file_id._root_node.maximum_size)
 
785
 
 
786
    def test_autopack_unchanged_chk_nodes(self):
 
787
        # at 20 unchanged commits, chk pages are packed that are split into
 
788
        # two groups such that the new pack being made doesn't have all its
 
789
        # pages in the source packs (though they are in the repository).
 
790
        # Use a memory backed repository, we don't need to hit disk for this
 
791
        tree = self.make_branch_and_memory_tree('tree', format='2a')
 
792
        tree.lock_write()
 
793
        self.addCleanup(tree.unlock)
 
794
        tree.add([''], ['TREE_ROOT'])
 
795
        for pos in range(20):
 
796
            tree.commit(str(pos))
 
797
 
 
798
    def test_pack_with_hint(self):
 
799
        tree = self.make_branch_and_memory_tree('tree', format='2a')
 
800
        tree.lock_write()
 
801
        self.addCleanup(tree.unlock)
 
802
        tree.add([''], ['TREE_ROOT'])
 
803
        # 1 commit to leave untouched
 
804
        tree.commit('1')
 
805
        to_keep = tree.branch.repository._pack_collection.names()
 
806
        # 2 to combine
 
807
        tree.commit('2')
 
808
        tree.commit('3')
 
809
        all = tree.branch.repository._pack_collection.names()
 
810
        combine = list(set(all) - set(to_keep))
 
811
        self.assertLength(3, all)
 
812
        self.assertLength(2, combine)
 
813
        tree.branch.repository.pack(hint=combine)
 
814
        final = tree.branch.repository._pack_collection.names()
 
815
        self.assertLength(2, final)
 
816
        self.assertFalse(combine[0] in final)
 
817
        self.assertFalse(combine[1] in final)
 
818
        self.assertSubset(to_keep, final)
 
819
 
 
820
    def test_stream_source_to_gc(self):
 
821
        source = self.make_repository('source', format='2a')
 
822
        target = self.make_repository('target', format='2a')
 
823
        stream = source._get_source(target._format)
 
824
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)
 
825
 
 
826
    def test_stream_source_to_non_gc(self):
 
827
        source = self.make_repository('source', format='2a')
 
828
        target = self.make_repository('target', format='rich-root-pack')
 
829
        stream = source._get_source(target._format)
 
830
        # We don't want the child GroupCHKStreamSource
 
831
        self.assertIs(type(stream), repository.StreamSource)
 
832
 
 
833
    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
        """get_stream_for_missing_keys must stream every referenced chk page.

        A plain get_stream for rev-2 only sends the chk pages new in rev-2;
        when an inventory is being re-sent as a *missing* key, all chk pages
        it references must come along too (257 here), because the target may
        not have any of them.
        """
        source_builder = self.make_branch_builder('source',
                            format='2a')
        # We have to build a fairly large tree, so that we are sure the chk
        # pages will have split into multiple pages.
        entries = [('add', ('', 'a-root-id', 'directory', None))]
        # 35 x 35 = 1225 files, enough to split the id_to_entry map.
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                fname = i + j
                fid = fname + '-id'
                content = 'content for %s\n' % (fname,)
                entries.append(('add', (fname, fid, 'file', content)))
        source_builder.start_series()
        source_builder.build_snapshot('rev-1', None, entries)
        # Now change a few of them, so we get a few new pages for the second
        # revision
        source_builder.build_snapshot('rev-2', ['rev-1'], [
            ('modify', ('aa-id', 'new content for aa-id\n')),
            ('modify', ('cc-id', 'new content for cc-id\n')),
            ('modify', ('zz-id', 'new content for zz-id\n')),
            ])
        source_builder.finish_series()
        source_branch = source_builder.get_branch()
        source_branch.lock_read()
        self.addCleanup(source_branch.unlock)
        target = self.make_repository('target', format='2a')
        source = source_branch.repository._get_source(target._format)
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)

        # On a regular pass, getting the inventories and chk pages for rev-2
        # would only get the newly created chk pages
        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
                                    set(['rev-2']))
        simple_chk_records = []
        for vf_name, substream in source.get_stream(search):
            if vf_name == 'chk_bytes':
                for record in substream:
                    simple_chk_records.append(record.key)
            else:
                # Other substreams must still be consumed to drive the
                # generator to completion.
                for _ in substream:
                    continue
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
                         simple_chk_records)
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
        # we should get a much larger set of pages.
        missing = [('inventories', 'rev-2')]
        full_chk_records = []
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
            if vf_name == 'inventories':
                for record in substream:
                    self.assertEqual(('rev-2',), record.key)
            elif vf_name == 'chk_bytes':
                for record in substream:
                    full_chk_records.append(record.key)
            else:
                self.fail('Should not be getting a stream of %s' % (vf_name,))
        # We have 257 records now. This is because we have 1 root page, and 256
        # leaf pages in a complete listing.
        self.assertEqual(257, len(full_chk_records))
        self.assertSubset(simple_chk_records, full_chk_records)
 
897
 
 
898
    def test_inconsistency_fatal(self):
 
899
        repo = self.make_repository('repo', format='2a')
 
900
        self.assertTrue(repo.revisions._index._inconsistency_fatal)
 
901
        self.assertFalse(repo.texts._index._inconsistency_fatal)
 
902
        self.assertFalse(repo.inventories._index._inconsistency_fatal)
 
903
        self.assertFalse(repo.signatures._index._inconsistency_fatal)
 
904
        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
 
905
 
 
906
 
 
907
class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):
    """Which stream source is selected between knit-pack repositories.

    Exactly matching pack formats may use the optimised
    KnitPackStreamSource; any format mismatch (or an indirect, remote
    target) must fall back to the generic StreamSource.  The original
    class repeated the same three-line body nine times; the shared logic
    now lives in two assertion helpers.
    """

    def _make_stream_source(self, source_format, target_format):
        # Build a source and target repository and return the stream source
        # the source selects for the target's format.
        source = self.make_repository('source', format=source_format)
        target = self.make_repository('target', format=target_format)
        return source._get_source(target._format)

    def assertKnitPackSource(self, source_format, target_format):
        # Identical pack formats should pick the optimised stream source.
        stream_source = self._make_stream_source(source_format, target_format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def assertGenericSource(self, source_format, target_format):
        # Mismatched formats must use the plain StreamSource; the exact
        # type check guards against silently getting a subclass.
        stream_source = self._make_stream_source(source_format, target_format)
        self.assertIs(type(stream_source), repository.StreamSource)

    def test_source_to_exact_pack_092(self):
        self.assertKnitPackSource('pack-0.92', 'pack-0.92')

    def test_source_to_exact_pack_rich_root_pack(self):
        self.assertKnitPackSource('rich-root-pack', 'rich-root-pack')

    def test_source_to_exact_pack_19(self):
        self.assertKnitPackSource('1.9', '1.9')

    def test_source_to_exact_pack_19_rich_root(self):
        self.assertKnitPackSource('1.9-rich-root', '1.9-rich-root')

    def test_source_to_remote_exact_pack_19(self):
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        # Reopen the target through the smart server so streaming goes via
        # the remote format; the exact-match optimisation still applies.
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_stream_source_to_non_exact(self):
        self.assertGenericSource('pack-0.92', '1.9')

    def test_stream_source_to_non_exact_rich_root(self):
        self.assertGenericSource('1.9', '1.9-rich-root')

    def test_source_to_remote_non_exact_pack_19(self):
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.6')
        # Reopen the target through the smart server; the format mismatch
        # forces the generic source even over the wire.
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIs(type(stream_source), repository.StreamSource)

    def test_stream_source_to_knit(self):
        self.assertGenericSource('pack-0.92', 'dirstate')
 
968
 
 
969
 
 
970
class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
    """Tests for _find_parent_ids_of_revisions."""

    def setUp(self):
        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
        # A branch builder with a single 'initial' snapshot; individual
        # tests layer further snapshots on top of it.
        self.builder = self.make_branch_builder('source')
        self.builder.start_series()
        self.builder.build_snapshot('initial', None,
            [('add', ('', 'tree-root', 'directory', None))])
        self.repo = self.builder.get_branch().repository
        self.addCleanup(self.builder.finish_series)

    def snap(self, revid, parents):
        # Shorthand: build an empty snapshot with the given parents.
        self.builder.build_snapshot(revid, parents, [])

    def assertParentIds(self, expected_result, rev_set):
        """Assert the parent ids found for rev_set match expected_result."""
        found = self.repo._find_parent_ids_of_revisions(rev_set)
        self.assertEqual(sorted(expected_result), sorted(found))

    def test_simple(self):
        self.snap('revid1', None)
        self.snap('revid2', ['revid1'])
        self.assertParentIds(['revid1'], ['revid2'])

    def test_not_first_parent(self):
        self.snap('revid1', None)
        self.snap('revid2', ['revid1'])
        self.snap('revid3', ['revid2'])
        self.assertParentIds(['revid1'], ['revid3', 'revid2'])

    def test_not_null(self):
        self.assertParentIds([], ['initial'])

    def test_not_null_set(self):
        self.snap('revid1', None)
        self.assertParentIds([], [_mod_revision.NULL_REVISION])

    def test_ghost(self):
        self.snap('revid1', None)
        self.assertParentIds(['initial'], ['ghost', 'revid1'])

    def test_ghost_parent(self):
        self.snap('revid1', None)
        self.snap('revid2', ['revid1', 'ghost'])
        self.assertParentIds(['ghost', 'initial'], ['revid2', 'revid1'])

    def test_righthand_parent(self):
        self.snap('revid1', None)
        self.snap('revid2a', ['revid1'])
        self.snap('revid2b', ['revid1'])
        self.snap('revid3', ['revid2a', 'revid2b'])
        self.assertParentIds(['revid1', 'revid2b'], ['revid3', 'revid2a'])
672
1026
 
673
1027
 
674
1028
class TestWithBrokenRepo(TestCaseWithTransport):
689
1043
            inv = inventory.Inventory(revision_id='rev1a')
690
1044
            inv.root.revision = 'rev1a'
691
1045
            self.add_file(repo, inv, 'file1', 'rev1a', [])
 
1046
            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
692
1047
            repo.add_inventory('rev1a', inv, [])
693
1048
            revision = _mod_revision.Revision('rev1a',
694
1049
                committer='jrandom@example.com', timestamp=0,
729
1084
    def add_revision(self, repo, revision_id, inv, parent_ids):
730
1085
        inv.revision_id = revision_id
731
1086
        inv.root.revision = revision_id
 
1087
        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
732
1088
        repo.add_inventory(revision_id, inv, parent_ids)
733
1089
        revision = _mod_revision.Revision(revision_id,
734
1090
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
741
1097
        entry.revision = revision
742
1098
        entry.text_size = 0
743
1099
        inv.add(entry)
744
 
        vf = repo.weave_store.get_weave_or_empty(file_id,
745
 
                                                 repo.get_transaction())
746
 
        vf.add_lines(revision, parents, ['line\n'])
 
1100
        text_key = (file_id, revision)
 
1101
        parent_keys = [(file_id, parent) for parent in parents]
 
1102
        repo.texts.add_lines(text_key, parent_keys, ['line\n'])
747
1103
 
748
1104
    def test_insert_from_broken_repo(self):
749
1105
        """Inserting a data stream from a broken repository won't silently
751
1107
        """
752
1108
        broken_repo = self.make_broken_repository()
753
1109
        empty_repo = self.make_repository('empty-repo')
754
 
        stream = broken_repo.get_data_stream(['rev1a', 'rev2', 'rev3'])
755
 
        empty_repo.lock_write()
 
1110
        try:
 
1111
            empty_repo.fetch(broken_repo)
 
1112
        except (errors.RevisionNotPresent, errors.BzrCheckError):
 
1113
            # Test successful: compression parent not being copied leads to
 
1114
            # error.
 
1115
            return
 
1116
        empty_repo.lock_read()
756
1117
        self.addCleanup(empty_repo.unlock)
757
 
        empty_repo.start_write_group()
758
 
        try:
759
 
            self.assertRaises(
760
 
                errors.KnitCorrupt, empty_repo.insert_data_stream, stream)
761
 
        finally:
762
 
            empty_repo.abort_write_group()
763
 
 
764
 
 
765
 
class TestKnitPackNoSubtrees(TestCaseWithTransport):
766
 
 
767
 
    def get_format(self):
768
 
        return bzrdir.format_registry.make_bzrdir('pack-0.92')
769
 
 
770
 
    def test_disk_layout(self):
771
 
        format = self.get_format()
772
 
        repo = self.make_repository('.', format=format)
773
 
        # in case of side effects of locking.
774
 
        repo.lock_write()
775
 
        repo.unlock()
776
 
        t = repo.bzrdir.get_repository_transport(None)
777
 
        self.check_format(t)
778
 
        # XXX: no locks left when unlocked at the moment
779
 
        # self.assertEqualDiff('', t.get('lock').read())
780
 
        self.check_databases(t)
781
 
 
782
 
    def check_format(self, t):
783
 
        self.assertEqualDiff(
784
 
            "Bazaar pack repository format 1 (needs bzr 0.92)\n",
785
 
                             t.get('format').read())
786
 
 
787
 
    def assertHasKndx(self, t, knit_name):
788
 
        """Assert that knit_name exists on t."""
789
 
        self.assertEqualDiff('# bzr knit index 8\n',
790
 
                             t.get(knit_name + '.kndx').read())
791
 
 
792
 
    def assertHasNoKndx(self, t, knit_name):
793
 
        """Assert that knit_name has no index on t."""
794
 
        self.assertFalse(t.has(knit_name + '.kndx'))
795
 
 
796
 
    def assertHasNoKnit(self, t, knit_name):
797
 
        """Assert that knit_name exists on t."""
798
 
        # no default content
799
 
        self.assertFalse(t.has(knit_name + '.knit'))
800
 
 
801
 
    def check_databases(self, t):
802
 
        """check knit content for a repository."""
803
 
        # check conversion worked
804
 
        self.assertHasNoKndx(t, 'inventory')
805
 
        self.assertHasNoKnit(t, 'inventory')
806
 
        self.assertHasNoKndx(t, 'revisions')
807
 
        self.assertHasNoKnit(t, 'revisions')
808
 
        self.assertHasNoKndx(t, 'signatures')
809
 
        self.assertHasNoKnit(t, 'signatures')
810
 
        self.assertFalse(t.has('knits'))
811
 
        # revision-indexes file-container directory
812
 
        self.assertEqual([],
813
 
            list(GraphIndex(t, 'pack-names', None).iter_all_entries()))
814
 
        self.assertTrue(S_ISDIR(t.stat('packs').st_mode))
815
 
        self.assertTrue(S_ISDIR(t.stat('upload').st_mode))
816
 
        self.assertTrue(S_ISDIR(t.stat('indices').st_mode))
817
 
        self.assertTrue(S_ISDIR(t.stat('obsolete_packs').st_mode))
818
 
 
819
 
    def test_shared_disk_layout(self):
820
 
        format = self.get_format()
821
 
        repo = self.make_repository('.', shared=True, format=format)
822
 
        # we want:
823
 
        t = repo.bzrdir.get_repository_transport(None)
824
 
        self.check_format(t)
825
 
        # XXX: no locks left when unlocked at the moment
826
 
        # self.assertEqualDiff('', t.get('lock').read())
827
 
        # We should have a 'shared-storage' marker file.
828
 
        self.assertEqualDiff('', t.get('shared-storage').read())
829
 
        self.check_databases(t)
830
 
 
831
 
    def test_shared_no_tree_disk_layout(self):
832
 
        format = self.get_format()
833
 
        repo = self.make_repository('.', shared=True, format=format)
834
 
        repo.set_make_working_trees(False)
835
 
        # we want:
836
 
        t = repo.bzrdir.get_repository_transport(None)
837
 
        self.check_format(t)
838
 
        # XXX: no locks left when unlocked at the moment
839
 
        # self.assertEqualDiff('', t.get('lock').read())
840
 
        # We should have a 'shared-storage' marker file.
841
 
        self.assertEqualDiff('', t.get('shared-storage').read())
842
 
        # We should have a marker for the no-working-trees flag.
843
 
        self.assertEqualDiff('', t.get('no-working-trees').read())
844
 
        # The marker should go when we toggle the setting.
845
 
        repo.set_make_working_trees(True)
846
 
        self.assertFalse(t.has('no-working-trees'))
847
 
        self.check_databases(t)
848
 
 
849
 
    def test_adding_revision_creates_pack_indices(self):
850
 
        format = self.get_format()
851
 
        tree = self.make_branch_and_tree('.', format=format)
852
 
        trans = tree.branch.repository.bzrdir.get_repository_transport(None)
853
 
        self.assertEqual([],
854
 
            list(GraphIndex(trans, 'pack-names', None).iter_all_entries()))
855
 
        tree.commit('foobarbaz')
856
 
        index = GraphIndex(trans, 'pack-names', None)
857
 
        index_nodes = list(index.iter_all_entries())
858
 
        self.assertEqual(1, len(index_nodes))
859
 
        node = index_nodes[0]
860
 
        name = node[1][0]
861
 
        # the pack sizes should be listed in the index
862
 
        pack_value = node[2]
863
 
        sizes = [int(digits) for digits in pack_value.split(' ')]
864
 
        for size, suffix in zip(sizes, ['.rix', '.iix', '.tix', '.six']):
865
 
            stat = trans.stat('indices/%s%s' % (name, suffix))
866
 
            self.assertEqual(size, stat.st_size)
867
 
 
868
 
    def test_pulling_nothing_leads_to_no_new_names(self):
869
 
        format = self.get_format()
870
 
        tree1 = self.make_branch_and_tree('1', format=format)
871
 
        tree2 = self.make_branch_and_tree('2', format=format)
872
 
        tree1.branch.repository.fetch(tree2.branch.repository)
873
 
        trans = tree1.branch.repository.bzrdir.get_repository_transport(None)
874
 
        self.assertEqual([],
875
 
            list(GraphIndex(trans, 'pack-names', None).iter_all_entries()))
876
 
 
877
 
    def test_commit_across_pack_shape_boundary_autopacks(self):
        """The 10th commit triggers an autopack that coalesces all packs.

        Also checks that autopack clears stray files out of obsolete_packs
        and that the repacked data survives a repository check.
        """
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        trans = tree.branch.repository.bzrdir.get_repository_transport(None)
        # This test could be a little cheaper by replacing the packs
        # attribute on the repository to allow a different pack distribution
        # and max packs policy - so we are checking the policy is honoured
        # in the test. But for now 11 commits is not a big deal in a single
        # test.
        for x in range(9):
            tree.commit('commit %s' % x)
        # there should be 9 packs:
        index = GraphIndex(trans, 'pack-names', None)
        self.assertEqual(9, len(list(index.iter_all_entries())))
        # insert some files in obsolete_packs which should be removed by pack.
        trans.put_bytes('obsolete_packs/foo', '123')
        trans.put_bytes('obsolete_packs/bar', '321')
        # committing one more should coalesce to 1 of 10.
        tree.commit('commit triggering pack')
        index = GraphIndex(trans, 'pack-names', None)
        self.assertEqual(1, len(list(index.iter_all_entries())))
        # packing should not damage data
        tree = tree.bzrdir.open_workingtree()
        check_result = tree.branch.repository.check(
            [tree.branch.last_revision()])
        # We should have 50 (10x5) files in the obsolete_packs directory.
        obsolete_files = list(trans.list_dir('obsolete_packs'))
        self.assertFalse('foo' in obsolete_files)
        self.assertFalse('bar' in obsolete_files)
        self.assertEqual(50, len(obsolete_files))
        # XXX: Todo check packs obsoleted correctly - old packs and indices
        # in the obsolete_packs directory.
        large_pack_name = list(index.iter_all_entries())[0][1][0]
        # finally, committing again should not touch the large pack.
        tree.commit('commit not triggering pack')
        index = GraphIndex(trans, 'pack-names', None)
        self.assertEqual(2, len(list(index.iter_all_entries())))
        pack_names = [node[1][0] for node in index.iter_all_entries()]
        self.assertTrue(large_pack_name in pack_names)
916
 
 
917
 
    def test_pack_after_two_commits_packs_everything(self):
918
 
        format = self.get_format()
919
 
        tree = self.make_branch_and_tree('.', format=format)
920
 
        trans = tree.branch.repository.bzrdir.get_repository_transport(None)
921
 
        tree.commit('start')
922
 
        tree.commit('more work')
923
 
        tree.branch.repository.pack()
924
 
        # there should be 1 pack:
925
 
        index = GraphIndex(trans, 'pack-names', None)
926
 
        self.assertEqual(1, len(list(index.iter_all_entries())))
927
 
        self.assertEqual(2, len(tree.branch.repository.all_revision_ids()))
928
 
 
929
 
    def test_pack_repositories_support_multiple_write_locks(self):
930
 
        format = self.get_format()
931
 
        self.make_repository('.', shared=True, format=format)
932
 
        r1 = repository.Repository.open('.')
933
 
        r2 = repository.Repository.open('.')
934
 
        r1.lock_write()
935
 
        self.addCleanup(r1.unlock)
936
 
        r2.lock_write()
937
 
        r2.unlock()
938
 
 
939
 
    def _add_text(self, repo, fileid):
940
 
        """Add a text to the repository within a write group."""
941
 
        vf =repo.weave_store.get_weave(fileid, repo.get_transaction())
942
 
        vf.add_lines('samplerev+' + fileid, [], [])
943
 
 
944
 
    def test_concurrent_writers_merge_new_packs(self):
        """Two writers committing write groups merge their pack names.

        Each repository handle commits a pack while the other's write group
        is open; committing the second requires merging pack-names, after
        which both handles must see both packs.
        """
        format = self.get_format()
        self.make_repository('.', shared=True, format=format)
        r1 = repository.Repository.open('.')
        r2 = repository.Repository.open('.')
        r1.lock_write()
        try:
            # access enough data to load the names list
            list(r1.all_revision_ids())
            r2.lock_write()
            try:
                # access enough data to load the names list
                list(r2.all_revision_ids())
                r1.start_write_group()
                try:
                    r2.start_write_group()
                    try:
                        self._add_text(r1, 'fileidr1')
                        self._add_text(r2, 'fileidr2')
                    except:
                        r2.abort_write_group()
                        raise
                except:
                    r1.abort_write_group()
                    raise
                # both r1 and r2 have open write groups with data in them
                # created while the other's write group was open.
                # Commit both which requires a merge to the pack-names.
                try:
                    r1.commit_write_group()
                except:
                    r1.abort_write_group()
                    r2.abort_write_group()
                    raise
                r2.commit_write_group()
                # tell r1 to reload from disk
                r1._pack_collection.reset()
                # Now both repositories should know about both names
                r1._pack_collection.ensure_loaded()
                r2._pack_collection.ensure_loaded()
                self.assertEqual(r1._pack_collection.names(), r2._pack_collection.names())
                self.assertEqual(2, len(r1._pack_collection.names()))
            finally:
                r2.unlock()
        finally:
            r1.unlock()
990
 
 
991
 
    def test_concurrent_writer_second_preserves_dropping_a_pack(self):
        """A concurrent commit must not reinstate a pack another writer drops.

        r1 removes a pack from its in-memory collection and saves the new
        pack-names; r2 then commits a write group, which merges pack-names
        and must preserve the removal.
        """
        format = self.get_format()
        self.make_repository('.', shared=True, format=format)
        r1 = repository.Repository.open('.')
        r2 = repository.Repository.open('.')
        # add a pack to drop
        r1.lock_write()
        try:
            r1.start_write_group()
            try:
                self._add_text(r1, 'fileidr1')
            except:
                r1.abort_write_group()
                raise
            else:
                r1.commit_write_group()
            r1._pack_collection.ensure_loaded()
            name_to_drop = r1._pack_collection.all_packs()[0].name
        finally:
            r1.unlock()
        r1.lock_write()
        try:
            # access enough data to load the names list
            list(r1.all_revision_ids())
            r2.lock_write()
            try:
                # access enough data to load the names list
                list(r2.all_revision_ids())
                r1._pack_collection.ensure_loaded()
                try:
                    r2.start_write_group()
                    try:
                        # in r1, drop the pack
                        r1._pack_collection._remove_pack_from_memory(
                            r1._pack_collection.get_pack_by_name(name_to_drop))
                        # in r2, add a pack
                        self._add_text(r2, 'fileidr2')
                    except:
                        r2.abort_write_group()
                        raise
                except:
                    r1._pack_collection.reset()
                    raise
                # r1 has a changed names list, and r2 an open write groups with
                # changes.
                # save r1, and then commit the r2 write group, which requires a
                # merge to the pack-names, which should not reinstate
                # name_to_drop
                try:
                    r1._pack_collection._save_pack_names()
                    r1._pack_collection.reset()
                except:
                    r2.abort_write_group()
                    raise
                try:
                    r2.commit_write_group()
                except:
                    r2.abort_write_group()
                    raise
                # Now both repositories should know about just one name.
                r1._pack_collection.ensure_loaded()
                r2._pack_collection.ensure_loaded()
                self.assertEqual(r1._pack_collection.names(), r2._pack_collection.names())
                self.assertEqual(1, len(r1._pack_collection.names()))
                self.assertFalse(name_to_drop in r1._pack_collection.names())
            finally:
                r2.unlock()
        finally:
            r1.unlock()
1060
 
 
1061
 
    def test_lock_write_does_not_physically_lock(self):
1062
 
        repo = self.make_repository('.', format=self.get_format())
1063
 
        repo.lock_write()
1064
 
        self.addCleanup(repo.unlock)
1065
 
        self.assertFalse(repo.get_physical_lock_status())
1066
 
 
1067
 
    def prepare_for_break_lock(self):
1068
 
        # Setup the global ui factory state so that a break-lock method call
1069
 
        # will find usable input in the input stream.
1070
 
        old_factory = bzrlib.ui.ui_factory
1071
 
        def restoreFactory():
1072
 
            bzrlib.ui.ui_factory = old_factory
1073
 
        self.addCleanup(restoreFactory)
1074
 
        bzrlib.ui.ui_factory = bzrlib.ui.SilentUIFactory()
1075
 
        bzrlib.ui.ui_factory.stdin = StringIO("y\n")
1076
 
 
1077
 
    def test_break_lock_breaks_physical_lock(self):
1078
 
        repo = self.make_repository('.', format=self.get_format())
1079
 
        repo._pack_collection.lock_names()
1080
 
        repo2 = repository.Repository.open('.')
1081
 
        self.assertTrue(repo.get_physical_lock_status())
1082
 
        self.prepare_for_break_lock()
1083
 
        repo2.break_lock()
1084
 
        self.assertFalse(repo.get_physical_lock_status())
1085
 
 
1086
 
    def test_broken_physical_locks_error_on__unlock_names_lock(self):
1087
 
        repo = self.make_repository('.', format=self.get_format())
1088
 
        repo._pack_collection.lock_names()
1089
 
        self.assertTrue(repo.get_physical_lock_status())
1090
 
        repo2 = repository.Repository.open('.')
1091
 
        self.prepare_for_break_lock()
1092
 
        repo2.break_lock()
1093
 
        self.assertRaises(errors.LockBroken, repo._pack_collection._unlock_names)
1094
 
 
1095
 
    def test_fetch_without_find_ghosts_ignores_ghosts(self):
1096
 
        # we want two repositories at this point:
1097
 
        # one with a revision that is a ghost in the other
1098
 
        # repository.
1099
 
        # 'ghost' is present in has_ghost, 'ghost' is absent in 'missing_ghost'.
1100
 
        # 'references' is present in both repositories, and 'tip' is present
1101
 
        # just in has_ghost.
1102
 
        # has_ghost       missing_ghost
1103
 
        #------------------------------
1104
 
        # 'ghost'             -
1105
 
        # 'references'    'references'
1106
 
        # 'tip'               -
1107
 
        # In this test we fetch 'tip' which should not fetch 'ghost'
1108
 
        has_ghost = self.make_repository('has_ghost', format=self.get_format())
1109
 
        missing_ghost = self.make_repository('missing_ghost',
1110
 
            format=self.get_format())
1111
 
 
1112
 
        def add_commit(repo, revision_id, parent_ids):
1113
 
            repo.lock_write()
1114
 
            repo.start_write_group()
1115
 
            inv = inventory.Inventory(revision_id=revision_id)
1116
 
            inv.root.revision = revision_id
1117
 
            root_id = inv.root.file_id
1118
 
            sha1 = repo.add_inventory(revision_id, inv, [])
1119
 
            vf = repo.weave_store.get_weave_or_empty(root_id,
1120
 
                repo.get_transaction())
1121
 
            vf.add_lines(revision_id, [], [])
1122
 
            rev = bzrlib.revision.Revision(timestamp=0,
1123
 
                                           timezone=None,
1124
 
                                           committer="Foo Bar <foo@example.com>",
1125
 
                                           message="Message",
1126
 
                                           inventory_sha1=sha1,
1127
 
                                           revision_id=revision_id)
1128
 
            rev.parent_ids = parent_ids
1129
 
            repo.add_revision(revision_id, rev)
1130
 
            repo.commit_write_group()
1131
 
            repo.unlock()
1132
 
        add_commit(has_ghost, 'ghost', [])
1133
 
        add_commit(has_ghost, 'references', ['ghost'])
1134
 
        add_commit(missing_ghost, 'references', ['ghost'])
1135
 
        add_commit(has_ghost, 'tip', ['references'])
1136
 
        missing_ghost.fetch(has_ghost, 'tip')
1137
 
        # missing ghost now has tip and not ghost.
1138
 
        rev = missing_ghost.get_revision('tip')
1139
 
        inv = missing_ghost.get_inventory('tip')
1140
 
        self.assertRaises(errors.NoSuchRevision,
1141
 
            missing_ghost.get_revision, 'ghost')
1142
 
        self.assertRaises(errors.RevisionNotPresent,
1143
 
            missing_ghost.get_inventory, 'ghost')
1144
 
 
1145
 
 
1146
 
class TestKnitPackSubtrees(TestKnitPackNoSubtrees):
1147
 
 
1148
 
    def get_format(self):
1149
 
        return bzrdir.format_registry.make_bzrdir(
1150
 
            'pack-0.92-subtree')
1151
 
 
1152
 
    def check_format(self, t):
1153
 
        self.assertEqualDiff(
1154
 
            "Bazaar pack repository format 1 with subtree support (needs bzr 0.92)\n",
1155
 
            t.get('format').read())
 
1118
        text = empty_repo.texts.get_record_stream(
 
1119
            [('file2-id', 'rev3')], 'topological', True).next()
 
1120
        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
1156
1121
 
1157
1122
 
1158
1123
class TestRepositoryPackCollection(TestCaseWithTransport):
1160
1125
    def get_format(self):
1161
1126
        return bzrdir.format_registry.make_bzrdir('pack-0.92')
1162
1127
 
 
1128
    def get_packs(self):
 
1129
        format = self.get_format()
 
1130
        repo = self.make_repository('.', format=format)
 
1131
        return repo._pack_collection
 
1132
 
 
1133
    def make_packs_and_alt_repo(self, write_lock=False):
 
1134
        """Create a pack repo with 3 packs, and access it via a second repo."""
 
1135
        tree = self.make_branch_and_tree('.', format=self.get_format())
 
1136
        tree.lock_write()
 
1137
        self.addCleanup(tree.unlock)
 
1138
        rev1 = tree.commit('one')
 
1139
        rev2 = tree.commit('two')
 
1140
        rev3 = tree.commit('three')
 
1141
        r = repository.Repository.open('.')
 
1142
        if write_lock:
 
1143
            r.lock_write()
 
1144
        else:
 
1145
            r.lock_read()
 
1146
        self.addCleanup(r.unlock)
 
1147
        packs = r._pack_collection
 
1148
        packs.ensure_loaded()
 
1149
        return tree, r, packs, [rev1, rev2, rev3]
 
1150
 
 
1151
    def test__clear_obsolete_packs(self):
 
1152
        packs = self.get_packs()
 
1153
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
 
1154
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
 
1155
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
 
1156
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
 
1157
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
 
1158
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
 
1159
        res = packs._clear_obsolete_packs()
 
1160
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
 
1161
        self.assertEqual([], obsolete_pack_trans.list_dir('.'))
 
1162
 
 
1163
    def test__clear_obsolete_packs_preserve(self):
 
1164
        packs = self.get_packs()
 
1165
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
 
1166
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
 
1167
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
 
1168
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
 
1169
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
 
1170
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
 
1171
        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
 
1172
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
 
1173
        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
 
1174
                         sorted(obsolete_pack_trans.list_dir('.')))
 
1175
 
1163
1176
    def test__max_pack_count(self):
1164
1177
        """The maximum pack count is a function of the number of revisions."""
1165
 
        format = self.get_format()
1166
 
        repo = self.make_repository('.', format=format)
1167
 
        packs = repo._pack_collection
1168
1178
        # no revisions - one pack, so that we can have a revision free repo
1169
1179
        # without it blowing up
 
1180
        packs = self.get_packs()
1170
1181
        self.assertEqual(1, packs._max_pack_count(0))
1171
1182
        # after that the sum of the digits, - check the first 1-9
1172
1183
        self.assertEqual(1, packs._max_pack_count(1))
1187
1198
        # check some arbitrary big numbers
1188
1199
        self.assertEqual(25, packs._max_pack_count(112894))
1189
1200
 
 
1201
    def test_repr(self):
 
1202
        packs = self.get_packs()
 
1203
        self.assertContainsRe(repr(packs),
 
1204
            'RepositoryPackCollection(.*Repository(.*))')
 
1205
 
 
1206
    def test__obsolete_packs(self):
 
1207
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1208
        names = packs.names()
 
1209
        pack = packs.get_pack_by_name(names[0])
 
1210
        # Schedule this one for removal
 
1211
        packs._remove_pack_from_memory(pack)
 
1212
        # Simulate a concurrent update by renaming the .pack file and one of
 
1213
        # the indices
 
1214
        packs.transport.rename('packs/%s.pack' % (names[0],),
 
1215
                               'obsolete_packs/%s.pack' % (names[0],))
 
1216
        packs.transport.rename('indices/%s.iix' % (names[0],),
 
1217
                               'obsolete_packs/%s.iix' % (names[0],))
 
1218
        # Now trigger the obsoletion, and ensure that all the remaining files
 
1219
        # are still renamed
 
1220
        packs._obsolete_packs([pack])
 
1221
        self.assertEqual([n + '.pack' for n in names[1:]],
 
1222
                         sorted(packs._pack_transport.list_dir('.')))
 
1223
        # names[0] should not be present in the index anymore
 
1224
        self.assertEqual(names[1:],
 
1225
            sorted(set([osutils.splitext(n)[0] for n in
 
1226
                        packs._index_transport.list_dir('.')])))
 
1227
 
1190
1228
    def test_pack_distribution_zero(self):
1191
 
        format = self.get_format()
1192
 
        repo = self.make_repository('.', format=format)
1193
 
        packs = repo._pack_collection
 
1229
        packs = self.get_packs()
1194
1230
        self.assertEqual([0], packs.pack_distribution(0))
1195
 
        
 
1231
 
 
1232
    def test_ensure_loaded_unlocked(self):
 
1233
        packs = self.get_packs()
 
1234
        self.assertRaises(errors.ObjectNotLocked,
 
1235
                          packs.ensure_loaded)
 
1236
 
1196
1237
    def test_pack_distribution_one_to_nine(self):
1197
 
        format = self.get_format()
1198
 
        repo = self.make_repository('.', format=format)
1199
 
        packs = repo._pack_collection
 
1238
        packs = self.get_packs()
1200
1239
        self.assertEqual([1],
1201
1240
            packs.pack_distribution(1))
1202
1241
        self.assertEqual([1, 1],
1218
1257
 
1219
1258
    def test_pack_distribution_stable_at_boundaries(self):
1220
1259
        """When there are multi-rev packs the counts are stable."""
1221
 
        format = self.get_format()
1222
 
        repo = self.make_repository('.', format=format)
1223
 
        packs = repo._pack_collection
 
1260
        packs = self.get_packs()
1224
1261
        # in 10s:
1225
1262
        self.assertEqual([10], packs.pack_distribution(10))
1226
1263
        self.assertEqual([10, 1], packs.pack_distribution(11))
1235
1272
        self.assertEqual([100, 100, 10, 1], packs.pack_distribution(211))
1236
1273
 
1237
1274
    def test_plan_pack_operations_2009_revisions_skip_all_packs(self):
1238
 
        format = self.get_format()
1239
 
        repo = self.make_repository('.', format=format)
1240
 
        packs = repo._pack_collection
 
1275
        packs = self.get_packs()
1241
1276
        existing_packs = [(2000, "big"), (9, "medium")]
1242
1277
        # rev count - 2009 -> 2x1000 + 9x1
1243
1278
        pack_operations = packs.plan_autopack_combinations(
1245
1280
        self.assertEqual([], pack_operations)
1246
1281
 
1247
1282
    def test_plan_pack_operations_2010_revisions_skip_all_packs(self):
1248
 
        format = self.get_format()
1249
 
        repo = self.make_repository('.', format=format)
1250
 
        packs = repo._pack_collection
 
1283
        packs = self.get_packs()
1251
1284
        existing_packs = [(2000, "big"), (9, "medium"), (1, "single")]
1252
1285
        # rev count - 2010 -> 2x1000 + 1x10
1253
1286
        pack_operations = packs.plan_autopack_combinations(
1255
1288
        self.assertEqual([], pack_operations)
1256
1289
 
1257
1290
    def test_plan_pack_operations_2010_combines_smallest_two(self):
1258
 
        format = self.get_format()
1259
 
        repo = self.make_repository('.', format=format)
1260
 
        packs = repo._pack_collection
 
1291
        packs = self.get_packs()
1261
1292
        existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"),
1262
1293
            (1, "single1")]
1263
1294
        # rev count - 2010 -> 2x1000 + 1x10 (3)
1264
1295
        pack_operations = packs.plan_autopack_combinations(
1265
1296
            existing_packs, [1000, 1000, 10])
1266
 
        self.assertEqual([[2, ["single2", "single1"]], [0, []]], pack_operations)
 
1297
        self.assertEqual([[2, ["single2", "single1"]]], pack_operations)
 
1298
 
 
1299
    def test_plan_pack_operations_creates_a_single_op(self):
 
1300
        packs = self.get_packs()
 
1301
        existing_packs = [(50, 'a'), (40, 'b'), (30, 'c'), (10, 'd'),
 
1302
                          (10, 'e'), (6, 'f'), (4, 'g')]
 
1303
        # rev count 150 -> 1x100 and 5x10
 
1304
        # The two size 10 packs do not need to be touched. The 50, 40, 30 would
 
1305
        # be combined into a single 120 size pack, and the 6 & 4 would
 
1306
        # becombined into a size 10 pack. However, if we have to rewrite them,
 
1307
        # we save a pack file with no increased I/O by putting them into the
 
1308
        # same file.
 
1309
        distribution = packs.pack_distribution(150)
 
1310
        pack_operations = packs.plan_autopack_combinations(existing_packs,
 
1311
                                                           distribution)
 
1312
        self.assertEqual([[130, ['a', 'b', 'c', 'f', 'g']]], pack_operations)
1267
1313
 
1268
1314
    def test_all_packs_none(self):
1269
1315
        format = self.get_format()
1307
1353
        tree.lock_read()
1308
1354
        self.addCleanup(tree.unlock)
1309
1355
        packs = tree.branch.repository._pack_collection
 
1356
        packs.reset()
1310
1357
        packs.ensure_loaded()
1311
1358
        name = packs.names()[0]
1312
1359
        pack_1 = packs.get_pack_by_name(name)
1313
1360
        # the pack should be correctly initialised
1314
 
        rev_index = GraphIndex(packs._index_transport, name + '.rix',
1315
 
            packs._names[name][0])
1316
 
        inv_index = GraphIndex(packs._index_transport, name + '.iix',
1317
 
            packs._names[name][1])
1318
 
        txt_index = GraphIndex(packs._index_transport, name + '.tix',
1319
 
            packs._names[name][2])
1320
 
        sig_index = GraphIndex(packs._index_transport, name + '.six',
1321
 
            packs._names[name][3])
 
1361
        sizes = packs._names[name]
 
1362
        rev_index = GraphIndex(packs._index_transport, name + '.rix', sizes[0])
 
1363
        inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1])
 
1364
        txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2])
 
1365
        sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3])
1322
1366
        self.assertEqual(pack_repo.ExistingPack(packs._pack_transport,
1323
1367
            name, rev_index, inv_index, txt_index, sig_index), pack_1)
1324
1368
        # and the same instance should be returned on successive calls.
1325
1369
        self.assertTrue(pack_1 is packs.get_pack_by_name(name))
1326
1370
 
 
1371
    def test_reload_pack_names_new_entry(self):
 
1372
        tree, r, packs, revs = self.make_packs_and_alt_repo()
 
1373
        names = packs.names()
 
1374
        # Add a new pack file into the repository
 
1375
        rev4 = tree.commit('four')
 
1376
        new_names = tree.branch.repository._pack_collection.names()
 
1377
        new_name = set(new_names).difference(names)
 
1378
        self.assertEqual(1, len(new_name))
 
1379
        new_name = new_name.pop()
 
1380
        # The old collection hasn't noticed yet
 
1381
        self.assertEqual(names, packs.names())
 
1382
        self.assertTrue(packs.reload_pack_names())
 
1383
        self.assertEqual(new_names, packs.names())
 
1384
        # And the repository can access the new revision
 
1385
        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
 
1386
        self.assertFalse(packs.reload_pack_names())
 
1387
 
 
1388
    def test_reload_pack_names_added_and_removed(self):
 
1389
        tree, r, packs, revs = self.make_packs_and_alt_repo()
 
1390
        names = packs.names()
 
1391
        # Now repack the whole thing
 
1392
        tree.branch.repository.pack()
 
1393
        new_names = tree.branch.repository._pack_collection.names()
 
1394
        # The other collection hasn't noticed yet
 
1395
        self.assertEqual(names, packs.names())
 
1396
        self.assertTrue(packs.reload_pack_names())
 
1397
        self.assertEqual(new_names, packs.names())
 
1398
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
 
1399
        self.assertFalse(packs.reload_pack_names())
 
1400
 
 
1401
    def test_reload_pack_names_preserves_pending(self):
 
1402
        # TODO: Update this to also test for pending-deleted names
 
1403
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1404
        # We will add one pack (via start_write_group + insert_record_stream),
 
1405
        # and remove another pack (via _remove_pack_from_memory)
 
1406
        orig_names = packs.names()
 
1407
        orig_at_load = packs._packs_at_load
 
1408
        to_remove_name = iter(orig_names).next()
 
1409
        r.start_write_group()
 
1410
        self.addCleanup(r.abort_write_group)
 
1411
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
 
1412
            ('text', 'rev'), (), None, 'content\n')])
 
1413
        new_pack = packs._new_pack
 
1414
        self.assertTrue(new_pack.data_inserted())
 
1415
        new_pack.finish()
 
1416
        packs.allocate(new_pack)
 
1417
        packs._new_pack = None
 
1418
        removed_pack = packs.get_pack_by_name(to_remove_name)
 
1419
        packs._remove_pack_from_memory(removed_pack)
 
1420
        names = packs.names()
 
1421
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
 
1422
        new_names = set([x[0][0] for x in new_nodes])
 
1423
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
 
1424
        self.assertEqual(set(names) - set(orig_names), new_names)
 
1425
        self.assertEqual(set([new_pack.name]), new_names)
 
1426
        self.assertEqual([to_remove_name],
 
1427
                         sorted([x[0][0] for x in deleted_nodes]))
 
1428
        packs.reload_pack_names()
 
1429
        reloaded_names = packs.names()
 
1430
        self.assertEqual(orig_at_load, packs._packs_at_load)
 
1431
        self.assertEqual(names, reloaded_names)
 
1432
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
 
1433
        new_names = set([x[0][0] for x in new_nodes])
 
1434
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
 
1435
        self.assertEqual(set(names) - set(orig_names), new_names)
 
1436
        self.assertEqual(set([new_pack.name]), new_names)
 
1437
        self.assertEqual([to_remove_name],
 
1438
                         sorted([x[0][0] for x in deleted_nodes]))
 
1439
 
 
1440
    def test_autopack_obsoletes_new_pack(self):
 
1441
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1442
        packs._max_pack_count = lambda x: 1
 
1443
        packs.pack_distribution = lambda x: [10]
 
1444
        r.start_write_group()
 
1445
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
 
1446
            ('bogus-rev',), (), None, 'bogus-content\n')])
 
1447
        # This should trigger an autopack, which will combine everything into a
 
1448
        # single pack file.
 
1449
        new_names = r.commit_write_group()
 
1450
        names = packs.names()
 
1451
        self.assertEqual(1, len(names))
 
1452
        self.assertEqual([names[0] + '.pack'],
 
1453
                         packs._pack_transport.list_dir('.'))
 
1454
 
 
1455
    def test_autopack_reloads_and_stops(self):
 
1456
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1457
        # After we have determined what needs to be autopacked, trigger a
 
1458
        # full-pack via the other repo which will cause us to re-evaluate and
 
1459
        # decide we don't need to do anything
 
1460
        orig_execute = packs._execute_pack_operations
 
1461
        def _munged_execute_pack_ops(*args, **kwargs):
 
1462
            tree.branch.repository.pack()
 
1463
            return orig_execute(*args, **kwargs)
 
1464
        packs._execute_pack_operations = _munged_execute_pack_ops
 
1465
        packs._max_pack_count = lambda x: 1
 
1466
        packs.pack_distribution = lambda x: [10]
 
1467
        self.assertFalse(packs.autopack())
 
1468
        self.assertEqual(1, len(packs.names()))
 
1469
        self.assertEqual(tree.branch.repository._pack_collection.names(),
 
1470
                         packs.names())
 
1471
 
 
1472
    def test__save_pack_names(self):
 
1473
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1474
        names = packs.names()
 
1475
        pack = packs.get_pack_by_name(names[0])
 
1476
        packs._remove_pack_from_memory(pack)
 
1477
        packs._save_pack_names(obsolete_packs=[pack])
 
1478
        cur_packs = packs._pack_transport.list_dir('.')
 
1479
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
 
1480
        # obsolete_packs will also have stuff like .rix and .iix present.
 
1481
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
 
1482
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
 
1483
        self.assertEqual([pack.name], sorted(obsolete_names))
 
1484
 
 
1485
    def test__save_pack_names_already_obsoleted(self):
 
1486
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1487
        names = packs.names()
 
1488
        pack = packs.get_pack_by_name(names[0])
 
1489
        packs._remove_pack_from_memory(pack)
 
1490
        # We are going to simulate a concurrent autopack by manually obsoleting
 
1491
        # the pack directly.
 
1492
        packs._obsolete_packs([pack])
 
1493
        packs._save_pack_names(clear_obsolete_packs=True,
 
1494
                               obsolete_packs=[pack])
 
1495
        cur_packs = packs._pack_transport.list_dir('.')
 
1496
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
 
1497
        # Note that while we set clear_obsolete_packs=True, it should not
 
1498
        # delete a pack file that we have also scheduled for obsoletion.
 
1499
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
 
1500
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
 
1501
        self.assertEqual([pack.name], sorted(obsolete_names))
 
1502
 
 
1503
 
1327
1504
 
1328
1505
class TestPack(TestCaseWithTransport):
1329
1506
    """Tests for the Pack object."""
1383
1560
        pack_transport = self.get_transport('pack')
1384
1561
        index_transport = self.get_transport('index')
1385
1562
        upload_transport.mkdir('.')
1386
 
        pack = pack_repo.NewPack(upload_transport, index_transport,
1387
 
            pack_transport)
1388
 
        self.assertIsInstance(pack.revision_index, InMemoryGraphIndex)
1389
 
        self.assertIsInstance(pack.inventory_index, InMemoryGraphIndex)
1390
 
        self.assertIsInstance(pack._hash, type(md5.new()))
 
1563
        collection = pack_repo.RepositoryPackCollection(
 
1564
            repo=None,
 
1565
            transport=self.get_transport('.'),
 
1566
            index_transport=index_transport,
 
1567
            upload_transport=upload_transport,
 
1568
            pack_transport=pack_transport,
 
1569
            index_builder_class=BTreeBuilder,
 
1570
            index_class=BTreeGraphIndex,
 
1571
            use_chk_index=False)
 
1572
        pack = pack_repo.NewPack(collection)
 
1573
        self.addCleanup(pack.abort) # Make sure the write stream gets closed
 
1574
        self.assertIsInstance(pack.revision_index, BTreeBuilder)
 
1575
        self.assertIsInstance(pack.inventory_index, BTreeBuilder)
 
1576
        self.assertIsInstance(pack._hash, type(osutils.md5()))
1391
1577
        self.assertTrue(pack.upload_transport is upload_transport)
1392
1578
        self.assertTrue(pack.index_transport is index_transport)
1393
1579
        self.assertTrue(pack.pack_transport is pack_transport)
1400
1586
class TestPacker(TestCaseWithTransport):
1401
1587
    """Tests for the packs repository Packer class."""
1402
1588
 
1403
 
    # To date, this class has been factored out and nothing new added to it;
1404
 
    # thus there are not yet any tests.
 
1589
    def test_pack_optimizes_pack_order(self):
 
1590
        builder = self.make_branch_builder('.', format="1.9")
 
1591
        builder.start_series()
 
1592
        builder.build_snapshot('A', None, [
 
1593
            ('add', ('', 'root-id', 'directory', None)),
 
1594
            ('add', ('f', 'f-id', 'file', 'content\n'))])
 
1595
        builder.build_snapshot('B', ['A'],
 
1596
            [('modify', ('f-id', 'new-content\n'))])
 
1597
        builder.build_snapshot('C', ['B'],
 
1598
            [('modify', ('f-id', 'third-content\n'))])
 
1599
        builder.build_snapshot('D', ['C'],
 
1600
            [('modify', ('f-id', 'fourth-content\n'))])
 
1601
        b = builder.get_branch()
 
1602
        b.lock_read()
 
1603
        builder.finish_series()
 
1604
        self.addCleanup(b.unlock)
 
1605
        # At this point, we should have 4 pack files available
 
1606
        # Because of how they were built, they correspond to
 
1607
        # ['D', 'C', 'B', 'A']
 
1608
        packs = b.repository._pack_collection.packs
 
1609
        packer = pack_repo.Packer(b.repository._pack_collection,
 
1610
                                  packs, 'testing',
 
1611
                                  revision_ids=['B', 'C'])
 
1612
        # Now, when we are copying the B & C revisions, their pack files should
 
1613
        # be moved to the front of the stack
 
1614
        # The new ordering moves B & C to the front of the .packs attribute,
 
1615
        # and leaves the others in the original order.
 
1616
        new_packs = [packs[1], packs[2], packs[0], packs[3]]
 
1617
        new_pack = packer.pack()
 
1618
        self.assertEqual(new_packs, packer.packs)
 
1619
 
 
1620
 
 
1621
class TestOptimisingPacker(TestCaseWithTransport):
 
1622
    """Tests for the OptimisingPacker class."""
 
1623
 
 
1624
    def get_pack_collection(self):
 
1625
        repo = self.make_repository('.')
 
1626
        return repo._pack_collection
 
1627
 
 
1628
    def test_open_pack_will_optimise(self):
 
1629
        packer = pack_repo.OptimisingPacker(self.get_pack_collection(),
 
1630
                                            [], '.test')
 
1631
        new_pack = packer.open_pack()
 
1632
        self.addCleanup(new_pack.abort) # ensure cleanup
 
1633
        self.assertIsInstance(new_pack, pack_repo.NewPack)
 
1634
        self.assertTrue(new_pack.revision_index._optimize_for_size)
 
1635
        self.assertTrue(new_pack.inventory_index._optimize_for_size)
 
1636
        self.assertTrue(new_pack.text_index._optimize_for_size)
 
1637
        self.assertTrue(new_pack.signature_index._optimize_for_size)
 
1638
 
 
1639
 
 
1640
class TestCrossFormatPacks(TestCaseWithTransport):
 
1641
 
 
1642
    def log_pack(self, hint=None):
 
1643
        self.calls.append(('pack', hint))
 
1644
        self.orig_pack(hint=hint)
 
1645
        if self.expect_hint:
 
1646
            self.assertTrue(hint)
 
1647
 
 
1648
    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
 
1649
        self.expect_hint = expect_pack_called
 
1650
        self.calls = []
 
1651
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
 
1652
        source_tree.lock_write()
 
1653
        self.addCleanup(source_tree.unlock)
 
1654
        tip = source_tree.commit('foo')
 
1655
        target = self.make_repository('target', format=target_fmt)
 
1656
        target.lock_write()
 
1657
        self.addCleanup(target.unlock)
 
1658
        source = source_tree.branch.repository._get_source(target._format)
 
1659
        self.orig_pack = target.pack
 
1660
        target.pack = self.log_pack
 
1661
        search = target.search_missing_revision_ids(
 
1662
            source_tree.branch.repository, tip)
 
1663
        stream = source.get_stream(search)
 
1664
        from_format = source_tree.branch.repository._format
 
1665
        sink = target._get_sink()
 
1666
        sink.insert_stream(stream, from_format, [])
 
1667
        if expect_pack_called:
 
1668
            self.assertLength(1, self.calls)
 
1669
        else:
 
1670
            self.assertLength(0, self.calls)
 
1671
 
 
1672
    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
 
1673
        self.expect_hint = expect_pack_called
 
1674
        self.calls = []
 
1675
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
 
1676
        source_tree.lock_write()
 
1677
        self.addCleanup(source_tree.unlock)
 
1678
        tip = source_tree.commit('foo')
 
1679
        target = self.make_repository('target', format=target_fmt)
 
1680
        target.lock_write()
 
1681
        self.addCleanup(target.unlock)
 
1682
        source = source_tree.branch.repository
 
1683
        self.orig_pack = target.pack
 
1684
        target.pack = self.log_pack
 
1685
        target.fetch(source)
 
1686
        if expect_pack_called:
 
1687
            self.assertLength(1, self.calls)
 
1688
        else:
 
1689
            self.assertLength(0, self.calls)
 
1690
 
 
1691
    def test_sink_format_hint_no(self):
 
1692
        # When the target format says packing makes no difference, pack is not
 
1693
        # called.
 
1694
        self.run_stream('1.9', 'rich-root-pack', False)
 
1695
 
 
1696
    def test_sink_format_hint_yes(self):
 
1697
        # When the target format says packing makes a difference, pack is
 
1698
        # called.
 
1699
        self.run_stream('1.9', '2a', True)
 
1700
 
 
1701
    def test_sink_format_same_no(self):
 
1702
        # When the formats are the same, pack is not called.
 
1703
        self.run_stream('2a', '2a', False)
 
1704
 
 
1705
    def test_IDS_format_hint_no(self):
 
1706
        # When the target format says packing makes no difference, pack is not
 
1707
        # called.
 
1708
        self.run_fetch('1.9', 'rich-root-pack', False)
 
1709
 
 
1710
    def test_IDS_format_hint_yes(self):
 
1711
        # When the target format says packing makes a difference, pack is
 
1712
        # called.
 
1713
        self.run_fetch('1.9', '2a', True)
 
1714
 
 
1715
    def test_IDS_format_same_no(self):
 
1716
        # When the formats are the same, pack is not called.
 
1717
        self.run_fetch('2a', '2a', False)