~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/tests/test_repository.py

  • Committer: Robert Collins
  • Date: 2006-07-20 13:00:31 UTC
  • mto: (1852.9.1 Tree.compare().)
  • mto: This revision was merged to the branch mainline in revision 1890.
  • Revision ID: robertc@robertcollins.net-20060720130031-d26103a427ea10f3
Start tree implementation tests.

@@ -1 +1 @@
-# Copyright (C) 2006-2010 Canonical Ltd
+# Copyright (C) 2006 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -12 +12 @@
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

 """Tests for the Repository facility that are not interface tests.

-For interface tests see tests/per_repository/*.py.
+For interface tests see tests/repository_implementations/*.py.

 For concrete class tests see this file, and for storage formats tests
 also see this file.
@@ -24 +24 @@

 from stat import S_ISDIR
 from StringIO import StringIO
-import sys

 import bzrlib
+import bzrlib.bzrdir as bzrdir
+import bzrlib.errors as errors
 from bzrlib.errors import (NotBranchError,
                            NoSuchFile,
                            UnknownFormatError,
                            UnsupportedFormatError,
                            )
-from bzrlib import (
-    graph,
-    tests,
-    )
-from bzrlib.branchbuilder import BranchBuilder
-from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
-from bzrlib.index import GraphIndex, InMemoryGraphIndex
-from bzrlib.repository import RepositoryFormat
-from bzrlib.smart import server
-from bzrlib.tests import (
-    TestCase,
-    TestCaseWithTransport,
-    TestSkipped,
-    test_knit,
-    )
-from bzrlib.transport import (
-    fakenfs,
-    get_transport,
-    )
+import bzrlib.repository as repository
+from bzrlib.tests import TestCase, TestCaseWithTransport
+from bzrlib.transport import get_transport
+from bzrlib.transport.http import HttpServer
 from bzrlib.transport.memory import MemoryServer
-from bzrlib import (
-    bencode,
-    bzrdir,
-    errors,
-    inventory,
-    osutils,
-    progress,
-    repository,
-    revision as _mod_revision,
-    symbol_versioning,
-    upgrade,
-    versionedfile,
-    workingtree,
-    )
-from bzrlib.repofmt import (
-    groupcompress_repo,
-    knitrepo,
-    pack_repo,
-    weaverepo,
-    )


 class TestDefaultFormat(TestCase):

     def test_get_set_default_format(self):
-        old_default = bzrdir.format_registry.get('default')
-        private_default = old_default().repository_format.__class__
         old_format = repository.RepositoryFormat.get_default_format()
-        self.assertTrue(isinstance(old_format, private_default))
-        def make_sample_bzrdir():
-            my_bzrdir = bzrdir.BzrDirMetaFormat1()
-            my_bzrdir.repository_format = SampleRepositoryFormat()
-            return my_bzrdir
-        bzrdir.format_registry.remove('default')
-        bzrdir.format_registry.register('sample', make_sample_bzrdir, '')
-        bzrdir.format_registry.set_default('sample')
+        self.assertTrue(isinstance(old_format, repository.RepositoryFormatKnit1))
+        repository.RepositoryFormat.set_default_format(SampleRepositoryFormat())
         # creating a repository should now create an instrumented dir.
         try:
             # the default branch format is used by the meta dir format
@@ -96 +54 @@
             result = dir.create_repository()
             self.assertEqual(result, 'A bzr repository dir')
         finally:
-            bzrdir.format_registry.remove('default')
-            bzrdir.format_registry.remove('sample')
-            bzrdir.format_registry.register('default', old_default, '')
-        self.assertIsInstance(repository.RepositoryFormat.get_default_format(),
-                              old_format.__class__)
+            repository.RepositoryFormat.set_default_format(old_format)
+        self.assertEqual(old_format, repository.RepositoryFormat.get_default_format())
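
The hunk above records a generational API swap: one side manages the default repository format through bzrdir.format_registry, the other through class-level accessors on repository.RepositoryFormat. A minimal sketch of the accessor pattern this revision's test exercises (illustrative only, not a quote from the file; SampleRepositoryFormat is the stub class defined just below in this diff):

    # Sketch: install a custom default format, then restore the original.
    old_format = repository.RepositoryFormat.get_default_format()
    repository.RepositoryFormat.set_default_format(SampleRepositoryFormat())
    try:
        pass  # repositories created here pick up SampleRepositoryFormat
    finally:
        repository.RepositoryFormat.set_default_format(old_format)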
@@ -104 +59 @@


 class SampleRepositoryFormat(repository.RepositoryFormat):
     """A sample format

-    this format is initializable, unsupported to aid in testing the
+    this format is initializable, unsupported to aid in testing the 
     open and open(unsupported=True) routines.
     """

@@ -117 +72 @@
     def initialize(self, a_bzrdir, shared=False):
         """Initialize a repository in a BzrDir"""
         t = a_bzrdir.get_repository_transport(self)
-        t.put_bytes('format', self.get_format_string())
+        t.put('format', StringIO(self.get_format_string()))
         return 'A bzr repository dir'

     def is_supported(self):
@@ -133 +88 @@
     def test_find_format(self):
         # is the right format object found for a repository?
         # create a branch with a few known format objects.
-        # this is not quite the same as
+        # this is not quite the same as 
         self.build_tree(["foo/", "bar/"])
         def check_format(format, url):
             dir = format._matchingbzrdir.initialize(url)
@@ -141 +96 @@
             t = get_transport(url)
             found_format = repository.RepositoryFormat.find_format(dir)
             self.failUnless(isinstance(found_format, format.__class__))
-        check_format(weaverepo.RepositoryFormat7(), "bar")
-
+        check_format(repository.RepositoryFormat7(), "bar")
+        
     def test_find_format_no_repository(self):
         dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
         self.assertRaises(errors.NoRepositoryPresent,
@@ -174 +129 @@

 class TestFormat6(TestCaseWithTransport):

-    def test_attribute__fetch_order(self):
-        """Weaves need topological data insertion."""
-        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
-        repo = weaverepo.RepositoryFormat6().initialize(control)
-        self.assertEqual('topological', repo._format._fetch_order)
-
-    def test_attribute__fetch_uses_deltas(self):
-        """Weaves do not reuse deltas."""
-        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
-        repo = weaverepo.RepositoryFormat6().initialize(control)
-        self.assertEqual(False, repo._format._fetch_uses_deltas)
-
-    def test_attribute__fetch_reconcile(self):
-        """Weave repositories need a reconcile after fetch."""
-        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
-        repo = weaverepo.RepositoryFormat6().initialize(control)
-        self.assertEqual(True, repo._format._fetch_reconcile)
-
     def test_no_ancestry_weave(self):
         control = bzrdir.BzrDirFormat6().initialize(self.get_url())
-        repo = weaverepo.RepositoryFormat6().initialize(control)
+        repo = repository.RepositoryFormat6().initialize(control)
         # We no longer need to create the ancestry.weave file
         # since it is *never* used.
         self.assertRaises(NoSuchFile,
                           control.transport.get,
                           'ancestry.weave')

-    def test_supports_external_lookups(self):
-        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
-        repo = weaverepo.RepositoryFormat6().initialize(control)
-        self.assertFalse(repo._format.supports_external_lookups)
-

 class TestFormat7(TestCaseWithTransport):
-
-    def test_attribute__fetch_order(self):
-        """Weaves need topological data insertion."""
-        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
-        repo = weaverepo.RepositoryFormat7().initialize(control)
-        self.assertEqual('topological', repo._format._fetch_order)
-
-    def test_attribute__fetch_uses_deltas(self):
-        """Weaves do not reuse deltas."""
-        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
-        repo = weaverepo.RepositoryFormat7().initialize(control)
-        self.assertEqual(False, repo._format._fetch_uses_deltas)
-
-    def test_attribute__fetch_reconcile(self):
-        """Weave repositories need a reconcile after fetch."""
-        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
-        repo = weaverepo.RepositoryFormat7().initialize(control)
-        self.assertEqual(True, repo._format._fetch_reconcile)
+    
     def test_disk_layout(self):
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
-        repo = weaverepo.RepositoryFormat7().initialize(control)
+        repo = repository.RepositoryFormat7().initialize(control)
         # in case of side effects of locking.
         repo.lock_write()
         repo.unlock()
@@ -248 +162 @@
                              'w\n'
                              'W\n',
                              t.get('inventory.weave').read())
-        # Creating a file with id Foo:Bar results in a non-escaped file name on
-        # disk.
-        control.create_branch()
-        tree = control.create_workingtree()
-        tree.add(['foo'], ['Foo:Bar'], ['file'])
-        tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
-        try:
-            tree.commit('first post', rev_id='first')
-        except errors.IllegalPath:
-            if sys.platform != 'win32':
-                raise
-            self.knownFailure('Foo:Bar cannot be used as a file-id on windows'
-                              ' in repo format 7')
-            return
-        self.assertEqualDiff(
-            '# bzr weave file v5\n'
-            'i\n'
-            '1 7fe70820e08a1aac0ef224d9c66ab66831cc4ab1\n'
-            'n first\n'
-            '\n'
-            'w\n'
-            '{ 0\n'
-            '. content\n'
-            '}\n'
-            'W\n',
-            t.get('weaves/74/Foo%3ABar.weave').read())

     def test_shared_disk_layout(self):
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
-        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
+        repo = repository.RepositoryFormat7().initialize(control, shared=True)
         # we want:
         # format 'Bazaar-NG Repository format 7'
         # inventory.weave == empty_weave
@@ -300 +188 @@
     def test_creates_lockdir(self):
         """Make sure it appears to be controlled by a LockDir existence"""
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
-        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
+        repo = repository.RepositoryFormat7().initialize(control, shared=True)
         t = control.get_repository_transport(None)
-        # TODO: Should check there is a 'lock' toplevel directory,
+        # TODO: Should check there is a 'lock' toplevel directory, 
         # regardless of contents
         self.assertFalse(t.has('lock/held/info'))
         repo.lock_write()
@@ -316 +204 @@
         """repo format 7 actually locks on lockdir"""
         base_url = self.get_url()
         control = bzrdir.BzrDirMetaFormat1().initialize(base_url)
-        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
+        repo = repository.RepositoryFormat7().initialize(control, shared=True)
         t = control.get_repository_transport(None)
         repo.lock_write()
         repo.unlock()
@@ -330 +218 @@

     def test_shared_no_tree_disk_layout(self):
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
-        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
+        repo = repository.RepositoryFormat7().initialize(control, shared=True)
         repo.set_make_working_trees(False)
         # we want:
         # format 'Bazaar-NG Repository format 7'
@@ -354 +242 @@
                              'W\n',
                              t.get('inventory.weave').read())

-    def test_supports_external_lookups(self):
-        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
-        repo = weaverepo.RepositoryFormat7().initialize(control)
-        self.assertFalse(repo._format.supports_external_lookups)
-

 class TestFormatKnit1(TestCaseWithTransport):
-
-    def test_attribute__fetch_order(self):
-        """Knits need topological data insertion."""
-        repo = self.make_repository('.',
-                format=bzrdir.format_registry.get('knit')())
-        self.assertEqual('topological', repo._format._fetch_order)
-
-    def test_attribute__fetch_uses_deltas(self):
-        """Knits reuse deltas."""
-        repo = self.make_repository('.',
-                format=bzrdir.format_registry.get('knit')())
-        self.assertEqual(True, repo._format._fetch_uses_deltas)
-
+    
     def test_disk_layout(self):
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
-        repo = knitrepo.RepositoryFormatKnit1().initialize(control)
+        repo = repository.RepositoryFormatKnit1().initialize(control)
         # in case of side effects of locking.
         repo.lock_write()
         repo.unlock()
@@ -393 +264 @@
         # self.assertEqualDiff('', t.get('lock').read())
         self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
         self.check_knits(t)
-        # Check per-file knits.
-        branch = control.create_branch()
-        tree = control.create_workingtree()
-        tree.add(['foo'], ['Nasty-IdC:'], ['file'])
-        tree.put_file_bytes_non_atomic('Nasty-IdC:', '')
-        tree.commit('1st post', rev_id='foo')
-        self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
-            '\nfoo fulltext 0 81  :')

-    def assertHasKnit(self, t, knit_name, extra_content=''):
+    def assertHasKnit(self, t, knit_name):
         """Assert that knit_name exists on t."""
-        self.assertEqualDiff('# bzr knit index 8\n' + extra_content,
+        self.assertEqualDiff('# bzr knit index 8\n',
                              t.get(knit_name + '.kndx').read())
+        # no default content
+        self.assertTrue(t.has(knit_name + '.knit'))

     def check_knits(self, t):
         """check knit content for a repository."""
@@ -415 +280 @@

     def test_shared_disk_layout(self):
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
-        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
+        repo = repository.RepositoryFormatKnit1().initialize(control, shared=True)
         # we want:
         # format 'Bazaar-NG Knit Repository Format 1'
         # lock: is a directory
@@ -434 +299 @@

     def test_shared_no_tree_disk_layout(self):
         control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
-        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
+        repo = repository.RepositoryFormatKnit1().initialize(control, shared=True)
         repo.set_make_working_trees(False)
         # we want:
         # format 'Bazaar-NG Knit Repository Format 1'
@@ -455 +320 @@
         self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
         self.check_knits(t)

-    def test_deserialise_sets_root_revision(self):
-        """We must have a inventory.root.revision
-
-        Old versions of the XML5 serializer did not set the revision_id for
-        the whole inventory. So we grab the one from the expected text. Which
-        is valid when the api is not being abused.
-        """
-        repo = self.make_repository('.',
-                format=bzrdir.format_registry.get('knit')())
-        inv_xml = '<inventory format="5">\n</inventory>\n'
-        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
-        self.assertEqual('test-rev-id', inv.root.revision)
-
-    def test_deserialise_uses_global_revision_id(self):
-        """If it is set, then we re-use the global revision id"""
-        repo = self.make_repository('.',
-                format=bzrdir.format_registry.get('knit')())
-        inv_xml = ('<inventory format="5" revision_id="other-rev-id">\n'
-                   '</inventory>\n')
-        # Arguably, the deserialise_inventory should detect a mismatch, and
-        # raise an error, rather than silently using one revision_id over the
-        # other.
-        self.assertRaises(AssertionError, repo._deserialise_inventory,
-            'test-rev-id', inv_xml)
-        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
-        self.assertEqual('other-rev-id', inv.root.revision)
-
-    def test_supports_external_lookups(self):
-        repo = self.make_repository('.',
-                format=bzrdir.format_registry.get('knit')())
-        self.assertFalse(repo._format.supports_external_lookups)
-
-
-class DummyRepository(object):
-    """A dummy repository for testing."""
-
-    _format = None
-    _serializer = None
-
-    def supports_rich_root(self):
-        if self._format is not None:
-            return self._format.rich_root_data
-        return False
-
-    def get_graph(self):
-        raise NotImplementedError
-
-    def get_parent_map(self, revision_ids):
-        raise NotImplementedError
-
-
-class InterDummy(repository.InterRepository):
-    """An inter-repository optimised code path for DummyRepository.
-
-    This is for use during testing where we use DummyRepository as repositories
+
+class InterString(repository.InterRepository):
+    """An inter-repository optimised code path for strings.
+
+    This is for use during testing where we use strings as repositories
     so that none of the default regsitered inter-repository classes will
-    MATCH.
+    match.
     """

     @staticmethod
     def is_compatible(repo_source, repo_target):
-        """InterDummy is compatible with DummyRepository."""
-        return (isinstance(repo_source, DummyRepository) and
-            isinstance(repo_target, DummyRepository))
+        """InterString is compatible with strings-as-repos."""
+        return isinstance(repo_source, str) and isinstance(repo_target, str)


 class TestInterRepository(TestCaseWithTransport):
@@ -530 +344 @@
         # This also tests that the default registered optimised interrepository
         # classes do not barf inappropriately when a surprising repository type
         # is handed to them.
-        dummy_a = DummyRepository()
-        dummy_b = DummyRepository()
+        dummy_a = "Repository 1."
+        dummy_b = "Repository 2."
         self.assertGetsDefaultInterRepository(dummy_a, dummy_b)

     def assertGetsDefaultInterRepository(self, repo_a, repo_b):
-        """Asserts that InterRepository.get(repo_a, repo_b) -> the default.
-
-        The effective default is now InterSameDataRepository because there is
-        no actual sane default in the presence of incompatible data models.
-        """
+        """Asserts that InterRepository.get(repo_a, repo_b) -> the default."""
         inter_repo = repository.InterRepository.get(repo_a, repo_b)
-        self.assertEqual(repository.InterSameDataRepository,
+        self.assertEqual(repository.InterRepository,
                          inter_repo.__class__)
         self.assertEqual(repo_a, inter_repo.source)
         self.assertEqual(repo_b, inter_repo.target)
@@ -552 +362 @@
         # and that it is correctly selected when given a repository
         # pair that it returns true on for the is_compatible static method
         # check
-        dummy_a = DummyRepository()
-        dummy_a._format = RepositoryFormat()
-        dummy_b = DummyRepository()
-        dummy_b._format = RepositoryFormat()
-        repo = self.make_repository('.')
-        # hack dummies to look like repo somewhat.
-        dummy_a._serializer = repo._serializer
-        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
-        dummy_a._format.rich_root_data = repo._format.rich_root_data
-        dummy_b._serializer = repo._serializer
-        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
-        dummy_b._format.rich_root_data = repo._format.rich_root_data
-        repository.InterRepository.register_optimiser(InterDummy)
+        dummy_a = "Repository 1."
+        dummy_b = "Repository 2."
+        repository.InterRepository.register_optimiser(InterString)
         try:
-            # we should get the default for something InterDummy returns False
+            # we should get the default for something InterString returns False
             # to
-            self.assertFalse(InterDummy.is_compatible(dummy_a, repo))
-            self.assertGetsDefaultInterRepository(dummy_a, repo)
-            # and we should get an InterDummy for a pair it 'likes'
-            self.assertTrue(InterDummy.is_compatible(dummy_a, dummy_b))
+            self.assertFalse(InterString.is_compatible(dummy_a, None))
+            self.assertGetsDefaultInterRepository(dummy_a, None)
+            # and we should get an InterString for a pair it 'likes'
+            self.assertTrue(InterString.is_compatible(dummy_a, dummy_b))
             inter_repo = repository.InterRepository.get(dummy_a, dummy_b)
-            self.assertEqual(InterDummy, inter_repo.__class__)
+            self.assertEqual(InterString, inter_repo.__class__)
             self.assertEqual(dummy_a, inter_repo.source)
             self.assertEqual(dummy_b, inter_repo.target)
         finally:
-            repository.InterRepository.unregister_optimiser(InterDummy)
+            repository.InterRepository.unregister_optimiser(InterString)
         # now we should get the default InterRepository object again.
         self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
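
The test above exercises the InterRepository optimiser registry: InterRepository.get(source, target) consults each registered optimiser's is_compatible() and falls back to the default class when none claims the pair. A condensed sketch of that life-cycle using the InterString class from this diff (the assert lines are illustrative, not from the file):

    repository.InterRepository.register_optimiser(InterString)
    try:
        # A pair the optimiser 'likes' selects InterString...
        inter = repository.InterRepository.get("Repository 1.", "Repository 2.")
        assert inter.__class__ is InterString
    finally:
        # ...and unregistering restores the default lookup.
        repository.InterRepository.unregister_optimiser(InterString)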
@@ -583 +383 @@

@@ -587 +387 @@
     def test_is_compatible_and_registered(self):
         # InterWeaveRepo is compatible when either side
         # is a format 5/6/7 branch
-        from bzrlib.repofmt import knitrepo, weaverepo
-        formats = [weaverepo.RepositoryFormat5(),
-                   weaverepo.RepositoryFormat6(),
-                   weaverepo.RepositoryFormat7()]
-        incompatible_formats = [weaverepo.RepositoryFormat4(),
-                                knitrepo.RepositoryFormatKnit1(),
+        formats = [repository.RepositoryFormat5(),
+                   repository.RepositoryFormat6(),
+                   repository.RepositoryFormat7()]
+        incompatible_formats = [repository.RepositoryFormat4(),
+                                repository.RepositoryFormatKnit1(),
                                 ]
         repo_a = self.make_repository('a')
         repo_b = self.make_repository('b')
@@ -619 +418 @@
         t = get_transport(self.get_url('.'))
         t.mkdir('repository')
         repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
-        repo = weaverepo.RepositoryFormat7().initialize(repo_dir)
-        target_format = knitrepo.RepositoryFormatKnit1()
+        repo = repository.RepositoryFormat7().initialize(repo_dir)
+        target_format = repository.RepositoryFormatKnit1()
         converter = repository.CopyConverter(target_format)
         pb = bzrlib.ui.ui_factory.nested_progress_bar()
         try:
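
The hunk cuts off at the try: block, so the conversion call itself is elided. A plausible completion of the pattern, assuming CopyConverter.convert(repo, pb) and the progress bar's finished() method; treat the body as an assumption rather than the file's actual text:

    converter = repository.CopyConverter(target_format)
    pb = bzrlib.ui.ui_factory.nested_progress_bar()
    try:
        converter.convert(repo, pb)  # assumed call; the diff elides this body
    finally:
        pb.finished()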
@@ -632 +431 @@


 class TestMisc(TestCase):
-
+    
     def test_unescape_xml(self):
         """We get some kind of error when malformed entities are passed"""
-        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;')
-
-
-class TestRepositoryFormatKnit3(TestCaseWithTransport):
-
-    def test_attribute__fetch_order(self):
-        """Knits need topological data insertion."""
-        format = bzrdir.BzrDirMetaFormat1()
-        format.repository_format = knitrepo.RepositoryFormatKnit3()
-        repo = self.make_repository('.', format=format)
-        self.assertEqual('topological', repo._format._fetch_order)
-
-    def test_attribute__fetch_uses_deltas(self):
-        """Knits reuse deltas."""
-        format = bzrdir.BzrDirMetaFormat1()
-        format.repository_format = knitrepo.RepositoryFormatKnit3()
-        repo = self.make_repository('.', format=format)
-        self.assertEqual(True, repo._format._fetch_uses_deltas)
-
-    def test_convert(self):
-        """Ensure the upgrade adds weaves for roots"""
-        format = bzrdir.BzrDirMetaFormat1()
-        format.repository_format = knitrepo.RepositoryFormatKnit1()
-        tree = self.make_branch_and_tree('.', format)
-        tree.commit("Dull commit", rev_id="dull")
-        revision_tree = tree.branch.repository.revision_tree('dull')
-        revision_tree.lock_read()
-        try:
-            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
-                revision_tree.inventory.root.file_id)
-        finally:
-            revision_tree.unlock()
-        format = bzrdir.BzrDirMetaFormat1()
-        format.repository_format = knitrepo.RepositoryFormatKnit3()
-        upgrade.Convert('.', format)
-        tree = workingtree.WorkingTree.open('.')
-        revision_tree = tree.branch.repository.revision_tree('dull')
-        revision_tree.lock_read()
-        try:
-            revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
-        finally:
-            revision_tree.unlock()
-        tree.commit("Another dull commit", rev_id='dull2')
-        revision_tree = tree.branch.repository.revision_tree('dull2')
-        revision_tree.lock_read()
-        self.addCleanup(revision_tree.unlock)
-        self.assertEqual('dull', revision_tree.inventory.root.revision)
-
-    def test_supports_external_lookups(self):
-        format = bzrdir.BzrDirMetaFormat1()
-        format.repository_format = knitrepo.RepositoryFormatKnit3()
-        repo = self.make_repository('.', format=format)
-        self.assertFalse(repo._format.supports_external_lookups)
-
-
-class Test2a(tests.TestCaseWithMemoryTransport):
-
-    def test_fetch_combines_groups(self):
-        builder = self.make_branch_builder('source', format='2a')
-        builder.start_series()
-        builder.build_snapshot('1', None, [
-            ('add', ('', 'root-id', 'directory', '')),
-            ('add', ('file', 'file-id', 'file', 'content\n'))])
-        builder.build_snapshot('2', ['1'], [
-            ('modify', ('file-id', 'content-2\n'))])
-        builder.finish_series()
-        source = builder.get_branch()
-        target = self.make_repository('target', format='2a')
-        target.fetch(source.repository)
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        details = target.texts._index.get_build_details(
-            [('file-id', '1',), ('file-id', '2',)])
-        file_1_details = details[('file-id', '1')]
-        file_2_details = details[('file-id', '2')]
-        # The index, and what to read off disk, should be the same for both
-        # versions of the file.
-        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
-
-    def test_fetch_combines_groups(self):
-        builder = self.make_branch_builder('source', format='2a')
-        builder.start_series()
-        builder.build_snapshot('1', None, [
-            ('add', ('', 'root-id', 'directory', '')),
-            ('add', ('file', 'file-id', 'file', 'content\n'))])
-        builder.build_snapshot('2', ['1'], [
-            ('modify', ('file-id', 'content-2\n'))])
-        builder.finish_series()
-        source = builder.get_branch()
-        target = self.make_repository('target', format='2a')
-        target.fetch(source.repository)
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        details = target.texts._index.get_build_details(
-            [('file-id', '1',), ('file-id', '2',)])
-        file_1_details = details[('file-id', '1')]
-        file_2_details = details[('file-id', '2')]
-        # The index, and what to read off disk, should be the same for both
-        # versions of the file.
-        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
-
-    def test_fetch_combines_groups(self):
-        builder = self.make_branch_builder('source', format='2a')
-        builder.start_series()
-        builder.build_snapshot('1', None, [
-            ('add', ('', 'root-id', 'directory', '')),
-            ('add', ('file', 'file-id', 'file', 'content\n'))])
-        builder.build_snapshot('2', ['1'], [
-            ('modify', ('file-id', 'content-2\n'))])
-        builder.finish_series()
-        source = builder.get_branch()
-        target = self.make_repository('target', format='2a')
-        target.fetch(source.repository)
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        details = target.texts._index.get_build_details(
-            [('file-id', '1',), ('file-id', '2',)])
-        file_1_details = details[('file-id', '1')]
-        file_2_details = details[('file-id', '2')]
-        # The index, and what to read off disk, should be the same for both
-        # versions of the file.
-        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
-
-    def test_format_pack_compresses_True(self):
-        repo = self.make_repository('repo', format='2a')
-        self.assertTrue(repo._format.pack_compresses)
-
-    def test_inventories_use_chk_map_with_parent_base_dict(self):
-        tree = self.make_branch_and_memory_tree('repo', format="2a")
-        tree.lock_write()
-        tree.add([''], ['TREE_ROOT'])
-        revid = tree.commit("foo")
-        tree.unlock()
-        tree.lock_read()
-        self.addCleanup(tree.unlock)
-        inv = tree.branch.repository.get_inventory(revid)
-        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
-        inv.parent_id_basename_to_file_id._ensure_root()
-        inv.id_to_entry._ensure_root()
-        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
-        self.assertEqual(65536,
-            inv.parent_id_basename_to_file_id._root_node.maximum_size)
-
-    def test_autopack_unchanged_chk_nodes(self):
-        # at 20 unchanged commits, chk pages are packed that are split into
-        # two groups such that the new pack being made doesn't have all its
-        # pages in the source packs (though they are in the repository).
-        # Use a memory backed repository, we don't need to hit disk for this
-        tree = self.make_branch_and_memory_tree('tree', format='2a')
-        tree.lock_write()
-        self.addCleanup(tree.unlock)
-        tree.add([''], ['TREE_ROOT'])
-        for pos in range(20):
-            tree.commit(str(pos))
-
-    def test_pack_with_hint(self):
-        tree = self.make_branch_and_memory_tree('tree', format='2a')
-        tree.lock_write()
-        self.addCleanup(tree.unlock)
-        tree.add([''], ['TREE_ROOT'])
-        # 1 commit to leave untouched
-        tree.commit('1')
-        to_keep = tree.branch.repository._pack_collection.names()
-        # 2 to combine
-        tree.commit('2')
-        tree.commit('3')
-        all = tree.branch.repository._pack_collection.names()
-        combine = list(set(all) - set(to_keep))
-        self.assertLength(3, all)
-        self.assertLength(2, combine)
-        tree.branch.repository.pack(hint=combine)
-        final = tree.branch.repository._pack_collection.names()
-        self.assertLength(2, final)
-        self.assertFalse(combine[0] in final)
-        self.assertFalse(combine[1] in final)
-        self.assertSubset(to_keep, final)
-
-    def test_stream_source_to_gc(self):
-        source = self.make_repository('source', format='2a')
-        target = self.make_repository('target', format='2a')
-        stream = source._get_source(target._format)
-        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)
-
-    def test_stream_source_to_non_gc(self):
-        source = self.make_repository('source', format='2a')
-        target = self.make_repository('target', format='rich-root-pack')
-        stream = source._get_source(target._format)
-        # We don't want the child GroupCHKStreamSource
-        self.assertIs(type(stream), repository.StreamSource)
-
-    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
-        source_builder = self.make_branch_builder('source',
-                            format='2a')
-        # We have to build a fairly large tree, so that we are sure the chk
-        # pages will have split into multiple pages.
-        entries = [('add', ('', 'a-root-id', 'directory', None))]
-        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
-            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
-                fname = i + j
-                fid = fname + '-id'
-                content = 'content for %s\n' % (fname,)
-                entries.append(('add', (fname, fid, 'file', content)))
-        source_builder.start_series()
-        source_builder.build_snapshot('rev-1', None, entries)
-        # Now change a few of them, so we get a few new pages for the second
-        # revision
-        source_builder.build_snapshot('rev-2', ['rev-1'], [
-            ('modify', ('aa-id', 'new content for aa-id\n')),
-            ('modify', ('cc-id', 'new content for cc-id\n')),
-            ('modify', ('zz-id', 'new content for zz-id\n')),
-            ])
-        source_builder.finish_series()
-        source_branch = source_builder.get_branch()
-        source_branch.lock_read()
-        self.addCleanup(source_branch.unlock)
-        target = self.make_repository('target', format='2a')
-        source = source_branch.repository._get_source(target._format)
-        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)
-
-        # On a regular pass, getting the inventories and chk pages for rev-2
-        # would only get the newly created chk pages
-        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
-                                    set(['rev-2']))
-        simple_chk_records = []
-        for vf_name, substream in source.get_stream(search):
-            if vf_name == 'chk_bytes':
-                for record in substream:
-                    simple_chk_records.append(record.key)
-            else:
-                for _ in substream:
-                    continue
-        # 3 pages, the root (InternalNode), + 2 pages which actually changed
-        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
-                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
-                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
-                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
-                         simple_chk_records)
-        # Now, when we do a similar call using 'get_stream_for_missing_keys'
-        # we should get a much larger set of pages.
-        missing = [('inventories', 'rev-2')]
-        full_chk_records = []
-        for vf_name, substream in source.get_stream_for_missing_keys(missing):
-            if vf_name == 'inventories':
-                for record in substream:
-                    self.assertEqual(('rev-2',), record.key)
-            elif vf_name == 'chk_bytes':
-                for record in substream:
-                    full_chk_records.append(record.key)
-            else:
-                self.fail('Should not be getting a stream of %s' % (vf_name,))
-        # We have 257 records now. This is because we have 1 root page, and 256
-        # leaf pages in a complete listing.
-        self.assertEqual(257, len(full_chk_records))
-        self.assertSubset(simple_chk_records, full_chk_records)
-
-    def test_inconsistency_fatal(self):
-        repo = self.make_repository('repo', format='2a')
-        self.assertTrue(repo.revisions._index._inconsistency_fatal)
-        self.assertFalse(repo.texts._index._inconsistency_fatal)
-        self.assertFalse(repo.inventories._index._inconsistency_fatal)
-        self.assertFalse(repo.signatures._index._inconsistency_fatal)
-        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
-
-
-class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):
-
-    def test_source_to_exact_pack_092(self):
-        source = self.make_repository('source', format='pack-0.92')
-        target = self.make_repository('target', format='pack-0.92')
-        stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
-
-    def test_source_to_exact_pack_rich_root_pack(self):
-        source = self.make_repository('source', format='rich-root-pack')
-        target = self.make_repository('target', format='rich-root-pack')
-        stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
-
-    def test_source_to_exact_pack_19(self):
-        source = self.make_repository('source', format='1.9')
-        target = self.make_repository('target', format='1.9')
-        stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
-
-    def test_source_to_exact_pack_19_rich_root(self):
-        source = self.make_repository('source', format='1.9-rich-root')
-        target = self.make_repository('target', format='1.9-rich-root')
-        stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
-
-    def test_source_to_remote_exact_pack_19(self):
-        trans = self.make_smart_server('target')
-        trans.ensure_base()
-        source = self.make_repository('source', format='1.9')
-        target = self.make_repository('target', format='1.9')
-        target = repository.Repository.open(trans.base)
-        stream_source = source._get_source(target._format)
-        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)
-
-    def test_stream_source_to_non_exact(self):
-        source = self.make_repository('source', format='pack-0.92')
-        target = self.make_repository('target', format='1.9')
-        stream = source._get_source(target._format)
-        self.assertIs(type(stream), repository.StreamSource)
-
-    def test_stream_source_to_non_exact_rich_root(self):
-        source = self.make_repository('source', format='1.9')
-        target = self.make_repository('target', format='1.9-rich-root')
-        stream = source._get_source(target._format)
-        self.assertIs(type(stream), repository.StreamSource)
-
-    def test_source_to_remote_non_exact_pack_19(self):
-        trans = self.make_smart_server('target')
-        trans.ensure_base()
-        source = self.make_repository('source', format='1.9')
-        target = self.make_repository('target', format='1.6')
-        target = repository.Repository.open(trans.base)
-        stream_source = source._get_source(target._format)
-        self.assertIs(type(stream_source), repository.StreamSource)
-
-    def test_stream_source_to_knit(self):
-        source = self.make_repository('source', format='pack-0.92')
-        target = self.make_repository('target', format='dirstate')
-        stream = source._get_source(target._format)
-        self.assertIs(type(stream), repository.StreamSource)
-
-
-class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
-    """Tests for _find_parent_ids_of_revisions."""
-
-    def setUp(self):
-        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
-        self.builder = self.make_branch_builder('source',
-            format='development6-rich-root')
-        self.builder.start_series()
-        self.builder.build_snapshot('initial', None,
-            [('add', ('', 'tree-root', 'directory', None))])
-        self.repo = self.builder.get_branch().repository
-        self.addCleanup(self.builder.finish_series)
-
-    def assertParentIds(self, expected_result, rev_set):
-        self.assertEqual(sorted(expected_result),
-            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))
-
-    def test_simple(self):
-        self.builder.build_snapshot('revid1', None, [])
-        self.builder.build_snapshot('revid2', ['revid1'], [])
-        rev_set = ['revid2']
-        self.assertParentIds(['revid1'], rev_set)
-
-    def test_not_first_parent(self):
-        self.builder.build_snapshot('revid1', None, [])
-        self.builder.build_snapshot('revid2', ['revid1'], [])
-        self.builder.build_snapshot('revid3', ['revid2'], [])
-        rev_set = ['revid3', 'revid2']
-        self.assertParentIds(['revid1'], rev_set)
-
-    def test_not_null(self):
-        rev_set = ['initial']
-        self.assertParentIds([], rev_set)
-
-    def test_not_null_set(self):
-        self.builder.build_snapshot('revid1', None, [])
-        rev_set = [_mod_revision.NULL_REVISION]
-        self.assertParentIds([], rev_set)
-
-    def test_ghost(self):
-        self.builder.build_snapshot('revid1', None, [])
-        rev_set = ['ghost', 'revid1']
-        self.assertParentIds(['initial'], rev_set)
-
-    def test_ghost_parent(self):
-        self.builder.build_snapshot('revid1', None, [])
-        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
-        rev_set = ['revid2', 'revid1']
-        self.assertParentIds(['ghost', 'initial'], rev_set)
-
-    def test_righthand_parent(self):
-        self.builder.build_snapshot('revid1', None, [])
-        self.builder.build_snapshot('revid2a', ['revid1'], [])
-        self.builder.build_snapshot('revid2b', ['revid1'], [])
-        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
-        rev_set = ['revid3', 'revid2a']
-        self.assertParentIds(['revid1', 'revid2b'], rev_set)
-
-
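The TestDevelopment6FindParentIdsOfRevisions cases above pin down the contract of _find_parent_ids_of_revisions: gather the parents of every revision in the set, then discard the set members themselves; ghost parents are reported, NULL_REVISION never is. A standalone sketch over a plain parent map that satisfies each assertion (find_parent_ids_of_revisions and parent_map are illustrative names, not bzrlib API):

    def find_parent_ids_of_revisions(parent_map, rev_set):
        # parent_map: {revision_id: tuple of parent ids}; ghosts are absent.
        rev_set = set(rev_set)
        parents = set()
        for revision_id in rev_set:
            parents.update(parent_map.get(revision_id, ()))
        parents.difference_update(rev_set)  # interior revisions are not parents
        parents.discard('null:')            # NULL_REVISION is never reported
        return parents

    # test_ghost_parent: with revid2 -> ('revid1', 'ghost') and
    # revid1 -> ('initial',), the set ['revid2', 'revid1'] yields
    # {'ghost', 'initial'}, matching the assertion above.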
 
 
@@ -1024 @@
-class TestWithBrokenRepo(TestCaseWithTransport):
-    """These tests seem to be more appropriate as interface tests?"""
-
-    def make_broken_repository(self):
-        # XXX: This function is borrowed from Aaron's "Reconcile can fix bad
-        # parent references" branch which is due to land in bzr.dev soon.  Once
-        # it does, this duplication should be removed.
-        repo = self.make_repository('broken-repo')
-        cleanups = []
-        try:
-            repo.lock_write()
-            cleanups.append(repo.unlock)
-            repo.start_write_group()
-            cleanups.append(repo.commit_write_group)
-            # make rev1a: A well-formed revision, containing 'file1'
-            inv = inventory.Inventory(revision_id='rev1a')
-            inv.root.revision = 'rev1a'
-            self.add_file(repo, inv, 'file1', 'rev1a', [])
-            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
-            repo.add_inventory('rev1a', inv, [])
-            revision = _mod_revision.Revision('rev1a',
-                committer='jrandom@example.com', timestamp=0,
-                inventory_sha1='', timezone=0, message='foo', parent_ids=[])
-            repo.add_revision('rev1a',revision, inv)
-
-            # make rev1b, which has no Revision, but has an Inventory, and
-            # file1
-            inv = inventory.Inventory(revision_id='rev1b')
-            inv.root.revision = 'rev1b'
-            self.add_file(repo, inv, 'file1', 'rev1b', [])
-            repo.add_inventory('rev1b', inv, [])
-
-            # make rev2, with file1 and file2
-            # file2 is sane
-            # file1 has 'rev1b' as an ancestor, even though this is not
-            # mentioned by 'rev1a', making it an unreferenced ancestor
-            inv = inventory.Inventory()
-            self.add_file(repo, inv, 'file1', 'rev2', ['rev1a', 'rev1b'])
-            self.add_file(repo, inv, 'file2', 'rev2', [])
-            self.add_revision(repo, 'rev2', inv, ['rev1a'])
-
-            # make ghost revision rev1c
-            inv = inventory.Inventory()
-            self.add_file(repo, inv, 'file2', 'rev1c', [])
-
-            # make rev3 with file2
-            # file2 refers to 'rev1c', which is a ghost in this repository, so
-            # file2 cannot have rev1c as its ancestor.
-            inv = inventory.Inventory()
-            self.add_file(repo, inv, 'file2', 'rev3', ['rev1c'])
-            self.add_revision(repo, 'rev3', inv, ['rev1c'])
-            return repo
-        finally:
-            for cleanup in reversed(cleanups):
-                cleanup()
-
-    def add_revision(self, repo, revision_id, inv, parent_ids):
-        inv.revision_id = revision_id
-        inv.root.revision = revision_id
-        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
-        repo.add_inventory(revision_id, inv, parent_ids)
-        revision = _mod_revision.Revision(revision_id,
-            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
-            timezone=0, message='foo', parent_ids=parent_ids)
-        repo.add_revision(revision_id,revision, inv)
-
-    def add_file(self, repo, inv, filename, revision, parents):
-        file_id = filename + '-id'
-        entry = inventory.InventoryFile(file_id, filename, 'TREE_ROOT')
-        entry.revision = revision
-        entry.text_size = 0
-        inv.add(entry)
-        text_key = (file_id, revision)
-        parent_keys = [(file_id, parent) for parent in parents]
-        repo.texts.add_lines(text_key, parent_keys, ['line\n'])
-
-    def test_insert_from_broken_repo(self):
-        """Inserting a data stream from a broken repository won't silently
-        corrupt the target repository.
-        """
-        broken_repo = self.make_broken_repository()
-        empty_repo = self.make_repository('empty-repo')
-        try:
-            empty_repo.fetch(broken_repo)
-        except (errors.RevisionNotPresent, errors.BzrCheckError):
-            # Test successful: compression parent not being copied leads to
-            # error.
-            return
-        empty_repo.lock_read()
-        self.addCleanup(empty_repo.unlock)
-        text = empty_repo.texts.get_record_stream(
-            [('file2-id', 'rev3')], 'topological', True).next()
-        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
-
-
-class TestRepositoryPackCollection(TestCaseWithTransport):
-
-    def get_format(self):
-        return bzrdir.format_registry.make_bzrdir('pack-0.92')
-
-    def get_packs(self):
-        format = self.get_format()
-        repo = self.make_repository('.', format=format)
-        return repo._pack_collection
-
-    def make_packs_and_alt_repo(self, write_lock=False):
-        """Create a pack repo with 3 packs, and access it via a second repo."""
-        tree = self.make_branch_and_tree('.', format=self.get_format())
-        tree.lock_write()
-        self.addCleanup(tree.unlock)
-        rev1 = tree.commit('one')
-        rev2 = tree.commit('two')
-        rev3 = tree.commit('three')
-        r = repository.Repository.open('.')
-        if write_lock:
-            r.lock_write()
-        else:
-            r.lock_read()
-        self.addCleanup(r.unlock)
-        packs = r._pack_collection
-        packs.ensure_loaded()
-        return tree, r, packs, [rev1, rev2, rev3]
-
-    def test__clear_obsolete_packs(self):
-        packs = self.get_packs()
-        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
-        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
-        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
-        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
-        res = packs._clear_obsolete_packs()
-        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
-        self.assertEqual([], obsolete_pack_trans.list_dir('.'))
-
-    def test__clear_obsolete_packs_preserve(self):
-        packs = self.get_packs()
-        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
-        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
-        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
-        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
-        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
-        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
-        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
-                         sorted(obsolete_pack_trans.list_dir('.')))
-
-    def test__max_pack_count(self):
-        """The maximum pack count is a function of the number of revisions."""
-        # no revisions - one pack, so that we can have a revision free repo
-        # without it blowing up
-        packs = self.get_packs()
-        self.assertEqual(1, packs._max_pack_count(0))
-        # after that the sum of the digits, - check the first 1-9
-        self.assertEqual(1, packs._max_pack_count(1))
-        self.assertEqual(2, packs._max_pack_count(2))
-        self.assertEqual(3, packs._max_pack_count(3))
-        self.assertEqual(4, packs._max_pack_count(4))
-        self.assertEqual(5, packs._max_pack_count(5))
-        self.assertEqual(6, packs._max_pack_count(6))
-        self.assertEqual(7, packs._max_pack_count(7))
-        self.assertEqual(8, packs._max_pack_count(8))
-        self.assertEqual(9, packs._max_pack_count(9))
-        # check the boundary cases with two digits for the next decade
-        self.assertEqual(1, packs._max_pack_count(10))
-        self.assertEqual(2, packs._max_pack_count(11))
-        self.assertEqual(10, packs._max_pack_count(19))
-        self.assertEqual(2, packs._max_pack_count(20))
-        self.assertEqual(3, packs._max_pack_count(21))
-        # check some arbitrary big numbers
-        self.assertEqual(25, packs._max_pack_count(112894))
-
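The assertions in test__max_pack_count encode a simple rule: the pack limit for N revisions is the sum of N's decimal digits, with a floor of one pack for an empty repository. A sketch of that rule (max_pack_count is an illustrative stand-in for the private _max_pack_count):

    def max_pack_count(total_revisions):
        if total_revisions == 0:
            return 1  # a revision-free repo still keeps one pack
        return sum(int(digit) for digit in str(total_revisions))

    # max_pack_count(19) == 10, max_pack_count(20) == 2, and
    # max_pack_count(112894) == 1+1+2+8+9+4 == 25, matching the test.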
 
 
1197
 
    def test_repr(self):
        packs = self.get_packs()
        self.assertContainsRe(repr(packs),
            'RepositoryPackCollection(.*Repository(.*))')

    def test__obsolete_packs(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        # Schedule this one for removal
        packs._remove_pack_from_memory(pack)
        # Simulate a concurrent update by renaming the .pack file and one of
        # the indices
        packs.transport.rename('packs/%s.pack' % (names[0],),
                               'obsolete_packs/%s.pack' % (names[0],))
        packs.transport.rename('indices/%s.iix' % (names[0],),
                               'obsolete_packs/%s.iix' % (names[0],))
        # Now trigger the obsoletion, and ensure that all the remaining files
        # are still renamed
        packs._obsolete_packs([pack])
        self.assertEqual([n + '.pack' for n in names[1:]],
                         sorted(packs._pack_transport.list_dir('.')))
        # names[0] should not be present in the index anymore
        self.assertEqual(names[1:],
            sorted(set([osutils.splitext(n)[0] for n in
                        packs._index_transport.list_dir('.')])))

    def test_pack_distribution_zero(self):
        packs = self.get_packs()
        self.assertEqual([0], packs.pack_distribution(0))

    def test_ensure_loaded_unlocked(self):
        packs = self.get_packs()
        self.assertRaises(errors.ObjectNotLocked,
                          packs.ensure_loaded)

    def test_pack_distribution_one_to_nine(self):
        packs = self.get_packs()
        self.assertEqual([1],
            packs.pack_distribution(1))
        self.assertEqual([1, 1],
            packs.pack_distribution(2))
        self.assertEqual([1, 1, 1],
            packs.pack_distribution(3))
        self.assertEqual([1, 1, 1, 1],
            packs.pack_distribution(4))
        self.assertEqual([1, 1, 1, 1, 1],
            packs.pack_distribution(5))
        self.assertEqual([1, 1, 1, 1, 1, 1],
            packs.pack_distribution(6))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(7))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(8))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(9))

    def test_pack_distribution_stable_at_boundaries(self):
        """When there are multi-rev packs the counts are stable."""
        packs = self.get_packs()
        # in 10s:
        self.assertEqual([10], packs.pack_distribution(10))
        self.assertEqual([10, 1], packs.pack_distribution(11))
        self.assertEqual([10, 10], packs.pack_distribution(20))
        self.assertEqual([10, 10, 1], packs.pack_distribution(21))
        # 100s
        self.assertEqual([100], packs.pack_distribution(100))
        self.assertEqual([100, 1], packs.pack_distribution(101))
        self.assertEqual([100, 10, 1], packs.pack_distribution(111))
        self.assertEqual([100, 100], packs.pack_distribution(200))
        self.assertEqual([100, 100, 1], packs.pack_distribution(201))
        self.assertEqual([100, 100, 10, 1], packs.pack_distribution(211))
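        # (See the distribution sketch immediately below.)

    # A minimal sketch (hypothetical helper, not bzrlib's actual code) of
    # the rule the expected values above imply: read the revision count as
    # decimal digits and emit <digit> buckets of 10**position revisions,
    # largest first; zero revisions distribute to a single empty pack.
    def _pack_distribution_sketch(self, revision_count):
        if revision_count == 0:
            return [0]
        result = []
        for position, digit in enumerate(reversed(str(revision_count))):
            result = [10 ** position] * int(digit) + result
        return result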
    def test_plan_pack_operations_2009_revisions_skip_all_packs(self):
        packs = self.get_packs()
        existing_packs = [(2000, "big"), (9, "medium")]
        # rev count - 2009 -> 2x1000 + 9x1
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 1, 1, 1, 1, 1, 1, 1, 1, 1])
        self.assertEqual([], pack_operations)

    def test_plan_pack_operations_2010_revisions_skip_all_packs(self):
        packs = self.get_packs()
        existing_packs = [(2000, "big"), (9, "medium"), (1, "single")]
        # rev count - 2010 -> 2x1000 + 1x10
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 10])
        self.assertEqual([], pack_operations)

    def test_plan_pack_operations_2010_combines_smallest_two(self):
        packs = self.get_packs()
        existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"),
            (1, "single1")]
        # rev count - 2010 -> 2x1000 + 1x10 (3)
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 10])
        self.assertEqual([[2, ["single2", "single1"]]], pack_operations)

    def test_plan_pack_operations_creates_a_single_op(self):
        packs = self.get_packs()
        existing_packs = [(50, 'a'), (40, 'b'), (30, 'c'), (10, 'd'),
                          (10, 'e'), (6, 'f'), (4, 'g')]
        # rev count 150 -> 1x100 and 5x10
        # The two size 10 packs do not need to be touched. The 50, 40, 30
        # would be combined into a single 120 size pack, and the 6 & 4 would
        # be combined into a size 10 pack. However, if we have to rewrite
        # them, we save a pack file with no increased I/O by putting them
        # into the same file.
        distribution = packs.pack_distribution(150)
        pack_operations = packs.plan_autopack_combinations(existing_packs,
                                                           distribution)
        self.assertEqual([[130, ['a', 'b', 'c', 'f', 'g']]], pack_operations)
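        # For reference: pack_distribution(150) is [100, 10, 10, 10, 10,
        # 10] under the digit rule. 'd' and 'e' already fill 10-revision
        # buckets, while 'a', 'b' and 'c' overflow the single 100 bucket;
        # once those are rewritten anyway, 'f' and 'g' ride along free.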
    def test_all_packs_none(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([], packs.all_packs())

    def test_all_packs_one(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([
            packs.get_pack_by_name(packs.names()[0])],
            packs.all_packs())

    def test_all_packs_two(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.commit('continue')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([
            packs.get_pack_by_name(packs.names()[0]),
            packs.get_pack_by_name(packs.names()[1]),
            ], packs.all_packs())

    def test_get_pack_by_name(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.reset()
        packs.ensure_loaded()
        name = packs.names()[0]
        pack_1 = packs.get_pack_by_name(name)
        # the pack should be correctly initialised
        sizes = packs._names[name]
        rev_index = GraphIndex(packs._index_transport, name + '.rix', sizes[0])
        inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1])
        txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2])
        sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3])
        self.assertEqual(pack_repo.ExistingPack(packs._pack_transport,
            name, rev_index, inv_index, txt_index, sig_index), pack_1)
        # and the same instance should be returned on successive calls.
        self.assertTrue(pack_1 is packs.get_pack_by_name(name))
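        # The sizes list consulted above follows the four index suffixes
        # in order: .rix (revisions), .iix (inventories), .tix (texts)
        # and .six (signatures).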
    def test_reload_pack_names_new_entry(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Add a new pack file into the repository
        rev4 = tree.commit('four')
        new_names = tree.branch.repository._pack_collection.names()
        new_name = set(new_names).difference(names)
        self.assertEqual(1, len(new_name))
        new_name = new_name.pop()
        # The old collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        # And the repository can access the new revision
        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
        self.assertFalse(packs.reload_pack_names())

    def test_reload_pack_names_added_and_removed(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Now repack the whole thing
        tree.branch.repository.pack()
        new_names = tree.branch.repository._pack_collection.names()
        # The other collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
        self.assertFalse(packs.reload_pack_names())

    def test_reload_pack_names_preserves_pending(self):
        # TODO: Update this to also test for pending-deleted names
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # We will add one pack (via start_write_group + insert_record_stream),
        # and remove another pack (via _remove_pack_from_memory)
        orig_names = packs.names()
        orig_at_load = packs._packs_at_load
        to_remove_name = iter(orig_names).next()
        r.start_write_group()
        self.addCleanup(r.abort_write_group)
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
            ('text', 'rev'), (), None, 'content\n')])
        new_pack = packs._new_pack
        self.assertTrue(new_pack.data_inserted())
        new_pack.finish()
        packs.allocate(new_pack)
        packs._new_pack = None
        removed_pack = packs.get_pack_by_name(to_remove_name)
        packs._remove_pack_from_memory(removed_pack)
        names = packs.names()
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))
        packs.reload_pack_names()
        reloaded_names = packs.names()
        self.assertEqual(orig_at_load, packs._packs_at_load)
        self.assertEqual(names, reloaded_names)
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))
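        # Judging by the unpacking above, _diff_pack_names() compares the
        # in-memory pack list with the on-disk pack-names file and yields
        # (all, deleted, new, <unused here>) index nodes, each keyed so
        # that x[0][0] is the pack name.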
    def test_autopack_obsoletes_new_pack(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        r.start_write_group()
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
            ('bogus-rev',), (), None, 'bogus-content\n')])
        # This should trigger an autopack, which will combine everything into a
        # single pack file.
        new_names = r.commit_write_group()
        names = packs.names()
        self.assertEqual(1, len(names))
        self.assertEqual([names[0] + '.pack'],
                         packs._pack_transport.list_dir('.'))
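        # The two lambdas above pin the autopack policy to "at most one
        # pack", so commit_write_group() is forced to fold the freshly
        # written pack and every pre-existing pack into a single file.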
    def test_autopack_reloads_and_stops(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # After we have determined what needs to be autopacked, trigger a
        # full-pack via the other repo which will cause us to re-evaluate and
        # decide we don't need to do anything
        orig_execute = packs._execute_pack_operations
        def _munged_execute_pack_ops(*args, **kwargs):
            tree.branch.repository.pack()
            return orig_execute(*args, **kwargs)
        packs._execute_pack_operations = _munged_execute_pack_ops
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        self.assertFalse(packs.autopack())
        self.assertEqual(1, len(packs.names()))
        self.assertEqual(tree.branch.repository._pack_collection.names(),
                         packs.names())

    def test__save_pack_names(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        packs._save_pack_names(obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # obsolete_packs will also have stuff like .rix and .iix present.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
        self.assertEqual([pack.name], sorted(obsolete_names))

    def test__save_pack_names_already_obsoleted(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        # We are going to simulate a concurrent autopack by manually obsoleting
        # the pack directly.
        packs._obsolete_packs([pack])
        packs._save_pack_names(clear_obsolete_packs=True,
                               obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # Note that while we set clear_obsolete_packs=True, it should not
        # delete a pack file that we have also scheduled for obsoletion.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
        self.assertEqual([pack.name], sorted(obsolete_names))


class TestPack(TestCaseWithTransport):
    """Tests for the Pack object."""

    def assertCurrentlyEqual(self, left, right):
        self.assertTrue(left == right)
        self.assertTrue(right == left)
        self.assertFalse(left != right)
        self.assertFalse(right != left)

    def assertCurrentlyNotEqual(self, left, right):
        self.assertFalse(left == right)
        self.assertFalse(right == left)
        self.assertTrue(left != right)
        self.assertTrue(right != left)

    def test___eq____ne__(self):
        left = pack_repo.ExistingPack('', '', '', '', '', '')
        right = pack_repo.ExistingPack('', '', '', '', '', '')
        self.assertCurrentlyEqual(left, right)
        # change all attributes and ensure equality changes as we do.
        left.revision_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.revision_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.inventory_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.inventory_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.text_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.text_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.signature_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.signature_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.name = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.name = 'a'
        self.assertCurrentlyEqual(left, right)
        left.transport = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.transport = 'a'
        self.assertCurrentlyEqual(left, right)

    def test_file_name(self):
        pack = pack_repo.ExistingPack('', 'a_name', '', '', '', '')
        self.assertEqual('a_name.pack', pack.file_name())


class TestNewPack(TestCaseWithTransport):
    """Tests for pack_repo.NewPack."""

    def test_new_instance_attributes(self):
        upload_transport = self.get_transport('upload')
        pack_transport = self.get_transport('pack')
        index_transport = self.get_transport('index')
        upload_transport.mkdir('.')
        collection = pack_repo.RepositoryPackCollection(
            repo=None,
            transport=self.get_transport('.'),
            index_transport=index_transport,
            upload_transport=upload_transport,
            pack_transport=pack_transport,
            index_builder_class=BTreeBuilder,
            index_class=BTreeGraphIndex,
            use_chk_index=False)
        pack = pack_repo.NewPack(collection)
        self.addCleanup(pack.abort) # Make sure the write stream gets closed
        self.assertIsInstance(pack.revision_index, BTreeBuilder)
        self.assertIsInstance(pack.inventory_index, BTreeBuilder)
        self.assertIsInstance(pack._hash, type(osutils.md5()))
        self.assertTrue(pack.upload_transport is upload_transport)
        self.assertTrue(pack.index_transport is index_transport)
        self.assertTrue(pack.pack_transport is pack_transport)
        self.assertEqual(None, pack.index_sizes)
        self.assertEqual(20, len(pack.random_name))
        self.assertIsInstance(pack.random_name, str)
        self.assertIsInstance(pack.start_time, float)
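        # The two random_name assertions pin a 20-character plain-str
        # name; a plausible (unverified) source is something like
        # osutils.rand_chars(20).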


class TestPacker(TestCaseWithTransport):
    """Tests for the packs repository Packer class."""

    def test_pack_optimizes_pack_order(self):
        builder = self.make_branch_builder('.', format="1.9")
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('f', 'f-id', 'file', 'content\n'))])
        builder.build_snapshot('B', ['A'],
            [('modify', ('f-id', 'new-content\n'))])
        builder.build_snapshot('C', ['B'],
            [('modify', ('f-id', 'third-content\n'))])
        builder.build_snapshot('D', ['C'],
            [('modify', ('f-id', 'fourth-content\n'))])
        b = builder.get_branch()
        b.lock_read()
        builder.finish_series()
        self.addCleanup(b.unlock)
        # At this point, we should have 4 pack files available
        # Because of how they were built, they correspond to
        # ['D', 'C', 'B', 'A']
        packs = b.repository._pack_collection.packs
        packer = pack_repo.Packer(b.repository._pack_collection,
                                  packs, 'testing',
                                  revision_ids=['B', 'C'])
        # Now, when we are copying the B & C revisions, their pack files should
        # be moved to the front of the stack
        # The new ordering moves B & C to the front of the .packs attribute,
        # and leaves the others in the original order.
        new_packs = [packs[1], packs[2], packs[0], packs[3]]
        new_pack = packer.pack()
        self.assertEqual(new_packs, packer.packs)
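        # packer.pack() reads the packs that hold the requested revisions
        # first: packs[1] and packs[2] (home of B and C) move to the front
        # while the remainder keep their original relative order.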


class TestOptimisingPacker(TestCaseWithTransport):
    """Tests for the OptimisingPacker class."""

    def get_pack_collection(self):
        repo = self.make_repository('.')
        return repo._pack_collection

    def test_open_pack_will_optimise(self):
        packer = pack_repo.OptimisingPacker(self.get_pack_collection(),
                                            [], '.test')
        new_pack = packer.open_pack()
        self.addCleanup(new_pack.abort) # ensure cleanup
        self.assertIsInstance(new_pack, pack_repo.NewPack)
        self.assertTrue(new_pack.revision_index._optimize_for_size)
        self.assertTrue(new_pack.inventory_index._optimize_for_size)
        self.assertTrue(new_pack.text_index._optimize_for_size)
        self.assertTrue(new_pack.signature_index._optimize_for_size)


class TestCrossFormatPacks(TestCaseWithTransport):

    def log_pack(self, hint=None):
        self.calls.append(('pack', hint))
        self.orig_pack(hint=hint)
        if self.expect_hint:
            self.assertTrue(hint)

    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository._get_source(target._format)
        self.orig_pack = target.pack
        target.pack = self.log_pack
        search = target.search_missing_revision_ids(
            source_tree.branch.repository, tip)
        stream = source.get_stream(search)
        from_format = source_tree.branch.repository._format
        sink = target._get_sink()
        sink.insert_stream(stream, from_format, [])
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)
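        # The streaming path in full: pick a stream source suited to the
        # target format, compute the missing-revision search, then pour
        # get_stream() into the target's sink. The trailing [] is, as far
        # as this test is concerned, an empty set of resume tokens.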
    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository
        self.orig_pack = target.pack
        target.pack = self.log_pack
        target.fetch(source)
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def test_sink_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_stream('1.9', 'rich-root-pack', False)

    def test_sink_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_stream('1.9', '2a', True)

    def test_sink_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_stream('2a', '2a', False)

    def test_IDS_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_fetch('1.9', 'rich-root-pack', False)

    def test_IDS_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_fetch('1.9', '2a', True)

    def test_IDS_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_fetch('2a', '2a', False)