~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/tests/test_repository.py

[merge] Storage filename escaping

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
 
# Copyright (C) 2006, 2007, 2008, 2009 Canonical Ltd
2
 
#
 
1
# (C) 2006 Canonical Ltd
 
2
 
3
3
# This program is free software; you can redistribute it and/or modify
4
4
# it under the terms of the GNU General Public License as published by
5
5
# the Free Software Foundation; either version 2 of the License, or
6
6
# (at your option) any later version.
7
 
#
 
7
 
8
8
# This program is distributed in the hope that it will be useful,
9
9
# but WITHOUT ANY WARRANTY; without even the implied warranty of
10
10
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
11
11
# GNU General Public License for more details.
12
 
#
 
12
 
13
13
# You should have received a copy of the GNU General Public License
14
14
# along with this program; if not, write to the Free Software
15
 
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
15
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
16
16
 
17
17
"""Tests for the Repository facility that are not interface tests.
18
18
 
19
 
For interface tests see tests/per_repository/*.py.
 
19
For interface tests see tests/repository_implementations/*.py.
20
20
 
21
21
For concrete class tests see this file, and for storage formats tests
22
22
also see this file.
23
23
"""
24
24
 
25
 
from stat import S_ISDIR
 
25
from stat import *
26
26
from StringIO import StringIO
27
27
 
28
28
import bzrlib
 
29
import bzrlib.bzrdir as bzrdir
 
30
import bzrlib.errors as errors
29
31
from bzrlib.errors import (NotBranchError,
30
32
                           NoSuchFile,
31
33
                           UnknownFormatError,
32
34
                           UnsupportedFormatError,
33
35
                           )
34
 
from bzrlib import (
35
 
    graph,
36
 
    tests,
37
 
    )
38
 
from bzrlib.branchbuilder import BranchBuilder
39
 
from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
40
 
from bzrlib.index import GraphIndex, InMemoryGraphIndex
41
 
from bzrlib.repository import RepositoryFormat
42
 
from bzrlib.smart import server
43
 
from bzrlib.tests import (
44
 
    TestCase,
45
 
    TestCaseWithTransport,
46
 
    TestSkipped,
47
 
    test_knit,
48
 
    )
49
 
from bzrlib.transport import (
50
 
    fakenfs,
51
 
    get_transport,
52
 
    )
 
36
import bzrlib.repository as repository
 
37
from bzrlib.tests import TestCase, TestCaseWithTransport
 
38
from bzrlib.transport import get_transport
 
39
from bzrlib.transport.http import HttpServer
53
40
from bzrlib.transport.memory import MemoryServer
54
 
from bzrlib import (
55
 
    bencode,
56
 
    bzrdir,
57
 
    errors,
58
 
    inventory,
59
 
    osutils,
60
 
    progress,
61
 
    repository,
62
 
    revision as _mod_revision,
63
 
    symbol_versioning,
64
 
    upgrade,
65
 
    workingtree,
66
 
    )
67
 
from bzrlib.repofmt import (
68
 
    groupcompress_repo,
69
 
    knitrepo,
70
 
    pack_repo,
71
 
    weaverepo,
72
 
    )
73
41
 
74
42
 
75
43
class TestDefaultFormat(TestCase):
76
44
 
77
45
    def test_get_set_default_format(self):
78
 
        old_default = bzrdir.format_registry.get('default')
79
 
        private_default = old_default().repository_format.__class__
80
46
        old_format = repository.RepositoryFormat.get_default_format()
81
 
        self.assertTrue(isinstance(old_format, private_default))
82
 
        def make_sample_bzrdir():
83
 
            my_bzrdir = bzrdir.BzrDirMetaFormat1()
84
 
            my_bzrdir.repository_format = SampleRepositoryFormat()
85
 
            return my_bzrdir
86
 
        bzrdir.format_registry.remove('default')
87
 
        bzrdir.format_registry.register('sample', make_sample_bzrdir, '')
88
 
        bzrdir.format_registry.set_default('sample')
 
47
        # default is None - we cannot create a Repository independently yet
 
48
        self.assertTrue(isinstance(old_format, repository.RepositoryFormat7))
 
49
        repository.RepositoryFormat.set_default_format(SampleRepositoryFormat())
89
50
        # creating a repository should now create an instrumented dir.
90
51
        try:
91
52
            # the default branch format is used by the meta dir format
92
53
            # which is not the default bzrdir format at this point
93
 
            dir = bzrdir.BzrDirMetaFormat1().initialize('memory:///')
 
54
            dir = bzrdir.BzrDirMetaFormat1().initialize('memory:/')
94
55
            result = dir.create_repository()
95
56
            self.assertEqual(result, 'A bzr repository dir')
96
57
        finally:
97
 
            bzrdir.format_registry.remove('default')
98
 
            bzrdir.format_registry.remove('sample')
99
 
            bzrdir.format_registry.register('default', old_default, '')
100
 
        self.assertIsInstance(repository.RepositoryFormat.get_default_format(),
101
 
                              old_format.__class__)
 
58
            repository.RepositoryFormat.set_default_format(old_format)
 
59
        self.assertEqual(old_format, repository.RepositoryFormat.get_default_format())
102
60
 
103
61
 
104
62
class SampleRepositoryFormat(repository.RepositoryFormat):
105
63
    """A sample format
106
64
 
107
 
    this format is initializable, unsupported to aid in testing the
 
65
    this format is initializable, unsupported to aid in testing the 
108
66
    open and open(unsupported=True) routines.
109
67
    """
110
68
 
115
73
    def initialize(self, a_bzrdir, shared=False):
116
74
        """Initialize a repository in a BzrDir"""
117
75
        t = a_bzrdir.get_repository_transport(self)
118
 
        t.put_bytes('format', self.get_format_string())
 
76
        t.put('format', StringIO(self.get_format_string()))
119
77
        return 'A bzr repository dir'
120
78
 
121
79
    def is_supported(self):
131
89
    def test_find_format(self):
132
90
        # is the right format object found for a repository?
133
91
        # create a branch with a few known format objects.
134
 
        # this is not quite the same as
 
92
        # this is not quite the same as 
135
93
        self.build_tree(["foo/", "bar/"])
136
94
        def check_format(format, url):
137
95
            dir = format._matchingbzrdir.initialize(url)
139
97
            t = get_transport(url)
140
98
            found_format = repository.RepositoryFormat.find_format(dir)
141
99
            self.failUnless(isinstance(found_format, format.__class__))
142
 
        check_format(weaverepo.RepositoryFormat7(), "bar")
143
 
 
 
100
        check_format(repository.RepositoryFormat7(), "bar")
 
101
        
144
102
    def test_find_format_no_repository(self):
145
103
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
146
104
        self.assertRaises(errors.NoRepositoryPresent,
172
130
 
173
131
class TestFormat6(TestCaseWithTransport):
174
132
 
175
 
    def test_attribute__fetch_order(self):
176
 
        """Weaves need topological data insertion."""
177
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
178
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
179
 
        self.assertEqual('topological', repo._format._fetch_order)
180
 
 
181
 
    def test_attribute__fetch_uses_deltas(self):
182
 
        """Weaves do not reuse deltas."""
183
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
184
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
185
 
        self.assertEqual(False, repo._format._fetch_uses_deltas)
186
 
 
187
 
    def test_attribute__fetch_reconcile(self):
188
 
        """Weave repositories need a reconcile after fetch."""
189
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
190
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
191
 
        self.assertEqual(True, repo._format._fetch_reconcile)
192
 
 
193
133
    def test_no_ancestry_weave(self):
194
134
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
195
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
135
        repo = repository.RepositoryFormat6().initialize(control)
196
136
        # We no longer need to create the ancestry.weave file
197
137
        # since it is *never* used.
198
138
        self.assertRaises(NoSuchFile,
199
139
                          control.transport.get,
200
140
                          'ancestry.weave')
201
141
 
202
 
    def test_supports_external_lookups(self):
203
 
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
204
 
        repo = weaverepo.RepositoryFormat6().initialize(control)
205
 
        self.assertFalse(repo._format.supports_external_lookups)
206
 
 
207
142
 
208
143
class TestFormat7(TestCaseWithTransport):
209
 
 
210
 
    def test_attribute__fetch_order(self):
211
 
        """Weaves need topological data insertion."""
212
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
213
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
214
 
        self.assertEqual('topological', repo._format._fetch_order)
215
 
 
216
 
    def test_attribute__fetch_uses_deltas(self):
217
 
        """Weaves do not reuse deltas."""
218
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
219
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
220
 
        self.assertEqual(False, repo._format._fetch_uses_deltas)
221
 
 
222
 
    def test_attribute__fetch_reconcile(self):
223
 
        """Weave repositories need a reconcile after fetch."""
224
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
225
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
226
 
        self.assertEqual(True, repo._format._fetch_reconcile)
227
 
 
 
144
    
228
145
    def test_disk_layout(self):
229
146
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
230
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
147
        repo = repository.RepositoryFormat7().initialize(control)
231
148
        # in case of side effects of locking.
232
149
        repo.lock_write()
233
150
        repo.unlock()
246
163
                             'w\n'
247
164
                             'W\n',
248
165
                             t.get('inventory.weave').read())
249
 
        # Creating a file with id Foo:Bar results in a non-escaped file name on
250
 
        # disk.
251
 
        control.create_branch()
252
 
        tree = control.create_workingtree()
253
 
        tree.add(['foo'], ['Foo:Bar'], ['file'])
254
 
        tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
255
 
        tree.commit('first post', rev_id='first')
256
 
        self.assertEqualDiff(
257
 
            '# bzr weave file v5\n'
258
 
            'i\n'
259
 
            '1 7fe70820e08a1aac0ef224d9c66ab66831cc4ab1\n'
260
 
            'n first\n'
261
 
            '\n'
262
 
            'w\n'
263
 
            '{ 0\n'
264
 
            '. content\n'
265
 
            '}\n'
266
 
            'W\n',
267
 
            t.get('weaves/74/Foo%3ABar.weave').read())
268
166
 
269
167
    def test_shared_disk_layout(self):
270
168
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
271
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
 
169
        repo = repository.RepositoryFormat7().initialize(control, shared=True)
272
170
        # we want:
273
171
        # format 'Bazaar-NG Repository format 7'
274
172
        # inventory.weave == empty_weave
291
189
    def test_creates_lockdir(self):
292
190
        """Make sure it appears to be controlled by a LockDir existence"""
293
191
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
294
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
 
192
        repo = repository.RepositoryFormat7().initialize(control, shared=True)
295
193
        t = control.get_repository_transport(None)
296
 
        # TODO: Should check there is a 'lock' toplevel directory,
 
194
        # TODO: Should check there is a 'lock' toplevel directory, 
297
195
        # regardless of contents
298
196
        self.assertFalse(t.has('lock/held/info'))
299
197
        repo.lock_write()
300
 
        try:
301
 
            self.assertTrue(t.has('lock/held/info'))
302
 
        finally:
303
 
            # unlock so we don't get a warning about failing to do so
304
 
            repo.unlock()
 
198
        self.assertTrue(t.has('lock/held/info'))
305
199
 
306
200
    def test_uses_lockdir(self):
307
201
        """repo format 7 actually locks on lockdir"""
308
202
        base_url = self.get_url()
309
203
        control = bzrdir.BzrDirMetaFormat1().initialize(base_url)
310
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
 
204
        repo = repository.RepositoryFormat7().initialize(control, shared=True)
311
205
        t = control.get_repository_transport(None)
312
206
        repo.lock_write()
313
207
        repo.unlock()
321
215
 
322
216
    def test_shared_no_tree_disk_layout(self):
323
217
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
324
 
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
 
218
        repo = repository.RepositoryFormat7().initialize(control, shared=True)
325
219
        repo.set_make_working_trees(False)
326
220
        # we want:
327
221
        # format 'Bazaar-NG Repository format 7'
345
239
                             'W\n',
346
240
                             t.get('inventory.weave').read())
347
241
 
348
 
    def test_supports_external_lookups(self):
349
 
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
350
 
        repo = weaverepo.RepositoryFormat7().initialize(control)
351
 
        self.assertFalse(repo._format.supports_external_lookups)
352
 
 
353
242
 
354
243
class TestFormatKnit1(TestCaseWithTransport):
355
 
 
356
 
    def test_attribute__fetch_order(self):
357
 
        """Knits need topological data insertion."""
358
 
        repo = self.make_repository('.',
359
 
                format=bzrdir.format_registry.get('knit')())
360
 
        self.assertEqual('topological', repo._format._fetch_order)
361
 
 
362
 
    def test_attribute__fetch_uses_deltas(self):
363
 
        """Knits reuse deltas."""
364
 
        repo = self.make_repository('.',
365
 
                format=bzrdir.format_registry.get('knit')())
366
 
        self.assertEqual(True, repo._format._fetch_uses_deltas)
367
 
 
 
244
    
368
245
    def test_disk_layout(self):
369
246
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
370
 
        repo = knitrepo.RepositoryFormatKnit1().initialize(control)
 
247
        repo = repository.RepositoryFormatKnit1().initialize(control)
371
248
        # in case of side effects of locking.
372
249
        repo.lock_write()
373
250
        repo.unlock()
384
261
        # self.assertEqualDiff('', t.get('lock').read())
385
262
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
386
263
        self.check_knits(t)
387
 
        # Check per-file knits.
388
 
        branch = control.create_branch()
389
 
        tree = control.create_workingtree()
390
 
        tree.add(['foo'], ['Nasty-IdC:'], ['file'])
391
 
        tree.put_file_bytes_non_atomic('Nasty-IdC:', '')
392
 
        tree.commit('1st post', rev_id='foo')
393
 
        self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
394
 
            '\nfoo fulltext 0 81  :')
395
 
 
396
 
    def assertHasKnit(self, t, knit_name, extra_content=''):
397
 
        """Assert that knit_name exists on t."""
398
 
        self.assertEqualDiff('# bzr knit index 8\n' + extra_content,
399
 
                             t.get(knit_name + '.kndx').read())
400
264
 
401
265
    def check_knits(self, t):
402
266
        """check knit content for a repository."""
403
 
        self.assertHasKnit(t, 'inventory')
404
 
        self.assertHasKnit(t, 'revisions')
405
 
        self.assertHasKnit(t, 'signatures')
 
267
        self.assertEqualDiff('# bzr knit index 7\n',
 
268
                             t.get('inventory.kndx').read())
 
269
        # no default content
 
270
        self.assertTrue(t.has('inventory.knit'))
 
271
        self.assertEqualDiff('# bzr knit index 7\n',
 
272
                             t.get('revisions.kndx').read())
 
273
        # no default content
 
274
        self.assertTrue(t.has('revisions.knit'))
 
275
        self.assertEqualDiff('# bzr knit index 7\n',
 
276
                             t.get('signatures.kndx').read())
 
277
        # no default content
 
278
        self.assertTrue(t.has('signatures.knit'))
406
279
 
407
280
    def test_shared_disk_layout(self):
408
281
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
409
 
        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
 
282
        repo = repository.RepositoryFormatKnit1().initialize(control, shared=True)
410
283
        # we want:
411
284
        # format 'Bazaar-NG Knit Repository Format 1'
412
285
        # lock: is a directory
425
298
 
426
299
    def test_shared_no_tree_disk_layout(self):
427
300
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
428
 
        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
 
301
        repo = repository.RepositoryFormatKnit1().initialize(control, shared=True)
429
302
        repo.set_make_working_trees(False)
430
303
        # we want:
431
304
        # format 'Bazaar-NG Knit Repository Format 1'
446
319
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
447
320
        self.check_knits(t)
448
321
 
449
 
    def test_deserialise_sets_root_revision(self):
450
 
        """We must have a inventory.root.revision
451
 
 
452
 
        Old versions of the XML5 serializer did not set the revision_id for
453
 
        the whole inventory. So we grab the one from the expected text. Which
454
 
        is valid when the api is not being abused.
455
 
        """
456
 
        repo = self.make_repository('.',
457
 
                format=bzrdir.format_registry.get('knit')())
458
 
        inv_xml = '<inventory format="5">\n</inventory>\n'
459
 
        inv = repo.deserialise_inventory('test-rev-id', inv_xml)
460
 
        self.assertEqual('test-rev-id', inv.root.revision)
461
 
 
462
 
    def test_deserialise_uses_global_revision_id(self):
463
 
        """If it is set, then we re-use the global revision id"""
464
 
        repo = self.make_repository('.',
465
 
                format=bzrdir.format_registry.get('knit')())
466
 
        inv_xml = ('<inventory format="5" revision_id="other-rev-id">\n'
467
 
                   '</inventory>\n')
468
 
        # Arguably, the deserialise_inventory should detect a mismatch, and
469
 
        # raise an error, rather than silently using one revision_id over the
470
 
        # other.
471
 
        self.assertRaises(AssertionError, repo.deserialise_inventory,
472
 
            'test-rev-id', inv_xml)
473
 
        inv = repo.deserialise_inventory('other-rev-id', inv_xml)
474
 
        self.assertEqual('other-rev-id', inv.root.revision)
475
 
 
476
 
    def test_supports_external_lookups(self):
477
 
        repo = self.make_repository('.',
478
 
                format=bzrdir.format_registry.get('knit')())
479
 
        self.assertFalse(repo._format.supports_external_lookups)
480
 
 
481
 
 
482
 
class DummyRepository(object):
483
 
    """A dummy repository for testing."""
484
 
 
485
 
    _format = None
486
 
    _serializer = None
487
 
 
488
 
    def supports_rich_root(self):
489
 
        if self._format is not None:
490
 
            return self._format.rich_root_data
491
 
        return False
492
 
 
493
 
    def get_graph(self):
494
 
        raise NotImplementedError
495
 
 
496
 
    def get_parent_map(self, revision_ids):
497
 
        raise NotImplementedError
498
 
 
499
 
 
500
 
class InterDummy(repository.InterRepository):
501
 
    """An inter-repository optimised code path for DummyRepository.
502
 
 
503
 
    This is for use during testing where we use DummyRepository as repositories
 
322
 
 
323
class InterString(repository.InterRepository):
 
324
    """An inter-repository optimised code path for strings.
 
325
 
 
326
    This is for use during testing where we use strings as repositories
504
327
    so that none of the default regsitered inter-repository classes will
505
 
    MATCH.
 
328
    match.
506
329
    """
507
330
 
508
331
    @staticmethod
509
332
    def is_compatible(repo_source, repo_target):
510
 
        """InterDummy is compatible with DummyRepository."""
511
 
        return (isinstance(repo_source, DummyRepository) and
512
 
            isinstance(repo_target, DummyRepository))
 
333
        """InterString is compatible with strings-as-repos."""
 
334
        return isinstance(repo_source, str) and isinstance(repo_target, str)
513
335
 
514
336
 
515
337
class TestInterRepository(TestCaseWithTransport):
521
343
        # This also tests that the default registered optimised interrepository
522
344
        # classes do not barf inappropriately when a surprising repository type
523
345
        # is handed to them.
524
 
        dummy_a = DummyRepository()
525
 
        dummy_b = DummyRepository()
 
346
        dummy_a = "Repository 1."
 
347
        dummy_b = "Repository 2."
526
348
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
527
349
 
528
350
    def assertGetsDefaultInterRepository(self, repo_a, repo_b):
529
 
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default.
530
 
 
531
 
        The effective default is now InterSameDataRepository because there is
532
 
        no actual sane default in the presence of incompatible data models.
533
 
        """
 
351
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default."""
534
352
        inter_repo = repository.InterRepository.get(repo_a, repo_b)
535
 
        self.assertEqual(repository.InterSameDataRepository,
 
353
        self.assertEqual(repository.InterRepository,
536
354
                         inter_repo.__class__)
537
355
        self.assertEqual(repo_a, inter_repo.source)
538
356
        self.assertEqual(repo_b, inter_repo.target)
543
361
        # and that it is correctly selected when given a repository
544
362
        # pair that it returns true on for the is_compatible static method
545
363
        # check
546
 
        dummy_a = DummyRepository()
547
 
        dummy_a._format = RepositoryFormat()
548
 
        dummy_b = DummyRepository()
549
 
        dummy_b._format = RepositoryFormat()
550
 
        repo = self.make_repository('.')
551
 
        # hack dummies to look like repo somewhat.
552
 
        dummy_a._serializer = repo._serializer
553
 
        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
554
 
        dummy_a._format.rich_root_data = repo._format.rich_root_data
555
 
        dummy_b._serializer = repo._serializer
556
 
        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
557
 
        dummy_b._format.rich_root_data = repo._format.rich_root_data
558
 
        repository.InterRepository.register_optimiser(InterDummy)
 
364
        dummy_a = "Repository 1."
 
365
        dummy_b = "Repository 2."
 
366
        repository.InterRepository.register_optimiser(InterString)
559
367
        try:
560
 
            # we should get the default for something InterDummy returns False
 
368
            # we should get the default for something InterString returns False
561
369
            # to
562
 
            self.assertFalse(InterDummy.is_compatible(dummy_a, repo))
563
 
            self.assertGetsDefaultInterRepository(dummy_a, repo)
564
 
            # and we should get an InterDummy for a pair it 'likes'
565
 
            self.assertTrue(InterDummy.is_compatible(dummy_a, dummy_b))
 
370
            self.assertFalse(InterString.is_compatible(dummy_a, None))
 
371
            self.assertGetsDefaultInterRepository(dummy_a, None)
 
372
            # and we should get an InterString for a pair it 'likes'
 
373
            self.assertTrue(InterString.is_compatible(dummy_a, dummy_b))
566
374
            inter_repo = repository.InterRepository.get(dummy_a, dummy_b)
567
 
            self.assertEqual(InterDummy, inter_repo.__class__)
 
375
            self.assertEqual(InterString, inter_repo.__class__)
568
376
            self.assertEqual(dummy_a, inter_repo.source)
569
377
            self.assertEqual(dummy_b, inter_repo.target)
570
378
        finally:
571
 
            repository.InterRepository.unregister_optimiser(InterDummy)
 
379
            repository.InterRepository.unregister_optimiser(InterString)
572
380
        # now we should get the default InterRepository object again.
573
381
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
574
382
 
578
386
    def test_is_compatible_and_registered(self):
579
387
        # InterWeaveRepo is compatible when either side
580
388
        # is a format 5/6/7 branch
581
 
        from bzrlib.repofmt import knitrepo, weaverepo
582
 
        formats = [weaverepo.RepositoryFormat5(),
583
 
                   weaverepo.RepositoryFormat6(),
584
 
                   weaverepo.RepositoryFormat7()]
585
 
        incompatible_formats = [weaverepo.RepositoryFormat4(),
586
 
                                knitrepo.RepositoryFormatKnit1(),
 
389
        formats = [repository.RepositoryFormat5(),
 
390
                   repository.RepositoryFormat6(),
 
391
                   repository.RepositoryFormat7()]
 
392
        incompatible_formats = [repository.RepositoryFormat4(),
 
393
                                repository.RepositoryFormatKnit1(),
587
394
                                ]
588
395
        repo_a = self.make_repository('a')
589
396
        repo_b = self.make_repository('b')
610
417
        t = get_transport(self.get_url('.'))
611
418
        t.mkdir('repository')
612
419
        repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
613
 
        repo = weaverepo.RepositoryFormat7().initialize(repo_dir)
614
 
        target_format = knitrepo.RepositoryFormatKnit1()
 
420
        repo = repository.RepositoryFormat7().initialize(repo_dir)
 
421
        target_format = repository.RepositoryFormatKnit1()
615
422
        converter = repository.CopyConverter(target_format)
616
423
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
617
424
        try:
620
427
            pb.finished()
621
428
        repo = repo_dir.open_repository()
622
429
        self.assertTrue(isinstance(target_format, repo._format.__class__))
623
 
 
624
 
 
625
 
class TestMisc(TestCase):
626
 
 
627
 
    def test_unescape_xml(self):
628
 
        """We get some kind of error when malformed entities are passed"""
629
 
        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;')
630
 
 
631
 
 
632
 
class TestRepositoryFormatKnit3(TestCaseWithTransport):
633
 
 
634
 
    def test_attribute__fetch_order(self):
635
 
        """Knits need topological data insertion."""
636
 
        format = bzrdir.BzrDirMetaFormat1()
637
 
        format.repository_format = knitrepo.RepositoryFormatKnit3()
638
 
        repo = self.make_repository('.', format=format)
639
 
        self.assertEqual('topological', repo._format._fetch_order)
640
 
 
641
 
    def test_attribute__fetch_uses_deltas(self):
642
 
        """Knits reuse deltas."""
643
 
        format = bzrdir.BzrDirMetaFormat1()
644
 
        format.repository_format = knitrepo.RepositoryFormatKnit3()
645
 
        repo = self.make_repository('.', format=format)
646
 
        self.assertEqual(True, repo._format._fetch_uses_deltas)
647
 
 
648
 
    def test_convert(self):
649
 
        """Ensure the upgrade adds weaves for roots"""
650
 
        format = bzrdir.BzrDirMetaFormat1()
651
 
        format.repository_format = knitrepo.RepositoryFormatKnit1()
652
 
        tree = self.make_branch_and_tree('.', format)
653
 
        tree.commit("Dull commit", rev_id="dull")
654
 
        revision_tree = tree.branch.repository.revision_tree('dull')
655
 
        revision_tree.lock_read()
656
 
        try:
657
 
            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
658
 
                revision_tree.inventory.root.file_id)
659
 
        finally:
660
 
            revision_tree.unlock()
661
 
        format = bzrdir.BzrDirMetaFormat1()
662
 
        format.repository_format = knitrepo.RepositoryFormatKnit3()
663
 
        upgrade.Convert('.', format)
664
 
        tree = workingtree.WorkingTree.open('.')
665
 
        revision_tree = tree.branch.repository.revision_tree('dull')
666
 
        revision_tree.lock_read()
667
 
        try:
668
 
            revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
669
 
        finally:
670
 
            revision_tree.unlock()
671
 
        tree.commit("Another dull commit", rev_id='dull2')
672
 
        revision_tree = tree.branch.repository.revision_tree('dull2')
673
 
        revision_tree.lock_read()
674
 
        self.addCleanup(revision_tree.unlock)
675
 
        self.assertEqual('dull', revision_tree.inventory.root.revision)
676
 
 
677
 
    def test_supports_external_lookups(self):
678
 
        format = bzrdir.BzrDirMetaFormat1()
679
 
        format.repository_format = knitrepo.RepositoryFormatKnit3()
680
 
        repo = self.make_repository('.', format=format)
681
 
        self.assertFalse(repo._format.supports_external_lookups)
682
 
 
683
 
 
684
 
class Test2a(TestCaseWithTransport):
685
 
 
686
 
    def test_format_pack_compresses_True(self):
687
 
        repo = self.make_repository('repo', format='2a')
688
 
        self.assertTrue(repo._format.pack_compresses)
689
 
 
690
 
    def test_inventories_use_chk_map_with_parent_base_dict(self):
691
 
        tree = self.make_branch_and_tree('repo', format="2a")
692
 
        revid = tree.commit("foo")
693
 
        tree.lock_read()
694
 
        self.addCleanup(tree.unlock)
695
 
        inv = tree.branch.repository.get_inventory(revid)
696
 
        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
697
 
        inv.parent_id_basename_to_file_id._ensure_root()
698
 
        inv.id_to_entry._ensure_root()
699
 
        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
700
 
        self.assertEqual(65536,
701
 
            inv.parent_id_basename_to_file_id._root_node.maximum_size)
702
 
 
703
 
    def test_autopack_unchanged_chk_nodes(self):
704
 
        # at 20 unchanged commits, chk pages are packed that are split into
705
 
        # two groups such that the new pack being made doesn't have all its
706
 
        # pages in the source packs (though they are in the repository).
707
 
        tree = self.make_branch_and_tree('tree', format='2a')
708
 
        for pos in range(20):
709
 
            tree.commit(str(pos))
710
 
 
711
 
    def test_pack_with_hint(self):
712
 
        tree = self.make_branch_and_tree('tree', format='2a')
713
 
        # 1 commit to leave untouched
714
 
        tree.commit('1')
715
 
        to_keep = tree.branch.repository._pack_collection.names()
716
 
        # 2 to combine
717
 
        tree.commit('2')
718
 
        tree.commit('3')
719
 
        all = tree.branch.repository._pack_collection.names()
720
 
        combine = list(set(all) - set(to_keep))
721
 
        self.assertLength(3, all)
722
 
        self.assertLength(2, combine)
723
 
        tree.branch.repository.pack(hint=combine)
724
 
        final = tree.branch.repository._pack_collection.names()
725
 
        self.assertLength(2, final)
726
 
        self.assertFalse(combine[0] in final)
727
 
        self.assertFalse(combine[1] in final)
728
 
        self.assertSubset(to_keep, final)
729
 
 
730
 
    def test_stream_source_to_gc(self):
731
 
        source = self.make_repository('source', format='2a')
732
 
        target = self.make_repository('target', format='2a')
733
 
        stream = source._get_source(target._format)
734
 
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)
735
 
 
736
 
    def test_stream_source_to_non_gc(self):
737
 
        source = self.make_repository('source', format='2a')
738
 
        target = self.make_repository('target', format='rich-root-pack')
739
 
        stream = source._get_source(target._format)
740
 
        # We don't want the child GroupCHKStreamSource
741
 
        self.assertIs(type(stream), repository.StreamSource)
742
 
 
743
 
    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
744
 
        source_builder = self.make_branch_builder('source',
745
 
                            format='2a')
746
 
        # We have to build a fairly large tree, so that we are sure the chk
747
 
        # pages will have split into multiple pages.
748
 
        entries = [('add', ('', 'a-root-id', 'directory', None))]
749
 
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
750
 
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
751
 
                fname = i + j
752
 
                fid = fname + '-id'
753
 
                content = 'content for %s\n' % (fname,)
754
 
                entries.append(('add', (fname, fid, 'file', content)))
755
 
        source_builder.start_series()
756
 
        source_builder.build_snapshot('rev-1', None, entries)
757
 
        # Now change a few of them, so we get a few new pages for the second
758
 
        # revision
759
 
        source_builder.build_snapshot('rev-2', ['rev-1'], [
760
 
            ('modify', ('aa-id', 'new content for aa-id\n')),
761
 
            ('modify', ('cc-id', 'new content for cc-id\n')),
762
 
            ('modify', ('zz-id', 'new content for zz-id\n')),
763
 
            ])
764
 
        source_builder.finish_series()
765
 
        source_branch = source_builder.get_branch()
766
 
        source_branch.lock_read()
767
 
        self.addCleanup(source_branch.unlock)
768
 
        target = self.make_repository('target', format='2a')
769
 
        source = source_branch.repository._get_source(target._format)
770
 
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)
771
 
 
772
 
        # On a regular pass, getting the inventories and chk pages for rev-2
773
 
        # would only get the newly created chk pages
774
 
        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
775
 
                                    set(['rev-2']))
776
 
        simple_chk_records = []
777
 
        for vf_name, substream in source.get_stream(search):
778
 
            if vf_name == 'chk_bytes':
779
 
                for record in substream:
780
 
                    simple_chk_records.append(record.key)
781
 
            else:
782
 
                for _ in substream:
783
 
                    continue
784
 
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
785
 
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
786
 
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
787
 
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
788
 
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
789
 
                         simple_chk_records)
790
 
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
791
 
        # we should get a much larger set of pages.
792
 
        missing = [('inventories', 'rev-2')]
793
 
        full_chk_records = []
794
 
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
795
 
            if vf_name == 'inventories':
796
 
                for record in substream:
797
 
                    self.assertEqual(('rev-2',), record.key)
798
 
            elif vf_name == 'chk_bytes':
799
 
                for record in substream:
800
 
                    full_chk_records.append(record.key)
801
 
            else:
802
 
                self.fail('Should not be getting a stream of %s' % (vf_name,))
803
 
        # We have 257 records now. This is because we have 1 root page, and 256
804
 
        # leaf pages in a complete listing.
805
 
        self.assertEqual(257, len(full_chk_records))
806
 
        self.assertSubset(simple_chk_records, full_chk_records)
807
 
 
808
 
    def test_inconsistency_fatal(self):
809
 
        repo = self.make_repository('repo', format='2a')
810
 
        self.assertTrue(repo.revisions._index._inconsistency_fatal)
811
 
        self.assertFalse(repo.texts._index._inconsistency_fatal)
812
 
        self.assertFalse(repo.inventories._index._inconsistency_fatal)
813
 
        self.assertFalse(repo.signatures._index._inconsistency_fatal)
814
 
        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
815
 
 
816
 
 
817
 
class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):
    """Exact pack-format-to-same-format fetches use KnitPackStreamSource;
    any format mismatch (or a remote target) falls back to the generic
    StreamSource.
    """

    def test_source_to_exact_pack_092(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='pack-0.92')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_rich_root_pack(self):
        source = self.make_repository('source', format='rich-root-pack')
        target = self.make_repository('target', format='rich-root-pack')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19_rich_root(self):
        source = self.make_repository('source', format='1.9-rich-root')
        target = self.make_repository('target', format='1.9-rich-root')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_remote_exact_pack_19(self):
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        # Re-open the target over the smart server so the source sees a
        # remote repository of the same underlying format.
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_stream_source_to_non_exact(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='1.9')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), repository.StreamSource)

    def test_stream_source_to_non_exact_rich_root(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9-rich-root')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), repository.StreamSource)

    def test_source_to_remote_non_exact_pack_19(self):
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.6')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIs(type(stream_source), repository.StreamSource)

    def test_stream_source_to_knit(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='dirstate')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), repository.StreamSource)
class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
    """Tests for _find_parent_ids_of_revisions."""

    def setUp(self):
        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
        self.builder = self.make_branch_builder('source',
            format='development6-rich-root')
        self.builder.start_series()
        # Seed the branch with a root-only 'initial' revision that the
        # individual tests build on.
        self.builder.build_snapshot('initial', None,
            [('add', ('', 'tree-root', 'directory', None))])
        self.repo = self.builder.get_branch().repository
        self.addCleanup(self.builder.finish_series)

    def assertParentIds(self, expected_result, rev_set):
        """Assert _find_parent_ids_of_revisions(rev_set) == expected_result,
        ignoring ordering."""
        self.assertEqual(sorted(expected_result),
            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))

    def test_simple(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        rev_set = ['revid2']
        self.assertParentIds(['revid1'], rev_set)

    def test_not_first_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2'], [])
        rev_set = ['revid3', 'revid2']
        self.assertParentIds(['revid1'], rev_set)

    def test_not_null(self):
        rev_set = ['initial']
        self.assertParentIds([], rev_set)

    def test_not_null_set(self):
        self.builder.build_snapshot('revid1', None, [])
        rev_set = [_mod_revision.NULL_REVISION]
        self.assertParentIds([], rev_set)

    def test_ghost(self):
        self.builder.build_snapshot('revid1', None, [])
        rev_set = ['ghost', 'revid1']
        self.assertParentIds(['initial'], rev_set)

    def test_ghost_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
        rev_set = ['revid2', 'revid1']
        self.assertParentIds(['ghost', 'initial'], rev_set)

    def test_righthand_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2a', ['revid1'], [])
        self.builder.build_snapshot('revid2b', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
        rev_set = ['revid3', 'revid2a']
        self.assertParentIds(['revid1', 'revid2b'], rev_set)
class TestWithBrokenRepo(TestCaseWithTransport):
    """These tests seem to be more appropriate as interface tests?"""

    def make_broken_repository(self):
        """Build a repository containing known inconsistencies.

        Returns a repository with an inventory-only revision ('rev1b'),
        an unreferenced file ancestor, and a ghost-referencing text
        ('rev3' -> 'rev1c').
        """
        # XXX: This function is borrowed from Aaron's "Reconcile can fix bad
        # parent references" branch which is due to land in bzr.dev soon.  Once
        # it does, this duplication should be removed.
        repo = self.make_repository('broken-repo')
        cleanups = []
        try:
            repo.lock_write()
            cleanups.append(repo.unlock)
            repo.start_write_group()
            cleanups.append(repo.commit_write_group)
            # make rev1a: A well-formed revision, containing 'file1'
            inv = inventory.Inventory(revision_id='rev1a')
            inv.root.revision = 'rev1a'
            self.add_file(repo, inv, 'file1', 'rev1a', [])
            repo.add_inventory('rev1a', inv, [])
            revision = _mod_revision.Revision('rev1a',
                committer='jrandom@example.com', timestamp=0,
                inventory_sha1='', timezone=0, message='foo', parent_ids=[])
            repo.add_revision('rev1a', revision, inv)

            # make rev1b, which has no Revision, but has an Inventory, and
            # file1
            inv = inventory.Inventory(revision_id='rev1b')
            inv.root.revision = 'rev1b'
            self.add_file(repo, inv, 'file1', 'rev1b', [])
            repo.add_inventory('rev1b', inv, [])

            # make rev2, with file1 and file2
            # file2 is sane
            # file1 has 'rev1b' as an ancestor, even though this is not
            # mentioned by 'rev1a', making it an unreferenced ancestor
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file1', 'rev2', ['rev1a', 'rev1b'])
            self.add_file(repo, inv, 'file2', 'rev2', [])
            self.add_revision(repo, 'rev2', inv, ['rev1a'])

            # make ghost revision rev1c
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev1c', [])

            # make rev3 with file2
            # file2 refers to 'rev1c', which is a ghost in this repository, so
            # file2 cannot have rev1c as its ancestor.
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev3', ['rev1c'])
            self.add_revision(repo, 'rev3', inv, ['rev1c'])
            return repo
        finally:
            # Run cleanups LIFO: commit the write group before unlocking.
            for cleanup in reversed(cleanups):
                cleanup()

    def add_revision(self, repo, revision_id, inv, parent_ids):
        """Add *inv* plus a matching Revision object to *repo*."""
        inv.revision_id = revision_id
        inv.root.revision = revision_id
        repo.add_inventory(revision_id, inv, parent_ids)
        revision = _mod_revision.Revision(revision_id,
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
            timezone=0, message='foo', parent_ids=parent_ids)
        repo.add_revision(revision_id, revision, inv)

    def add_file(self, repo, inv, filename, revision, parents):
        """Add a one-line file entry to *inv* and its text to *repo*."""
        file_id = filename + '-id'
        entry = inventory.InventoryFile(file_id, filename, 'TREE_ROOT')
        entry.revision = revision
        entry.text_size = 0
        inv.add(entry)
        text_key = (file_id, revision)
        parent_keys = [(file_id, parent) for parent in parents]
        repo.texts.add_lines(text_key, parent_keys, ['line\n'])

    def test_insert_from_broken_repo(self):
        """Inserting a data stream from a broken repository won't silently
        corrupt the target repository.
        """
        broken_repo = self.make_broken_repository()
        empty_repo = self.make_repository('empty-repo')
        try:
            empty_repo.fetch(broken_repo)
        except (errors.RevisionNotPresent, errors.BzrCheckError):
            # Test successful: compression parent not being copied leads to
            # error.
            return
        empty_repo.lock_read()
        self.addCleanup(empty_repo.unlock)
        text = empty_repo.texts.get_record_stream(
            [('file2-id', 'rev3')], 'topological', True).next()
        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
class TestRepositoryPackCollection(TestCaseWithTransport):
    """Tests for RepositoryPackCollection: pack distribution, autopack
    planning, and pack-name reloading."""

    def get_format(self):
        return bzrdir.format_registry.make_bzrdir('pack-0.92')

    def get_packs(self):
        format = self.get_format()
        repo = self.make_repository('.', format=format)
        return repo._pack_collection

    def make_packs_and_alt_repo(self, write_lock=False):
        """Create a pack repo with 3 packs, and access it via a second repo."""
        tree = self.make_branch_and_tree('.', format=self.get_format())
        tree.lock_write()
        self.addCleanup(tree.unlock)
        rev1 = tree.commit('one')
        rev2 = tree.commit('two')
        rev3 = tree.commit('three')
        r = repository.Repository.open('.')
        if write_lock:
            r.lock_write()
        else:
            r.lock_read()
        self.addCleanup(r.unlock)
        packs = r._pack_collection
        packs.ensure_loaded()
        return tree, r, packs, [rev1, rev2, rev3]

    def test__max_pack_count(self):
        """The maximum pack count is a function of the number of revisions."""
        # no revisions - one pack, so that we can have a revision free repo
        # without it blowing up
        packs = self.get_packs()
        self.assertEqual(1, packs._max_pack_count(0))
        # after that the sum of the digits, - check the first 1-9
        self.assertEqual(1, packs._max_pack_count(1))
        self.assertEqual(2, packs._max_pack_count(2))
        self.assertEqual(3, packs._max_pack_count(3))
        self.assertEqual(4, packs._max_pack_count(4))
        self.assertEqual(5, packs._max_pack_count(5))
        self.assertEqual(6, packs._max_pack_count(6))
        self.assertEqual(7, packs._max_pack_count(7))
        self.assertEqual(8, packs._max_pack_count(8))
        self.assertEqual(9, packs._max_pack_count(9))
        # check the boundary cases with two digits for the next decade
        self.assertEqual(1, packs._max_pack_count(10))
        self.assertEqual(2, packs._max_pack_count(11))
        self.assertEqual(10, packs._max_pack_count(19))
        self.assertEqual(2, packs._max_pack_count(20))
        self.assertEqual(3, packs._max_pack_count(21))
        # check some arbitrary big numbers
        self.assertEqual(25, packs._max_pack_count(112894))

    def test_pack_distribution_zero(self):
        packs = self.get_packs()
        self.assertEqual([0], packs.pack_distribution(0))

    def test_ensure_loaded_unlocked(self):
        packs = self.get_packs()
        self.assertRaises(errors.ObjectNotLocked,
                          packs.ensure_loaded)

    def test_pack_distribution_one_to_nine(self):
        packs = self.get_packs()
        self.assertEqual([1],
            packs.pack_distribution(1))
        self.assertEqual([1, 1],
            packs.pack_distribution(2))
        self.assertEqual([1, 1, 1],
            packs.pack_distribution(3))
        self.assertEqual([1, 1, 1, 1],
            packs.pack_distribution(4))
        self.assertEqual([1, 1, 1, 1, 1],
            packs.pack_distribution(5))
        self.assertEqual([1, 1, 1, 1, 1, 1],
            packs.pack_distribution(6))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(7))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(8))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(9))

    def test_pack_distribution_stable_at_boundaries(self):
        """When there are multi-rev packs the counts are stable."""
        packs = self.get_packs()
        # in 10s:
        self.assertEqual([10], packs.pack_distribution(10))
        self.assertEqual([10, 1], packs.pack_distribution(11))
        self.assertEqual([10, 10], packs.pack_distribution(20))
        self.assertEqual([10, 10, 1], packs.pack_distribution(21))
        # 100s
        self.assertEqual([100], packs.pack_distribution(100))
        self.assertEqual([100, 1], packs.pack_distribution(101))
        self.assertEqual([100, 10, 1], packs.pack_distribution(111))
        self.assertEqual([100, 100], packs.pack_distribution(200))
        self.assertEqual([100, 100, 1], packs.pack_distribution(201))
        self.assertEqual([100, 100, 10, 1], packs.pack_distribution(211))

    def test_plan_pack_operations_2009_revisions_skip_all_packs(self):
        packs = self.get_packs()
        existing_packs = [(2000, "big"), (9, "medium")]
        # rev count - 2009 -> 2x1000 + 9x1
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 1, 1, 1, 1, 1, 1, 1, 1, 1])
        self.assertEqual([], pack_operations)

    def test_plan_pack_operations_2010_revisions_skip_all_packs(self):
        packs = self.get_packs()
        existing_packs = [(2000, "big"), (9, "medium"), (1, "single")]
        # rev count - 2010 -> 2x1000 + 1x10
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 10])
        self.assertEqual([], pack_operations)

    def test_plan_pack_operations_2010_combines_smallest_two(self):
        packs = self.get_packs()
        existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"),
            (1, "single1")]
        # rev count - 2010 -> 2x1000 + 1x10 (3)
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 10])
        self.assertEqual([[2, ["single2", "single1"]]], pack_operations)

    def test_plan_pack_operations_creates_a_single_op(self):
        packs = self.get_packs()
        existing_packs = [(50, 'a'), (40, 'b'), (30, 'c'), (10, 'd'),
                          (10, 'e'), (6, 'f'), (4, 'g')]
        # rev count 150 -> 1x100 and 5x10
        # The two size 10 packs do not need to be touched. The 50, 40, 30 would
        # be combined into a single 120 size pack, and the 6 & 4 would
        # becombined into a size 10 pack. However, if we have to rewrite them,
        # we save a pack file with no increased I/O by putting them into the
        # same file.
        distribution = packs.pack_distribution(150)
        pack_operations = packs.plan_autopack_combinations(existing_packs,
                                                           distribution)
        self.assertEqual([[130, ['a', 'b', 'c', 'f', 'g']]], pack_operations)

    def test_all_packs_none(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([], packs.all_packs())

    def test_all_packs_one(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([
            packs.get_pack_by_name(packs.names()[0])],
            packs.all_packs())

    def test_all_packs_two(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.commit('continue')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([
            packs.get_pack_by_name(packs.names()[0]),
            packs.get_pack_by_name(packs.names()[1]),
            ], packs.all_packs())

    def test_get_pack_by_name(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.reset()
        packs.ensure_loaded()
        name = packs.names()[0]
        pack_1 = packs.get_pack_by_name(name)
        # the pack should be correctly initialised
        sizes = packs._names[name]
        rev_index = GraphIndex(packs._index_transport, name + '.rix', sizes[0])
        inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1])
        txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2])
        sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3])
        self.assertEqual(pack_repo.ExistingPack(packs._pack_transport,
            name, rev_index, inv_index, txt_index, sig_index), pack_1)
        # and the same instance should be returned on successive calls.
        self.assertTrue(pack_1 is packs.get_pack_by_name(name))

    def test_reload_pack_names_new_entry(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Add a new pack file into the repository
        rev4 = tree.commit('four')
        new_names = tree.branch.repository._pack_collection.names()
        new_name = set(new_names).difference(names)
        self.assertEqual(1, len(new_name))
        new_name = new_name.pop()
        # The old collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        # And the repository can access the new revision
        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
        self.assertFalse(packs.reload_pack_names())

    def test_reload_pack_names_added_and_removed(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Now repack the whole thing
        tree.branch.repository.pack()
        new_names = tree.branch.repository._pack_collection.names()
        # The other collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
        self.assertFalse(packs.reload_pack_names())

    def test_autopack_reloads_and_stops(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # After we have determined what needs to be autopacked, trigger a
        # full-pack via the other repo which will cause us to re-evaluate and
        # decide we don't need to do anything
        orig_execute = packs._execute_pack_operations
        def _munged_execute_pack_ops(*args, **kwargs):
            tree.branch.repository.pack()
            return orig_execute(*args, **kwargs)
        packs._execute_pack_operations = _munged_execute_pack_ops
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        self.assertFalse(packs.autopack())
        self.assertEqual(1, len(packs.names()))
        self.assertEqual(tree.branch.repository._pack_collection.names(),
                         packs.names())
class TestPack(TestCaseWithTransport):
    """Tests for the Pack object."""

    def assertCurrentlyEqual(self, left, right):
        """Assert left == right symmetrically via both == and !=."""
        self.assertTrue(left == right)
        self.assertTrue(right == left)
        self.assertFalse(left != right)
        self.assertFalse(right != left)

    def assertCurrentlyNotEqual(self, left, right):
        """Assert left != right symmetrically via both == and !=."""
        self.assertFalse(left == right)
        self.assertFalse(right == left)
        self.assertTrue(left != right)
        self.assertTrue(right != left)

    def test___eq____ne__(self):
        left = pack_repo.ExistingPack('', '', '', '', '', '')
        right = pack_repo.ExistingPack('', '', '', '', '', '')
        self.assertCurrentlyEqual(left, right)
        # change all attributes and ensure equality changes as we do.
        left.revision_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.revision_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.inventory_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.inventory_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.text_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.text_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.signature_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.signature_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.name = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.name = 'a'
        self.assertCurrentlyEqual(left, right)
        left.transport = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.transport = 'a'
        self.assertCurrentlyEqual(left, right)

    def test_file_name(self):
        pack = pack_repo.ExistingPack('', 'a_name', '', '', '', '')
        self.assertEqual('a_name.pack', pack.file_name())
class TestNewPack(TestCaseWithTransport):
    """Tests for pack_repo.NewPack."""

    def test_new_instance_attributes(self):
        upload_transport = self.get_transport('upload')
        pack_transport = self.get_transport('pack')
        index_transport = self.get_transport('index')
        upload_transport.mkdir('.')
        collection = pack_repo.RepositoryPackCollection(
            repo=None,
            transport=self.get_transport('.'),
            index_transport=index_transport,
            upload_transport=upload_transport,
            pack_transport=pack_transport,
            index_builder_class=BTreeBuilder,
            index_class=BTreeGraphIndex,
            use_chk_index=False)
        pack = pack_repo.NewPack(collection)
        # A fresh NewPack has builder indices, an md5 hash accumulator,
        # the collection's transports, and no index sizes yet.
        self.assertIsInstance(pack.revision_index, BTreeBuilder)
        self.assertIsInstance(pack.inventory_index, BTreeBuilder)
        self.assertIsInstance(pack._hash, type(osutils.md5()))
        self.assertTrue(pack.upload_transport is upload_transport)
        self.assertTrue(pack.index_transport is index_transport)
        self.assertTrue(pack.pack_transport is pack_transport)
        self.assertEqual(None, pack.index_sizes)
        self.assertEqual(20, len(pack.random_name))
        self.assertIsInstance(pack.random_name, str)
        self.assertIsInstance(pack.start_time, float)
class TestPacker(TestCaseWithTransport):
    """Tests for the packs repository Packer class."""

    def test_pack_optimizes_pack_order(self):
        """Packing reorders the source packs so requested revisions come first.

        Builds four single-revision packs (one per commit), asks a Packer to
        copy only B and C, and verifies their packs are moved to the front of
        the Packer's pack list while the rest keep their original order.
        """
        builder = self.make_branch_builder('.', format="1.9")
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('f', 'f-id', 'file', 'content\n'))])
        builder.build_snapshot('B', ['A'],
            [('modify', ('f-id', 'new-content\n'))])
        builder.build_snapshot('C', ['B'],
            [('modify', ('f-id', 'third-content\n'))])
        builder.build_snapshot('D', ['C'],
            [('modify', ('f-id', 'fourth-content\n'))])
        b = builder.get_branch()
        b.lock_read()
        builder.finish_series()
        self.addCleanup(b.unlock)
        # At this point, we should have 4 pack files available
        # Because of how they were built, they correspond to
        # ['D', 'C', 'B', 'A']
        packs = b.repository._pack_collection.packs
        packer = pack_repo.Packer(b.repository._pack_collection,
                                  packs, 'testing',
                                  revision_ids=['B', 'C'])
        # Now, when we are copying the B & C revisions, their pack files should
        # be moved to the front of the stack
        # The new ordering moves B & C to the front of the .packs attribute,
        # and leaves the others in the original order.
        new_packs = [packs[1], packs[2], packs[0], packs[3]]
        new_pack = packer.pack()
        self.assertEqual(new_packs, packer.packs)
class TestOptimisingPacker(TestCaseWithTransport):
    """Tests for the OptimisingPacker class."""

    def get_pack_collection(self):
        """Return the pack collection of a fresh repository in '.'."""
        repo = self.make_repository('.')
        return repo._pack_collection

    def test_open_pack_will_optimise(self):
        """open_pack() yields a NewPack whose indices optimise for size."""
        packer = pack_repo.OptimisingPacker(self.get_pack_collection(),
                                            [], '.test')
        new_pack = packer.open_pack()
        self.assertIsInstance(new_pack, pack_repo.NewPack)
        # Every index builder on the new pack should have the
        # size-optimisation flag enabled.
        self.assertTrue(new_pack.revision_index._optimize_for_size)
        self.assertTrue(new_pack.inventory_index._optimize_for_size)
        self.assertTrue(new_pack.text_index._optimize_for_size)
        self.assertTrue(new_pack.signature_index._optimize_for_size)
class TestCrossFormatPacks(TestCaseWithTransport):
    """Fetching across repository formats packs the target only when needed."""

    def log_pack(self, hint=None):
        # Stand-in for the target repository's pack(): record the call,
        # delegate to the real implementation, and — when a pack is expected —
        # check that a (truthy) hint was supplied.
        self.calls.append(('pack', hint))
        self.orig_pack(hint=hint)
        if self.expect_hint:
            self.assertTrue(hint)

    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
        """Insert a stream from a src_fmt repo into a target_fmt repo.

        Asserts that the target's pack() was called exactly once when
        expect_pack_called is True and not at all otherwise.
        """
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository._get_source(target._format)
        # Intercept pack() so the test can observe whether the stream sink
        # decided that repacking was necessary.
        self.orig_pack = target.pack
        target.pack = self.log_pack
        search = target.search_missing_revision_ids(
            source_tree.branch.repository, tip)
        stream = source.get_stream(search)
        from_format = source_tree.branch.repository._format
        sink = target._get_sink()
        sink.insert_stream(stream, from_format, [])
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
        """Fetch from a src_fmt repo into a target_fmt repo via fetch().

        Same pack()-call assertion as run_stream, but exercising the
        inter-repository fetch path rather than the raw stream sink.
        """
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository
        # Intercept pack() so the test can observe whether fetch repacked.
        self.orig_pack = target.pack
        target.pack = self.log_pack
        target.fetch(source)
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def test_sink_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_stream('1.9', 'rich-root-pack', False)

    def test_sink_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_stream('1.9', '2a', True)

    def test_sink_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_stream('2a', '2a', False)

    def test_IDS_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_fetch('1.9', 'rich-root-pack', False)

    def test_IDS_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_fetch('1.9', '2a', True)

    def test_IDS_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_fetch('2a', '2a', False)