~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/tests/test_repository.py

  • Committer: Vincent Ladeuil
  • Date: 2010-10-26 08:08:23 UTC
  • mfrom: (5514.1.1 665100-content-type)
  • mto: This revision was merged to the branch mainline in revision 5516.
  • Revision ID: v.ladeuil+lp@free.fr-20101026080823-3wggo03b7cpn9908
Correctly set the Content-Type header when POSTing http requests

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
 
# Copyright (C) 2006 Canonical Ltd
 
1
# Copyright (C) 2006-2010 Canonical Ltd
2
2
#
3
3
# This program is free software; you can redistribute it and/or modify
4
4
# it under the terms of the GNU General Public License as published by
12
12
#
13
13
# You should have received a copy of the GNU General Public License
14
14
# along with this program; if not, write to the Free Software
15
 
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 
15
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16
16
 
17
17
"""Tests for the Repository facility that are not interface tests.
18
18
 
19
 
For interface tests see tests/repository_implementations/*.py.
 
19
For interface tests see tests/per_repository/*.py.
20
20
 
21
21
For concrete class tests see this file, and for storage formats tests
22
22
also see this file.
23
23
"""
24
24
 
25
25
from stat import S_ISDIR
26
 
from StringIO import StringIO
 
26
import sys
27
27
 
28
28
import bzrlib
29
 
import bzrlib.bzrdir as bzrdir
30
 
import bzrlib.errors as errors
31
 
from bzrlib.errors import (NotBranchError,
32
 
                           NoSuchFile,
 
29
from bzrlib.errors import (NoSuchFile,
33
30
                           UnknownFormatError,
34
31
                           UnsupportedFormatError,
35
32
                           )
36
 
import bzrlib.repository as repository
37
 
from bzrlib.tests import TestCase, TestCaseWithTransport
38
 
from bzrlib.transport import get_transport
39
 
from bzrlib.transport.http import HttpServer
40
 
from bzrlib.transport.memory import MemoryServer
 
33
from bzrlib import (
 
34
    btree_index,
 
35
    graph,
 
36
    tests,
 
37
    )
 
38
from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
 
39
from bzrlib.index import GraphIndex
 
40
from bzrlib.repository import RepositoryFormat
 
41
from bzrlib.tests import (
 
42
    TestCase,
 
43
    TestCaseWithTransport,
 
44
    )
 
45
from bzrlib.transport import (
 
46
    get_transport,
 
47
    )
 
48
from bzrlib import (
 
49
    bzrdir,
 
50
    errors,
 
51
    inventory,
 
52
    osutils,
 
53
    repository,
 
54
    revision as _mod_revision,
 
55
    upgrade,
 
56
    versionedfile,
 
57
    workingtree,
 
58
    )
 
59
from bzrlib.repofmt import (
 
60
    groupcompress_repo,
 
61
    knitrepo,
 
62
    pack_repo,
 
63
    weaverepo,
 
64
    )
41
65
 
42
66
 
43
67
class TestDefaultFormat(TestCase):
44
68
 
45
69
    def test_get_set_default_format(self):
 
70
        old_default = bzrdir.format_registry.get('default')
 
71
        private_default = old_default().repository_format.__class__
46
72
        old_format = repository.RepositoryFormat.get_default_format()
47
 
        self.assertTrue(isinstance(old_format, repository.RepositoryFormatKnit1))
48
 
        repository.RepositoryFormat.set_default_format(SampleRepositoryFormat())
 
73
        self.assertTrue(isinstance(old_format, private_default))
 
74
        def make_sample_bzrdir():
 
75
            my_bzrdir = bzrdir.BzrDirMetaFormat1()
 
76
            my_bzrdir.repository_format = SampleRepositoryFormat()
 
77
            return my_bzrdir
 
78
        bzrdir.format_registry.remove('default')
 
79
        bzrdir.format_registry.register('sample', make_sample_bzrdir, '')
 
80
        bzrdir.format_registry.set_default('sample')
49
81
        # creating a repository should now create an instrumented dir.
50
82
        try:
51
83
            # the default branch format is used by the meta dir format
54
86
            result = dir.create_repository()
55
87
            self.assertEqual(result, 'A bzr repository dir')
56
88
        finally:
57
 
            repository.RepositoryFormat.set_default_format(old_format)
58
 
        self.assertEqual(old_format, repository.RepositoryFormat.get_default_format())
 
89
            bzrdir.format_registry.remove('default')
 
90
            bzrdir.format_registry.remove('sample')
 
91
            bzrdir.format_registry.register('default', old_default, '')
 
92
        self.assertIsInstance(repository.RepositoryFormat.get_default_format(),
 
93
                              old_format.__class__)
59
94
 
60
95
 
61
96
class SampleRepositoryFormat(repository.RepositoryFormat):
62
97
    """A sample format
63
98
 
64
 
    this format is initializable, unsupported to aid in testing the 
 
99
    this format is initializable, unsupported to aid in testing the
65
100
    open and open(unsupported=True) routines.
66
101
    """
67
102
 
72
107
    def initialize(self, a_bzrdir, shared=False):
73
108
        """Initialize a repository in a BzrDir"""
74
109
        t = a_bzrdir.get_repository_transport(self)
75
 
        t.put('format', StringIO(self.get_format_string()))
 
110
        t.put_bytes('format', self.get_format_string())
76
111
        return 'A bzr repository dir'
77
112
 
78
113
    def is_supported(self):
88
123
    def test_find_format(self):
89
124
        # is the right format object found for a repository?
90
125
        # create a branch with a few known format objects.
91
 
        # this is not quite the same as 
 
126
        # this is not quite the same as
92
127
        self.build_tree(["foo/", "bar/"])
93
128
        def check_format(format, url):
94
129
            dir = format._matchingbzrdir.initialize(url)
96
131
            t = get_transport(url)
97
132
            found_format = repository.RepositoryFormat.find_format(dir)
98
133
            self.failUnless(isinstance(found_format, format.__class__))
99
 
        check_format(repository.RepositoryFormat7(), "bar")
100
 
        
 
134
        check_format(weaverepo.RepositoryFormat7(), "bar")
 
135
 
101
136
    def test_find_format_no_repository(self):
102
137
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
103
138
        self.assertRaises(errors.NoRepositoryPresent,
129
164
 
130
165
class TestFormat6(TestCaseWithTransport):
131
166
 
 
167
    def test_attribute__fetch_order(self):
 
168
        """Weaves need topological data insertion."""
 
169
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
 
170
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
171
        self.assertEqual('topological', repo._format._fetch_order)
 
172
 
 
173
    def test_attribute__fetch_uses_deltas(self):
 
174
        """Weaves do not reuse deltas."""
 
175
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
 
176
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
177
        self.assertEqual(False, repo._format._fetch_uses_deltas)
 
178
 
 
179
    def test_attribute__fetch_reconcile(self):
 
180
        """Weave repositories need a reconcile after fetch."""
 
181
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
 
182
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
183
        self.assertEqual(True, repo._format._fetch_reconcile)
 
184
 
132
185
    def test_no_ancestry_weave(self):
133
186
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
134
 
        repo = repository.RepositoryFormat6().initialize(control)
 
187
        repo = weaverepo.RepositoryFormat6().initialize(control)
135
188
        # We no longer need to create the ancestry.weave file
136
189
        # since it is *never* used.
137
190
        self.assertRaises(NoSuchFile,
138
191
                          control.transport.get,
139
192
                          'ancestry.weave')
140
193
 
 
194
    def test_supports_external_lookups(self):
 
195
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
 
196
        repo = weaverepo.RepositoryFormat6().initialize(control)
 
197
        self.assertFalse(repo._format.supports_external_lookups)
 
198
 
141
199
 
142
200
class TestFormat7(TestCaseWithTransport):
143
 
    
 
201
 
 
202
    def test_attribute__fetch_order(self):
 
203
        """Weaves need topological data insertion."""
 
204
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
205
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
206
        self.assertEqual('topological', repo._format._fetch_order)
 
207
 
 
208
    def test_attribute__fetch_uses_deltas(self):
 
209
        """Weaves do not reuse deltas."""
 
210
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
211
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
212
        self.assertEqual(False, repo._format._fetch_uses_deltas)
 
213
 
 
214
    def test_attribute__fetch_reconcile(self):
 
215
        """Weave repositories need a reconcile after fetch."""
 
216
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
217
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
218
        self.assertEqual(True, repo._format._fetch_reconcile)
 
219
 
144
220
    def test_disk_layout(self):
145
221
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
146
 
        repo = repository.RepositoryFormat7().initialize(control)
 
222
        repo = weaverepo.RepositoryFormat7().initialize(control)
147
223
        # in case of side effects of locking.
148
224
        repo.lock_write()
149
225
        repo.unlock()
162
238
                             'w\n'
163
239
                             'W\n',
164
240
                             t.get('inventory.weave').read())
 
241
        # Creating a file with id Foo:Bar results in a non-escaped file name on
 
242
        # disk.
 
243
        control.create_branch()
 
244
        tree = control.create_workingtree()
 
245
        tree.add(['foo'], ['Foo:Bar'], ['file'])
 
246
        tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
 
247
        try:
 
248
            tree.commit('first post', rev_id='first')
 
249
        except errors.IllegalPath:
 
250
            if sys.platform != 'win32':
 
251
                raise
 
252
            self.knownFailure('Foo:Bar cannot be used as a file-id on windows'
 
253
                              ' in repo format 7')
 
254
            return
 
255
        self.assertEqualDiff(
 
256
            '# bzr weave file v5\n'
 
257
            'i\n'
 
258
            '1 7fe70820e08a1aac0ef224d9c66ab66831cc4ab1\n'
 
259
            'n first\n'
 
260
            '\n'
 
261
            'w\n'
 
262
            '{ 0\n'
 
263
            '. content\n'
 
264
            '}\n'
 
265
            'W\n',
 
266
            t.get('weaves/74/Foo%3ABar.weave').read())
165
267
 
166
268
    def test_shared_disk_layout(self):
167
269
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
168
 
        repo = repository.RepositoryFormat7().initialize(control, shared=True)
 
270
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
169
271
        # we want:
170
272
        # format 'Bazaar-NG Repository format 7'
171
273
        # inventory.weave == empty_weave
188
290
    def test_creates_lockdir(self):
189
291
        """Make sure it appears to be controlled by a LockDir existence"""
190
292
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
191
 
        repo = repository.RepositoryFormat7().initialize(control, shared=True)
 
293
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
192
294
        t = control.get_repository_transport(None)
193
 
        # TODO: Should check there is a 'lock' toplevel directory, 
 
295
        # TODO: Should check there is a 'lock' toplevel directory,
194
296
        # regardless of contents
195
297
        self.assertFalse(t.has('lock/held/info'))
196
298
        repo.lock_write()
204
306
        """repo format 7 actually locks on lockdir"""
205
307
        base_url = self.get_url()
206
308
        control = bzrdir.BzrDirMetaFormat1().initialize(base_url)
207
 
        repo = repository.RepositoryFormat7().initialize(control, shared=True)
 
309
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
208
310
        t = control.get_repository_transport(None)
209
311
        repo.lock_write()
210
312
        repo.unlock()
218
320
 
219
321
    def test_shared_no_tree_disk_layout(self):
220
322
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
221
 
        repo = repository.RepositoryFormat7().initialize(control, shared=True)
 
323
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
222
324
        repo.set_make_working_trees(False)
223
325
        # we want:
224
326
        # format 'Bazaar-NG Repository format 7'
242
344
                             'W\n',
243
345
                             t.get('inventory.weave').read())
244
346
 
 
347
    def test_supports_external_lookups(self):
 
348
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
349
        repo = weaverepo.RepositoryFormat7().initialize(control)
 
350
        self.assertFalse(repo._format.supports_external_lookups)
 
351
 
245
352
 
246
353
class TestFormatKnit1(TestCaseWithTransport):
247
 
    
 
354
 
 
355
    def test_attribute__fetch_order(self):
 
356
        """Knits need topological data insertion."""
 
357
        repo = self.make_repository('.',
 
358
                format=bzrdir.format_registry.get('knit')())
 
359
        self.assertEqual('topological', repo._format._fetch_order)
 
360
 
 
361
    def test_attribute__fetch_uses_deltas(self):
 
362
        """Knits reuse deltas."""
 
363
        repo = self.make_repository('.',
 
364
                format=bzrdir.format_registry.get('knit')())
 
365
        self.assertEqual(True, repo._format._fetch_uses_deltas)
 
366
 
248
367
    def test_disk_layout(self):
249
368
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
250
 
        repo = repository.RepositoryFormatKnit1().initialize(control)
 
369
        repo = knitrepo.RepositoryFormatKnit1().initialize(control)
251
370
        # in case of side effects of locking.
252
371
        repo.lock_write()
253
372
        repo.unlock()
264
383
        # self.assertEqualDiff('', t.get('lock').read())
265
384
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
266
385
        self.check_knits(t)
 
386
        # Check per-file knits.
 
387
        branch = control.create_branch()
 
388
        tree = control.create_workingtree()
 
389
        tree.add(['foo'], ['Nasty-IdC:'], ['file'])
 
390
        tree.put_file_bytes_non_atomic('Nasty-IdC:', '')
 
391
        tree.commit('1st post', rev_id='foo')
 
392
        self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
 
393
            '\nfoo fulltext 0 81  :')
267
394
 
268
 
    def assertHasKnit(self, t, knit_name):
 
395
    def assertHasKnit(self, t, knit_name, extra_content=''):
269
396
        """Assert that knit_name exists on t."""
270
 
        self.assertEqualDiff('# bzr knit index 8\n',
 
397
        self.assertEqualDiff('# bzr knit index 8\n' + extra_content,
271
398
                             t.get(knit_name + '.kndx').read())
272
 
        # no default content
273
 
        self.assertTrue(t.has(knit_name + '.knit'))
274
399
 
275
400
    def check_knits(self, t):
276
401
        """check knit content for a repository."""
280
405
 
281
406
    def test_shared_disk_layout(self):
282
407
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
283
 
        repo = repository.RepositoryFormatKnit1().initialize(control, shared=True)
 
408
        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
284
409
        # we want:
285
410
        # format 'Bazaar-NG Knit Repository Format 1'
286
411
        # lock: is a directory
299
424
 
300
425
    def test_shared_no_tree_disk_layout(self):
301
426
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
302
 
        repo = repository.RepositoryFormatKnit1().initialize(control, shared=True)
 
427
        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
303
428
        repo.set_make_working_trees(False)
304
429
        # we want:
305
430
        # format 'Bazaar-NG Knit Repository Format 1'
320
445
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
321
446
        self.check_knits(t)
322
447
 
323
 
 
324
 
class InterString(repository.InterRepository):
325
 
    """An inter-repository optimised code path for strings.
326
 
 
327
 
    This is for use during testing where we use strings as repositories
 
448
    def test_deserialise_sets_root_revision(self):
 
449
        """We must have a inventory.root.revision
 
450
 
 
451
        Old versions of the XML5 serializer did not set the revision_id for
 
452
        the whole inventory. So we grab the one from the expected text. Which
 
453
        is valid when the api is not being abused.
 
454
        """
 
455
        repo = self.make_repository('.',
 
456
                format=bzrdir.format_registry.get('knit')())
 
457
        inv_xml = '<inventory format="5">\n</inventory>\n'
 
458
        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
 
459
        self.assertEqual('test-rev-id', inv.root.revision)
 
460
 
 
461
    def test_deserialise_uses_global_revision_id(self):
 
462
        """If it is set, then we re-use the global revision id"""
 
463
        repo = self.make_repository('.',
 
464
                format=bzrdir.format_registry.get('knit')())
 
465
        inv_xml = ('<inventory format="5" revision_id="other-rev-id">\n'
 
466
                   '</inventory>\n')
 
467
        # Arguably, the deserialise_inventory should detect a mismatch, and
 
468
        # raise an error, rather than silently using one revision_id over the
 
469
        # other.
 
470
        self.assertRaises(AssertionError, repo._deserialise_inventory,
 
471
            'test-rev-id', inv_xml)
 
472
        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
 
473
        self.assertEqual('other-rev-id', inv.root.revision)
 
474
 
 
475
    def test_supports_external_lookups(self):
 
476
        repo = self.make_repository('.',
 
477
                format=bzrdir.format_registry.get('knit')())
 
478
        self.assertFalse(repo._format.supports_external_lookups)
 
479
 
 
480
 
 
481
class DummyRepository(object):
 
482
    """A dummy repository for testing."""
 
483
 
 
484
    _format = None
 
485
    _serializer = None
 
486
 
 
487
    def supports_rich_root(self):
 
488
        if self._format is not None:
 
489
            return self._format.rich_root_data
 
490
        return False
 
491
 
 
492
    def get_graph(self):
 
493
        raise NotImplementedError
 
494
 
 
495
    def get_parent_map(self, revision_ids):
 
496
        raise NotImplementedError
 
497
 
 
498
 
 
499
class InterDummy(repository.InterRepository):
 
500
    """An inter-repository optimised code path for DummyRepository.
 
501
 
 
502
    This is for use during testing where we use DummyRepository as repositories
328
503
    so that none of the default regsitered inter-repository classes will
329
 
    match.
 
504
    MATCH.
330
505
    """
331
506
 
332
507
    @staticmethod
333
508
    def is_compatible(repo_source, repo_target):
334
 
        """InterString is compatible with strings-as-repos."""
335
 
        return isinstance(repo_source, str) and isinstance(repo_target, str)
 
509
        """InterDummy is compatible with DummyRepository."""
 
510
        return (isinstance(repo_source, DummyRepository) and
 
511
            isinstance(repo_target, DummyRepository))
336
512
 
337
513
 
338
514
class TestInterRepository(TestCaseWithTransport):
344
520
        # This also tests that the default registered optimised interrepository
345
521
        # classes do not barf inappropriately when a surprising repository type
346
522
        # is handed to them.
347
 
        dummy_a = "Repository 1."
348
 
        dummy_b = "Repository 2."
 
523
        dummy_a = DummyRepository()
 
524
        dummy_b = DummyRepository()
349
525
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
350
526
 
351
527
    def assertGetsDefaultInterRepository(self, repo_a, repo_b):
352
 
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default."""
 
528
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default.
 
529
 
 
530
        The effective default is now InterSameDataRepository because there is
 
531
        no actual sane default in the presence of incompatible data models.
 
532
        """
353
533
        inter_repo = repository.InterRepository.get(repo_a, repo_b)
354
 
        self.assertEqual(repository.InterRepository,
 
534
        self.assertEqual(repository.InterSameDataRepository,
355
535
                         inter_repo.__class__)
356
536
        self.assertEqual(repo_a, inter_repo.source)
357
537
        self.assertEqual(repo_b, inter_repo.target)
362
542
        # and that it is correctly selected when given a repository
363
543
        # pair that it returns true on for the is_compatible static method
364
544
        # check
365
 
        dummy_a = "Repository 1."
366
 
        dummy_b = "Repository 2."
367
 
        repository.InterRepository.register_optimiser(InterString)
 
545
        dummy_a = DummyRepository()
 
546
        dummy_a._format = RepositoryFormat()
 
547
        dummy_b = DummyRepository()
 
548
        dummy_b._format = RepositoryFormat()
 
549
        repo = self.make_repository('.')
 
550
        # hack dummies to look like repo somewhat.
 
551
        dummy_a._serializer = repo._serializer
 
552
        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
 
553
        dummy_a._format.rich_root_data = repo._format.rich_root_data
 
554
        dummy_b._serializer = repo._serializer
 
555
        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
 
556
        dummy_b._format.rich_root_data = repo._format.rich_root_data
 
557
        repository.InterRepository.register_optimiser(InterDummy)
368
558
        try:
369
 
            # we should get the default for something InterString returns False
 
559
            # we should get the default for something InterDummy returns False
370
560
            # to
371
 
            self.assertFalse(InterString.is_compatible(dummy_a, None))
372
 
            self.assertGetsDefaultInterRepository(dummy_a, None)
373
 
            # and we should get an InterString for a pair it 'likes'
374
 
            self.assertTrue(InterString.is_compatible(dummy_a, dummy_b))
 
561
            self.assertFalse(InterDummy.is_compatible(dummy_a, repo))
 
562
            self.assertGetsDefaultInterRepository(dummy_a, repo)
 
563
            # and we should get an InterDummy for a pair it 'likes'
 
564
            self.assertTrue(InterDummy.is_compatible(dummy_a, dummy_b))
375
565
            inter_repo = repository.InterRepository.get(dummy_a, dummy_b)
376
 
            self.assertEqual(InterString, inter_repo.__class__)
 
566
            self.assertEqual(InterDummy, inter_repo.__class__)
377
567
            self.assertEqual(dummy_a, inter_repo.source)
378
568
            self.assertEqual(dummy_b, inter_repo.target)
379
569
        finally:
380
 
            repository.InterRepository.unregister_optimiser(InterString)
 
570
            repository.InterRepository.unregister_optimiser(InterDummy)
381
571
        # now we should get the default InterRepository object again.
382
572
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
383
573
 
387
577
    def test_is_compatible_and_registered(self):
388
578
        # InterWeaveRepo is compatible when either side
389
579
        # is a format 5/6/7 branch
390
 
        formats = [repository.RepositoryFormat5(),
391
 
                   repository.RepositoryFormat6(),
392
 
                   repository.RepositoryFormat7()]
393
 
        incompatible_formats = [repository.RepositoryFormat4(),
394
 
                                repository.RepositoryFormatKnit1(),
 
580
        from bzrlib.repofmt import knitrepo, weaverepo
 
581
        formats = [weaverepo.RepositoryFormat5(),
 
582
                   weaverepo.RepositoryFormat6(),
 
583
                   weaverepo.RepositoryFormat7()]
 
584
        incompatible_formats = [weaverepo.RepositoryFormat4(),
 
585
                                knitrepo.RepositoryFormatKnit1(),
395
586
                                ]
396
587
        repo_a = self.make_repository('a')
397
588
        repo_b = self.make_repository('b')
418
609
        t = get_transport(self.get_url('.'))
419
610
        t.mkdir('repository')
420
611
        repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
421
 
        repo = repository.RepositoryFormat7().initialize(repo_dir)
422
 
        target_format = repository.RepositoryFormatKnit1()
 
612
        repo = weaverepo.RepositoryFormat7().initialize(repo_dir)
 
613
        target_format = knitrepo.RepositoryFormatKnit1()
423
614
        converter = repository.CopyConverter(target_format)
424
615
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
425
616
        try:
431
622
 
432
623
 
433
624
class TestMisc(TestCase):
434
 
    
 
625
 
435
626
    def test_unescape_xml(self):
436
627
        """We get some kind of error when malformed entities are passed"""
437
 
        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;') 
 
628
        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;')
 
629
 
 
630
 
 
631
class TestRepositoryFormatKnit3(TestCaseWithTransport):
 
632
 
 
633
    def test_attribute__fetch_order(self):
 
634
        """Knits need topological data insertion."""
 
635
        format = bzrdir.BzrDirMetaFormat1()
 
636
        format.repository_format = knitrepo.RepositoryFormatKnit3()
 
637
        repo = self.make_repository('.', format=format)
 
638
        self.assertEqual('topological', repo._format._fetch_order)
 
639
 
 
640
    def test_attribute__fetch_uses_deltas(self):
 
641
        """Knits reuse deltas."""
 
642
        format = bzrdir.BzrDirMetaFormat1()
 
643
        format.repository_format = knitrepo.RepositoryFormatKnit3()
 
644
        repo = self.make_repository('.', format=format)
 
645
        self.assertEqual(True, repo._format._fetch_uses_deltas)
 
646
 
 
647
    def test_convert(self):
 
648
        """Ensure the upgrade adds weaves for roots"""
 
649
        format = bzrdir.BzrDirMetaFormat1()
 
650
        format.repository_format = knitrepo.RepositoryFormatKnit1()
 
651
        tree = self.make_branch_and_tree('.', format)
 
652
        tree.commit("Dull commit", rev_id="dull")
 
653
        revision_tree = tree.branch.repository.revision_tree('dull')
 
654
        revision_tree.lock_read()
 
655
        try:
 
656
            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
 
657
                revision_tree.inventory.root.file_id)
 
658
        finally:
 
659
            revision_tree.unlock()
 
660
        format = bzrdir.BzrDirMetaFormat1()
 
661
        format.repository_format = knitrepo.RepositoryFormatKnit3()
 
662
        upgrade.Convert('.', format)
 
663
        tree = workingtree.WorkingTree.open('.')
 
664
        revision_tree = tree.branch.repository.revision_tree('dull')
 
665
        revision_tree.lock_read()
 
666
        try:
 
667
            revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
 
668
        finally:
 
669
            revision_tree.unlock()
 
670
        tree.commit("Another dull commit", rev_id='dull2')
 
671
        revision_tree = tree.branch.repository.revision_tree('dull2')
 
672
        revision_tree.lock_read()
 
673
        self.addCleanup(revision_tree.unlock)
 
674
        self.assertEqual('dull', revision_tree.inventory.root.revision)
 
675
 
 
676
    def test_supports_external_lookups(self):
 
677
        format = bzrdir.BzrDirMetaFormat1()
 
678
        format.repository_format = knitrepo.RepositoryFormatKnit3()
 
679
        repo = self.make_repository('.', format=format)
 
680
        self.assertFalse(repo._format.supports_external_lookups)
 
681
 
 
682
 
 
683
class Test2a(tests.TestCaseWithMemoryTransport):
 
684
 
 
685
    def test_chk_bytes_uses_custom_btree_parser(self):
 
686
        mt = self.make_branch_and_memory_tree('test', format='2a')
 
687
        mt.lock_write()
 
688
        self.addCleanup(mt.unlock)
 
689
        mt.add([''], ['root-id'])
 
690
        mt.commit('first')
 
691
        index = mt.branch.repository.chk_bytes._index._graph_index._indices[0]
 
692
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
 
693
        # It should also work if we re-open the repo
 
694
        repo = mt.branch.repository.bzrdir.open_repository()
 
695
        repo.lock_read()
 
696
        self.addCleanup(repo.unlock)
 
697
        index = repo.chk_bytes._index._graph_index._indices[0]
 
698
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
 
699
 
 
700
    def test_fetch_combines_groups(self):
 
701
        builder = self.make_branch_builder('source', format='2a')
 
702
        builder.start_series()
 
703
        builder.build_snapshot('1', None, [
 
704
            ('add', ('', 'root-id', 'directory', '')),
 
705
            ('add', ('file', 'file-id', 'file', 'content\n'))])
 
706
        builder.build_snapshot('2', ['1'], [
 
707
            ('modify', ('file-id', 'content-2\n'))])
 
708
        builder.finish_series()
 
709
        source = builder.get_branch()
 
710
        target = self.make_repository('target', format='2a')
 
711
        target.fetch(source.repository)
 
712
        target.lock_read()
 
713
        self.addCleanup(target.unlock)
 
714
        details = target.texts._index.get_build_details(
 
715
            [('file-id', '1',), ('file-id', '2',)])
 
716
        file_1_details = details[('file-id', '1')]
 
717
        file_2_details = details[('file-id', '2')]
 
718
        # The index, and what to read off disk, should be the same for both
 
719
        # versions of the file.
 
720
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
 
721
 
 
722
    def test_fetch_combines_groups(self):
 
723
        builder = self.make_branch_builder('source', format='2a')
 
724
        builder.start_series()
 
725
        builder.build_snapshot('1', None, [
 
726
            ('add', ('', 'root-id', 'directory', '')),
 
727
            ('add', ('file', 'file-id', 'file', 'content\n'))])
 
728
        builder.build_snapshot('2', ['1'], [
 
729
            ('modify', ('file-id', 'content-2\n'))])
 
730
        builder.finish_series()
 
731
        source = builder.get_branch()
 
732
        target = self.make_repository('target', format='2a')
 
733
        target.fetch(source.repository)
 
734
        target.lock_read()
 
735
        self.addCleanup(target.unlock)
 
736
        details = target.texts._index.get_build_details(
 
737
            [('file-id', '1',), ('file-id', '2',)])
 
738
        file_1_details = details[('file-id', '1')]
 
739
        file_2_details = details[('file-id', '2')]
 
740
        # The index, and what to read off disk, should be the same for both
 
741
        # versions of the file.
 
742
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
 
743
 
 
744
    def test_fetch_combines_groups(self):
 
745
        builder = self.make_branch_builder('source', format='2a')
 
746
        builder.start_series()
 
747
        builder.build_snapshot('1', None, [
 
748
            ('add', ('', 'root-id', 'directory', '')),
 
749
            ('add', ('file', 'file-id', 'file', 'content\n'))])
 
750
        builder.build_snapshot('2', ['1'], [
 
751
            ('modify', ('file-id', 'content-2\n'))])
 
752
        builder.finish_series()
 
753
        source = builder.get_branch()
 
754
        target = self.make_repository('target', format='2a')
 
755
        target.fetch(source.repository)
 
756
        target.lock_read()
 
757
        self.addCleanup(target.unlock)
 
758
        details = target.texts._index.get_build_details(
 
759
            [('file-id', '1',), ('file-id', '2',)])
 
760
        file_1_details = details[('file-id', '1')]
 
761
        file_2_details = details[('file-id', '2')]
 
762
        # The index, and what to read off disk, should be the same for both
 
763
        # versions of the file.
 
764
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
 
765
 
 
766
    def test_format_pack_compresses_True(self):
 
767
        repo = self.make_repository('repo', format='2a')
 
768
        self.assertTrue(repo._format.pack_compresses)
 
769
 
 
770
    def test_inventories_use_chk_map_with_parent_base_dict(self):
 
771
        tree = self.make_branch_and_memory_tree('repo', format="2a")
 
772
        tree.lock_write()
 
773
        tree.add([''], ['TREE_ROOT'])
 
774
        revid = tree.commit("foo")
 
775
        tree.unlock()
 
776
        tree.lock_read()
 
777
        self.addCleanup(tree.unlock)
 
778
        inv = tree.branch.repository.get_inventory(revid)
 
779
        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
 
780
        inv.parent_id_basename_to_file_id._ensure_root()
 
781
        inv.id_to_entry._ensure_root()
 
782
        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
 
783
        self.assertEqual(65536,
 
784
            inv.parent_id_basename_to_file_id._root_node.maximum_size)
 
785
 
 
786
    def test_autopack_unchanged_chk_nodes(self):
 
787
        # at 20 unchanged commits, chk pages are packed that are split into
 
788
        # two groups such that the new pack being made doesn't have all its
 
789
        # pages in the source packs (though they are in the repository).
 
790
        # Use a memory backed repository, we don't need to hit disk for this
 
791
        tree = self.make_branch_and_memory_tree('tree', format='2a')
 
792
        tree.lock_write()
 
793
        self.addCleanup(tree.unlock)
 
794
        tree.add([''], ['TREE_ROOT'])
 
795
        for pos in range(20):
 
796
            tree.commit(str(pos))
 
797
 
 
798
    def test_pack_with_hint(self):
 
799
        tree = self.make_branch_and_memory_tree('tree', format='2a')
 
800
        tree.lock_write()
 
801
        self.addCleanup(tree.unlock)
 
802
        tree.add([''], ['TREE_ROOT'])
 
803
        # 1 commit to leave untouched
 
804
        tree.commit('1')
 
805
        to_keep = tree.branch.repository._pack_collection.names()
 
806
        # 2 to combine
 
807
        tree.commit('2')
 
808
        tree.commit('3')
 
809
        all = tree.branch.repository._pack_collection.names()
 
810
        combine = list(set(all) - set(to_keep))
 
811
        self.assertLength(3, all)
 
812
        self.assertLength(2, combine)
 
813
        tree.branch.repository.pack(hint=combine)
 
814
        final = tree.branch.repository._pack_collection.names()
 
815
        self.assertLength(2, final)
 
816
        self.assertFalse(combine[0] in final)
 
817
        self.assertFalse(combine[1] in final)
 
818
        self.assertSubset(to_keep, final)
 
819
 
 
820
    def test_stream_source_to_gc(self):
 
821
        source = self.make_repository('source', format='2a')
 
822
        target = self.make_repository('target', format='2a')
 
823
        stream = source._get_source(target._format)
 
824
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)
 
825
 
 
826
    def test_stream_source_to_non_gc(self):
 
827
        source = self.make_repository('source', format='2a')
 
828
        target = self.make_repository('target', format='rich-root-pack')
 
829
        stream = source._get_source(target._format)
 
830
        # We don't want the child GroupCHKStreamSource
 
831
        self.assertIs(type(stream), repository.StreamSource)
 
832
 
 
833
    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
        """get_stream_for_missing_keys must stream every chk page an
        inventory references, not just the pages new in that revision."""
        source_builder = self.make_branch_builder('source',
                            format='2a')
        # We have to build a fairly large tree, so that we are sure the chk
        # pages will have split into multiple pages.
        entries = [('add', ('', 'a-root-id', 'directory', None))]
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                fname = i + j
                fid = fname + '-id'
                content = 'content for %s\n' % (fname,)
                entries.append(('add', (fname, fid, 'file', content)))
        source_builder.start_series()
        source_builder.build_snapshot('rev-1', None, entries)
        # Now change a few of them, so we get a few new pages for the second
        # revision
        source_builder.build_snapshot('rev-2', ['rev-1'], [
            ('modify', ('aa-id', 'new content for aa-id\n')),
            ('modify', ('cc-id', 'new content for cc-id\n')),
            ('modify', ('zz-id', 'new content for zz-id\n')),
            ])
        source_builder.finish_series()
        source_branch = source_builder.get_branch()
        source_branch.lock_read()
        self.addCleanup(source_branch.unlock)
        target = self.make_repository('target', format='2a')
        source = source_branch.repository._get_source(target._format)
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)

        # On a regular pass, getting the inventories and chk pages for rev-2
        # would only get the newly created chk pages
        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
                                    set(['rev-2']))
        simple_chk_records = []
        for vf_name, substream in source.get_stream(search):
            if vf_name == 'chk_bytes':
                for record in substream:
                    simple_chk_records.append(record.key)
            else:
                # Drain other substreams; stream consumption is
                # order-sensitive, so each must be exhausted in turn.
                for _ in substream:
                    continue
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
                         simple_chk_records)
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
        # we should get a much larger set of pages.
        missing = [('inventories', 'rev-2')]
        full_chk_records = []
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
            if vf_name == 'inventories':
                for record in substream:
                    self.assertEqual(('rev-2',), record.key)
            elif vf_name == 'chk_bytes':
                for record in substream:
                    full_chk_records.append(record.key)
            else:
                self.fail('Should not be getting a stream of %s' % (vf_name,))
        # We have 257 records now. This is because we have 1 root page, and 256
        # leaf pages in a complete listing.
        self.assertEqual(257, len(full_chk_records))
        self.assertSubset(simple_chk_records, full_chk_records)
 
897
 
 
898
    def test_inconsistency_fatal(self):
 
899
        repo = self.make_repository('repo', format='2a')
 
900
        self.assertTrue(repo.revisions._index._inconsistency_fatal)
 
901
        self.assertFalse(repo.texts._index._inconsistency_fatal)
 
902
        self.assertFalse(repo.inventories._index._inconsistency_fatal)
 
903
        self.assertFalse(repo.signatures._index._inconsistency_fatal)
 
904
        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
 
905
 
 
906
 
 
907
class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):
    """Check which StreamSource implementation pack repositories choose."""

    def _stream_source(self, source_format, target_format):
        # Build a local source/target pair and return the stream source the
        # source repository selects for the target's format.
        source = self.make_repository('source', format=source_format)
        target = self.make_repository('target', format=target_format)
        return source._get_source(target._format)

    def _remote_stream_source(self, source_format, target_format):
        # As _stream_source, but the target is reopened over a smart server.
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format=source_format)
        self.make_repository('target', format=target_format)
        target = repository.Repository.open(trans.base)
        return source._get_source(target._format)

    def test_source_to_exact_pack_092(self):
        self.assertIsInstance(self._stream_source('pack-0.92', 'pack-0.92'),
                              pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_rich_root_pack(self):
        self.assertIsInstance(
            self._stream_source('rich-root-pack', 'rich-root-pack'),
            pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19(self):
        self.assertIsInstance(self._stream_source('1.9', '1.9'),
                              pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19_rich_root(self):
        self.assertIsInstance(
            self._stream_source('1.9-rich-root', '1.9-rich-root'),
            pack_repo.KnitPackStreamSource)

    def test_source_to_remote_exact_pack_19(self):
        self.assertIsInstance(self._remote_stream_source('1.9', '1.9'),
                              pack_repo.KnitPackStreamSource)

    def test_stream_source_to_non_exact(self):
        # Mismatched formats must fall back to the generic StreamSource.
        self.assertIs(type(self._stream_source('pack-0.92', '1.9')),
                      repository.StreamSource)

    def test_stream_source_to_non_exact_rich_root(self):
        self.assertIs(type(self._stream_source('1.9', '1.9-rich-root')),
                      repository.StreamSource)

    def test_source_to_remote_non_exact_pack_19(self):
        self.assertIs(type(self._remote_stream_source('1.9', '1.6')),
                      repository.StreamSource)

    def test_stream_source_to_knit(self):
        self.assertIs(type(self._stream_source('pack-0.92', 'dirstate')),
                      repository.StreamSource)
 
968
 
 
969
 
 
970
class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
    """Tests for _find_parent_ids_of_revisions."""

    def setUp(self):
        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
        self.builder = self.make_branch_builder('source',
            format='development6-rich-root')
        self.builder.start_series()
        self.builder.build_snapshot('initial', None,
            [('add', ('', 'tree-root', 'directory', None))])
        self.repo = self.builder.get_branch().repository
        self.addCleanup(self.builder.finish_series)

    def assertParentIds(self, expected_result, rev_set):
        """Check the parent ids found for rev_set, ignoring order."""
        found = self.repo._find_parent_ids_of_revisions(rev_set)
        self.assertEqual(sorted(expected_result), sorted(found))

    def test_simple(self):
        """A revision's sole parent is reported."""
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        self.assertParentIds(['revid1'], ['revid2'])

    def test_not_first_parent(self):
        """Parents inside the set itself are excluded."""
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2'], [])
        self.assertParentIds(['revid1'], ['revid3', 'revid2'])

    def test_not_null(self):
        """The null revision never appears as a parent."""
        self.assertParentIds([], ['initial'])

    def test_not_null_set(self):
        """A set containing only NULL_REVISION has no parents."""
        self.builder.build_snapshot('revid1', None, [])
        self.assertParentIds([], [_mod_revision.NULL_REVISION])

    def test_ghost(self):
        """A ghost in the set itself contributes no parents."""
        self.builder.build_snapshot('revid1', None, [])
        self.assertParentIds(['initial'], ['ghost', 'revid1'])

    def test_ghost_parent(self):
        """A ghost parent of a set member is reported."""
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
        self.assertParentIds(['ghost', 'initial'], ['revid2', 'revid1'])

    def test_righthand_parent(self):
        """Right-hand parents outside the set are reported."""
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2a', ['revid1'], [])
        self.builder.build_snapshot('revid2b', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
        self.assertParentIds(['revid1', 'revid2b'], ['revid3', 'revid2a'])
 
1027
 
 
1028
 
 
1029
class TestWithBrokenRepo(TestCaseWithTransport):
    """These tests seem to be more appropriate as interface tests?"""

    def make_broken_repository(self):
        """Build and return a repository whose text graph references
        unreferenced and ghost ancestors.

        Layout created (all inside one write group):
          rev1a  - well-formed, contains file1
          rev1b  - inventory + file1 text only; no Revision object
          rev2   - file1 claims rev1b as an ancestor even though rev1a does
                   not mention it (unreferenced ancestor); file2 is sane
          rev1c  - ghost: file2 text only, no inventory/revision
          rev3   - file2 names the ghost rev1c as its ancestor
        """
        # XXX: This function is borrowed from Aaron's "Reconcile can fix bad
        # parent references" branch which is due to land in bzr.dev soon.  Once
        # it does, this duplication should be removed.
        repo = self.make_repository('broken-repo')
        cleanups = []
        try:
            repo.lock_write()
            cleanups.append(repo.unlock)
            repo.start_write_group()
            # Commit the write group on the way out so the broken data is
            # actually persisted.
            cleanups.append(repo.commit_write_group)
            # make rev1a: A well-formed revision, containing 'file1'
            inv = inventory.Inventory(revision_id='rev1a')
            inv.root.revision = 'rev1a'
            self.add_file(repo, inv, 'file1', 'rev1a', [])
            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
            repo.add_inventory('rev1a', inv, [])
            revision = _mod_revision.Revision('rev1a',
                committer='jrandom@example.com', timestamp=0,
                inventory_sha1='', timezone=0, message='foo', parent_ids=[])
            repo.add_revision('rev1a',revision, inv)

            # make rev1b, which has no Revision, but has an Inventory, and
            # file1
            inv = inventory.Inventory(revision_id='rev1b')
            inv.root.revision = 'rev1b'
            self.add_file(repo, inv, 'file1', 'rev1b', [])
            repo.add_inventory('rev1b', inv, [])

            # make rev2, with file1 and file2
            # file2 is sane
            # file1 has 'rev1b' as an ancestor, even though this is not
            # mentioned by 'rev1a', making it an unreferenced ancestor
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file1', 'rev2', ['rev1a', 'rev1b'])
            self.add_file(repo, inv, 'file2', 'rev2', [])
            self.add_revision(repo, 'rev2', inv, ['rev1a'])

            # make ghost revision rev1c
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev1c', [])

            # make rev3 with file2
            # file2 refers to 'rev1c', which is a ghost in this repository, so
            # file2 cannot have rev1c as its ancestor.
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev3', ['rev1c'])
            self.add_revision(repo, 'rev3', inv, ['rev1c'])
            return repo
        finally:
            # Run cleanups in reverse registration order (commit the write
            # group before unlocking).
            for cleanup in reversed(cleanups):
                cleanup()

    def add_revision(self, repo, revision_id, inv, parent_ids):
        """Add a revision with inventory ``inv`` and the given parents,
        including a root text for it."""
        inv.revision_id = revision_id
        inv.root.revision = revision_id
        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
        repo.add_inventory(revision_id, inv, parent_ids)
        revision = _mod_revision.Revision(revision_id,
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
            timezone=0, message='foo', parent_ids=parent_ids)
        repo.add_revision(revision_id,revision, inv)

    def add_file(self, repo, inv, filename, revision, parents):
        """Add ``filename`` to ``inv`` and store a one-line text for it with
        the given per-file parent revisions."""
        file_id = filename + '-id'
        entry = inventory.InventoryFile(file_id, filename, 'TREE_ROOT')
        entry.revision = revision
        entry.text_size = 0
        inv.add(entry)
        text_key = (file_id, revision)
        parent_keys = [(file_id, parent) for parent in parents]
        repo.texts.add_lines(text_key, parent_keys, ['line\n'])

    def test_insert_from_broken_repo(self):
        """Inserting a data stream from a broken repository won't silently
        corrupt the target repository.
        """
        broken_repo = self.make_broken_repository()
        empty_repo = self.make_repository('empty-repo')
        try:
            empty_repo.fetch(broken_repo)
        except (errors.RevisionNotPresent, errors.BzrCheckError):
            # Test successful: compression parent not being copied leads to
            # error.
            return
        empty_repo.lock_read()
        self.addCleanup(empty_repo.unlock)
        # If the fetch did not raise, the copied text must at least be
        # intact.
        text = empty_repo.texts.get_record_stream(
            [('file2-id', 'rev3')], 'topological', True).next()
        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
 
1122
 
 
1123
 
 
1124
class TestRepositoryPackCollection(TestCaseWithTransport):
 
1125
 
 
1126
    def get_format(self):
 
1127
        return bzrdir.format_registry.make_bzrdir('pack-0.92')
 
1128
 
 
1129
    def get_packs(self):
 
1130
        format = self.get_format()
 
1131
        repo = self.make_repository('.', format=format)
 
1132
        return repo._pack_collection
 
1133
 
 
1134
    def make_packs_and_alt_repo(self, write_lock=False):
 
1135
        """Create a pack repo with 3 packs, and access it via a second repo."""
 
1136
        tree = self.make_branch_and_tree('.', format=self.get_format())
 
1137
        tree.lock_write()
 
1138
        self.addCleanup(tree.unlock)
 
1139
        rev1 = tree.commit('one')
 
1140
        rev2 = tree.commit('two')
 
1141
        rev3 = tree.commit('three')
 
1142
        r = repository.Repository.open('.')
 
1143
        if write_lock:
 
1144
            r.lock_write()
 
1145
        else:
 
1146
            r.lock_read()
 
1147
        self.addCleanup(r.unlock)
 
1148
        packs = r._pack_collection
 
1149
        packs.ensure_loaded()
 
1150
        return tree, r, packs, [rev1, rev2, rev3]
 
1151
 
 
1152
    def test__clear_obsolete_packs(self):
 
1153
        packs = self.get_packs()
 
1154
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
 
1155
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
 
1156
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
 
1157
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
 
1158
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
 
1159
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
 
1160
        res = packs._clear_obsolete_packs()
 
1161
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
 
1162
        self.assertEqual([], obsolete_pack_trans.list_dir('.'))
 
1163
 
 
1164
    def test__clear_obsolete_packs_preserve(self):
 
1165
        packs = self.get_packs()
 
1166
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
 
1167
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
 
1168
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
 
1169
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
 
1170
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
 
1171
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
 
1172
        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
 
1173
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
 
1174
        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
 
1175
                         sorted(obsolete_pack_trans.list_dir('.')))
 
1176
 
 
1177
    def test__max_pack_count(self):
 
1178
        """The maximum pack count is a function of the number of revisions."""
 
1179
        # no revisions - one pack, so that we can have a revision free repo
 
1180
        # without it blowing up
 
1181
        packs = self.get_packs()
 
1182
        self.assertEqual(1, packs._max_pack_count(0))
 
1183
        # after that the sum of the digits, - check the first 1-9
 
1184
        self.assertEqual(1, packs._max_pack_count(1))
 
1185
        self.assertEqual(2, packs._max_pack_count(2))
 
1186
        self.assertEqual(3, packs._max_pack_count(3))
 
1187
        self.assertEqual(4, packs._max_pack_count(4))
 
1188
        self.assertEqual(5, packs._max_pack_count(5))
 
1189
        self.assertEqual(6, packs._max_pack_count(6))
 
1190
        self.assertEqual(7, packs._max_pack_count(7))
 
1191
        self.assertEqual(8, packs._max_pack_count(8))
 
1192
        self.assertEqual(9, packs._max_pack_count(9))
 
1193
        # check the boundary cases with two digits for the next decade
 
1194
        self.assertEqual(1, packs._max_pack_count(10))
 
1195
        self.assertEqual(2, packs._max_pack_count(11))
 
1196
        self.assertEqual(10, packs._max_pack_count(19))
 
1197
        self.assertEqual(2, packs._max_pack_count(20))
 
1198
        self.assertEqual(3, packs._max_pack_count(21))
 
1199
        # check some arbitrary big numbers
 
1200
        self.assertEqual(25, packs._max_pack_count(112894))
 
1201
 
 
1202
    def test_repr(self):
        # NOTE(review): the parentheses in this pattern are unescaped regex
        # metacharacters, so they create groups instead of matching literal
        # '(' / ')' -- the assertion is looser than it reads. Confirm the
        # intended repr shape before tightening with escaped parens.
        packs = self.get_packs()
        self.assertContainsRe(repr(packs),
            'RepositoryPackCollection(.*Repository(.*))')
 
1206
 
 
1207
    def test__obsolete_packs(self):
 
1208
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1209
        names = packs.names()
 
1210
        pack = packs.get_pack_by_name(names[0])
 
1211
        # Schedule this one for removal
 
1212
        packs._remove_pack_from_memory(pack)
 
1213
        # Simulate a concurrent update by renaming the .pack file and one of
 
1214
        # the indices
 
1215
        packs.transport.rename('packs/%s.pack' % (names[0],),
 
1216
                               'obsolete_packs/%s.pack' % (names[0],))
 
1217
        packs.transport.rename('indices/%s.iix' % (names[0],),
 
1218
                               'obsolete_packs/%s.iix' % (names[0],))
 
1219
        # Now trigger the obsoletion, and ensure that all the remaining files
 
1220
        # are still renamed
 
1221
        packs._obsolete_packs([pack])
 
1222
        self.assertEqual([n + '.pack' for n in names[1:]],
 
1223
                         sorted(packs._pack_transport.list_dir('.')))
 
1224
        # names[0] should not be present in the index anymore
 
1225
        self.assertEqual(names[1:],
 
1226
            sorted(set([osutils.splitext(n)[0] for n in
 
1227
                        packs._index_transport.list_dir('.')])))
 
1228
 
 
1229
    def test_pack_distribution_zero(self):
 
1230
        packs = self.get_packs()
 
1231
        self.assertEqual([0], packs.pack_distribution(0))
 
1232
 
 
1233
    def test_ensure_loaded_unlocked(self):
 
1234
        packs = self.get_packs()
 
1235
        self.assertRaises(errors.ObjectNotLocked,
 
1236
                          packs.ensure_loaded)
 
1237
 
 
1238
    def test_pack_distribution_one_to_nine(self):
 
1239
        packs = self.get_packs()
 
1240
        self.assertEqual([1],
 
1241
            packs.pack_distribution(1))
 
1242
        self.assertEqual([1, 1],
 
1243
            packs.pack_distribution(2))
 
1244
        self.assertEqual([1, 1, 1],
 
1245
            packs.pack_distribution(3))
 
1246
        self.assertEqual([1, 1, 1, 1],
 
1247
            packs.pack_distribution(4))
 
1248
        self.assertEqual([1, 1, 1, 1, 1],
 
1249
            packs.pack_distribution(5))
 
1250
        self.assertEqual([1, 1, 1, 1, 1, 1],
 
1251
            packs.pack_distribution(6))
 
1252
        self.assertEqual([1, 1, 1, 1, 1, 1, 1],
 
1253
            packs.pack_distribution(7))
 
1254
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
 
1255
            packs.pack_distribution(8))
 
1256
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
 
1257
            packs.pack_distribution(9))
 
1258
 
 
1259
    def test_pack_distribution_stable_at_boundaries(self):
 
1260
        """When there are multi-rev packs the counts are stable."""
 
1261
        packs = self.get_packs()
 
1262
        # in 10s:
 
1263
        self.assertEqual([10], packs.pack_distribution(10))
 
1264
        self.assertEqual([10, 1], packs.pack_distribution(11))
 
1265
        self.assertEqual([10, 10], packs.pack_distribution(20))
 
1266
        self.assertEqual([10, 10, 1], packs.pack_distribution(21))
 
1267
        # 100s
 
1268
        self.assertEqual([100], packs.pack_distribution(100))
 
1269
        self.assertEqual([100, 1], packs.pack_distribution(101))
 
1270
        self.assertEqual([100, 10, 1], packs.pack_distribution(111))
 
1271
        self.assertEqual([100, 100], packs.pack_distribution(200))
 
1272
        self.assertEqual([100, 100, 1], packs.pack_distribution(201))
 
1273
        self.assertEqual([100, 100, 10, 1], packs.pack_distribution(211))
 
1274
 
 
1275
    def test_plan_pack_operations_2009_revisions_skip_all_packs(self):
 
1276
        packs = self.get_packs()
 
1277
        existing_packs = [(2000, "big"), (9, "medium")]
 
1278
        # rev count - 2009 -> 2x1000 + 9x1
 
1279
        pack_operations = packs.plan_autopack_combinations(
 
1280
            existing_packs, [1000, 1000, 1, 1, 1, 1, 1, 1, 1, 1, 1])
 
1281
        self.assertEqual([], pack_operations)
 
1282
 
 
1283
    def test_plan_pack_operations_2010_revisions_skip_all_packs(self):
 
1284
        packs = self.get_packs()
 
1285
        existing_packs = [(2000, "big"), (9, "medium"), (1, "single")]
 
1286
        # rev count - 2010 -> 2x1000 + 1x10
 
1287
        pack_operations = packs.plan_autopack_combinations(
 
1288
            existing_packs, [1000, 1000, 10])
 
1289
        self.assertEqual([], pack_operations)
 
1290
 
 
1291
    def test_plan_pack_operations_2010_combines_smallest_two(self):
 
1292
        packs = self.get_packs()
 
1293
        existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"),
 
1294
            (1, "single1")]
 
1295
        # rev count - 2010 -> 2x1000 + 1x10 (3)
 
1296
        pack_operations = packs.plan_autopack_combinations(
 
1297
            existing_packs, [1000, 1000, 10])
 
1298
        self.assertEqual([[2, ["single2", "single1"]]], pack_operations)
 
1299
 
 
1300
    def test_plan_pack_operations_creates_a_single_op(self):
 
1301
        packs = self.get_packs()
 
1302
        existing_packs = [(50, 'a'), (40, 'b'), (30, 'c'), (10, 'd'),
 
1303
                          (10, 'e'), (6, 'f'), (4, 'g')]
 
1304
        # rev count 150 -> 1x100 and 5x10
 
1305
        # The two size 10 packs do not need to be touched. The 50, 40, 30 would
 
1306
        # be combined into a single 120 size pack, and the 6 & 4 would
 
1307
        # becombined into a size 10 pack. However, if we have to rewrite them,
 
1308
        # we save a pack file with no increased I/O by putting them into the
 
1309
        # same file.
 
1310
        distribution = packs.pack_distribution(150)
 
1311
        pack_operations = packs.plan_autopack_combinations(existing_packs,
 
1312
                                                           distribution)
 
1313
        self.assertEqual([[130, ['a', 'b', 'c', 'f', 'g']]], pack_operations)
 
1314
 
 
1315
    def test_all_packs_none(self):
 
1316
        format = self.get_format()
 
1317
        tree = self.make_branch_and_tree('.', format=format)
 
1318
        tree.lock_read()
 
1319
        self.addCleanup(tree.unlock)
 
1320
        packs = tree.branch.repository._pack_collection
 
1321
        packs.ensure_loaded()
 
1322
        self.assertEqual([], packs.all_packs())
 
1323
 
 
1324
    def test_all_packs_one(self):
 
1325
        format = self.get_format()
 
1326
        tree = self.make_branch_and_tree('.', format=format)
 
1327
        tree.commit('start')
 
1328
        tree.lock_read()
 
1329
        self.addCleanup(tree.unlock)
 
1330
        packs = tree.branch.repository._pack_collection
 
1331
        packs.ensure_loaded()
 
1332
        self.assertEqual([
 
1333
            packs.get_pack_by_name(packs.names()[0])],
 
1334
            packs.all_packs())
 
1335
 
 
1336
    def test_all_packs_two(self):
 
1337
        format = self.get_format()
 
1338
        tree = self.make_branch_and_tree('.', format=format)
 
1339
        tree.commit('start')
 
1340
        tree.commit('continue')
 
1341
        tree.lock_read()
 
1342
        self.addCleanup(tree.unlock)
 
1343
        packs = tree.branch.repository._pack_collection
 
1344
        packs.ensure_loaded()
 
1345
        self.assertEqual([
 
1346
            packs.get_pack_by_name(packs.names()[0]),
 
1347
            packs.get_pack_by_name(packs.names()[1]),
 
1348
            ], packs.all_packs())
 
1349
 
 
1350
    def test_get_pack_by_name(self):
 
1351
        format = self.get_format()
 
1352
        tree = self.make_branch_and_tree('.', format=format)
 
1353
        tree.commit('start')
 
1354
        tree.lock_read()
 
1355
        self.addCleanup(tree.unlock)
 
1356
        packs = tree.branch.repository._pack_collection
 
1357
        packs.reset()
 
1358
        packs.ensure_loaded()
 
1359
        name = packs.names()[0]
 
1360
        pack_1 = packs.get_pack_by_name(name)
 
1361
        # the pack should be correctly initialised
 
1362
        sizes = packs._names[name]
 
1363
        rev_index = GraphIndex(packs._index_transport, name + '.rix', sizes[0])
 
1364
        inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1])
 
1365
        txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2])
 
1366
        sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3])
 
1367
        self.assertEqual(pack_repo.ExistingPack(packs._pack_transport,
 
1368
            name, rev_index, inv_index, txt_index, sig_index), pack_1)
 
1369
        # and the same instance should be returned on successive calls.
 
1370
        self.assertTrue(pack_1 is packs.get_pack_by_name(name))
 
1371
 
 
1372
    def test_reload_pack_names_new_entry(self):
 
1373
        tree, r, packs, revs = self.make_packs_and_alt_repo()
 
1374
        names = packs.names()
 
1375
        # Add a new pack file into the repository
 
1376
        rev4 = tree.commit('four')
 
1377
        new_names = tree.branch.repository._pack_collection.names()
 
1378
        new_name = set(new_names).difference(names)
 
1379
        self.assertEqual(1, len(new_name))
 
1380
        new_name = new_name.pop()
 
1381
        # The old collection hasn't noticed yet
 
1382
        self.assertEqual(names, packs.names())
 
1383
        self.assertTrue(packs.reload_pack_names())
 
1384
        self.assertEqual(new_names, packs.names())
 
1385
        # And the repository can access the new revision
 
1386
        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
 
1387
        self.assertFalse(packs.reload_pack_names())
 
1388
 
 
1389
    def test_reload_pack_names_added_and_removed(self):
 
1390
        tree, r, packs, revs = self.make_packs_and_alt_repo()
 
1391
        names = packs.names()
 
1392
        # Now repack the whole thing
 
1393
        tree.branch.repository.pack()
 
1394
        new_names = tree.branch.repository._pack_collection.names()
 
1395
        # The other collection hasn't noticed yet
 
1396
        self.assertEqual(names, packs.names())
 
1397
        self.assertTrue(packs.reload_pack_names())
 
1398
        self.assertEqual(new_names, packs.names())
 
1399
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
 
1400
        self.assertFalse(packs.reload_pack_names())
 
1401
 
 
1402
    def test_reload_pack_names_preserves_pending(self):
        """A reload must not lose in-memory changes not yet saved to disk.

        We add one pack (via start_write_group + insert_record_stream) and
        remove another (via _remove_pack_from_memory), then check that
        _diff_pack_names reports the same pending add/removal both before
        and after reload_pack_names().
        """
        # TODO: Update this to also test for pending-deleted names
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # We will add one pack (via start_write_group + insert_record_stream),
        # and remove another pack (via _remove_pack_from_memory)
        orig_names = packs.names()
        orig_at_load = packs._packs_at_load
        to_remove_name = iter(orig_names).next()
        r.start_write_group()
        self.addCleanup(r.abort_write_group)
        # Inserting a single text record is enough to create a new pack.
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
            ('text', 'rev'), (), None, 'content\n')])
        new_pack = packs._new_pack
        self.assertTrue(new_pack.data_inserted())
        new_pack.finish()
        packs.allocate(new_pack)
        # Clear the in-progress pack so the collection treats it as done.
        packs._new_pack = None
        removed_pack = packs.get_pack_by_name(to_remove_name)
        packs._remove_pack_from_memory(removed_pack)
        names = packs.names()
        # Before reloading: the diff shows exactly our pending add/removal.
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))
        packs.reload_pack_names()
        reloaded_names = packs.names()
        self.assertEqual(orig_at_load, packs._packs_at_load)
        self.assertEqual(names, reloaded_names)
        # After reloading: the very same pending changes are still visible.
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))
    def test_autopack_obsoletes_new_pack(self):
 
1442
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1443
        packs._max_pack_count = lambda x: 1
 
1444
        packs.pack_distribution = lambda x: [10]
 
1445
        r.start_write_group()
 
1446
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
 
1447
            ('bogus-rev',), (), None, 'bogus-content\n')])
 
1448
        # This should trigger an autopack, which will combine everything into a
 
1449
        # single pack file.
 
1450
        new_names = r.commit_write_group()
 
1451
        names = packs.names()
 
1452
        self.assertEqual(1, len(names))
 
1453
        self.assertEqual([names[0] + '.pack'],
 
1454
                         packs._pack_transport.list_dir('.'))
 
1455
 
 
1456
    def test_autopack_reloads_and_stops(self):
 
1457
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1458
        # After we have determined what needs to be autopacked, trigger a
 
1459
        # full-pack via the other repo which will cause us to re-evaluate and
 
1460
        # decide we don't need to do anything
 
1461
        orig_execute = packs._execute_pack_operations
 
1462
        def _munged_execute_pack_ops(*args, **kwargs):
 
1463
            tree.branch.repository.pack()
 
1464
            return orig_execute(*args, **kwargs)
 
1465
        packs._execute_pack_operations = _munged_execute_pack_ops
 
1466
        packs._max_pack_count = lambda x: 1
 
1467
        packs.pack_distribution = lambda x: [10]
 
1468
        self.assertFalse(packs.autopack())
 
1469
        self.assertEqual(1, len(packs.names()))
 
1470
        self.assertEqual(tree.branch.repository._pack_collection.names(),
 
1471
                         packs.names())
 
1472
 
 
1473
    def test__save_pack_names(self):
 
1474
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1475
        names = packs.names()
 
1476
        pack = packs.get_pack_by_name(names[0])
 
1477
        packs._remove_pack_from_memory(pack)
 
1478
        packs._save_pack_names(obsolete_packs=[pack])
 
1479
        cur_packs = packs._pack_transport.list_dir('.')
 
1480
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
 
1481
        # obsolete_packs will also have stuff like .rix and .iix present.
 
1482
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
 
1483
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
 
1484
        self.assertEqual([pack.name], sorted(obsolete_names))
 
1485
 
 
1486
    def test__save_pack_names_already_obsoleted(self):
 
1487
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
 
1488
        names = packs.names()
 
1489
        pack = packs.get_pack_by_name(names[0])
 
1490
        packs._remove_pack_from_memory(pack)
 
1491
        # We are going to simulate a concurrent autopack by manually obsoleting
 
1492
        # the pack directly.
 
1493
        packs._obsolete_packs([pack])
 
1494
        packs._save_pack_names(clear_obsolete_packs=True,
 
1495
                               obsolete_packs=[pack])
 
1496
        cur_packs = packs._pack_transport.list_dir('.')
 
1497
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
 
1498
        # Note that while we set clear_obsolete_packs=True, it should not
 
1499
        # delete a pack file that we have also scheduled for obsoletion.
 
1500
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
 
1501
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
 
1502
        self.assertEqual([pack.name], sorted(obsolete_names))
 
1503
 
 
1504
 
 
1505
 
 
1506
class TestPack(TestCaseWithTransport):
    """Tests for the Pack object."""

    def assertCurrentlyEqual(self, left, right):
        """Assert left and right compare equal, in both directions."""
        self.assertTrue(left == right)
        self.assertFalse(left != right)
        self.assertTrue(right == left)
        self.assertFalse(right != left)

    def assertCurrentlyNotEqual(self, left, right):
        """Assert left and right compare unequal, in both directions."""
        self.assertFalse(left == right)
        self.assertTrue(left != right)
        self.assertFalse(right == left)
        self.assertTrue(right != left)

    def test___eq____ne__(self):
        """Equality is attribute-by-attribute over every pack attribute."""
        left = pack_repo.ExistingPack('', '', '', '', '', '')
        right = pack_repo.ExistingPack('', '', '', '', '', '')
        self.assertCurrentlyEqual(left, right)
        # Perturb each attribute on one side, check inequality, then match
        # the other side and check equality is restored.
        for attr in ['revision_index', 'inventory_index', 'text_index',
                     'signature_index', 'name', 'transport']:
            setattr(left, attr, 'a')
            self.assertCurrentlyNotEqual(left, right)
            setattr(right, attr, 'a')
            self.assertCurrentlyEqual(left, right)

    def test_file_name(self):
        """file_name() is the pack's name plus a '.pack' suffix."""
        pack = pack_repo.ExistingPack('', 'a_name', '', '', '', '')
        self.assertEqual('a_name.pack', pack.file_name())
class TestNewPack(TestCaseWithTransport):
    """Tests for pack_repo.NewPack."""

    def test_new_instance_attributes(self):
        """A freshly created NewPack has the expected initial attributes."""
        upload_t = self.get_transport('upload')
        pack_t = self.get_transport('pack')
        index_t = self.get_transport('index')
        upload_t.mkdir('.')
        collection = pack_repo.RepositoryPackCollection(
            repo=None,
            transport=self.get_transport('.'),
            index_transport=index_t,
            upload_transport=upload_t,
            pack_transport=pack_t,
            index_builder_class=BTreeBuilder,
            index_class=BTreeGraphIndex,
            use_chk_index=False)
        pack = pack_repo.NewPack(collection)
        # Abort during cleanup so the write stream always gets closed.
        self.addCleanup(pack.abort)
        # The index builders are created eagerly.
        self.assertIsInstance(pack.revision_index, BTreeBuilder)
        self.assertIsInstance(pack.inventory_index, BTreeBuilder)
        self.assertIsInstance(pack._hash, type(osutils.md5()))
        # The transports are taken straight from the collection.
        self.assertTrue(pack.upload_transport is upload_t)
        self.assertTrue(pack.index_transport is index_t)
        self.assertTrue(pack.pack_transport is pack_t)
        # Index sizes are unknown until the pack is finished.
        self.assertEqual(None, pack.index_sizes)
        self.assertEqual(20, len(pack.random_name))
        self.assertIsInstance(pack.random_name, str)
        self.assertIsInstance(pack.start_time, float)
class TestPacker(TestCaseWithTransport):
    """Tests for the packs repository Packer class."""

    def test_pack_optimizes_pack_order(self):
        """Packing selected revisions reorders .packs so the packs holding
        those revisions come first.
        """
        builder = self.make_branch_builder('.', format="1.9")
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('f', 'f-id', 'file', 'content\n'))])
        builder.build_snapshot('B', ['A'],
            [('modify', ('f-id', 'new-content\n'))])
        builder.build_snapshot('C', ['B'],
            [('modify', ('f-id', 'third-content\n'))])
        builder.build_snapshot('D', ['C'],
            [('modify', ('f-id', 'fourth-content\n'))])
        b = builder.get_branch()
        b.lock_read()
        builder.finish_series()
        self.addCleanup(b.unlock)
        # At this point, we should have 4 pack files available
        # Because of how they were built, they correspond to
        # ['D', 'C', 'B', 'A']
        packs = b.repository._pack_collection.packs
        packer = pack_repo.Packer(b.repository._pack_collection,
                                  packs, 'testing',
                                  revision_ids=['B', 'C'])
        # Now, when we are copying the B & C revisions, their pack files
        # should be moved to the front of the stack.
        # The new ordering moves B & C to the front of the .packs attribute,
        # and leaves the others in the original order.
        new_packs = [packs[1], packs[2], packs[0], packs[3]]
        # The returned pack is not needed; we only assert on the reordering.
        packer.pack()
        self.assertEqual(new_packs, packer.packs)
class TestOptimisingPacker(TestCaseWithTransport):
    """Tests for the OptimisingPacker class."""

    def get_pack_collection(self):
        """Create a repository and return its pack collection."""
        return self.make_repository('.')._pack_collection

    def test_open_pack_will_optimise(self):
        """open_pack() yields a NewPack whose index builders are all
        configured to optimise for size."""
        collection = self.get_pack_collection()
        packer = pack_repo.OptimisingPacker(collection, [], '.test')
        new_pack = packer.open_pack()
        # Abort on cleanup so the write stream is always closed.
        self.addCleanup(new_pack.abort)
        self.assertIsInstance(new_pack, pack_repo.NewPack)
        for index in (new_pack.revision_index, new_pack.inventory_index,
                      new_pack.text_index, new_pack.signature_index):
            self.assertTrue(index._optimize_for_size)
class TestCrossFormatPacks(TestCaseWithTransport):
    """Check when cross-format fetches trigger a pack() on the target."""

    def log_pack(self, hint=None):
        """Record a pack() call (and its hint) before delegating to the
        real implementation."""
        self.calls.append(('pack', hint))
        self.orig_pack(hint=hint)
        if self.expect_hint:
            self.assertTrue(hint)

    def _prepare_transfer(self, src_fmt, target_fmt, expect_pack_called):
        """Common setup shared by run_stream and run_fetch.

        Creates a source tree with one commit and a write-locked target
        repository whose pack() method is instrumented via log_pack.

        :return: (source_tree, target_repository, tip_revision_id)
        """
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        self.orig_pack = target.pack
        target.pack = self.log_pack
        return source_tree, target, tip

    def _check_pack_called(self, expect_pack_called):
        """Assert that pack() was called exactly once, or not at all."""
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
        """Insert a stream from src_fmt into a target_fmt repository and
        check whether the sink packed the result."""
        source_tree, target, tip = self._prepare_transfer(
            src_fmt, target_fmt, expect_pack_called)
        source = source_tree.branch.repository._get_source(target._format)
        search = target.search_missing_revision_ids(
            source_tree.branch.repository, tip)
        stream = source.get_stream(search)
        from_format = source_tree.branch.repository._format
        sink = target._get_sink()
        sink.insert_stream(stream, from_format, [])
        self._check_pack_called(expect_pack_called)

    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
        """Fetch from src_fmt into a target_fmt repository and check
        whether the target packed the result."""
        source_tree, target, tip = self._prepare_transfer(
            src_fmt, target_fmt, expect_pack_called)
        target.fetch(source_tree.branch.repository)
        self._check_pack_called(expect_pack_called)

    def test_sink_format_hint_no(self):
        # When the target format says packing makes no difference, pack is
        # not called.
        self.run_stream('1.9', 'rich-root-pack', False)

    def test_sink_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_stream('1.9', '2a', True)

    def test_sink_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_stream('2a', '2a', False)

    def test_IDS_format_hint_no(self):
        # When the target format says packing makes no difference, pack is
        # not called.
        self.run_fetch('1.9', 'rich-root-pack', False)

    def test_IDS_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_fetch('1.9', '2a', True)

    def test_IDS_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_fetch('2a', '2a', False)