# Copyright (C) 2006-2010 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Tests for the Repository facility that are not interface tests.
19
For interface tests see tests/per_repository/*.py.
21
For concrete class tests see this file, and for storage formats tests
25
from stat import S_ISDIR

from bzrlib.errors import (NoSuchFile,
                           UnsupportedFormatError,
from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
from bzrlib.index import GraphIndex
from bzrlib.repository import RepositoryFormat
from bzrlib.tests import (
    TestCaseWithTransport,
from bzrlib.transport import (
    revision as _mod_revision,
from bzrlib.repofmt import (


class TestDefaultFormat(TestCase):

    def test_get_set_default_format(self):
        old_default = bzrdir.format_registry.get('default')
        private_default = old_default().repository_format.__class__
        old_format = repository.RepositoryFormat.get_default_format()
        self.assertTrue(isinstance(old_format, private_default))
        def make_sample_bzrdir():
            my_bzrdir = bzrdir.BzrDirMetaFormat1()
            my_bzrdir.repository_format = SampleRepositoryFormat()
        bzrdir.format_registry.remove('default')
        bzrdir.format_registry.register('sample', make_sample_bzrdir, '')
        bzrdir.format_registry.set_default('sample')
        # creating a repository should now create an instrumented dir.
        # the default branch format is used by the meta dir format
        # which is not the default bzrdir format at this point
        dir = bzrdir.BzrDirMetaFormat1().initialize('memory:///')
        result = dir.create_repository()
        self.assertEqual(result, 'A bzr repository dir')
        bzrdir.format_registry.remove('default')
        bzrdir.format_registry.remove('sample')
        bzrdir.format_registry.register('default', old_default, '')
        self.assertIsInstance(repository.RepositoryFormat.get_default_format(),


class SampleRepositoryFormat(repository.RepositoryFormat):

    this format is initializable, unsupported to aid in testing the
    open and open(unsupported=True) routines.

    def get_format_string(self):
        """See RepositoryFormat.get_format_string()."""
        return "Sample .bzr repository format."

    def initialize(self, a_bzrdir, shared=False):
        """Initialize a repository in a BzrDir"""
        t = a_bzrdir.get_repository_transport(self)
        t.put_bytes('format', self.get_format_string())
        return 'A bzr repository dir'

    def is_supported(self):

    def open(self, a_bzrdir, _found=False):
        return "opened repository."


class TestRepositoryFormat(TestCaseWithTransport):
    """Tests for the Repository format detection used by the bzr meta dir facility."""

    def test_find_format(self):
        # is the right format object found for a repository?
        # create a branch with a few known format objects.
        # this is not quite the same as
        self.build_tree(["foo/", "bar/"])
        def check_format(format, url):
            dir = format._matchingbzrdir.initialize(url)
            format.initialize(dir)
            t = get_transport(url)
            found_format = repository.RepositoryFormat.find_format(dir)
            self.failUnless(isinstance(found_format, format.__class__))
        check_format(weaverepo.RepositoryFormat7(), "bar")

    def test_find_format_no_repository(self):
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        self.assertRaises(errors.NoRepositoryPresent,
                          repository.RepositoryFormat.find_format,

    def test_find_format_unknown_format(self):
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        SampleRepositoryFormat().initialize(dir)
        self.assertRaises(UnknownFormatError,
                          repository.RepositoryFormat.find_format,

    def test_register_unregister_format(self):
        format = SampleRepositoryFormat()
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        format.initialize(dir)
        # register a format for it.
        repository.RepositoryFormat.register_format(format)
        # which repository.Open will refuse (not supported)
        self.assertRaises(UnsupportedFormatError, repository.Repository.open, self.get_url())
        # but open(unsupported) will work
        self.assertEqual(format.open(dir), "opened repository.")
        # unregister the format
        repository.RepositoryFormat.unregister_format(format)


class TestFormat6(TestCaseWithTransport):

    def test_attribute__fetch_order(self):
        """Weaves need topological data insertion."""
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat6().initialize(control)
        self.assertEqual('topological', repo._format._fetch_order)

    def test_attribute__fetch_uses_deltas(self):
        """Weaves do not reuse deltas."""
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat6().initialize(control)
        self.assertEqual(False, repo._format._fetch_uses_deltas)

    def test_attribute__fetch_reconcile(self):
        """Weave repositories need a reconcile after fetch."""
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat6().initialize(control)
        self.assertEqual(True, repo._format._fetch_reconcile)

    def test_no_ancestry_weave(self):
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat6().initialize(control)
        # We no longer need to create the ancestry.weave file
        # since it is *never* used.
        self.assertRaises(NoSuchFile,
                          control.transport.get,

    def test_supports_external_lookups(self):
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat6().initialize(control)
        self.assertFalse(repo._format.supports_external_lookups)


class TestFormat7(TestCaseWithTransport):

    def test_attribute__fetch_order(self):
        """Weaves need topological data insertion."""
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control)
        self.assertEqual('topological', repo._format._fetch_order)

    def test_attribute__fetch_uses_deltas(self):
        """Weaves do not reuse deltas."""
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control)
        self.assertEqual(False, repo._format._fetch_uses_deltas)

    def test_attribute__fetch_reconcile(self):
        """Weave repositories need a reconcile after fetch."""
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control)
        self.assertEqual(True, repo._format._fetch_reconcile)

    def test_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control)
        # in case of side effects of locking.
        # format 'Bazaar-NG Repository format 7'
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Repository format 7',
                             t.get('format').read())
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
        self.assertEqualDiff('# bzr weave file v5\n'
                             t.get('inventory.weave').read())
        # Creating a file with id Foo:Bar results in a non-escaped file name on
        control.create_branch()
        tree = control.create_workingtree()
        tree.add(['foo'], ['Foo:Bar'], ['file'])
        tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
        tree.commit('first post', rev_id='first')
        except errors.IllegalPath:
            if sys.platform != 'win32':
            self.knownFailure('Foo:Bar cannot be used as a file-id on windows'
        self.assertEqualDiff(
            '# bzr weave file v5\n'
            '1 7fe70820e08a1aac0ef224d9c66ab66831cc4ab1\n'
            t.get('weaves/74/Foo%3ABar.weave').read())

    def test_shared_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
        # format 'Bazaar-NG Repository format 7'
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        # a 'shared-storage' marker file.
        # lock is not present when unlocked
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Repository format 7',
                             t.get('format').read())
        self.assertEqualDiff('', t.get('shared-storage').read())
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
        self.assertEqualDiff('# bzr weave file v5\n'
                             t.get('inventory.weave').read())
        self.assertFalse(t.has('branch-lock'))

    def test_creates_lockdir(self):
        """Make sure it appears to be controlled by a LockDir's existence."""
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
        t = control.get_repository_transport(None)
        # TODO: Should check there is a 'lock' toplevel directory,
        # regardless of contents
        self.assertFalse(t.has('lock/held/info'))
        self.assertTrue(t.has('lock/held/info'))
        # unlock so we don't get a warning about failing to do so

    def test_uses_lockdir(self):
        """repo format 7 actually locks on lockdir"""
        base_url = self.get_url()
        control = bzrdir.BzrDirMetaFormat1().initialize(base_url)
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
        t = control.get_repository_transport(None)
        # make sure the same lock is created by opening it
        repo = repository.Repository.open(base_url)
        self.assertTrue(t.has('lock/held/info'))
        self.assertFalse(t.has('lock/held/info'))

    def test_shared_no_tree_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control, shared=True)
        repo.set_make_working_trees(False)
        # format 'Bazaar-NG Repository format 7'
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        # a 'shared-storage' marker file.
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Repository format 7',
                             t.get('format').read())
        ## self.assertEqualDiff('', t.get('lock').read())
        self.assertEqualDiff('', t.get('shared-storage').read())
        self.assertEqualDiff('', t.get('no-working-trees').read())
        repo.set_make_working_trees(True)
        self.assertFalse(t.has('no-working-trees'))
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
        self.assertEqualDiff('# bzr weave file v5\n'
                             t.get('inventory.weave').read())

    def test_supports_external_lookups(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = weaverepo.RepositoryFormat7().initialize(control)
        self.assertFalse(repo._format.supports_external_lookups)


class TestFormatKnit1(TestCaseWithTransport):

    def test_attribute__fetch_order(self):
        """Knits need topological data insertion."""
        repo = self.make_repository('.',
                format=bzrdir.format_registry.get('knit')())
        self.assertEqual('topological', repo._format._fetch_order)

    def test_attribute__fetch_uses_deltas(self):
        """Knits reuse deltas."""
        repo = self.make_repository('.',
                format=bzrdir.format_registry.get('knit')())
        self.assertEqual(True, repo._format._fetch_uses_deltas)

    def test_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = knitrepo.RepositoryFormatKnit1().initialize(control)
        # in case of side effects of locking.
        # format 'Bazaar-NG Knit Repository Format 1'
        # lock: is a directory
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
                             t.get('format').read())
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
        # Check per-file knits.
        branch = control.create_branch()
        tree = control.create_workingtree()
        tree.add(['foo'], ['Nasty-IdC:'], ['file'])
        tree.put_file_bytes_non_atomic('Nasty-IdC:', '')
        tree.commit('1st post', rev_id='foo')
        self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
            '\nfoo fulltext 0 81 :')

    def assertHasKnit(self, t, knit_name, extra_content=''):
        """Assert that knit_name exists on t."""
        self.assertEqualDiff('# bzr knit index 8\n' + extra_content,
                             t.get(knit_name + '.kndx').read())

    def check_knits(self, t):
        """check knit content for a repository."""
        self.assertHasKnit(t, 'inventory')
        self.assertHasKnit(t, 'revisions')
        self.assertHasKnit(t, 'signatures')

    def test_shared_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
        # format 'Bazaar-NG Knit Repository Format 1'
        # lock: is a directory
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        # a 'shared-storage' marker file.
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
                             t.get('format').read())
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        self.assertEqualDiff('', t.get('shared-storage').read())
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))

    def test_shared_no_tree_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
        repo.set_make_working_trees(False)
        # format 'Bazaar-NG Knit Repository Format 1'
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        # a 'shared-storage' marker file.
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
                             t.get('format').read())
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        self.assertEqualDiff('', t.get('shared-storage').read())
        self.assertEqualDiff('', t.get('no-working-trees').read())
        repo.set_make_working_trees(True)
        self.assertFalse(t.has('no-working-trees'))
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))

    def test_deserialise_sets_root_revision(self):
        """We must have an inventory.root.revision

        Old versions of the XML5 serializer did not set the revision_id for
        the whole inventory, so we grab the one from the expected text, which
        is valid when the api is not being abused.
        """
        repo = self.make_repository('.',
                format=bzrdir.format_registry.get('knit')())
        inv_xml = '<inventory format="5">\n</inventory>\n'
        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
        self.assertEqual('test-rev-id', inv.root.revision)

    def test_deserialise_uses_global_revision_id(self):
        """If it is set, then we re-use the global revision id"""
        repo = self.make_repository('.',
                format=bzrdir.format_registry.get('knit')())
        inv_xml = ('<inventory format="5" revision_id="other-rev-id">\n'
        # Arguably, the deserialise_inventory should detect a mismatch, and
        # raise an error, rather than silently using one revision_id over the
        self.assertRaises(AssertionError, repo._deserialise_inventory,
                          'test-rev-id', inv_xml)
        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
        self.assertEqual('other-rev-id', inv.root.revision)

    def test_supports_external_lookups(self):
        repo = self.make_repository('.',
                format=bzrdir.format_registry.get('knit')())
        self.assertFalse(repo._format.supports_external_lookups)


class DummyRepository(object):
    """A dummy repository for testing."""

    def supports_rich_root(self):
        if self._format is not None:
            return self._format.rich_root_data
        raise NotImplementedError

    def get_parent_map(self, revision_ids):
        raise NotImplementedError


class InterDummy(repository.InterRepository):
    """An inter-repository optimised code path for DummyRepository.

    This is for use during testing where we use DummyRepository as repositories
    so that none of the default registered inter-repository classes will

    def is_compatible(repo_source, repo_target):
        """InterDummy is compatible with DummyRepository."""
        return (isinstance(repo_source, DummyRepository) and
                isinstance(repo_target, DummyRepository))


class TestInterRepository(TestCaseWithTransport):

    def test_get_default_inter_repository(self):
        # test that the InterRepository.get(repo_a, repo_b) probes
        # for an inter_repo class where is_compatible(repo_a, repo_b) returns
        # true and returns a default inter_repo otherwise.
        # This also tests that the default registered optimised interrepository
        # classes do not barf inappropriately when a surprising repository type
        dummy_a = DummyRepository()
        dummy_b = DummyRepository()
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)

    def assertGetsDefaultInterRepository(self, repo_a, repo_b):
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default.

        The effective default is now InterSameDataRepository because there is
        no actual sane default in the presence of incompatible data models.
        """
        inter_repo = repository.InterRepository.get(repo_a, repo_b)
        self.assertEqual(repository.InterSameDataRepository,
                         inter_repo.__class__)
        self.assertEqual(repo_a, inter_repo.source)
        self.assertEqual(repo_b, inter_repo.target)

    def test_register_inter_repository_class(self):
        # test that an optimised code path provider - an
        # InterRepository subclass can be registered and unregistered
        # and that it is correctly selected when given a repository
        # pair that it returns true on for the is_compatible static method
        dummy_a = DummyRepository()
        dummy_a._format = RepositoryFormat()
        dummy_b = DummyRepository()
        dummy_b._format = RepositoryFormat()
        repo = self.make_repository('.')
        # hack dummies to look like repo somewhat.
        dummy_a._serializer = repo._serializer
        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
        dummy_a._format.rich_root_data = repo._format.rich_root_data
        dummy_b._serializer = repo._serializer
        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
        dummy_b._format.rich_root_data = repo._format.rich_root_data
        repository.InterRepository.register_optimiser(InterDummy)
        # we should get the default for something InterDummy returns False
        self.assertFalse(InterDummy.is_compatible(dummy_a, repo))
        self.assertGetsDefaultInterRepository(dummy_a, repo)
        # and we should get an InterDummy for a pair it 'likes'
        self.assertTrue(InterDummy.is_compatible(dummy_a, dummy_b))
        inter_repo = repository.InterRepository.get(dummy_a, dummy_b)
        self.assertEqual(InterDummy, inter_repo.__class__)
        self.assertEqual(dummy_a, inter_repo.source)
        self.assertEqual(dummy_b, inter_repo.target)
        repository.InterRepository.unregister_optimiser(InterDummy)
        # now we should get the default InterRepository object again.
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)


class TestInterWeaveRepo(TestCaseWithTransport):

    def test_is_compatible_and_registered(self):
        # InterWeaveRepo is compatible when either side
        # is a format 5/6/7 branch
        from bzrlib.repofmt import knitrepo, weaverepo
        formats = [weaverepo.RepositoryFormat5(),
                   weaverepo.RepositoryFormat6(),
                   weaverepo.RepositoryFormat7()]
        incompatible_formats = [weaverepo.RepositoryFormat4(),
                                knitrepo.RepositoryFormatKnit1(),
        repo_a = self.make_repository('a')
        repo_b = self.make_repository('b')
        is_compatible = repository.InterWeaveRepo.is_compatible
        for source in incompatible_formats:
            # force incompatible left then right
            repo_a._format = source
            repo_b._format = formats[0]
            self.assertFalse(is_compatible(repo_a, repo_b))
            self.assertFalse(is_compatible(repo_b, repo_a))
        for source in formats:
            repo_a._format = source
            for target in formats:
                repo_b._format = target
                self.assertTrue(is_compatible(repo_a, repo_b))
        self.assertEqual(repository.InterWeaveRepo,
                         repository.InterRepository.get(repo_a,


class TestRepositoryConverter(TestCaseWithTransport):

    def test_convert_empty(self):
        t = get_transport(self.get_url('.'))
        t.mkdir('repository')
        repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
        repo = weaverepo.RepositoryFormat7().initialize(repo_dir)
        target_format = knitrepo.RepositoryFormatKnit1()
        converter = repository.CopyConverter(target_format)
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
        converter.convert(repo, pb)
        repo = repo_dir.open_repository()
        self.assertTrue(isinstance(target_format, repo._format.__class__))


class TestMisc(TestCase):

    def test_unescape_xml(self):
        """We get some kind of error when malformed entities are passed"""
        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;')


class TestRepositoryFormatKnit3(TestCaseWithTransport):

    def test_attribute__fetch_order(self):
        """Knits need topological data insertion."""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertEqual('topological', repo._format._fetch_order)

    def test_attribute__fetch_uses_deltas(self):
        """Knits reuse deltas."""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertEqual(True, repo._format._fetch_uses_deltas)

    def test_convert(self):
        """Ensure the upgrade adds weaves for roots"""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit1()
        tree = self.make_branch_and_tree('.', format)
        tree.commit("Dull commit", rev_id="dull")
        revision_tree = tree.branch.repository.revision_tree('dull')
        revision_tree.lock_read()
        self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
                          revision_tree.inventory.root.file_id)
        revision_tree.unlock()
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        upgrade.Convert('.', format)
        tree = workingtree.WorkingTree.open('.')
        revision_tree = tree.branch.repository.revision_tree('dull')
        revision_tree.lock_read()
        revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
        revision_tree.unlock()
        tree.commit("Another dull commit", rev_id='dull2')
        revision_tree = tree.branch.repository.revision_tree('dull2')
        revision_tree.lock_read()
        self.addCleanup(revision_tree.unlock)
        self.assertEqual('dull', revision_tree.inventory.root.revision)

    def test_supports_external_lookups(self):
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertFalse(repo._format.supports_external_lookups)


class Test2a(tests.TestCaseWithMemoryTransport):

    def test_fetch_combines_groups(self):
        builder = self.make_branch_builder('source', format='2a')
        builder.start_series()
        builder.build_snapshot('1', None, [
            ('add', ('', 'root-id', 'directory', '')),
            ('add', ('file', 'file-id', 'file', 'content\n'))])
        builder.build_snapshot('2', ['1'], [
            ('modify', ('file-id', 'content-2\n'))])
        builder.finish_series()
        source = builder.get_branch()
        target = self.make_repository('target', format='2a')
        target.fetch(source.repository)
        self.addCleanup(target.unlock)
        details = target.texts._index.get_build_details(
            [('file-id', '1',), ('file-id', '2',)])
        file_1_details = details[('file-id', '1')]
        file_2_details = details[('file-id', '2')]
        # The index, and what to read off disk, should be the same for both
        # versions of the file.
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])

    def test_format_pack_compresses_True(self):
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo._format.pack_compresses)

    def test_inventories_use_chk_map_with_parent_base_dict(self):
        tree = self.make_branch_and_memory_tree('repo', format="2a")
        tree.add([''], ['TREE_ROOT'])
        revid = tree.commit("foo")
        self.addCleanup(tree.unlock)
        inv = tree.branch.repository.get_inventory(revid)
        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
        inv.parent_id_basename_to_file_id._ensure_root()
        inv.id_to_entry._ensure_root()
        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
        self.assertEqual(65536,
            inv.parent_id_basename_to_file_id._root_node.maximum_size)

    def test_autopack_unchanged_chk_nodes(self):
        # at 20 unchanged commits, chk pages are packed that are split into
        # two groups such that the new pack being made doesn't have all its
        # pages in the source packs (though they are in the repository).
        # Use a memory backed repository, we don't need to hit disk for this
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        for pos in range(20):
            tree.commit(str(pos))

    def test_pack_with_hint(self):
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        # 1 commit to leave untouched
        to_keep = tree.branch.repository._pack_collection.names()
        all = tree.branch.repository._pack_collection.names()
        combine = list(set(all) - set(to_keep))
        self.assertLength(3, all)
        self.assertLength(2, combine)
        tree.branch.repository.pack(hint=combine)
        final = tree.branch.repository._pack_collection.names()
        self.assertLength(2, final)
        self.assertFalse(combine[0] in final)
        self.assertFalse(combine[1] in final)
        self.assertSubset(to_keep, final)

    def test_stream_source_to_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='2a')
        stream = source._get_source(target._format)
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)

    def test_stream_source_to_non_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='rich-root-pack')
        stream = source._get_source(target._format)
        # We don't want the child GroupCHKStreamSource
        self.assertIs(type(stream), repository.StreamSource)

    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
        source_builder = self.make_branch_builder('source',
        # We have to build a fairly large tree, so that we are sure the chk
        # pages will have split into multiple pages.
        entries = [('add', ('', 'a-root-id', 'directory', None))]
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                content = 'content for %s\n' % (fname,)
                entries.append(('add', (fname, fid, 'file', content)))
        source_builder.start_series()
        source_builder.build_snapshot('rev-1', None, entries)
        # Now change a few of them, so we get a few new pages for the second
        source_builder.build_snapshot('rev-2', ['rev-1'], [
            ('modify', ('aa-id', 'new content for aa-id\n')),
            ('modify', ('cc-id', 'new content for cc-id\n')),
            ('modify', ('zz-id', 'new content for zz-id\n')),
        source_builder.finish_series()
        source_branch = source_builder.get_branch()
        source_branch.lock_read()
        self.addCleanup(source_branch.unlock)
        target = self.make_repository('target', format='2a')
        source = source_branch.repository._get_source(target._format)
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)
        # On a regular pass, getting the inventories and chk pages for rev-2
        # would only get the newly created chk pages
        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
        simple_chk_records = []
        for vf_name, substream in source.get_stream(search):
            if vf_name == 'chk_bytes':
                for record in substream:
                    simple_chk_records.append(record.key)
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
        # we should get a much larger set of pages.
        missing = [('inventories', 'rev-2')]
        full_chk_records = []
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
            if vf_name == 'inventories':
                for record in substream:
                    self.assertEqual(('rev-2',), record.key)
            elif vf_name == 'chk_bytes':
                for record in substream:
                    full_chk_records.append(record.key)
                self.fail('Should not be getting a stream of %s' % (vf_name,))
        # We have 257 records now. This is because we have 1 root page, and 256
        # leaf pages in a complete listing.
        self.assertEqual(257, len(full_chk_records))
        self.assertSubset(simple_chk_records, full_chk_records)

    def test_inconsistency_fatal(self):
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo.revisions._index._inconsistency_fatal)
        self.assertFalse(repo.texts._index._inconsistency_fatal)
        self.assertFalse(repo.inventories._index._inconsistency_fatal)
        self.assertFalse(repo.signatures._index._inconsistency_fatal)
        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)


class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):

    def test_source_to_exact_pack_092(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='pack-0.92')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_rich_root_pack(self):
        source = self.make_repository('source', format='rich-root-pack')
        target = self.make_repository('target', format='rich-root-pack')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19_rich_root(self):
        source = self.make_repository('source', format='1.9-rich-root')
        target = self.make_repository('target', format='1.9-rich-root')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_remote_exact_pack_19(self):
        trans = self.make_smart_server('target')
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_stream_source_to_non_exact(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='1.9')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), repository.StreamSource)

    def test_stream_source_to_non_exact_rich_root(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9-rich-root')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), repository.StreamSource)

    def test_source_to_remote_non_exact_pack_19(self):
        trans = self.make_smart_server('target')
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.6')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIs(type(stream_source), repository.StreamSource)

    def test_stream_source_to_knit(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='dirstate')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), repository.StreamSource)


class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
    """Tests for _find_parent_ids_of_revisions."""

        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
        self.builder = self.make_branch_builder('source',
            format='development6-rich-root')
        self.builder.start_series()
        self.builder.build_snapshot('initial', None,
            [('add', ('', 'tree-root', 'directory', None))])
        self.repo = self.builder.get_branch().repository
        self.addCleanup(self.builder.finish_series)

    def assertParentIds(self, expected_result, rev_set):
        self.assertEqual(sorted(expected_result),
            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))

    def test_simple(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        self.assertParentIds(['revid1'], rev_set)

    def test_not_first_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2'], [])
        rev_set = ['revid3', 'revid2']
        self.assertParentIds(['revid1'], rev_set)

    def test_not_null(self):
        rev_set = ['initial']
        self.assertParentIds([], rev_set)

    def test_not_null_set(self):
        self.builder.build_snapshot('revid1', None, [])
        rev_set = [_mod_revision.NULL_REVISION]
        self.assertParentIds([], rev_set)

    def test_ghost(self):
        self.builder.build_snapshot('revid1', None, [])
        rev_set = ['ghost', 'revid1']
        self.assertParentIds(['initial'], rev_set)

    def test_ghost_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
        rev_set = ['revid2', 'revid1']
        self.assertParentIds(['ghost', 'initial'], rev_set)

    def test_righthand_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2a', ['revid1'], [])
        self.builder.build_snapshot('revid2b', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
        rev_set = ['revid3', 'revid2a']
        self.assertParentIds(['revid1', 'revid2b'], rev_set)


class TestWithBrokenRepo(TestCaseWithTransport):
    """These tests seem to be more appropriate as interface tests?"""

    def make_broken_repository(self):
        # XXX: This function is borrowed from Aaron's "Reconcile can fix bad
        # parent references" branch which is due to land in bzr.dev soon. Once
        # it does, this duplication should be removed.
        repo = self.make_repository('broken-repo')
        cleanups.append(repo.unlock)
        repo.start_write_group()
        cleanups.append(repo.commit_write_group)

        # make rev1a: A well-formed revision, containing 'file1'
        inv = inventory.Inventory(revision_id='rev1a')
        inv.root.revision = 'rev1a'
        self.add_file(repo, inv, 'file1', 'rev1a', [])
        repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
        repo.add_inventory('rev1a', inv, [])
        revision = _mod_revision.Revision('rev1a',
            committer='jrandom@example.com', timestamp=0,
            inventory_sha1='', timezone=0, message='foo', parent_ids=[])
        repo.add_revision('rev1a', revision, inv)

        # make rev1b, which has no Revision, but has an Inventory, and
        inv = inventory.Inventory(revision_id='rev1b')
        inv.root.revision = 'rev1b'
        self.add_file(repo, inv, 'file1', 'rev1b', [])
        repo.add_inventory('rev1b', inv, [])

        # make rev2, with file1 and file2
        # file1 has 'rev1b' as an ancestor, even though this is not
        # mentioned by 'rev1a', making it an unreferenced ancestor
        inv = inventory.Inventory()
        self.add_file(repo, inv, 'file1', 'rev2', ['rev1a', 'rev1b'])
        self.add_file(repo, inv, 'file2', 'rev2', [])
        self.add_revision(repo, 'rev2', inv, ['rev1a'])

        # make ghost revision rev1c
        inv = inventory.Inventory()
        self.add_file(repo, inv, 'file2', 'rev1c', [])

        # make rev3 with file2
        # file2 refers to 'rev1c', which is a ghost in this repository, so
        # file2 cannot have rev1c as its ancestor.
        inv = inventory.Inventory()
        self.add_file(repo, inv, 'file2', 'rev3', ['rev1c'])
        self.add_revision(repo, 'rev3', inv, ['rev1c'])
        for cleanup in reversed(cleanups):

    def add_revision(self, repo, revision_id, inv, parent_ids):
        inv.revision_id = revision_id
        inv.root.revision = revision_id
        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
        repo.add_inventory(revision_id, inv, parent_ids)
        revision = _mod_revision.Revision(revision_id,
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
            timezone=0, message='foo', parent_ids=parent_ids)
        repo.add_revision(revision_id, revision, inv)

    def add_file(self, repo, inv, filename, revision, parents):
        file_id = filename + '-id'
        entry = inventory.InventoryFile(file_id, filename, 'TREE_ROOT')
        entry.revision = revision
        text_key = (file_id, revision)
        parent_keys = [(file_id, parent) for parent in parents]
        repo.texts.add_lines(text_key, parent_keys, ['line\n'])

    def test_insert_from_broken_repo(self):
        """Inserting a data stream from a broken repository won't silently
        corrupt the target repository.
        """
        broken_repo = self.make_broken_repository()
        empty_repo = self.make_repository('empty-repo')
        empty_repo.fetch(broken_repo)
        except (errors.RevisionNotPresent, errors.BzrCheckError):
            # Test successful: compression parent not being copied leads to
        empty_repo.lock_read()
        self.addCleanup(empty_repo.unlock)
        text = empty_repo.texts.get_record_stream(
            [('file2-id', 'rev3')], 'topological', True).next()
        self.assertEqual('line\n', text.get_bytes_as('fulltext'))


class TestRepositoryPackCollection(TestCaseWithTransport):

    def get_format(self):
        return bzrdir.format_registry.make_bzrdir('pack-0.92')

    def get_packs(self):
        format = self.get_format()
        repo = self.make_repository('.', format=format)
        return repo._pack_collection

    def make_packs_and_alt_repo(self, write_lock=False):
        """Create a pack repo with 3 packs, and access it via a second repo."""
        tree = self.make_branch_and_tree('.', format=self.get_format())
        self.addCleanup(tree.unlock)
        rev1 = tree.commit('one')
        rev2 = tree.commit('two')
        rev3 = tree.commit('three')
        r = repository.Repository.open('.')
        self.addCleanup(r.unlock)
        packs = r._pack_collection
        packs.ensure_loaded()
        return tree, r, packs, [rev1, rev2, rev3]

    def test__clear_obsolete_packs(self):
        packs = self.get_packs()
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
        res = packs._clear_obsolete_packs()
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
        self.assertEqual([], obsolete_pack_trans.list_dir('.'))

    def test__clear_obsolete_packs_preserve(self):
        packs = self.get_packs()
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
                         sorted(obsolete_pack_trans.list_dir('.')))

    def test__max_pack_count(self):
        """The maximum pack count is a function of the number of revisions."""
        # no revisions - one pack, so that we can have a revision free repo
        # without it blowing up
        packs = self.get_packs()
        self.assertEqual(1, packs._max_pack_count(0))
        # after that the sum of the digits, - check the first 1-9
        self.assertEqual(1, packs._max_pack_count(1))
        self.assertEqual(2, packs._max_pack_count(2))
        self.assertEqual(3, packs._max_pack_count(3))
        self.assertEqual(4, packs._max_pack_count(4))
        self.assertEqual(5, packs._max_pack_count(5))
        self.assertEqual(6, packs._max_pack_count(6))
        self.assertEqual(7, packs._max_pack_count(7))
        self.assertEqual(8, packs._max_pack_count(8))
        self.assertEqual(9, packs._max_pack_count(9))
        # check the boundary cases with two digits for the next decade
        self.assertEqual(1, packs._max_pack_count(10))
        self.assertEqual(2, packs._max_pack_count(11))
        self.assertEqual(10, packs._max_pack_count(19))
        self.assertEqual(2, packs._max_pack_count(20))
        self.assertEqual(3, packs._max_pack_count(21))
        # check some arbitrary big numbers
        self.assertEqual(25, packs._max_pack_count(112894))
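        # (Worked example of the digit-sum rule checked above: 112894 ->
        # 1+1+2+8+9+4 == 25, just as 19 -> 1+9 == 10 and 11 -> 1+1 == 2.)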

    def test_repr(self):
        packs = self.get_packs()
        self.assertContainsRe(repr(packs),
            'RepositoryPackCollection(.*Repository(.*))')

    def test__obsolete_packs(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        # Schedule this one for removal
        packs._remove_pack_from_memory(pack)
        # Simulate a concurrent update by renaming the .pack file and one of
        packs.transport.rename('packs/%s.pack' % (names[0],),
                               'obsolete_packs/%s.pack' % (names[0],))
        packs.transport.rename('indices/%s.iix' % (names[0],),
                               'obsolete_packs/%s.iix' % (names[0],))
        # Now trigger the obsoletion, and ensure that all the remaining files
        packs._obsolete_packs([pack])
        self.assertEqual([n + '.pack' for n in names[1:]],
                         sorted(packs._pack_transport.list_dir('.')))
        # names[0] should not be present in the index anymore
        self.assertEqual(names[1:],
            sorted(set([osutils.splitext(n)[0] for n in
                        packs._index_transport.list_dir('.')])))

    def test_pack_distribution_zero(self):
        packs = self.get_packs()
        self.assertEqual([0], packs.pack_distribution(0))

    def test_ensure_loaded_unlocked(self):
        packs = self.get_packs()
        self.assertRaises(errors.ObjectNotLocked,
                          packs.ensure_loaded)

    def test_pack_distribution_one_to_nine(self):
        packs = self.get_packs()
        self.assertEqual([1],
            packs.pack_distribution(1))
        self.assertEqual([1, 1],
            packs.pack_distribution(2))
        self.assertEqual([1, 1, 1],
            packs.pack_distribution(3))
        self.assertEqual([1, 1, 1, 1],
            packs.pack_distribution(4))
        self.assertEqual([1, 1, 1, 1, 1],
            packs.pack_distribution(5))
        self.assertEqual([1, 1, 1, 1, 1, 1],
            packs.pack_distribution(6))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(7))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(8))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(9))

    def test_pack_distribution_stable_at_boundaries(self):
        """When there are multi-rev packs the counts are stable."""
        packs = self.get_packs()
        self.assertEqual([10], packs.pack_distribution(10))
        self.assertEqual([10, 1], packs.pack_distribution(11))
        self.assertEqual([10, 10], packs.pack_distribution(20))
        self.assertEqual([10, 10, 1], packs.pack_distribution(21))
        self.assertEqual([100], packs.pack_distribution(100))
        self.assertEqual([100, 1], packs.pack_distribution(101))
        self.assertEqual([100, 10, 1], packs.pack_distribution(111))
        self.assertEqual([100, 100], packs.pack_distribution(200))
        self.assertEqual([100, 100, 1], packs.pack_distribution(201))
        self.assertEqual([100, 100, 10, 1], packs.pack_distribution(211))
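        # (These distributions follow the decimal digits of the revision
        # count, e.g. 211 -> two 100-revision packs, one 10-revision pack
        # and one single-revision pack; the number of buckets matches the
        # digit-sum bound exercised in test__max_pack_count above.)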

    def test_plan_pack_operations_2009_revisions_skip_all_packs(self):
        packs = self.get_packs()
        existing_packs = [(2000, "big"), (9, "medium")]
        # rev count - 2009 -> 2x1000 + 9x1
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 1, 1, 1, 1, 1, 1, 1, 1, 1])
        self.assertEqual([], pack_operations)

    def test_plan_pack_operations_2010_revisions_skip_all_packs(self):
        packs = self.get_packs()
        existing_packs = [(2000, "big"), (9, "medium"), (1, "single")]
        # rev count - 2010 -> 2x1000 + 1x10
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 10])
        self.assertEqual([], pack_operations)

    def test_plan_pack_operations_2010_combines_smallest_two(self):
        packs = self.get_packs()
        existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"),
        # rev count - 2010 -> 2x1000 + 1x10 (3)
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 10])
        self.assertEqual([[2, ["single2", "single1"]]], pack_operations)

    def test_plan_pack_operations_creates_a_single_op(self):
        packs = self.get_packs()
        existing_packs = [(50, 'a'), (40, 'b'), (30, 'c'), (10, 'd'),
                          (10, 'e'), (6, 'f'), (4, 'g')]
        # rev count 150 -> 1x100 and 5x10
        # The two size 10 packs do not need to be touched. The 50, 40, 30 would
        # be combined into a single 120 size pack, and the 6 & 4 would
        # be combined into a size 10 pack. However, if we have to rewrite them,
        # we save a pack file with no increased I/O by putting them into the
        distribution = packs.pack_distribution(150)
        pack_operations = packs.plan_autopack_combinations(existing_packs,
        self.assertEqual([[130, ['a', 'b', 'c', 'f', 'g']]], pack_operations)
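        # (50 + 40 + 30 + 6 + 4 == 130, so a single operation rewrites
        # everything except the two 10-revision packs 'd' and 'e'.)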

    def test_all_packs_none(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([], packs.all_packs())

    def test_all_packs_one(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
            packs.get_pack_by_name(packs.names()[0])],

    def test_all_packs_two(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.commit('continue')
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
            packs.get_pack_by_name(packs.names()[0]),
            packs.get_pack_by_name(packs.names()[1]),
            ], packs.all_packs())

    def test_get_pack_by_name(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        name = packs.names()[0]
        pack_1 = packs.get_pack_by_name(name)
        # the pack should be correctly initialised
        sizes = packs._names[name]
        rev_index = GraphIndex(packs._index_transport, name + '.rix', sizes[0])
        inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1])
        txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2])
        sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3])
        self.assertEqual(pack_repo.ExistingPack(packs._pack_transport,
            name, rev_index, inv_index, txt_index, sig_index), pack_1)
        # and the same instance should be returned on successive calls.
        self.assertTrue(pack_1 is packs.get_pack_by_name(name))

    def test_reload_pack_names_new_entry(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Add a new pack file into the repository
        rev4 = tree.commit('four')
        new_names = tree.branch.repository._pack_collection.names()
        new_name = set(new_names).difference(names)
        self.assertEqual(1, len(new_name))
        new_name = new_name.pop()
        # The old collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        # And the repository can access the new revision
        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
        self.assertFalse(packs.reload_pack_names())

    def test_reload_pack_names_added_and_removed(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Now repack the whole thing
        tree.branch.repository.pack()
        new_names = tree.branch.repository._pack_collection.names()
        # The other collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
        self.assertFalse(packs.reload_pack_names())

    def test_reload_pack_names_preserves_pending(self):
        # TODO: Update this to also test for pending-deleted names
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # We will add one pack (via start_write_group + insert_record_stream),
        # and remove another pack (via _remove_pack_from_memory)
        orig_names = packs.names()
        orig_at_load = packs._packs_at_load
        to_remove_name = iter(orig_names).next()
        r.start_write_group()
        self.addCleanup(r.abort_write_group)
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
            ('text', 'rev'), (), None, 'content\n')])
        new_pack = packs._new_pack
        self.assertTrue(new_pack.data_inserted())
        packs.allocate(new_pack)
        packs._new_pack = None
        removed_pack = packs.get_pack_by_name(to_remove_name)
        packs._remove_pack_from_memory(removed_pack)
        names = packs.names()
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
            sorted([x[0][0] for x in deleted_nodes]))
        packs.reload_pack_names()
        reloaded_names = packs.names()
        self.assertEqual(orig_at_load, packs._packs_at_load)
        self.assertEqual(names, reloaded_names)
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
            sorted([x[0][0] for x in deleted_nodes]))

    def test_autopack_obsoletes_new_pack(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        r.start_write_group()
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
            ('bogus-rev',), (), None, 'bogus-content\n')])
        # This should trigger an autopack, which will combine everything into a
        new_names = r.commit_write_group()
        names = packs.names()
        self.assertEqual(1, len(names))
        self.assertEqual([names[0] + '.pack'],
                         packs._pack_transport.list_dir('.'))

    def test_autopack_reloads_and_stops(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # After we have determined what needs to be autopacked, trigger a
        # full-pack via the other repo which will cause us to re-evaluate and
        # decide we don't need to do anything
        orig_execute = packs._execute_pack_operations
        def _munged_execute_pack_ops(*args, **kwargs):
            tree.branch.repository.pack()
            return orig_execute(*args, **kwargs)
        packs._execute_pack_operations = _munged_execute_pack_ops
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        self.assertFalse(packs.autopack())
        self.assertEqual(1, len(packs.names()))
        self.assertEqual(tree.branch.repository._pack_collection.names(),
def test__save_pack_names(self):
1458
tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1459
names = packs.names()
1460
pack = packs.get_pack_by_name(names[0])
1461
packs._remove_pack_from_memory(pack)
1462
packs._save_pack_names(obsolete_packs=[pack])
1463
cur_packs = packs._pack_transport.list_dir('.')
1464
self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
1465
# obsolete_packs will also have stuff like .rix and .iix present.
1466
obsolete_packs = packs.transport.list_dir('obsolete_packs')
1467
obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
1468
self.assertEqual([pack.name], sorted(obsolete_names))

    def test__save_pack_names_already_obsoleted(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        # We are going to simulate a concurrent autopack by manually obsoleting
        # the pack directly.
        packs._obsolete_packs([pack])
        packs._save_pack_names(clear_obsolete_packs=True,
                               obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # Note that while we set clear_obsolete_packs=True, it should not
        # delete a pack file that we have also scheduled for obsoletion.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
        self.assertEqual([pack.name], sorted(obsolete_names))


class TestPack(TestCaseWithTransport):
    """Tests for the Pack object."""

    def assertCurrentlyEqual(self, left, right):
        self.assertTrue(left == right)
        self.assertTrue(right == left)
        self.assertFalse(left != right)
        self.assertFalse(right != left)

    def assertCurrentlyNotEqual(self, left, right):
        self.assertFalse(left == right)
        self.assertFalse(right == left)
        self.assertTrue(left != right)
        self.assertTrue(right != left)

    def test___eq____ne__(self):
        left = pack_repo.ExistingPack('', '', '', '', '', '')
        right = pack_repo.ExistingPack('', '', '', '', '', '')
        self.assertCurrentlyEqual(left, right)
        # change all attributes and ensure equality changes as we do.
        left.revision_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.revision_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.inventory_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.inventory_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.text_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.text_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.signature_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.signature_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.name = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.name = 'a'
        self.assertCurrentlyEqual(left, right)
        left.transport = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.transport = 'a'
        self.assertCurrentlyEqual(left, right)

    def test_file_name(self):
        pack = pack_repo.ExistingPack('', 'a_name', '', '', '', '')
        self.assertEqual('a_name.pack', pack.file_name())


class TestNewPack(TestCaseWithTransport):
    """Tests for pack_repo.NewPack."""

    def test_new_instance_attributes(self):
        upload_transport = self.get_transport('upload')
        pack_transport = self.get_transport('pack')
        index_transport = self.get_transport('index')
        upload_transport.mkdir('.')
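        # Build a pack collection by hand (with no backing repository) so that
        # NewPack can be instantiated and its attributes inspected directly.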
        collection = pack_repo.RepositoryPackCollection(
            repo=None,
            transport=self.get_transport('.'),
            index_transport=index_transport,
            upload_transport=upload_transport,
            pack_transport=pack_transport,
            index_builder_class=BTreeBuilder,
            index_class=BTreeGraphIndex,
            use_chk_index=False)
        pack = pack_repo.NewPack(collection)
        self.addCleanup(pack.abort) # Make sure the write stream gets closed
        self.assertIsInstance(pack.revision_index, BTreeBuilder)
        self.assertIsInstance(pack.inventory_index, BTreeBuilder)
        self.assertIsInstance(pack._hash, type(osutils.md5()))
        self.assertTrue(pack.upload_transport is upload_transport)
        self.assertTrue(pack.index_transport is index_transport)
        self.assertTrue(pack.pack_transport is pack_transport)
        self.assertEqual(None, pack.index_sizes)
        self.assertEqual(20, len(pack.random_name))
        self.assertIsInstance(pack.random_name, str)
        self.assertIsInstance(pack.start_time, float)


class TestPacker(TestCaseWithTransport):
    """Tests for the packs repository Packer class."""

    def test_pack_optimizes_pack_order(self):
        builder = self.make_branch_builder('.', format="1.9")
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('f', 'f-id', 'file', 'content\n'))])
        builder.build_snapshot('B', ['A'],
            [('modify', ('f-id', 'new-content\n'))])
        builder.build_snapshot('C', ['B'],
            [('modify', ('f-id', 'third-content\n'))])
        builder.build_snapshot('D', ['C'],
            [('modify', ('f-id', 'fourth-content\n'))])
        b = builder.get_branch()
        b.lock_read()
        builder.finish_series()
        self.addCleanup(b.unlock)
        # At this point, we should have 4 pack files available.
        # Because of how they were built, they correspond to
        # ['D', 'C', 'B', 'A']
        packs = b.repository._pack_collection.packs
        packer = pack_repo.Packer(b.repository._pack_collection,
                                  packs, 'testing',
                                  revision_ids=['B', 'C'])
        # Now, when we are copying the B & C revisions, their pack files should
        # be moved to the front of the stack.
        # The new ordering moves B & C to the front of the .packs attribute,
        # and leaves the others in the original order.
        new_packs = [packs[1], packs[2], packs[0], packs[3]]
        new_pack = packer.pack()
        self.assertEqual(new_packs, packer.packs)


class TestOptimisingPacker(TestCaseWithTransport):
    """Tests for the OptimisingPacker class."""

    def get_pack_collection(self):
        repo = self.make_repository('.')
        return repo._pack_collection

    def test_open_pack_will_optimise(self):
        packer = pack_repo.OptimisingPacker(self.get_pack_collection(),
                                            [], '.test')
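        # open_pack() on an OptimisingPacker should produce a NewPack whose
        # index builders are all flagged to optimise for size.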
        new_pack = packer.open_pack()
        self.addCleanup(new_pack.abort) # ensure cleanup
        self.assertIsInstance(new_pack, pack_repo.NewPack)
        self.assertTrue(new_pack.revision_index._optimize_for_size)
        self.assertTrue(new_pack.inventory_index._optimize_for_size)
        self.assertTrue(new_pack.text_index._optimize_for_size)
        self.assertTrue(new_pack.signature_index._optimize_for_size)


class TestCrossFormatPacks(TestCaseWithTransport):

    def log_pack(self, hint=None):
        self.calls.append(('pack', hint))
        self.orig_pack(hint=hint)
        if self.expect_hint:
            self.assertTrue(hint)

    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository._get_source(target._format)
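        # Replace target.pack with log_pack so we can record whether inserting
        # the stream triggers a pack call and whether a pack hint is supplied.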
        self.orig_pack = target.pack
        target.pack = self.log_pack
        search = target.search_missing_revision_ids(
            source_tree.branch.repository, tip)
        stream = source.get_stream(search)
        from_format = source_tree.branch.repository._format
        sink = target._get_sink()
        sink.insert_stream(stream, from_format, [])
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository
        self.orig_pack = target.pack
        target.pack = self.log_pack
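        # fetch() exercises the inter-repository fetch path (the "IDS" in the
        # test names presumably refers to InterDifferingSerializer); the pack
        # hint behaviour is checked the same way as for the stream tests.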
        target.fetch(source)
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def test_sink_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_stream('1.9', 'rich-root-pack', False)

    def test_sink_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_stream('1.9', '2a', True)

    def test_sink_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_stream('2a', '2a', False)

    def test_IDS_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_fetch('1.9', 'rich-root-pack', False)

    def test_IDS_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_fetch('1.9', '2a', True)

    def test_IDS_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_fetch('2a', '2a', False)