# Copyright (C) 2006-2011 Canonical Ltd
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""Tests for the Repository facility that are not interface tests.

For interface tests see tests/per_repository/*.py.

For concrete class tests see this file, and for storage formats tests
from stat import S_ISDIR

from bzrlib.errors import (
    UnsupportedFormatError,
from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
from bzrlib.index import GraphIndex
from bzrlib.repository import RepositoryFormat
from bzrlib.tests import (
    TestCaseWithTransport,
    revision as _mod_revision,
from bzrlib.repofmt import (
class TestDefaultFormat(TestCase):

    def test_get_set_default_format(self):
        old_default = bzrdir.format_registry.get('default')
        private_default = old_default().repository_format.__class__
        old_format = repository.format_registry.get_default()
        self.assertTrue(isinstance(old_format, private_default))
        def make_sample_bzrdir():
            my_bzrdir = bzrdir.BzrDirMetaFormat1()
            my_bzrdir.repository_format = SampleRepositoryFormat()
        bzrdir.format_registry.remove('default')
        bzrdir.format_registry.register('sample', make_sample_bzrdir, '')
        bzrdir.format_registry.set_default('sample')
        # creating a repository should now create an instrumented dir.
        # the default branch format is used by the meta dir format
        # which is not the default bzrdir format at this point
        dir = bzrdir.BzrDirMetaFormat1().initialize('memory:///')
        result = dir.create_repository()
        self.assertEqual(result, 'A bzr repository dir')
        bzrdir.format_registry.remove('default')
        bzrdir.format_registry.remove('sample')
        bzrdir.format_registry.register('default', old_default, '')
        self.assertIsInstance(repository.format_registry.get_default(),
class SampleRepositoryFormat(repository.RepositoryFormatMetaDir):

    this format is initializable, unsupported to aid in testing the
    open and open(unsupported=True) routines.

    def get_format_string(cls):
        """See RepositoryFormat.get_format_string()."""
        return "Sample .bzr repository format."

    def initialize(self, a_bzrdir, shared=False):
        """Initialize a repository in a BzrDir"""
        t = a_bzrdir.get_repository_transport(self)
        t.put_bytes('format', self.get_format_string())
        return 'A bzr repository dir'

    def is_supported(self):

    def open(self, a_bzrdir, _found=False):
        return "opened repository."


class SampleExtraRepositoryFormat(repository.RepositoryFormat):
    """A sample format that can not be used in a metadir

    def get_format_string(self):
        raise NotImplementedError
class TestRepositoryFormat(TestCaseWithTransport):
    """Tests for the Repository format detection used by the bzr meta dir facility."""

    def test_find_format(self):
        # is the right format object found for a repository?
        # create a branch with a few known format objects.
        # this is not quite the same as
        self.build_tree(["foo/", "bar/"])
        def check_format(format, url):
            dir = format._matchingbzrdir.initialize(url)
            format.initialize(dir)
            t = transport.get_transport_from_path(url)
            found_format = repository.RepositoryFormatMetaDir.find_format(dir)
            self.assertIsInstance(found_format, format.__class__)
        check_format(repository.format_registry.get_default(), "bar")

    def test_find_format_no_repository(self):
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        self.assertRaises(errors.NoRepositoryPresent,
                          repository.RepositoryFormatMetaDir.find_format,

    def test_from_string(self):
        self.assertIsInstance(
            SampleRepositoryFormat.from_string(
                "Sample .bzr repository format."),
            SampleRepositoryFormat)
        self.assertRaises(AssertionError,
                          SampleRepositoryFormat.from_string,
                          "Different .bzr repository format.")

    def test_find_format_unknown_format(self):
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        SampleRepositoryFormat().initialize(dir)
        self.assertRaises(UnknownFormatError,
                          repository.RepositoryFormatMetaDir.find_format,

    def test_find_format_with_features(self):
        tree = self.make_branch_and_tree('.', format='2a')
        tree.branch.repository.update_feature_flags({"name": "necessity"})
        found_format = repository.RepositoryFormatMetaDir.find_format(tree.bzrdir)
        self.assertIsInstance(found_format, repository.RepositoryFormatMetaDir)
        self.assertEquals(found_format.features.get("name"), "necessity")
        self.assertRaises(errors.MissingFeature, found_format.check_support_status,
        self.addCleanup(repository.RepositoryFormatMetaDir.unregister_feature,
        repository.RepositoryFormatMetaDir.register_feature("name")
        found_format.check_support_status(True)

    def test_register_unregister_format(self):
        # Test deprecated format registration functions
        format = SampleRepositoryFormat()
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        format.initialize(dir)
        # register a format for it.
        self.applyDeprecated(symbol_versioning.deprecated_in((2, 4, 0)),
            repository.RepositoryFormat.register_format, format)
        # which repository.Open will refuse (not supported)
        self.assertRaises(UnsupportedFormatError, repository.Repository.open,
        # but open(unsupported) will work
        self.assertEqual(format.open(dir), "opened repository.")
        # unregister the format
        self.applyDeprecated(symbol_versioning.deprecated_in((2, 4, 0)),
            repository.RepositoryFormat.unregister_format, format)
class TestRepositoryFormatRegistry(TestCase):

        super(TestRepositoryFormatRegistry, self).setUp()
        self.registry = repository.RepositoryFormatRegistry()

    def test_register_unregister_format(self):
        format = SampleRepositoryFormat()
        self.registry.register(format)
        self.assertEquals(format, self.registry.get("Sample .bzr repository format."))
        self.registry.remove(format)
        self.assertRaises(KeyError, self.registry.get, "Sample .bzr repository format.")

    def test_get_all(self):
        format = SampleRepositoryFormat()
        self.assertEquals([], self.registry._get_all())
        self.registry.register(format)
        self.assertEquals([format], self.registry._get_all())

    def test_register_extra(self):
        format = SampleExtraRepositoryFormat()
        self.assertEquals([], self.registry._get_all())
        self.registry.register_extra(format)
        self.assertEquals([format], self.registry._get_all())

    def test_register_extra_lazy(self):
        self.assertEquals([], self.registry._get_all())
        self.registry.register_extra_lazy("bzrlib.tests.test_repository",
            "SampleExtraRepositoryFormat")
        formats = self.registry._get_all()
        self.assertEquals(1, len(formats))
        self.assertIsInstance(formats[0], SampleExtraRepositoryFormat)
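        # Editor's note (hedged, not part of the original test): the lazy
        # registration above stores only module and class names; assuming
        # the registry semantics exercised here, the class is imported when
        # _get_all() resolves it, which is why the isinstance check above
        # still sees a SampleExtraRepositoryFormat instance.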
class TestFormatKnit1(TestCaseWithTransport):

    def test_attribute__fetch_order(self):
        """Knits need topological data insertion."""
        repo = self.make_repository('.',
                format=bzrdir.format_registry.get('knit')())
        self.assertEqual('topological', repo._format._fetch_order)

    def test_attribute__fetch_uses_deltas(self):
        """Knits reuse deltas."""
        repo = self.make_repository('.',
                format=bzrdir.format_registry.get('knit')())
        self.assertEqual(True, repo._format._fetch_uses_deltas)

    def test_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = knitrepo.RepositoryFormatKnit1().initialize(control)
        # in case of side effects of locking.
        # format 'Bazaar-NG Knit Repository Format 1'
        # lock: is a directory
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
                             t.get('format').read())
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
        # Check per-file knits.
        branch = control.create_branch()
        tree = control.create_workingtree()
        tree.add(['foo'], ['Nasty-IdC:'], ['file'])
        tree.put_file_bytes_non_atomic('Nasty-IdC:', '')
        tree.commit('1st post', rev_id='foo')
        self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
            '\nfoo fulltext 0 81 :')
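        # Editor's sketch (an assumption about the .kndx layout, not taken
        # from this file): each index line is roughly
        #     <version-id> <storage-flags> <start-byte> <length> <parents> :
        # so 'foo fulltext 0 81 :' would describe version 'foo' stored as a
        # fulltext starting at byte 0, 81 bytes long, with no parents.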

    def assertHasKnit(self, t, knit_name, extra_content=''):
        """Assert that knit_name exists on t."""
        self.assertEqualDiff('# bzr knit index 8\n' + extra_content,
                             t.get(knit_name + '.kndx').read())

    def check_knits(self, t):
        """check knit content for a repository."""
        self.assertHasKnit(t, 'inventory')
        self.assertHasKnit(t, 'revisions')
        self.assertHasKnit(t, 'signatures')

    def test_shared_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
        # format 'Bazaar-NG Knit Repository Format 1'
        # lock: is a directory
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        # a 'shared-storage' marker file.
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
                             t.get('format').read())
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        self.assertEqualDiff('', t.get('shared-storage').read())
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))

    def test_shared_no_tree_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
        repo.set_make_working_trees(False)
        # format 'Bazaar-NG Knit Repository Format 1'
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        # a 'shared-storage' marker file.
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
                             t.get('format').read())
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        self.assertEqualDiff('', t.get('shared-storage').read())
        self.assertEqualDiff('', t.get('no-working-trees').read())
        repo.set_make_working_trees(True)
        self.assertFalse(t.has('no-working-trees'))
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))

    def test_deserialise_sets_root_revision(self):
        """We must have an inventory.root.revision

        Old versions of the XML5 serializer did not set the revision_id for
        the whole inventory. So we grab the one from the expected text. Which
        is valid when the api is not being abused.

        repo = self.make_repository('.',
                format=bzrdir.format_registry.get('knit')())
        inv_xml = '<inventory format="5">\n</inventory>\n'
        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
        self.assertEqual('test-rev-id', inv.root.revision)

    def test_deserialise_uses_global_revision_id(self):
        """If it is set, then we re-use the global revision id"""
        repo = self.make_repository('.',
                format=bzrdir.format_registry.get('knit')())
        inv_xml = ('<inventory format="5" revision_id="other-rev-id">\n'
        # Arguably, the deserialise_inventory should detect a mismatch, and
        # raise an error, rather than silently using one revision_id over the
        self.assertRaises(AssertionError, repo._deserialise_inventory,
                          'test-rev-id', inv_xml)
        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
        self.assertEqual('other-rev-id', inv.root.revision)

    def test_supports_external_lookups(self):
        repo = self.make_repository('.',
                format=bzrdir.format_registry.get('knit')())
        self.assertFalse(repo._format.supports_external_lookups)
class DummyRepository(object):
    """A dummy repository for testing."""

    def supports_rich_root(self):
        if self._format is not None:
            return self._format.rich_root_data
        raise NotImplementedError

    def get_parent_map(self, revision_ids):
        raise NotImplementedError


class InterDummy(repository.InterRepository):
    """An inter-repository optimised code path for DummyRepository.

    This is for use during testing where we use DummyRepository as repositories
    so that none of the default registered inter-repository classes will

    def is_compatible(repo_source, repo_target):
        """InterDummy is compatible with DummyRepository."""
        return (isinstance(repo_source, DummyRepository) and
            isinstance(repo_target, DummyRepository))
class TestInterRepository(TestCaseWithTransport):

    def test_get_default_inter_repository(self):
        # test that the InterRepository.get(repo_a, repo_b) probes
        # for an inter_repo class where is_compatible(repo_a, repo_b) returns
        # true and returns a default inter_repo otherwise.
        # This also tests that the default registered optimised interrepository
        # classes do not barf inappropriately when a surprising repository type
        dummy_a = DummyRepository()
        dummy_a._format = RepositoryFormat()
        dummy_a._format.supports_full_versioned_files = True
        dummy_b = DummyRepository()
        dummy_b._format = RepositoryFormat()
        dummy_b._format.supports_full_versioned_files = True
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)

    def assertGetsDefaultInterRepository(self, repo_a, repo_b):
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default.

        The effective default is now InterSameDataRepository because there is
        no actual sane default in the presence of incompatible data models.

        inter_repo = repository.InterRepository.get(repo_a, repo_b)
        self.assertEqual(vf_repository.InterSameDataRepository,
                         inter_repo.__class__)
        self.assertEqual(repo_a, inter_repo.source)
        self.assertEqual(repo_b, inter_repo.target)

    def test_register_inter_repository_class(self):
        # test that an optimised code path provider - an
        # InterRepository subclass can be registered and unregistered
        # and that it is correctly selected when given a repository
        # pair that it returns true on for the is_compatible static method
        dummy_a = DummyRepository()
        dummy_a._format = RepositoryFormat()
        dummy_b = DummyRepository()
        dummy_b._format = RepositoryFormat()
        repo = self.make_repository('.')
        # hack dummies to look like repo somewhat.
        dummy_a._serializer = repo._serializer
        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
        dummy_a._format.rich_root_data = repo._format.rich_root_data
        dummy_a._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
        dummy_b._serializer = repo._serializer
        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
        dummy_b._format.rich_root_data = repo._format.rich_root_data
        dummy_b._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
        repository.InterRepository.register_optimiser(InterDummy)
        # we should get the default for something InterDummy returns False
        self.assertFalse(InterDummy.is_compatible(dummy_a, repo))
        self.assertGetsDefaultInterRepository(dummy_a, repo)
        # and we should get an InterDummy for a pair it 'likes'
        self.assertTrue(InterDummy.is_compatible(dummy_a, dummy_b))
        inter_repo = repository.InterRepository.get(dummy_a, dummy_b)
        self.assertEqual(InterDummy, inter_repo.__class__)
        self.assertEqual(dummy_a, inter_repo.source)
        self.assertEqual(dummy_b, inter_repo.target)
        repository.InterRepository.unregister_optimiser(InterDummy)
        # now we should get the default InterRepository object again.
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
class TestRepositoryFormat1(knitrepo.RepositoryFormatKnit1):

    def get_format_string(cls):
        return "Test Format 1"


class TestRepositoryFormat2(knitrepo.RepositoryFormatKnit1):

    def get_format_string(cls):
        return "Test Format 2"


class TestRepositoryConverter(TestCaseWithTransport):

    def test_convert_empty(self):
        source_format = TestRepositoryFormat1()
        target_format = TestRepositoryFormat2()
        repository.format_registry.register(source_format)
        self.addCleanup(repository.format_registry.remove,
        repository.format_registry.register(target_format)
        self.addCleanup(repository.format_registry.remove,
        t = self.get_transport()
        t.mkdir('repository')
        repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
        repo = TestRepositoryFormat1().initialize(repo_dir)
        converter = repository.CopyConverter(target_format)
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
        converter.convert(repo, pb)
        repo = repo_dir.open_repository()
        self.assertTrue(isinstance(target_format, repo._format.__class__))
class TestRepositoryFormatKnit3(TestCaseWithTransport):

    def test_attribute__fetch_order(self):
        """Knits need topological data insertion."""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertEqual('topological', repo._format._fetch_order)

    def test_attribute__fetch_uses_deltas(self):
        """Knits reuse deltas."""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertEqual(True, repo._format._fetch_uses_deltas)

    def test_convert(self):
        """Ensure the upgrade adds weaves for roots"""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit1()
        tree = self.make_branch_and_tree('.', format)
        tree.commit("Dull commit", rev_id="dull")
        revision_tree = tree.branch.repository.revision_tree('dull')
        revision_tree.lock_read()
        self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
            revision_tree.inventory.root.file_id)
        revision_tree.unlock()
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        upgrade.Convert('.', format)
        tree = workingtree.WorkingTree.open('.')
        revision_tree = tree.branch.repository.revision_tree('dull')
        revision_tree.lock_read()
        revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
        revision_tree.unlock()
        tree.commit("Another dull commit", rev_id='dull2')
        revision_tree = tree.branch.repository.revision_tree('dull2')
        revision_tree.lock_read()
        self.addCleanup(revision_tree.unlock)
        self.assertEqual('dull', revision_tree.inventory.root.revision)

    def test_supports_external_lookups(self):
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertFalse(repo._format.supports_external_lookups)
class Test2a(tests.TestCaseWithMemoryTransport):

    def test_chk_bytes_uses_custom_btree_parser(self):
        mt = self.make_branch_and_memory_tree('test', format='2a')
        self.addCleanup(mt.unlock)
        mt.add([''], ['root-id'])
        index = mt.branch.repository.chk_bytes._index._graph_index._indices[0]
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
        # It should also work if we re-open the repo
        repo = mt.branch.repository.bzrdir.open_repository()
        self.addCleanup(repo.unlock)
        index = repo.chk_bytes._index._graph_index._indices[0]
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)

    def test_fetch_combines_groups(self):
        builder = self.make_branch_builder('source', format='2a')
        builder.start_series()
        builder.build_snapshot('1', None, [
            ('add', ('', 'root-id', 'directory', '')),
            ('add', ('file', 'file-id', 'file', 'content\n'))])
        builder.build_snapshot('2', ['1'], [
            ('modify', ('file-id', 'content-2\n'))])
        builder.finish_series()
        source = builder.get_branch()
        target = self.make_repository('target', format='2a')
        target.fetch(source.repository)
        self.addCleanup(target.unlock)
        details = target.texts._index.get_build_details(
            [('file-id', '1',), ('file-id', '2',)])
        file_1_details = details[('file-id', '1')]
        file_2_details = details[('file-id', '2')]
        # The index, and what to read off disk, should be the same for both
        # versions of the file.
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])

    def test_format_pack_compresses_True(self):
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo._format.pack_compresses)

    def test_inventories_use_chk_map_with_parent_base_dict(self):
        tree = self.make_branch_and_memory_tree('repo', format="2a")
        tree.add([''], ['TREE_ROOT'])
        revid = tree.commit("foo")
        self.addCleanup(tree.unlock)
        inv = tree.branch.repository.get_inventory(revid)
        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
        inv.parent_id_basename_to_file_id._ensure_root()
        inv.id_to_entry._ensure_root()
        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
        self.assertEqual(65536,
            inv.parent_id_basename_to_file_id._root_node.maximum_size)

    def test_autopack_unchanged_chk_nodes(self):
        # at 20 unchanged commits, chk pages are packed that are split into
        # two groups such that the new pack being made doesn't have all its
        # pages in the source packs (though they are in the repository).
        # Use a memory backed repository, we don't need to hit disk for this
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        for pos in range(20):
            tree.commit(str(pos))

    def test_pack_with_hint(self):
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        # 1 commit to leave untouched
        to_keep = tree.branch.repository._pack_collection.names()
        all = tree.branch.repository._pack_collection.names()
        combine = list(set(all) - set(to_keep))
        self.assertLength(3, all)
        self.assertLength(2, combine)
        tree.branch.repository.pack(hint=combine)
        final = tree.branch.repository._pack_collection.names()
        self.assertLength(2, final)
        self.assertFalse(combine[0] in final)
        self.assertFalse(combine[1] in final)
        self.assertSubset(to_keep, final)
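        # Editor's sketch (an assumption drawn from the calls above, not
        # original code): pack(hint=...) takes a list of pack names and only
        # combines those packs, e.g.
        #     tree.branch.repository.pack(hint=['name-a', 'name-b'])
        # so any pack not named in the hint is left untouched, which is what
        # the assertions on 'to_keep' and 'final' check.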

    def test_stream_source_to_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='2a')
        stream = source._get_source(target._format)
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)

    def test_stream_source_to_non_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='rich-root-pack')
        stream = source._get_source(target._format)
        # We don't want the child GroupCHKStreamSource
        self.assertIs(type(stream), vf_repository.StreamSource)

    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
        source_builder = self.make_branch_builder('source',
        # We have to build a fairly large tree, so that we are sure the chk
        # pages will have split into multiple pages.
        entries = [('add', ('', 'a-root-id', 'directory', None))]
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                content = 'content for %s\n' % (fname,)
                entries.append(('add', (fname, fid, 'file', content)))
        source_builder.start_series()
        source_builder.build_snapshot('rev-1', None, entries)
        # Now change a few of them, so we get a few new pages for the second
        source_builder.build_snapshot('rev-2', ['rev-1'], [
            ('modify', ('aa-id', 'new content for aa-id\n')),
            ('modify', ('cc-id', 'new content for cc-id\n')),
            ('modify', ('zz-id', 'new content for zz-id\n')),
        source_builder.finish_series()
        source_branch = source_builder.get_branch()
        source_branch.lock_read()
        self.addCleanup(source_branch.unlock)
        target = self.make_repository('target', format='2a')
        source = source_branch.repository._get_source(target._format)
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)
        # On a regular pass, getting the inventories and chk pages for rev-2
        # would only get the newly created chk pages
        search = vf_search.SearchResult(set(['rev-2']), set(['rev-1']), 1,
        simple_chk_records = []
        for vf_name, substream in source.get_stream(search):
            if vf_name == 'chk_bytes':
                for record in substream:
                    simple_chk_records.append(record.key)
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
        # we should get a much larger set of pages.
        missing = [('inventories', 'rev-2')]
        full_chk_records = []
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
            if vf_name == 'inventories':
                for record in substream:
                    self.assertEqual(('rev-2',), record.key)
            elif vf_name == 'chk_bytes':
                for record in substream:
                    full_chk_records.append(record.key)
                self.fail('Should not be getting a stream of %s' % (vf_name,))
        # We have 257 records now. This is because we have 1 root page, and 256
        # leaf pages in a complete listing.
        self.assertEqual(257, len(full_chk_records))
        self.assertSubset(simple_chk_records, full_chk_records)
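        # Editor's note (derived from the loops above, not original text):
        # the builder adds 35 * 35 = 1225 files (26 letters plus 9 digits per
        # name position), and a complete CHK listing here resolves to 1 root
        # page plus 256 leaf pages, hence the expected 257 chk records.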

    def test_inconsistency_fatal(self):
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo.revisions._index._inconsistency_fatal)
        self.assertFalse(repo.texts._index._inconsistency_fatal)
        self.assertFalse(repo.inventories._index._inconsistency_fatal)
        self.assertFalse(repo.signatures._index._inconsistency_fatal)
        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)


class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):

    def test_source_to_exact_pack_092(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='pack-0.92')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_rich_root_pack(self):
        source = self.make_repository('source', format='rich-root-pack')
        target = self.make_repository('target', format='rich-root-pack')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19_rich_root(self):
        source = self.make_repository('source', format='1.9-rich-root')
        target = self.make_repository('target', format='1.9-rich-root')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_remote_exact_pack_19(self):
        trans = self.make_smart_server('target')
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_stream_source_to_non_exact(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='1.9')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), vf_repository.StreamSource)

    def test_stream_source_to_non_exact_rich_root(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9-rich-root')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), vf_repository.StreamSource)

    def test_source_to_remote_non_exact_pack_19(self):
        trans = self.make_smart_server('target')
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.6')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIs(type(stream_source), vf_repository.StreamSource)

    def test_stream_source_to_knit(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='dirstate')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), vf_repository.StreamSource)
class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
    """Tests for _find_parent_ids_of_revisions."""

        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
        self.builder = self.make_branch_builder('source')
        self.builder.start_series()
        self.builder.build_snapshot('initial', None,
            [('add', ('', 'tree-root', 'directory', None))])
        self.repo = self.builder.get_branch().repository
        self.addCleanup(self.builder.finish_series)

    def assertParentIds(self, expected_result, rev_set):
        self.assertEqual(sorted(expected_result),
            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))

    def test_simple(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        self.assertParentIds(['revid1'], rev_set)

    def test_not_first_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2'], [])
        rev_set = ['revid3', 'revid2']
        self.assertParentIds(['revid1'], rev_set)

    def test_not_null(self):
        rev_set = ['initial']
        self.assertParentIds([], rev_set)

    def test_not_null_set(self):
        self.builder.build_snapshot('revid1', None, [])
        rev_set = [_mod_revision.NULL_REVISION]
        self.assertParentIds([], rev_set)

    def test_ghost(self):
        self.builder.build_snapshot('revid1', None, [])
        rev_set = ['ghost', 'revid1']
        self.assertParentIds(['initial'], rev_set)

    def test_ghost_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
        rev_set = ['revid2', 'revid1']
        self.assertParentIds(['ghost', 'initial'], rev_set)

    def test_righthand_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2a', ['revid1'], [])
        self.builder.build_snapshot('revid2b', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
        rev_set = ['revid3', 'revid2a']
        self.assertParentIds(['revid1', 'revid2b'], rev_set)
class TestWithBrokenRepo(TestCaseWithTransport):
    """These tests seem to be more appropriate as interface tests?"""

    def make_broken_repository(self):
        # XXX: This function is borrowed from Aaron's "Reconcile can fix bad
        # parent references" branch which is due to land in bzr.dev soon. Once
        # it does, this duplication should be removed.
        repo = self.make_repository('broken-repo')
        cleanups.append(repo.unlock)
        repo.start_write_group()
        cleanups.append(repo.commit_write_group)
        # make rev1a: A well-formed revision, containing 'file1'
        inv = inventory.Inventory(revision_id='rev1a')
        inv.root.revision = 'rev1a'
        self.add_file(repo, inv, 'file1', 'rev1a', [])
        repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
        repo.add_inventory('rev1a', inv, [])
        revision = _mod_revision.Revision('rev1a',
            committer='jrandom@example.com', timestamp=0,
            inventory_sha1='', timezone=0, message='foo', parent_ids=[])
        repo.add_revision('rev1a', revision, inv)
        # make rev1b, which has no Revision, but has an Inventory, and
        inv = inventory.Inventory(revision_id='rev1b')
        inv.root.revision = 'rev1b'
        self.add_file(repo, inv, 'file1', 'rev1b', [])
        repo.add_inventory('rev1b', inv, [])
        # make rev2, with file1 and file2
        # file1 has 'rev1b' as an ancestor, even though this is not
        # mentioned by 'rev1a', making it an unreferenced ancestor
        inv = inventory.Inventory()
        self.add_file(repo, inv, 'file1', 'rev2', ['rev1a', 'rev1b'])
        self.add_file(repo, inv, 'file2', 'rev2', [])
        self.add_revision(repo, 'rev2', inv, ['rev1a'])
        # make ghost revision rev1c
        inv = inventory.Inventory()
        self.add_file(repo, inv, 'file2', 'rev1c', [])
        # make rev3 with file2
        # file2 refers to 'rev1c', which is a ghost in this repository, so
        # file2 cannot have rev1c as its ancestor.
        inv = inventory.Inventory()
        self.add_file(repo, inv, 'file2', 'rev3', ['rev1c'])
        self.add_revision(repo, 'rev3', inv, ['rev1c'])
        for cleanup in reversed(cleanups):

    def add_revision(self, repo, revision_id, inv, parent_ids):
        inv.revision_id = revision_id
        inv.root.revision = revision_id
        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
        repo.add_inventory(revision_id, inv, parent_ids)
        revision = _mod_revision.Revision(revision_id,
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
            timezone=0, message='foo', parent_ids=parent_ids)
        repo.add_revision(revision_id, revision, inv)

    def add_file(self, repo, inv, filename, revision, parents):
        file_id = filename + '-id'
        entry = inventory.InventoryFile(file_id, filename, 'TREE_ROOT')
        entry.revision = revision
        text_key = (file_id, revision)
        parent_keys = [(file_id, parent) for parent in parents]
        repo.texts.add_lines(text_key, parent_keys, ['line\n'])

    def test_insert_from_broken_repo(self):
        """Inserting a data stream from a broken repository won't silently
        corrupt the target repository.

        broken_repo = self.make_broken_repository()
        empty_repo = self.make_repository('empty-repo')
            empty_repo.fetch(broken_repo)
        except (errors.RevisionNotPresent, errors.BzrCheckError):
            # Test successful: compression parent not being copied leads to
        empty_repo.lock_read()
        self.addCleanup(empty_repo.unlock)
        text = empty_repo.texts.get_record_stream(
            [('file2-id', 'rev3')], 'topological', True).next()
        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
class TestRepositoryPackCollection(TestCaseWithTransport):

    def get_format(self):
        return bzrdir.format_registry.make_bzrdir('pack-0.92')

        format = self.get_format()
        repo = self.make_repository('.', format=format)
        return repo._pack_collection

    def make_packs_and_alt_repo(self, write_lock=False):
        """Create a pack repo with 3 packs, and access it via a second repo."""
        tree = self.make_branch_and_tree('.', format=self.get_format())
        self.addCleanup(tree.unlock)
        rev1 = tree.commit('one')
        rev2 = tree.commit('two')
        rev3 = tree.commit('three')
        r = repository.Repository.open('.')
        self.addCleanup(r.unlock)
        packs = r._pack_collection
        packs.ensure_loaded()
        return tree, r, packs, [rev1, rev2, rev3]

    def test__clear_obsolete_packs(self):
        packs = self.get_packs()
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
        res = packs._clear_obsolete_packs()
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
        self.assertEqual([], obsolete_pack_trans.list_dir('.'))

    def test__clear_obsolete_packs_preserve(self):
        packs = self.get_packs()
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
                         sorted(obsolete_pack_trans.list_dir('.')))

    def test__max_pack_count(self):
        """The maximum pack count is a function of the number of revisions."""
        # no revisions - one pack, so that we can have a revision free repo
        # without it blowing up
        packs = self.get_packs()
        self.assertEqual(1, packs._max_pack_count(0))
        # after that the sum of the digits, - check the first 1-9
        self.assertEqual(1, packs._max_pack_count(1))
        self.assertEqual(2, packs._max_pack_count(2))
        self.assertEqual(3, packs._max_pack_count(3))
        self.assertEqual(4, packs._max_pack_count(4))
        self.assertEqual(5, packs._max_pack_count(5))
        self.assertEqual(6, packs._max_pack_count(6))
        self.assertEqual(7, packs._max_pack_count(7))
        self.assertEqual(8, packs._max_pack_count(8))
        self.assertEqual(9, packs._max_pack_count(9))
        # check the boundary cases with two digits for the next decade
        self.assertEqual(1, packs._max_pack_count(10))
        self.assertEqual(2, packs._max_pack_count(11))
        self.assertEqual(10, packs._max_pack_count(19))
        self.assertEqual(2, packs._max_pack_count(20))
        self.assertEqual(3, packs._max_pack_count(21))
        # check some arbitrary big numbers
        self.assertEqual(25, packs._max_pack_count(112894))
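        # Editor's sketch (restating the sum-of-digits rule the assertions
        # above rely on, not original code): _max_pack_count behaves roughly
        # like sum(int(d) for d in str(total_revisions)), with a minimum of
        # one pack for an empty repository, e.g. 112894 -> 1+1+2+8+9+4 = 25.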

    def test_repr(self):
        packs = self.get_packs()
        self.assertContainsRe(repr(packs),
            'RepositoryPackCollection(.*Repository(.*))')

    def test__obsolete_packs(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        # Schedule this one for removal
        packs._remove_pack_from_memory(pack)
        # Simulate a concurrent update by renaming the .pack file and one of
        packs.transport.rename('packs/%s.pack' % (names[0],),
                               'obsolete_packs/%s.pack' % (names[0],))
        packs.transport.rename('indices/%s.iix' % (names[0],),
                               'obsolete_packs/%s.iix' % (names[0],))
        # Now trigger the obsoletion, and ensure that all the remaining files
        packs._obsolete_packs([pack])
        self.assertEqual([n + '.pack' for n in names[1:]],
                         sorted(packs._pack_transport.list_dir('.')))
        # names[0] should not be present in the index anymore
        self.assertEqual(names[1:],
            sorted(set([osutils.splitext(n)[0] for n in
                        packs._index_transport.list_dir('.')])))

    def test__obsolete_packs_missing_directory(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        r.control_transport.rmdir('obsolete_packs')
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        # Schedule this one for removal
        packs._remove_pack_from_memory(pack)
        # Now trigger the obsoletion, and ensure that all the remaining files
        packs._obsolete_packs([pack])
        self.assertEqual([n + '.pack' for n in names[1:]],
                         sorted(packs._pack_transport.list_dir('.')))
        # names[0] should not be present in the index anymore
        self.assertEqual(names[1:],
            sorted(set([osutils.splitext(n)[0] for n in
                        packs._index_transport.list_dir('.')])))

    def test_pack_distribution_zero(self):
        packs = self.get_packs()
        self.assertEqual([0], packs.pack_distribution(0))

    def test_ensure_loaded_unlocked(self):
        packs = self.get_packs()
        self.assertRaises(errors.ObjectNotLocked,
                          packs.ensure_loaded)

    def test_pack_distribution_one_to_nine(self):
        packs = self.get_packs()
        self.assertEqual([1],
                         packs.pack_distribution(1))
        self.assertEqual([1, 1],
                         packs.pack_distribution(2))
        self.assertEqual([1, 1, 1],
                         packs.pack_distribution(3))
        self.assertEqual([1, 1, 1, 1],
                         packs.pack_distribution(4))
        self.assertEqual([1, 1, 1, 1, 1],
                         packs.pack_distribution(5))
        self.assertEqual([1, 1, 1, 1, 1, 1],
                         packs.pack_distribution(6))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1],
                         packs.pack_distribution(7))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
                         packs.pack_distribution(8))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
                         packs.pack_distribution(9))

    def test_pack_distribution_stable_at_boundaries(self):
        """When there are multi-rev packs the counts are stable."""
        packs = self.get_packs()
        self.assertEqual([10], packs.pack_distribution(10))
        self.assertEqual([10, 1], packs.pack_distribution(11))
        self.assertEqual([10, 10], packs.pack_distribution(20))
        self.assertEqual([10, 10, 1], packs.pack_distribution(21))
        self.assertEqual([100], packs.pack_distribution(100))
        self.assertEqual([100, 1], packs.pack_distribution(101))
        self.assertEqual([100, 10, 1], packs.pack_distribution(111))
        self.assertEqual([100, 100], packs.pack_distribution(200))
        self.assertEqual([100, 100, 1], packs.pack_distribution(201))
        self.assertEqual([100, 100, 10, 1], packs.pack_distribution(211))
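        # Editor's note (arithmetic behind the expectations above, not
        # original text): pack_distribution(n) decomposes n by decimal place,
        # e.g. 211 -> [100, 100, 10, 1] because 211 = 2x100 + 1x10 + 1x1.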

    def test_plan_pack_operations_2009_revisions_skip_all_packs(self):
        packs = self.get_packs()
        existing_packs = [(2000, "big"), (9, "medium")]
        # rev count - 2009 -> 2x1000 + 9x1
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 1, 1, 1, 1, 1, 1, 1, 1, 1])
        self.assertEqual([], pack_operations)

    def test_plan_pack_operations_2010_revisions_skip_all_packs(self):
        packs = self.get_packs()
        existing_packs = [(2000, "big"), (9, "medium"), (1, "single")]
        # rev count - 2010 -> 2x1000 + 1x10
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 10])
        self.assertEqual([], pack_operations)

    def test_plan_pack_operations_2010_combines_smallest_two(self):
        packs = self.get_packs()
        existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"),
        # rev count - 2010 -> 2x1000 + 1x10 (3)
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 10])
        self.assertEqual([[2, ["single2", "single1"]]], pack_operations)

    def test_plan_pack_operations_creates_a_single_op(self):
        packs = self.get_packs()
        existing_packs = [(50, 'a'), (40, 'b'), (30, 'c'), (10, 'd'),
                          (10, 'e'), (6, 'f'), (4, 'g')]
        # rev count 150 -> 1x100 and 5x10
        # The two size 10 packs do not need to be touched. The 50, 40, 30 would
        # be combined into a single 120 size pack, and the 6 & 4 would
        # be combined into a size 10 pack. However, if we have to rewrite them,
        # we save a pack file with no increased I/O by putting them into the
        distribution = packs.pack_distribution(150)
        pack_operations = packs.plan_autopack_combinations(existing_packs,
        self.assertEqual([[130, ['a', 'b', 'c', 'f', 'g']]], pack_operations)
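        # Editor's note (working out the expected figure above, not original
        # text): the single operation combines packs a, b, c, f and g, whose
        # revision counts 50 + 40 + 30 + 6 + 4 add up to the expected 130.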

    def test_all_packs_none(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([], packs.all_packs())

    def test_all_packs_one(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
            packs.get_pack_by_name(packs.names()[0])],

    def test_all_packs_two(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.commit('continue')
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
            packs.get_pack_by_name(packs.names()[0]),
            packs.get_pack_by_name(packs.names()[1]),
            ], packs.all_packs())

    def test_get_pack_by_name(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        name = packs.names()[0]
        pack_1 = packs.get_pack_by_name(name)
        # the pack should be correctly initialised
        sizes = packs._names[name]
        rev_index = GraphIndex(packs._index_transport, name + '.rix', sizes[0])
        inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1])
        txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2])
        sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3])
        self.assertEqual(pack_repo.ExistingPack(packs._pack_transport,
            name, rev_index, inv_index, txt_index, sig_index), pack_1)
        # and the same instance should be returned on successive calls.
        self.assertTrue(pack_1 is packs.get_pack_by_name(name))

    def test_reload_pack_names_new_entry(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Add a new pack file into the repository
        rev4 = tree.commit('four')
        new_names = tree.branch.repository._pack_collection.names()
        new_name = set(new_names).difference(names)
        self.assertEqual(1, len(new_name))
        new_name = new_name.pop()
        # The old collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        # And the repository can access the new revision
        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
        self.assertFalse(packs.reload_pack_names())

    def test_reload_pack_names_added_and_removed(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Now repack the whole thing
        tree.branch.repository.pack()
        new_names = tree.branch.repository._pack_collection.names()
        # The other collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
        self.assertFalse(packs.reload_pack_names())

    def test_reload_pack_names_preserves_pending(self):
        # TODO: Update this to also test for pending-deleted names
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # We will add one pack (via start_write_group + insert_record_stream),
        # and remove another pack (via _remove_pack_from_memory)
        orig_names = packs.names()
        orig_at_load = packs._packs_at_load
        to_remove_name = iter(orig_names).next()
        r.start_write_group()
        self.addCleanup(r.abort_write_group)
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
            ('text', 'rev'), (), None, 'content\n')])
        new_pack = packs._new_pack
        self.assertTrue(new_pack.data_inserted())
        packs.allocate(new_pack)
        packs._new_pack = None
        removed_pack = packs.get_pack_by_name(to_remove_name)
        packs._remove_pack_from_memory(removed_pack)
        names = packs.names()
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
            sorted([x[0][0] for x in deleted_nodes]))
        packs.reload_pack_names()
        reloaded_names = packs.names()
        self.assertEqual(orig_at_load, packs._packs_at_load)
        self.assertEqual(names, reloaded_names)
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
            sorted([x[0][0] for x in deleted_nodes]))

    def test_autopack_obsoletes_new_pack(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        r.start_write_group()
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
            ('bogus-rev',), (), None, 'bogus-content\n')])
        # This should trigger an autopack, which will combine everything into a
        new_names = r.commit_write_group()
        names = packs.names()
        self.assertEqual(1, len(names))
        self.assertEqual([names[0] + '.pack'],
                         packs._pack_transport.list_dir('.'))

    def test_autopack_reloads_and_stops(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # After we have determined what needs to be autopacked, trigger a
        # full-pack via the other repo which will cause us to re-evaluate and
        # decide we don't need to do anything
        orig_execute = packs._execute_pack_operations
        def _munged_execute_pack_ops(*args, **kwargs):
            tree.branch.repository.pack()
            return orig_execute(*args, **kwargs)
        packs._execute_pack_operations = _munged_execute_pack_ops
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        self.assertFalse(packs.autopack())
        self.assertEqual(1, len(packs.names()))
        self.assertEqual(tree.branch.repository._pack_collection.names(),

    def test__save_pack_names(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        packs._save_pack_names(obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # obsolete_packs will also have stuff like .rix and .iix present.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
        self.assertEqual([pack.name], sorted(obsolete_names))

    def test__save_pack_names_already_obsoleted(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        # We are going to simulate a concurrent autopack by manually obsoleting
        # the pack directly.
        packs._obsolete_packs([pack])
        packs._save_pack_names(clear_obsolete_packs=True,
                               obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # Note that while we set clear_obsolete_packs=True, it should not
        # delete a pack file that we have also scheduled for obsoletion.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
        self.assertEqual([pack.name], sorted(obsolete_names))

    def test_pack_no_obsolete_packs_directory(self):
        """Bug #314314, don't fail if obsolete_packs directory does
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        r.control_transport.rmdir('obsolete_packs')
        packs._clear_obsolete_packs()
class TestPack(TestCaseWithTransport):
    """Tests for the Pack object."""

    def assertCurrentlyEqual(self, left, right):
        self.assertTrue(left == right)
        self.assertTrue(right == left)
        self.assertFalse(left != right)
        self.assertFalse(right != left)

    def assertCurrentlyNotEqual(self, left, right):
        self.assertFalse(left == right)
        self.assertFalse(right == left)
        self.assertTrue(left != right)
        self.assertTrue(right != left)

    def test___eq____ne__(self):
        left = pack_repo.ExistingPack('', '', '', '', '', '')
        right = pack_repo.ExistingPack('', '', '', '', '', '')
        self.assertCurrentlyEqual(left, right)
        # change all attributes and ensure equality changes as we do.
        left.revision_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.revision_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.inventory_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.inventory_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.text_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.text_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.signature_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.signature_index = 'a'
        self.assertCurrentlyEqual(left, right)
        self.assertCurrentlyNotEqual(left, right)
        self.assertCurrentlyEqual(left, right)
        left.transport = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.transport = 'a'
        self.assertCurrentlyEqual(left, right)

    def test_file_name(self):
        pack = pack_repo.ExistingPack('', 'a_name', '', '', '', '')
        self.assertEqual('a_name.pack', pack.file_name())
class TestNewPack(TestCaseWithTransport):
1449
"""Tests for pack_repo.NewPack."""
1451
def test_new_instance_attributes(self):
1452
upload_transport = self.get_transport('upload')
1453
pack_transport = self.get_transport('pack')
1454
index_transport = self.get_transport('index')
1455
upload_transport.mkdir('.')
1456
collection = pack_repo.RepositoryPackCollection(
1458
transport=self.get_transport('.'),
1459
index_transport=index_transport,
1460
upload_transport=upload_transport,
1461
pack_transport=pack_transport,
1462
index_builder_class=BTreeBuilder,
1463
index_class=BTreeGraphIndex,
1464
use_chk_index=False)
1465
pack = pack_repo.NewPack(collection)
1466
self.addCleanup(pack.abort) # Make sure the write stream gets closed
1467
self.assertIsInstance(pack.revision_index, BTreeBuilder)
1468
self.assertIsInstance(pack.inventory_index, BTreeBuilder)
1469
self.assertIsInstance(pack._hash, type(osutils.md5()))
1470
self.assertTrue(pack.upload_transport is upload_transport)
1471
self.assertTrue(pack.index_transport is index_transport)
1472
self.assertTrue(pack.pack_transport is pack_transport)
1473
self.assertEqual(None, pack.index_sizes)
1474
self.assertEqual(20, len(pack.random_name))
1475
self.assertIsInstance(pack.random_name, str)
1476
self.assertIsInstance(pack.start_time, float)
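        # At this point the NewPack is still only a set of in-memory index
        # builders; its data is written under the 20-character random_name on
        # the upload transport and (presumably) only moved onto pack_transport
        # once the pack is finished, which is why the abort() cleanup above is
        # enough to discard it.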


class TestPacker(TestCaseWithTransport):
    """Tests for the packs repository Packer class."""

    def test_pack_optimizes_pack_order(self):
        builder = self.make_branch_builder('.', format="1.9")
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('f', 'f-id', 'file', 'content\n'))])
        builder.build_snapshot('B', ['A'],
            [('modify', ('f-id', 'new-content\n'))])
        builder.build_snapshot('C', ['B'],
            [('modify', ('f-id', 'third-content\n'))])
        builder.build_snapshot('D', ['C'],
            [('modify', ('f-id', 'fourth-content\n'))])
        b = builder.get_branch()
        b.lock_read()
        builder.finish_series()
        self.addCleanup(b.unlock)
        # At this point, we should have 4 pack files available
        # Because of how they were built, they correspond to
        # ['D', 'C', 'B', 'A']
        packs = b.repository._pack_collection.packs
        packer = knitpack_repo.KnitPacker(b.repository._pack_collection,
                                          packs, 'testing',
                                          revision_ids=['B', 'C'])
        # Now, when we are copying the B & C revisions, their pack files should
        # be moved to the front of the stack
        # The new ordering moves B & C to the front of the .packs attribute,
        # and leaves the others in the original order.
        new_packs = [packs[1], packs[2], packs[0], packs[3]]
        new_pack = packer.pack()
        self.assertEqual(new_packs, packer.packs)
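        # A worked example of the reordering asserted above, given that the
        # four packs were created newest-first as ['D', 'C', 'B', 'A']:
        #   packs == [pack_D, pack_C, pack_B, pack_A]
        #   the requested revisions B and C live in packs[2] and packs[1]
        #   packer.packs == [pack_C, pack_B, pack_D, pack_A] after pack()
        # i.e. the packs holding the requested revisions move to the front and
        # the remainder keep their original relative order.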


class TestOptimisingPacker(TestCaseWithTransport):
    """Tests for the OptimisingPacker class."""

    def get_pack_collection(self):
        repo = self.make_repository('.')
        return repo._pack_collection

    def test_open_pack_will_optimise(self):
        packer = knitpack_repo.OptimisingKnitPacker(self.get_pack_collection(),
                                                    [], '.test-pack')
        new_pack = packer.open_pack()
        self.addCleanup(new_pack.abort)  # ensure cleanup
        self.assertIsInstance(new_pack, pack_repo.NewPack)
        self.assertTrue(new_pack.revision_index._optimize_for_size)
        self.assertTrue(new_pack.inventory_index._optimize_for_size)
        self.assertTrue(new_pack.text_index._optimize_for_size)
        self.assertTrue(new_pack.signature_index._optimize_for_size)
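        # _optimize_for_size is the flag the index builders use to favour
        # smaller output over build speed; the point of this test is simply
        # that OptimisingKnitPacker sets it on every index of the pack it
        # opens.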


class TestGCCHKPacker(TestCaseWithTransport):

    def make_abc_branch(self):
        builder = self.make_branch_builder('source')
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('file', 'file-id', 'file', 'content\n')),
            ])
        builder.build_snapshot('B', ['A'], [
            ('add', ('dir', 'dir-id', 'directory', None))])
        builder.build_snapshot('C', ['B'], [
            ('modify', ('file-id', 'new content\n'))])
        builder.finish_series()
        return builder.get_branch()
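    # make_abc_branch() gives a linear history A -> B -> C: A adds 'file',
    # B adds 'dir', and C modifies 'file'.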

    def make_branch_with_disjoint_inventory_and_revision(self):
        """Make a repo with separate packs for a revision's Revision and Inventory.

        There will be one pack file that holds the Revision content, and one
        for the Inventory content.

        :return: (repository,
                  pack_name_with_rev_A_Revision,
                  pack_name_with_rev_A_Inventory,
                  pack_name_with_rev_C_content)
        """
        b_source = self.make_abc_branch()
        b_base = b_source.bzrdir.sprout('base', revision_id='A').open_branch()
        b_stacked = b_base.bzrdir.sprout('stacked', stacked=True).open_branch()
        b_stacked.lock_write()
        self.addCleanup(b_stacked.unlock)
        b_stacked.fetch(b_source, 'B')
        # Now re-open the stacked repo directly (no fallbacks) so that we can
        # fill in the A rev.
        repo_not_stacked = b_stacked.bzrdir.open_repository()
        repo_not_stacked.lock_write()
        self.addCleanup(repo_not_stacked.unlock)
        # Now we should have a pack file with A's inventory, but not its
        # Revision.
        self.assertEqual([('A',), ('B',)],
                         sorted(repo_not_stacked.inventories.keys()))
        self.assertEqual([('B',)],
                         sorted(repo_not_stacked.revisions.keys()))
        stacked_pack_names = repo_not_stacked._pack_collection.names()
        # We have a couple names here, figure out which has A's inventory
        for name in stacked_pack_names:
            pack = repo_not_stacked._pack_collection.get_pack_by_name(name)
            keys = [n[1] for n in pack.inventory_index.iter_all_entries()]
            if ('A',) in keys:
                inv_a_pack_name = name
                break
        else:
            self.fail('Could not find pack containing A\'s inventory')
        repo_not_stacked.fetch(b_source.repository, 'A')
        self.assertEqual([('A',), ('B',)],
                         sorted(repo_not_stacked.revisions.keys()))
        new_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_a_pack_names = new_pack_names.difference(stacked_pack_names)
        self.assertEqual(1, len(rev_a_pack_names))
        rev_a_pack_name = list(rev_a_pack_names)[0]
        # Now fetch 'C', so we have a couple pack files to join
        repo_not_stacked.fetch(b_source.repository, 'C')
        rev_c_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_c_pack_names = rev_c_pack_names.difference(new_pack_names)
        self.assertEqual(1, len(rev_c_pack_names))
        rev_c_pack_name = list(rev_c_pack_names)[0]
        return (repo_not_stacked, rev_a_pack_name, inv_a_pack_name,
                rev_c_pack_name)
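    # Net effect of the helper above (pack names are whatever the collection
    # generated):
    #   inv_a_pack_name -> pack written by the stacked fetch, holding A's
    #                      inventory but not A's revision
    #   rev_a_pack_name -> pack written by the explicit fetch of 'A', holding
    #                      A's revision
    #   rev_c_pack_name -> pack written by the fetch of 'C'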

    def test_pack_with_distant_inventories(self):
        # See https://bugs.launchpad.net/bzr/+bug/437003
        # When repacking, it is possible to have an inventory in a different
        # pack file than the associated revision. An autopack can then come
        # along, and miss that inventory, and complain.
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        a_pack = repo._pack_collection.get_pack_by_name(rev_a_pack_name)
        c_pack = repo._pack_collection.get_pack_by_name(rev_c_pack_name)
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
                    [a_pack, c_pack], '.test-pack')
        # This would raise ValueError in bug #437003, but should not raise an
        # error any more.
        packer.pack()

    def test_pack_with_missing_inventory(self):
        # Similar to test_pack_with_distant_inventories, but this time, we
        # force the A inventory to actually be gone from the repository.
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        inv_a_pack = repo._pack_collection.get_pack_by_name(inv_a_pack_name)
        repo._pack_collection._remove_pack_from_memory(inv_a_pack)
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
                    repo._pack_collection.all_packs(), '.test-pack')
        e = self.assertRaises(ValueError, packer.pack)
        packer.new_pack.abort()
        self.assertContainsRe(str(e),
            r"We are missing inventories for revisions: .*'A'")


class TestCrossFormatPacks(TestCaseWithTransport):

    def log_pack(self, hint=None):
        self.calls.append(('pack', hint))
        self.orig_pack(hint=hint)
        if self.expect_hint:
            self.assertTrue(hint)

    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository._get_source(target._format)
        self.orig_pack = target.pack
        self.overrideAttr(target, "pack", self.log_pack)
        search = target.search_missing_revision_ids(
            source_tree.branch.repository, revision_ids=[tip])
        stream = source.get_stream(search)
        from_format = source_tree.branch.repository._format
        sink = target._get_sink()
        sink.insert_stream(stream, from_format, [])
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository
        self.orig_pack = target.pack
        self.overrideAttr(target, "pack", self.log_pack)
        target.fetch(source)
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)
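    # Both helpers above replace target.pack with log_pack, so the tests
    # below can observe whether insert_stream()/fetch() decided to pack the
    # target repository and, when a pack was expected, that a non-empty hint
    # (presumably the list of packs worth combining) was passed along.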

    def test_sink_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_stream('1.9', 'rich-root-pack', False)

    def test_sink_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_stream('1.9', '2a', True)

    def test_sink_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_stream('2a', '2a', False)

    def test_IDS_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_fetch('1.9', 'rich-root-pack', False)

    def test_IDS_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_fetch('1.9', '2a', True)

    def test_IDS_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_fetch('2a', '2a', False)
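    # The expectations above mirror the target format's packing hint:
    # streaming from '1.9' into 'rich-root-pack' (presumably close enough in
    # on-disk layout) reports that packing makes no difference, while
    # converting into '2a' does, so a pack() call is expected there; identical
    # formats never need a repack.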


class Test_LazyListJoin(tests.TestCase):

    def test__repr__(self):
        lazy = repository._LazyListJoin(['a'], ['b'])
        self.assertEqual("bzrlib.repository._LazyListJoin((['a'], ['b']))",
                         repr(lazy))


class TestFeatures(tests.TestCaseWithTransport):

    def test_open_with_present_feature(self):
        self.addCleanup(
            repository.RepositoryFormatMetaDir.unregister_feature,
            "makes-cheese-sandwich")
        repository.RepositoryFormatMetaDir.register_feature(
            "makes-cheese-sandwich")
        repo = self.make_repository('.')
        repo._format.features["makes-cheese-sandwich"] = "required"
        repo._format.check_support_status(False)

    def test_open_with_missing_required_feature(self):
        repo = self.make_repository('.')
        repo._format.features["makes-cheese-sandwich"] = "required"
        self.assertRaises(errors.MissingFeature,
            repo._format.check_support_status, False)