        self.assertTrue(isinstance(target_format, repo._format.__class__))


class TestMisc(TestCase):

    def test_unescape_xml(self):
        """We get some kind of error when malformed entities are passed"""
        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;')


class TestRepositoryFormatKnit3(TestCaseWithTransport):

    def test_attribute__fetch_order(self):
        """Knits need topological data insertion."""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertEqual('topological', repo._format._fetch_order)

    def test_attribute__fetch_uses_deltas(self):
        """Knits reuse deltas."""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertEqual(True, repo._format._fetch_uses_deltas)

    def test_convert(self):
        """Ensure the upgrade adds weaves for roots"""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit1()
        tree = self.make_branch_and_tree('.', format)
        tree.commit("Dull commit", rev_id="dull")
        revision_tree = tree.branch.repository.revision_tree('dull')
        revision_tree.lock_read()
        try:
            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
                revision_tree.inventory.root.file_id)
        finally:
            revision_tree.unlock()
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        upgrade.Convert('.', format)
        tree = workingtree.WorkingTree.open('.')
        revision_tree = tree.branch.repository.revision_tree('dull')
        revision_tree.lock_read()
        try:
            revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
        finally:
            revision_tree.unlock()
        tree.commit("Another dull commit", rev_id='dull2')
        revision_tree = tree.branch.repository.revision_tree('dull2')
        revision_tree.lock_read()
        self.addCleanup(revision_tree.unlock)
        self.assertEqual('dull', revision_tree.inventory.root.revision)

    def test_supports_external_lookups(self):
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertFalse(repo._format.supports_external_lookups)


class Test2a(tests.TestCaseWithMemoryTransport):

    def test_chk_bytes_uses_custom_btree_parser(self):
        mt = self.make_branch_and_memory_tree('test', format='2a')
        mt.lock_write()
        self.addCleanup(mt.unlock)
        mt.add([''], ['root-id'])
        mt.commit('first')
        index = mt.branch.repository.chk_bytes._index._graph_index._indices[0]
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
        # It should also work if we re-open the repo
        repo = mt.branch.repository.bzrdir.open_repository()
        repo.lock_read()
        self.addCleanup(repo.unlock)
        index = repo.chk_bytes._index._graph_index._indices[0]
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)

    def test_fetch_combines_groups(self):
        builder = self.make_branch_builder('source', format='2a')
        builder.start_series()
        builder.build_snapshot('1', None, [
            ('add', ('', 'root-id', 'directory', '')),
            ('add', ('file', 'file-id', 'file', 'content\n'))])
        builder.build_snapshot('2', ['1'], [
            ('modify', ('file-id', 'content-2\n'))])
        builder.finish_series()
        source = builder.get_branch()
        target = self.make_repository('target', format='2a')
        target.fetch(source.repository)
        target.lock_read()
        self.addCleanup(target.unlock)
        details = target.texts._index.get_build_details(
            [('file-id', '1',), ('file-id', '2',)])
        file_1_details = details[('file-id', '1')]
        file_2_details = details[('file-id', '2')]
        # The index, and what to read off disk, should be the same for both
        # versions of the file.
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])

    def test_format_pack_compresses_True(self):
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo._format.pack_compresses)

    def test_inventories_use_chk_map_with_parent_base_dict(self):
        tree = self.make_branch_and_memory_tree('repo', format="2a")
        tree.lock_write()
        tree.add([''], ['TREE_ROOT'])
        revid = tree.commit("foo")
        tree.unlock()
        tree.lock_read()
        self.addCleanup(tree.unlock)
        inv = tree.branch.repository.get_inventory(revid)
        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
        inv.parent_id_basename_to_file_id._ensure_root()
        inv.id_to_entry._ensure_root()
        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
        self.assertEqual(65536,
            inv.parent_id_basename_to_file_id._root_node.maximum_size)

    def test_autopack_unchanged_chk_nodes(self):
        # at 20 unchanged commits, chk pages are packed that are split into
        # two groups such that the new pack being made doesn't have all its
        # pages in the source packs (though they are in the repository).
        # Use a memory backed repository, we don't need to hit disk for this
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        for pos in range(20):
            tree.commit(str(pos))

    def test_pack_with_hint(self):
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        # 1 commit to leave untouched
        tree.commit('1')
        to_keep = tree.branch.repository._pack_collection.names()
        # 2 more commits to combine
        tree.commit('2')
        tree.commit('3')
        all = tree.branch.repository._pack_collection.names()
        combine = list(set(all) - set(to_keep))
        self.assertLength(3, all)
        self.assertLength(2, combine)
        tree.branch.repository.pack(hint=combine)
        final = tree.branch.repository._pack_collection.names()
        self.assertLength(2, final)
        self.assertFalse(combine[0] in final)
        self.assertFalse(combine[1] in final)
        self.assertSubset(to_keep, final)

    def test_stream_source_to_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='2a')
        stream = source._get_source(target._format)
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)

    def test_stream_source_to_non_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='rich-root-pack')
        stream = source._get_source(target._format)
        # We don't want the child GroupCHKStreamSource
        self.assertIs(type(stream), repository.StreamSource)

    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
        source_builder = self.make_branch_builder('source',
            format='2a')
        # We have to build a fairly large tree, so that we are sure the chk
        # pages will have split into multiple pages.
        entries = [('add', ('', 'a-root-id', 'directory', None))]
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                fname = i + j
                fid = fname + '-id'
                content = 'content for %s\n' % (fname,)
                entries.append(('add', (fname, fid, 'file', content)))
        source_builder.start_series()
        source_builder.build_snapshot('rev-1', None, entries)
        # Now change a few of them, so we get a few new pages for the second
        # revision
        source_builder.build_snapshot('rev-2', ['rev-1'], [
            ('modify', ('aa-id', 'new content for aa-id\n')),
            ('modify', ('cc-id', 'new content for cc-id\n')),
            ('modify', ('zz-id', 'new content for zz-id\n')),
            ])
        source_builder.finish_series()
        source_branch = source_builder.get_branch()
        source_branch.lock_read()
        self.addCleanup(source_branch.unlock)
        target = self.make_repository('target', format='2a')
        source = source_branch.repository._get_source(target._format)
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)

        # On a regular pass, getting the inventories and chk pages for rev-2
        # would only get the newly created chk pages
        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
                                    set(['rev-2']))
        simple_chk_records = []
        for vf_name, substream in source.get_stream(search):
            if vf_name == 'chk_bytes':
                for record in substream:
                    simple_chk_records.append(record.key)
            else:
                for _ in substream:
                    continue
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
                         simple_chk_records)
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
        # we should get a much larger set of pages.
        missing = [('inventories', 'rev-2')]
        full_chk_records = []
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
            if vf_name == 'inventories':
                for record in substream:
                    self.assertEqual(('rev-2',), record.key)
            elif vf_name == 'chk_bytes':
                for record in substream:
                    full_chk_records.append(record.key)
            else:
                self.fail('Should not be getting a stream of %s' % (vf_name,))
        # We have 257 records now. This is because we have 1 root page, and 256
        # leaf pages in a complete listing.
        self.assertEqual(257, len(full_chk_records))
        self.assertSubset(simple_chk_records, full_chk_records)

    def test_inconsistency_fatal(self):
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo.revisions._index._inconsistency_fatal)
        self.assertFalse(repo.texts._index._inconsistency_fatal)
        self.assertFalse(repo.inventories._index._inconsistency_fatal)
        self.assertFalse(repo.signatures._index._inconsistency_fatal)
        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)


class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):

    def test_source_to_exact_pack_092(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='pack-0.92')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_rich_root_pack(self):
        source = self.make_repository('source', format='rich-root-pack')
        target = self.make_repository('target', format='rich-root-pack')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19_rich_root(self):
        source = self.make_repository('source', format='1.9-rich-root')
        target = self.make_repository('target', format='1.9-rich-root')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_remote_exact_pack_19(self):
        trans = self.make_smart_server('target')
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_stream_source_to_non_exact(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='1.9')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), repository.StreamSource)

    def test_stream_source_to_non_exact_rich_root(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9-rich-root')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), repository.StreamSource)

    def test_source_to_remote_non_exact_pack_19(self):
        trans = self.make_smart_server('target')
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.6')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIs(type(stream_source), repository.StreamSource)

    def test_stream_source_to_knit(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='dirstate')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), repository.StreamSource)


class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
    """Tests for _find_parent_ids_of_revisions."""

    def setUp(self):
        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
        self.builder = self.make_branch_builder('source')
        self.builder.start_series()
        self.builder.build_snapshot('initial', None,
            [('add', ('', 'tree-root', 'directory', None))])
        self.repo = self.builder.get_branch().repository
        self.addCleanup(self.builder.finish_series)

    def assertParentIds(self, expected_result, rev_set):
        self.assertEqual(sorted(expected_result),
            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))

    def test_simple(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        rev_set = ['revid2']
        self.assertParentIds(['revid1'], rev_set)

    def test_not_first_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2'], [])
        rev_set = ['revid3', 'revid2']
        self.assertParentIds(['revid1'], rev_set)

    def test_not_null(self):
        rev_set = ['initial']
        self.assertParentIds([], rev_set)

    def test_not_null_set(self):
        self.builder.build_snapshot('revid1', None, [])
        rev_set = [_mod_revision.NULL_REVISION]
        self.assertParentIds([], rev_set)

    def test_ghost(self):
        self.builder.build_snapshot('revid1', None, [])
        rev_set = ['ghost', 'revid1']
        self.assertParentIds(['initial'], rev_set)

    def test_ghost_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
        rev_set = ['revid2', 'revid1']
        self.assertParentIds(['ghost', 'initial'], rev_set)

    def test_righthand_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2a', ['revid1'], [])
        self.builder.build_snapshot('revid2b', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
        rev_set = ['revid3', 'revid2a']
        self.assertParentIds(['revid1', 'revid2b'], rev_set)


class TestWithBrokenRepo(TestCaseWithTransport):
    """These tests seem to be more appropriate as interface tests?"""

    def make_broken_repository(self):
        # XXX: This function is borrowed from Aaron's "Reconcile can fix bad
        # parent references" branch which is due to land in bzr.dev soon. Once
        # it does, this duplication should be removed.
        repo = self.make_repository('broken-repo')
        cleanups = []
        try:
            repo.lock_write()
            cleanups.append(repo.unlock)
            repo.start_write_group()
            cleanups.append(repo.commit_write_group)
            # make rev1a: A well-formed revision, containing 'file1'
            inv = inventory.Inventory(revision_id='rev1a')
            inv.root.revision = 'rev1a'
            self.add_file(repo, inv, 'file1', 'rev1a', [])
            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
            repo.add_inventory('rev1a', inv, [])
            revision = _mod_revision.Revision('rev1a',
                committer='jrandom@example.com', timestamp=0,
                inventory_sha1='', timezone=0, message='foo', parent_ids=[])
            repo.add_revision('rev1a', revision, inv)

            # make rev1b, which has no Revision, but has an Inventory, and
            # file1
            inv = inventory.Inventory(revision_id='rev1b')
            inv.root.revision = 'rev1b'
            self.add_file(repo, inv, 'file1', 'rev1b', [])
            repo.add_inventory('rev1b', inv, [])

            # make rev2, with file1 and file2
            # file1 has 'rev1b' as an ancestor, even though this is not
            # mentioned by 'rev1a', making it an unreferenced ancestor
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file1', 'rev2', ['rev1a', 'rev1b'])
            self.add_file(repo, inv, 'file2', 'rev2', [])
            self.add_revision(repo, 'rev2', inv, ['rev1a'])

            # make ghost revision rev1c
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev1c', [])

            # make rev3 with file2
            # file2 refers to 'rev1c', which is a ghost in this repository, so
            # file2 cannot have rev1c as its ancestor.
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev3', ['rev1c'])
            self.add_revision(repo, 'rev3', inv, ['rev1c'])
            return repo
        finally:
            for cleanup in reversed(cleanups):
                cleanup()

    def add_revision(self, repo, revision_id, inv, parent_ids):
        inv.revision_id = revision_id
        inv.root.revision = revision_id
        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
        repo.add_inventory(revision_id, inv, parent_ids)
        revision = _mod_revision.Revision(revision_id,
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
            timezone=0, message='foo', parent_ids=parent_ids)
        repo.add_revision(revision_id, revision, inv)

    def add_file(self, repo, inv, filename, revision, parents):
        file_id = filename + '-id'
        entry = inventory.InventoryFile(file_id, filename, 'TREE_ROOT')
        entry.revision = revision
        entry.text_size = 0
        inv.add(entry)
        text_key = (file_id, revision)
        parent_keys = [(file_id, parent) for parent in parents]
        repo.texts.add_lines(text_key, parent_keys, ['line\n'])

    def test_insert_from_broken_repo(self):
        """Inserting a data stream from a broken repository won't silently
        corrupt the target repository.
        """
        broken_repo = self.make_broken_repository()
        empty_repo = self.make_repository('empty-repo')
        try:
            empty_repo.fetch(broken_repo)
        except (errors.RevisionNotPresent, errors.BzrCheckError):
            # Test successful: compression parent not being copied leads to
            # error.
            return
        empty_repo.lock_read()
        self.addCleanup(empty_repo.unlock)
        text = empty_repo.texts.get_record_stream(
            [('file2-id', 'rev3')], 'topological', True).next()
        self.assertEqual('line\n', text.get_bytes_as('fulltext'))


class TestRepositoryPackCollection(TestCaseWithTransport):

    def get_format(self):
        return bzrdir.format_registry.make_bzrdir('pack-0.92')

    def get_packs(self):
        format = self.get_format()
        repo = self.make_repository('.', format=format)
        return repo._pack_collection

    def make_packs_and_alt_repo(self, write_lock=False):
        """Create a pack repo with 3 packs, and access it via a second repo."""
        tree = self.make_branch_and_tree('.', format=self.get_format())
        tree.lock_write()
        self.addCleanup(tree.unlock)
        rev1 = tree.commit('one')
        rev2 = tree.commit('two')
        rev3 = tree.commit('three')
        r = repository.Repository.open('.')
        if write_lock:
            r.lock_write()
        else:
            r.lock_read()
        self.addCleanup(r.unlock)
        packs = r._pack_collection
        packs.ensure_loaded()
        return tree, r, packs, [rev1, rev2, rev3]

    def test__clear_obsolete_packs(self):
        packs = self.get_packs()
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
        res = packs._clear_obsolete_packs()
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
        self.assertEqual([], obsolete_pack_trans.list_dir('.'))

    def test__clear_obsolete_packs_preserve(self):
        packs = self.get_packs()
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
                         sorted(obsolete_pack_trans.list_dir('.')))
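
    # Note on the file names used above: as exercised by test_get_pack_by_name
    # further down, a pack's data lives in '<name>.pack' while its indices use
    # the suffixes '.rix' (revisions), '.iix' (inventories), '.tix' (texts)
    # and '.six' (signatures). The two tests above show that
    # _clear_obsolete_packs reports only the stems that had a '.pack' file,
    # even though every file under obsolete_packs is cleared (or preserved).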

    def test__max_pack_count(self):
        """The maximum pack count is a function of the number of revisions."""
        # no revisions - one pack, so that we can have a revision free repo
        # without it blowing up
        packs = self.get_packs()
        self.assertEqual(1, packs._max_pack_count(0))
        # after that the sum of the digits, - check the first 1-9
        self.assertEqual(1, packs._max_pack_count(1))
        self.assertEqual(2, packs._max_pack_count(2))
        self.assertEqual(3, packs._max_pack_count(3))
        self.assertEqual(4, packs._max_pack_count(4))
        self.assertEqual(5, packs._max_pack_count(5))
        self.assertEqual(6, packs._max_pack_count(6))
        self.assertEqual(7, packs._max_pack_count(7))
        self.assertEqual(8, packs._max_pack_count(8))
        self.assertEqual(9, packs._max_pack_count(9))
        # check the boundary cases with two digits for the next decade
        self.assertEqual(1, packs._max_pack_count(10))
        self.assertEqual(2, packs._max_pack_count(11))
        self.assertEqual(10, packs._max_pack_count(19))
        self.assertEqual(2, packs._max_pack_count(20))
        self.assertEqual(3, packs._max_pack_count(21))
        # check some arbitrary big numbers
        self.assertEqual(25, packs._max_pack_count(112894))
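
    # The values above follow a "sum of decimal digits" rule: with N revisions
    # the collection allows at most sum(int(d) for d in str(N)) packs, with
    # N == 0 special-cased to 1. A minimal sketch of that rule (hypothetical
    # helper, not part of bzrlib), kept as a comment so module behaviour is
    # unchanged:
    #
    #   def expected_max_pack_count(total_revisions):
    #       if total_revisions == 0:
    #           return 1
    #       return sum(int(digit) for digit in str(total_revisions))
    #
    #   expected_max_pack_count(112894) == 1 + 1 + 2 + 8 + 9 + 4 == 25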

    def test_repr(self):
        packs = self.get_packs()
        self.assertContainsRe(repr(packs),
            'RepositoryPackCollection(.*Repository(.*))')

    def test__obsolete_packs(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        # Schedule this one for removal
        packs._remove_pack_from_memory(pack)
        # Simulate a concurrent update by renaming the .pack file and one of
        # its indices out of the way already
        packs.transport.rename('packs/%s.pack' % (names[0],),
                               'obsolete_packs/%s.pack' % (names[0],))
        packs.transport.rename('indices/%s.iix' % (names[0],),
                               'obsolete_packs/%s.iix' % (names[0],))
        # Now trigger the obsoletion, and ensure that all the remaining files
        # for that pack are moved out of the packs and indices directories
        packs._obsolete_packs([pack])
        self.assertEqual([n + '.pack' for n in names[1:]],
                         sorted(packs._pack_transport.list_dir('.')))
        # names[0] should not be present in the index anymore
        self.assertEqual(names[1:],
            sorted(set([osutils.splitext(n)[0] for n in
                        packs._index_transport.list_dir('.')])))

    def test_pack_distribution_zero(self):
        packs = self.get_packs()
        self.assertEqual([0], packs.pack_distribution(0))

    def test_ensure_loaded_unlocked(self):
        packs = self.get_packs()
        self.assertRaises(errors.ObjectNotLocked,
                          packs.ensure_loaded)

    def test_pack_distribution_one_to_nine(self):
        packs = self.get_packs()
        self.assertEqual([1],
            packs.pack_distribution(1))
        self.assertEqual([1, 1],
            packs.pack_distribution(2))
        self.assertEqual([1, 1, 1],
            packs.pack_distribution(3))
        self.assertEqual([1, 1, 1, 1],
            packs.pack_distribution(4))
        self.assertEqual([1, 1, 1, 1, 1],
            packs.pack_distribution(5))
        self.assertEqual([1, 1, 1, 1, 1, 1],
            packs.pack_distribution(6))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(7))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(8))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(9))

    def test_pack_distribution_stable_at_boundaries(self):
        """When there are multi-rev packs the counts are stable."""
        packs = self.get_packs()
        self.assertEqual([10], packs.pack_distribution(10))
        self.assertEqual([10, 1], packs.pack_distribution(11))
        self.assertEqual([10, 10], packs.pack_distribution(20))
        self.assertEqual([10, 10, 1], packs.pack_distribution(21))

        self.assertEqual([100], packs.pack_distribution(100))
        self.assertEqual([100, 1], packs.pack_distribution(101))
        self.assertEqual([100, 10, 1], packs.pack_distribution(111))
        self.assertEqual([100, 100], packs.pack_distribution(200))
        self.assertEqual([100, 100, 1], packs.pack_distribution(201))
        self.assertEqual([100, 100, 10, 1], packs.pack_distribution(211))
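
    # pack_distribution appears to decompose the revision count by decimal
    # digit: each digit d at place value 10**k contributes d buckets of size
    # 10**k. A minimal sketch of that reading (hypothetical helper, not part
    # of bzrlib), kept as a comment so module behaviour is unchanged:
    #
    #   def expected_distribution(total_revisions):
    #       result, size = [], 1
    #       while total_revisions:
    #           total_revisions, digit = divmod(total_revisions, 10)
    #           result = [size] * digit + result
    #           size *= 10
    #       return result or [0]
    #
    #   expected_distribution(211) == [100, 100, 10, 1]
    #   expected_distribution(0) == [0]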

    def test_plan_pack_operations_2009_revisions_skip_all_packs(self):
        packs = self.get_packs()
        existing_packs = [(2000, "big"), (9, "medium")]
        # rev count - 2009 -> 2x1000 + 9x1
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 1, 1, 1, 1, 1, 1, 1, 1, 1])
        self.assertEqual([], pack_operations)

    def test_plan_pack_operations_2010_revisions_skip_all_packs(self):
        packs = self.get_packs()
        existing_packs = [(2000, "big"), (9, "medium"), (1, "single")]
        # rev count - 2010 -> 2x1000 + 1x10
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 10])
        self.assertEqual([], pack_operations)

    def test_plan_pack_operations_2010_combines_smallest_two(self):
        packs = self.get_packs()
        existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"),
                          (1, "single1")]
        # rev count - 2010 -> 2x1000 + 1x10 (3)
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 10])
        self.assertEqual([[2, ["single2", "single1"]]], pack_operations)

    def test_plan_pack_operations_creates_a_single_op(self):
        packs = self.get_packs()
        existing_packs = [(50, 'a'), (40, 'b'), (30, 'c'), (10, 'd'),
                          (10, 'e'), (6, 'f'), (4, 'g')]
        # rev count 150 -> 1x100 and 5x10
        # The two size 10 packs do not need to be touched. The 50, 40, 30 would
        # be combined into a single 120 size pack, and the 6 & 4 would be
        # combined into a size 10 pack. However, if we have to rewrite them,
        # we save a pack file with no increased I/O by putting them into the
        # same pack file.
        distribution = packs.pack_distribution(150)
        pack_operations = packs.plan_autopack_combinations(existing_packs,
                                                           distribution)
        self.assertEqual([[130, ['a', 'b', 'c', 'f', 'g']]], pack_operations)

    def test_all_packs_none(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([], packs.all_packs())

    def test_all_packs_one(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([
            packs.get_pack_by_name(packs.names()[0])],
            packs.all_packs())

    def test_all_packs_two(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.commit('continue')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([
            packs.get_pack_by_name(packs.names()[0]),
            packs.get_pack_by_name(packs.names()[1]),
            ], packs.all_packs())

    def test_get_pack_by_name(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        name = packs.names()[0]
        pack_1 = packs.get_pack_by_name(name)
        # the pack should be correctly initialised
        sizes = packs._names[name]
        rev_index = GraphIndex(packs._index_transport, name + '.rix', sizes[0])
        inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1])
        txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2])
        sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3])
        self.assertEqual(pack_repo.ExistingPack(packs._pack_transport,
            name, rev_index, inv_index, txt_index, sig_index), pack_1)
        # and the same instance should be returned on successive calls.
        self.assertTrue(pack_1 is packs.get_pack_by_name(name))

    def test_reload_pack_names_new_entry(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Add a new pack file into the repository
        rev4 = tree.commit('four')
        new_names = tree.branch.repository._pack_collection.names()
        new_name = set(new_names).difference(names)
        self.assertEqual(1, len(new_name))
        new_name = new_name.pop()
        # The old collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        # And the repository can access the new revision
        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
        self.assertFalse(packs.reload_pack_names())

    def test_reload_pack_names_added_and_removed(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Now repack the whole thing
        tree.branch.repository.pack()
        new_names = tree.branch.repository._pack_collection.names()
        # The other collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
        self.assertFalse(packs.reload_pack_names())

    def test_reload_pack_names_preserves_pending(self):
        # TODO: Update this to also test for pending-deleted names
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # We will add one pack (via start_write_group + insert_record_stream),
        # and remove another pack (via _remove_pack_from_memory)
        orig_names = packs.names()
        orig_at_load = packs._packs_at_load
        to_remove_name = iter(orig_names).next()
        r.start_write_group()
        self.addCleanup(r.abort_write_group)
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
            ('text', 'rev'), (), None, 'content\n')])
        new_pack = packs._new_pack
        self.assertTrue(new_pack.data_inserted())
        new_pack.finish()
        packs.allocate(new_pack)
        packs._new_pack = None
        removed_pack = packs.get_pack_by_name(to_remove_name)
        packs._remove_pack_from_memory(removed_pack)
        names = packs.names()
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))
        packs.reload_pack_names()
        reloaded_names = packs.names()
        self.assertEqual(orig_at_load, packs._packs_at_load)
        self.assertEqual(names, reloaded_names)
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))

    def test_autopack_obsoletes_new_pack(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        r.start_write_group()
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
            ('bogus-rev',), (), None, 'bogus-content\n')])
        # This should trigger an autopack, which will combine everything into a
        # single pack file.
        new_names = r.commit_write_group()
        names = packs.names()
        self.assertEqual(1, len(names))
        self.assertEqual([names[0] + '.pack'],
                         packs._pack_transport.list_dir('.'))

    def test_autopack_reloads_and_stops(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # After we have determined what needs to be autopacked, trigger a
        # full-pack via the other repo which will cause us to re-evaluate and
        # decide we don't need to do anything
        orig_execute = packs._execute_pack_operations
        def _munged_execute_pack_ops(*args, **kwargs):
            tree.branch.repository.pack()
            return orig_execute(*args, **kwargs)
        packs._execute_pack_operations = _munged_execute_pack_ops
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        self.assertFalse(packs.autopack())
        self.assertEqual(1, len(packs.names()))
        self.assertEqual(tree.branch.repository._pack_collection.names(),
                         packs.names())

    def test__save_pack_names(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        packs._save_pack_names(obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # obsolete_packs will also have stuff like .rix and .iix present.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
        self.assertEqual([pack.name], sorted(obsolete_names))

    def test__save_pack_names_already_obsoleted(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        # We are going to simulate a concurrent autopack by manually obsoleting
        # the pack directly.
        packs._obsolete_packs([pack])
        packs._save_pack_names(clear_obsolete_packs=True,
                               obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # Note that while we set clear_obsolete_packs=True, it should not
        # delete a pack file that we have also scheduled for obsoletion.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
        self.assertEqual([pack.name], sorted(obsolete_names))


class TestPack(TestCaseWithTransport):
    """Tests for the Pack object."""

    def assertCurrentlyEqual(self, left, right):
        self.assertTrue(left == right)
        self.assertTrue(right == left)
        self.assertFalse(left != right)
        self.assertFalse(right != left)

    def assertCurrentlyNotEqual(self, left, right):
        self.assertFalse(left == right)
        self.assertFalse(right == left)
        self.assertTrue(left != right)
        self.assertTrue(right != left)

    def test___eq____ne__(self):
        left = pack_repo.ExistingPack('', '', '', '', '', '')
        right = pack_repo.ExistingPack('', '', '', '', '', '')
        self.assertCurrentlyEqual(left, right)
        # change all attributes and ensure equality changes as we do.
        left.revision_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.revision_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.inventory_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.inventory_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.text_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.text_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.signature_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.signature_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.name = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.name = 'a'
        self.assertCurrentlyEqual(left, right)
        left.transport = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.transport = 'a'
        self.assertCurrentlyEqual(left, right)

    def test_file_name(self):
        pack = pack_repo.ExistingPack('', 'a_name', '', '', '', '')
        self.assertEqual('a_name.pack', pack.file_name())


class TestNewPack(TestCaseWithTransport):
    """Tests for pack_repo.NewPack."""

    def test_new_instance_attributes(self):
        upload_transport = self.get_transport('upload')
        pack_transport = self.get_transport('pack')
        index_transport = self.get_transport('index')
        upload_transport.mkdir('.')
        collection = pack_repo.RepositoryPackCollection(
            repo=None,
            transport=self.get_transport('.'),
            index_transport=index_transport,
            upload_transport=upload_transport,
            pack_transport=pack_transport,
            index_builder_class=BTreeBuilder,
            index_class=BTreeGraphIndex,
            use_chk_index=False)
        pack = pack_repo.NewPack(collection)
        self.addCleanup(pack.abort) # Make sure the write stream gets closed
        self.assertIsInstance(pack.revision_index, BTreeBuilder)
        self.assertIsInstance(pack.inventory_index, BTreeBuilder)
        self.assertIsInstance(pack._hash, type(osutils.md5()))
        self.assertTrue(pack.upload_transport is upload_transport)
        self.assertTrue(pack.index_transport is index_transport)
        self.assertTrue(pack.pack_transport is pack_transport)
        self.assertEqual(None, pack.index_sizes)
        self.assertEqual(20, len(pack.random_name))
        self.assertIsInstance(pack.random_name, str)
        self.assertIsInstance(pack.start_time, float)


class TestPacker(TestCaseWithTransport):
    """Tests for the packs repository Packer class."""

    def test_pack_optimizes_pack_order(self):
        builder = self.make_branch_builder('.', format="1.9")
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('f', 'f-id', 'file', 'content\n'))])
        builder.build_snapshot('B', ['A'],
            [('modify', ('f-id', 'new-content\n'))])
        builder.build_snapshot('C', ['B'],
            [('modify', ('f-id', 'third-content\n'))])
        builder.build_snapshot('D', ['C'],
            [('modify', ('f-id', 'fourth-content\n'))])
        b = builder.get_branch()
        b.lock_read()
        builder.finish_series()
        self.addCleanup(b.unlock)
        # At this point, we should have 4 pack files available
        # Because of how they were built, they correspond to
        # ['D', 'C', 'B', 'A']
        packs = b.repository._pack_collection.packs
        packer = pack_repo.Packer(b.repository._pack_collection,
                                  packs, 'testing',
                                  revision_ids=['B', 'C'])
        # Now, when we are copying the B & C revisions, their pack files should
        # be moved to the front of the stack
        # The new ordering moves B & C to the front of the .packs attribute,
        # and leaves the others in the original order.
        new_packs = [packs[1], packs[2], packs[0], packs[3]]
        new_pack = packer.pack()
        self.assertEqual(new_packs, packer.packs)


class TestOptimisingPacker(TestCaseWithTransport):
    """Tests for the OptimisingPacker class."""

    def get_pack_collection(self):
        repo = self.make_repository('.')
        return repo._pack_collection

    def test_open_pack_will_optimise(self):
        packer = pack_repo.OptimisingPacker(self.get_pack_collection(),
                                            [], '.test')
        new_pack = packer.open_pack()
        self.addCleanup(new_pack.abort) # ensure cleanup
        self.assertIsInstance(new_pack, pack_repo.NewPack)
        self.assertTrue(new_pack.revision_index._optimize_for_size)
        self.assertTrue(new_pack.inventory_index._optimize_for_size)
        self.assertTrue(new_pack.text_index._optimize_for_size)
        self.assertTrue(new_pack.signature_index._optimize_for_size)


class TestGCCHKPacker(TestCaseWithTransport):

    def make_abc_branch(self):
        builder = self.make_branch_builder('source')
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('file', 'file-id', 'file', 'content\n')),
            ])
        builder.build_snapshot('B', ['A'], [
            ('add', ('dir', 'dir-id', 'directory', None))])
        builder.build_snapshot('C', ['B'], [
            ('modify', ('file-id', 'new content\n'))])
        builder.finish_series()
        return builder.get_branch()

    def make_branch_with_disjoint_inventory_and_revision(self):
        """Make a repo with separate packs for a revision's Revision and Inventory.

        There will be one pack file that holds the Revision content, and one
        for the Inventory content.

        :return: (repository,
                  pack_name_with_rev_A_Revision,
                  pack_name_with_rev_A_Inventory,
                  pack_name_with_rev_C_content)
        """
        b_source = self.make_abc_branch()
        b_base = b_source.bzrdir.sprout('base', revision_id='A').open_branch()
        b_stacked = b_base.bzrdir.sprout('stacked', stacked=True).open_branch()
        b_stacked.lock_write()
        self.addCleanup(b_stacked.unlock)
        b_stacked.fetch(b_source, 'B')
        # Now re-open the stacked repo directly (no fallbacks) so that we can
        # fill in the A rev.
        repo_not_stacked = b_stacked.bzrdir.open_repository()
        repo_not_stacked.lock_write()
        self.addCleanup(repo_not_stacked.unlock)
        # Now we should have a pack file with A's inventory, but not its
        # Revision
        self.assertEqual([('A',), ('B',)],
                         sorted(repo_not_stacked.inventories.keys()))
        self.assertEqual([('B',)],
                         sorted(repo_not_stacked.revisions.keys()))
        stacked_pack_names = repo_not_stacked._pack_collection.names()
        # We have a couple names here, figure out which has A's inventory
        for name in stacked_pack_names:
            pack = repo_not_stacked._pack_collection.get_pack_by_name(name)
            keys = [n[1] for n in pack.inventory_index.iter_all_entries()]
            if ('A',) in keys:
                inv_a_pack_name = name
                break
        else:
            self.fail('Could not find pack containing A\'s inventory')
        repo_not_stacked.fetch(b_source.repository, 'A')
        self.assertEqual([('A',), ('B',)],
                         sorted(repo_not_stacked.revisions.keys()))
        new_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_a_pack_names = new_pack_names.difference(stacked_pack_names)
        self.assertEqual(1, len(rev_a_pack_names))
        rev_a_pack_name = list(rev_a_pack_names)[0]
        # Now fetch 'C', so we have a couple pack files to join
        repo_not_stacked.fetch(b_source.repository, 'C')
        rev_c_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_c_pack_names = rev_c_pack_names.difference(new_pack_names)
        self.assertEqual(1, len(rev_c_pack_names))
        rev_c_pack_name = list(rev_c_pack_names)[0]
        return (repo_not_stacked, rev_a_pack_name, inv_a_pack_name,
                rev_c_pack_name)

    def test_pack_with_distant_inventories(self):
        # See https://bugs.launchpad.net/bzr/+bug/437003
        # When repacking, it is possible to have an inventory in a different
        # pack file than the associated revision. An autopack can then come
        # along, and miss that inventory, and complain.
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        a_pack = repo._pack_collection.get_pack_by_name(rev_a_pack_name)
        c_pack = repo._pack_collection.get_pack_by_name(rev_c_pack_name)
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
                                                [a_pack, c_pack], '.test-pack')
        # This would raise ValueError in bug #437003, but should not raise an
        # error once that bug is fixed.
        packer.pack()

    def test_pack_with_missing_inventory(self):
        # Similar to test_pack_with_distant_inventories, but this time, we
        # force the A inventory to actually be gone from the repository.
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        inv_a_pack = repo._pack_collection.get_pack_by_name(inv_a_pack_name)
        repo._pack_collection._remove_pack_from_memory(inv_a_pack)
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
            repo._pack_collection.all_packs(), '.test-pack')
        e = self.assertRaises(ValueError, packer.pack)
        packer.new_pack.abort()
        self.assertContainsRe(str(e),
            r"We are missing inventories for revisions: .*'A'")


class TestCrossFormatPacks(TestCaseWithTransport):

    def log_pack(self, hint=None):
        self.calls.append(('pack', hint))
        self.orig_pack(hint=hint)
        if self.expect_hint:
            self.assertTrue(hint)

    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository._get_source(target._format)
        self.orig_pack = target.pack
        target.pack = self.log_pack
        search = target.search_missing_revision_ids(
            source_tree.branch.repository, revision_ids=[tip])
        stream = source.get_stream(search)
        from_format = source_tree.branch.repository._format
        sink = target._get_sink()
        sink.insert_stream(stream, from_format, [])
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository
        self.orig_pack = target.pack
        target.pack = self.log_pack
        target.fetch(source)
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def test_sink_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_stream('1.9', 'rich-root-pack', False)

    def test_sink_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_stream('1.9', '2a', True)

    def test_sink_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_stream('2a', '2a', False)

    def test_IDS_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_fetch('1.9', 'rich-root-pack', False)

    def test_IDS_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_fetch('1.9', '2a', True)

    def test_IDS_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_fetch('2a', '2a', False)