class TestMisc(TestCase):

    def test_unescape_xml(self):
        """Malformed entities raise a KeyError when passed to _unescape_xml."""
        self.assertRaises(KeyError, repository._unescape_xml, 'foo&bar;')
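        # (_unescape_xml presumably resolves XML entities via a mapping
        # lookup, so the unknown entity 'bar' in 'foo&bar;' surfaces as a
        # KeyError.)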


class TestRepositoryFormatKnit3(TestCaseWithTransport):

    def test_attribute__fetch_order(self):
        """Knits need topological data insertion."""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertEqual('topological', repo._format._fetch_order)

    def test_attribute__fetch_uses_deltas(self):
        """Knits reuse deltas."""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertEqual(True, repo._format._fetch_uses_deltas)

    def test_convert(self):
        """Ensure the upgrade adds weaves for roots."""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit1()
        tree = self.make_branch_and_tree('.', format)
        tree.commit("Dull commit", rev_id="dull")
        revision_tree = tree.branch.repository.revision_tree('dull')
        revision_tree.lock_read()
        try:
            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
                revision_tree.inventory.root.file_id)
        finally:
            revision_tree.unlock()
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        upgrade.Convert('.', format)
        tree = workingtree.WorkingTree.open('.')
        revision_tree = tree.branch.repository.revision_tree('dull')
        revision_tree.lock_read()
        try:
            revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
        finally:
            revision_tree.unlock()
        tree.commit("Another dull commit", rev_id='dull2')
        revision_tree = tree.branch.repository.revision_tree('dull2')
        revision_tree.lock_read()
        self.addCleanup(revision_tree.unlock)
        self.assertEqual('dull', revision_tree.inventory.root.revision)

    def test_supports_external_lookups(self):
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertFalse(repo._format.supports_external_lookups)


class Test2a(tests.TestCaseWithMemoryTransport):

    def test_fetch_combines_groups(self):
        builder = self.make_branch_builder('source', format='2a')
        builder.start_series()
        builder.build_snapshot('1', None, [
            ('add', ('', 'root-id', 'directory', '')),
            ('add', ('file', 'file-id', 'file', 'content\n'))])
        builder.build_snapshot('2', ['1'], [
            ('modify', ('file-id', 'content-2\n'))])
        builder.finish_series()
        source = builder.get_branch()
        target = self.make_repository('target', format='2a')
        target.fetch(source.repository)
        target.lock_read()
        self.addCleanup(target.unlock)
        details = target.texts._index.get_build_details(
            [('file-id', '1',), ('file-id', '2',)])
        file_1_details = details[('file-id', '1')]
        file_2_details = details[('file-id', '2')]
        # The index, and what to read off disk, should be the same for both
        # versions of the file.
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])

    def test_format_pack_compresses_True(self):
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo._format.pack_compresses)

    def test_inventories_use_chk_map_with_parent_base_dict(self):
        tree = self.make_branch_and_memory_tree('repo', format="2a")
        tree.lock_write()
        tree.add([''], ['TREE_ROOT'])
        revid = tree.commit("foo")
        tree.unlock()
        tree.lock_read()
        self.addCleanup(tree.unlock)
        inv = tree.branch.repository.get_inventory(revid)
        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
        inv.parent_id_basename_to_file_id._ensure_root()
        inv.id_to_entry._ensure_root()
        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
        self.assertEqual(65536,
            inv.parent_id_basename_to_file_id._root_node.maximum_size)
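        # (65536 bytes = 64 KiB: the maximum page size that 2a configures
        # for both CHK maps, per the two assertions above.)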

    def test_autopack_unchanged_chk_nodes(self):
        # at 20 unchanged commits, chk pages are packed that are split into
        # two groups such that the new pack being made doesn't have all its
        # pages in the source packs (though they are in the repository).
        # Use a memory backed repository, we don't need to hit disk for this
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        for pos in range(20):
            tree.commit(str(pos))

    def test_pack_with_hint(self):
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        # 1 commit to leave untouched
        tree.commit('1')
        to_keep = tree.branch.repository._pack_collection.names()
        # 2 to combine
        tree.commit('2')
        tree.commit('3')
        all = tree.branch.repository._pack_collection.names()
        combine = list(set(all) - set(to_keep))
        self.assertLength(3, all)
        self.assertLength(2, combine)
        tree.branch.repository.pack(hint=combine)
        final = tree.branch.repository._pack_collection.names()
        self.assertLength(2, final)
        self.assertFalse(combine[0] in final)
        self.assertFalse(combine[1] in final)
        self.assertSubset(to_keep, final)
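        # (pack(hint=...) combines only the named packs: the pack from the
        # untouched first commit survives as-is, so two packs remain, the
        # preserved one plus the newly combined one.)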

    def test_stream_source_to_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='2a')
        stream = source._get_source(target._format)
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)

    def test_stream_source_to_non_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='rich-root-pack')
        stream = source._get_source(target._format)
        # We don't want the child GroupCHKStreamSource
        self.assertIs(type(stream), repository.StreamSource)
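        # (Taken together: _get_source returns the specialised
        # GroupCHKStreamSource only when the target can take CHK pages
        # as-is; any other target gets the generic StreamSource.)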

    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
        source_builder = self.make_branch_builder('source',
                            format='2a')
        # We have to build a fairly large tree, so that we are sure the chk
        # pages will have split into multiple pages.
        entries = [('add', ('', 'a-root-id', 'directory', None))]
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                fname = i + j
                fid = fname + '-id'
                content = 'content for %s\n' % (fname,)
                entries.append(('add', (fname, fid, 'file', content)))
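        # (The two 35-character alphabets give 35 x 35 = 1225 files, enough
        # to guarantee the CHK leaf pages split.)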
        source_builder.start_series()
        source_builder.build_snapshot('rev-1', None, entries)
        # Now change a few of them, so we get a few new pages for the second
        # revision
        source_builder.build_snapshot('rev-2', ['rev-1'], [
            ('modify', ('aa-id', 'new content for aa-id\n')),
            ('modify', ('cc-id', 'new content for cc-id\n')),
            ('modify', ('zz-id', 'new content for zz-id\n')),
            ])
        source_builder.finish_series()
        source_branch = source_builder.get_branch()
        source_branch.lock_read()
        self.addCleanup(source_branch.unlock)
        target = self.make_repository('target', format='2a')
        source = source_branch.repository._get_source(target._format)
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)

        # On a regular pass, getting the inventories and chk pages for rev-2
        # would only get the newly created chk pages
        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
                                    set(['rev-2']))
        simple_chk_records = []
        for vf_name, substream in source.get_stream(search):
            if vf_name == 'chk_bytes':
                for record in substream:
                    simple_chk_records.append(record.key)
            else:
                for _ in substream:
                    continue
        # 4 pages: the root (InternalNode) + the 3 leaf pages which actually
        # changed
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
                         simple_chk_records)
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
        # we should get a much larger set of pages.
        missing = [('inventories', 'rev-2')]
        full_chk_records = []
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
            if vf_name == 'inventories':
                for record in substream:
                    self.assertEqual(('rev-2',), record.key)
            elif vf_name == 'chk_bytes':
                for record in substream:
                    full_chk_records.append(record.key)
            else:
                self.fail('Should not be getting a stream of %s' % (vf_name,))
        # We have 257 records now. This is because we have 1 root page, and 256
        # leaf pages in a complete listing.
        self.assertEqual(257, len(full_chk_records))
        self.assertSubset(simple_chk_records, full_chk_records)

    def test_inconsistency_fatal(self):
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo.revisions._index._inconsistency_fatal)
        self.assertFalse(repo.texts._index._inconsistency_fatal)
        self.assertFalse(repo.inventories._index._inconsistency_fatal)
        self.assertFalse(repo.signatures._index._inconsistency_fatal)
        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
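        # (Only the revisions index treats an inconsistent duplicate entry
        # as fatal; the other indices presumably tolerate re-insertion of
        # equivalent records, e.g. during overlapping fetches.)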


class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):

    def test_source_to_exact_pack_092(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='pack-0.92')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_rich_root_pack(self):
        source = self.make_repository('source', format='rich-root-pack')
        target = self.make_repository('target', format='rich-root-pack')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19_rich_root(self):
        source = self.make_repository('source', format='1.9-rich-root')
        target = self.make_repository('target', format='1.9-rich-root')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_remote_exact_pack_19(self):
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_stream_source_to_non_exact(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='1.9')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), repository.StreamSource)

    def test_stream_source_to_non_exact_rich_root(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9-rich-root')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), repository.StreamSource)

    def test_source_to_remote_non_exact_pack_19(self):
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.6')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIs(type(stream_source), repository.StreamSource)

    def test_stream_source_to_knit(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='dirstate')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), repository.StreamSource)
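        # (In short: KnitPackStreamSource is only chosen when source and
        # target pack formats match exactly; any conversion, local or
        # remote, falls back to the generic StreamSource.)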


class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
    """Tests for _find_parent_ids_of_revisions."""

    def setUp(self):
        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
        self.builder = self.make_branch_builder('source',
            format='development6-rich-root')
        self.builder.start_series()
        self.builder.build_snapshot('initial', None,
            [('add', ('', 'tree-root', 'directory', None))])
        self.repo = self.builder.get_branch().repository
        self.addCleanup(self.builder.finish_series)

    def assertParentIds(self, expected_result, rev_set):
        self.assertEqual(sorted(expected_result),
            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))
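        # (_find_parent_ids_of_revisions yields the parents of revisions in
        # rev_set that are not themselves in rev_set, ghosts included; the
        # tests below pin down each case.)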

    def test_simple(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        rev_set = ['revid2']
        self.assertParentIds(['revid1'], rev_set)

    def test_not_first_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2'], [])
        rev_set = ['revid3', 'revid2']
        self.assertParentIds(['revid1'], rev_set)

    def test_not_null(self):
        rev_set = ['initial']
        self.assertParentIds([], rev_set)

    def test_not_null_set(self):
        self.builder.build_snapshot('revid1', None, [])
        rev_set = [_mod_revision.NULL_REVISION]
        self.assertParentIds([], rev_set)

    def test_ghost(self):
        self.builder.build_snapshot('revid1', None, [])
        rev_set = ['ghost', 'revid1']
        self.assertParentIds(['initial'], rev_set)

    def test_ghost_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
        rev_set = ['revid2', 'revid1']
        self.assertParentIds(['ghost', 'initial'], rev_set)

    def test_righthand_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2a', ['revid1'], [])
        self.builder.build_snapshot('revid2b', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
        rev_set = ['revid3', 'revid2a']
        self.assertParentIds(['revid1', 'revid2b'], rev_set)


class TestWithBrokenRepo(TestCaseWithTransport):
    """These tests seem to be more appropriate as interface tests?"""

    def make_broken_repository(self):
        # XXX: This function is borrowed from Aaron's "Reconcile can fix bad
        # parent references" branch which is due to land in bzr.dev soon. Once
        # it does, this duplication should be removed.
        repo = self.make_repository('broken-repo')
        cleanups = []
        try:
            repo.lock_write()
            cleanups.append(repo.unlock)
            repo.start_write_group()
            cleanups.append(repo.commit_write_group)
            # make rev1a: A well-formed revision, containing 'file1'
            inv = inventory.Inventory(revision_id='rev1a')
            inv.root.revision = 'rev1a'
            self.add_file(repo, inv, 'file1', 'rev1a', [])
            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
            repo.add_inventory('rev1a', inv, [])
            revision = _mod_revision.Revision('rev1a',
                committer='jrandom@example.com', timestamp=0,
                inventory_sha1='', timezone=0, message='foo', parent_ids=[])
            repo.add_revision('rev1a', revision, inv)

            # make rev1b, which has no Revision, but has an Inventory, and
            # file1
            inv = inventory.Inventory(revision_id='rev1b')
            inv.root.revision = 'rev1b'
            self.add_file(repo, inv, 'file1', 'rev1b', [])
            repo.add_inventory('rev1b', inv, [])

            # make rev2, with file1 and file2
            # file2 is sane
            # file1 has 'rev1b' as an ancestor, even though this is not
            # mentioned by 'rev1a', making it an unreferenced ancestor
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file1', 'rev2', ['rev1a', 'rev1b'])
            self.add_file(repo, inv, 'file2', 'rev2', [])
            self.add_revision(repo, 'rev2', inv, ['rev1a'])

            # make ghost revision rev1c
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev1c', [])

            # make rev3 with file2
            # file2 refers to 'rev1c', which is a ghost in this repository, so
            # file2 cannot have rev1c as its ancestor.
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev3', ['rev1c'])
            self.add_revision(repo, 'rev3', inv, ['rev1c'])
            return repo
        finally:
            for cleanup in reversed(cleanups):
                cleanup()

    def add_revision(self, repo, revision_id, inv, parent_ids):
        inv.revision_id = revision_id
        inv.root.revision = revision_id
        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
        repo.add_inventory(revision_id, inv, parent_ids)
        revision = _mod_revision.Revision(revision_id,
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
            timezone=0, message='foo', parent_ids=parent_ids)
        repo.add_revision(revision_id, revision, inv)

    def add_file(self, repo, inv, filename, revision, parents):
        file_id = filename + '-id'
        entry = inventory.InventoryFile(file_id, filename, 'TREE_ROOT')
        entry.revision = revision
        entry.text_size = 0
        inv.add(entry)
        text_key = (file_id, revision)
        parent_keys = [(file_id, parent) for parent in parents]
        repo.texts.add_lines(text_key, parent_keys, ['line\n'])

    def test_insert_from_broken_repo(self):
        """Inserting a data stream from a broken repository won't silently
        corrupt the target repository.
        """
        broken_repo = self.make_broken_repository()
        empty_repo = self.make_repository('empty-repo')
        try:
            empty_repo.fetch(broken_repo)
        except (errors.RevisionNotPresent, errors.BzrCheckError):
            # Test successful: compression parent not being copied leads to
            # error.
            return
        empty_repo.lock_read()
        self.addCleanup(empty_repo.unlock)
        text = empty_repo.texts.get_record_stream(
            [('file2-id', 'rev3')], 'topological', True).next()
        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
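        # (If the fetch did not error out, the probe above reads the one
        # text whose ancestry crosses the broken 'rev1c' ghost, the record
        # most likely to have been corrupted in transit.)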


class TestRepositoryPackCollection(TestCaseWithTransport):

    def get_format(self):
        return bzrdir.format_registry.make_bzrdir('pack-0.92')

    def get_packs(self):
        format = self.get_format()
        repo = self.make_repository('.', format=format)
        return repo._pack_collection

    def make_packs_and_alt_repo(self, write_lock=False):
        """Create a pack repo with 3 packs, and access it via a second repo."""
        tree = self.make_branch_and_tree('.', format=self.get_format())
        tree.lock_write()
        self.addCleanup(tree.unlock)
        rev1 = tree.commit('one')
        rev2 = tree.commit('two')
        rev3 = tree.commit('three')
        r = repository.Repository.open('.')
        if write_lock:
            r.lock_write()
        else:
            r.lock_read()
        self.addCleanup(r.unlock)
        packs = r._pack_collection
        packs.ensure_loaded()
        return tree, r, packs, [rev1, rev2, rev3]

    def test__clear_obsolete_packs(self):
        packs = self.get_packs()
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
        res = packs._clear_obsolete_packs()
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
        self.assertEqual([], obsolete_pack_trans.list_dir('.'))

    def test__clear_obsolete_packs_preserve(self):
        packs = self.get_packs()
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
                         sorted(obsolete_pack_trans.list_dir('.')))

    def test__max_pack_count(self):
        """The maximum pack count is a function of the number of revisions."""
        # no revisions - one pack, so that we can have a revision free repo
        # without it blowing up
        packs = self.get_packs()
        self.assertEqual(1, packs._max_pack_count(0))
        # after that the sum of the digits - check the first 1-9
        self.assertEqual(1, packs._max_pack_count(1))
        self.assertEqual(2, packs._max_pack_count(2))
        self.assertEqual(3, packs._max_pack_count(3))
        self.assertEqual(4, packs._max_pack_count(4))
        self.assertEqual(5, packs._max_pack_count(5))
        self.assertEqual(6, packs._max_pack_count(6))
        self.assertEqual(7, packs._max_pack_count(7))
        self.assertEqual(8, packs._max_pack_count(8))
        self.assertEqual(9, packs._max_pack_count(9))
        # check the boundary cases with two digits for the next decade
        self.assertEqual(1, packs._max_pack_count(10))
        self.assertEqual(2, packs._max_pack_count(11))
        self.assertEqual(10, packs._max_pack_count(19))
        self.assertEqual(2, packs._max_pack_count(20))
        self.assertEqual(3, packs._max_pack_count(21))
        # check some arbitrary big numbers
        self.assertEqual(25, packs._max_pack_count(112894))
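        # (sum of the digits of 112894: 1 + 1 + 2 + 8 + 9 + 4 = 25)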

    def test_repr(self):
        packs = self.get_packs()
        self.assertContainsRe(repr(packs),
            'RepositoryPackCollection(.*Repository(.*))')

    def test__obsolete_packs(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        # Schedule this one for removal
        packs._remove_pack_from_memory(pack)
        # Simulate a concurrent update by renaming the .pack file and one of
        # the indices
        packs.transport.rename('packs/%s.pack' % (names[0],),
            'obsolete_packs/%s.pack' % (names[0],))
        packs.transport.rename('indices/%s.iix' % (names[0],),
            'obsolete_packs/%s.iix' % (names[0],))
        # Now trigger the obsoletion, and ensure that all the remaining files
        # are still renamed
        packs._obsolete_packs([pack])
        self.assertEqual([n + '.pack' for n in names[1:]],
                         sorted(packs._pack_transport.list_dir('.')))
        # names[0] should not be present in the index anymore
        self.assertEqual(names[1:],
            sorted(set([osutils.splitext(n)[0] for n in
                        packs._index_transport.list_dir('.')])))

    def test_pack_distribution_zero(self):
        packs = self.get_packs()
        self.assertEqual([0], packs.pack_distribution(0))

    def test_ensure_loaded_unlocked(self):
        packs = self.get_packs()
        self.assertRaises(errors.ObjectNotLocked,
                          packs.ensure_loaded)

    def test_pack_distribution_one_to_nine(self):
        packs = self.get_packs()
        self.assertEqual([1],
            packs.pack_distribution(1))
        self.assertEqual([1, 1],
            packs.pack_distribution(2))
        self.assertEqual([1, 1, 1],
            packs.pack_distribution(3))
        self.assertEqual([1, 1, 1, 1],
            packs.pack_distribution(4))
        self.assertEqual([1, 1, 1, 1, 1],
            packs.pack_distribution(5))
        self.assertEqual([1, 1, 1, 1, 1, 1],
            packs.pack_distribution(6))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(7))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(8))
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
            packs.pack_distribution(9))

    def test_pack_distribution_stable_at_boundaries(self):
        """When there are multi-rev packs the counts are stable."""
        packs = self.get_packs()
        # in 10s:
        self.assertEqual([10], packs.pack_distribution(10))
        self.assertEqual([10, 1], packs.pack_distribution(11))
        self.assertEqual([10, 10], packs.pack_distribution(20))
        self.assertEqual([10, 10, 1], packs.pack_distribution(21))
        # 100s
        self.assertEqual([100], packs.pack_distribution(100))
        self.assertEqual([100, 1], packs.pack_distribution(101))
        self.assertEqual([100, 10, 1], packs.pack_distribution(111))
        self.assertEqual([100, 100], packs.pack_distribution(200))
        self.assertEqual([100, 100, 1], packs.pack_distribution(201))
        self.assertEqual([100, 100, 10, 1], packs.pack_distribution(211))
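        # (The distribution mirrors the decimal digits of the revision
        # count, e.g. 211 -> [100, 100, 10, 1].)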

    def test_plan_pack_operations_2009_revisions_skip_all_packs(self):
        packs = self.get_packs()
        existing_packs = [(2000, "big"), (9, "medium")]
        # rev count - 2009 -> 2x1000 + 9x1
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 1, 1, 1, 1, 1, 1, 1, 1, 1])
        self.assertEqual([], pack_operations)

    def test_plan_pack_operations_2010_revisions_skip_all_packs(self):
        packs = self.get_packs()
        existing_packs = [(2000, "big"), (9, "medium"), (1, "single")]
        # rev count - 2010 -> 2x1000 + 1x10
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 10])
        self.assertEqual([], pack_operations)

    def test_plan_pack_operations_2010_combines_smallest_two(self):
        packs = self.get_packs()
        existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"),
            (1, "single1")]
        # rev count - 2010 -> 2x1000 + 1x10 (3)
        pack_operations = packs.plan_autopack_combinations(
            existing_packs, [1000, 1000, 10])
        self.assertEqual([[2, ["single2", "single1"]]], pack_operations)

    def test_plan_pack_operations_creates_a_single_op(self):
        packs = self.get_packs()
        existing_packs = [(50, 'a'), (40, 'b'), (30, 'c'), (10, 'd'),
                          (10, 'e'), (6, 'f'), (4, 'g')]
        # rev count 150 -> 1x100 and 5x10
        # The two size 10 packs do not need to be touched. The 50, 40, 30 would
        # be combined into a single 120 size pack, and the 6 & 4 would be
        # combined into a size 10 pack. However, if we have to rewrite them,
        # we save a pack file with no increased I/O by putting them into the
        # same file.
        distribution = packs.pack_distribution(150)
        pack_operations = packs.plan_autopack_combinations(existing_packs,
            distribution)
        self.assertEqual([[130, ['a', 'b', 'c', 'f', 'g']]], pack_operations)

    def test_all_packs_none(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([], packs.all_packs())

    def test_all_packs_one(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([
            packs.get_pack_by_name(packs.names()[0])],
            packs.all_packs())

    def test_all_packs_two(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.commit('continue')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.ensure_loaded()
        self.assertEqual([
            packs.get_pack_by_name(packs.names()[0]),
            packs.get_pack_by_name(packs.names()[1]),
            ], packs.all_packs())

    def test_get_pack_by_name(self):
        format = self.get_format()
        tree = self.make_branch_and_tree('.', format=format)
        tree.commit('start')
        tree.lock_read()
        self.addCleanup(tree.unlock)
        packs = tree.branch.repository._pack_collection
        packs.reset()
        packs.ensure_loaded()
        name = packs.names()[0]
        pack_1 = packs.get_pack_by_name(name)
        # the pack should be correctly initialised
        sizes = packs._names[name]
        rev_index = GraphIndex(packs._index_transport, name + '.rix', sizes[0])
        inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1])
        txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2])
        sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3])
        self.assertEqual(pack_repo.ExistingPack(packs._pack_transport,
            name, rev_index, inv_index, txt_index, sig_index), pack_1)
        # and the same instance should be returned on successive calls.
        self.assertTrue(pack_1 is packs.get_pack_by_name(name))

    def test_reload_pack_names_new_entry(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Add a new pack file into the repository
        rev4 = tree.commit('four')
        new_names = tree.branch.repository._pack_collection.names()
        new_name = set(new_names).difference(names)
        self.assertEqual(1, len(new_name))
        new_name = new_name.pop()
        # The old collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        # And the repository can access the new revision
        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
        self.assertFalse(packs.reload_pack_names())

    def test_reload_pack_names_added_and_removed(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo()
        names = packs.names()
        # Now repack the whole thing
        tree.branch.repository.pack()
        new_names = tree.branch.repository._pack_collection.names()
        # The other collection hasn't noticed yet
        self.assertEqual(names, packs.names())
        self.assertTrue(packs.reload_pack_names())
        self.assertEqual(new_names, packs.names())
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
        self.assertFalse(packs.reload_pack_names())

    def test_reload_pack_names_preserves_pending(self):
        # TODO: Update this to also test for pending-deleted names
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # We will add one pack (via start_write_group + insert_record_stream),
        # and remove another pack (via _remove_pack_from_memory)
        orig_names = packs.names()
        orig_at_load = packs._packs_at_load
        to_remove_name = iter(orig_names).next()
        r.start_write_group()
        self.addCleanup(r.abort_write_group)
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
            ('text', 'rev'), (), None, 'content\n')])
        new_pack = packs._new_pack
        self.assertTrue(new_pack.data_inserted())
        new_pack.finish()
        packs.allocate(new_pack)
        packs._new_pack = None
        removed_pack = packs.get_pack_by_name(to_remove_name)
        packs._remove_pack_from_memory(removed_pack)
        names = packs.names()
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))
        packs.reload_pack_names()
        reloaded_names = packs.names()
        self.assertEqual(orig_at_load, packs._packs_at_load)
        self.assertEqual(names, reloaded_names)
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))

    def test_autopack_obsoletes_new_pack(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        r.start_write_group()
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
            ('bogus-rev',), (), None, 'bogus-content\n')])
        # This should trigger an autopack, which will combine everything into a
        # single pack file.
        new_names = r.commit_write_group()
        names = packs.names()
        self.assertEqual(1, len(names))
        self.assertEqual([names[0] + '.pack'],
                         packs._pack_transport.list_dir('.'))

    def test_autopack_reloads_and_stops(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # After we have determined what needs to be autopacked, trigger a
        # full-pack via the other repo which will cause us to re-evaluate and
        # decide we don't need to do anything
        orig_execute = packs._execute_pack_operations
        def _munged_execute_pack_ops(*args, **kwargs):
            tree.branch.repository.pack()
            return orig_execute(*args, **kwargs)
        packs._execute_pack_operations = _munged_execute_pack_ops
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        self.assertFalse(packs.autopack())
        self.assertEqual(1, len(packs.names()))
        self.assertEqual(tree.branch.repository._pack_collection.names(),
                         packs.names())

    def test__save_pack_names(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        packs._save_pack_names(obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # obsolete_packs will also have stuff like .rix and .iix present.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
        self.assertEqual([pack.name], sorted(obsolete_names))

    def test__save_pack_names_already_obsoleted(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        # We are going to simulate a concurrent autopack by manually obsoleting
        # the pack directly.
        packs._obsolete_packs([pack])
        packs._save_pack_names(clear_obsolete_packs=True,
                               obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # Note that while we set clear_obsolete_packs=True, it should not
        # delete a pack file that we have also scheduled for obsoletion.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
        self.assertEqual([pack.name], sorted(obsolete_names))


class TestPack(TestCaseWithTransport):
    """Tests for the Pack object."""

    def assertCurrentlyEqual(self, left, right):
        self.assertTrue(left == right)
        self.assertTrue(right == left)
        self.assertFalse(left != right)
        self.assertFalse(right != left)
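        # (Equality is checked in both directions and via both == and !=,
        # since ExistingPack presumably implements __eq__ and __ne__
        # separately.)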

    def assertCurrentlyNotEqual(self, left, right):
        self.assertFalse(left == right)
        self.assertFalse(right == left)
        self.assertTrue(left != right)
        self.assertTrue(right != left)

    def test___eq____ne__(self):
        left = pack_repo.ExistingPack('', '', '', '', '', '')
        right = pack_repo.ExistingPack('', '', '', '', '', '')
        self.assertCurrentlyEqual(left, right)
        # change all attributes and ensure equality changes as we do.
        left.revision_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.revision_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.inventory_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.inventory_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.text_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.text_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.signature_index = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.signature_index = 'a'
        self.assertCurrentlyEqual(left, right)
        left.name = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.name = 'a'
        self.assertCurrentlyEqual(left, right)
        left.transport = 'a'
        self.assertCurrentlyNotEqual(left, right)
        right.transport = 'a'
        self.assertCurrentlyEqual(left, right)

    def test_file_name(self):
        pack = pack_repo.ExistingPack('', 'a_name', '', '', '', '')
        self.assertEqual('a_name.pack', pack.file_name())


class TestNewPack(TestCaseWithTransport):
    """Tests for pack_repo.NewPack."""

    def test_new_instance_attributes(self):
        upload_transport = self.get_transport('upload')
        pack_transport = self.get_transport('pack')
        index_transport = self.get_transport('index')
        upload_transport.mkdir('.')
        collection = pack_repo.RepositoryPackCollection(
            repo=None,
            transport=self.get_transport('.'),
            index_transport=index_transport,
            upload_transport=upload_transport,
            pack_transport=pack_transport,
            index_builder_class=BTreeBuilder,
            index_class=BTreeGraphIndex,
            use_chk_index=False)
        pack = pack_repo.NewPack(collection)
        self.addCleanup(pack.abort) # Make sure the write stream gets closed
        self.assertIsInstance(pack.revision_index, BTreeBuilder)
        self.assertIsInstance(pack.inventory_index, BTreeBuilder)
        self.assertIsInstance(pack._hash, type(osutils.md5()))
        self.assertTrue(pack.upload_transport is upload_transport)
        self.assertTrue(pack.index_transport is index_transport)
        self.assertTrue(pack.pack_transport is pack_transport)
        self.assertEqual(None, pack.index_sizes)
        self.assertEqual(20, len(pack.random_name))
        self.assertIsInstance(pack.random_name, str)
        self.assertIsInstance(pack.start_time, float)


class TestPacker(TestCaseWithTransport):
    """Tests for the packs repository Packer class."""

    def test_pack_optimizes_pack_order(self):
        builder = self.make_branch_builder('.', format="1.9")
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('f', 'f-id', 'file', 'content\n'))])
        builder.build_snapshot('B', ['A'],
            [('modify', ('f-id', 'new-content\n'))])
        builder.build_snapshot('C', ['B'],
            [('modify', ('f-id', 'third-content\n'))])
        builder.build_snapshot('D', ['C'],
            [('modify', ('f-id', 'fourth-content\n'))])
        b = builder.get_branch()
        b.lock_read()
        builder.finish_series()
        self.addCleanup(b.unlock)
        # At this point, we should have 4 pack files available
        # Because of how they were built, they correspond to
        # ['D', 'C', 'B', 'A']
        packs = b.repository._pack_collection.packs
        packer = pack_repo.Packer(b.repository._pack_collection,
                                  packs, 'testing',
                                  revision_ids=['B', 'C'])
        # Now, when we are copying the B & C revisions, their pack files should
        # be moved to the front of the stack
        # The new ordering moves B & C to the front of the .packs attribute,
        # and leaves the others in the original order.
        new_packs = [packs[1], packs[2], packs[0], packs[3]]
        new_pack = packer.pack()
        self.assertEqual(new_packs, packer.packs)


class TestOptimisingPacker(TestCaseWithTransport):
    """Tests for the OptimisingPacker class."""

    def get_pack_collection(self):
        repo = self.make_repository('.')
        return repo._pack_collection

    def test_open_pack_will_optimise(self):
        packer = pack_repo.OptimisingPacker(self.get_pack_collection(),
                                            [], '.test')
        new_pack = packer.open_pack()
        self.addCleanup(new_pack.abort) # ensure cleanup
        self.assertIsInstance(new_pack, pack_repo.NewPack)
        self.assertTrue(new_pack.revision_index._optimize_for_size)
        self.assertTrue(new_pack.inventory_index._optimize_for_size)
        self.assertTrue(new_pack.text_index._optimize_for_size)
        self.assertTrue(new_pack.signature_index._optimize_for_size)
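        # (Setting _optimize_for_size presumably trades index build time for
        # smaller on-disk indices, a reasonable choice for a one-off
        # optimising repack.)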


class TestCrossFormatPacks(TestCaseWithTransport):

    def log_pack(self, hint=None):
        self.calls.append(('pack', hint))
        self.orig_pack(hint=hint)
        if self.expect_hint:
            self.assertTrue(hint)

    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository._get_source(target._format)
        self.orig_pack = target.pack
        target.pack = self.log_pack
        search = target.search_missing_revision_ids(
            source_tree.branch.repository, tip)
        stream = source.get_stream(search)
        from_format = source_tree.branch.repository._format
        sink = target._get_sink()
        sink.insert_stream(stream, from_format, [])
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository
        self.orig_pack = target.pack
        target.pack = self.log_pack
        target.fetch(source)
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def test_sink_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_stream('1.9', 'rich-root-pack', False)

    def test_sink_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_stream('1.9', '2a', True)

    def test_sink_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_stream('2a', '2a', False)

    def test_IDS_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_fetch('1.9', 'rich-root-pack', False)

    def test_IDS_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_fetch('1.9', '2a', True)

    def test_IDS_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_fetch('2a', '2a', False)