        self.assertEqual(65536,
            inv.parent_id_basename_to_file_id._root_node.maximum_size)

    def test_autopack_unchanged_chk_nodes(self):
        # at 20 unchanged commits, chk pages are packed that are split into
        # two groups such that the new pack being made doesn't have all its
        # pages in the source packs (though they are in the repository).
        tree = self.make_branch_and_tree('tree', format='2a')
        for pos in range(20):
            tree.commit(str(pos))

    def test_pack_with_hint(self):
        tree = self.make_branch_and_tree('tree', format='2a')
        # 1 commit to leave untouched
        tree.commit('1')
        to_keep = tree.branch.repository._pack_collection.names()
        # 2 more commits to combine into a single new pack
        tree.commit('2')
        tree.commit('3')
        all = tree.branch.repository._pack_collection.names()
        combine = list(set(all) - set(to_keep))
        self.assertLength(3, all)
        self.assertLength(2, combine)
        tree.branch.repository.pack(hint=combine)
        final = tree.branch.repository._pack_collection.names()
        self.assertLength(2, final)
        self.assertFalse(combine[0] in final)
        self.assertFalse(combine[1] in final)
        self.assertSubset(to_keep, final)

    def test_stream_source_to_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='2a')
        stream = source._get_source(target._format)
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)

    def test_stream_source_to_non_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='rich-root-pack')
        stream = source._get_source(target._format)
        # We don't want the child GroupCHKStreamSource
        self.assertIs(type(stream), repository.StreamSource)

    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
        source_builder = self.make_branch_builder('source',
            format='2a')
        # We have to build a fairly large tree, so that we are sure the chk
        # pages will have split into multiple pages.
        entries = [('add', ('', 'a-root-id', 'directory', None))]
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                fname = i + j
                fid = fname + '-id'
                content = 'content for %s\n' % (fname,)
                entries.append(('add', (fname, fid, 'file', content)))
        source_builder.start_series()
        source_builder.build_snapshot('rev-1', None, entries)
        # Now change a few of them, so we get a few new pages for the second
        # revision
        source_builder.build_snapshot('rev-2', ['rev-1'], [
            ('modify', ('aa-id', 'new content for aa-id\n')),
            ('modify', ('cc-id', 'new content for cc-id\n')),
            ('modify', ('zz-id', 'new content for zz-id\n')),
            ])
        source_builder.finish_series()
        source_branch = source_builder.get_branch()
        source_branch.lock_read()
        self.addCleanup(source_branch.unlock)
        target = self.make_repository('target', format='2a')
        source = source_branch.repository._get_source(target._format)
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)

        # On a regular pass, getting the inventories and chk pages for rev-2
        # would only get the newly created chk pages
        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
                                    set(['rev-2']))
        simple_chk_records = []
        for vf_name, substream in source.get_stream(search):
            if vf_name == 'chk_bytes':
                for record in substream:
                    simple_chk_records.append(record.key)
            else:
                for _ in substream:
                    continue
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
                         simple_chk_records)
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
        # we should get a much larger set of pages.
        missing = [('inventories', 'rev-2')]
        full_chk_records = []
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
            if vf_name == 'inventories':
                for record in substream:
                    self.assertEqual(('rev-2',), record.key)
            elif vf_name == 'chk_bytes':
                for record in substream:
                    full_chk_records.append(record.key)
            else:
                self.fail('Should not be getting a stream of %s' % (vf_name,))
        # We have 257 records now. This is because we have 1 root page, and 256
        # leaf pages in a complete listing.
        self.assertEqual(257, len(full_chk_records))
        self.assertSubset(simple_chk_records, full_chk_records)

    def test_inconsistency_fatal(self):
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo.revisions._index._inconsistency_fatal)
        self.assertFalse(repo.texts._index._inconsistency_fatal)
        self.assertFalse(repo.inventories._index._inconsistency_fatal)
        self.assertFalse(repo.signatures._index._inconsistency_fatal)
        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)


class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):
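    """Exercise which stream source is chosen for pack-to-pack fetches.

    The tests below assert that fetching between two repositories of the
    exact same pack-based format (locally or via the smart server) selects
    pack_repo.KnitPackStreamSource, while a format mismatch or a knit
    target falls back to the generic repository.StreamSource.
    """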

    def test_source_to_exact_pack_092(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='pack-0.92')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_rich_root_pack(self):
        source = self.make_repository('source', format='rich-root-pack')
        target = self.make_repository('target', format='rich-root-pack')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19_rich_root(self):
        source = self.make_repository('source', format='1.9-rich-root')
        target = self.make_repository('target', format='1.9-rich-root')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_remote_exact_pack_19(self):
        trans = self.make_smart_server('target')
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_stream_source_to_non_exact(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='1.9')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), repository.StreamSource)

    def test_stream_source_to_non_exact_rich_root(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9-rich-root')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), repository.StreamSource)

    def test_source_to_remote_non_exact_pack_19(self):
        trans = self.make_smart_server('target')
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.6')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIs(type(stream_source), repository.StreamSource)

    def test_stream_source_to_knit(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='dirstate')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), repository.StreamSource)


class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
    """Tests for _find_parent_ids_of_revisions."""

        self.assertTrue(new_pack.signature_index._optimize_for_size)


class TestGCCHKPackCollection(TestCaseWithTransport):
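    """Stream source selection for the development6-rich-root (CHK) format.

    These mirror the 2a tests above: fetching between two CHK-backed
    repositories should use groupcompress_repo.GroupCHKStreamSource, while
    fetching into a non-CHK format falls back to repository.StreamSource.
    """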

    def test_stream_source_to_gc(self):
        source = self.make_repository('source', format='development6-rich-root')
        target = self.make_repository('target', format='development6-rich-root')
        stream = source._get_source(target._format)
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)

    def test_stream_source_to_non_gc(self):
        source = self.make_repository('source', format='development6-rich-root')
        target = self.make_repository('target', format='rich-root-pack')
        stream = source._get_source(target._format)
        # We don't want the child GroupCHKStreamSource
        self.assertIs(type(stream), repository.StreamSource)

    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
        source_builder = self.make_branch_builder('source',
            format='development6-rich-root')
        # We have to build a fairly large tree, so that we are sure the chk
        # pages will have split into multiple pages.
        entries = [('add', ('', 'a-root-id', 'directory', None))]
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                fname = i + j
                fid = fname + '-id'
                content = 'content for %s\n' % (fname,)
                entries.append(('add', (fname, fid, 'file', content)))
        source_builder.start_series()
        source_builder.build_snapshot('rev-1', None, entries)
        # Now change a few of them, so we get a few new pages for the second
        # revision
        source_builder.build_snapshot('rev-2', ['rev-1'], [
            ('modify', ('aa-id', 'new content for aa-id\n')),
            ('modify', ('cc-id', 'new content for cc-id\n')),
            ('modify', ('zz-id', 'new content for zz-id\n')),
            ])
        source_builder.finish_series()
        source_branch = source_builder.get_branch()
        source_branch.lock_read()
        self.addCleanup(source_branch.unlock)
        target = self.make_repository('target', format='development6-rich-root')
        source = source_branch.repository._get_source(target._format)
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)

        # On a regular pass, getting the inventories and chk pages for rev-2
        # would only get the newly created chk pages
        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
                                    set(['rev-2']))
        simple_chk_records = []
        for vf_name, substream in source.get_stream(search):
            if vf_name == 'chk_bytes':
                for record in substream:
                    simple_chk_records.append(record.key)
            else:
                for _ in substream:
                    continue
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
                         simple_chk_records)
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
        # we should get a much larger set of pages.
        missing = [('inventories', 'rev-2')]
        full_chk_records = []
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
            if vf_name == 'inventories':
                for record in substream:
                    self.assertEqual(('rev-2',), record.key)
            elif vf_name == 'chk_bytes':
                for record in substream:
                    full_chk_records.append(record.key)
            else:
                self.fail('Should not be getting a stream of %s' % (vf_name,))
        # We have 257 records now. This is because we have 1 root page, and 256
        # leaf pages in a complete listing.
        self.assertEqual(257, len(full_chk_records))
        self.assertSubset(simple_chk_records, full_chk_records)


class TestCrossFormatPacks(TestCaseWithTransport):
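    """Check whether cross-format fetches trigger a pack() on the target.

    Both helpers wrap the target repository's pack() so the tests can record
    whether it was called and whether it received a hint: run_stream pushes a
    stream through the target's sink directly, while run_fetch goes through
    Repository.fetch().
    """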

    def log_pack(self, hint=None):
        self.calls.append(('pack', hint))
        self.orig_pack(hint=hint)
        if self.expect_hint:
            self.assertTrue(hint)

    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository._get_source(target._format)
        self.orig_pack = target.pack
        target.pack = self.log_pack
        search = target.search_missing_revision_ids(
            source_tree.branch.repository, tip)
        stream = source.get_stream(search)
        from_format = source_tree.branch.repository._format
        sink = target._get_sink()
        sink.insert_stream(stream, from_format, [])
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository
        self.orig_pack = target.pack
        target.pack = self.log_pack
        target.fetch(source)
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def test_sink_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_stream('1.9', 'rich-root-pack', False)

    def test_sink_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_stream('1.9', '2a', True)

    def test_sink_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_stream('2a', '2a', False)

    def test_IDS_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_fetch('1.9', 'rich-root-pack', False)

    def test_IDS_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_fetch('1.9', '2a', True)

    def test_IDS_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_fetch('2a', '2a', False)