716
719
self.assertFalse(combine[1] in final)
717
720
self.assertSubset(to_keep, final)
def test_stream_source_to_gc(self):
    """Fetching 2a->2a should pick the GC-optimised stream source."""
    source = self.make_repository('source', format='2a')
    target = self.make_repository('target', format='2a')
    stream = source._get_source(target._format)
    self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)
def test_stream_source_to_non_gc(self):
    """Fetching 2a->non-gc must fall back to the generic stream source."""
    source = self.make_repository('source', format='2a')
    target = self.make_repository('target', format='rich-root-pack')
    stream = source._get_source(target._format)
    # We don't want the child GroupCHKStreamSource
    self.assertIs(type(stream), repository.StreamSource)
def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
    """get_stream_for_missing_keys must return every chk page an
    inventory references, not just the pages new in that revision.
    """
    source_builder = self.make_branch_builder('source',
                        format='2a')
    # We have to build a fairly large tree, so that we are sure the chk
    # pages will have split into multiple pages.
    entries = [('add', ('', 'a-root-id', 'directory', None))]
    for i in 'abcdefghijklmnopqrstuvwxyz123456789':
        for j in 'abcdefghijklmnopqrstuvwxyz123456789':
            # NOTE(review): these two assignments were dropped by the
            # extraction; reconstructed from the uses on the next lines.
            fname = i + j
            fid = fname + '-id'
            content = 'content for %s\n' % (fname,)
            entries.append(('add', (fname, fid, 'file', content)))
    source_builder.start_series()
    source_builder.build_snapshot('rev-1', None, entries)
    # Now change a few of them, so we get a few new pages for the second
    # revision
    source_builder.build_snapshot('rev-2', ['rev-1'], [
        ('modify', ('aa-id', 'new content for aa-id\n')),
        ('modify', ('cc-id', 'new content for cc-id\n')),
        ('modify', ('zz-id', 'new content for zz-id\n')),
        ])
    source_builder.finish_series()
    source_branch = source_builder.get_branch()
    source_branch.lock_read()
    self.addCleanup(source_branch.unlock)
    target = self.make_repository('target', format='2a')
    source = source_branch.repository._get_source(target._format)
    self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)

    # On a regular pass, getting the inventories and chk pages for rev-2
    # would only get the newly created chk pages
    search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
                                set(['rev-2']))
    simple_chk_records = []
    for vf_name, substream in source.get_stream(search):
        if vf_name == 'chk_bytes':
            for record in substream:
                simple_chk_records.append(record.key)
        else:
            # Drain the other substreams so the stream completes.
            for _ in substream:
                continue
    # 3 pages, the root (InternalNode), + 2 pages which actually changed
    self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
                      ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
                      ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
                      ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
                     simple_chk_records)
    # Now, when we do a similar call using 'get_stream_for_missing_keys'
    # we should get a much larger set of pages.
    missing = [('inventories', 'rev-2')]
    full_chk_records = []
    for vf_name, substream in source.get_stream_for_missing_keys(missing):
        if vf_name == 'inventories':
            for record in substream:
                self.assertEqual(('rev-2',), record.key)
        elif vf_name == 'chk_bytes':
            for record in substream:
                full_chk_records.append(record.key)
        else:
            self.fail('Should not be getting a stream of %s' % (vf_name,))
    # We have 257 records now. This is because we have 1 root page, and 256
    # leaf pages in a complete listing.
    self.assertEqual(257, len(full_chk_records))
    self.assertSubset(simple_chk_records, full_chk_records)
def test_inconsistency_fatal(self):
    """Only the revisions index treats an inconsistency as fatal in 2a."""
    repo = self.make_repository('repo', format='2a')
    self.assertTrue(repo.revisions._index._inconsistency_fatal)
    self.assertFalse(repo.texts._index._inconsistency_fatal)
    self.assertFalse(repo.inventories._index._inconsistency_fatal)
    self.assertFalse(repo.signatures._index._inconsistency_fatal)
    self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):
    """Check which stream source is selected for knit-pack format pairs.

    KnitPackStreamSource is only valid when source and target formats match
    exactly; everything else should get the generic StreamSource.
    """

    def test_source_to_exact_pack_092(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='pack-0.92')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_rich_root_pack(self):
        source = self.make_repository('source', format='rich-root-pack')
        target = self.make_repository('target', format='rich-root-pack')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19_rich_root(self):
        source = self.make_repository('source', format='1.9-rich-root')
        target = self.make_repository('target', format='1.9-rich-root')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_source_to_remote_exact_pack_19(self):
        trans = self.make_smart_server('target')
        # NOTE(review): the line between make_smart_server and
        # make_repository was dropped by the extraction; reconstructed as
        # the conventional transport setup call.
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, pack_repo.KnitPackStreamSource)

    def test_stream_source_to_non_exact(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='1.9')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), repository.StreamSource)

    def test_stream_source_to_non_exact_rich_root(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9-rich-root')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), repository.StreamSource)

    def test_source_to_remote_non_exact_pack_19(self):
        trans = self.make_smart_server('target')
        # NOTE(review): reconstructed dropped line, as above.
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.6')
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIs(type(stream_source), repository.StreamSource)

    def test_stream_source_to_knit(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='dirstate')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), repository.StreamSource)
720
872
class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
721
873
"""Tests for _find_parent_ids_of_revisions."""
1237
1395
self.assertTrue(new_pack.signature_index._optimize_for_size)
class TestGCCHKPackCollection(TestCaseWithTransport):
    """Stream-source selection and chk-page streaming for the
    development6-rich-root (groupcompress/CHK) format.
    """

    def test_stream_source_to_gc(self):
        source = self.make_repository('source', format='development6-rich-root')
        target = self.make_repository('target', format='development6-rich-root')
        stream = source._get_source(target._format)
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)

    def test_stream_source_to_non_gc(self):
        source = self.make_repository('source', format='development6-rich-root')
        target = self.make_repository('target', format='rich-root-pack')
        stream = source._get_source(target._format)
        # We don't want the child GroupCHKStreamSource
        self.assertIs(type(stream), repository.StreamSource)

    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
        source_builder = self.make_branch_builder('source',
                            format='development6-rich-root')
        # We have to build a fairly large tree, so that we are sure the chk
        # pages will have split into multiple pages.
        entries = [('add', ('', 'a-root-id', 'directory', None))]
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                # NOTE(review): these two assignments were dropped by the
                # extraction; reconstructed from the uses below.
                fname = i + j
                fid = fname + '-id'
                content = 'content for %s\n' % (fname,)
                entries.append(('add', (fname, fid, 'file', content)))
        source_builder.start_series()
        source_builder.build_snapshot('rev-1', None, entries)
        # Now change a few of them, so we get a few new pages for the second
        # revision
        source_builder.build_snapshot('rev-2', ['rev-1'], [
            ('modify', ('aa-id', 'new content for aa-id\n')),
            ('modify', ('cc-id', 'new content for cc-id\n')),
            ('modify', ('zz-id', 'new content for zz-id\n')),
            ])
        source_builder.finish_series()
        source_branch = source_builder.get_branch()
        source_branch.lock_read()
        self.addCleanup(source_branch.unlock)
        target = self.make_repository('target', format='development6-rich-root')
        source = source_branch.repository._get_source(target._format)
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)

        # On a regular pass, getting the inventories and chk pages for rev-2
        # would only get the newly created chk pages
        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
                                    set(['rev-2']))
        simple_chk_records = []
        for vf_name, substream in source.get_stream(search):
            if vf_name == 'chk_bytes':
                for record in substream:
                    simple_chk_records.append(record.key)
            else:
                # Drain the other substreams so the stream completes.
                for _ in substream:
                    continue
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
                         simple_chk_records)
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
        # we should get a much larger set of pages.
        missing = [('inventories', 'rev-2')]
        full_chk_records = []
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
            if vf_name == 'inventories':
                for record in substream:
                    self.assertEqual(('rev-2',), record.key)
            elif vf_name == 'chk_bytes':
                for record in substream:
                    full_chk_records.append(record.key)
            else:
                self.fail('Should not be getting a stream of %s' % (vf_name,))
        # We have 257 records now. This is because we have 1 root page, and
        # 256 leaf pages in a complete listing.
        self.assertEqual(257, len(full_chk_records))
        self.assertSubset(simple_chk_records, full_chk_records)
1321
1398
class TestCrossFormatPacks(TestCaseWithTransport):
1323
1400
def log_pack(self, hint=None):