@@ -673,4 +679,26 @@
         self.assertFalse(repo._format.supports_external_lookups)


-class Test2a(TestCaseWithTransport):
+class Test2a(tests.TestCaseWithMemoryTransport):
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot('1', None, [
+            ('add', ('', 'root-id', 'directory', '')),
+            ('add', ('file', 'file-id', 'file', 'content\n'))])
+        builder.build_snapshot('2', ['1'], [
+            ('modify', ('file-id', 'content-2\n'))])
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [('file-id', '1',), ('file-id', '2',)])
+        file_1_details = details[('file-id', '1')]
+        file_2_details = details[('file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
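Both versions of the file text are fetched into the target, and `get_build_details` then reports, per key, an index memo whose leading elements locate the record's storage. Asserting equality of `[0][:3]` is evidently how the test pins down that the two texts landed in the same group-compress block on disk, i.e. that fetch combined them into one group rather than streaming them as two separate records.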
@@ -677,8 +749,10 @@

     def test_format_pack_compresses_True(self):
         repo = self.make_repository('repo', format='2a')
         self.assertTrue(repo._format.pack_compresses)

     def test_inventories_use_chk_map_with_parent_base_dict(self):
-        tree = self.make_branch_and_tree('repo', format="2a")
+        tree = self.make_branch_and_memory_tree('repo', format="2a")
+        tree.lock_write()
+        tree.add([''], ['TREE_ROOT'])
         revid = tree.commit("foo")

@@ -686,2 +761,2 @@
         self.addCleanup(tree.unlock)
         inv = tree.branch.repository.get_inventory(revid)
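In the 2a format an inventory is stored as a pair of CHK maps, `id_to_entry` and `parent_id_basename_to_file_id`; the test commits one revision and fetches the inventory back so the (elided) assertions can check the parent/basename map. Note what the move to `make_branch_and_memory_tree` costs: a memory-backed tree needs the explicit `lock_write()` and `add([''], ['TREE_ROOT'])` of the tree root, which an on-disk working tree sets up on its own.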
@@ -696,12 +771,19 @@
         # at 20 unchanged commits, chk pages are packed that are split into
         # two groups such that the new pack being made doesn't have all its
         # pages in the source packs (though they are in the repository).
-        tree = self.make_branch_and_tree('tree', format='2a')
+        # Use a memory backed repository, we don't need to hit disk for this
+        tree = self.make_branch_and_memory_tree('tree', format='2a')
+        tree.lock_write()
+        self.addCleanup(tree.unlock)
+        tree.add([''], ['TREE_ROOT'])
         for pos in range(20):
             tree.commit(str(pos))

     def test_pack_with_hint(self):
-        tree = self.make_branch_and_tree('tree', format='2a')
+        tree = self.make_branch_and_memory_tree('tree', format='2a')
+        tree.lock_write()
+        self.addCleanup(tree.unlock)
+        tree.add([''], ['TREE_ROOT'])
         # 1 commit to leave untouched
         tree.commit('1')
         to_keep = tree.branch.repository._pack_collection.names()
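The same conversion is applied here, and the comment added with it states the motivation: these tests never need to hit disk. The `lock_write()` plus `addCleanup(tree.unlock)` pair keeps each memory tree locked for the whole test. The first test then drives twenty commits so that, per the preserved comment above the change, autopack runs and the chk pages end up split across two groups; the second records the pack names before further commits so it can later pack with a hint that leaves the initial pack untouched.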
@@ -1009,10 +1093,13 @@
         broken_repo = self.make_broken_repository()
         empty_repo = self.make_repository('empty-repo')
-        # See bug https://bugs.launchpad.net/bzr/+bug/389141 for information
-        # about why this was turned into expectFailure
-        self.expectFailure('new Stream fetch fills in missing compression'
-            ' parents (bug #389141)',
-            self.assertRaises, (errors.RevisionNotPresent, errors.BzrCheckError),
-            empty_repo.fetch, broken_repo)
-        self.assertRaises((errors.RevisionNotPresent, errors.BzrCheckError),
-            empty_repo.fetch, broken_repo)
+        try:
+            empty_repo.fetch(broken_repo)
+        except (errors.RevisionNotPresent, errors.BzrCheckError):
+            # Test successful: compression parent not being copied leads to
+            # error.
+            return
+        empty_repo.lock_read()
+        self.addCleanup(empty_repo.unlock)
+        text = empty_repo.texts.get_record_stream(
+            [('file2-id', 'rev3')], 'topological', True).next()
+        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
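The deleted comment pointed at bug #389141, under which the new stream fetch fills in missing compression parents; a fetch from the broken repository can therefore now legitimately succeed instead of raising. The rewritten body accepts both outcomes: a RevisionNotPresent or BzrCheckError still counts as a pass (the compression parent was not copied), while on success the test instead verifies that the reconstructed fulltext of ('file2-id', 'rev3') is correct.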
@@ -1021,1 +1108,1 @@
 class TestRepositoryPackCollection(TestCaseWithTransport):
@@ -1046,6 +1133,31 @@
         packs.ensure_loaded()
         return tree, r, packs, [rev1, rev2, rev3]

+    def test__clear_obsolete_packs(self):
+        packs = self.get_packs()
+        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
+        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
+        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
+        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
+        res = packs._clear_obsolete_packs()
+        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
+        self.assertEqual([], obsolete_pack_trans.list_dir('.'))
+
+    def test__clear_obsolete_packs_preserve(self):
+        packs = self.get_packs()
+        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
+        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
+        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
+        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
+        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
+        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
+        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
+                         sorted(obsolete_pack_trans.list_dir('.')))
+
     def test__max_pack_count(self):
         """The maximum pack count is a function of the number of revisions."""
         # no revisions - one pack, so that we can have a revision free repo
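The two new tests pin `_clear_obsolete_packs` down by observation: judging by the assertions, it deletes everything under obsolete_packs/, returns the stem of each *.pack file it saw (the stray not-a-pack.rix is cleaned up but never reported), and with `preserve` it keeps every file belonging to the named packs on disk while still reporting them. A minimal sketch of that inferred contract, not bzrlib's implementation, with `trans` standing in for the obsolete_packs transport:

    def sketch_clear_obsolete_packs(trans, preserve=frozenset()):
        # Inferred behaviour only: report the stem of every *.pack file,
        # and delete any file whose stem is not in `preserve`.
        found = []
        for name in trans.list_dir('.'):
            stem = name.rsplit('.', 1)[0]
            if name.endswith('.pack'):
                found.append(stem)
            if stem not in preserve:
                trans.delete(name)
        return found

Run against the fixtures above, this reproduces both tests' expectations: ['a-pack', 'another-pack'] is returned either way, and with preserve=set(['a-pack']) the three a-pack.* files survive.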
@@ -1071,6 +1183,33 @@
         # check some arbitrary big numbers
         self.assertEqual(25, packs._max_pack_count(112894))

+    def test_repr(self):
+        packs = self.get_packs()
+        self.assertContainsRe(repr(packs),
+            'RepositoryPackCollection(.*Repository(.*))')
+
+    def test__obsolete_packs(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        names = packs.names()
+        pack = packs.get_pack_by_name(names[0])
+        # Schedule this one for removal
+        packs._remove_pack_from_memory(pack)
+        # Simulate a concurrent update by renaming the .pack file and one of
+        # the indices
+        packs.transport.rename('packs/%s.pack' % (names[0],),
+                               'obsolete_packs/%s.pack' % (names[0],))
+        packs.transport.rename('indices/%s.iix' % (names[0],),
+                               'obsolete_packs/%s.iix' % (names[0],))
+        # Now trigger the obsoletion, and ensure that all the remaining files
+        # are still renamed
+        packs._obsolete_packs([pack])
+        self.assertEqual([n + '.pack' for n in names[1:]],
+                         sorted(packs._pack_transport.list_dir('.')))
+        # names[0] should not be present in the index anymore
+        self.assertEqual(names[1:],
+            sorted(set([osutils.splitext(n)[0] for n in
+                        packs._index_transport.list_dir('.')])))
+
     def test_pack_distribution_zero(self):
         packs = self.get_packs()
         self.assertEqual([0], packs.pack_distribution(0))
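`_max_pack_count` is exercised at its boundaries: one pack even for zero revisions (so a revision-free repository is representable), and 25 for 112894 revisions. Those numbers are consistent with the cap being the decimal digit sum of the revision count, which also matches `pack_distribution(0) == [0]` just above. A re-statement of that implied rule, as an assumption rather than a quote of the real method:

    def approx_max_pack_count(total_revisions):
        # Digit-sum heuristic implied by the assertions above; the floor
        # of one keeps a revision-free repository at a single pack.
        if total_revisions == 0:
            return 1
        return sum(int(digit) for digit in str(total_revisions))

    # 112894 -> 1 + 1 + 2 + 8 + 9 + 4 == 25, matching
    # self.assertEqual(25, packs._max_pack_count(112894))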
@@ -1244,6 +1383,60 @@
         self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
         self.assertFalse(packs.reload_pack_names())

+    def test_reload_pack_names_preserves_pending(self):
+        # TODO: Update this to also test for pending-deleted names
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        # We will add one pack (via start_write_group + insert_record_stream),
+        # and remove another pack (via _remove_pack_from_memory)
+        orig_names = packs.names()
+        orig_at_load = packs._packs_at_load
+        to_remove_name = iter(orig_names).next()
+        r.start_write_group()
+        self.addCleanup(r.abort_write_group)
+        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
+            ('text', 'rev'), (), None, 'content\n')])
+        new_pack = packs._new_pack
+        self.assertTrue(new_pack.data_inserted())
+        new_pack.finish()
+        packs.allocate(new_pack)
+        packs._new_pack = None
+        removed_pack = packs.get_pack_by_name(to_remove_name)
+        packs._remove_pack_from_memory(removed_pack)
+        names = packs.names()
+        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
+        new_names = set([x[0][0] for x in new_nodes])
+        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
+        self.assertEqual(set(names) - set(orig_names), new_names)
+        self.assertEqual(set([new_pack.name]), new_names)
+        self.assertEqual([to_remove_name],
+                         sorted([x[0][0] for x in deleted_nodes]))
+        packs.reload_pack_names()
+        reloaded_names = packs.names()
+        self.assertEqual(orig_at_load, packs._packs_at_load)
+        self.assertEqual(names, reloaded_names)
+        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
+        new_names = set([x[0][0] for x in new_nodes])
+        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
+        self.assertEqual(set(names) - set(orig_names), new_names)
+        self.assertEqual(set([new_pack.name]), new_names)
+        self.assertEqual([to_remove_name],
+                         sorted([x[0][0] for x in deleted_nodes]))
+
+    def test_autopack_obsoletes_new_pack(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        packs._max_pack_count = lambda x: 1
+        packs.pack_distribution = lambda x: [10]
+        r.start_write_group()
+        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
+            ('bogus-rev',), (), None, 'bogus-content\n')])
+        # This should trigger an autopack, which will combine everything into a
+        # single pack file.
+        new_names = r.commit_write_group()
+        names = packs.names()
+        self.assertEqual(1, len(names))
+        self.assertEqual([names[0] + '.pack'],
+                         packs._pack_transport.list_dir('.'))
+
     def test_autopack_reloads_and_stops(self):
         tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
         # After we have determined what needs to be autopacked, trigger a
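`test_reload_pack_names_preserves_pending` doubles as a specification of `_diff_pack_names()`: it apparently returns `(all_nodes, deleted_nodes, new_nodes, _)` where each node is keyed so that `x[0][0]` yields the pack name, and the identical block of assertions before and after `reload_pack_names()` nails down that a reload must neither drop the pack pending allocation nor resurrect the one pending removal. The companion test forces `_max_pack_count` to one so that committing the write group autopacks everything into a single pack file, obsoleting even the pack the write group itself just created.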
@@ -1261,3 +1454,33 @@
         self.assertEqual(tree.branch.repository._pack_collection.names(),
                          packs.names())

+    def test__save_pack_names(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        names = packs.names()
+        pack = packs.get_pack_by_name(names[0])
+        packs._remove_pack_from_memory(pack)
+        packs._save_pack_names(obsolete_packs=[pack])
+        cur_packs = packs._pack_transport.list_dir('.')
+        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
+        # obsolete_packs will also have stuff like .rix and .iix present.
+        obsolete_packs = packs.transport.list_dir('obsolete_packs')
+        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
+        self.assertEqual([pack.name], sorted(obsolete_names))
+
+    def test__save_pack_names_already_obsoleted(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        names = packs.names()
+        pack = packs.get_pack_by_name(names[0])
+        packs._remove_pack_from_memory(pack)
+        # We are going to simulate a concurrent autopack by manually obsoleting
+        # the pack directly.
+        packs._obsolete_packs([pack])
+        packs._save_pack_names(clear_obsolete_packs=True,
+                               obsolete_packs=[pack])
+        cur_packs = packs._pack_transport.list_dir('.')
+        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
+        # Note that while we set clear_obsolete_packs=True, it should not
+        # delete a pack file that we have also scheduled for obsoletion.
+        obsolete_packs = packs.transport.list_dir('obsolete_packs')
+        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
+        self.assertEqual([pack.name], sorted(obsolete_names))
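The `_save_pack_names` pair leans on its in-test comments for the contract: an obsoleted pack is moved out of the live pack directory into obsolete_packs/ together with its .rix and .iix indices, and, per the second test, `clear_obsolete_packs=True` must not delete a pack file that the same call is itself scheduling for obsoletion, since a concurrent autopack may already have moved that file there.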
@@ -1265,2 +1490,2 @@
 class TestPack(TestCaseWithTransport):
     """Tests for the Pack object."""