        self.assertFalse(repo._format.supports_external_lookups)


class Test2a(tests.TestCaseWithMemoryTransport):
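    """Tests for the 2a repository format."""
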
def test_fetch_combines_groups(self):
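        """Fetched file texts should end up sharing one compression group."""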
        builder = self.make_branch_builder('source', format='2a')
        builder.start_series()
        builder.build_snapshot('1', None, [
            ('add', ('', 'root-id', 'directory', '')),
            ('add', ('file', 'file-id', 'file', 'content\n'))])
        builder.build_snapshot('2', ['1'], [
            ('modify', ('file-id', 'content-2\n'))])
        builder.finish_series()
        source = builder.get_branch()
        target = self.make_repository('target', format='2a')
        target.fetch(source.repository)
        target.lock_read()
        self.addCleanup(target.unlock)
        details = target.texts._index.get_build_details(
            [('file-id', '1',), ('file-id', '2',)])
        file_1_details = details[('file-id', '1')]
        file_2_details = details[('file-id', '2')]
        # The index, and what to read off disk, should be the same for both
        # versions of the file.
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])

    def test_format_pack_compresses_True(self):
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo._format.pack_compresses)

def test_inventories_use_chk_map_with_parent_base_dict(self):
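        """2a inventories use CHK maps, including a parent_id+basename index."""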
        tree = self.make_branch_and_memory_tree('repo', format="2a")
        tree.lock_write()
        tree.add([''], ['TREE_ROOT'])
        revid = tree.commit("foo")
        tree.unlock()
        tree.lock_read()
        self.addCleanup(tree.unlock)
        inv = tree.branch.repository.get_inventory(revid)

    def test_autopack_unchanged_chk_nodes(self):
        # at 20 unchanged commits, chk pages are packed that are split into
        # two groups such that the new pack being made doesn't have all its
        # pages in the source packs (though they are in the repository).
        # Use a memory backed repository, we don't need to hit disk for this
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        for pos in range(20):
            tree.commit(str(pos))

def test_pack_with_hint(self):
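        """pack() accepts a hint of which pack names to repack."""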
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        # 1 commit to leave untouched
        tree.commit('1')
        to_keep = tree.branch.repository._pack_collection.names()

        packs.ensure_loaded()
        return tree, r, packs, [rev1, rev2, rev3]

def test__clear_obsolete_packs(self):
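        """_clear_obsolete_packs() removes everything in obsolete_packs."""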
        packs = self.get_packs()
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
        res = packs._clear_obsolete_packs()
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
        self.assertEqual([], obsolete_pack_trans.list_dir('.'))

def test__clear_obsolete_packs_preserve(self):
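        """Packs named in 'preserve' survive _clear_obsolete_packs()."""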
        packs = self.get_packs()
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
                         sorted(obsolete_pack_trans.list_dir('.')))

def test__max_pack_count(self):
        """The maximum pack count is a function of the number of revisions."""
        # no revisions - one pack, so that we can have a revision free repo
        # check some arbitrary big numbers
        self.assertEqual(25, packs._max_pack_count(112894))

def test_repr(self):
        packs = self.get_packs()
        self.assertContainsRe(repr(packs),
            'RepositoryPackCollection(.*Repository(.*))')

def test__obsolete_packs(self):
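        """_obsolete_packs() moves any remaining pack and index files aside."""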
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        # Schedule this one for removal
        packs._remove_pack_from_memory(pack)
        # Simulate a concurrent update by renaming the .pack file and one of
        # the indices
        packs.transport.rename('packs/%s.pack' % (names[0],),
                               'obsolete_packs/%s.pack' % (names[0],))
        packs.transport.rename('indices/%s.iix' % (names[0],),
                               'obsolete_packs/%s.iix' % (names[0],))
        # Now trigger the obsoletion, and ensure that all the remaining files
        # are still renamed
        packs._obsolete_packs([pack])
        self.assertEqual([n + '.pack' for n in names[1:]],
                         sorted(packs._pack_transport.list_dir('.')))
        # names[0] should not be present in the index anymore
        self.assertEqual(names[1:],
            sorted(set([osutils.splitext(n)[0] for n in
                        packs._index_transport.list_dir('.')])))

def test_pack_distribution_zero(self):
        packs = self.get_packs()
        self.assertEqual([0], packs.pack_distribution(0))

        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
        self.assertFalse(packs.reload_pack_names())

def test_reload_pack_names_preserves_pending(self):
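        """reload_pack_names() keeps pending pack additions and removals."""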
        # TODO: Update this to also test for pending-deleted names
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # We will add one pack (via start_write_group + insert_record_stream),
        # and remove another pack (via _remove_pack_from_memory)
        orig_names = packs.names()
        orig_at_load = packs._packs_at_load
        to_remove_name = iter(orig_names).next()
        r.start_write_group()
        self.addCleanup(r.abort_write_group)
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
            ('text', 'rev'), (), None, 'content\n')])
        new_pack = packs._new_pack
        self.assertTrue(new_pack.data_inserted())
        new_pack.finish()
        packs.allocate(new_pack)
        packs._new_pack = None
        removed_pack = packs.get_pack_by_name(to_remove_name)
        packs._remove_pack_from_memory(removed_pack)
        names = packs.names()
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))
        packs.reload_pack_names()
        reloaded_names = packs.names()
        self.assertEqual(orig_at_load, packs._packs_at_load)
        self.assertEqual(names, reloaded_names)
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))

def test_autopack_obsoletes_new_pack(self):
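        """An autopack during commit_write_group leaves a single pack file."""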
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        r.start_write_group()
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
            ('bogus-rev',), (), None, 'bogus-content\n')])
        # This should trigger an autopack, which will combine everything into a
        # single pack file.
        new_names = r.commit_write_group()
        names = packs.names()
        self.assertEqual(1, len(names))
        self.assertEqual([names[0] + '.pack'],
                         packs._pack_transport.list_dir('.'))

def test_autopack_reloads_and_stops(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # After we have determined what needs to be autopacked, trigger a
        # concurrent repack, so that autopack notices the change and stops.
        self.assertEqual(tree.branch.repository._pack_collection.names(),
                         packs.names())

def test__save_pack_names(self):
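        """_save_pack_names() leaves removed packs under obsolete_packs."""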
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        packs._save_pack_names(obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # obsolete_packs will also have stuff like .rix and .iix present.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
        self.assertEqual([pack.name], sorted(obsolete_names))

def test__save_pack_names_already_obsoleted(self):
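        """Saving pack names copes with a pack that was already obsoleted."""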
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        # We are going to simulate a concurrent autopack by manually obsoleting
        # the pack directly.
        packs._obsolete_packs([pack])
        packs._save_pack_names(clear_obsolete_packs=True,
                               obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # Note that while we set clear_obsolete_packs=True, it should not
        # delete a pack file that we have also scheduled for obsoletion.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
        self.assertEqual([pack.name], sorted(obsolete_names))


class TestPack(TestCaseWithTransport):
"""Tests for the Pack object."""