        self.assertFalse(repo._format.supports_external_lookups)


class Test2a(tests.TestCaseWithMemoryTransport):

    def test_fetch_combines_groups(self):
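        # Fetching from one 2a repository into another should recombine the
        # groupcompress groups, so that both versions of the file end up
        # readable from the same place on disk.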
        builder = self.make_branch_builder('source', format='2a')
        builder.start_series()
        builder.build_snapshot('1', None, [
            ('add', ('', 'root-id', 'directory', '')),
            ('add', ('file', 'file-id', 'file', 'content\n'))])
        builder.build_snapshot('2', ['1'], [
            ('modify', ('file-id', 'content-2\n'))])
        builder.finish_series()
        source = builder.get_branch()
        target = self.make_repository('target', format='2a')
        target.fetch(source.repository)
        target.lock_read()
        self.addCleanup(target.unlock)
        details = target.texts._index.get_build_details(
            [('file-id', '1',), ('file-id', '2',)])
        file_1_details = details[('file-id', '1')]
        file_2_details = details[('file-id', '2')]
        # The index, and what to read off disk, should be the same for both
        # versions of the file.
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])

    def test_format_pack_compresses_True(self):
        repo = self.make_repository('repo', format='2a')
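        # 2a is a groupcompress-based format, so repacking it can actually
        # improve compression; the format advertises that.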
        self.assertTrue(repo._format.pack_compresses)

    def test_inventories_use_chk_map_with_parent_base_dict(self):
        tree = self.make_branch_and_memory_tree('repo', format="2a")
        tree.lock_write()
        tree.add([''], ['TREE_ROOT'])
        revid = tree.commit("foo")
        tree.lock_read()
        self.addCleanup(tree.unlock)
        inv = tree.branch.repository.get_inventory(revid)

    def test_autopack_unchanged_chk_nodes(self):
        # at 20 unchanged commits, chk pages are packed that are split into
        # two groups such that the new pack being made doesn't have all its
        # pages in the source packs (though they are in the repository).
        # Use a memory backed repository, we don't need to hit disk for this
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        for pos in range(20):
            tree.commit(str(pos))

    def test_pack_with_hint(self):
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        # 1 commit to leave untouched
        tree.commit('1')
        to_keep = tree.branch.repository._pack_collection.names()

    def test_insert_from_broken_repo(self):
        """Inserting a data stream from a broken repository won't silently
        corrupt the target repository.
        """
        broken_repo = self.make_broken_repository()
        empty_repo = self.make_repository('empty-repo')
        try:
            empty_repo.fetch(broken_repo)
        except (errors.RevisionNotPresent, errors.BzrCheckError):
            # Test successful: compression parent not being copied leads to
            # error.
            return
        empty_repo.lock_read()
        self.addCleanup(empty_repo.unlock)
        text = empty_repo.texts.get_record_stream(
            [('file2-id', 'rev3')], 'topological', True).next()
        self.assertEqual('line\n', text.get_bytes_as('fulltext'))


class TestRepositoryPackCollection(TestCaseWithTransport):

        packs.ensure_loaded()
        return tree, r, packs, [rev1, rev2, rev3]

    def test__clear_obsolete_packs(self):
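        # _clear_obsolete_packs() empties obsolete_packs/ and returns the
        # names of the packs it removed; stray index files without a
        # matching .pack (like not-a-pack.rix) are deleted but not reported.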
        packs = self.get_packs()
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
        res = packs._clear_obsolete_packs()
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
        self.assertEqual([], obsolete_pack_trans.list_dir('.'))

    def test__clear_obsolete_packs_preserve(self):
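        # Packs named in 'preserve' keep their files on disk, though they
        # are still reported in the return value.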
        packs = self.get_packs()
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
                         sorted(obsolete_pack_trans.list_dir('.')))

    def test__max_pack_count(self):
        """The maximum pack count is a function of the number of revisions."""
        # no revisions - one pack, so that we can have a revision free repo
        # check some arbitrary big numbers
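        # (the maximum is the sum of the decimal digits of the revision
        # count, e.g. 112894 -> 1+1+2+8+9+4 = 25)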
        self.assertEqual(25, packs._max_pack_count(112894))

    def test_repr(self):
        packs = self.get_packs()
        self.assertContainsRe(repr(packs),
            'RepositoryPackCollection(.*Repository(.*))')

    def test__obsolete_packs(self):
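        # _obsolete_packs must tolerate files that a concurrent process has
        # already moved into obsolete_packs/.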
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        # Schedule this one for removal
        packs._remove_pack_from_memory(pack)
        # Simulate a concurrent update by renaming the .pack file and one of
        # the indices
        packs.transport.rename('packs/%s.pack' % (names[0],),
                               'obsolete_packs/%s.pack' % (names[0],))
        packs.transport.rename('indices/%s.iix' % (names[0],),
                               'obsolete_packs/%s.iix' % (names[0],))
        # Now trigger the obsoletion, and ensure that all the remaining files
        # are still renamed
        packs._obsolete_packs([pack])
        self.assertEqual([n + '.pack' for n in names[1:]],
                         sorted(packs._pack_transport.list_dir('.')))
        # names[0] should not be present in the index anymore
        self.assertEqual(names[1:],
            sorted(set([osutils.splitext(n)[0] for n in
                        packs._index_transport.list_dir('.')])))

    def test_pack_distribution_zero(self):
        packs = self.get_packs()
        self.assertEqual([0], packs.pack_distribution(0))

        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
        self.assertFalse(packs.reload_pack_names())

    def test_reload_pack_names_preserves_pending(self):
        # TODO: Update this to also test for pending-deleted names
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # We will add one pack (via start_write_group + insert_record_stream),
        # and remove another pack (via _remove_pack_from_memory)
        orig_names = packs.names()
        orig_at_load = packs._packs_at_load
        to_remove_name = iter(orig_names).next()
        r.start_write_group()
        self.addCleanup(r.abort_write_group)
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
            ('text', 'rev'), (), None, 'content\n')])
        new_pack = packs._new_pack
        self.assertTrue(new_pack.data_inserted())
        new_pack.finish()
        packs.allocate(new_pack)
        packs._new_pack = None
        removed_pack = packs.get_pack_by_name(to_remove_name)
        packs._remove_pack_from_memory(removed_pack)
        names = packs.names()
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))
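        # Reloading must preserve both the pending allocation and the
        # pending removal: the same diff should come back after the reload.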
        packs.reload_pack_names()
        reloaded_names = packs.names()
        self.assertEqual(orig_at_load, packs._packs_at_load)
        self.assertEqual(names, reloaded_names)
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))

    def test_autopack_obsoletes_new_pack(self):
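        # Force autopack to always want a single pack; committing the write
        # group should then immediately combine everything, obsoleting the
        # pack that the write group itself just created.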
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        r.start_write_group()
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
            ('bogus-rev',), (), None, 'bogus-content\n')])
        # This should trigger an autopack, which will combine everything into a
        # single pack file.
        new_names = r.commit_write_group()
        names = packs.names()
        self.assertEqual(1, len(names))
        self.assertEqual([names[0] + '.pack'],
                         packs._pack_transport.list_dir('.'))

    def test_autopack_reloads_and_stops(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # After we have determined what needs to be autopacked, trigger a
        self.assertEqual(tree.branch.repository._pack_collection.names(),

    def test__save_pack_names(self):
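        # Saving the pack names with obsolete_packs given should move those
        # packs (and their index files) into obsolete_packs/.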
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        packs._save_pack_names(obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # obsolete_packs will also have stuff like .rix and .iix present.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
        self.assertEqual([pack.name], sorted(obsolete_names))

    def test__save_pack_names_already_obsoleted(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        # We are going to simulate a concurrent autopack by manually obsoleting
        # the pack directly.
        packs._obsolete_packs([pack])
        packs._save_pack_names(clear_obsolete_packs=True,
                               obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # Note that while we set clear_obsolete_packs=True, it should not
        # delete a pack file that we have also scheduled for obsoletion.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
        self.assertEqual([pack.name], sorted(obsolete_names))


class TestPack(TestCaseWithTransport):
    """Tests for the Pack object."""