680
673
self.assertFalse(repo._format.supports_external_lookups)
683
class Test2a(tests.TestCaseWithMemoryTransport):

    def test_chk_bytes_uses_custom_btree_parser(self):
        """2a repositories use the gc-optimized leaf factory for chk_bytes.

        Checks both the freshly created repository and a re-opened one.
        """
        mt = self.make_branch_and_memory_tree('test', format='2a')
        # Restored: the tree must be write-locked before scheduling the
        # matching unlock cleanup, otherwise the cleanup itself errors.
        mt.lock_write()
        self.addCleanup(mt.unlock)
        mt.add([''], ['root-id'])
        # Restored: a commit is needed so a pack (and hence a chk index
        # in _indices) actually exists to inspect.
        mt.commit('first')
        index = mt.branch.repository.chk_bytes._index._graph_index._indices[0]
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
        # It should also work if we re-open the repo
        repo = mt.branch.repository.bzrdir.open_repository()
        # Restored: lock before scheduling the unlock cleanup.
        repo.lock_read()
        self.addCleanup(repo.unlock)
        index = repo.chk_bytes._index._graph_index._indices[0]
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
700
def test_fetch_combines_groups(self):
    """Fetching into a 2a repository stores both text versions in one group.

    After the fetch, the index details (group location on disk) for the two
    versions of 'file' must be identical, proving they were combined.
    """
    builder = self.make_branch_builder('source', format='2a')
    builder.start_series()
    builder.build_snapshot('1', None, [
        ('add', ('', 'root-id', 'directory', '')),
        ('add', ('file', 'file-id', 'file', 'content\n'))])
    builder.build_snapshot('2', ['1'], [
        ('modify', ('file-id', 'content-2\n'))])
    builder.finish_series()
    source = builder.get_branch()
    target = self.make_repository('target', format='2a')
    target.fetch(source.repository)
    # Restored: the repository must be read-locked before the unlock
    # cleanup is scheduled (and before reading the texts index).
    target.lock_read()
    self.addCleanup(target.unlock)
    details = target.texts._index.get_build_details(
        [('file-id', '1',), ('file-id', '2',)])
    file_1_details = details[('file-id', '1')]
    file_2_details = details[('file-id', '2')]
    # The index, and what to read off disk, should be the same for both
    # versions of the file.
    self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
722
def test_fetch_combines_groups(self):
    """Fetching into a 2a repository stores both text versions in one group.

    NOTE(review): this method is defined multiple times in this file; only
    the last definition is bound, the earlier ones are shadowed.
    """
    builder = self.make_branch_builder('source', format='2a')
    builder.start_series()
    builder.build_snapshot('1', None, [
        ('add', ('', 'root-id', 'directory', '')),
        ('add', ('file', 'file-id', 'file', 'content\n'))])
    builder.build_snapshot('2', ['1'], [
        ('modify', ('file-id', 'content-2\n'))])
    builder.finish_series()
    source = builder.get_branch()
    target = self.make_repository('target', format='2a')
    target.fetch(source.repository)
    # Restored: the repository must be read-locked before the unlock
    # cleanup is scheduled (and before reading the texts index).
    target.lock_read()
    self.addCleanup(target.unlock)
    details = target.texts._index.get_build_details(
        [('file-id', '1',), ('file-id', '2',)])
    file_1_details = details[('file-id', '1')]
    file_2_details = details[('file-id', '2')]
    # The index, and what to read off disk, should be the same for both
    # versions of the file.
    self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
744
def test_fetch_combines_groups(self):
    """Fetching into a 2a repository stores both text versions in one group.

    NOTE(review): this method is defined multiple times in this file; only
    the last definition is bound, the earlier ones are shadowed.
    """
    builder = self.make_branch_builder('source', format='2a')
    builder.start_series()
    builder.build_snapshot('1', None, [
        ('add', ('', 'root-id', 'directory', '')),
        ('add', ('file', 'file-id', 'file', 'content\n'))])
    builder.build_snapshot('2', ['1'], [
        ('modify', ('file-id', 'content-2\n'))])
    builder.finish_series()
    source = builder.get_branch()
    target = self.make_repository('target', format='2a')
    target.fetch(source.repository)
    # Restored: the repository must be read-locked before the unlock
    # cleanup is scheduled (and before reading the texts index).
    target.lock_read()
    self.addCleanup(target.unlock)
    details = target.texts._index.get_build_details(
        [('file-id', '1',), ('file-id', '2',)])
    file_1_details = details[('file-id', '1')]
    file_2_details = details[('file-id', '2')]
    # The index, and what to read off disk, should be the same for both
    # versions of the file.
    self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
676
class Test2a(TestCaseWithTransport):
766
678
def test_format_pack_compresses_True(self):
    """A 2a-format repository advertises pack compression support."""
    repository = self.make_repository('repo', format='2a')
    compresses = repository._format.pack_compresses
    self.assertTrue(compresses)
770
682
def test_inventories_use_chk_map_with_parent_base_dict(self):
771
tree = self.make_branch_and_memory_tree('repo', format="2a")
773
tree.add([''], ['TREE_ROOT'])
683
tree = self.make_branch_and_tree('repo', format="2a")
774
684
revid = tree.commit("foo")
777
686
self.addCleanup(tree.unlock)
778
687
inv = tree.branch.repository.get_inventory(revid)
1109
1009
broken_repo = self.make_broken_repository()
1110
1010
empty_repo = self.make_repository('empty-repo')
1112
empty_repo.fetch(broken_repo)
1113
except (errors.RevisionNotPresent, errors.BzrCheckError):
1114
# Test successful: compression parent not being copied leads to
1117
empty_repo.lock_read()
1118
self.addCleanup(empty_repo.unlock)
1119
text = empty_repo.texts.get_record_stream(
1120
[('file2-id', 'rev3')], 'topological', True).next()
1121
self.assertEqual('line\n', text.get_bytes_as('fulltext'))
1011
# See bug https://bugs.launchpad.net/bzr/+bug/389141 for information
1012
# about why this was turned into expectFailure
1013
self.expectFailure('new Stream fetch fills in missing compression'
1014
' parents (bug #389141)',
1015
self.assertRaises, (errors.RevisionNotPresent, errors.BzrCheckError),
1016
empty_repo.fetch, broken_repo)
1017
self.assertRaises((errors.RevisionNotPresent, errors.BzrCheckError),
1018
empty_repo.fetch, broken_repo)
1124
1021
class TestRepositoryPackCollection(TestCaseWithTransport):
1149
1046
packs.ensure_loaded()
1150
1047
return tree, r, packs, [rev1, rev2, rev3]
1152
def test__clear_obsolete_packs(self):
    """_clear_obsolete_packs deletes everything and reports the pack stems."""
    collection = self.get_packs()
    obsolete_trans = collection.transport.clone('obsolete_packs')
    # Seed obsolete_packs with two packs (one with index files) plus a
    # stray index that has no matching .pack file.
    fixture = [('a-pack.pack', 'content\n'),
               ('a-pack.rix', 'content\n'),
               ('a-pack.iix', 'content\n'),
               ('another-pack.pack', 'foo\n'),
               ('not-a-pack.rix', 'foo\n')]
    for fname, payload in fixture:
        obsolete_trans.put_bytes(fname, payload)
    cleared = collection._clear_obsolete_packs()
    # Only names that had a .pack file are reported as packs...
    self.assertEqual(['a-pack', 'another-pack'], sorted(cleared))
    # ...but every file, including the stray index, is removed.
    self.assertEqual([], obsolete_trans.list_dir('.'))
1164
def test__clear_obsolete_packs_preserve(self):
    """Packs named in 'preserve' keep all their files during a clear."""
    collection = self.get_packs()
    obsolete_trans = collection.transport.clone('obsolete_packs')
    # Same fixture as test__clear_obsolete_packs: two packs plus a stray
    # index file with no .pack.
    fixture = [('a-pack.pack', 'content\n'),
               ('a-pack.rix', 'content\n'),
               ('a-pack.iix', 'content\n'),
               ('another-pack.pack', 'foo\n'),
               ('not-a-pack.rix', 'foo\n')]
    for fname, payload in fixture:
        obsolete_trans.put_bytes(fname, payload)
    cleared = collection._clear_obsolete_packs(preserve=set(['a-pack']))
    # Both packs are still reported...
    self.assertEqual(['a-pack', 'another-pack'], sorted(cleared))
    # ...but the preserved pack keeps its .pack and both index files.
    self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
                     sorted(obsolete_trans.list_dir('.')))
1177
1049
def test__max_pack_count(self):
1178
1050
"""The maximum pack count is a function of the number of revisions."""
1179
1051
# no revisions - one pack, so that we can have a revision free repo
1199
1071
# check some arbitrary big numbers
1200
1072
self.assertEqual(25, packs._max_pack_count(112894))
1202
def test_repr(self):
    """repr() of a pack collection names the collection and its repository."""
    packs = self.get_packs()
    # The parentheses are regex metacharacters: unescaped, they form groups
    # and the pattern matches any repr containing the two class names.
    # Escape them so the literal 'Collection(...Repository(...))' shape is
    # actually asserted.
    self.assertContainsRe(repr(packs),
        'RepositoryPackCollection\(.*Repository\(.*\)\)')
1207
def test__obsolete_packs(self):
    """_obsolete_packs moves a pack's files into obsolete_packs, tolerating
    files that a concurrent process has already moved."""
    tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
    names = packs.names()
    pack = packs.get_pack_by_name(names[0])
    # Schedule this one for removal
    packs._remove_pack_from_memory(pack)
    # Simulate a concurrent update by renaming the .pack file and one of
    # the index files into obsolete_packs ahead of time.
    packs.transport.rename('packs/%s.pack' % (names[0],),
        'obsolete_packs/%s.pack' % (names[0],))
    packs.transport.rename('indices/%s.iix' % (names[0],),
        'obsolete_packs/%s.iix' % (names[0],))
    # Now trigger the obsoletion, and ensure that all the remaining files
    # are moved out of the live pack/index directories despite the renames
    # that already happened (no error, no leftovers).
    packs._obsolete_packs([pack])
    self.assertEqual([n + '.pack' for n in names[1:]],
        sorted(packs._pack_transport.list_dir('.')))
    # names[0] should not be present in the index anymore
    self.assertEqual(names[1:],
        sorted(set([osutils.splitext(n)[0] for n in
        packs._index_transport.list_dir('.')])))
1229
1074
def test_pack_distribution_zero(self):
    """Zero revisions distribute into a single empty bucket."""
    collection = self.get_packs()
    distribution = collection.pack_distribution(0)
    self.assertEqual([0], distribution)
1399
1244
self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
1400
1245
self.assertFalse(packs.reload_pack_names())
1402
def test_reload_pack_names_preserves_pending(self):
    """reload_pack_names keeps in-memory pending adds and removes.

    One pack is added (via start_write_group + insert_record_stream) and
    another removed (via _remove_pack_from_memory); reloading the on-disk
    pack names must not discard either pending change.
    """
    # TODO: Update this to also test for pending-deleted names
    tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
    # We will add one pack (via start_write_group + insert_record_stream),
    # and remove another pack (via _remove_pack_from_memory)
    orig_names = packs.names()
    orig_at_load = packs._packs_at_load
    to_remove_name = iter(orig_names).next()
    r.start_write_group()
    self.addCleanup(r.abort_write_group)
    r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
        ('text', 'rev'), (), None, 'content\n')])
    new_pack = packs._new_pack
    self.assertTrue(new_pack.data_inserted())
    # Restored: the pack must be finished (its indices written out) before
    # it can be allocated into the collection.
    new_pack.finish()
    packs.allocate(new_pack)
    packs._new_pack = None
    removed_pack = packs.get_pack_by_name(to_remove_name)
    packs._remove_pack_from_memory(removed_pack)
    names = packs.names()
    all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
    new_names = set([x[0][0] for x in new_nodes])
    self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
    self.assertEqual(set(names) - set(orig_names), new_names)
    self.assertEqual(set([new_pack.name]), new_names)
    self.assertEqual([to_remove_name],
                     sorted([x[0][0] for x in deleted_nodes]))
    # Reloading from disk must not clobber the pending state checked above.
    packs.reload_pack_names()
    reloaded_names = packs.names()
    self.assertEqual(orig_at_load, packs._packs_at_load)
    self.assertEqual(names, reloaded_names)
    all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
    new_names = set([x[0][0] for x in new_nodes])
    self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
    self.assertEqual(set(names) - set(orig_names), new_names)
    self.assertEqual(set([new_pack.name]), new_names)
    self.assertEqual([to_remove_name],
                     sorted([x[0][0] for x in deleted_nodes]))
1441
def test_autopack_obsoletes_new_pack(self):
    """An autopack fired by commit_write_group folds the freshly created
    pack into the single combined pack (no stray files left behind)."""
    tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
    # Force the next commit to autopack everything into one pack.
    packs._max_pack_count = lambda x: 1
    packs.pack_distribution = lambda x: [10]
    r.start_write_group()
    r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
        ('bogus-rev',), (), None, 'bogus-content\n')])
    # This should trigger an autopack, which will combine everything into a
    # single pack file.
    new_names = r.commit_write_group()
    names = packs.names()
    self.assertEqual(1, len(names))
    self.assertEqual([names[0] + '.pack'],
                     packs._pack_transport.list_dir('.'))
1456
1247
def test_autopack_reloads_and_stops(self):
1457
1248
tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1458
1249
# After we have determined what needs to be autopacked, trigger a
1470
1261
self.assertEqual(tree.branch.repository._pack_collection.names(),
1473
def test__save_pack_names(self):
    """Saving pack names with obsolete_packs moves those packs aside."""
    tree, r, collection, revs = self.make_packs_and_alt_repo(write_lock=True)
    original_names = collection.names()
    victim = collection.get_pack_by_name(original_names[0])
    collection._remove_pack_from_memory(victim)
    collection._save_pack_names(obsolete_packs=[victim])
    # The live pack directory only holds the surviving packs.
    live_files = collection._pack_transport.list_dir('.')
    expected_live = [n + '.pack' for n in original_names[1:]]
    self.assertEqual(expected_live, sorted(live_files))
    # obsolete_packs will also have stuff like .rix and .iix present.
    moved_files = collection.transport.list_dir('obsolete_packs')
    moved_names = set([osutils.splitext(n)[0] for n in moved_files])
    self.assertEqual([victim.name], sorted(moved_names))
1486
def test__save_pack_names_already_obsoleted(self):
    """clear_obsolete_packs must not delete a pack being obsoleted now."""
    tree, r, collection, revs = self.make_packs_and_alt_repo(write_lock=True)
    original_names = collection.names()
    victim = collection.get_pack_by_name(original_names[0])
    collection._remove_pack_from_memory(victim)
    # We are going to simulate a concurrent autopack by manually obsoleting
    # the pack directly.
    collection._obsolete_packs([victim])
    collection._save_pack_names(clear_obsolete_packs=True,
                                obsolete_packs=[victim])
    live_files = collection._pack_transport.list_dir('.')
    expected_live = [n + '.pack' for n in original_names[1:]]
    self.assertEqual(expected_live, sorted(live_files))
    # Note that while we set clear_obsolete_packs=True, it should not
    # delete a pack file that we have also scheduled for obsoletion.
    moved_files = collection.transport.list_dir('obsolete_packs')
    moved_names = set([osutils.splitext(n)[0] for n in moved_files])
    self.assertEqual([victim.name], sorted(moved_names))
1506
1265
class TestPack(TestCaseWithTransport):
1507
1266
"""Tests for the Pack object."""