38
from bzrlib.branchbuilder import BranchBuilder
37
39
from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
38
from bzrlib.index import GraphIndex
40
from bzrlib.index import GraphIndex, InMemoryGraphIndex
39
41
from bzrlib.repository import RepositoryFormat
42
from bzrlib.smart import server
40
43
from bzrlib.tests import (
42
45
TestCaseWithTransport,
44
49
from bzrlib.transport import (
53
from bzrlib.transport.memory import MemoryServer
47
54
from bzrlib import (
53
62
revision as _mod_revision,
58
67
from bzrlib.repofmt import (
466
468
# Arguably, the deserialise_inventory should detect a mismatch, and
467
469
# raise an error, rather than silently using one revision_id over the
469
self.assertRaises(AssertionError, repo._deserialise_inventory,
471
self.assertRaises(AssertionError, repo.deserialise_inventory,
470
472
'test-rev-id', inv_xml)
471
inv = repo._deserialise_inventory('other-rev-id', inv_xml)
473
inv = repo.deserialise_inventory('other-rev-id', inv_xml)
472
474
self.assertEqual('other-rev-id', inv.root.revision)
474
476
def test_supports_external_lookups(self):
725
727
# versions of the file.
726
728
self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
728
def test_fetch_combines_groups(self):
    """Fetching between 2a repositories combines compression groups.

    After fetching, both versions of the file text should have identical
    index entries and on-disk read details in the target.
    """
    builder = self.make_branch_builder('source', format='2a')
    builder.start_series()
    builder.build_snapshot('1', None, [
        ('add', ('', 'root-id', 'directory', '')),
        ('add', ('file', 'file-id', 'file', 'content\n'))])
    builder.build_snapshot('2', ['1'], [
        ('modify', ('file-id', 'content-2\n'))])
    builder.finish_series()
    source = builder.get_branch()
    target = self.make_repository('target', format='2a')
    target.fetch(source.repository)
    # The repository must be read-locked before poking at its texts
    # index; without this the addCleanup unlock has nothing to release.
    target.lock_read()
    self.addCleanup(target.unlock)
    details = target.texts._index.get_build_details(
        [('file-id', '1',), ('file-id', '2',)])
    file_1_details = details[('file-id', '1')]
    file_2_details = details[('file-id', '2')]
    # The index, and what to read off disk, should be the same for both
    # versions of the file.
    self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
750
730
def test_format_pack_compresses_True(self):
    """The 2a repository format reports that it compresses packs."""
    made_repo = self.make_repository('repo', format='2a')
    self.assertTrue(made_repo._format.pack_compresses)
1028
1008
inv = inventory.Inventory(revision_id='rev1a')
1029
1009
inv.root.revision = 'rev1a'
1030
1010
self.add_file(repo, inv, 'file1', 'rev1a', [])
1031
repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
1032
1011
repo.add_inventory('rev1a', inv, [])
1033
1012
revision = _mod_revision.Revision('rev1a',
1034
1013
committer='jrandom@example.com', timestamp=0,
1069
1048
def add_revision(self, repo, revision_id, inv, parent_ids):
1070
1049
inv.revision_id = revision_id
1071
1050
inv.root.revision = revision_id
1072
repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
1073
1051
repo.add_inventory(revision_id, inv, parent_ids)
1074
1052
revision = _mod_revision.Revision(revision_id,
1075
1053
committer='jrandom@example.com', timestamp=0, inventory_sha1='',
1133
1111
packs.ensure_loaded()
1134
1112
return tree, r, packs, [rev1, rev2, rev3]
1136
def test__clear_obsolete_packs(self):
    """_clear_obsolete_packs removes everything under obsolete_packs/.

    It returns the pack names it found (index-only leftovers like
    'not-a-pack.rix' are deleted but not reported).
    """
    packs = self.get_packs()
    obsolete_trans = packs.transport.clone('obsolete_packs')
    # Seed the obsolete_packs directory with two packs' worth of files
    # plus a stray index file with no matching .pack.
    for fname, data in [
            ('a-pack.pack', 'content\n'),
            ('a-pack.rix', 'content\n'),
            ('a-pack.iix', 'content\n'),
            ('another-pack.pack', 'foo\n'),
            ('not-a-pack.rix', 'foo\n'),
            ]:
        obsolete_trans.put_bytes(fname, data)
    cleared = packs._clear_obsolete_packs()
    self.assertEqual(['a-pack', 'another-pack'], sorted(cleared))
    self.assertEqual([], obsolete_trans.list_dir('.'))
1148
def test__clear_obsolete_packs_preserve(self):
    """Names passed via ``preserve`` survive _clear_obsolete_packs.

    The preserved pack is still reported in the return value, but all of
    its files (pack and indices) stay on disk.
    """
    packs = self.get_packs()
    obsolete_trans = packs.transport.clone('obsolete_packs')
    for fname, data in [
            ('a-pack.pack', 'content\n'),
            ('a-pack.rix', 'content\n'),
            ('a-pack.iix', 'content\n'),
            ('another-pack.pack', 'foo\n'),
            ('not-a-pack.rix', 'foo\n'),
            ]:
        obsolete_trans.put_bytes(fname, data)
    cleared = packs._clear_obsolete_packs(preserve=set(['a-pack']))
    self.assertEqual(['a-pack', 'another-pack'], sorted(cleared))
    self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
                     sorted(obsolete_trans.list_dir('.')))
1161
1114
def test__max_pack_count(self):
1162
1115
"""The maximum pack count is a function of the number of revisions."""
1163
1116
# no revisions - one pack, so that we can have a revision free repo
1183
1136
# check some arbitrary big numbers
1184
1137
self.assertEqual(25, packs._max_pack_count(112894))
1186
def test_repr(self):
    """repr() of a pack collection names both it and its repository."""
    collection = self.get_packs()
    self.assertContainsRe(
        repr(collection), 'RepositoryPackCollection(.*Repository(.*))')
1191
def test__obsolete_packs(self):
    """_obsolete_packs copes when a concurrent process moved files first."""
    tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
    names = packs.names()
    doomed = packs.get_pack_by_name(names[0])
    # Schedule the first pack for removal.
    packs._remove_pack_from_memory(doomed)
    # Simulate a concurrent update by renaming the .pack file and one of
    # its index files into obsolete_packs/ ourselves.
    for subdir, suffix in [('packs', 'pack'), ('indices', 'iix')]:
        packs.transport.rename(
            '%s/%s.%s' % (subdir, names[0], suffix),
            'obsolete_packs/%s.%s' % (names[0], suffix))
    # Now trigger the obsoletion, and ensure that all the remaining files
    # get moved without error.
    packs._obsolete_packs([doomed])
    self.assertEqual([n + '.pack' for n in names[1:]],
                     sorted(packs._pack_transport.list_dir('.')))
    # names[0] should not be present in the index directory anymore.
    self.assertEqual(
        names[1:],
        sorted(set([osutils.splitext(n)[0]
                    for n in packs._index_transport.list_dir('.')])))
1213
1139
def test_pack_distribution_zero(self):
    """Zero revisions still distribute into a single (empty) pack."""
    collection = self.get_packs()
    self.assertEqual([0], collection.pack_distribution(0))
1383
1309
self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
1384
1310
self.assertFalse(packs.reload_pack_names())
1386
def test_reload_pack_names_preserves_pending(self):
    """reload_pack_names keeps in-memory pending adds and removes.

    We add one pack (via start_write_group + insert_record_stream) and
    remove another (via _remove_pack_from_memory); after a reload, both
    pending changes must still show up in _diff_pack_names().
    """
    # TODO: Update this to also test for pending-deleted names
    tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
    orig_names = packs.names()
    orig_at_load = packs._packs_at_load
    # next(iter(...)) rather than iter(...).next(): equivalent here, but
    # also valid on Python 3.
    to_remove_name = next(iter(orig_names))
    r.start_write_group()
    self.addCleanup(r.abort_write_group)
    r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
        ('text', 'rev'), (), None, 'content\n')])
    new_pack = packs._new_pack
    self.assertTrue(new_pack.data_inserted())
    # The pack must be finished before it can be allocated into the
    # collection.
    new_pack.finish()
    packs.allocate(new_pack)
    packs._new_pack = None
    removed_pack = packs.get_pack_by_name(to_remove_name)
    packs._remove_pack_from_memory(removed_pack)
    names = packs.names()

    def check_pending_diff():
        # One pending addition (new_pack) and one pending removal
        # (to_remove_name) must be visible in the diff.
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))

    check_pending_diff()
    packs.reload_pack_names()
    reloaded_names = packs.names()
    self.assertEqual(orig_at_load, packs._packs_at_load)
    self.assertEqual(names, reloaded_names)
    # The pending changes must have survived the reload unchanged.
    check_pending_diff()
1425
def test_autopack_obsoletes_new_pack(self):
    """An autopack during commit folds the just-written pack away too."""
    tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
    # Force the next commit to autopack everything together.
    packs._max_pack_count = lambda x: 1
    packs.pack_distribution = lambda x: [10]
    r.start_write_group()
    r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
        ('bogus-rev',), (), None, 'bogus-content\n')])
    # This should trigger an autopack, which will combine everything into a
    # single pack file.  (The return value is not needed here.)
    r.commit_write_group()
    names = packs.names()
    self.assertEqual(1, len(names))
    self.assertEqual([names[0] + '.pack'],
                     packs._pack_transport.list_dir('.'))
1440
1312
def test_autopack_reloads_and_stops(self):
1441
1313
tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1442
1314
# After we have determined what needs to be autopacked, trigger a
1454
1326
self.assertEqual(tree.branch.repository._pack_collection.names(),
1457
def test__save_pack_names(self):
    """_save_pack_names moves obsoleted packs into obsolete_packs/."""
    tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
    all_names = packs.names()
    victim = packs.get_pack_by_name(all_names[0])
    packs._remove_pack_from_memory(victim)
    packs._save_pack_names(obsolete_packs=[victim])
    live_files = packs._pack_transport.list_dir('.')
    self.assertEqual([n + '.pack' for n in all_names[1:]],
                     sorted(live_files))
    # obsolete_packs will also have stuff like .rix and .iix present.
    obsolete_files = packs.transport.list_dir('obsolete_packs')
    obsolete_names = set(
        [osutils.splitext(n)[0] for n in obsolete_files])
    self.assertEqual([victim.name], sorted(obsolete_names))
1470
def test__save_pack_names_already_obsoleted(self):
    """clear_obsolete_packs must not delete packs obsoleted this call."""
    tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
    all_names = packs.names()
    victim = packs.get_pack_by_name(all_names[0])
    packs._remove_pack_from_memory(victim)
    # We are going to simulate a concurrent autopack by manually obsoleting
    # the pack directly.
    packs._obsolete_packs([victim])
    packs._save_pack_names(clear_obsolete_packs=True,
                           obsolete_packs=[victim])
    surviving = packs._pack_transport.list_dir('.')
    self.assertEqual([n + '.pack' for n in all_names[1:]],
                     sorted(surviving))
    # Note that while we set clear_obsolete_packs=True, it should not
    # delete a pack file that we have also scheduled for obsoletion.
    obsolete_files = packs.transport.list_dir('obsolete_packs')
    obsolete_names = set(
        [osutils.splitext(n)[0] for n in obsolete_files])
    self.assertEqual([victim.name], sorted(obsolete_names))
1490
1330
class TestPack(TestCaseWithTransport):
1491
1331
"""Tests for the Pack object."""