        # Arguably, the deserialise_inventory should detect a mismatch, and
        # raise an error, rather than silently using one revision_id over the
        # other.
        self.assertRaises(AssertionError, repo._deserialise_inventory,
            'test-rev-id', inv_xml)
        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
        self.assertEqual('other-rev-id', inv.root.revision)

    def test_supports_external_lookups(self):
        # ...
        packs.ensure_loaded()
        return tree, r, packs, [rev1, rev2, rev3]
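
    # The two tests below cover _clear_obsolete_packs(), which removes the
    # files under 'obsolete_packs/' (skipping any pack names passed in via
    # 'preserve') and returns the names of the obsolete packs it found.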
    def test__clear_obsolete_packs(self):
        packs = self.get_packs()
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
        res = packs._clear_obsolete_packs()
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
        self.assertEqual([], obsolete_pack_trans.list_dir('.'))

    def test__clear_obsolete_packs_preserve(self):
        packs = self.get_packs()
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
                         sorted(obsolete_pack_trans.list_dir('.')))

    def test__max_pack_count(self):
        """The maximum pack count is a function of the number of revisions."""
        # no revisions - one pack, so that we can have a revision free repo
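        # (For revision counts above zero, bzr caps the pack count at the sum
        # of the decimal digits of the count, e.g. 112894 revisions allow at
        # most 1+1+2+8+9+4 = 25 packs before autopack combines them.)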
        # ...

        self.assertContainsRe(repr(packs),
            'RepositoryPackCollection(.*Repository(.*))')
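
    # A pack repository keeps live pack files under 'packs/' and their indices
    # under 'indices/'; packs that have been superseded are moved aside into
    # 'obsolete_packs/'. The test below relies on that layout.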
    def test__obsolete_packs(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        # Schedule this one for removal
        packs._remove_pack_from_memory(pack)
        # Simulate a concurrent update by renaming the .pack file and one of
        # its indices
        packs.transport.rename('packs/%s.pack' % (names[0],),
                               'obsolete_packs/%s.pack' % (names[0],))
        packs.transport.rename('indices/%s.iix' % (names[0],),
                               'obsolete_packs/%s.iix' % (names[0],))
        # Now trigger the obsoletion, and ensure that all the remaining files
        # are still renamed into obsolete_packs/
        packs._obsolete_packs([pack])
        self.assertEqual([n + '.pack' for n in names[1:]],
                         sorted(packs._pack_transport.list_dir('.')))
        # names[0] should not be present in the index anymore
        self.assertEqual(names[1:],
            sorted(set([osutils.splitext(n)[0] for n in
                        packs._index_transport.list_dir('.')])))

    def test_pack_distribution_zero(self):
        packs = self.get_packs()
        self.assertEqual([0], packs.pack_distribution(0))

    # ...
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
        self.assertFalse(packs.reload_pack_names())

    def test_reload_pack_names_preserves_pending(self):
        # TODO: Update this to also test for pending-deleted names
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # We will add one pack (via start_write_group + insert_record_stream),
        # and remove another pack (via _remove_pack_from_memory)
        orig_names = packs.names()
        orig_at_load = packs._packs_at_load
        to_remove_name = iter(orig_names).next()
        r.start_write_group()
        self.addCleanup(r.abort_write_group)
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
            ('text', 'rev'), (), None, 'content\n')])
        new_pack = packs._new_pack
        self.assertTrue(new_pack.data_inserted())
        packs.allocate(new_pack)
        packs._new_pack = None
        removed_pack = packs.get_pack_by_name(to_remove_name)
        packs._remove_pack_from_memory(removed_pack)
        names = packs.names()
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))
        packs.reload_pack_names()
        reloaded_names = packs.names()
        self.assertEqual(orig_at_load, packs._packs_at_load)
        self.assertEqual(names, reloaded_names)
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
        new_names = set([x[0][0] for x in new_nodes])
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
        self.assertEqual(set(names) - set(orig_names), new_names)
        self.assertEqual(set([new_pack.name]), new_names)
        self.assertEqual([to_remove_name],
                         sorted([x[0][0] for x in deleted_nodes]))

    def test_autopack_obsoletes_new_pack(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
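        # Monkeypatch the pack policy so that the next commit decides a single
        # pack is the target distribution, forcing an autopack.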
        packs._max_pack_count = lambda x: 1
        packs.pack_distribution = lambda x: [10]
        r.start_write_group()
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
            ('bogus-rev',), (), None, 'bogus-content\n')])
        # This should trigger an autopack, which will combine everything into a
        # single pack file.
        new_names = r.commit_write_group()
        names = packs.names()
        self.assertEqual(1, len(names))
        self.assertEqual([names[0] + '.pack'],
                         packs._pack_transport.list_dir('.'))

    def test_autopack_reloads_and_stops(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        # After we have determined what needs to be autopacked, trigger a
        # ...
        self.assertEqual(tree.branch.repository._pack_collection.names(),
                         packs.names())

    def test__save_pack_names(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
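        # _save_pack_names writes out the new pack-names index; packs listed in
        # 'obsolete_packs' get moved into the obsolete_packs/ directory.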
        packs._save_pack_names(obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # obsolete_packs will also have stuff like .rix and .iix present.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
        self.assertEqual([pack.name], sorted(obsolete_names))

    def test__save_pack_names_already_obsoleted(self):
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
        names = packs.names()
        pack = packs.get_pack_by_name(names[0])
        packs._remove_pack_from_memory(pack)
        # We are going to simulate a concurrent autopack by manually obsoleting
        # the pack directly.
        packs._obsolete_packs([pack])
        packs._save_pack_names(clear_obsolete_packs=True,
                               obsolete_packs=[pack])
        cur_packs = packs._pack_transport.list_dir('.')
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
        # Note that while we set clear_obsolete_packs=True, it should not
        # delete a pack file that we have also scheduled for obsoletion.
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
        self.assertEqual([pack.name], sorted(obsolete_names))


class TestPack(TestCaseWithTransport):
    """Tests for the Pack object."""