1135
1144
packs.ensure_loaded()
1136
1145
return tree, r, packs, [rev1, rev2, rev3]
1147
def test__clear_obsolete_packs(self):
    """_clear_obsolete_packs deletes everything and reports pack names."""
    collection = self.get_packs()
    obsolete_dir = collection.transport.clone('obsolete_packs')
    # Seed obsolete_packs with two packs (one with indices) plus a stray
    # index file that has no matching .pack.
    for fname, payload in [('a-pack.pack', 'content\n'),
                           ('a-pack.rix', 'content\n'),
                           ('a-pack.iix', 'content\n'),
                           ('another-pack.pack', 'foo\n'),
                           ('not-a-pack.rix', 'foo\n')]:
        obsolete_dir.put_bytes(fname, payload)
    cleared = collection._clear_obsolete_packs()
    # Only names with a .pack file are reported; all files are removed.
    self.assertEqual(['a-pack', 'another-pack'], sorted(cleared))
    self.assertEqual([], obsolete_dir.list_dir('.'))
1159
def test__clear_obsolete_packs_preserve(self):
    """Names in the preserve set survive _clear_obsolete_packs."""
    collection = self.get_packs()
    obsolete_dir = collection.transport.clone('obsolete_packs')
    # Same layout as test__clear_obsolete_packs: two packs and one
    # orphaned index file.
    for fname, payload in [('a-pack.pack', 'content\n'),
                           ('a-pack.rix', 'content\n'),
                           ('a-pack.iix', 'content\n'),
                           ('another-pack.pack', 'foo\n'),
                           ('not-a-pack.rix', 'foo\n')]:
        obsolete_dir.put_bytes(fname, payload)
    cleared = collection._clear_obsolete_packs(preserve=set(['a-pack']))
    # The preserved pack is still *reported*, but its files remain.
    self.assertEqual(['a-pack', 'another-pack'], sorted(cleared))
    self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
                     sorted(obsolete_dir.list_dir('.')))
1138
1172
def test__max_pack_count(self):
1139
1173
"""The maximum pack count is a function of the number of revisions."""
1140
1174
# no revisions - one pack, so that we can have a revision free repo
1160
1194
# check some arbitrary big numbers
1161
1195
self.assertEqual(25, packs._max_pack_count(112894))
1197
def test_repr(self):
    """repr() of the collection names both it and its repository."""
    packs = self.get_packs()
    # Escape the parentheses so they match literally: in the original
    # pattern 'RepositoryPackCollection(.*Repository(.*))' the parens
    # were regex groups, so the literal parens emitted by __repr__ were
    # never actually verified.
    self.assertContainsRe(repr(packs),
        r'RepositoryPackCollection\(.*Repository\(.*\)\)')
1202
def test__obsolete_packs(self):
    """_obsolete_packs copes with files a concurrent process already moved."""
    tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
    names = packs.names()
    victim = packs.get_pack_by_name(names[0])
    # Schedule this one for removal
    packs._remove_pack_from_memory(victim)
    # Simulate a concurrent update by renaming the .pack file and one of
    # its indices into obsolete_packs ourselves.
    for src_fmt, dst_fmt in [('packs/%s.pack', 'obsolete_packs/%s.pack'),
                             ('indices/%s.iix', 'obsolete_packs/%s.iix')]:
        packs.transport.rename(src_fmt % (names[0],),
                               dst_fmt % (names[0],))
    # Now trigger the obsoletion, and ensure that all the remaining files
    # still get moved without the pre-moved ones causing a failure.
    packs._obsolete_packs([victim])
    self.assertEqual([n + '.pack' for n in names[1:]],
                     sorted(packs._pack_transport.list_dir('.')))
    # names[0] should not be present in the index anymore
    remaining = set([osutils.splitext(n)[0]
                     for n in packs._index_transport.list_dir('.')])
    self.assertEqual(names[1:], sorted(remaining))
1163
1224
def test_pack_distribution_zero(self):
    """Zero revisions still yields a single zero-sized distribution bucket."""
    collection = self.get_packs()
    distribution = collection.pack_distribution(0)
    self.assertEqual([0], distribution)
1333
1394
self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
1334
1395
self.assertFalse(packs.reload_pack_names())
1397
def test_reload_pack_names_preserves_pending(self):
    """reload_pack_names keeps in-memory pending adds/removes intact.

    A pack allocated in memory (not yet saved to disk) and a pack removed
    from memory must still be reported by _diff_pack_names() both before
    and after reload_pack_names().
    """
    # TODO: Update this to also test for pending-deleted names
    tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
    # We will add one pack (via start_write_group + insert_record_stream),
    # and remove another pack (via _remove_pack_from_memory)
    orig_names = packs.names()
    orig_at_load = packs._packs_at_load
    # Grab an arbitrary existing pack name to remove later.
    to_remove_name = iter(orig_names).next()
    r.start_write_group()
    self.addCleanup(r.abort_write_group)
    # Insert one text record so the in-progress pack has real content.
    r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
        ('text', 'rev'), (), None, 'content\n')])
    new_pack = packs._new_pack
    self.assertTrue(new_pack.data_inserted())
    # NOTE(review): upstream copies of this test call new_pack.finish()
    # here before allocate() -- confirm whether that line was dropped.
    packs.allocate(new_pack)
    packs._new_pack = None
    removed_pack = packs.get_pack_by_name(to_remove_name)
    packs._remove_pack_from_memory(removed_pack)
    # The diff must reflect both in-memory changes relative to disk.
    names = packs.names()
    all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
    new_names = set([x[0][0] for x in new_nodes])
    self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
    self.assertEqual(set(names) - set(orig_names), new_names)
    self.assertEqual(set([new_pack.name]), new_names)
    self.assertEqual([to_remove_name],
                     sorted([x[0][0] for x in deleted_nodes]))
    packs.reload_pack_names()
    reloaded_names = packs.names()
    self.assertEqual(orig_at_load, packs._packs_at_load)
    self.assertEqual(names, reloaded_names)
    # After reloading, the same pending differences must still be visible.
    all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
    new_names = set([x[0][0] for x in new_nodes])
    self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
    self.assertEqual(set(names) - set(orig_names), new_names)
    self.assertEqual(set([new_pack.name]), new_names)
    self.assertEqual([to_remove_name],
                     sorted([x[0][0] for x in deleted_nodes]))
1436
def test_autopack_obsoletes_new_pack(self):
    """A commit that triggers autopack leaves exactly one pack on disk."""
    tree, repo, collection, revs = self.make_packs_and_alt_repo(
        write_lock=True)
    # Force every commit to exceed the pack budget so autopack fires.
    collection._max_pack_count = lambda x: 1
    collection.pack_distribution = lambda x: [10]
    repo.start_write_group()
    repo.revisions.insert_record_stream(
        [versionedfile.FulltextContentFactory(
            ('bogus-rev',), (), None, 'bogus-content\n')])
    # This should trigger an autopack, which will combine everything into
    # a single pack file.
    new_names = repo.commit_write_group()
    names = collection.names()
    self.assertEqual(1, len(names))
    self.assertEqual([names[0] + '.pack'],
                     collection._pack_transport.list_dir('.'))
1336
1451
def test_autopack_reloads_and_stops(self):
1337
1452
tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1338
1453
# After we have determined what needs to be autopacked, trigger a
1350
1465
self.assertEqual(tree.branch.repository._pack_collection.names(),
1468
def test__save_pack_names(self):
    """_save_pack_names moves obsoleted packs into obsolete_packs/."""
    tree, repo, collection, revs = self.make_packs_and_alt_repo(
        write_lock=True)
    names = collection.names()
    victim = collection.get_pack_by_name(names[0])
    collection._remove_pack_from_memory(victim)
    collection._save_pack_names(obsolete_packs=[victim])
    remaining = sorted(collection._pack_transport.list_dir('.'))
    self.assertEqual([n + '.pack' for n in names[1:]], remaining)
    # obsolete_packs will also have stuff like .rix and .iix present.
    moved = collection.transport.list_dir('obsolete_packs')
    moved_names = set([osutils.splitext(n)[0] for n in moved])
    self.assertEqual([victim.name], sorted(moved_names))
1481
def test__save_pack_names_already_obsoleted(self):
    """clear_obsolete_packs must not delete a pack being obsoleted now."""
    tree, repo, collection, revs = self.make_packs_and_alt_repo(
        write_lock=True)
    names = collection.names()
    victim = collection.get_pack_by_name(names[0])
    collection._remove_pack_from_memory(victim)
    # We are going to simulate a concurrent autopack by manually obsoleting
    # the pack directly.
    collection._obsolete_packs([victim])
    collection._save_pack_names(clear_obsolete_packs=True,
                                obsolete_packs=[victim])
    remaining = sorted(collection._pack_transport.list_dir('.'))
    self.assertEqual([n + '.pack' for n in names[1:]], remaining)
    # Note that while we set clear_obsolete_packs=True, it should not
    # delete a pack file that we have also scheduled for obsoletion.
    moved = collection.transport.list_dir('obsolete_packs')
    moved_names = set([osutils.splitext(n)[0] for n in moved])
    self.assertEqual([victim.name], sorted(moved_names))
1354
1501
class TestPack(TestCaseWithTransport):
1355
1502
"""Tests for the Pack object."""