25
25
from stat import S_ISDIR
26
from StringIO import StringIO
30
from bzrlib.errors import (NotBranchError,
29
from bzrlib.errors import (NoSuchFile,
32
30
UnknownFormatError,
33
31
UnsupportedFormatError,
35
33
from bzrlib import (
39
from bzrlib.branchbuilder import BranchBuilder
40
38
from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
41
from bzrlib.index import GraphIndex, InMemoryGraphIndex
39
from bzrlib.index import GraphIndex
42
40
from bzrlib.repository import RepositoryFormat
43
from bzrlib.smart import server
44
41
from bzrlib.tests import (
46
43
TestCaseWithTransport,
50
45
from bzrlib.transport import (
54
from bzrlib.transport.memory import MemoryServer
55
48
from bzrlib import (
63
54
revision as _mod_revision,
68
59
from bzrlib.repofmt import (
476
467
# Arguably, the deserialise_inventory should detect a mismatch, and
477
468
# raise an error, rather than silently using one revision_id over the
479
self.assertRaises(AssertionError, repo.deserialise_inventory,
470
self.assertRaises(AssertionError, repo._deserialise_inventory,
480
471
'test-rev-id', inv_xml)
481
inv = repo.deserialise_inventory('other-rev-id', inv_xml)
472
inv = repo._deserialise_inventory('other-rev-id', inv_xml)
482
473
self.assertEqual('other-rev-id', inv.root.revision)
484
475
def test_supports_external_lookups(self):
692
683
class Test2a(tests.TestCaseWithMemoryTransport):
685
def test_chk_bytes_uses_custom_btree_parser(self):
686
mt = self.make_branch_and_memory_tree('test', format='2a')
688
self.addCleanup(mt.unlock)
689
mt.add([''], ['root-id'])
691
index = mt.branch.repository.chk_bytes._index._graph_index._indices[0]
692
self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
693
# It should also work if we re-open the repo
694
repo = mt.branch.repository.bzrdir.open_repository()
696
self.addCleanup(repo.unlock)
697
index = repo.chk_bytes._index._graph_index._indices[0]
698
self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
694
700
def test_fetch_combines_groups(self):
695
701
builder = self.make_branch_builder('source', format='2a')
696
702
builder.start_series()
1143
1149
packs.ensure_loaded()
1144
1150
return tree, r, packs, [rev1, rev2, rev3]
1152
def test__clear_obsolete_packs(self):
    """Clearing obsolete packs deletes everything under obsolete_packs.

    Only real pack stems are reported back, but stray files (such as a
    lone index with no matching .pack) are removed as well.
    """
    packs = self.get_packs()
    obsolete_pack_trans = packs.transport.clone('obsolete_packs')
    stale_files = [
        ('a-pack.pack', 'content\n'),
        ('a-pack.rix', 'content\n'),
        ('a-pack.iix', 'content\n'),
        ('another-pack.pack', 'foo\n'),
        ('not-a-pack.rix', 'foo\n'),
        ]
    for fname, data in stale_files:
        obsolete_pack_trans.put_bytes(fname, data)
    res = packs._clear_obsolete_packs()
    # The returned names are the pack stems that were cleared.
    self.assertEqual(['a-pack', 'another-pack'], sorted(res))
    # Nothing at all is left behind in the obsolete_packs directory.
    self.assertEqual([], obsolete_pack_trans.list_dir('.'))
1164
def test__clear_obsolete_packs_preserve(self):
    """Packs named in preserve= survive _clear_obsolete_packs."""
    packs = self.get_packs()
    obsolete_pack_trans = packs.transport.clone('obsolete_packs')
    stale_files = [
        ('a-pack.pack', 'content\n'),
        ('a-pack.rix', 'content\n'),
        ('a-pack.iix', 'content\n'),
        ('another-pack.pack', 'foo\n'),
        ('not-a-pack.rix', 'foo\n'),
        ]
    for fname, data in stale_files:
        obsolete_pack_trans.put_bytes(fname, data)
    res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
    # Both pack stems are still reported, even though one was preserved.
    self.assertEqual(['a-pack', 'another-pack'], sorted(res))
    # Every file belonging to the preserved pack remains on disk.
    self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
                     sorted(obsolete_pack_trans.list_dir('.')))
1146
1177
def test__max_pack_count(self):
1147
1178
"""The maximum pack count is a function of the number of revisions."""
1148
1179
# no revisions - one pack, so that we can have a revision free repo
1173
1204
self.assertContainsRe(repr(packs),
1174
1205
'RepositoryPackCollection(.*Repository(.*))')
1207
def test__obsolete_packs(self):
    """_obsolete_packs tolerates files another process already moved.

    If a concurrent autopack has already renamed some of a pack's files
    into obsolete_packs, obsoleting that pack must still succeed and
    move the remaining files out of the way.
    """
    tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
    names = packs.names()
    victim = packs.get_pack_by_name(names[0])
    # Schedule this one for removal
    packs._remove_pack_from_memory(victim)
    # Simulate a concurrent update by renaming the .pack file and one of
    # its indices out from under us.
    packs.transport.rename('packs/%s.pack' % (names[0],),
                           'obsolete_packs/%s.pack' % (names[0],))
    packs.transport.rename('indices/%s.iix' % (names[0],),
                           'obsolete_packs/%s.iix' % (names[0],))
    # Now trigger the obsoletion, and ensure that all the remaining files
    # have been dealt with too.
    packs._obsolete_packs([victim])
    remaining_packs = sorted(packs._pack_transport.list_dir('.'))
    self.assertEqual([n + '.pack' for n in names[1:]], remaining_packs)
    # names[0] should not be present in the index anymore
    index_stems = sorted(set(osutils.splitext(n)[0]
                             for n in packs._index_transport.list_dir('.')))
    self.assertEqual(names[1:], index_stems)
1176
1229
def test_pack_distribution_zero(self):
1177
1230
packs = self.get_packs()
1178
1231
self.assertEqual([0], packs.pack_distribution(0))
1346
1399
self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
1347
1400
self.assertFalse(packs.reload_pack_names())
1402
def test_reload_pack_names_preserves_pending(self):
1403
# TODO: Update this to also test for pending-deleted names
1404
tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1405
# We will add one pack (via start_write_group + insert_record_stream),
1406
# and remove another pack (via _remove_pack_from_memory)
1407
orig_names = packs.names()
1408
orig_at_load = packs._packs_at_load
1409
to_remove_name = iter(orig_names).next()
1410
r.start_write_group()
1411
self.addCleanup(r.abort_write_group)
1412
r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
1413
('text', 'rev'), (), None, 'content\n')])
1414
new_pack = packs._new_pack
1415
self.assertTrue(new_pack.data_inserted())
1417
packs.allocate(new_pack)
1418
packs._new_pack = None
1419
removed_pack = packs.get_pack_by_name(to_remove_name)
1420
packs._remove_pack_from_memory(removed_pack)
1421
names = packs.names()
1422
all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
1423
new_names = set([x[0][0] for x in new_nodes])
1424
self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
1425
self.assertEqual(set(names) - set(orig_names), new_names)
1426
self.assertEqual(set([new_pack.name]), new_names)
1427
self.assertEqual([to_remove_name],
1428
sorted([x[0][0] for x in deleted_nodes]))
1429
packs.reload_pack_names()
1430
reloaded_names = packs.names()
1431
self.assertEqual(orig_at_load, packs._packs_at_load)
1432
self.assertEqual(names, reloaded_names)
1433
all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
1434
new_names = set([x[0][0] for x in new_nodes])
1435
self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
1436
self.assertEqual(set(names) - set(orig_names), new_names)
1437
self.assertEqual(set([new_pack.name]), new_names)
1438
self.assertEqual([to_remove_name],
1439
sorted([x[0][0] for x in deleted_nodes]))
1441
def test_autopack_obsoletes_new_pack(self):
    """An autopack during commit may obsolete the just-written pack."""
    tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
    # Force the next commit_write_group to autopack everything down to a
    # single pack file.
    packs._max_pack_count = lambda x: 1
    packs.pack_distribution = lambda x: [10]
    r.start_write_group()
    r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
        ('bogus-rev',), (), None, 'bogus-content\n')])
    # This should trigger an autopack, which will combine everything into a
    # single pack file.
    new_names = r.commit_write_group()
    result_names = packs.names()
    self.assertEqual(1, len(result_names))
    # Only the combined pack is left on disk.
    self.assertEqual([result_names[0] + '.pack'],
                     packs._pack_transport.list_dir('.'))
1349
1456
def test_autopack_reloads_and_stops(self):
1350
1457
tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1351
1458
# After we have determined what needs to be autopacked, trigger a
1363
1470
self.assertEqual(tree.branch.repository._pack_collection.names(),
1473
def test__save_pack_names(self):
    """_save_pack_names(obsolete_packs=...) moves those packs aside."""
    tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
    names = packs.names()
    removed = packs.get_pack_by_name(names[0])
    packs._remove_pack_from_memory(removed)
    packs._save_pack_names(obsolete_packs=[removed])
    still_present = sorted(packs._pack_transport.list_dir('.'))
    self.assertEqual([n + '.pack' for n in names[1:]], still_present)
    # obsolete_packs will also have stuff like .rix and .iix present.
    obsolete_files = packs.transport.list_dir('obsolete_packs')
    obsolete_stems = set(osutils.splitext(n)[0] for n in obsolete_files)
    self.assertEqual([removed.name], sorted(obsolete_stems))
1486
def test__save_pack_names_already_obsoleted(self):
    """clear_obsolete_packs must not delete packs obsoleted by this call."""
    tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
    names = packs.names()
    removed = packs.get_pack_by_name(names[0])
    packs._remove_pack_from_memory(removed)
    # We are going to simulate a concurrent autopack by manually obsoleting
    # the pack directly.
    packs._obsolete_packs([removed])
    packs._save_pack_names(clear_obsolete_packs=True,
                           obsolete_packs=[removed])
    still_present = sorted(packs._pack_transport.list_dir('.'))
    self.assertEqual([n + '.pack' for n in names[1:]], still_present)
    # Note that while we set clear_obsolete_packs=True, it should not
    # delete a pack file that we have also scheduled for obsoletion.
    obsolete_files = packs.transport.list_dir('obsolete_packs')
    obsolete_stems = set(osutils.splitext(n)[0] for n in obsolete_files)
    self.assertEqual([removed.name], sorted(obsolete_stems))
1367
1506
class TestPack(TestCaseWithTransport):
1368
1507
"""Tests for the Pack object."""