542
545
# pair that it returns true on for the is_compatible static method
544
547
dummy_a = DummyRepository()
548
dummy_a._format = RepositoryFormat()
545
549
dummy_b = DummyRepository()
550
dummy_b._format = RepositoryFormat()
546
551
repo = self.make_repository('.')
547
552
# hack dummies to look like repo somewhat.
548
553
dummy_a._serializer = repo._serializer
554
dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
555
dummy_a._format.rich_root_data = repo._format.rich_root_data
549
556
dummy_b._serializer = repo._serializer
557
dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
558
dummy_b._format.rich_root_data = repo._format.rich_root_data
550
559
repository.InterRepository.register_optimiser(InterDummy)
552
561
# we should get the default for something InterDummy returns False
676
685
class Test2a(TestCaseWithTransport):
    """Tests for the '2a' repository format."""

    def test_fetch_combines_groups(self):
        """Fetching should give both versions of a file identical index
        build details (same group layout on disk in the target)."""
        builder = self.make_branch_builder('source', format='2a')
        builder.start_series()
        builder.build_snapshot('1', None, [
            ('add', ('', 'root-id', 'directory', '')),
            ('add', ('file', 'file-id', 'file', 'content\n'))])
        builder.build_snapshot('2', ['1'], [
            ('modify', ('file-id', 'content-2\n'))])
        builder.finish_series()
        source = builder.get_branch()
        target = self.make_repository('target', format='2a')
        target.fetch(source.repository)
        # Bug fix: the repository must be read-locked before registering
        # unlock as a cleanup, otherwise teardown raises on an unlocked
        # repository and the _index access below is done without a lock.
        target.lock_read()
        self.addCleanup(target.unlock)
        details = target.texts._index.get_build_details(
            [('file-id', '1',), ('file-id', '2',)])
        file_1_details = details[('file-id', '1')]
        file_2_details = details[('file-id', '2')]
        # The index, and what to read off disk, should be the same for both
        # versions of the file.
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])

    def test_format_pack_compresses_True(self):
        """The 2a pack format advertises that packing recompresses data."""
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo._format.pack_compresses)
946
977
inv = inventory.Inventory(revision_id='rev1a')
947
978
inv.root.revision = 'rev1a'
948
979
self.add_file(repo, inv, 'file1', 'rev1a', [])
980
repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
949
981
repo.add_inventory('rev1a', inv, [])
950
982
revision = _mod_revision.Revision('rev1a',
951
983
committer='jrandom@example.com', timestamp=0,
986
1018
def add_revision(self, repo, revision_id, inv, parent_ids):
987
1019
inv.revision_id = revision_id
988
1020
inv.root.revision = revision_id
1021
repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
989
1022
repo.add_inventory(revision_id, inv, parent_ids)
990
1023
revision = _mod_revision.Revision(revision_id,
991
1024
committer='jrandom@example.com', timestamp=0, inventory_sha1='',
1009
1042
broken_repo = self.make_broken_repository()
1010
1043
empty_repo = self.make_repository('empty-repo')
1011
# See bug https://bugs.launchpad.net/bzr/+bug/389141 for information
1012
# about why this was turned into expectFailure
1013
self.expectFailure('new Stream fetch fills in missing compression'
1014
' parents (bug #389141)',
1015
self.assertRaises, (errors.RevisionNotPresent, errors.BzrCheckError),
1016
empty_repo.fetch, broken_repo)
1017
self.assertRaises((errors.RevisionNotPresent, errors.BzrCheckError),
1018
empty_repo.fetch, broken_repo)
1045
empty_repo.fetch(broken_repo)
1046
except (errors.RevisionNotPresent, errors.BzrCheckError):
1047
# Test successful: compression parent not being copied leads to
1050
empty_repo.lock_read()
1051
self.addCleanup(empty_repo.unlock)
1052
text = empty_repo.texts.get_record_stream(
1053
[('file2-id', 'rev3')], 'topological', True).next()
1054
self.assertEqual('line\n', text.get_bytes_as('fulltext'))
1021
1057
class TestRepositoryPackCollection(TestCaseWithTransport):
1031
1067
def make_packs_and_alt_repo(self, write_lock=False):
1032
1068
"""Create a pack repo with 3 packs, and access it via a second repo."""
1033
tree = self.make_branch_and_tree('.')
1069
tree = self.make_branch_and_tree('.', format=self.get_format())
1034
1070
tree.lock_write()
1035
1071
self.addCleanup(tree.unlock)
1036
1072
rev1 = tree.commit('one')
1244
1280
self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
1245
1281
self.assertFalse(packs.reload_pack_names())
1283
def test_reload_pack_names_preserves_pending(self):
    """Reloading pack names keeps a pending in-memory add and remove.

    We stage one new pack (start_write_group + insert_record_stream) and
    drop an existing one (_remove_pack_from_memory), then check that
    reload_pack_names() leaves that pending state untouched.
    """
    # TODO: Update this to also test for pending-deleted names
    tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
    names_before = packs.names()
    at_load_before = packs._packs_at_load
    victim_name = iter(names_before).next()
    r.start_write_group()
    self.addCleanup(r.abort_write_group)
    r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
        ('text', 'rev'), (), None, 'content\n')])
    added_pack = packs._new_pack
    self.assertTrue(added_pack.data_inserted())
    packs.allocate(added_pack)
    packs._new_pack = None
    victim_pack = packs.get_pack_by_name(victim_name)
    packs._remove_pack_from_memory(victim_pack)
    current_names = packs.names()

    def check_pending_diff():
        # The diff must show exactly the staged add and the staged remove.
        all_nodes, deleted_nodes, new_nodes = packs._diff_pack_names()
        added_names = set(node[0][0] for node in new_nodes)
        self.assertEqual(current_names,
                         sorted(node[0][0] for node in all_nodes))
        self.assertEqual(set(current_names) - set(names_before), added_names)
        self.assertEqual(set([added_pack.name]), added_names)
        self.assertEqual([victim_name],
                         sorted(node[0][0] for node in deleted_nodes))

    check_pending_diff()
    packs.reload_pack_names()
    # Reloading must not clobber the snapshot taken at load time, nor the
    # pending changes.
    self.assertEqual(at_load_before, packs._packs_at_load)
    self.assertEqual(current_names, packs.names())
    check_pending_diff()
1247
1322
def test_autopack_reloads_and_stops(self):
1248
1323
tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1249
1324
# After we have determined what needs to be autopacked, trigger a
1346
1421
"""Tests for the packs repository Packer class."""
1348
1423
def test_pack_optimizes_pack_order(self):
1349
builder = self.make_branch_builder('.')
1424
builder = self.make_branch_builder('.', format="1.9")
1350
1425
builder.start_series()
1351
1426
builder.build_snapshot('A', None, [
1352
1427
('add', ('', 'root-id', 'directory', None)),