~bzr-pqm/bzr/bzr.dev


Viewing changes to bzrlib/tests/test_repository.py

  • Committer: Martin Pool
  • Date: 2010-04-01 04:41:18 UTC
  • mto: This revision was merged to the branch mainline in revision 5128.
  • Revision ID: mbp@sourcefrog.net-20100401044118-shyctqc02ob08ngz
  • Commit message: ignore .testrepository

@@ -1,4 +1,4 @@
-# Copyright (C) 2006, 2007, 2008, 2009 Canonical Ltd
+# Copyright (C) 2006-2010 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -24,6 +24,7 @@
 
 from stat import S_ISDIR
 from StringIO import StringIO
+import sys
 
 import bzrlib
 from bzrlib.errors import (NotBranchError,
@@ -50,7 +51,6 @@
     fakenfs,
     get_transport,
     )
-from bzrlib.transport.memory import MemoryServer
 from bzrlib import (
     bencode,
     bzrdir,
@@ -62,6 +62,7 @@
     revision as _mod_revision,
     symbol_versioning,
     upgrade,
+    versionedfile,
     workingtree,
     )
 from bzrlib.repofmt import (
@@ -252,7 +253,14 @@
         tree = control.create_workingtree()
         tree.add(['foo'], ['Foo:Bar'], ['file'])
         tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
-        tree.commit('first post', rev_id='first')
+        try:
+            tree.commit('first post', rev_id='first')
+        except errors.IllegalPath:
+            if sys.platform != 'win32':
+                raise
+            self.knownFailure('Foo:Bar cannot be used as a file-id on windows'
+                              ' in repo format 7')
+            return
         self.assertEqualDiff(
             '# bzr weave file v5\n'
             'i\n'
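
The guard added above narrows the failure: errors.IllegalPath is only tolerated on Windows, where colon-bearing file-ids are a known limitation of repo format 7, and it is re-raised anywhere else. The same shape in isolation, as a minimal sketch (assuming bzrlib.errors is imported as errors; knownFailure is the bzrlib TestCase helper, and the wrapper name is illustrative):

    import sys

    def commit_tolerating_windows_limitation(testcase, do_commit):
        # Attempt the commit; accept only the documented Windows failure.
        try:
            do_commit()
        except errors.IllegalPath:
            if sys.platform != 'win32':
                raise  # unexpected elsewhere: surface it as a real failure
            testcase.knownFailure('colon file-ids are unusable on windows')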
@@ -456,7 +464,7 @@
         repo = self.make_repository('.',
                 format=bzrdir.format_registry.get('knit')())
         inv_xml = '<inventory format="5">\n</inventory>\n'
-        inv = repo.deserialise_inventory('test-rev-id', inv_xml)
+        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
         self.assertEqual('test-rev-id', inv.root.revision)
 
     def test_deserialise_uses_global_revision_id(self):
@@ -468,9 +476,9 @@
         # Arguably, the deserialise_inventory should detect a mismatch, and
         # raise an error, rather than silently using one revision_id over the
         # other.
-        self.assertRaises(AssertionError, repo.deserialise_inventory,
+        self.assertRaises(AssertionError, repo._deserialise_inventory,
             'test-rev-id', inv_xml)
-        inv = repo.deserialise_inventory('other-rev-id', inv_xml)
+        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
         self.assertEqual('other-rev-id', inv.root.revision)
 
     def test_supports_external_lookups(self):
@@ -486,6 +494,8 @@
     _serializer = None
 
     def supports_rich_root(self):
+        if self._format is not None:
+            return self._format.rich_root_data
         return False
 
     def get_graph(self):
@@ -542,11 +552,17 @@
         # pair that it returns true on for the is_compatible static method
         # check
         dummy_a = DummyRepository()
+        dummy_a._format = RepositoryFormat()
         dummy_b = DummyRepository()
+        dummy_b._format = RepositoryFormat()
         repo = self.make_repository('.')
         # hack dummies to look like repo somewhat.
         dummy_a._serializer = repo._serializer
+        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
+        dummy_a._format.rich_root_data = repo._format.rich_root_data
         dummy_b._serializer = repo._serializer
+        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
+        dummy_b._format.rich_root_data = repo._format.rich_root_data
         repository.InterRepository.register_optimiser(InterDummy)
         try:
             # we should get the default for something InterDummy returns False
@@ -673,15 +689,84 @@
         self.assertFalse(repo._format.supports_external_lookups)
 
 
-class Test2a(TestCaseWithTransport):
+class Test2a(tests.TestCaseWithMemoryTransport):
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot('1', None, [
+            ('add', ('', 'root-id', 'directory', '')),
+            ('add', ('file', 'file-id', 'file', 'content\n'))])
+        builder.build_snapshot('2', ['1'], [
+            ('modify', ('file-id', 'content-2\n'))])
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [('file-id', '1',), ('file-id', '2',)])
+        file_1_details = details[('file-id', '1')]
+        file_2_details = details[('file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot('1', None, [
+            ('add', ('', 'root-id', 'directory', '')),
+            ('add', ('file', 'file-id', 'file', 'content\n'))])
+        builder.build_snapshot('2', ['1'], [
+            ('modify', ('file-id', 'content-2\n'))])
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [('file-id', '1',), ('file-id', '2',)])
+        file_1_details = details[('file-id', '1')]
+        file_2_details = details[('file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot('1', None, [
+            ('add', ('', 'root-id', 'directory', '')),
+            ('add', ('file', 'file-id', 'file', 'content\n'))])
+        builder.build_snapshot('2', ['1'], [
+            ('modify', ('file-id', 'content-2\n'))])
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [('file-id', '1',), ('file-id', '2',)])
+        file_1_details = details[('file-id', '1')]
+        file_2_details = details[('file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
 
     def test_format_pack_compresses_True(self):
         repo = self.make_repository('repo', format='2a')
         self.assertTrue(repo._format.pack_compresses)
 
     def test_inventories_use_chk_map_with_parent_base_dict(self):
-        tree = self.make_branch_and_tree('repo', format="2a")
+        tree = self.make_branch_and_memory_tree('repo', format="2a")
+        tree.lock_write()
+        tree.add([''], ['TREE_ROOT'])
         revid = tree.commit("foo")
+        tree.unlock()
         tree.lock_read()
         self.addCleanup(tree.unlock)
         inv = tree.branch.repository.get_inventory(revid)
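
A review note on the hunk above: test_fetch_combines_groups is added three times with identical bodies. A Python class body executes top to bottom, so each redefinition silently rebinds the name and only the last copy is ever collected and run by the test loader. A small demonstration of the shadowing:

    class C(object):
        def m(self):
            return 1
        def m(self):            # rebinds 'm'; the first definition is lost
            return 2
    assert C().m() == 2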
@@ -696,12 +781,19 @@
         # at 20 unchanged commits, chk pages are packed that are split into
         # two groups such that the new pack being made doesn't have all its
         # pages in the source packs (though they are in the repository).
-        tree = self.make_branch_and_tree('tree', format='2a')
+        # Use a memory backed repository, we don't need to hit disk for this
+        tree = self.make_branch_and_memory_tree('tree', format='2a')
+        tree.lock_write()
+        self.addCleanup(tree.unlock)
+        tree.add([''], ['TREE_ROOT'])
         for pos in range(20):
             tree.commit(str(pos))
 
     def test_pack_with_hint(self):
-        tree = self.make_branch_and_tree('tree', format='2a')
+        tree = self.make_branch_and_memory_tree('tree', format='2a')
+        tree.lock_write()
+        self.addCleanup(tree.unlock)
+        tree.add([''], ['TREE_ROOT'])
         # 1 commit to leave untouched
         tree.commit('1')
         to_keep = tree.branch.repository._pack_collection.names()
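
The conversions above (and in Test2a) switch make_branch_and_tree to make_branch_and_memory_tree so the tests never hit disk. A memory tree starts unlocked and with no root entry, which is why every converted test repeats the same setup ritual. Sketched in isolation, inside a test method, using the same bzrlib test-method names as the diff:

    tree = self.make_branch_and_memory_tree('tree', format='2a')
    tree.lock_write()              # memory trees must be locked explicitly
    self.addCleanup(tree.unlock)   # unlock even if an assertion fails first
    tree.add([''], ['TREE_ROOT'])  # the root directory is not added for you
    tree.commit('first')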
@@ -946,6 +1038,7 @@
             inv = inventory.Inventory(revision_id='rev1a')
             inv.root.revision = 'rev1a'
             self.add_file(repo, inv, 'file1', 'rev1a', [])
+            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
             repo.add_inventory('rev1a', inv, [])
             revision = _mod_revision.Revision('rev1a',
                 committer='jrandom@example.com', timestamp=0,
@@ -986,6 +1079,7 @@
     def add_revision(self, repo, revision_id, inv, parent_ids):
         inv.revision_id = revision_id
         inv.root.revision = revision_id
+        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
         repo.add_inventory(revision_id, inv, parent_ids)
         revision = _mod_revision.Revision(revision_id,
             committer='jrandom@example.com', timestamp=0, inventory_sha1='',
@@ -1008,14 +1102,17 @@
         """
         broken_repo = self.make_broken_repository()
         empty_repo = self.make_repository('empty-repo')
-        # See bug https://bugs.launchpad.net/bzr/+bug/389141 for information
-        # about why this was turned into expectFailure
-        self.expectFailure('new Stream fetch fills in missing compression'
-           ' parents (bug #389141)',
-           self.assertRaises, (errors.RevisionNotPresent, errors.BzrCheckError),
-                              empty_repo.fetch, broken_repo)
-        self.assertRaises((errors.RevisionNotPresent, errors.BzrCheckError),
-                          empty_repo.fetch, broken_repo)
+        try:
+            empty_repo.fetch(broken_repo)
+        except (errors.RevisionNotPresent, errors.BzrCheckError):
+            # Test successful: compression parent not being copied leads to
+            # error.
+            return
+        empty_repo.lock_read()
+        self.addCleanup(empty_repo.unlock)
+        text = empty_repo.texts.get_record_stream(
+            [('file2-id', 'rev3')], 'topological', True).next()
+        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
 
 
 class TestRepositoryPackCollection(TestCaseWithTransport):
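
The rewrite above makes the test agnostic about which behaviour it meets: if fetch still raises, the historical behaviour tracked by bug #389141 holds and the test returns early as a pass; if fetch now succeeds, the test goes on to verify that the missing compression parent was actually filled in. As a generic shape (hypothetical names):

    try:
        operation_that_historically_raised()
    except (ExpectedErrorA, ExpectedErrorB):
        return  # old behaviour: raising is still an acceptable outcome
    # new behaviour: the operation succeeded, so verify its result is complete
    check_result_is_complete()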
@@ -1030,7 +1127,7 @@
 
     def make_packs_and_alt_repo(self, write_lock=False):
         """Create a pack repo with 3 packs, and access it via a second repo."""
-        tree = self.make_branch_and_tree('.')
+        tree = self.make_branch_and_tree('.', format=self.get_format())
         tree.lock_write()
         self.addCleanup(tree.unlock)
         rev1 = tree.commit('one')
@@ -1046,6 +1143,31 @@
         packs.ensure_loaded()
         return tree, r, packs, [rev1, rev2, rev3]
 
+    def test__clear_obsolete_packs(self):
+        packs = self.get_packs()
+        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
+        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
+        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
+        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
+        res = packs._clear_obsolete_packs()
+        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
+        self.assertEqual([], obsolete_pack_trans.list_dir('.'))
+
+    def test__clear_obsolete_packs_preserve(self):
+        packs = self.get_packs()
+        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
+        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
+        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
+        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
+        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
+        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
+        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
+                         sorted(obsolete_pack_trans.list_dir('.')))
+
     def test__max_pack_count(self):
         """The maximum pack count is a function of the number of revisions."""
         # no revisions - one pack, so that we can have a revision free repo
@@ -1071,6 +1193,33 @@
         # check some arbitrary big numbers
         self.assertEqual(25, packs._max_pack_count(112894))
 
+    def test_repr(self):
+        packs = self.get_packs()
+        self.assertContainsRe(repr(packs),
+            'RepositoryPackCollection(.*Repository(.*))')
+
+    def test__obsolete_packs(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        names = packs.names()
+        pack = packs.get_pack_by_name(names[0])
+        # Schedule this one for removal
+        packs._remove_pack_from_memory(pack)
+        # Simulate a concurrent update by renaming the .pack file and one of
+        # the indices
+        packs.transport.rename('packs/%s.pack' % (names[0],),
+                               'obsolete_packs/%s.pack' % (names[0],))
+        packs.transport.rename('indices/%s.iix' % (names[0],),
+                               'obsolete_packs/%s.iix' % (names[0],))
+        # Now trigger the obsoletion, and ensure that all the remaining files
+        # are still renamed
+        packs._obsolete_packs([pack])
+        self.assertEqual([n + '.pack' for n in names[1:]],
+                         sorted(packs._pack_transport.list_dir('.')))
+        # names[0] should not be present in the index anymore
+        self.assertEqual(names[1:],
+            sorted(set([osutils.splitext(n)[0] for n in
+                        packs._index_transport.list_dir('.')])))
+
     def test_pack_distribution_zero(self):
         packs = self.get_packs()
         self.assertEqual([0], packs.pack_distribution(0))
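
One detail of the new test_repr: the pattern passed to assertContainsRe, 'RepositoryPackCollection(.*Repository(.*))', leaves its parentheses unescaped, so the regex engine reads them as groups rather than literal characters. The assertion therefore accepts any repr containing 'RepositoryPackCollection' followed somewhere by 'Repository', parenthesised or not:

    import re
    pattern = 'RepositoryPackCollection(.*Repository(.*))'
    # Both match; the parentheses in the pattern are groups, not literals.
    assert re.search(pattern, "RepositoryPackCollection(KnitRepository('x'))")
    assert re.search(pattern, 'RepositoryPackCollection for a Repository')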
@@ -1244,6 +1393,60 @@
         self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
         self.assertFalse(packs.reload_pack_names())
 
+    def test_reload_pack_names_preserves_pending(self):
+        # TODO: Update this to also test for pending-deleted names
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        # We will add one pack (via start_write_group + insert_record_stream),
+        # and remove another pack (via _remove_pack_from_memory)
+        orig_names = packs.names()
+        orig_at_load = packs._packs_at_load
+        to_remove_name = iter(orig_names).next()
+        r.start_write_group()
+        self.addCleanup(r.abort_write_group)
+        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
+            ('text', 'rev'), (), None, 'content\n')])
+        new_pack = packs._new_pack
+        self.assertTrue(new_pack.data_inserted())
+        new_pack.finish()
+        packs.allocate(new_pack)
+        packs._new_pack = None
+        removed_pack = packs.get_pack_by_name(to_remove_name)
+        packs._remove_pack_from_memory(removed_pack)
+        names = packs.names()
+        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
+        new_names = set([x[0][0] for x in new_nodes])
+        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
+        self.assertEqual(set(names) - set(orig_names), new_names)
+        self.assertEqual(set([new_pack.name]), new_names)
+        self.assertEqual([to_remove_name],
+                         sorted([x[0][0] for x in deleted_nodes]))
+        packs.reload_pack_names()
+        reloaded_names = packs.names()
+        self.assertEqual(orig_at_load, packs._packs_at_load)
+        self.assertEqual(names, reloaded_names)
+        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
+        new_names = set([x[0][0] for x in new_nodes])
+        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
+        self.assertEqual(set(names) - set(orig_names), new_names)
+        self.assertEqual(set([new_pack.name]), new_names)
+        self.assertEqual([to_remove_name],
+                         sorted([x[0][0] for x in deleted_nodes]))
+
+    def test_autopack_obsoletes_new_pack(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        packs._max_pack_count = lambda x: 1
+        packs.pack_distribution = lambda x: [10]
+        r.start_write_group()
+        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
+            ('bogus-rev',), (), None, 'bogus-content\n')])
+        # This should trigger an autopack, which will combine everything into a
+        # single pack file.
+        new_names = r.commit_write_group()
+        names = packs.names()
+        self.assertEqual(1, len(names))
+        self.assertEqual([names[0] + '.pack'],
+                         packs._pack_transport.list_dir('.'))
+
     def test_autopack_reloads_and_stops(self):
         tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
         # After we have determined what needs to be autopacked, trigger a
@@ -1261,6 +1464,38 @@
         self.assertEqual(tree.branch.repository._pack_collection.names(),
                          packs.names())
 
+    def test__save_pack_names(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        names = packs.names()
+        pack = packs.get_pack_by_name(names[0])
+        packs._remove_pack_from_memory(pack)
+        packs._save_pack_names(obsolete_packs=[pack])
+        cur_packs = packs._pack_transport.list_dir('.')
+        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
+        # obsolete_packs will also have stuff like .rix and .iix present.
+        obsolete_packs = packs.transport.list_dir('obsolete_packs')
+        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
+        self.assertEqual([pack.name], sorted(obsolete_names))
+
+    def test__save_pack_names_already_obsoleted(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        names = packs.names()
+        pack = packs.get_pack_by_name(names[0])
+        packs._remove_pack_from_memory(pack)
+        # We are going to simulate a concurrent autopack by manually obsoleting
+        # the pack directly.
+        packs._obsolete_packs([pack])
+        packs._save_pack_names(clear_obsolete_packs=True,
+                               obsolete_packs=[pack])
+        cur_packs = packs._pack_transport.list_dir('.')
+        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
+        # Note that while we set clear_obsolete_packs=True, it should not
+        # delete a pack file that we have also scheduled for obsoletion.
+        obsolete_packs = packs.transport.list_dir('obsolete_packs')
+        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
+        self.assertEqual([pack.name], sorted(obsolete_names))
+
+
 
 class TestPack(TestCaseWithTransport):
     """Tests for the Pack object."""
@@ -1330,6 +1565,7 @@
             index_class=BTreeGraphIndex,
             use_chk_index=False)
         pack = pack_repo.NewPack(collection)
+        self.addCleanup(pack.abort) # Make sure the write stream gets closed
         self.assertIsInstance(pack.revision_index, BTreeBuilder)
         self.assertIsInstance(pack.inventory_index, BTreeBuilder)
         self.assertIsInstance(pack._hash, type(osutils.md5()))
@@ -1346,7 +1582,7 @@
     """Tests for the packs repository Packer class."""
 
     def test_pack_optimizes_pack_order(self):
-        builder = self.make_branch_builder('.')
+        builder = self.make_branch_builder('.', format="1.9")
         builder.start_series()
         builder.build_snapshot('A', None, [
             ('add', ('', 'root-id', 'directory', None)),
@@ -1388,6 +1624,7 @@
         packer = pack_repo.OptimisingPacker(self.get_pack_collection(),
                                             [], '.test')
         new_pack = packer.open_pack()
+        self.addCleanup(new_pack.abort) # ensure cleanup
         self.assertIsInstance(new_pack, pack_repo.NewPack)
         self.assertTrue(new_pack.revision_index._optimize_for_size)
         self.assertTrue(new_pack.inventory_index._optimize_for_size)