~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/tests/test_repository.py

  • Committer: Ian Clatworthy
  • Date: 2010-02-19 03:02:07 UTC
  • mto: (4797.23.1 integration-2.1)
  • mto: This revision was merged to the branch mainline in revision 5055.
  • Revision ID: ian.clatworthy@canonical.com-20100219030207-zpbzx021zavx4sqt
  • Commit message: What's New in 2.1 - a summary of changes since 2.0

--- bzrlib/tests/test_repository.py
+++ bzrlib/tests/test_repository.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2006, 2007, 2008, 2009 Canonical Ltd
+# Copyright (C) 2006-2010 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -24,6 +24,7 @@
 
 from stat import S_ISDIR
 from StringIO import StringIO
+import sys
 
 import bzrlib
 from bzrlib.errors import (NotBranchError,
@@ -62,6 +63,7 @@
     revision as _mod_revision,
     symbol_versioning,
     upgrade,
+    versionedfile,
     workingtree,
     )
 from bzrlib.repofmt import (
@@ -252,7 +254,14 @@
         tree = control.create_workingtree()
         tree.add(['foo'], ['Foo:Bar'], ['file'])
         tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
-        tree.commit('first post', rev_id='first')
+        try:
+            tree.commit('first post', rev_id='first')
+        except errors.IllegalPath:
+            if sys.platform != 'win32':
+                raise
+            self.knownFailure('Foo:Bar cannot be used as a file-id on windows'
+                              ' in repo format 7')
+            return
         self.assertEqualDiff(
             '# bzr weave file v5\n'
             'i\n'
@@ -486,6 +495,8 @@
     _serializer = None
 
     def supports_rich_root(self):
+        if self._format is not None:
+            return self._format.rich_root_data
         return False
 
     def get_graph(self):
@@ -542,11 +553,17 @@
         # pair that it returns true on for the is_compatible static method
         # check
         dummy_a = DummyRepository()
+        dummy_a._format = RepositoryFormat()
         dummy_b = DummyRepository()
+        dummy_b._format = RepositoryFormat()
         repo = self.make_repository('.')
         # hack dummies to look like repo somewhat.
         dummy_a._serializer = repo._serializer
+        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
+        dummy_a._format.rich_root_data = repo._format.rich_root_data
         dummy_b._serializer = repo._serializer
+        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
+        dummy_b._format.rich_root_data = repo._format.rich_root_data
         repository.InterRepository.register_optimiser(InterDummy)
         try:
             # we should get the default for something InterDummy returns False
@@ -673,15 +690,84 @@
         self.assertFalse(repo._format.supports_external_lookups)
 
 
-class Test2a(TestCaseWithTransport):
+class Test2a(tests.TestCaseWithMemoryTransport):
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot('1', None, [
+            ('add', ('', 'root-id', 'directory', '')),
+            ('add', ('file', 'file-id', 'file', 'content\n'))])
+        builder.build_snapshot('2', ['1'], [
+            ('modify', ('file-id', 'content-2\n'))])
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [('file-id', '1',), ('file-id', '2',)])
+        file_1_details = details[('file-id', '1')]
+        file_2_details = details[('file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot('1', None, [
+            ('add', ('', 'root-id', 'directory', '')),
+            ('add', ('file', 'file-id', 'file', 'content\n'))])
+        builder.build_snapshot('2', ['1'], [
+            ('modify', ('file-id', 'content-2\n'))])
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [('file-id', '1',), ('file-id', '2',)])
+        file_1_details = details[('file-id', '1')]
+        file_2_details = details[('file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot('1', None, [
+            ('add', ('', 'root-id', 'directory', '')),
+            ('add', ('file', 'file-id', 'file', 'content\n'))])
+        builder.build_snapshot('2', ['1'], [
+            ('modify', ('file-id', 'content-2\n'))])
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [('file-id', '1',), ('file-id', '2',)])
+        file_1_details = details[('file-id', '1')]
+        file_2_details = details[('file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
 
     def test_format_pack_compresses_True(self):
         repo = self.make_repository('repo', format='2a')
         self.assertTrue(repo._format.pack_compresses)
 
     def test_inventories_use_chk_map_with_parent_base_dict(self):
-        tree = self.make_branch_and_tree('repo', format="2a")
+        tree = self.make_branch_and_memory_tree('repo', format="2a")
+        tree.lock_write()
+        tree.add([''], ['TREE_ROOT'])
         revid = tree.commit("foo")
+        tree.unlock()
         tree.lock_read()
         self.addCleanup(tree.unlock)
         inv = tree.branch.repository.get_inventory(revid)
@@ -696,12 +782,19 @@
         # at 20 unchanged commits, chk pages are packed that are split into
         # two groups such that the new pack being made doesn't have all its
         # pages in the source packs (though they are in the repository).
-        tree = self.make_branch_and_tree('tree', format='2a')
+        # Use a memory backed repository, we don't need to hit disk for this
+        tree = self.make_branch_and_memory_tree('tree', format='2a')
+        tree.lock_write()
+        self.addCleanup(tree.unlock)
+        tree.add([''], ['TREE_ROOT'])
         for pos in range(20):
             tree.commit(str(pos))
 
     def test_pack_with_hint(self):
-        tree = self.make_branch_and_tree('tree', format='2a')
+        tree = self.make_branch_and_memory_tree('tree', format='2a')
+        tree.lock_write()
+        self.addCleanup(tree.unlock)
+        tree.add([''], ['TREE_ROOT'])
         # 1 commit to leave untouched
         tree.commit('1')
         to_keep = tree.branch.repository._pack_collection.names()
@@ -946,6 +1039,7 @@
             inv = inventory.Inventory(revision_id='rev1a')
             inv.root.revision = 'rev1a'
             self.add_file(repo, inv, 'file1', 'rev1a', [])
+            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
             repo.add_inventory('rev1a', inv, [])
             revision = _mod_revision.Revision('rev1a',
                 committer='jrandom@example.com', timestamp=0,
@@ -986,6 +1080,7 @@
     def add_revision(self, repo, revision_id, inv, parent_ids):
         inv.revision_id = revision_id
         inv.root.revision = revision_id
+        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
         repo.add_inventory(revision_id, inv, parent_ids)
         revision = _mod_revision.Revision(revision_id,
             committer='jrandom@example.com', timestamp=0, inventory_sha1='',
@@ -1008,14 +1103,17 @@
         """
         broken_repo = self.make_broken_repository()
         empty_repo = self.make_repository('empty-repo')
-        # See bug https://bugs.launchpad.net/bzr/+bug/389141 for information
-        # about why this was turned into expectFailure
-        self.expectFailure('new Stream fetch fills in missing compression'
-           ' parents (bug #389141)',
-           self.assertRaises, (errors.RevisionNotPresent, errors.BzrCheckError),
-                              empty_repo.fetch, broken_repo)
-        self.assertRaises((errors.RevisionNotPresent, errors.BzrCheckError),
-                          empty_repo.fetch, broken_repo)
+        try:
+            empty_repo.fetch(broken_repo)
+        except (errors.RevisionNotPresent, errors.BzrCheckError):
+            # Test successful: compression parent not being copied leads to
+            # error.
+            return
+        empty_repo.lock_read()
+        self.addCleanup(empty_repo.unlock)
+        text = empty_repo.texts.get_record_stream(
+            [('file2-id', 'rev3')], 'topological', True).next()
+        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
 
 
 class TestRepositoryPackCollection(TestCaseWithTransport):
@@ -1030,7 +1128,7 @@
 
     def make_packs_and_alt_repo(self, write_lock=False):
         """Create a pack repo with 3 packs, and access it via a second repo."""
-        tree = self.make_branch_and_tree('.')
+        tree = self.make_branch_and_tree('.', format=self.get_format())
         tree.lock_write()
         self.addCleanup(tree.unlock)
         rev1 = tree.commit('one')
@@ -1046,6 +1144,31 @@
         packs.ensure_loaded()
         return tree, r, packs, [rev1, rev2, rev3]
 
+    def test__clear_obsolete_packs(self):
+        packs = self.get_packs()
+        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
+        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
+        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
+        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
+        res = packs._clear_obsolete_packs()
+        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
+        self.assertEqual([], obsolete_pack_trans.list_dir('.'))
+
+    def test__clear_obsolete_packs_preserve(self):
+        packs = self.get_packs()
+        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
+        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
+        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
+        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
+        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
+        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
+        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
+                         sorted(obsolete_pack_trans.list_dir('.')))
+
     def test__max_pack_count(self):
         """The maximum pack count is a function of the number of revisions."""
         # no revisions - one pack, so that we can have a revision free repo
@@ -1071,6 +1194,33 @@
         # check some arbitrary big numbers
         self.assertEqual(25, packs._max_pack_count(112894))
 
+    def test_repr(self):
+        packs = self.get_packs()
+        self.assertContainsRe(repr(packs),
+            'RepositoryPackCollection(.*Repository(.*))')
+
+    def test__obsolete_packs(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        names = packs.names()
+        pack = packs.get_pack_by_name(names[0])
+        # Schedule this one for removal
+        packs._remove_pack_from_memory(pack)
+        # Simulate a concurrent update by renaming the .pack file and one of
+        # the indices
+        packs.transport.rename('packs/%s.pack' % (names[0],),
+                               'obsolete_packs/%s.pack' % (names[0],))
+        packs.transport.rename('indices/%s.iix' % (names[0],),
+                               'obsolete_packs/%s.iix' % (names[0],))
+        # Now trigger the obsoletion, and ensure that all the remaining files
+        # are still renamed
+        packs._obsolete_packs([pack])
+        self.assertEqual([n + '.pack' for n in names[1:]],
+                         sorted(packs._pack_transport.list_dir('.')))
+        # names[0] should not be present in the index anymore
+        self.assertEqual(names[1:],
+            sorted(set([osutils.splitext(n)[0] for n in
+                        packs._index_transport.list_dir('.')])))
+
     def test_pack_distribution_zero(self):
         packs = self.get_packs()
         self.assertEqual([0], packs.pack_distribution(0))
@@ -1244,6 +1394,60 @@
         self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
         self.assertFalse(packs.reload_pack_names())
 
+    def test_reload_pack_names_preserves_pending(self):
+        # TODO: Update this to also test for pending-deleted names
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        # We will add one pack (via start_write_group + insert_record_stream),
+        # and remove another pack (via _remove_pack_from_memory)
+        orig_names = packs.names()
+        orig_at_load = packs._packs_at_load
+        to_remove_name = iter(orig_names).next()
+        r.start_write_group()
+        self.addCleanup(r.abort_write_group)
+        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
+            ('text', 'rev'), (), None, 'content\n')])
+        new_pack = packs._new_pack
+        self.assertTrue(new_pack.data_inserted())
+        new_pack.finish()
+        packs.allocate(new_pack)
+        packs._new_pack = None
+        removed_pack = packs.get_pack_by_name(to_remove_name)
+        packs._remove_pack_from_memory(removed_pack)
+        names = packs.names()
+        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
+        new_names = set([x[0][0] for x in new_nodes])
+        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
+        self.assertEqual(set(names) - set(orig_names), new_names)
+        self.assertEqual(set([new_pack.name]), new_names)
+        self.assertEqual([to_remove_name],
+                         sorted([x[0][0] for x in deleted_nodes]))
+        packs.reload_pack_names()
+        reloaded_names = packs.names()
+        self.assertEqual(orig_at_load, packs._packs_at_load)
+        self.assertEqual(names, reloaded_names)
+        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
+        new_names = set([x[0][0] for x in new_nodes])
+        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
+        self.assertEqual(set(names) - set(orig_names), new_names)
+        self.assertEqual(set([new_pack.name]), new_names)
+        self.assertEqual([to_remove_name],
+                         sorted([x[0][0] for x in deleted_nodes]))
+
+    def test_autopack_obsoletes_new_pack(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        packs._max_pack_count = lambda x: 1
+        packs.pack_distribution = lambda x: [10]
+        r.start_write_group()
+        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
+            ('bogus-rev',), (), None, 'bogus-content\n')])
+        # This should trigger an autopack, which will combine everything into a
+        # single pack file.
+        new_names = r.commit_write_group()
+        names = packs.names()
+        self.assertEqual(1, len(names))
+        self.assertEqual([names[0] + '.pack'],
+                         packs._pack_transport.list_dir('.'))
+
     def test_autopack_reloads_and_stops(self):
         tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
         # After we have determined what needs to be autopacked, trigger a
@@ -1261,6 +1465,38 @@
         self.assertEqual(tree.branch.repository._pack_collection.names(),
                          packs.names())
 
+    def test__save_pack_names(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        names = packs.names()
+        pack = packs.get_pack_by_name(names[0])
+        packs._remove_pack_from_memory(pack)
+        packs._save_pack_names(obsolete_packs=[pack])
+        cur_packs = packs._pack_transport.list_dir('.')
+        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
+        # obsolete_packs will also have stuff like .rix and .iix present.
+        obsolete_packs = packs.transport.list_dir('obsolete_packs')
+        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
+        self.assertEqual([pack.name], sorted(obsolete_names))
+
+    def test__save_pack_names_already_obsoleted(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        names = packs.names()
+        pack = packs.get_pack_by_name(names[0])
+        packs._remove_pack_from_memory(pack)
+        # We are going to simulate a concurrent autopack by manually obsoleting
+        # the pack directly.
+        packs._obsolete_packs([pack])
+        packs._save_pack_names(clear_obsolete_packs=True,
+                               obsolete_packs=[pack])
+        cur_packs = packs._pack_transport.list_dir('.')
+        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
+        # Note that while we set clear_obsolete_packs=True, it should not
+        # delete a pack file that we have also scheduled for obsoletion.
+        obsolete_packs = packs.transport.list_dir('obsolete_packs')
+        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
+        self.assertEqual([pack.name], sorted(obsolete_names))
+
+
 
 class TestPack(TestCaseWithTransport):
     """Tests for the Pack object."""
@@ -1330,6 +1566,7 @@
             index_class=BTreeGraphIndex,
             use_chk_index=False)
         pack = pack_repo.NewPack(collection)
+        self.addCleanup(pack.abort) # Make sure the write stream gets closed
         self.assertIsInstance(pack.revision_index, BTreeBuilder)
         self.assertIsInstance(pack.inventory_index, BTreeBuilder)
         self.assertIsInstance(pack._hash, type(osutils.md5()))
@@ -1346,7 +1583,7 @@
     """Tests for the packs repository Packer class."""
 
     def test_pack_optimizes_pack_order(self):
-        builder = self.make_branch_builder('.')
+        builder = self.make_branch_builder('.', format="1.9")
         builder.start_series()
         builder.build_snapshot('A', None, [
             ('add', ('', 'root-id', 'directory', None)),
@@ -1388,6 +1625,7 @@
         packer = pack_repo.OptimisingPacker(self.get_pack_collection(),
                                             [], '.test')
         new_pack = packer.open_pack()
+        self.addCleanup(new_pack.abort) # ensure cleanup
         self.assertIsInstance(new_pack, pack_repo.NewPack)
         self.assertTrue(new_pack.revision_index._optimize_for_size)
         self.assertTrue(new_pack.inventory_index._optimize_for_size)