~bzr-pqm/bzr/bzr.dev


Viewing changes to bzrlib/tests/test_repository.py

  • Committer: Matthew Fuller
  • Date: 2009-08-18 08:10:44 UTC
  • mto: (4772.1.1 integration)
  • mto: This revision was merged to the branch mainline in revision 4773.
  • Revision ID: fullermd@over-yonder.net-20090818081044-2due6ius01c4pwjl
Fix up some doctests to handle specs that now parse as RevisionSpec_dwim
instead of RevisionSpec_revno, and that end up as _dwim objects (which may
error eventually, but not until we try to evaluate them) instead of raising
immediately.
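
For context, the behaviour this commit adapts the tests to: parsing an
ambiguous revision spec now yields a RevisionSpec_dwim, which always
constructs successfully and only raises an "invalid revision spec" error once
it is actually evaluated. The following is a minimal, hypothetical sketch of
that deferred-error shape (the class and method names mirror bzrlib's, but
the bodies are illustrative, not bzrlib's real implementation):

    class InvalidRevisionSpec(Exception):
        """Illustrative stand-in for bzrlib's invalid-spec error."""


    class RevisionSpec_dwim(object):
        """Holds an ambiguous spec string; interpretation is deferred."""

        def __init__(self, spec):
            # No validation here: construction never raises, which is why
            # the doctests no longer see "insta-errors" for bad specs.
            self.spec = spec

        def in_history(self, branch_revnos):
            # Only at evaluation time do we try concrete interpretations
            # (here, just "is it a revno in the branch?") and fail otherwise.
            if self.spec.isdigit() and int(self.spec) in branch_revnos:
                return int(self.spec)
            raise InvalidRevisionSpec(self.spec)


    spec = RevisionSpec_dwim('nosuchrev')  # succeeds; no error yet
    try:
        spec.in_history(set([1, 2, 3]))    # the error surfaces only here
    except InvalidRevisionSpec:
        pass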

--- bzrlib/tests/test_repository.py
+++ bzrlib/tests/test_repository.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2006-2010 Canonical Ltd
+# Copyright (C) 2006, 2007, 2008, 2009 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -24,7 +24,6 @@
 
 from stat import S_ISDIR
 from StringIO import StringIO
-import sys
 
 import bzrlib
 from bzrlib.errors import (NotBranchError,
@@ -51,6 +50,7 @@
     fakenfs,
     get_transport,
     )
+from bzrlib.transport.memory import MemoryServer
 from bzrlib import (
     bencode,
     bzrdir,
@@ -62,7 +62,6 @@
     revision as _mod_revision,
     symbol_versioning,
     upgrade,
-    versionedfile,
     workingtree,
     )
 from bzrlib.repofmt import (
@@ -253,14 +252,7 @@
         tree = control.create_workingtree()
         tree.add(['foo'], ['Foo:Bar'], ['file'])
         tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
-        try:
-            tree.commit('first post', rev_id='first')
-        except errors.IllegalPath:
-            if sys.platform != 'win32':
-                raise
-            self.knownFailure('Foo:Bar cannot be used as a file-id on windows'
-                              ' in repo format 7')
-            return
+        tree.commit('first post', rev_id='first')
         self.assertEqualDiff(
             '# bzr weave file v5\n'
             'i\n'
@@ -464,7 +456,7 @@
         repo = self.make_repository('.',
                 format=bzrdir.format_registry.get('knit')())
         inv_xml = '<inventory format="5">\n</inventory>\n'
-        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
+        inv = repo.deserialise_inventory('test-rev-id', inv_xml)
         self.assertEqual('test-rev-id', inv.root.revision)
 
     def test_deserialise_uses_global_revision_id(self):
@@ -476,9 +468,9 @@
         # Arguably, the deserialise_inventory should detect a mismatch, and
         # raise an error, rather than silently using one revision_id over the
         # other.
-        self.assertRaises(AssertionError, repo._deserialise_inventory,
+        self.assertRaises(AssertionError, repo.deserialise_inventory,
             'test-rev-id', inv_xml)
-        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
+        inv = repo.deserialise_inventory('other-rev-id', inv_xml)
         self.assertEqual('other-rev-id', inv.root.revision)
 
     def test_supports_external_lookups(self):
@@ -689,84 +681,15 @@
         self.assertFalse(repo._format.supports_external_lookups)
 
 
-class Test2a(tests.TestCaseWithMemoryTransport):
-
-    def test_fetch_combines_groups(self):
-        builder = self.make_branch_builder('source', format='2a')
-        builder.start_series()
-        builder.build_snapshot('1', None, [
-            ('add', ('', 'root-id', 'directory', '')),
-            ('add', ('file', 'file-id', 'file', 'content\n'))])
-        builder.build_snapshot('2', ['1'], [
-            ('modify', ('file-id', 'content-2\n'))])
-        builder.finish_series()
-        source = builder.get_branch()
-        target = self.make_repository('target', format='2a')
-        target.fetch(source.repository)
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        details = target.texts._index.get_build_details(
-            [('file-id', '1',), ('file-id', '2',)])
-        file_1_details = details[('file-id', '1')]
-        file_2_details = details[('file-id', '2')]
-        # The index, and what to read off disk, should be the same for both
-        # versions of the file.
-        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
-
-    def test_fetch_combines_groups(self):
-        builder = self.make_branch_builder('source', format='2a')
-        builder.start_series()
-        builder.build_snapshot('1', None, [
-            ('add', ('', 'root-id', 'directory', '')),
-            ('add', ('file', 'file-id', 'file', 'content\n'))])
-        builder.build_snapshot('2', ['1'], [
-            ('modify', ('file-id', 'content-2\n'))])
-        builder.finish_series()
-        source = builder.get_branch()
-        target = self.make_repository('target', format='2a')
-        target.fetch(source.repository)
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        details = target.texts._index.get_build_details(
-            [('file-id', '1',), ('file-id', '2',)])
-        file_1_details = details[('file-id', '1')]
-        file_2_details = details[('file-id', '2')]
-        # The index, and what to read off disk, should be the same for both
-        # versions of the file.
-        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
-
-    def test_fetch_combines_groups(self):
-        builder = self.make_branch_builder('source', format='2a')
-        builder.start_series()
-        builder.build_snapshot('1', None, [
-            ('add', ('', 'root-id', 'directory', '')),
-            ('add', ('file', 'file-id', 'file', 'content\n'))])
-        builder.build_snapshot('2', ['1'], [
-            ('modify', ('file-id', 'content-2\n'))])
-        builder.finish_series()
-        source = builder.get_branch()
-        target = self.make_repository('target', format='2a')
-        target.fetch(source.repository)
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        details = target.texts._index.get_build_details(
-            [('file-id', '1',), ('file-id', '2',)])
-        file_1_details = details[('file-id', '1')]
-        file_2_details = details[('file-id', '2')]
-        # The index, and what to read off disk, should be the same for both
-        # versions of the file.
-        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+class Test2a(TestCaseWithTransport):
 
     def test_format_pack_compresses_True(self):
         repo = self.make_repository('repo', format='2a')
         self.assertTrue(repo._format.pack_compresses)
 
     def test_inventories_use_chk_map_with_parent_base_dict(self):
-        tree = self.make_branch_and_memory_tree('repo', format="2a")
-        tree.lock_write()
-        tree.add([''], ['TREE_ROOT'])
+        tree = self.make_branch_and_tree('repo', format="2a")
         revid = tree.commit("foo")
-        tree.unlock()
         tree.lock_read()
         self.addCleanup(tree.unlock)
         inv = tree.branch.repository.get_inventory(revid)
@@ -781,19 +704,12 @@
         # at 20 unchanged commits, chk pages are packed that are split into
         # two groups such that the new pack being made doesn't have all its
         # pages in the source packs (though they are in the repository).
-        # Use a memory backed repository, we don't need to hit disk for this
-        tree = self.make_branch_and_memory_tree('tree', format='2a')
-        tree.lock_write()
-        self.addCleanup(tree.unlock)
-        tree.add([''], ['TREE_ROOT'])
+        tree = self.make_branch_and_tree('tree', format='2a')
         for pos in range(20):
             tree.commit(str(pos))
 
     def test_pack_with_hint(self):
-        tree = self.make_branch_and_memory_tree('tree', format='2a')
-        tree.lock_write()
-        self.addCleanup(tree.unlock)
-        tree.add([''], ['TREE_ROOT'])
+        tree = self.make_branch_and_tree('tree', format='2a')
        # 1 commit to leave untouched
         tree.commit('1')
         to_keep = tree.branch.repository._pack_collection.names()
@@ -1038,7 +954,6 @@
             inv = inventory.Inventory(revision_id='rev1a')
             inv.root.revision = 'rev1a'
             self.add_file(repo, inv, 'file1', 'rev1a', [])
-            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
             repo.add_inventory('rev1a', inv, [])
             revision = _mod_revision.Revision('rev1a',
                 committer='jrandom@example.com', timestamp=0,
@@ -1079,7 +994,6 @@
     def add_revision(self, repo, revision_id, inv, parent_ids):
         inv.revision_id = revision_id
         inv.root.revision = revision_id
-        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
         repo.add_inventory(revision_id, inv, parent_ids)
         revision = _mod_revision.Revision(revision_id,
             committer='jrandom@example.com', timestamp=0, inventory_sha1='',
@@ -1143,31 +1057,6 @@
         packs.ensure_loaded()
         return tree, r, packs, [rev1, rev2, rev3]
 
-    def test__clear_obsolete_packs(self):
-        packs = self.get_packs()
-        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
-        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
-        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
-        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
-        res = packs._clear_obsolete_packs()
-        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
-        self.assertEqual([], obsolete_pack_trans.list_dir('.'))
-
-    def test__clear_obsolete_packs_preserve(self):
-        packs = self.get_packs()
-        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
-        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
-        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
-        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
-        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
-        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
-        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
-        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
-                         sorted(obsolete_pack_trans.list_dir('.')))
-
     def test__max_pack_count(self):
         """The maximum pack count is a function of the number of revisions."""
         # no revisions - one pack, so that we can have a revision free repo
@@ -1193,33 +1082,6 @@
         # check some arbitrary big numbers
         self.assertEqual(25, packs._max_pack_count(112894))
 
-    def test_repr(self):
-        packs = self.get_packs()
-        self.assertContainsRe(repr(packs),
-            'RepositoryPackCollection(.*Repository(.*))')
-
-    def test__obsolete_packs(self):
-        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
-        names = packs.names()
-        pack = packs.get_pack_by_name(names[0])
-        # Schedule this one for removal
-        packs._remove_pack_from_memory(pack)
-        # Simulate a concurrent update by renaming the .pack file and one of
-        # the indices
-        packs.transport.rename('packs/%s.pack' % (names[0],),
-                               'obsolete_packs/%s.pack' % (names[0],))
-        packs.transport.rename('indices/%s.iix' % (names[0],),
-                               'obsolete_packs/%s.iix' % (names[0],))
-        # Now trigger the obsoletion, and ensure that all the remaining files
-        # are still renamed
-        packs._obsolete_packs([pack])
-        self.assertEqual([n + '.pack' for n in names[1:]],
-                         sorted(packs._pack_transport.list_dir('.')))
-        # names[0] should not be present in the index anymore
-        self.assertEqual(names[1:],
-            sorted(set([osutils.splitext(n)[0] for n in
-                        packs._index_transport.list_dir('.')])))
-
     def test_pack_distribution_zero(self):
         packs = self.get_packs()
         self.assertEqual([0], packs.pack_distribution(0))
@@ -1393,60 +1255,6 @@
         self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
         self.assertFalse(packs.reload_pack_names())
 
-    def test_reload_pack_names_preserves_pending(self):
-        # TODO: Update this to also test for pending-deleted names
-        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
-        # We will add one pack (via start_write_group + insert_record_stream),
-        # and remove another pack (via _remove_pack_from_memory)
-        orig_names = packs.names()
-        orig_at_load = packs._packs_at_load
-        to_remove_name = iter(orig_names).next()
-        r.start_write_group()
-        self.addCleanup(r.abort_write_group)
-        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
-            ('text', 'rev'), (), None, 'content\n')])
-        new_pack = packs._new_pack
-        self.assertTrue(new_pack.data_inserted())
-        new_pack.finish()
-        packs.allocate(new_pack)
-        packs._new_pack = None
-        removed_pack = packs.get_pack_by_name(to_remove_name)
-        packs._remove_pack_from_memory(removed_pack)
-        names = packs.names()
-        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
-        new_names = set([x[0][0] for x in new_nodes])
-        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
-        self.assertEqual(set(names) - set(orig_names), new_names)
-        self.assertEqual(set([new_pack.name]), new_names)
-        self.assertEqual([to_remove_name],
-                         sorted([x[0][0] for x in deleted_nodes]))
-        packs.reload_pack_names()
-        reloaded_names = packs.names()
-        self.assertEqual(orig_at_load, packs._packs_at_load)
-        self.assertEqual(names, reloaded_names)
-        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
-        new_names = set([x[0][0] for x in new_nodes])
-        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
-        self.assertEqual(set(names) - set(orig_names), new_names)
-        self.assertEqual(set([new_pack.name]), new_names)
-        self.assertEqual([to_remove_name],
-                         sorted([x[0][0] for x in deleted_nodes]))
-
-    def test_autopack_obsoletes_new_pack(self):
-        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
-        packs._max_pack_count = lambda x: 1
-        packs.pack_distribution = lambda x: [10]
-        r.start_write_group()
-        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
-            ('bogus-rev',), (), None, 'bogus-content\n')])
-        # This should trigger an autopack, which will combine everything into a
-        # single pack file.
-        new_names = r.commit_write_group()
-        names = packs.names()
-        self.assertEqual(1, len(names))
-        self.assertEqual([names[0] + '.pack'],
-                         packs._pack_transport.list_dir('.'))
-
     def test_autopack_reloads_and_stops(self):
         tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
         # After we have determined what needs to be autopacked, trigger a
@@ -1464,38 +1272,6 @@
         self.assertEqual(tree.branch.repository._pack_collection.names(),
                          packs.names())
 
-    def test__save_pack_names(self):
-        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
-        names = packs.names()
-        pack = packs.get_pack_by_name(names[0])
-        packs._remove_pack_from_memory(pack)
-        packs._save_pack_names(obsolete_packs=[pack])
-        cur_packs = packs._pack_transport.list_dir('.')
-        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
-        # obsolete_packs will also have stuff like .rix and .iix present.
-        obsolete_packs = packs.transport.list_dir('obsolete_packs')
-        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
-        self.assertEqual([pack.name], sorted(obsolete_names))
-
-    def test__save_pack_names_already_obsoleted(self):
-        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
-        names = packs.names()
-        pack = packs.get_pack_by_name(names[0])
-        packs._remove_pack_from_memory(pack)
-        # We are going to simulate a concurrent autopack by manually obsoleting
-        # the pack directly.
-        packs._obsolete_packs([pack])
-        packs._save_pack_names(clear_obsolete_packs=True,
-                               obsolete_packs=[pack])
-        cur_packs = packs._pack_transport.list_dir('.')
-        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
-        # Note that while we set clear_obsolete_packs=True, it should not
-        # delete a pack file that we have also scheduled for obsoletion.
-        obsolete_packs = packs.transport.list_dir('obsolete_packs')
-        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
-        self.assertEqual([pack.name], sorted(obsolete_names))
-
-
 
 class TestPack(TestCaseWithTransport):
     """Tests for the Pack object."""
@@ -1565,7 +1341,6 @@
             index_class=BTreeGraphIndex,
             use_chk_index=False)
         pack = pack_repo.NewPack(collection)
-        self.addCleanup(pack.abort) # Make sure the write stream gets closed
         self.assertIsInstance(pack.revision_index, BTreeBuilder)
         self.assertIsInstance(pack.inventory_index, BTreeBuilder)
         self.assertIsInstance(pack._hash, type(osutils.md5()))
@@ -1624,7 +1399,6 @@
         packer = pack_repo.OptimisingPacker(self.get_pack_collection(),
                                             [], '.test')
         new_pack = packer.open_pack()
-        self.addCleanup(new_pack.abort) # ensure cleanup
         self.assertIsInstance(new_pack, pack_repo.NewPack)
         self.assertTrue(new_pack.revision_index._optimize_for_size)
         self.assertTrue(new_pack.inventory_index._optimize_for_size)
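
A note on a pattern that recurs in the hunks above: each
make_branch_and_memory_tree call site carries explicit lock_write()/add([''])
boilerplate that the corresponding make_branch_and_tree version drops. A
memory-backed tree has to be write-locked and given a root entry before it
can commit, whereas a disk-backed working tree handles both itself. A minimal
sketch of the two shapes, lifted from the diff (the test class and method
names here are hypothetical):

    from bzrlib.tests import TestCaseWithTransport

    class TestTreeCreationStyles(TestCaseWithTransport):

        def test_memory_tree_needs_lock_and_root(self):
            # The '-' side of the hunks: a MemoryTree must be explicitly
            # write-locked and given a root id before commit() will work.
            tree = self.make_branch_and_memory_tree('tree', format='2a')
            tree.lock_write()
            self.addCleanup(tree.unlock)
            tree.add([''], ['TREE_ROOT'])
            tree.commit('1')

        def test_disk_tree_commits_directly(self):
            # The '+' side: a working tree on disk already has a root, and
            # commit() takes its own lock.
            tree = self.make_branch_and_tree('tree', format='2a')
            tree.commit('1')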