~bzr-pqm/bzr/bzr.dev


Viewing changes to bzrlib/tests/test_repository.py

  • Committer: Gary van der Merwe
  • Date: 2010-08-02 19:56:52 UTC
  • mfrom: (5050.3.18 2.2)
  • mto: (5050.3.19 2.2)
  • mto: This revision was merged to the branch mainline in revision 5371.
  • Revision ID: garyvdm@gmail.com-20100802195652-o1ppjemhwrr98i61
Merge lp:bzr/2.2.

@@ -1,4 +1,4 @@
-# Copyright (C) 2006, 2007, 2008, 2009 Canonical Ltd
+# Copyright (C) 2006-2010 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by

@@ -23,11 +23,10 @@
 """
 
 from stat import S_ISDIR
-from StringIO import StringIO
+import sys
 
 import bzrlib
-from bzrlib.errors import (NotBranchError,
-                           NoSuchFile,
+from bzrlib.errors import (NoSuchFile,
                            UnknownFormatError,
                            UnsupportedFormatError,
                            )

@@ -35,33 +34,25 @@
     graph,
     tests,
     )
-from bzrlib.branchbuilder import BranchBuilder
 from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
-from bzrlib.index import GraphIndex, InMemoryGraphIndex
+from bzrlib.index import GraphIndex
 from bzrlib.repository import RepositoryFormat
-from bzrlib.smart import server
 from bzrlib.tests import (
     TestCase,
     TestCaseWithTransport,
-    TestSkipped,
-    test_knit,
     )
 from bzrlib.transport import (
-    fakenfs,
     get_transport,
     )
-from bzrlib.transport.memory import MemoryServer
 from bzrlib import (
-    bencode,
     bzrdir,
     errors,
     inventory,
     osutils,
-    progress,
     repository,
     revision as _mod_revision,
-    symbol_versioning,
     upgrade,
+    versionedfile,
     workingtree,
     )
 from bzrlib.repofmt import (

@@ -252,7 +243,14 @@
         tree = control.create_workingtree()
         tree.add(['foo'], ['Foo:Bar'], ['file'])
         tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
-        tree.commit('first post', rev_id='first')
+        try:
+            tree.commit('first post', rev_id='first')
+        except errors.IllegalPath:
+            if sys.platform != 'win32':
+                raise
+            self.knownFailure('Foo:Bar cannot be used as a file-id on windows'
+                              ' in repo format 7')
+            return
         self.assertEqualDiff(
             '# bzr weave file v5\n'
             'i\n'

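A note on the pattern this hunk introduces: rather than letting a platform-specific limitation fail the suite, the test re-raises the exception on platforms where it is unexpected and calls TestCase.knownFailure() where it is a known limitation. A minimal sketch of the same idiom in isolation (the test and file names are illustrative):

    import sys

    from bzrlib import errors
    from bzrlib.tests import TestCaseWithTransport


    class TestColonInFileId(TestCaseWithTransport):

        def test_commit_colon_file_id(self):
            tree = self.make_branch_and_tree('.')
            self.build_tree(['foo'])
            tree.add(['foo'], ['Foo:Bar'])
            try:
                tree.commit('first post', rev_id='first')
            except errors.IllegalPath:
                if sys.platform != 'win32':
                    # Only Windows rejects colons in paths; anywhere else
                    # this would be a real bug, so re-raise.
                    raise
                self.knownFailure('Foo:Bar cannot be used as a file-id'
                                  ' on windows')
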
@@ -456,7 +454,7 @@
         repo = self.make_repository('.',
                 format=bzrdir.format_registry.get('knit')())
         inv_xml = '<inventory format="5">\n</inventory>\n'
-        inv = repo.deserialise_inventory('test-rev-id', inv_xml)
+        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
         self.assertEqual('test-rev-id', inv.root.revision)
 
     def test_deserialise_uses_global_revision_id(self):

@@ -468,9 +466,9 @@
         # Arguably, the deserialise_inventory should detect a mismatch, and
         # raise an error, rather than silently using one revision_id over the
         # other.
-        self.assertRaises(AssertionError, repo.deserialise_inventory,
+        self.assertRaises(AssertionError, repo._deserialise_inventory,
             'test-rev-id', inv_xml)
-        inv = repo.deserialise_inventory('other-rev-id', inv_xml)
+        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
         self.assertEqual('other-rev-id', inv.root.revision)
 
     def test_supports_external_lookups(self):

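These call sites move from deserialise_inventory to the private _deserialise_inventory. A hedged sketch of how such a public-to-private rename is typically cushioned for API users in bzrlib, keeping the old name as a deprecated alias via symbol_versioning (the class and version tuple below are illustrative, not the actual Repository change):

    from bzrlib.symbol_versioning import deprecated_in, deprecated_method


    class ExampleRepository(object):

        def _deserialise_inventory(self, revision_id, xml):
            # The implementation now lives under the private name.
            raise NotImplementedError(self._deserialise_inventory)

        @deprecated_method(deprecated_in((2, 2, 0)))
        def deserialise_inventory(self, revision_id, xml):
            # Old public name kept as a thin deprecated alias.
            return self._deserialise_inventory(revision_id, xml)
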
@@ -486,6 +484,8 @@
     _serializer = None
 
     def supports_rich_root(self):
+        if self._format is not None:
+            return self._format.rich_root_data
         return False
 
     def get_graph(self):

@@ -542,11 +542,17 @@
         # pair that it returns true on for the is_compatible static method
         # check
         dummy_a = DummyRepository()
+        dummy_a._format = RepositoryFormat()
         dummy_b = DummyRepository()
+        dummy_b._format = RepositoryFormat()
         repo = self.make_repository('.')
         # hack dummies to look like repo somewhat.
         dummy_a._serializer = repo._serializer
+        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
+        dummy_a._format.rich_root_data = repo._format.rich_root_data
         dummy_b._serializer = repo._serializer
+        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
+        dummy_b._format.rich_root_data = repo._format.rich_root_data
         repository.InterRepository.register_optimiser(InterDummy)
         try:
             # we should get the default for something InterDummy returns False

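For context on why the dummies now need a real RepositoryFormat: InterRepository optimisers are chosen by probing each registered class's static is_compatible(source, target), and the concrete checks read format attributes such as rich_root_data and supports_tree_reference. A hedged sketch of that registration machinery; DummySource and InterDummySketch are illustrative stand-ins for the test file's own DummyRepository and InterDummy:

    from bzrlib import repository


    class DummySource(object):
        """Stand-in for the test file's DummyRepository."""


    class InterDummySketch(repository.InterRepository):
        """Claims only pairs whose source is a DummySource."""

        @staticmethod
        def is_compatible(repo_source, repo_target):
            return isinstance(repo_source, DummySource)


    repository.InterRepository.register_optimiser(InterDummySketch)
    # ... and unregister afterwards, as the test does in its finally block:
    repository.InterRepository.unregister_optimiser(InterDummySketch)
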
@@ -673,15 +679,84 @@
         self.assertFalse(repo._format.supports_external_lookups)
 
 
-class Test2a(TestCaseWithTransport):
+class Test2a(tests.TestCaseWithMemoryTransport):
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot('1', None, [
+            ('add', ('', 'root-id', 'directory', '')),
+            ('add', ('file', 'file-id', 'file', 'content\n'))])
+        builder.build_snapshot('2', ['1'], [
+            ('modify', ('file-id', 'content-2\n'))])
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [('file-id', '1',), ('file-id', '2',)])
+        file_1_details = details[('file-id', '1')]
+        file_2_details = details[('file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot('1', None, [
+            ('add', ('', 'root-id', 'directory', '')),
+            ('add', ('file', 'file-id', 'file', 'content\n'))])
+        builder.build_snapshot('2', ['1'], [
+            ('modify', ('file-id', 'content-2\n'))])
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [('file-id', '1',), ('file-id', '2',)])
+        file_1_details = details[('file-id', '1')]
+        file_2_details = details[('file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot('1', None, [
+            ('add', ('', 'root-id', 'directory', '')),
+            ('add', ('file', 'file-id', 'file', 'content\n'))])
+        builder.build_snapshot('2', ['1'], [
+            ('modify', ('file-id', 'content-2\n'))])
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [('file-id', '1',), ('file-id', '2',)])
+        file_1_details = details[('file-id', '1')]
+        file_2_details = details[('file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
 
     def test_format_pack_compresses_True(self):
         repo = self.make_repository('repo', format='2a')
         self.assertTrue(repo._format.pack_compresses)
 
     def test_inventories_use_chk_map_with_parent_base_dict(self):
-        tree = self.make_branch_and_tree('repo', format="2a")
+        tree = self.make_branch_and_memory_tree('repo', format="2a")
+        tree.lock_write()
+        tree.add([''], ['TREE_ROOT'])
         revid = tree.commit("foo")
+        tree.unlock()
         tree.lock_read()
         self.addCleanup(tree.unlock)
         inv = tree.branch.repository.get_inventory(revid)

@@ -696,12 +771,19 @@
         # at 20 unchanged commits, chk pages are packed that are split into
         # two groups such that the new pack being made doesn't have all its
         # pages in the source packs (though they are in the repository).
-        tree = self.make_branch_and_tree('tree', format='2a')
+        # Use a memory backed repository, we don't need to hit disk for this
+        tree = self.make_branch_and_memory_tree('tree', format='2a')
+        tree.lock_write()
+        self.addCleanup(tree.unlock)
+        tree.add([''], ['TREE_ROOT'])
         for pos in range(20):
             tree.commit(str(pos))
 
     def test_pack_with_hint(self):
-        tree = self.make_branch_and_tree('tree', format='2a')
+        tree = self.make_branch_and_memory_tree('tree', format='2a')
+        tree.lock_write()
+        self.addCleanup(tree.unlock)
+        tree.add([''], ['TREE_ROOT'])
         # 1 commit to leave untouched
         tree.commit('1')
         to_keep = tree.branch.repository._pack_collection.names()

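The conversions above all follow one recipe, worth stating once: make_branch_and_memory_tree returns a MemoryTree, which, unlike a working tree on disk, starts unlocked and without a root entry, so each test must lock it and add '' itself before committing. A minimal self-contained sketch of that pattern (the test name is illustrative):

    from bzrlib import tests


    class TestWithoutDisk(tests.TestCaseWithMemoryTransport):

        def test_commit_in_memory(self):
            tree = self.make_branch_and_memory_tree('tree', format='2a')
            tree.lock_write()
            self.addCleanup(tree.unlock)
            # A MemoryTree has no implicit root; add it before committing.
            tree.add([''], ['TREE_ROOT'])
            tree.commit('first')
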
@@ -946,6 +1028,7 @@
             inv = inventory.Inventory(revision_id='rev1a')
             inv.root.revision = 'rev1a'
             self.add_file(repo, inv, 'file1', 'rev1a', [])
+            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
             repo.add_inventory('rev1a', inv, [])
             revision = _mod_revision.Revision('rev1a',
                 committer='jrandom@example.com', timestamp=0,

@@ -986,6 +1069,7 @@
     def add_revision(self, repo, revision_id, inv, parent_ids):
         inv.revision_id = revision_id
         inv.root.revision = revision_id
+        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
         repo.add_inventory(revision_id, inv, parent_ids)
         revision = _mod_revision.Revision(revision_id,
             committer='jrandom@example.com', timestamp=0, inventory_sha1='',

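Both hunks above fix their fixtures the same way: repository texts are keyed by (file_id, revision_id), and a consistent repository stores one text per revision for every inventory entry, including the root directory, whose text content is empty. A hedged fragment showing just that call shape; the helper name is illustrative and 'repo' is assumed to be write-locked inside a write group, as in the fixtures:

    def add_root_text(repo, inv, revision_id):
        # key=(file_id, revision_id); no parents; empty lines, since a
        # directory has no text content of its own.
        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
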
@@ -1008,14 +1092,17 @@
         """
         broken_repo = self.make_broken_repository()
         empty_repo = self.make_repository('empty-repo')
-        # See bug https://bugs.launchpad.net/bzr/+bug/389141 for information
-        # about why this was turned into expectFailure
-        self.expectFailure('new Stream fetch fills in missing compression'
-           ' parents (bug #389141)',
-           self.assertRaises, (errors.RevisionNotPresent, errors.BzrCheckError),
-                              empty_repo.fetch, broken_repo)
-        self.assertRaises((errors.RevisionNotPresent, errors.BzrCheckError),
-                          empty_repo.fetch, broken_repo)
+        try:
+            empty_repo.fetch(broken_repo)
+        except (errors.RevisionNotPresent, errors.BzrCheckError):
+            # Test successful: compression parent not being copied leads to
+            # error.
+            return
+        empty_repo.lock_read()
+        self.addCleanup(empty_repo.unlock)
+        text = empty_repo.texts.get_record_stream(
+            [('file2-id', 'rev3')], 'topological', True).next()
+        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
 
 
 class TestRepositoryPackCollection(TestCaseWithTransport):

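The replacement test reads the fetched text back instead of expecting a failure. A hedged sketch of that read path, using only the calls the hunk itself uses: get_record_stream yields one record per requested key, and get_bytes_as('fulltext') materialises it (the helper name and key are illustrative):

    def read_fulltext(repo, file_id, revision_id):
        repo.lock_read()
        try:
            stream = repo.texts.get_record_stream(
                [(file_id, revision_id)], 'topological', True)
            record = stream.next()  # Python 2 iterator protocol
            return record.get_bytes_as('fulltext')
        finally:
            repo.unlock()
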
@@ -1030,7 +1117,7 @@
 
     def make_packs_and_alt_repo(self, write_lock=False):
         """Create a pack repo with 3 packs, and access it via a second repo."""
-        tree = self.make_branch_and_tree('.')
+        tree = self.make_branch_and_tree('.', format=self.get_format())
         tree.lock_write()
         self.addCleanup(tree.unlock)
         rev1 = tree.commit('one')

@@ -1046,6 +1133,31 @@
         packs.ensure_loaded()
         return tree, r, packs, [rev1, rev2, rev3]
 
+    def test__clear_obsolete_packs(self):
+        packs = self.get_packs()
+        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
+        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
+        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
+        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
+        res = packs._clear_obsolete_packs()
+        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
+        self.assertEqual([], obsolete_pack_trans.list_dir('.'))
+
+    def test__clear_obsolete_packs_preserve(self):
+        packs = self.get_packs()
+        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
+        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
+        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
+        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
+        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
+        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
+        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
+        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
+                         sorted(obsolete_pack_trans.list_dir('.')))
+
     def test__max_pack_count(self):
         """The maximum pack count is a function of the number of revisions."""
         # no revisions - one pack, so that we can have a revision free repo

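The two new tests drive the Transport API directly. A brief self-contained sketch of those calls against an in-memory transport (URL and file names illustrative): clone() gives a transport rooted at a subdirectory, put_bytes writes a file, and list_dir enumerates one directory level.

    from bzrlib.transport import get_transport

    trans = get_transport('memory:///')
    trans.mkdir('obsolete_packs')
    sub = trans.clone('obsolete_packs')   # transport rooted at the subdir
    sub.put_bytes('a-pack.pack', 'content\n')
    print sub.list_dir('.')               # ['a-pack.pack']
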
@@ -1071,6 +1183,33 @@
         # check some arbitrary big numbers
         self.assertEqual(25, packs._max_pack_count(112894))
 
+    def test_repr(self):
+        packs = self.get_packs()
+        self.assertContainsRe(repr(packs),
+            'RepositoryPackCollection(.*Repository(.*))')
+
+    def test__obsolete_packs(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        names = packs.names()
+        pack = packs.get_pack_by_name(names[0])
+        # Schedule this one for removal
+        packs._remove_pack_from_memory(pack)
+        # Simulate a concurrent update by renaming the .pack file and one of
+        # the indices
+        packs.transport.rename('packs/%s.pack' % (names[0],),
+                               'obsolete_packs/%s.pack' % (names[0],))
+        packs.transport.rename('indices/%s.iix' % (names[0],),
+                               'obsolete_packs/%s.iix' % (names[0],))
+        # Now trigger the obsoletion, and ensure that all the remaining files
+        # are still renamed
+        packs._obsolete_packs([pack])
+        self.assertEqual([n + '.pack' for n in names[1:]],
+                         sorted(packs._pack_transport.list_dir('.')))
+        # names[0] should not be present in the index anymore
+        self.assertEqual(names[1:],
+            sorted(set([osutils.splitext(n)[0] for n in
+                        packs._index_transport.list_dir('.')])))
+
     def test_pack_distribution_zero(self):
         packs = self.get_packs()
         self.assertEqual([0], packs.pack_distribution(0))

@@ -1244,6 +1383,60 @@
         self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
         self.assertFalse(packs.reload_pack_names())
 
+    def test_reload_pack_names_preserves_pending(self):
+        # TODO: Update this to also test for pending-deleted names
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        # We will add one pack (via start_write_group + insert_record_stream),
+        # and remove another pack (via _remove_pack_from_memory)
+        orig_names = packs.names()
+        orig_at_load = packs._packs_at_load
+        to_remove_name = iter(orig_names).next()
+        r.start_write_group()
+        self.addCleanup(r.abort_write_group)
+        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
+            ('text', 'rev'), (), None, 'content\n')])
+        new_pack = packs._new_pack
+        self.assertTrue(new_pack.data_inserted())
+        new_pack.finish()
+        packs.allocate(new_pack)
+        packs._new_pack = None
+        removed_pack = packs.get_pack_by_name(to_remove_name)
+        packs._remove_pack_from_memory(removed_pack)
+        names = packs.names()
+        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
+        new_names = set([x[0][0] for x in new_nodes])
+        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
+        self.assertEqual(set(names) - set(orig_names), new_names)
+        self.assertEqual(set([new_pack.name]), new_names)
+        self.assertEqual([to_remove_name],
+                         sorted([x[0][0] for x in deleted_nodes]))
+        packs.reload_pack_names()
+        reloaded_names = packs.names()
+        self.assertEqual(orig_at_load, packs._packs_at_load)
+        self.assertEqual(names, reloaded_names)
+        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
+        new_names = set([x[0][0] for x in new_nodes])
+        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
+        self.assertEqual(set(names) - set(orig_names), new_names)
+        self.assertEqual(set([new_pack.name]), new_names)
+        self.assertEqual([to_remove_name],
+                         sorted([x[0][0] for x in deleted_nodes]))
+
+    def test_autopack_obsoletes_new_pack(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        packs._max_pack_count = lambda x: 1
+        packs.pack_distribution = lambda x: [10]
+        r.start_write_group()
+        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
+            ('bogus-rev',), (), None, 'bogus-content\n')])
+        # This should trigger an autopack, which will combine everything into a
+        # single pack file.
+        new_names = r.commit_write_group()
+        names = packs.names()
+        self.assertEqual(1, len(names))
+        self.assertEqual([names[0] + '.pack'],
+                         packs._pack_transport.list_dir('.'))
+
     def test_autopack_reloads_and_stops(self):
         tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
         # After we have determined what needs to be autopacked, trigger a

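Both new tests inject records through the same object, so one note here: FulltextContentFactory(key, parents, sha1, text) wraps a ready-made fulltext so insert_record_stream can feed it into a VersionedFiles store, and passing sha1=None lets the digest be computed on insertion. Sketch of the call shape used above:

    from bzrlib import versionedfile

    record = versionedfile.FulltextContentFactory(
        ('text', 'rev'),  # key: (file_id, revision_id)
        (),               # no parents
        None,             # sha1 computed for us
        'content\n')      # the fulltext bytes
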
@@ -1261,6 +1454,38 @@
         self.assertEqual(tree.branch.repository._pack_collection.names(),
                          packs.names())
 
+    def test__save_pack_names(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        names = packs.names()
+        pack = packs.get_pack_by_name(names[0])
+        packs._remove_pack_from_memory(pack)
+        packs._save_pack_names(obsolete_packs=[pack])
+        cur_packs = packs._pack_transport.list_dir('.')
+        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
+        # obsolete_packs will also have stuff like .rix and .iix present.
+        obsolete_packs = packs.transport.list_dir('obsolete_packs')
+        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
+        self.assertEqual([pack.name], sorted(obsolete_names))
+
+    def test__save_pack_names_already_obsoleted(self):
+        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
+        names = packs.names()
+        pack = packs.get_pack_by_name(names[0])
+        packs._remove_pack_from_memory(pack)
+        # We are going to simulate a concurrent autopack by manually obsoleting
+        # the pack directly.
+        packs._obsolete_packs([pack])
+        packs._save_pack_names(clear_obsolete_packs=True,
+                               obsolete_packs=[pack])
+        cur_packs = packs._pack_transport.list_dir('.')
+        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
+        # Note that while we set clear_obsolete_packs=True, it should not
+        # delete a pack file that we have also scheduled for obsoletion.
+        obsolete_packs = packs.transport.list_dir('obsolete_packs')
+        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
+        self.assertEqual([pack.name], sorted(obsolete_names))
+
+
 
 class TestPack(TestCaseWithTransport):
     """Tests for the Pack object."""

@@ -1330,6 +1555,7 @@
             index_class=BTreeGraphIndex,
             use_chk_index=False)
         pack = pack_repo.NewPack(collection)
+        self.addCleanup(pack.abort) # Make sure the write stream gets closed
         self.assertIsInstance(pack.revision_index, BTreeBuilder)
         self.assertIsInstance(pack.inventory_index, BTreeBuilder)
         self.assertIsInstance(pack._hash, type(osutils.md5()))

@@ -1346,7 +1572,7 @@
     """Tests for the packs repository Packer class."""
 
     def test_pack_optimizes_pack_order(self):
-        builder = self.make_branch_builder('.')
+        builder = self.make_branch_builder('.', format="1.9")
         builder.start_series()
         builder.build_snapshot('A', None, [
             ('add', ('', 'root-id', 'directory', None)),

@@ -1388,6 +1614,7 @@
         packer = pack_repo.OptimisingPacker(self.get_pack_collection(),
                                             [], '.test')
         new_pack = packer.open_pack()
+        self.addCleanup(new_pack.abort) # ensure cleanup
         self.assertIsInstance(new_pack, pack_repo.NewPack)
         self.assertTrue(new_pack.revision_index._optimize_for_size)
         self.assertTrue(new_pack.inventory_index._optimize_for_size)

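Both addCleanup additions in this file share one rationale: cleanups registered with addCleanup run in reverse order at tearDown even when an assertion fails, so registering abort immediately after creating the pack guarantees its write stream is closed on every exit path. A hedged sketch of the idiom; the test name is illustrative and get_pack_collection is assumed from the surrounding test class:

    from bzrlib.repofmt import pack_repo
    from bzrlib.tests import TestCaseWithTransport


    class ExamplePackTest(TestCaseWithTransport):

        def test_new_pack_always_closed(self):
            collection = self.get_pack_collection()  # fixture assumed
            pack = pack_repo.NewPack(collection)
            self.addCleanup(pack.abort)  # runs even if the assert fails
            self.assertIsInstance(pack, pack_repo.NewPack)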