~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/tests/test_repository.py

  • Committer: Robert Collins
  • Date: 2006-03-01 03:26:23 UTC
  • mto: (1594.2.4 integration)
  • mto: This revision was merged to the branch mainline in revision 1596.
  • Revision ID: robertc@robertcollins.net-20060301032623-9d3c073e102f2239
Move WeaveStore down into bzrlib.store.versioned.weave.

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
 
# Copyright (C) 2006-2011 Canonical Ltd
2
 
#
 
1
# (C) 2006 Canonical Ltd
 
2
 
3
3
# This program is free software; you can redistribute it and/or modify
4
4
# it under the terms of the GNU General Public License as published by
5
5
# the Free Software Foundation; either version 2 of the License, or
6
6
# (at your option) any later version.
7
 
#
 
7
 
8
8
# This program is distributed in the hope that it will be useful,
9
9
# but WITHOUT ANY WARRANTY; without even the implied warranty of
10
10
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
11
11
# GNU General Public License for more details.
12
 
#
 
12
 
13
13
# You should have received a copy of the GNU General Public License
14
14
# along with this program; if not, write to the Free Software
15
 
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
15
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
16
16
 
17
17
"""Tests for the Repository facility that are not interface tests.
18
18
 
19
 
For interface tests see tests/per_repository/*.py.
 
19
For interface tests see tests/repository_implementations/*.py.
20
20
 
21
21
For concrete class tests see this file, and for storage formats tests
22
22
also see this file.
23
23
"""
24
24
 
25
 
from stat import S_ISDIR
 
25
from stat import *
 
26
from StringIO import StringIO
26
27
 
27
28
import bzrlib
28
 
from bzrlib.errors import (
29
 
    UnknownFormatError,
30
 
    UnsupportedFormatError,
31
 
    )
32
 
from bzrlib import (
33
 
    btree_index,
34
 
    graph,
35
 
    symbol_versioning,
36
 
    tests,
37
 
    transport,
38
 
    )
39
 
from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
40
 
from bzrlib.index import GraphIndex
41
 
from bzrlib.repository import RepositoryFormat
42
 
from bzrlib.tests import (
43
 
    TestCase,
44
 
    TestCaseWithTransport,
45
 
    )
46
 
from bzrlib import (
47
 
    bzrdir,
48
 
    errors,
49
 
    inventory,
50
 
    osutils,
51
 
    repository,
52
 
    revision as _mod_revision,
53
 
    upgrade,
54
 
    versionedfile,
55
 
    vf_repository,
56
 
    workingtree,
57
 
    )
58
 
from bzrlib.repofmt import (
59
 
    groupcompress_repo,
60
 
    knitrepo,
61
 
    knitpack_repo,
62
 
    pack_repo,
63
 
    )
 
29
import bzrlib.bzrdir as bzrdir
 
30
import bzrlib.errors as errors
 
31
from bzrlib.errors import (NotBranchError,
 
32
                           NoSuchFile,
 
33
                           UnknownFormatError,
 
34
                           UnsupportedFormatError,
 
35
                           )
 
36
import bzrlib.repository as repository
 
37
from bzrlib.tests import TestCase, TestCaseWithTransport
 
38
from bzrlib.transport import get_transport
 
39
from bzrlib.transport.http import HttpServer
 
40
from bzrlib.transport.memory import MemoryServer
64
41
 
65
42
 
66
43
class TestDefaultFormat(TestCase):
67
44
 
68
45
    def test_get_set_default_format(self):
69
 
        old_default = bzrdir.format_registry.get('default')
70
 
        private_default = old_default().repository_format.__class__
71
 
        old_format = repository.format_registry.get_default()
72
 
        self.assertTrue(isinstance(old_format, private_default))
73
 
        def make_sample_bzrdir():
74
 
            my_bzrdir = bzrdir.BzrDirMetaFormat1()
75
 
            my_bzrdir.repository_format = SampleRepositoryFormat()
76
 
            return my_bzrdir
77
 
        bzrdir.format_registry.remove('default')
78
 
        bzrdir.format_registry.register('sample', make_sample_bzrdir, '')
79
 
        bzrdir.format_registry.set_default('sample')
 
46
        old_format = repository.RepositoryFormat.get_default_format()
 
47
        # default is None - we cannot create a Repository independently yet
 
48
        self.assertTrue(isinstance(old_format, repository.RepositoryFormat7))
 
49
        repository.RepositoryFormat.set_default_format(SampleRepositoryFormat())
80
50
        # creating a repository should now create an instrumented dir.
81
51
        try:
82
52
            # the default branch format is used by the meta dir format
83
53
            # which is not the default bzrdir format at this point
84
 
            dir = bzrdir.BzrDirMetaFormat1().initialize('memory:///')
 
54
            dir = bzrdir.BzrDirMetaFormat1().initialize('memory:/')
85
55
            result = dir.create_repository()
86
56
            self.assertEqual(result, 'A bzr repository dir')
87
57
        finally:
88
 
            bzrdir.format_registry.remove('default')
89
 
            bzrdir.format_registry.remove('sample')
90
 
            bzrdir.format_registry.register('default', old_default, '')
91
 
        self.assertIsInstance(repository.format_registry.get_default(),
92
 
                              old_format.__class__)
 
58
            repository.RepositoryFormat.set_default_format(old_format)
 
59
        self.assertEqual(old_format, repository.RepositoryFormat.get_default_format())
93
60
 
94
61
 
95
62
class SampleRepositoryFormat(repository.RepositoryFormat):
96
63
    """A sample format
97
64
 
98
 
    this format is initializable, unsupported to aid in testing the
 
65
    this format is initializable, unsupported to aid in testing the 
99
66
    open and open(unsupported=True) routines.
100
67
    """
101
68
 
106
73
    def initialize(self, a_bzrdir, shared=False):
107
74
        """Initialize a repository in a BzrDir"""
108
75
        t = a_bzrdir.get_repository_transport(self)
109
 
        t.put_bytes('format', self.get_format_string())
 
76
        t.put('format', StringIO(self.get_format_string()))
110
77
        return 'A bzr repository dir'
111
78
 
112
79
    def is_supported(self):
116
83
        return "opened repository."
117
84
 
118
85
 
119
 
class SampleExtraRepositoryFormat(repository.RepositoryFormat):
120
 
    """A sample format that can not be used in a metadir
121
 
 
122
 
    """
123
 
 
124
 
    def get_format_string(self):
125
 
        raise NotImplementedError
126
 
 
127
 
 
128
86
class TestRepositoryFormat(TestCaseWithTransport):
129
87
    """Tests for the Repository format detection used by the bzr meta dir facility.BzrBranchFormat facility."""
130
88
 
131
89
    def test_find_format(self):
132
90
        # is the right format object found for a repository?
133
91
        # create a branch with a few known format objects.
134
 
        # this is not quite the same as
 
92
        # this is not quite the same as 
135
93
        self.build_tree(["foo/", "bar/"])
136
94
        def check_format(format, url):
137
95
            dir = format._matchingbzrdir.initialize(url)
138
96
            format.initialize(dir)
139
 
            t = transport.get_transport_from_path(url)
 
97
            t = get_transport(url)
140
98
            found_format = repository.RepositoryFormat.find_format(dir)
141
 
            self.assertIsInstance(found_format, format.__class__)
142
 
        check_format(repository.format_registry.get_default(), "bar")
143
 
 
 
99
            self.failUnless(isinstance(found_format, format.__class__))
 
100
        check_format(repository.RepositoryFormat7(), "bar")
 
101
        
144
102
    def test_find_format_no_repository(self):
145
103
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
146
104
        self.assertRaises(errors.NoRepositoryPresent,
155
113
                          dir)
156
114
 
157
115
    def test_register_unregister_format(self):
158
 
        # Test deprecated format registration functions
159
116
        format = SampleRepositoryFormat()
160
117
        # make a control dir
161
118
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
162
119
        # make a repo
163
120
        format.initialize(dir)
164
121
        # register a format for it.
165
 
        self.applyDeprecated(symbol_versioning.deprecated_in((2, 4, 0)),
166
 
            repository.RepositoryFormat.register_format, format)
 
122
        repository.RepositoryFormat.register_format(format)
167
123
        # which repository.Open will refuse (not supported)
168
 
        self.assertRaises(UnsupportedFormatError, repository.Repository.open,
169
 
            self.get_url())
 
124
        self.assertRaises(UnsupportedFormatError, repository.Repository.open, self.get_url())
170
125
        # but open(unsupported) will work
171
126
        self.assertEqual(format.open(dir), "opened repository.")
172
127
        # unregister the format
173
 
        self.applyDeprecated(symbol_versioning.deprecated_in((2, 4, 0)),
174
 
            repository.RepositoryFormat.unregister_format, format)
175
 
 
176
 
 
177
 
class TestRepositoryFormatRegistry(TestCase):
178
 
 
179
 
    def setUp(self):
180
 
        super(TestRepositoryFormatRegistry, self).setUp()
181
 
        self.registry = repository.RepositoryFormatRegistry()
182
 
 
183
 
    def test_register_unregister_format(self):
184
 
        format = SampleRepositoryFormat()
185
 
        self.registry.register(format)
186
 
        self.assertEquals(format, self.registry.get("Sample .bzr repository format."))
187
 
        self.registry.remove(format)
188
 
        self.assertRaises(KeyError, self.registry.get, "Sample .bzr repository format.")
189
 
 
190
 
    def test_get_all(self):
191
 
        format = SampleRepositoryFormat()
192
 
        self.assertEquals([], self.registry._get_all())
193
 
        self.registry.register(format)
194
 
        self.assertEquals([format], self.registry._get_all())
195
 
 
196
 
    def test_register_extra(self):
197
 
        format = SampleExtraRepositoryFormat()
198
 
        self.assertEquals([], self.registry._get_all())
199
 
        self.registry.register_extra(format)
200
 
        self.assertEquals([format], self.registry._get_all())
201
 
 
202
 
    def test_register_extra_lazy(self):
203
 
        self.assertEquals([], self.registry._get_all())
204
 
        self.registry.register_extra_lazy("bzrlib.tests.test_repository",
205
 
            "SampleExtraRepositoryFormat")
206
 
        formats = self.registry._get_all()
207
 
        self.assertEquals(1, len(formats))
208
 
        self.assertIsInstance(formats[0], SampleExtraRepositoryFormat)
 
128
        repository.RepositoryFormat.unregister_format(format)
 
129
 
 
130
 
 
131
class TestFormat6(TestCaseWithTransport):
 
132
 
 
133
    def test_no_ancestry_weave(self):
 
134
        control = bzrdir.BzrDirFormat6().initialize(self.get_url())
 
135
        repo = repository.RepositoryFormat6().initialize(control)
 
136
        # We no longer need to create the ancestry.weave file
 
137
        # since it is *never* used.
 
138
        self.assertRaises(NoSuchFile,
 
139
                          control.transport.get,
 
140
                          'ancestry.weave')
 
141
 
 
142
 
 
143
class TestFormat7(TestCaseWithTransport):
 
144
    
 
145
    def test_disk_layout(self):
 
146
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
147
        repo = repository.RepositoryFormat7().initialize(control)
 
148
        # in case of side effects of locking.
 
149
        repo.lock_write()
 
150
        repo.unlock()
 
151
        # we want:
 
152
        # format 'Bazaar-NG Repository format 7'
 
153
        # lock ''
 
154
        # inventory.weave == empty_weave
 
155
        # empty revision-store directory
 
156
        # empty weaves directory
 
157
        t = control.get_repository_transport(None)
 
158
        self.assertEqualDiff('Bazaar-NG Repository format 7',
 
159
                             t.get('format').read())
 
160
        self.assertEqualDiff('', t.get('lock').read())
 
161
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
 
162
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
 
163
        self.assertEqualDiff('# bzr weave file v5\n'
 
164
                             'w\n'
 
165
                             'W\n',
 
166
                             t.get('inventory.weave').read())
 
167
 
 
168
    def test_shared_disk_layout(self):
 
169
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
170
        repo = repository.RepositoryFormat7().initialize(control, shared=True)
 
171
        # we want:
 
172
        # format 'Bazaar-NG Repository format 7'
 
173
        # lock ''
 
174
        # inventory.weave == empty_weave
 
175
        # empty revision-store directory
 
176
        # empty weaves directory
 
177
        # a 'shared-storage' marker file.
 
178
        t = control.get_repository_transport(None)
 
179
        self.assertEqualDiff('Bazaar-NG Repository format 7',
 
180
                             t.get('format').read())
 
181
        self.assertEqualDiff('', t.get('lock').read())
 
182
        self.assertEqualDiff('', t.get('shared-storage').read())
 
183
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
 
184
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
 
185
        self.assertEqualDiff('# bzr weave file v5\n'
 
186
                             'w\n'
 
187
                             'W\n',
 
188
                             t.get('inventory.weave').read())
 
189
 
 
190
    def test_shared_no_tree_disk_layout(self):
 
191
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
 
192
        repo = repository.RepositoryFormat7().initialize(control, shared=True)
 
193
        repo.set_make_working_trees(False)
 
194
        # we want:
 
195
        # format 'Bazaar-NG Repository format 7'
 
196
        # lock ''
 
197
        # inventory.weave == empty_weave
 
198
        # empty revision-store directory
 
199
        # empty weaves directory
 
200
        # a 'shared-storage' marker file.
 
201
        t = control.get_repository_transport(None)
 
202
        self.assertEqualDiff('Bazaar-NG Repository format 7',
 
203
                             t.get('format').read())
 
204
        self.assertEqualDiff('', t.get('lock').read())
 
205
        self.assertEqualDiff('', t.get('shared-storage').read())
 
206
        self.assertEqualDiff('', t.get('no-working-trees').read())
 
207
        repo.set_make_working_trees(True)
 
208
        self.assertFalse(t.has('no-working-trees'))
 
209
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
 
210
        self.assertTrue(S_ISDIR(t.stat('weaves').st_mode))
 
211
        self.assertEqualDiff('# bzr weave file v5\n'
 
212
                             'w\n'
 
213
                             'W\n',
 
214
                             t.get('inventory.weave').read())
209
215
 
210
216
 
211
217
class TestFormatKnit1(TestCaseWithTransport):
212
 
 
213
 
    def test_attribute__fetch_order(self):
214
 
        """Knits need topological data insertion."""
215
 
        repo = self.make_repository('.',
216
 
                format=bzrdir.format_registry.get('knit')())
217
 
        self.assertEqual('topological', repo._format._fetch_order)
218
 
 
219
 
    def test_attribute__fetch_uses_deltas(self):
220
 
        """Knits reuse deltas."""
221
 
        repo = self.make_repository('.',
222
 
                format=bzrdir.format_registry.get('knit')())
223
 
        self.assertEqual(True, repo._format._fetch_uses_deltas)
224
 
 
 
218
    
225
219
    def test_disk_layout(self):
226
220
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
227
 
        repo = knitrepo.RepositoryFormatKnit1().initialize(control)
 
221
        repo = repository.RepositoryFormatKnit1().initialize(control)
228
222
        # in case of side effects of locking.
229
223
        repo.lock_write()
230
224
        repo.unlock()
231
225
        # we want:
232
226
        # format 'Bazaar-NG Knit Repository Format 1'
233
 
        # lock: is a directory
 
227
        # lock ''
234
228
        # inventory.weave == empty_weave
235
229
        # empty revision-store directory
236
230
        # empty weaves directory
237
231
        t = control.get_repository_transport(None)
238
232
        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
239
233
                             t.get('format').read())
240
 
        # XXX: no locks left when unlocked at the moment
241
 
        # self.assertEqualDiff('', t.get('lock').read())
 
234
        self.assertEqualDiff('', t.get('lock').read())
 
235
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
242
236
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
243
 
        self.check_knits(t)
244
 
        # Check per-file knits.
245
 
        branch = control.create_branch()
246
 
        tree = control.create_workingtree()
247
 
        tree.add(['foo'], ['Nasty-IdC:'], ['file'])
248
 
        tree.put_file_bytes_non_atomic('Nasty-IdC:', '')
249
 
        tree.commit('1st post', rev_id='foo')
250
 
        self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
251
 
            '\nfoo fulltext 0 81  :')
252
 
 
253
 
    def assertHasKnit(self, t, knit_name, extra_content=''):
254
 
        """Assert that knit_name exists on t."""
255
 
        self.assertEqualDiff('# bzr knit index 8\n' + extra_content,
256
 
                             t.get(knit_name + '.kndx').read())
257
 
 
258
 
    def check_knits(self, t):
259
 
        """check knit content for a repository."""
260
 
        self.assertHasKnit(t, 'inventory')
261
 
        self.assertHasKnit(t, 'revisions')
262
 
        self.assertHasKnit(t, 'signatures')
 
237
        # cheating and using a weave for now.
 
238
        self.assertEqualDiff('# bzr weave file v5\n'
 
239
                             'w\n'
 
240
                             'W\n',
 
241
                             t.get('inventory.weave').read())
263
242
 
264
243
    def test_shared_disk_layout(self):
265
244
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
266
 
        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
 
245
        repo = repository.RepositoryFormatKnit1().initialize(control, shared=True)
267
246
        # we want:
268
247
        # format 'Bazaar-NG Knit Repository Format 1'
269
 
        # lock: is a directory
 
248
        # lock ''
270
249
        # inventory.weave == empty_weave
271
250
        # empty revision-store directory
272
251
        # empty weaves directory
274
253
        t = control.get_repository_transport(None)
275
254
        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
276
255
                             t.get('format').read())
277
 
        # XXX: no locks left when unlocked at the moment
278
 
        # self.assertEqualDiff('', t.get('lock').read())
 
256
        self.assertEqualDiff('', t.get('lock').read())
279
257
        self.assertEqualDiff('', t.get('shared-storage').read())
 
258
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
280
259
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
281
 
        self.check_knits(t)
 
260
        # cheating and using a weave for now.
 
261
        self.assertEqualDiff('# bzr weave file v5\n'
 
262
                             'w\n'
 
263
                             'W\n',
 
264
                             t.get('inventory.weave').read())
282
265
 
283
266
    def test_shared_no_tree_disk_layout(self):
284
267
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
285
 
        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
 
268
        repo = repository.RepositoryFormatKnit1().initialize(control, shared=True)
286
269
        repo.set_make_working_trees(False)
287
270
        # we want:
288
271
        # format 'Bazaar-NG Knit Repository Format 1'
294
277
        t = control.get_repository_transport(None)
295
278
        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
296
279
                             t.get('format').read())
297
 
        # XXX: no locks left when unlocked at the moment
298
 
        # self.assertEqualDiff('', t.get('lock').read())
 
280
        self.assertEqualDiff('', t.get('lock').read())
299
281
        self.assertEqualDiff('', t.get('shared-storage').read())
300
282
        self.assertEqualDiff('', t.get('no-working-trees').read())
301
283
        repo.set_make_working_trees(True)
302
284
        self.assertFalse(t.has('no-working-trees'))
 
285
        self.assertTrue(S_ISDIR(t.stat('revision-store').st_mode))
303
286
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
304
 
        self.check_knits(t)
305
 
 
306
 
    def test_deserialise_sets_root_revision(self):
307
 
        """We must have a inventory.root.revision
308
 
 
309
 
        Old versions of the XML5 serializer did not set the revision_id for
310
 
        the whole inventory. So we grab the one from the expected text. Which
311
 
        is valid when the api is not being abused.
312
 
        """
313
 
        repo = self.make_repository('.',
314
 
                format=bzrdir.format_registry.get('knit')())
315
 
        inv_xml = '<inventory format="5">\n</inventory>\n'
316
 
        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
317
 
        self.assertEqual('test-rev-id', inv.root.revision)
318
 
 
319
 
    def test_deserialise_uses_global_revision_id(self):
320
 
        """If it is set, then we re-use the global revision id"""
321
 
        repo = self.make_repository('.',
322
 
                format=bzrdir.format_registry.get('knit')())
323
 
        inv_xml = ('<inventory format="5" revision_id="other-rev-id">\n'
324
 
                   '</inventory>\n')
325
 
        # Arguably, the deserialise_inventory should detect a mismatch, and
326
 
        # raise an error, rather than silently using one revision_id over the
327
 
        # other.
328
 
        self.assertRaises(AssertionError, repo._deserialise_inventory,
329
 
            'test-rev-id', inv_xml)
330
 
        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
331
 
        self.assertEqual('other-rev-id', inv.root.revision)
332
 
 
333
 
    def test_supports_external_lookups(self):
334
 
        repo = self.make_repository('.',
335
 
                format=bzrdir.format_registry.get('knit')())
336
 
        self.assertFalse(repo._format.supports_external_lookups)
337
 
 
338
 
 
339
 
class DummyRepository(object):
340
 
    """A dummy repository for testing."""
341
 
 
342
 
    _format = None
343
 
    _serializer = None
344
 
 
345
 
    def supports_rich_root(self):
346
 
        if self._format is not None:
347
 
            return self._format.rich_root_data
348
 
        return False
349
 
 
350
 
    def get_graph(self):
351
 
        raise NotImplementedError
352
 
 
353
 
    def get_parent_map(self, revision_ids):
354
 
        raise NotImplementedError
355
 
 
356
 
 
357
 
class InterDummy(repository.InterRepository):
358
 
    """An inter-repository optimised code path for DummyRepository.
359
 
 
360
 
    This is for use during testing where we use DummyRepository as repositories
 
287
        # cheating and using a weave for now.
 
288
        self.assertEqualDiff('# bzr weave file v5\n'
 
289
                             'w\n'
 
290
                             'W\n',
 
291
                             t.get('inventory.weave').read())
 
292
 
 
293
 
 
294
class InterString(repository.InterRepository):
 
295
    """An inter-repository optimised code path for strings.
 
296
 
 
297
    This is for use during testing where we use strings as repositories
361
298
    so that none of the default regsitered inter-repository classes will
362
 
    MATCH.
 
299
    match.
363
300
    """
364
301
 
365
302
    @staticmethod
366
303
    def is_compatible(repo_source, repo_target):
367
 
        """InterDummy is compatible with DummyRepository."""
368
 
        return (isinstance(repo_source, DummyRepository) and
369
 
            isinstance(repo_target, DummyRepository))
 
304
        """InterString is compatible with strings-as-repos."""
 
305
        return isinstance(repo_source, str) and isinstance(repo_target, str)
370
306
 
371
307
 
372
308
class TestInterRepository(TestCaseWithTransport):
378
314
        # This also tests that the default registered optimised interrepository
379
315
        # classes do not barf inappropriately when a surprising repository type
380
316
        # is handed to them.
381
 
        dummy_a = DummyRepository()
382
 
        dummy_a._format = RepositoryFormat()
383
 
        dummy_a._format.supports_full_versioned_files = True
384
 
        dummy_b = DummyRepository()
385
 
        dummy_b._format = RepositoryFormat()
386
 
        dummy_b._format.supports_full_versioned_files = True
 
317
        dummy_a = "Repository 1."
 
318
        dummy_b = "Repository 2."
387
319
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
388
320
 
389
321
    def assertGetsDefaultInterRepository(self, repo_a, repo_b):
390
 
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default.
391
 
 
392
 
        The effective default is now InterSameDataRepository because there is
393
 
        no actual sane default in the presence of incompatible data models.
394
 
        """
 
322
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default."""
395
323
        inter_repo = repository.InterRepository.get(repo_a, repo_b)
396
 
        self.assertEqual(vf_repository.InterSameDataRepository,
 
324
        self.assertEqual(repository.InterRepository,
397
325
                         inter_repo.__class__)
398
326
        self.assertEqual(repo_a, inter_repo.source)
399
327
        self.assertEqual(repo_b, inter_repo.target)
404
332
        # and that it is correctly selected when given a repository
405
333
        # pair that it returns true on for the is_compatible static method
406
334
        # check
407
 
        dummy_a = DummyRepository()
408
 
        dummy_a._format = RepositoryFormat()
409
 
        dummy_b = DummyRepository()
410
 
        dummy_b._format = RepositoryFormat()
411
 
        repo = self.make_repository('.')
412
 
        # hack dummies to look like repo somewhat.
413
 
        dummy_a._serializer = repo._serializer
414
 
        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
415
 
        dummy_a._format.rich_root_data = repo._format.rich_root_data
416
 
        dummy_a._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
417
 
        dummy_b._serializer = repo._serializer
418
 
        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
419
 
        dummy_b._format.rich_root_data = repo._format.rich_root_data
420
 
        dummy_b._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
421
 
        repository.InterRepository.register_optimiser(InterDummy)
 
335
        dummy_a = "Repository 1."
 
336
        dummy_b = "Repository 2."
 
337
        repository.InterRepository.register_optimiser(InterString)
422
338
        try:
423
 
            # we should get the default for something InterDummy returns False
 
339
            # we should get the default for something InterString returns False
424
340
            # to
425
 
            self.assertFalse(InterDummy.is_compatible(dummy_a, repo))
426
 
            self.assertGetsDefaultInterRepository(dummy_a, repo)
427
 
            # and we should get an InterDummy for a pair it 'likes'
428
 
            self.assertTrue(InterDummy.is_compatible(dummy_a, dummy_b))
 
341
            self.assertFalse(InterString.is_compatible(dummy_a, None))
 
342
            self.assertGetsDefaultInterRepository(dummy_a, None)
 
343
            # and we should get an InterString for a pair it 'likes'
 
344
            self.assertTrue(InterString.is_compatible(dummy_a, dummy_b))
429
345
            inter_repo = repository.InterRepository.get(dummy_a, dummy_b)
430
 
            self.assertEqual(InterDummy, inter_repo.__class__)
 
346
            self.assertEqual(InterString, inter_repo.__class__)
431
347
            self.assertEqual(dummy_a, inter_repo.source)
432
348
            self.assertEqual(dummy_b, inter_repo.target)
433
349
        finally:
434
 
            repository.InterRepository.unregister_optimiser(InterDummy)
 
350
            repository.InterRepository.unregister_optimiser(InterString)
435
351
        # now we should get the default InterRepository object again.
436
352
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
437
353
 
438
354
 
439
 
class TestRepositoryFormat1(knitrepo.RepositoryFormatKnit1):
440
 
 
441
 
    def get_format_string(self):
442
 
        return "Test Format 1"
443
 
 
444
 
 
445
 
class TestRepositoryFormat2(knitrepo.RepositoryFormatKnit1):
446
 
 
447
 
    def get_format_string(self):
448
 
        return "Test Format 2"
 
355
class TestInterWeaveRepo(TestCaseWithTransport):
 
356
 
 
357
    def test_is_compatible_and_registered(self):
 
358
        # InterWeaveRepo is compatible when either side
 
359
        # is a format 5/6/7 branch
 
360
        formats = [repository.RepositoryFormat5(),
 
361
                   repository.RepositoryFormat6(),
 
362
                   repository.RepositoryFormat7()]
 
363
        incompatible_formats = [repository.RepositoryFormat4(),
 
364
                                repository.RepositoryFormatKnit1(),
 
365
                                ]
 
366
        repo_a = self.make_repository('a')
 
367
        repo_b = self.make_repository('b')
 
368
        is_compatible = repository.InterWeaveRepo.is_compatible
 
369
        for source in incompatible_formats:
 
370
            # force incompatible left then right
 
371
            repo_a._format = source
 
372
            repo_b._format = formats[0]
 
373
            self.assertFalse(is_compatible(repo_a, repo_b))
 
374
            self.assertFalse(is_compatible(repo_b, repo_a))
 
375
        for source in formats:
 
376
            repo_a._format = source
 
377
            for target in formats:
 
378
                repo_b._format = target
 
379
                self.assertTrue(is_compatible(repo_a, repo_b))
 
380
        self.assertEqual(repository.InterWeaveRepo,
 
381
                         repository.InterRepository.get(repo_a,
 
382
                                                        repo_b).__class__)
449
383
 
450
384
 
451
385
class TestRepositoryConverter(TestCaseWithTransport):
    """Tests for repository.CopyConverter."""

    def test_convert_empty(self):
        """CopyConverter can convert an empty repo between registered formats.

        Registers two throwaway test formats, initializes a repository in the
        source format, runs CopyConverter, and checks the re-opened repository
        reports the target format.
        """
        source_format = TestRepositoryFormat1()
        target_format = TestRepositoryFormat2()
        # Register the test formats so conversion machinery can look them up;
        # always unregister afterwards to avoid polluting other tests.
        repository.format_registry.register(source_format)
        self.addCleanup(repository.format_registry.remove,
            source_format)
        repository.format_registry.register(target_format)
        self.addCleanup(repository.format_registry.remove,
            target_format)
        t = self.get_transport()
        t.mkdir('repository')
        repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
        repo = TestRepositoryFormat1().initialize(repo_dir)
        converter = repository.CopyConverter(target_format)
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
        try:
            converter.convert(repo, pb)
        finally:
            # The progress bar must be finished even if conversion raises.
            pb.finished()
        repo = repo_dir.open_repository()
        self.assertTrue(isinstance(target_format, repo._format.__class__))
 
 
class TestRepositoryFormatKnit3(TestCaseWithTransport):
    """Tests specific to the Knit3 (subtree-capable) repository format."""

    def test_attribute__fetch_order(self):
        """Knits need topological data insertion."""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertEqual('topological', repo._format._fetch_order)

    def test_attribute__fetch_uses_deltas(self):
        """Knits reuse deltas."""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertEqual(True, repo._format._fetch_uses_deltas)

    def test_convert(self):
        """Ensure the upgrade adds weaves for roots."""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit1()
        tree = self.make_branch_and_tree('.', format)
        tree.commit("Dull commit", rev_id="dull")
        revision_tree = tree.branch.repository.revision_tree('dull')
        revision_tree.lock_read()
        try:
            # Knit1 stores no text for the tree root, so reading it fails.
            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
                revision_tree.inventory.root.file_id)
        finally:
            revision_tree.unlock()
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        upgrade.Convert('.', format)
        tree = workingtree.WorkingTree.open('.')
        revision_tree = tree.branch.repository.revision_tree('dull')
        revision_tree.lock_read()
        try:
            # After upgrading to Knit3 the root text must be readable.
            revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
        finally:
            revision_tree.unlock()
        tree.commit("Another dull commit", rev_id='dull2')
        revision_tree = tree.branch.repository.revision_tree('dull2')
        revision_tree.lock_read()
        self.addCleanup(revision_tree.unlock)
        # The root was unchanged by 'dull2', so its revision stays 'dull'.
        self.assertEqual('dull', revision_tree.inventory.root.revision)

    def test_supports_external_lookups(self):
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertFalse(repo._format.supports_external_lookups)
 
 
class Test2a(tests.TestCaseWithMemoryTransport):
    """Tests specific to the 2a (groupcompress CHK) repository format."""

    def test_chk_bytes_uses_custom_btree_parser(self):
        mt = self.make_branch_and_memory_tree('test', format='2a')
        mt.lock_write()
        self.addCleanup(mt.unlock)
        mt.add([''], ['root-id'])
        mt.commit('first')
        index = mt.branch.repository.chk_bytes._index._graph_index._indices[0]
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
        # It should also work if we re-open the repo
        repo = mt.branch.repository.bzrdir.open_repository()
        repo.lock_read()
        self.addCleanup(repo.unlock)
        index = repo.chk_bytes._index._graph_index._indices[0]
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)

    # NOTE: this method was previously defined three times identically; the
    # later definitions silently shadowed the earlier ones. Deduplicated.
    def test_fetch_combines_groups(self):
        builder = self.make_branch_builder('source', format='2a')
        builder.start_series()
        builder.build_snapshot('1', None, [
            ('add', ('', 'root-id', 'directory', '')),
            ('add', ('file', 'file-id', 'file', 'content\n'))])
        builder.build_snapshot('2', ['1'], [
            ('modify', ('file-id', 'content-2\n'))])
        builder.finish_series()
        source = builder.get_branch()
        target = self.make_repository('target', format='2a')
        target.fetch(source.repository)
        target.lock_read()
        self.addCleanup(target.unlock)
        details = target.texts._index.get_build_details(
            [('file-id', '1',), ('file-id', '2',)])
        file_1_details = details[('file-id', '1')]
        file_2_details = details[('file-id', '2')]
        # The index, and what to read off disk, should be the same for both
        # versions of the file.
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])

    def test_format_pack_compresses_True(self):
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo._format.pack_compresses)

    def test_inventories_use_chk_map_with_parent_base_dict(self):
        tree = self.make_branch_and_memory_tree('repo', format="2a")
        tree.lock_write()
        tree.add([''], ['TREE_ROOT'])
        revid = tree.commit("foo")
        tree.unlock()
        tree.lock_read()
        self.addCleanup(tree.unlock)
        inv = tree.branch.repository.get_inventory(revid)
        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
        inv.parent_id_basename_to_file_id._ensure_root()
        inv.id_to_entry._ensure_root()
        # 2a uses 64KiB pages for both CHK maps.
        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
        self.assertEqual(65536,
            inv.parent_id_basename_to_file_id._root_node.maximum_size)

    def test_autopack_unchanged_chk_nodes(self):
        # at 20 unchanged commits, chk pages are packed that are split into
        # two groups such that the new pack being made doesn't have all its
        # pages in the source packs (though they are in the repository).
        # Use a memory backed repository, we don't need to hit disk for this
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        for pos in range(20):
            tree.commit(str(pos))

    def test_pack_with_hint(self):
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        # 1 commit to leave untouched
        tree.commit('1')
        to_keep = tree.branch.repository._pack_collection.names()
        # 2 to combine
        tree.commit('2')
        tree.commit('3')
        all = tree.branch.repository._pack_collection.names()
        combine = list(set(all) - set(to_keep))
        self.assertLength(3, all)
        self.assertLength(2, combine)
        tree.branch.repository.pack(hint=combine)
        final = tree.branch.repository._pack_collection.names()
        self.assertLength(2, final)
        self.assertFalse(combine[0] in final)
        self.assertFalse(combine[1] in final)
        self.assertSubset(to_keep, final)

    def test_stream_source_to_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='2a')
        stream = source._get_source(target._format)
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)

    def test_stream_source_to_non_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='rich-root-pack')
        stream = source._get_source(target._format)
        # We don't want the child GroupCHKStreamSource
        self.assertIs(type(stream), vf_repository.StreamSource)

    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
        source_builder = self.make_branch_builder('source',
                            format='2a')
        # We have to build a fairly large tree, so that we are sure the chk
        # pages will have split into multiple pages.
        entries = [('add', ('', 'a-root-id', 'directory', None))]
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                fname = i + j
                fid = fname + '-id'
                content = 'content for %s\n' % (fname,)
                entries.append(('add', (fname, fid, 'file', content)))
        source_builder.start_series()
        source_builder.build_snapshot('rev-1', None, entries)
        # Now change a few of them, so we get a few new pages for the second
        # revision
        source_builder.build_snapshot('rev-2', ['rev-1'], [
            ('modify', ('aa-id', 'new content for aa-id\n')),
            ('modify', ('cc-id', 'new content for cc-id\n')),
            ('modify', ('zz-id', 'new content for zz-id\n')),
            ])
        source_builder.finish_series()
        source_branch = source_builder.get_branch()
        source_branch.lock_read()
        self.addCleanup(source_branch.unlock)
        target = self.make_repository('target', format='2a')
        source = source_branch.repository._get_source(target._format)
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)

        # On a regular pass, getting the inventories and chk pages for rev-2
        # would only get the newly created chk pages
        search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1,
                                    set(['rev-2']))
        simple_chk_records = []
        for vf_name, substream in source.get_stream(search):
            if vf_name == 'chk_bytes':
                for record in substream:
                    simple_chk_records.append(record.key)
            else:
                # Substreams must be fully consumed even when ignored.
                for _ in substream:
                    continue
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
                         simple_chk_records)
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
        # we should get a much larger set of pages.
        missing = [('inventories', 'rev-2')]
        full_chk_records = []
        for vf_name, substream in source.get_stream_for_missing_keys(missing):
            if vf_name == 'inventories':
                for record in substream:
                    self.assertEqual(('rev-2',), record.key)
            elif vf_name == 'chk_bytes':
                for record in substream:
                    full_chk_records.append(record.key)
            else:
                self.fail('Should not be getting a stream of %s' % (vf_name,))
        # We have 257 records now. This is because we have 1 root page, and 256
        # leaf pages in a complete listing.
        self.assertEqual(257, len(full_chk_records))
        self.assertSubset(simple_chk_records, full_chk_records)

    def test_inconsistency_fatal(self):
        repo = self.make_repository('repo', format='2a')
        # Only the revisions index treats inconsistent duplicates as fatal.
        self.assertTrue(repo.revisions._index._inconsistency_fatal)
        self.assertFalse(repo.texts._index._inconsistency_fatal)
        self.assertFalse(repo.inventories._index._inconsistency_fatal)
        self.assertFalse(repo.signatures._index._inconsistency_fatal)
        self.assertFalse(repo.chk_bytes._index._inconsistency_fatal)
 
 
class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport):
    """Verify which stream source is chosen for pack-to-pack fetches.

    An exact format match should get the optimized KnitPackStreamSource;
    any mismatch falls back to the generic StreamSource.
    """

    def test_source_to_exact_pack_092(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='pack-0.92')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_rich_root_pack(self):
        source = self.make_repository('source', format='rich-root-pack')
        target = self.make_repository('target', format='rich-root-pack')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_exact_pack_19_rich_root(self):
        source = self.make_repository('source', format='1.9-rich-root')
        target = self.make_repository('target', format='1.9-rich-root')
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_source_to_remote_exact_pack_19(self):
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9')
        # Re-open the target over the smart server transport.
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource)

    def test_stream_source_to_non_exact(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='1.9')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), vf_repository.StreamSource)

    def test_stream_source_to_non_exact_rich_root(self):
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.9-rich-root')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), vf_repository.StreamSource)

    def test_source_to_remote_non_exact_pack_19(self):
        trans = self.make_smart_server('target')
        trans.ensure_base()
        source = self.make_repository('source', format='1.9')
        target = self.make_repository('target', format='1.6')
        # Re-open the target over the smart server transport.
        target = repository.Repository.open(trans.base)
        stream_source = source._get_source(target._format)
        self.assertIs(type(stream_source), vf_repository.StreamSource)

    def test_stream_source_to_knit(self):
        source = self.make_repository('source', format='pack-0.92')
        target = self.make_repository('target', format='dirstate')
        stream = source._get_source(target._format)
        self.assertIs(type(stream), vf_repository.StreamSource)
 
 
class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport):
    """Tests for _find_parent_ids_of_revisions."""

    def setUp(self):
        super(TestDevelopment6FindParentIdsOfRevisions, self).setUp()
        self.builder = self.make_branch_builder('source')
        self.builder.start_series()
        self.builder.build_snapshot('initial', None,
            [('add', ('', 'tree-root', 'directory', None))])
        self.repo = self.builder.get_branch().repository
        self.addCleanup(self.builder.finish_series)

    def assertParentIds(self, expected_result, rev_set):
        """Check the parent ids found for rev_set, ignoring ordering."""
        self.assertEqual(sorted(expected_result),
            sorted(self.repo._find_parent_ids_of_revisions(rev_set)))

    def test_simple(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        rev_set = ['revid2']
        self.assertParentIds(['revid1'], rev_set)

    def test_not_first_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2'], [])
        rev_set = ['revid3', 'revid2']
        self.assertParentIds(['revid1'], rev_set)

    def test_not_null(self):
        rev_set = ['initial']
        self.assertParentIds([], rev_set)

    def test_not_null_set(self):
        self.builder.build_snapshot('revid1', None, [])
        rev_set = [_mod_revision.NULL_REVISION]
        self.assertParentIds([], rev_set)

    def test_ghost(self):
        self.builder.build_snapshot('revid1', None, [])
        rev_set = ['ghost', 'revid1']
        self.assertParentIds(['initial'], rev_set)

    def test_ghost_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2', ['revid1', 'ghost'], [])
        rev_set = ['revid2', 'revid1']
        self.assertParentIds(['ghost', 'initial'], rev_set)

    def test_righthand_parent(self):
        self.builder.build_snapshot('revid1', None, [])
        self.builder.build_snapshot('revid2a', ['revid1'], [])
        self.builder.build_snapshot('revid2b', ['revid1'], [])
        self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], [])
        rev_set = ['revid3', 'revid2a']
        self.assertParentIds(['revid1', 'revid2b'], rev_set)
 
 
class TestWithBrokenRepo(TestCaseWithTransport):
    """These tests seem to be more appropriate as interface tests?"""

    def make_broken_repository(self):
        """Build a repository with deliberately inconsistent ancestry data."""
        # XXX: This function is borrowed from Aaron's "Reconcile can fix bad
        # parent references" branch which is due to land in bzr.dev soon.  Once
        # it does, this duplication should be removed.
        repo = self.make_repository('broken-repo')
        cleanups = []
        try:
            repo.lock_write()
            cleanups.append(repo.unlock)
            repo.start_write_group()
            cleanups.append(repo.commit_write_group)
            # make rev1a: A well-formed revision, containing 'file1'
            inv = inventory.Inventory(revision_id='rev1a')
            inv.root.revision = 'rev1a'
            self.add_file(repo, inv, 'file1', 'rev1a', [])
            repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], [])
            repo.add_inventory('rev1a', inv, [])
            revision = _mod_revision.Revision('rev1a',
                committer='jrandom@example.com', timestamp=0,
                inventory_sha1='', timezone=0, message='foo', parent_ids=[])
            repo.add_revision('rev1a', revision, inv)

            # make rev1b, which has no Revision, but has an Inventory, and
            # file1
            inv = inventory.Inventory(revision_id='rev1b')
            inv.root.revision = 'rev1b'
            self.add_file(repo, inv, 'file1', 'rev1b', [])
            repo.add_inventory('rev1b', inv, [])

            # make rev2, with file1 and file2
            # file2 is sane
            # file1 has 'rev1b' as an ancestor, even though this is not
            # mentioned by 'rev1a', making it an unreferenced ancestor
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file1', 'rev2', ['rev1a', 'rev1b'])
            self.add_file(repo, inv, 'file2', 'rev2', [])
            self.add_revision(repo, 'rev2', inv, ['rev1a'])

            # make ghost revision rev1c
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev1c', [])

            # make rev3 with file2
            # file2 refers to 'rev1c', which is a ghost in this repository, so
            # file2 cannot have rev1c as its ancestor.
            inv = inventory.Inventory()
            self.add_file(repo, inv, 'file2', 'rev3', ['rev1c'])
            self.add_revision(repo, 'rev3', inv, ['rev1c'])
            return repo
        finally:
            # Run cleanups in reverse order so the write group is committed
            # before the lock is released.
            for cleanup in reversed(cleanups):
                cleanup()

    def add_revision(self, repo, revision_id, inv, parent_ids):
        """Add a complete (inventory + revision) record to repo."""
        inv.revision_id = revision_id
        inv.root.revision = revision_id
        repo.texts.add_lines((inv.root.file_id, revision_id), [], [])
        repo.add_inventory(revision_id, inv, parent_ids)
        revision = _mod_revision.Revision(revision_id,
            committer='jrandom@example.com', timestamp=0, inventory_sha1='',
            timezone=0, message='foo', parent_ids=parent_ids)
        repo.add_revision(revision_id, revision, inv)

    def add_file(self, repo, inv, filename, revision, parents):
        """Add a one-line file entry to inv and its text to repo."""
        file_id = filename + '-id'
        entry = inventory.InventoryFile(file_id, filename, 'TREE_ROOT')
        entry.revision = revision
        entry.text_size = 0
        inv.add(entry)
        text_key = (file_id, revision)
        parent_keys = [(file_id, parent) for parent in parents]
        repo.texts.add_lines(text_key, parent_keys, ['line\n'])

    def test_insert_from_broken_repo(self):
        """Inserting a data stream from a broken repository won't silently
        corrupt the target repository.
        """
        broken_repo = self.make_broken_repository()
        empty_repo = self.make_repository('empty-repo')
        try:
            empty_repo.fetch(broken_repo)
        except (errors.RevisionNotPresent, errors.BzrCheckError):
            # Test successful: compression parent not being copied leads to
            # error.
            return
        empty_repo.lock_read()
        self.addCleanup(empty_repo.unlock)
        text = empty_repo.texts.get_record_stream(
            [('file2-id', 'rev3')], 'topological', True).next()
        self.assertEqual('line\n', text.get_bytes_as('fulltext'))
 
 
class TestRepositoryPackCollection(TestCaseWithTransport):
969
 
 
970
 
    def get_format(self):
971
 
        return bzrdir.format_registry.make_bzrdir('pack-0.92')
972
 
 
973
 
    def get_packs(self):
974
 
        format = self.get_format()
975
 
        repo = self.make_repository('.', format=format)
976
 
        return repo._pack_collection
977
 
 
978
 
    def make_packs_and_alt_repo(self, write_lock=False):
979
 
        """Create a pack repo with 3 packs, and access it via a second repo."""
980
 
        tree = self.make_branch_and_tree('.', format=self.get_format())
981
 
        tree.lock_write()
982
 
        self.addCleanup(tree.unlock)
983
 
        rev1 = tree.commit('one')
984
 
        rev2 = tree.commit('two')
985
 
        rev3 = tree.commit('three')
986
 
        r = repository.Repository.open('.')
987
 
        if write_lock:
988
 
            r.lock_write()
989
 
        else:
990
 
            r.lock_read()
991
 
        self.addCleanup(r.unlock)
992
 
        packs = r._pack_collection
993
 
        packs.ensure_loaded()
994
 
        return tree, r, packs, [rev1, rev2, rev3]
995
 
 
996
 
    def test__clear_obsolete_packs(self):
997
 
        packs = self.get_packs()
998
 
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
999
 
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
1000
 
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
1001
 
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
1002
 
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
1003
 
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
1004
 
        res = packs._clear_obsolete_packs()
1005
 
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
1006
 
        self.assertEqual([], obsolete_pack_trans.list_dir('.'))
1007
 
 
1008
 
    def test__clear_obsolete_packs_preserve(self):
1009
 
        packs = self.get_packs()
1010
 
        obsolete_pack_trans = packs.transport.clone('obsolete_packs')
1011
 
        obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n')
1012
 
        obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n')
1013
 
        obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n')
1014
 
        obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n')
1015
 
        obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n')
1016
 
        res = packs._clear_obsolete_packs(preserve=set(['a-pack']))
1017
 
        self.assertEqual(['a-pack', 'another-pack'], sorted(res))
1018
 
        self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'],
1019
 
                         sorted(obsolete_pack_trans.list_dir('.')))
1020
 
 
1021
 
    def test__max_pack_count(self):
1022
 
        """The maximum pack count is a function of the number of revisions."""
1023
 
        # no revisions - one pack, so that we can have a revision free repo
1024
 
        # without it blowing up
1025
 
        packs = self.get_packs()
1026
 
        self.assertEqual(1, packs._max_pack_count(0))
1027
 
        # after that the sum of the digits, - check the first 1-9
1028
 
        self.assertEqual(1, packs._max_pack_count(1))
1029
 
        self.assertEqual(2, packs._max_pack_count(2))
1030
 
        self.assertEqual(3, packs._max_pack_count(3))
1031
 
        self.assertEqual(4, packs._max_pack_count(4))
1032
 
        self.assertEqual(5, packs._max_pack_count(5))
1033
 
        self.assertEqual(6, packs._max_pack_count(6))
1034
 
        self.assertEqual(7, packs._max_pack_count(7))
1035
 
        self.assertEqual(8, packs._max_pack_count(8))
1036
 
        self.assertEqual(9, packs._max_pack_count(9))
1037
 
        # check the boundary cases with two digits for the next decade
1038
 
        self.assertEqual(1, packs._max_pack_count(10))
1039
 
        self.assertEqual(2, packs._max_pack_count(11))
1040
 
        self.assertEqual(10, packs._max_pack_count(19))
1041
 
        self.assertEqual(2, packs._max_pack_count(20))
1042
 
        self.assertEqual(3, packs._max_pack_count(21))
1043
 
        # check some arbitrary big numbers
1044
 
        self.assertEqual(25, packs._max_pack_count(112894))
1045
 
 
1046
 
    def test_repr(self):
1047
 
        packs = self.get_packs()
1048
 
        self.assertContainsRe(repr(packs),
1049
 
            'RepositoryPackCollection(.*Repository(.*))')
1050
 
 
1051
 
    def test__obsolete_packs(self):
1052
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1053
 
        names = packs.names()
1054
 
        pack = packs.get_pack_by_name(names[0])
1055
 
        # Schedule this one for removal
1056
 
        packs._remove_pack_from_memory(pack)
1057
 
        # Simulate a concurrent update by renaming the .pack file and one of
1058
 
        # the indices
1059
 
        packs.transport.rename('packs/%s.pack' % (names[0],),
1060
 
                               'obsolete_packs/%s.pack' % (names[0],))
1061
 
        packs.transport.rename('indices/%s.iix' % (names[0],),
1062
 
                               'obsolete_packs/%s.iix' % (names[0],))
1063
 
        # Now trigger the obsoletion, and ensure that all the remaining files
1064
 
        # are still renamed
1065
 
        packs._obsolete_packs([pack])
1066
 
        self.assertEqual([n + '.pack' for n in names[1:]],
1067
 
                         sorted(packs._pack_transport.list_dir('.')))
1068
 
        # names[0] should not be present in the index anymore
1069
 
        self.assertEqual(names[1:],
1070
 
            sorted(set([osutils.splitext(n)[0] for n in
1071
 
                        packs._index_transport.list_dir('.')])))
1072
 
 
1073
 
    def test_pack_distribution_zero(self):
1074
 
        packs = self.get_packs()
1075
 
        self.assertEqual([0], packs.pack_distribution(0))
1076
 
 
1077
 
    def test_ensure_loaded_unlocked(self):
1078
 
        packs = self.get_packs()
1079
 
        self.assertRaises(errors.ObjectNotLocked,
1080
 
                          packs.ensure_loaded)
1081
 
 
1082
 
    def test_pack_distribution_one_to_nine(self):
1083
 
        packs = self.get_packs()
1084
 
        self.assertEqual([1],
1085
 
            packs.pack_distribution(1))
1086
 
        self.assertEqual([1, 1],
1087
 
            packs.pack_distribution(2))
1088
 
        self.assertEqual([1, 1, 1],
1089
 
            packs.pack_distribution(3))
1090
 
        self.assertEqual([1, 1, 1, 1],
1091
 
            packs.pack_distribution(4))
1092
 
        self.assertEqual([1, 1, 1, 1, 1],
1093
 
            packs.pack_distribution(5))
1094
 
        self.assertEqual([1, 1, 1, 1, 1, 1],
1095
 
            packs.pack_distribution(6))
1096
 
        self.assertEqual([1, 1, 1, 1, 1, 1, 1],
1097
 
            packs.pack_distribution(7))
1098
 
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1],
1099
 
            packs.pack_distribution(8))
1100
 
        self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1],
1101
 
            packs.pack_distribution(9))
1102
 
 
1103
 
    def test_pack_distribution_stable_at_boundaries(self):
1104
 
        """When there are multi-rev packs the counts are stable."""
1105
 
        packs = self.get_packs()
1106
 
        # in 10s:
1107
 
        self.assertEqual([10], packs.pack_distribution(10))
1108
 
        self.assertEqual([10, 1], packs.pack_distribution(11))
1109
 
        self.assertEqual([10, 10], packs.pack_distribution(20))
1110
 
        self.assertEqual([10, 10, 1], packs.pack_distribution(21))
1111
 
        # 100s
1112
 
        self.assertEqual([100], packs.pack_distribution(100))
1113
 
        self.assertEqual([100, 1], packs.pack_distribution(101))
1114
 
        self.assertEqual([100, 10, 1], packs.pack_distribution(111))
1115
 
        self.assertEqual([100, 100], packs.pack_distribution(200))
1116
 
        self.assertEqual([100, 100, 1], packs.pack_distribution(201))
1117
 
        self.assertEqual([100, 100, 10, 1], packs.pack_distribution(211))
1118
 
 
1119
 
    def test_plan_pack_operations_2009_revisions_skip_all_packs(self):
1120
 
        packs = self.get_packs()
1121
 
        existing_packs = [(2000, "big"), (9, "medium")]
1122
 
        # rev count - 2009 -> 2x1000 + 9x1
1123
 
        pack_operations = packs.plan_autopack_combinations(
1124
 
            existing_packs, [1000, 1000, 1, 1, 1, 1, 1, 1, 1, 1, 1])
1125
 
        self.assertEqual([], pack_operations)
1126
 
 
1127
 
    def test_plan_pack_operations_2010_revisions_skip_all_packs(self):
1128
 
        packs = self.get_packs()
1129
 
        existing_packs = [(2000, "big"), (9, "medium"), (1, "single")]
1130
 
        # rev count - 2010 -> 2x1000 + 1x10
1131
 
        pack_operations = packs.plan_autopack_combinations(
1132
 
            existing_packs, [1000, 1000, 10])
1133
 
        self.assertEqual([], pack_operations)
1134
 
 
1135
 
    def test_plan_pack_operations_2010_combines_smallest_two(self):
1136
 
        packs = self.get_packs()
1137
 
        existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"),
1138
 
            (1, "single1")]
1139
 
        # rev count - 2010 -> 2x1000 + 1x10 (3)
1140
 
        pack_operations = packs.plan_autopack_combinations(
1141
 
            existing_packs, [1000, 1000, 10])
1142
 
        self.assertEqual([[2, ["single2", "single1"]]], pack_operations)
1143
 
 
1144
 
    def test_plan_pack_operations_creates_a_single_op(self):
1145
 
        packs = self.get_packs()
1146
 
        existing_packs = [(50, 'a'), (40, 'b'), (30, 'c'), (10, 'd'),
1147
 
                          (10, 'e'), (6, 'f'), (4, 'g')]
1148
 
        # rev count 150 -> 1x100 and 5x10
1149
 
        # The two size 10 packs do not need to be touched. The 50, 40, 30 would
1150
 
        # be combined into a single 120 size pack, and the 6 & 4 would
1151
 
        # becombined into a size 10 pack. However, if we have to rewrite them,
1152
 
        # we save a pack file with no increased I/O by putting them into the
1153
 
        # same file.
1154
 
        distribution = packs.pack_distribution(150)
1155
 
        pack_operations = packs.plan_autopack_combinations(existing_packs,
1156
 
                                                           distribution)
1157
 
        self.assertEqual([[130, ['a', 'b', 'c', 'f', 'g']]], pack_operations)
1158
 
 
1159
 
    def test_all_packs_none(self):
1160
 
        format = self.get_format()
1161
 
        tree = self.make_branch_and_tree('.', format=format)
1162
 
        tree.lock_read()
1163
 
        self.addCleanup(tree.unlock)
1164
 
        packs = tree.branch.repository._pack_collection
1165
 
        packs.ensure_loaded()
1166
 
        self.assertEqual([], packs.all_packs())
1167
 
 
1168
 
    def test_all_packs_one(self):
1169
 
        format = self.get_format()
1170
 
        tree = self.make_branch_and_tree('.', format=format)
1171
 
        tree.commit('start')
1172
 
        tree.lock_read()
1173
 
        self.addCleanup(tree.unlock)
1174
 
        packs = tree.branch.repository._pack_collection
1175
 
        packs.ensure_loaded()
1176
 
        self.assertEqual([
1177
 
            packs.get_pack_by_name(packs.names()[0])],
1178
 
            packs.all_packs())
1179
 
 
1180
 
    def test_all_packs_two(self):
1181
 
        format = self.get_format()
1182
 
        tree = self.make_branch_and_tree('.', format=format)
1183
 
        tree.commit('start')
1184
 
        tree.commit('continue')
1185
 
        tree.lock_read()
1186
 
        self.addCleanup(tree.unlock)
1187
 
        packs = tree.branch.repository._pack_collection
1188
 
        packs.ensure_loaded()
1189
 
        self.assertEqual([
1190
 
            packs.get_pack_by_name(packs.names()[0]),
1191
 
            packs.get_pack_by_name(packs.names()[1]),
1192
 
            ], packs.all_packs())
1193
 
 
1194
 
    def test_get_pack_by_name(self):
1195
 
        format = self.get_format()
1196
 
        tree = self.make_branch_and_tree('.', format=format)
1197
 
        tree.commit('start')
1198
 
        tree.lock_read()
1199
 
        self.addCleanup(tree.unlock)
1200
 
        packs = tree.branch.repository._pack_collection
1201
 
        packs.reset()
1202
 
        packs.ensure_loaded()
1203
 
        name = packs.names()[0]
1204
 
        pack_1 = packs.get_pack_by_name(name)
1205
 
        # the pack should be correctly initialised
1206
 
        sizes = packs._names[name]
1207
 
        rev_index = GraphIndex(packs._index_transport, name + '.rix', sizes[0])
1208
 
        inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1])
1209
 
        txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2])
1210
 
        sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3])
1211
 
        self.assertEqual(pack_repo.ExistingPack(packs._pack_transport,
1212
 
            name, rev_index, inv_index, txt_index, sig_index), pack_1)
1213
 
        # and the same instance should be returned on successive calls.
1214
 
        self.assertTrue(pack_1 is packs.get_pack_by_name(name))
1215
 
 
1216
 
    def test_reload_pack_names_new_entry(self):
1217
 
        tree, r, packs, revs = self.make_packs_and_alt_repo()
1218
 
        names = packs.names()
1219
 
        # Add a new pack file into the repository
1220
 
        rev4 = tree.commit('four')
1221
 
        new_names = tree.branch.repository._pack_collection.names()
1222
 
        new_name = set(new_names).difference(names)
1223
 
        self.assertEqual(1, len(new_name))
1224
 
        new_name = new_name.pop()
1225
 
        # The old collection hasn't noticed yet
1226
 
        self.assertEqual(names, packs.names())
1227
 
        self.assertTrue(packs.reload_pack_names())
1228
 
        self.assertEqual(new_names, packs.names())
1229
 
        # And the repository can access the new revision
1230
 
        self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4]))
1231
 
        self.assertFalse(packs.reload_pack_names())
1232
 
 
1233
 
    def test_reload_pack_names_added_and_removed(self):
1234
 
        tree, r, packs, revs = self.make_packs_and_alt_repo()
1235
 
        names = packs.names()
1236
 
        # Now repack the whole thing
1237
 
        tree.branch.repository.pack()
1238
 
        new_names = tree.branch.repository._pack_collection.names()
1239
 
        # The other collection hasn't noticed yet
1240
 
        self.assertEqual(names, packs.names())
1241
 
        self.assertTrue(packs.reload_pack_names())
1242
 
        self.assertEqual(new_names, packs.names())
1243
 
        self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]]))
1244
 
        self.assertFalse(packs.reload_pack_names())
1245
 
 
1246
 
    def test_reload_pack_names_preserves_pending(self):
1247
 
        # TODO: Update this to also test for pending-deleted names
1248
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1249
 
        # We will add one pack (via start_write_group + insert_record_stream),
1250
 
        # and remove another pack (via _remove_pack_from_memory)
1251
 
        orig_names = packs.names()
1252
 
        orig_at_load = packs._packs_at_load
1253
 
        to_remove_name = iter(orig_names).next()
1254
 
        r.start_write_group()
1255
 
        self.addCleanup(r.abort_write_group)
1256
 
        r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
1257
 
            ('text', 'rev'), (), None, 'content\n')])
1258
 
        new_pack = packs._new_pack
1259
 
        self.assertTrue(new_pack.data_inserted())
1260
 
        new_pack.finish()
1261
 
        packs.allocate(new_pack)
1262
 
        packs._new_pack = None
1263
 
        removed_pack = packs.get_pack_by_name(to_remove_name)
1264
 
        packs._remove_pack_from_memory(removed_pack)
1265
 
        names = packs.names()
1266
 
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
1267
 
        new_names = set([x[0][0] for x in new_nodes])
1268
 
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
1269
 
        self.assertEqual(set(names) - set(orig_names), new_names)
1270
 
        self.assertEqual(set([new_pack.name]), new_names)
1271
 
        self.assertEqual([to_remove_name],
1272
 
                         sorted([x[0][0] for x in deleted_nodes]))
1273
 
        packs.reload_pack_names()
1274
 
        reloaded_names = packs.names()
1275
 
        self.assertEqual(orig_at_load, packs._packs_at_load)
1276
 
        self.assertEqual(names, reloaded_names)
1277
 
        all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names()
1278
 
        new_names = set([x[0][0] for x in new_nodes])
1279
 
        self.assertEqual(names, sorted([x[0][0] for x in all_nodes]))
1280
 
        self.assertEqual(set(names) - set(orig_names), new_names)
1281
 
        self.assertEqual(set([new_pack.name]), new_names)
1282
 
        self.assertEqual([to_remove_name],
1283
 
                         sorted([x[0][0] for x in deleted_nodes]))
1284
 
 
1285
 
    def test_autopack_obsoletes_new_pack(self):
1286
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1287
 
        packs._max_pack_count = lambda x: 1
1288
 
        packs.pack_distribution = lambda x: [10]
1289
 
        r.start_write_group()
1290
 
        r.revisions.insert_record_stream([versionedfile.FulltextContentFactory(
1291
 
            ('bogus-rev',), (), None, 'bogus-content\n')])
1292
 
        # This should trigger an autopack, which will combine everything into a
1293
 
        # single pack file.
1294
 
        new_names = r.commit_write_group()
1295
 
        names = packs.names()
1296
 
        self.assertEqual(1, len(names))
1297
 
        self.assertEqual([names[0] + '.pack'],
1298
 
                         packs._pack_transport.list_dir('.'))
1299
 
 
1300
 
    def test_autopack_reloads_and_stops(self):
1301
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1302
 
        # After we have determined what needs to be autopacked, trigger a
1303
 
        # full-pack via the other repo which will cause us to re-evaluate and
1304
 
        # decide we don't need to do anything
1305
 
        orig_execute = packs._execute_pack_operations
1306
 
        def _munged_execute_pack_ops(*args, **kwargs):
1307
 
            tree.branch.repository.pack()
1308
 
            return orig_execute(*args, **kwargs)
1309
 
        packs._execute_pack_operations = _munged_execute_pack_ops
1310
 
        packs._max_pack_count = lambda x: 1
1311
 
        packs.pack_distribution = lambda x: [10]
1312
 
        self.assertFalse(packs.autopack())
1313
 
        self.assertEqual(1, len(packs.names()))
1314
 
        self.assertEqual(tree.branch.repository._pack_collection.names(),
1315
 
                         packs.names())
1316
 
 
1317
 
    def test__save_pack_names(self):
1318
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1319
 
        names = packs.names()
1320
 
        pack = packs.get_pack_by_name(names[0])
1321
 
        packs._remove_pack_from_memory(pack)
1322
 
        packs._save_pack_names(obsolete_packs=[pack])
1323
 
        cur_packs = packs._pack_transport.list_dir('.')
1324
 
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
1325
 
        # obsolete_packs will also have stuff like .rix and .iix present.
1326
 
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
1327
 
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
1328
 
        self.assertEqual([pack.name], sorted(obsolete_names))
1329
 
 
1330
 
    def test__save_pack_names_already_obsoleted(self):
1331
 
        tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True)
1332
 
        names = packs.names()
1333
 
        pack = packs.get_pack_by_name(names[0])
1334
 
        packs._remove_pack_from_memory(pack)
1335
 
        # We are going to simulate a concurrent autopack by manually obsoleting
1336
 
        # the pack directly.
1337
 
        packs._obsolete_packs([pack])
1338
 
        packs._save_pack_names(clear_obsolete_packs=True,
1339
 
                               obsolete_packs=[pack])
1340
 
        cur_packs = packs._pack_transport.list_dir('.')
1341
 
        self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs))
1342
 
        # Note that while we set clear_obsolete_packs=True, it should not
1343
 
        # delete a pack file that we have also scheduled for obsoletion.
1344
 
        obsolete_packs = packs.transport.list_dir('obsolete_packs')
1345
 
        obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs])
1346
 
        self.assertEqual([pack.name], sorted(obsolete_names))
1347
 
 
1348
 
 
1349
 
 
1350
 
class TestPack(TestCaseWithTransport):
    """Tests for the Pack object."""

    def assertCurrentlyEqual(self, left, right):
        # Check equality symmetrically through both __eq__ and __ne__.
        self.assertTrue(left == right)
        self.assertTrue(right == left)
        self.assertFalse(left != right)
        self.assertFalse(right != left)

    def assertCurrentlyNotEqual(self, left, right):
        # Check inequality symmetrically through both __eq__ and __ne__.
        self.assertFalse(left == right)
        self.assertFalse(right == left)
        self.assertTrue(left != right)
        self.assertTrue(right != left)

    def test___eq____ne__(self):
        """Equality of ExistingPack depends on every attribute."""
        left = pack_repo.ExistingPack('', '', '', '', '', '')
        right = pack_repo.ExistingPack('', '', '', '', '', '')
        self.assertCurrentlyEqual(left, right)
        # change all attributes and ensure equality changes as we do.
        for attr in ('revision_index', 'inventory_index', 'text_index',
                     'signature_index', 'name', 'transport'):
            setattr(left, attr, 'a')
            self.assertCurrentlyNotEqual(left, right)
            setattr(right, attr, 'a')
            self.assertCurrentlyEqual(left, right)

    def test_file_name(self):
        """file_name() is the pack name with a '.pack' suffix."""
        pack = pack_repo.ExistingPack('', 'a_name', '', '', '', '')
        self.assertEqual('a_name.pack', pack.file_name())
class TestNewPack(TestCaseWithTransport):
    """Tests for pack_repo.NewPack."""

    def test_new_instance_attributes(self):
        """A fresh NewPack exposes indices, transports and naming state."""
        upload_transport = self.get_transport('upload')
        pack_transport = self.get_transport('pack')
        index_transport = self.get_transport('index')
        upload_transport.mkdir('.')
        collection = pack_repo.RepositoryPackCollection(
            repo=None,
            transport=self.get_transport('.'),
            index_transport=index_transport,
            upload_transport=upload_transport,
            pack_transport=pack_transport,
            index_builder_class=BTreeBuilder,
            index_class=BTreeGraphIndex,
            use_chk_index=False)
        pack = pack_repo.NewPack(collection)
        self.addCleanup(pack.abort) # Make sure the write stream gets closed
        self.assertIsInstance(pack.revision_index, BTreeBuilder)
        self.assertIsInstance(pack.inventory_index, BTreeBuilder)
        self.assertIsInstance(pack._hash, type(osutils.md5()))
        self.assertTrue(pack.upload_transport is upload_transport)
        self.assertTrue(pack.index_transport is index_transport)
        self.assertTrue(pack.pack_transport is pack_transport)
        self.assertEqual(None, pack.index_sizes)
        # random_name is a 20-character string used for the upload file.
        self.assertEqual(20, len(pack.random_name))
        self.assertIsInstance(pack.random_name, str)
        self.assertIsInstance(pack.start_time, float)
class TestPacker(TestCaseWithTransport):
    """Tests for the packs repository Packer class."""

    def test_pack_optimizes_pack_order(self):
        """Packing reorders pack files so requested revisions come first."""
        builder = self.make_branch_builder('.', format="1.9")
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('f', 'f-id', 'file', 'content\n'))])
        builder.build_snapshot('B', ['A'],
            [('modify', ('f-id', 'new-content\n'))])
        builder.build_snapshot('C', ['B'],
            [('modify', ('f-id', 'third-content\n'))])
        builder.build_snapshot('D', ['C'],
            [('modify', ('f-id', 'fourth-content\n'))])
        b = builder.get_branch()
        b.lock_read()
        builder.finish_series()
        self.addCleanup(b.unlock)
        # At this point, we should have 4 pack files available
        # Because of how they were built, they correspond to
        # ['D', 'C', 'B', 'A']
        packs = b.repository._pack_collection.packs
        packer = knitpack_repo.KnitPacker(b.repository._pack_collection,
                                  packs, 'testing',
                                  revision_ids=['B', 'C'])
        # Now, when we are copying the B & C revisions, their pack files should
        # be moved to the front of the stack
        # The new ordering moves B & C to the front of the .packs attribute,
        # and leaves the others in the original order.
        new_packs = [packs[1], packs[2], packs[0], packs[3]]
        new_pack = packer.pack()
        self.assertEqual(new_packs, packer.packs)
class TestOptimisingPacker(TestCaseWithTransport):
    """Tests for the OptimisingPacker class."""

    def get_pack_collection(self):
        # Helper: a pack collection from a fresh default-format repository.
        repo = self.make_repository('.')
        return repo._pack_collection

    def test_open_pack_will_optimise(self):
        """open_pack marks all four indices for size optimisation."""
        packer = knitpack_repo.OptimisingKnitPacker(self.get_pack_collection(),
                                            [], '.test')
        new_pack = packer.open_pack()
        self.addCleanup(new_pack.abort) # ensure cleanup
        self.assertIsInstance(new_pack, pack_repo.NewPack)
        self.assertTrue(new_pack.revision_index._optimize_for_size)
        self.assertTrue(new_pack.inventory_index._optimize_for_size)
        self.assertTrue(new_pack.text_index._optimize_for_size)
        self.assertTrue(new_pack.signature_index._optimize_for_size)
class TestGCCHKPacker(TestCaseWithTransport):
    """Tests for the groupcompress GCCHKPacker."""

    def make_abc_branch(self):
        # Helper: a branch with three linear revisions A -> B -> C.
        builder = self.make_branch_builder('source')
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('file', 'file-id', 'file', 'content\n')),
            ])
        builder.build_snapshot('B', ['A'], [
            ('add', ('dir', 'dir-id', 'directory', None))])
        builder.build_snapshot('C', ['B'], [
            ('modify', ('file-id', 'new content\n'))])
        builder.finish_series()
        return builder.get_branch()

    def make_branch_with_disjoint_inventory_and_revision(self):
        """a repo with separate packs for a revisions Revision and Inventory.

        There will be one pack file that holds the Revision content, and one
        for the Inventory content.

        :return: (repository,
                  pack_name_with_rev_A_Revision,
                  pack_name_with_rev_A_Inventory,
                  pack_name_with_rev_C_content)
        """
        b_source = self.make_abc_branch()
        b_base = b_source.bzrdir.sprout('base', revision_id='A').open_branch()
        b_stacked = b_base.bzrdir.sprout('stacked', stacked=True).open_branch()
        b_stacked.lock_write()
        self.addCleanup(b_stacked.unlock)
        b_stacked.fetch(b_source, 'B')
        # Now re-open the stacked repo directly (no fallbacks) so that we can
        # fill in the A rev.
        repo_not_stacked = b_stacked.bzrdir.open_repository()
        repo_not_stacked.lock_write()
        self.addCleanup(repo_not_stacked.unlock)
        # Now we should have a pack file with A's inventory, but not its
        # Revision
        self.assertEqual([('A',), ('B',)],
                         sorted(repo_not_stacked.inventories.keys()))
        self.assertEqual([('B',)],
                         sorted(repo_not_stacked.revisions.keys()))
        stacked_pack_names = repo_not_stacked._pack_collection.names()
        # We have a couple names here, figure out which has A's inventory
        for name in stacked_pack_names:
            pack = repo_not_stacked._pack_collection.get_pack_by_name(name)
            keys = [n[1] for n in pack.inventory_index.iter_all_entries()]
            if ('A',) in keys:
                inv_a_pack_name = name
                break
        else:
            self.fail('Could not find pack containing A\'s inventory')
        repo_not_stacked.fetch(b_source.repository, 'A')
        self.assertEqual([('A',), ('B',)],
                         sorted(repo_not_stacked.revisions.keys()))
        new_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_a_pack_names = new_pack_names.difference(stacked_pack_names)
        self.assertEqual(1, len(rev_a_pack_names))
        rev_a_pack_name = list(rev_a_pack_names)[0]
        # Now fetch 'C', so we have a couple pack files to join
        repo_not_stacked.fetch(b_source.repository, 'C')
        rev_c_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_c_pack_names = rev_c_pack_names.difference(new_pack_names)
        self.assertEqual(1, len(rev_c_pack_names))
        rev_c_pack_name = list(rev_c_pack_names)[0]
        return (repo_not_stacked, rev_a_pack_name, inv_a_pack_name,
                rev_c_pack_name)

    def test_pack_with_distant_inventories(self):
        """Repacking copes with an inventory in a different pack file."""
        # See https://bugs.launchpad.net/bzr/+bug/437003
        # When repacking, it is possible to have an inventory in a different
        # pack file than the associated revision. An autopack can then come
        # along, and miss that inventory, and complain.
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        a_pack = repo._pack_collection.get_pack_by_name(rev_a_pack_name)
        c_pack = repo._pack_collection.get_pack_by_name(rev_c_pack_name)
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
                    [a_pack, c_pack], '.test-pack')
        # This would raise ValueError in bug #437003, but should not raise an
        # error once fixed.
        packer.pack()

    def test_pack_with_missing_inventory(self):
        """A truly missing inventory raises a descriptive ValueError."""
        # Similar to test_pack_with_distant_inventories, but this time, we
        # force the A inventory to actually be gone from the repository.
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        inv_a_pack = repo._pack_collection.get_pack_by_name(inv_a_pack_name)
        repo._pack_collection._remove_pack_from_memory(inv_a_pack)
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
            repo._pack_collection.all_packs(), '.test-pack')
        e = self.assertRaises(ValueError, packer.pack)
        packer.new_pack.abort()
        self.assertContainsRe(str(e),
            r"We are missing inventories for revisions: .*'A'")
class TestCrossFormatPacks(TestCaseWithTransport):
    """Tests that cross-format fetches pack (or not) as the sink requests."""

    def log_pack(self, hint=None):
        # Wrapper around the target repo's pack() that records each call.
        self.calls.append(('pack', hint))
        self.orig_pack(hint=hint)
        if self.expect_hint:
            self.assertTrue(hint)

    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
        """Fetch via the stream/sink API and check whether pack() was called."""
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository._get_source(target._format)
        self.orig_pack = target.pack
        self.overrideAttr(target, "pack", self.log_pack)
        search = target.search_missing_revision_ids(
            source_tree.branch.repository, revision_ids=[tip])
        stream = source.get_stream(search)
        from_format = source_tree.branch.repository._format
        sink = target._get_sink()
        sink.insert_stream(stream, from_format, [])
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
        """Fetch via Repository.fetch and check whether pack() was called."""
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository
        self.orig_pack = target.pack
        self.overrideAttr(target, "pack", self.log_pack)
        target.fetch(source)
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def test_sink_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_stream('1.9', 'rich-root-pack', False)

    def test_sink_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_stream('1.9', '2a', True)

    def test_sink_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_stream('2a', '2a', False)

    def test_IDS_format_hint_no(self):
        # When the target format says packing makes no difference, pack is not
        # called.
        self.run_fetch('1.9', 'rich-root-pack', False)

    def test_IDS_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_fetch('1.9', '2a', True)

    def test_IDS_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_fetch('2a', '2a', False)
class Test_LazyListJoin(tests.TestCase):
    """Tests for repository._LazyListJoin."""

    def test__repr__(self):
        """repr() shows the qualified class name and the joined lists."""
        lazy = repository._LazyListJoin(['a'], ['b'])
        self.assertEqual("bzrlib.repository._LazyListJoin((['a'], ['b']))",
                         repr(lazy))