~bzr-pqm/bzr/bzr.dev


Viewing changes to bzrlib/tests/test_fetch.py

  • Committer: Martin Pool
  • Date: 2010-01-29 10:36:23 UTC
  • mto: This revision was merged to the branch mainline in revision 4992.
  • Revision ID: mbp@sourcefrog.net-20100129103623-hywka5hymo5z13jw
Change url to canonical.com or wiki, plus some doc improvements in passing
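
To regenerate this change locally rather than read it through the web view, a minimal sketch (assuming a local branch of lp:bzr whose repository contains this revision; `bzr diff -c`/`--change` and the `revid:` revision spec are standard bzr options):

    # branch the development trunk and show only this revision's changes to this file
    bzr branch lp:bzr bzr.dev
    cd bzr.dev
    bzr diff -c revid:mbp@sourcefrog.net-20100129103623-hywka5hymo5z13jw bzrlib/tests/test_fetch.py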

=== modified file 'bzrlib/tests/test_fetch.py'
@@ -1,4 +1,4 @@
-# Copyright (C) 2005 Canonical Ltd
+# Copyright (C) 2005, 2007 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -12,48 +12,54 @@
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
 import os
 import re
 import sys
 
+import bzrlib
 from bzrlib import (
     bzrdir,
     errors,
+    osutils,
+    merge,
     repository,
+    versionedfile,
     )
 from bzrlib.branch import Branch
 from bzrlib.bzrdir import BzrDir
-from bzrlib.builtins import merge
-import bzrlib.errors
 from bzrlib.repofmt import knitrepo
 from bzrlib.tests import TestCaseWithTransport
-from bzrlib.tests.HTTPTestUtil import TestCaseWithWebserver
+from bzrlib.tests.http_utils import TestCaseWithWebserver
 from bzrlib.tests.test_revision import make_branches
 from bzrlib.trace import mutter
 from bzrlib.upgrade import Convert
 from bzrlib.workingtree import WorkingTree
 
+# These tests are a bit old; please instead add new tests into
+# per_interrepository/ so they'll run on all relevant
+# combinations.
+
 
 def has_revision(branch, revision_id):
     return branch.repository.has_revision(revision_id)
 
 def fetch_steps(self, br_a, br_b, writable_a):
     """A foreign test method for testing fetch locally and remotely."""
-     
+
     # TODO RBC 20060201 make this a repository test.
     repo_b = br_b.repository
     self.assertFalse(repo_b.has_revision(br_a.revision_history()[3]))
     self.assertTrue(repo_b.has_revision(br_a.revision_history()[2]))
     self.assertEquals(len(br_b.revision_history()), 7)
-    self.assertEquals(br_b.fetch(br_a, br_a.revision_history()[2])[0], 0)
+    br_b.fetch(br_a, br_a.revision_history()[2])
     # branch.fetch is not supposed to alter the revision history
     self.assertEquals(len(br_b.revision_history()), 7)
     self.assertFalse(repo_b.has_revision(br_a.revision_history()[3]))
 
     # fetching the next revision up in sample data copies one revision
-    self.assertEquals(br_b.fetch(br_a, br_a.revision_history()[3])[0], 1)
+    br_b.fetch(br_a, br_a.revision_history()[3])
     self.assertTrue(repo_b.has_revision(br_a.revision_history()[3]))
     self.assertFalse(has_revision(br_a, br_b.revision_history()[6]))
     self.assertTrue(br_a.repository.has_revision(br_b.revision_history()[5]))
@@ -61,54 +67,46 @@
     # When a non-branch ancestor is missing, it should be unlisted...
     # as its not reference from the inventory weave.
     br_b4 = self.make_branch('br_4')
-    count, failures = br_b4.fetch(br_b)
-    self.assertEqual(count, 7)
-    self.assertEqual(failures, [])
+    br_b4.fetch(br_b)
 
-    self.assertEqual(writable_a.fetch(br_b)[0], 1)
+    writable_a.fetch(br_b)
     self.assertTrue(has_revision(br_a, br_b.revision_history()[3]))
     self.assertTrue(has_revision(br_a, br_b.revision_history()[4]))
-        
+
     br_b2 = self.make_branch('br_b2')
-    self.assertEquals(br_b2.fetch(br_b)[0], 7)
+    br_b2.fetch(br_b)
     self.assertTrue(has_revision(br_b2, br_b.revision_history()[4]))
     self.assertTrue(has_revision(br_b2, br_a.revision_history()[2]))
     self.assertFalse(has_revision(br_b2, br_a.revision_history()[3]))
 
     br_a2 = self.make_branch('br_a2')
-    self.assertEquals(br_a2.fetch(br_a)[0], 9)
+    br_a2.fetch(br_a)
     self.assertTrue(has_revision(br_a2, br_b.revision_history()[4]))
     self.assertTrue(has_revision(br_a2, br_a.revision_history()[3]))
     self.assertTrue(has_revision(br_a2, br_a.revision_history()[2]))
 
     br_a3 = self.make_branch('br_a3')
-    # pulling a branch with no revisions grabs nothing, regardless of 
+    # pulling a branch with no revisions grabs nothing, regardless of
     # whats in the inventory.
-    self.assertEquals(br_a3.fetch(br_a2)[0], 0)
+    br_a3.fetch(br_a2)
     for revno in range(4):
         self.assertFalse(
             br_a3.repository.has_revision(br_a.revision_history()[revno]))
-    self.assertEqual(br_a3.fetch(br_a2, br_a.revision_history()[2])[0], 3)
+    br_a3.fetch(br_a2, br_a.revision_history()[2])
     # pull the 3 revisions introduced by a@u-0-3
-    fetched = br_a3.fetch(br_a2, br_a.revision_history()[3])[0]
-    self.assertEquals(fetched, 3, "fetched %d instead of 3" % fetched)
-    # InstallFailed should be raised if the branch is missing the revision
+    br_a3.fetch(br_a2, br_a.revision_history()[3])
+    # NoSuchRevision should be raised if the branch is missing the revision
     # that was requested.
-    self.assertRaises(bzrlib.errors.InstallFailed, br_a3.fetch, br_a2, 'pizza')
-    # InstallFailed should be raised if the branch is missing a revision
-    # from its own revision history
-    br_a2.append_revision('a-b-c')
-    self.assertRaises(bzrlib.errors.InstallFailed, br_a3.fetch, br_a2)
+    self.assertRaises(errors.NoSuchRevision, br_a3.fetch, br_a2, 'pizza')
 
-    # TODO: ADHB 20070116 Perhaps set_last_revision shouldn't accept
-    #       revisions which are not present?  In that case, this test
-    #       must be rewritten.
-    #
-    #       RBC 20060403 the way to do this is to uncommit the revision from
-    #       the repository after the commit
+    # TODO: Test trying to fetch from a branch that points to a revision not
+    # actually present in its repository.  Not every branch format allows you
+    # to directly point to such revisions, so it's a bit complicated to
+    # construct.  One way would be to uncommit and gc the revision, but not
+    # every branch supports that.  -- mbp 20070814
 
     #TODO: test that fetch correctly does reweaving when needed. RBC 20051008
-    # Note that this means - updating the weave when ghosts are filled in to 
+    # Note that this means - updating the weave when ghosts are filled in to
     # add the right parents.
 
 
@@ -116,16 +114,16 @@
 
     def test_fetch(self):
         #highest indices a: 5, b: 7
-        br_a, br_b = make_branches(self)
+        br_a, br_b = make_branches(self, format='dirstate-tags')
         fetch_steps(self, br_a, br_b, br_a)
 
     def test_fetch_self(self):
         wt = self.make_branch_and_tree('br')
-        self.assertEqual(wt.branch.fetch(wt.branch), (0, []))
+        wt.branch.fetch(wt.branch)
 
     def test_fetch_root_knit(self):
         """Ensure that knit2.fetch() updates the root knit
-        
+
         This tests the case where the root has a new revision, but there are no
         corresponding filename, parent, contents or other changes.
         """
@@ -147,25 +145,35 @@
         branch = self.make_branch('branch', format=knit2_format)
         branch.pull(tree.branch, stop_revision='rev1')
         repo = branch.repository
-        root_knit = repo.weave_store.get_weave('tree-root',
-                                                repo.get_transaction())
-        # Make sure fetch retrieved only what we requested
-        self.assertTrue('rev1' in root_knit)
-        self.assertTrue('rev2' not in root_knit)
+        repo.lock_read()
+        try:
+            # Make sure fetch retrieved only what we requested
+            self.assertEqual({('tree-root', 'rev1'):()},
+                repo.texts.get_parent_map(
+                    [('tree-root', 'rev1'), ('tree-root', 'rev2')]))
+        finally:
+            repo.unlock()
         branch.pull(tree.branch)
-        root_knit = repo.weave_store.get_weave('tree-root',
-                                                repo.get_transaction())
         # Make sure that the next revision in the root knit was retrieved,
         # even though the text, name, parent_id, etc., were unchanged.
-        self.assertTrue('rev2' in root_knit)
+        repo.lock_read()
+        try:
+            # Make sure fetch retrieved only what we requested
+            self.assertEqual({('tree-root', 'rev2'):(('tree-root', 'rev1'),)},
+                repo.texts.get_parent_map([('tree-root', 'rev2')]))
+        finally:
+            repo.unlock()
 
     def test_fetch_incompatible(self):
         knit_tree = self.make_branch_and_tree('knit', format='knit')
         knit3_tree = self.make_branch_and_tree('knit3',
             format='dirstate-with-subtree')
         knit3_tree.commit('blah')
-        self.assertRaises(errors.IncompatibleRepositories,
-                          knit_tree.branch.fetch, knit3_tree.branch)
+        e = self.assertRaises(errors.IncompatibleRepositories,
+                              knit_tree.branch.fetch, knit3_tree.branch)
+        self.assertContainsRe(str(e),
+            r"(?m).*/knit.*\nis not compatible with\n.*/knit3/.*\n"
+            r"different rich-root support")
 
 
 class TestMergeFetch(TestCaseWithTransport):
@@ -179,8 +187,7 @@
         wt2 = self.make_branch_and_tree('br2')
         br2 = wt2.branch
         wt2.commit(message='rev 2-1', rev_id='2-1')
-        merge(other_revision=['br1', -1], base_revision=['br1', 0],
-              this_dir='br2')
+        wt2.merge_from_branch(br1, from_revision='null:')
         self._check_revs_present(br2)
 
     def test_merge_fetches(self):
@@ -191,9 +198,9 @@
         dir_2 = br1.bzrdir.sprout('br2')
         br2 = dir_2.open_branch()
         wt1.commit(message='rev 1-2', rev_id='1-2')
-        dir_2.open_workingtree().commit(message='rev 2-1', rev_id='2-1')
-        merge(other_revision=['br1', -1], base_revision=[None, None], 
-              this_dir='br2')
+        wt2 = dir_2.open_workingtree()
+        wt2.commit(message='rev 2-1', rev_id='2-1')
+        wt2.merge_from_branch(br1)
         self._check_revs_present(br2)
 
     def _check_revs_present(self, br2):
@@ -228,8 +235,10 @@
     def test_merge_fetches_file_history(self):
         """Merge brings across file histories"""
         br2 = Branch.open('br2')
-        merge(other_revision=['br1', -1], base_revision=[None, None], 
-              this_dir='br2')
+        br1 = Branch.open('br1')
+        wt2 = WorkingTree.open('br2').merge_from_branch(br1)
+        br2.lock_read()
+        self.addCleanup(br2.unlock)
         for rev_id, text in [('1-2', 'original from 1\n'),
                              ('1-3', 'agreement\n'),
                              ('2-1', 'contents in 2\n'),
@@ -263,28 +272,31 @@
 
     def test_weaves_are_retrieved_once(self):
         self.build_tree(("source/", "source/file", "target/"))
-        wt = self.make_branch_and_tree('source')
+        # This test depends on knit dasta storage.
+        wt = self.make_branch_and_tree('source', format='dirstate-tags')
         branch = wt.branch
         wt.add(["file"], ["id"])
         wt.commit("added file")
-        print >>open("source/file", 'w'), "blah"
+        open("source/file", 'w').write("blah\n")
        wt.commit("changed file")
         target = BzrDir.create_branch_and_repo("target/")
         source = Branch.open(self.get_readonly_url("source/"))
-        self.assertEqual(target.fetch(source), (2, []))
-        # this is the path to the literal file. As format changes 
+        target.fetch(source)
+        # this is the path to the literal file. As format changes
         # occur it needs to be updated. FIXME: ask the store for the
         # path.
         self.log("web server logs are:")
         http_logs = self.get_readonly_server().logs
         self.log('\n'.join(http_logs))
-        # unfortunately this log entry is branch format specific. We could 
-        # factor out the 'what files does this format use' to a method on the 
+        # unfortunately this log entry is branch format specific. We could
+        # factor out the 'what files does this format use' to a method on the
         # repository, which would let us to this generically. RBC 20060419
+        # RBC 20080408: Or perhaps we can assert that no files are fully read
+        # twice?
         self.assertEqual(1, self._count_log_matches('/ce/id.kndx', http_logs))
         self.assertEqual(1, self._count_log_matches('/ce/id.knit', http_logs))
         self.assertEqual(1, self._count_log_matches('inventory.kndx', http_logs))
-        # this r-h check test will prevent regressions, but it currently already 
+        # this r-h check test will prevent regressions, but it currently already
         # passes, before the patch to cache-rh is applied :[
         self.assertTrue(1 >= self._count_log_matches('revision-history',
                                                      http_logs))
@@ -292,18 +304,298 @@
                                                      http_logs))
         # FIXME naughty poking in there.
         self.get_readonly_server().logs = []
-        # check there is nothing more to fetch
-        source = Branch.open(self.get_readonly_url("source/"))
-        self.assertEqual(target.fetch(source), (0, []))
+        # check there is nothing more to fetch.  We take care to re-use the
+        # existing transport so that the request logs we're about to examine
+        # aren't cluttered with redundant probes for a smart server.
+        # XXX: Perhaps this further parameterisation: test http with smart
+        # server, and test http without smart server?
+        source = Branch.open(
+            self.get_readonly_url("source/"),
+            possible_transports=[source.bzrdir.root_transport])
+        target.fetch(source)
         # should make just two requests
         http_logs = self.get_readonly_server().logs
         self.log("web server logs are:")
         self.log('\n'.join(http_logs))
         self.assertEqual(1, self._count_log_matches('branch-format', http_logs))
         self.assertEqual(1, self._count_log_matches('branch/format', http_logs))
-        self.assertEqual(1, self._count_log_matches('repository/format', http_logs))
+        self.assertEqual(1, self._count_log_matches('repository/format',
+            http_logs))
+        self.assertEqual(1, self._count_log_matches('revisions.kndx',
+            http_logs))
         self.assertTrue(1 >= self._count_log_matches('revision-history',
                                                      http_logs))
         self.assertTrue(1 >= self._count_log_matches('last-revision',
                                                      http_logs))
-        self.assertEqual(4, len(http_logs))
+        self.assertLength(5, http_logs)
+
+
+class TestKnitToPackFetch(TestCaseWithTransport):
+
+    def find_get_record_stream(self, calls, expected_count=1):
+        """In a list of calls, find the last 'get_record_stream'.
+
+        :param expected_count: The number of calls we should exepect to find.
+            If a different number is found, an assertion is raised.
+        """
+        get_record_call = None
+        call_count = 0
+        for call in calls:
+            if call[0] == 'get_record_stream':
+                call_count += 1
+                get_record_call = call
+        self.assertEqual(expected_count, call_count)
+        return get_record_call
+
+    def test_fetch_with_deltas_no_delta_closure(self):
+        tree = self.make_branch_and_tree('source', format='dirstate')
+        target = self.make_repository('target', format='pack-0.92')
+        self.build_tree(['source/file'])
+        tree.set_root_id('root-id')
+        tree.add('file', 'file-id')
+        tree.commit('one', rev_id='rev-one')
+        source = tree.branch.repository
+        source.texts = versionedfile.RecordingVersionedFilesDecorator(
+                        source.texts)
+        source.signatures = versionedfile.RecordingVersionedFilesDecorator(
+                        source.signatures)
+        source.revisions = versionedfile.RecordingVersionedFilesDecorator(
+                        source.revisions)
+        source.inventories = versionedfile.RecordingVersionedFilesDecorator(
+                        source.inventories)
+        # precondition
+        self.assertTrue(target._format._fetch_uses_deltas)
+        target.fetch(source, revision_id='rev-one')
+        self.assertEqual(('get_record_stream', [('file-id', 'rev-one')],
+                          target._format._fetch_order, False),
+                         self.find_get_record_stream(source.texts.calls))
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+          target._format._fetch_order, False),
+          self.find_get_record_stream(source.inventories.calls, 2))
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+                          target._format._fetch_order, False),
+                         self.find_get_record_stream(source.revisions.calls))
+        # XXX: Signatures is special, and slightly broken. The
+        # standard item_keys_introduced_by actually does a lookup for every
+        # signature to see if it exists, rather than waiting to do them all at
+        # once at the end. The fetch code then does an all-at-once and just
+        # allows for some of them to be missing.
+        # So we know there will be extra calls, but the *last* one is the one
+        # we care about.
+        signature_calls = source.signatures.calls[-1:]
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+                          target._format._fetch_order, False),
+                         self.find_get_record_stream(signature_calls))
+
+    def test_fetch_no_deltas_with_delta_closure(self):
+        tree = self.make_branch_and_tree('source', format='dirstate')
+        target = self.make_repository('target', format='pack-0.92')
+        self.build_tree(['source/file'])
+        tree.set_root_id('root-id')
+        tree.add('file', 'file-id')
+        tree.commit('one', rev_id='rev-one')
+        source = tree.branch.repository
+        source.texts = versionedfile.RecordingVersionedFilesDecorator(
+                        source.texts)
+        source.signatures = versionedfile.RecordingVersionedFilesDecorator(
+                        source.signatures)
+        source.revisions = versionedfile.RecordingVersionedFilesDecorator(
+                        source.revisions)
+        source.inventories = versionedfile.RecordingVersionedFilesDecorator(
+                        source.inventories)
+        # XXX: This won't work in general, but for the dirstate format it does.
+        self.overrideAttr(target._format, '_fetch_uses_deltas', False)
+        target.fetch(source, revision_id='rev-one')
+        self.assertEqual(('get_record_stream', [('file-id', 'rev-one')],
+                          target._format._fetch_order, True),
+                         self.find_get_record_stream(source.texts.calls))
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+            target._format._fetch_order, True),
+            self.find_get_record_stream(source.inventories.calls, 2))
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+                          target._format._fetch_order, True),
+                         self.find_get_record_stream(source.revisions.calls))
+        # XXX: Signatures is special, and slightly broken. The
+        # standard item_keys_introduced_by actually does a lookup for every
+        # signature to see if it exists, rather than waiting to do them all at
+        # once at the end. The fetch code then does an all-at-once and just
+        # allows for some of them to be missing.
+        # So we know there will be extra calls, but the *last* one is the one
+        # we care about.
+        signature_calls = source.signatures.calls[-1:]
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+                          target._format._fetch_order, True),
+                         self.find_get_record_stream(signature_calls))
+
+    def test_fetch_revisions_with_deltas_into_pack(self):
+        # See BUG #261339, dev versions of bzr could accidentally create deltas
+        # in revision texts in knit branches (when fetching from packs). So we
+        # ensure that *if* a knit repository has a delta in revisions, that it
+        # gets properly expanded back into a fulltext when stored in the pack
+        # file.
+        tree = self.make_branch_and_tree('source', format='dirstate')
+        target = self.make_repository('target', format='pack-0.92')
+        self.build_tree(['source/file'])
+        tree.set_root_id('root-id')
+        tree.add('file', 'file-id')
+        tree.commit('one', rev_id='rev-one')
+        # Hack the KVF for revisions so that it "accidentally" allows a delta
+        tree.branch.repository.revisions._max_delta_chain = 200
+        tree.commit('two', rev_id='rev-two')
+        source = tree.branch.repository
+        # Ensure that we stored a delta
+        source.lock_read()
+        self.addCleanup(source.unlock)
+        record = source.revisions.get_record_stream([('rev-two',)],
+            'unordered', False).next()
+        self.assertEqual('knit-delta-gz', record.storage_kind)
+        target.fetch(tree.branch.repository, revision_id='rev-two')
+        # The record should get expanded back to a fulltext
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        record = target.revisions.get_record_stream([('rev-two',)],
+            'unordered', False).next()
+        self.assertEqual('knit-ft-gz', record.storage_kind)
+
+    def test_fetch_with_fallback_and_merge(self):
+        builder = self.make_branch_builder('source', format='pack-0.92')
+        builder.start_series()
+        # graph
+        #   A
+        #   |\
+        #   B C
+        #   | |
+        #   | D
+        #   | |
+        #   | E
+        #    \|
+        #     F
+        # A & B are present in the base (stacked-on) repository, A-E are
+        # present in the source.
+        # This reproduces bug #304841
+        # We need a large enough inventory that total size of compressed deltas
+        # is shorter than the size of a compressed fulltext. We have to use
+        # random ids because otherwise the inventory fulltext compresses too
+        # well and the deltas get bigger.
+        to_add = [
+            ('add', ('', 'TREE_ROOT', 'directory', None))]
+        for i in xrange(10):
+            fname = 'file%03d' % (i,)
+            fileid = '%s-%s' % (fname, osutils.rand_chars(64))
+            to_add.append(('add', (fname, fileid, 'file', 'content\n')))
+        builder.build_snapshot('A', None, to_add)
+        builder.build_snapshot('B', ['A'], [])
+        builder.build_snapshot('C', ['A'], [])
+        builder.build_snapshot('D', ['C'], [])
+        builder.build_snapshot('E', ['D'], [])
+        builder.build_snapshot('F', ['E', 'B'], [])
+        builder.finish_series()
+        source_branch = builder.get_branch()
+        source_branch.bzrdir.sprout('base', revision_id='B')
+        target_branch = self.make_branch('target', format='1.6')
+        target_branch.set_stacked_on_url('../base')
+        source = source_branch.repository
+        source.lock_read()
+        self.addCleanup(source.unlock)
+        source.inventories = versionedfile.OrderingVersionedFilesDecorator(
+                        source.inventories,
+                        key_priority={('E',): 1, ('D',): 2, ('C',): 4,
+                                      ('F',): 3})
+        # Ensure that the content is yielded in the proper order, and given as
+        # the expected kinds
+        records = [(record.key, record.storage_kind)
+                   for record in source.inventories.get_record_stream(
+                        [('D',), ('C',), ('E',), ('F',)], 'unordered', False)]
+        self.assertEqual([(('E',), 'knit-delta-gz'), (('D',), 'knit-delta-gz'),
+                          (('F',), 'knit-delta-gz'), (('C',), 'knit-delta-gz')],
+                          records)
+
+        target_branch.lock_write()
+        self.addCleanup(target_branch.unlock)
+        target = target_branch.repository
+        target.fetch(source, revision_id='F')
+        # 'C' should be expanded to a fulltext, but D and E should still be
+        # deltas
+        stream = target.inventories.get_record_stream(
+            [('C',), ('D',), ('E',), ('F',)],
+            'unordered', False)
+        kinds = dict((record.key, record.storage_kind) for record in stream)
+        self.assertEqual({('C',): 'knit-ft-gz', ('D',): 'knit-delta-gz',
+                          ('E',): 'knit-delta-gz', ('F',): 'knit-delta-gz'},
+                         kinds)
+
+
+class Test1To2Fetch(TestCaseWithTransport):
+    """Tests for Model1To2 failure modes"""
+
+    def make_tree_and_repo(self):
+        self.tree = self.make_branch_and_tree('tree', format='pack-0.92')
+        self.repo = self.make_repository('rich-repo', format='rich-root-pack')
+        self.repo.lock_write()
+        self.addCleanup(self.repo.unlock)
+
+    def do_fetch_order_test(self, first, second):
+        """Test that fetch works no matter what the set order of revision is.
+
+        This test depends on the order of items in a set, which is
+        implementation-dependant, so we test A, B and then B, A.
+        """
+        self.make_tree_and_repo()
+        self.tree.commit('Commit 1', rev_id=first)
+        self.tree.commit('Commit 2', rev_id=second)
+        self.repo.fetch(self.tree.branch.repository, second)
+
+    def test_fetch_order_AB(self):
+        """See do_fetch_order_test"""
+        self.do_fetch_order_test('A', 'B')
+
+    def test_fetch_order_BA(self):
+        """See do_fetch_order_test"""
+        self.do_fetch_order_test('B', 'A')
+
+    def get_parents(self, file_id, revision_id):
+        self.repo.lock_read()
+        try:
+            parent_map = self.repo.texts.get_parent_map([(file_id, revision_id)])
+            return parent_map[(file_id, revision_id)]
+        finally:
+            self.repo.unlock()
+
+    def test_fetch_ghosts(self):
+        self.make_tree_and_repo()
+        self.tree.commit('first commit', rev_id='left-parent')
+        self.tree.add_parent_tree_id('ghost-parent')
+        fork = self.tree.bzrdir.sprout('fork', 'null:').open_workingtree()
+        fork.commit('not a ghost', rev_id='not-ghost-parent')
+        self.tree.branch.repository.fetch(fork.branch.repository,
+                                     'not-ghost-parent')
+        self.tree.add_parent_tree_id('not-ghost-parent')
+        self.tree.commit('second commit', rev_id='second-id')
+        self.repo.fetch(self.tree.branch.repository, 'second-id')
+        root_id = self.tree.get_root_id()
+        self.assertEqual(
+            ((root_id, 'left-parent'), (root_id, 'not-ghost-parent')),
+            self.get_parents(root_id, 'second-id'))
+
+    def make_two_commits(self, change_root, fetch_twice):
+        self.make_tree_and_repo()
+        self.tree.commit('first commit', rev_id='first-id')
+        if change_root:
+            self.tree.set_root_id('unique-id')
+        self.tree.commit('second commit', rev_id='second-id')
+        if fetch_twice:
+            self.repo.fetch(self.tree.branch.repository, 'first-id')
+        self.repo.fetch(self.tree.branch.repository, 'second-id')
+
+    def test_fetch_changed_root(self):
+        self.make_two_commits(change_root=True, fetch_twice=False)
+        self.assertEqual((), self.get_parents('unique-id', 'second-id'))
+
+    def test_two_fetch_changed_root(self):
+        self.make_two_commits(change_root=True, fetch_twice=True)
+        self.assertEqual((), self.get_parents('unique-id', 'second-id'))
+
+    def test_two_fetches(self):
+        self.make_two_commits(change_root=False, fetch_twice=True)
+        self.assertEqual((('TREE_ROOT', 'first-id'),),
+            self.get_parents('TREE_ROOT', 'second-id'))