~bzr-pqm/bzr/bzr.dev


Viewing changes to bzrlib/tests/test_fetch.py

  • Committer: Frank Aspell
  • Date: 2009-02-17 11:40:05 UTC
  • mto: (4054.1.1 doc)
  • mto: This revision was merged to the branch mainline in revision 4056.
  • Revision ID: frankaspell@googlemail.com-20090217114005-ojufrp6rqht664um
Fixed typos.

Fixed some typos in the bzr docs using "aspell -l en -c FILENAME".
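
For reference, the interactive spell-check pass described above could be driven over the documentation tree with a short script along these lines (a minimal sketch, not part of this change; the doc/ directory and the .txt suffix are assumptions):

    import os
    import subprocess

    # Walk the documentation tree and open an interactive aspell session for
    # each text file.  'aspell -l en -c FILE' checks FILE against the English
    # dictionary and prompts for a correction at every suspect word.
    for root, dirs, files in os.walk('doc'):
        for name in sorted(files):
            if name.endswith('.txt'):
                subprocess.call(['aspell', '-l', 'en', '-c',
                                 os.path.join(root, name)])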

-# Copyright (C) 2005 by Canonical Ltd
+# Copyright (C) 2005, 2007 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
...
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 
 import os
+import re
 import sys
 
+import bzrlib
+from bzrlib import (
+    bzrdir,
+    errors,
+    osutils,
+    merge,
+    repository,
+    versionedfile,
+    )
 from bzrlib.branch import Branch
 from bzrlib.bzrdir import BzrDir
-from bzrlib.builtins import merge
-import bzrlib.errors
+from bzrlib.repofmt import knitrepo
 from bzrlib.tests import TestCaseWithTransport
-from bzrlib.tests.HTTPTestUtil import TestCaseWithWebserver
+from bzrlib.tests.http_utils import TestCaseWithWebserver
 from bzrlib.tests.test_revision import make_branches
 from bzrlib.trace import mutter
+from bzrlib.upgrade import Convert
 from bzrlib.workingtree import WorkingTree
 
+# These tests are a bit old; please instead add new tests into
+# interrepository_implementations/ so they'll run on all relevant
+# combinations.
+
 
 def has_revision(branch, revision_id):
     return branch.repository.has_revision(revision_id)
...
     self.assertEquals(fetched, 3, "fetched %d instead of 3" % fetched)
     # InstallFailed should be raised if the branch is missing the revision
     # that was requested.
-    self.assertRaises(bzrlib.errors.InstallFailed, br_a3.fetch, br_a2, 'pizza')
-    # InstallFailed should be raised if the branch is missing a revision
-    # from its own revision history
-    br_a2.append_revision('a-b-c')
-    self.assertRaises(bzrlib.errors.InstallFailed, br_a3.fetch, br_a2)
+    self.assertRaises(errors.InstallFailed, br_a3.fetch, br_a2, 'pizza')
 
-    # TODO: jam 20051218 Branch should no longer allow append_revision for revisions
-    #       which don't exist. So this test needs to be rewritten
-    #       RBC 20060403 the way to do this is to uncommit the revision from the
-    #           repository after the commit
+    # TODO: Test trying to fetch from a branch that points to a revision not
+    # actually present in its repository.  Not every branch format allows you
+    # to directly point to such revisions, so it's a bit complicated to
+    # construct.  One way would be to uncommit and gc the revision, but not
+    # every branch supports that.  -- mbp 20070814
 
     #TODO: test that fetch correctly does reweaving when needed. RBC 20051008
     # Note that this means - updating the weave when ghosts are filled in to 
...
 
     def test_fetch(self):
         #highest indices a: 5, b: 7
-        br_a, br_b = make_branches(self)
+        br_a, br_b = make_branches(self, format='dirstate-tags')
         fetch_steps(self, br_a, br_b, br_a)
 
     def test_fetch_self(self):
         wt = self.make_branch_and_tree('br')
         self.assertEqual(wt.branch.fetch(wt.branch), (0, []))
 
 
+    def test_fetch_root_knit(self):
+        """Ensure that knit2.fetch() updates the root knit
+
+        This tests the case where the root has a new revision, but there are no
+        corresponding filename, parent, contents or other changes.
+        """
+        knit1_format = bzrdir.BzrDirMetaFormat1()
+        knit1_format.repository_format = knitrepo.RepositoryFormatKnit1()
+        knit2_format = bzrdir.BzrDirMetaFormat1()
+        knit2_format.repository_format = knitrepo.RepositoryFormatKnit3()
+        # we start with a knit1 repository because that causes the
+        # root revision to change for each commit, even though the content,
+        # parent, name, and other attributes are unchanged.
+        tree = self.make_branch_and_tree('tree', knit1_format)
+        tree.set_root_id('tree-root')
+        tree.commit('rev1', rev_id='rev1')
+        tree.commit('rev2', rev_id='rev2')
+
+        # Now we convert it to a knit2 repository so that it has a root knit
+        Convert(tree.basedir, knit2_format)
+        tree = WorkingTree.open(tree.basedir)
+        branch = self.make_branch('branch', format=knit2_format)
+        branch.pull(tree.branch, stop_revision='rev1')
+        repo = branch.repository
+        repo.lock_read()
+        try:
+            # Make sure fetch retrieved only what we requested
+            self.assertEqual({('tree-root', 'rev1'):()},
+                repo.texts.get_parent_map(
+                    [('tree-root', 'rev1'), ('tree-root', 'rev2')]))
+        finally:
+            repo.unlock()
+        branch.pull(tree.branch)
+        # Make sure that the next revision in the root knit was retrieved,
+        # even though the text, name, parent_id, etc., were unchanged.
+        repo.lock_read()
+        try:
+            # Make sure fetch retrieved only what we requested
+            self.assertEqual({('tree-root', 'rev2'):(('tree-root', 'rev1'),)},
+                repo.texts.get_parent_map([('tree-root', 'rev2')]))
+        finally:
+            repo.unlock()
+
+    def test_fetch_incompatible(self):
+        knit_tree = self.make_branch_and_tree('knit', format='knit')
+        knit3_tree = self.make_branch_and_tree('knit3',
+            format='dirstate-with-subtree')
+        knit3_tree.commit('blah')
+        e = self.assertRaises(errors.IncompatibleRepositories,
+                              knit_tree.branch.fetch, knit3_tree.branch)
+        self.assertContainsRe(str(e),
+            r"(?m).*/knit.*\nis not compatible with\n.*/knit3/.*\n"
+            r"different rich-root support")
+
 
 class TestMergeFetch(TestCaseWithTransport):
 
...
         wt2 = self.make_branch_and_tree('br2')
         br2 = wt2.branch
         wt2.commit(message='rev 2-1', rev_id='2-1')
-        merge(other_revision=['br1', -1], base_revision=['br1', 0],
-              this_dir='br2')
+        wt2.merge_from_branch(br1, from_revision='null:')
         self._check_revs_present(br2)
 
     def test_merge_fetches(self):
...
         dir_2 = br1.bzrdir.sprout('br2')
         br2 = dir_2.open_branch()
         wt1.commit(message='rev 1-2', rev_id='1-2')
-        dir_2.open_workingtree().commit(message='rev 2-1', rev_id='2-1')
-        merge(other_revision=['br1', -1], base_revision=[None, None], 
-              this_dir='br2')
+        wt2 = dir_2.open_workingtree()
+        wt2.commit(message='rev 2-1', rev_id='2-1')
+        wt2.merge_from_branch(br1)
         self._check_revs_present(br2)
 
     def _check_revs_present(self, br2):
...
     def test_merge_fetches_file_history(self):
         """Merge brings across file histories"""
         br2 = Branch.open('br2')
-        merge(other_revision=['br1', -1], base_revision=[None, None], 
-              this_dir='br2')
+        br1 = Branch.open('br1')
+        wt2 = WorkingTree.open('br2').merge_from_branch(br1)
+        br2.lock_read()
+        self.addCleanup(br2.unlock)
         for rev_id, text in [('1-2', 'original from 1\n'),
                              ('1-3', 'agreement\n'),
                              ('2-1', 'contents in 2\n'),
...
 
     def _count_log_matches(self, target, logs):
         """Count the number of times the target file pattern was fetched in an http log"""
-        log_pattern = '%s HTTP/1.1" 200 - "-" "bzr/%s' % \
-            (target, bzrlib.__version__)
+        get_succeeds_re = re.compile(
+            '.*"GET .*%s HTTP/1.1" 20[06] - "-" "bzr/%s' %
+            (     target,                    bzrlib.__version__))
         c = 0
         for line in logs:
-            # TODO: perhaps use a regexp instead so we can match more
-            # precisely?
-            if line.find(log_pattern) > -1:
+            if get_succeeds_re.match(line):
                 c += 1
         return c
 
     def test_weaves_are_retrieved_once(self):
         self.build_tree(("source/", "source/file", "target/"))
-        wt = self.make_branch_and_tree('source')
+        # This test depends on knit data storage.
+        wt = self.make_branch_and_tree('source', format='dirstate-tags')
         branch = wt.branch
         wt.add(["file"], ["id"])
         wt.commit("added file")
-        print >>open("source/file", 'w'), "blah"
+        open("source/file", 'w').write("blah\n")
         wt.commit("changed file")
         target = BzrDir.create_branch_and_repo("target/")
         source = Branch.open(self.get_readonly_url("source/"))
         self.assertEqual(target.fetch(source), (2, []))
-        log_pattern = '%%s HTTP/1.1" 200 - "-" "bzr/%s' % bzrlib.__version__
         # this is the path to the literal file. As format changes 
         # occur it needs to be updated. FIXME: ask the store for the
         # path.
...
         # unfortunately this log entry is branch format specific. We could 
         # factor out the 'what files does this format use' to a method on the 
         # repository, which would let us do this generically. RBC 20060419
+        # RBC 20080408: Or perhaps we can assert that no files are fully read
+        # twice?
         self.assertEqual(1, self._count_log_matches('/ce/id.kndx', http_logs))
         self.assertEqual(1, self._count_log_matches('/ce/id.knit', http_logs))
         self.assertEqual(1, self._count_log_matches('inventory.kndx', http_logs))
         # this r-h check test will prevent regressions, but it currently already 
         # passes, before the patch to cache-rh is applied :[
-        self.assertEqual(1, self._count_log_matches('revision-history', http_logs))
+        self.assertTrue(1 >= self._count_log_matches('revision-history',
+                                                     http_logs))
+        self.assertTrue(1 >= self._count_log_matches('last-revision',
+                                                     http_logs))
         # FIXME naughty poking in there.
         self.get_readonly_server().logs = []
-        # check there is nothing more to fetch
-        source = Branch.open(self.get_readonly_url("source/"))
+        # check there is nothing more to fetch.  We take care to re-use the
+        # existing transport so that the request logs we're about to examine
+        # aren't cluttered with redundant probes for a smart server.
+        # XXX: Perhaps this further parameterisation: test http with smart
+        # server, and test http without smart server?
+        source = Branch.open(
+            self.get_readonly_url("source/"),
+            possible_transports=[source.bzrdir.root_transport])
         self.assertEqual(target.fetch(source), (0, []))
         # should make just two requests
         http_logs = self.get_readonly_server().logs
...
         self.log('\n'.join(http_logs))
         self.assertEqual(1, self._count_log_matches('branch-format', http_logs))
         self.assertEqual(1, self._count_log_matches('branch/format', http_logs))
-        self.assertEqual(1, self._count_log_matches('repository/format', http_logs))
-        self.assertEqual(1, self._count_log_matches('revision-history', http_logs))
+        self.assertEqual(1, self._count_log_matches('repository/format',
+            http_logs))
+        self.assertTrue(1 >= self._count_log_matches('revision-history',
+                                                     http_logs))
+        self.assertTrue(1 >= self._count_log_matches('last-revision',
+                                                     http_logs))
         self.assertEqual(4, len(http_logs))
 
+
+
+class TestKnitToPackFetch(TestCaseWithTransport):
+
+    def find_get_record_stream(self, calls):
+        """In a list of calls, find 'get_record_stream' calls.
+
+        This also ensures that there is only one get_record_stream call.
+        """
+        get_record_call = None
+        for call in calls:
+            if call[0] == 'get_record_stream':
+                self.assertIs(None, get_record_call,
+                              "there should only be one call to"
+                              " get_record_stream")
+                get_record_call = call
+        self.assertIsNot(None, get_record_call,
+                         "there should be exactly one call to "
+                         " get_record_stream")
+        return get_record_call
+
+    def test_fetch_with_deltas_no_delta_closure(self):
+        tree = self.make_branch_and_tree('source', format='dirstate')
+        target = self.make_repository('target', format='pack-0.92')
+        self.build_tree(['source/file'])
+        tree.set_root_id('root-id')
+        tree.add('file', 'file-id')
+        tree.commit('one', rev_id='rev-one')
+        source = tree.branch.repository
+        source.texts = versionedfile.RecordingVersionedFilesDecorator(
+                        source.texts)
+        source.signatures = versionedfile.RecordingVersionedFilesDecorator(
+                        source.signatures)
+        source.revisions = versionedfile.RecordingVersionedFilesDecorator(
+                        source.revisions)
+        source.inventories = versionedfile.RecordingVersionedFilesDecorator(
+                        source.inventories)
+        # precondition
+        self.assertTrue(target._fetch_uses_deltas)
+        target.fetch(source, revision_id='rev-one')
+        self.assertEqual(('get_record_stream', [('file-id', 'rev-one')],
+                          target._fetch_order, False),
+                         self.find_get_record_stream(source.texts.calls))
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+                          target._fetch_order, False),
+                         self.find_get_record_stream(source.inventories.calls))
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+                          target._fetch_order, False),
+                         self.find_get_record_stream(source.revisions.calls))
+        # XXX: Signatures is special, and slightly broken. The
+        # standard item_keys_introduced_by actually does a lookup for every
+        # signature to see if it exists, rather than waiting to do them all at
+        # once at the end. The fetch code then does an all-at-once and just
+        # allows for some of them to be missing.
+        # So we know there will be extra calls, but the *last* one is the one
+        # we care about.
+        signature_calls = source.signatures.calls[-1:]
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+                          target._fetch_order, False),
+                         self.find_get_record_stream(signature_calls))
+
+    def test_fetch_no_deltas_with_delta_closure(self):
+        tree = self.make_branch_and_tree('source', format='dirstate')
+        target = self.make_repository('target', format='pack-0.92')
+        self.build_tree(['source/file'])
+        tree.set_root_id('root-id')
+        tree.add('file', 'file-id')
+        tree.commit('one', rev_id='rev-one')
+        source = tree.branch.repository
+        source.texts = versionedfile.RecordingVersionedFilesDecorator(
+                        source.texts)
+        source.signatures = versionedfile.RecordingVersionedFilesDecorator(
+                        source.signatures)
+        source.revisions = versionedfile.RecordingVersionedFilesDecorator(
+                        source.revisions)
+        source.inventories = versionedfile.RecordingVersionedFilesDecorator(
+                        source.inventories)
+        target._fetch_uses_deltas = False
+        target.fetch(source, revision_id='rev-one')
+        self.assertEqual(('get_record_stream', [('file-id', 'rev-one')],
+                          target._fetch_order, True),
+                         self.find_get_record_stream(source.texts.calls))
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+                          target._fetch_order, True),
+                         self.find_get_record_stream(source.inventories.calls))
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+                          target._fetch_order, True),
+                         self.find_get_record_stream(source.revisions.calls))
+        # XXX: Signatures is special, and slightly broken. The
+        # standard item_keys_introduced_by actually does a lookup for every
+        # signature to see if it exists, rather than waiting to do them all at
+        # once at the end. The fetch code then does an all-at-once and just
+        # allows for some of them to be missing.
+        # So we know there will be extra calls, but the *last* one is the one
+        # we care about.
+        signature_calls = source.signatures.calls[-1:]
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+                          target._fetch_order, True),
+                         self.find_get_record_stream(signature_calls))
+
+    def test_fetch_revisions_with_deltas_into_pack(self):
+        # See BUG #261339, dev versions of bzr could accidentally create deltas
+        # in revision texts in knit branches (when fetching from packs). So we
+        # ensure that *if* a knit repository has a delta in revisions, that it
+        # gets properly expanded back into a fulltext when stored in the pack
+        # file.
+        tree = self.make_branch_and_tree('source', format='dirstate')
+        target = self.make_repository('target', format='pack-0.92')
+        self.build_tree(['source/file'])
+        tree.set_root_id('root-id')
+        tree.add('file', 'file-id')
+        tree.commit('one', rev_id='rev-one')
+        # Hack the KVF for revisions so that it "accidentally" allows a delta
+        tree.branch.repository.revisions._max_delta_chain = 200
+        tree.commit('two', rev_id='rev-two')
+        source = tree.branch.repository
+        # Ensure that we stored a delta
+        source.lock_read()
+        self.addCleanup(source.unlock)
+        record = source.revisions.get_record_stream([('rev-two',)],
+            'unordered', False).next()
+        self.assertEqual('knit-delta-gz', record.storage_kind)
+        target.fetch(tree.branch.repository, revision_id='rev-two')
+        # The record should get expanded back to a fulltext
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        record = target.revisions.get_record_stream([('rev-two',)],
+            'unordered', False).next()
+        self.assertEqual('knit-ft-gz', record.storage_kind)
+
+    def test_fetch_with_fallback_and_merge(self):
+        builder = self.make_branch_builder('source', format='pack-0.92')
+        builder.start_series()
+        # graph
+        #   A
+        #   |\
+        #   B C
+        #   | |
+        #   | D
+        #   | |
+        #   | E
+        #    \|
+        #     F
+        # A & B are present in the base (stacked-on) repository, A-E are
+        # present in the source.
+        # This reproduces bug #304841
+        # We need a large enough inventory that total size of compressed deltas
+        # is shorter than the size of a compressed fulltext. We have to use
+        # random ids because otherwise the inventory fulltext compresses too
+        # well and the deltas get bigger.
+        to_add = [
+            ('add', ('', 'TREE_ROOT', 'directory', None))]
+        for i in xrange(10):
+            fname = 'file%03d' % (i,)
+            fileid = '%s-%s' % (fname, osutils.rand_chars(64))
+            to_add.append(('add', (fname, fileid, 'file', 'content\n')))
+        builder.build_snapshot('A', None, to_add)
+        builder.build_snapshot('B', ['A'], [])
+        builder.build_snapshot('C', ['A'], [])
+        builder.build_snapshot('D', ['C'], [])
+        builder.build_snapshot('E', ['D'], [])
+        builder.build_snapshot('F', ['E', 'B'], [])
+        builder.finish_series()
+        source_branch = builder.get_branch()
+        source_branch.bzrdir.sprout('base', revision_id='B')
+        target_branch = self.make_branch('target', format='1.6')
+        target_branch.set_stacked_on_url('../base')
+        source = source_branch.repository
+        source.lock_read()
+        self.addCleanup(source.unlock)
+        source.inventories = versionedfile.OrderingVersionedFilesDecorator(
+                        source.inventories,
+                        key_priority={('E',): 1, ('D',): 2, ('C',): 4,
+                                      ('F',): 3})
+        # Ensure that the content is yielded in the proper order, and given as
+        # the expected kinds
+        records = [(record.key, record.storage_kind)
+                   for record in source.inventories.get_record_stream(
+                        [('D',), ('C',), ('E',), ('F',)], 'unordered', False)]
+        self.assertEqual([(('E',), 'knit-delta-gz'), (('D',), 'knit-delta-gz'),
+                          (('F',), 'knit-delta-gz'), (('C',), 'knit-delta-gz')],
+                          records)
+
+        target_branch.lock_write()
+        self.addCleanup(target_branch.unlock)
+        target = target_branch.repository
+        target.fetch(source, revision_id='F')
+        # 'C' should be expanded to a fulltext, but D and E should still be
+        # deltas
+        stream = target.inventories.get_record_stream(
+            [('C',), ('D',), ('E',), ('F',)],
+            'unordered', False)
+        kinds = dict((record.key, record.storage_kind) for record in stream)
+        self.assertEqual({('C',): 'knit-ft-gz', ('D',): 'knit-delta-gz',
+                          ('E',): 'knit-delta-gz', ('F',): 'knit-delta-gz'},
+                         kinds)
+
+
530
class Test1To2Fetch(TestCaseWithTransport):
 
531
    """Tests for Model1To2 failure modes"""
 
532
 
 
533
    def make_tree_and_repo(self):
 
534
        self.tree = self.make_branch_and_tree('tree', format='pack-0.92')
 
535
        self.repo = self.make_repository('rich-repo', format='rich-root-pack')
 
536
        self.repo.lock_write()
 
537
        self.addCleanup(self.repo.unlock)
 
538
 
 
539
    def do_fetch_order_test(self, first, second):
 
540
        """Test that fetch works no matter what the set order of revision is.
 
541
 
 
542
        This test depends on the order of items in a set, which is
 
543
        implementation-dependant, so we test A, B and then B, A.
 
544
        """
 
545
        self.make_tree_and_repo()
 
546
        self.tree.commit('Commit 1', rev_id=first)
 
547
        self.tree.commit('Commit 2', rev_id=second)
 
548
        self.repo.fetch(self.tree.branch.repository, second)
 
549
 
 
550
    def test_fetch_order_AB(self):
 
551
        """See do_fetch_order_test"""
 
552
        self.do_fetch_order_test('A', 'B')
 
553
 
 
554
    def test_fetch_order_BA(self):
 
555
        """See do_fetch_order_test"""
 
556
        self.do_fetch_order_test('B', 'A')
 
557
 
 
558
    def get_parents(self, file_id, revision_id):
 
559
        self.repo.lock_read()
 
560
        try:
 
561
            parent_map = self.repo.texts.get_parent_map([(file_id, revision_id)])
 
562
            return parent_map[(file_id, revision_id)]
 
563
        finally:
 
564
            self.repo.unlock()
 
565
 
 
566
    def test_fetch_ghosts(self):
 
567
        self.make_tree_and_repo()
 
568
        self.tree.commit('first commit', rev_id='left-parent')
 
569
        self.tree.add_parent_tree_id('ghost-parent')
 
570
        fork = self.tree.bzrdir.sprout('fork', 'null:').open_workingtree()
 
571
        fork.commit('not a ghost', rev_id='not-ghost-parent')
 
572
        self.tree.branch.repository.fetch(fork.branch.repository,
 
573
                                     'not-ghost-parent')
 
574
        self.tree.add_parent_tree_id('not-ghost-parent')
 
575
        self.tree.commit('second commit', rev_id='second-id')
 
576
        self.repo.fetch(self.tree.branch.repository, 'second-id')
 
577
        root_id = self.tree.get_root_id()
 
578
        self.assertEqual(
 
579
            ((root_id, 'left-parent'), (root_id, 'ghost-parent'),
 
580
             (root_id, 'not-ghost-parent')),
 
581
            self.get_parents(root_id, 'second-id'))
 
582
 
 
583
    def make_two_commits(self, change_root, fetch_twice):
 
584
        self.make_tree_and_repo()
 
585
        self.tree.commit('first commit', rev_id='first-id')
 
586
        if change_root:
 
587
            self.tree.set_root_id('unique-id')
 
588
        self.tree.commit('second commit', rev_id='second-id')
 
589
        if fetch_twice:
 
590
            self.repo.fetch(self.tree.branch.repository, 'first-id')
 
591
        self.repo.fetch(self.tree.branch.repository, 'second-id')
 
592
 
 
593
    def test_fetch_changed_root(self):
 
594
        self.make_two_commits(change_root=True, fetch_twice=False)
 
595
        self.assertEqual((), self.get_parents('unique-id', 'second-id'))
 
596
 
 
597
    def test_two_fetch_changed_root(self):
 
598
        self.make_two_commits(change_root=True, fetch_twice=True)
 
599
        self.assertEqual((), self.get_parents('unique-id', 'second-id'))
 
600
 
 
601
    def test_two_fetches(self):
 
602
        self.make_two_commits(change_root=False, fetch_twice=True)
 
603
        self.assertEqual((('TREE_ROOT', 'first-id'),),
 
604
            self.get_parents('TREE_ROOT', 'second-id'))