~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/tests/test_fetch.py

  • Committer: Jelmer Vernooij
  • Date: 2009-01-28 18:42:55 UTC
  • mto: This revision was merged to the branch mainline in revision 3968.
  • Revision ID: jelmer@samba.org-20090128184255-bdmklkvm83ltk191
Update NEWS

@@ -1,4 +1,4 @@
-# Copyright (C) 2005 Canonical Ltd
+# Copyright (C) 2005, 2007 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -18,19 +18,29 @@
 import re
 import sys
 
-from bzrlib import bzrdir, repository
+import bzrlib
+from bzrlib import (
+    bzrdir,
+    errors,
+    osutils,
+    merge,
+    repository,
+    versionedfile,
+    )
 from bzrlib.branch import Branch
 from bzrlib.bzrdir import BzrDir
-from bzrlib.builtins import merge
-import bzrlib.errors
 from bzrlib.repofmt import knitrepo
 from bzrlib.tests import TestCaseWithTransport
-from bzrlib.tests.HTTPTestUtil import TestCaseWithWebserver
+from bzrlib.tests.http_utils import TestCaseWithWebserver
 from bzrlib.tests.test_revision import make_branches
 from bzrlib.trace import mutter
 from bzrlib.upgrade import Convert
 from bzrlib.workingtree import WorkingTree
 
+# These tests are a bit old; please instead add new tests into
+# interrepository_implementations/ so they'll run on all relevant
+# combinations.
+
 
 def has_revision(branch, revision_id):
     return branch.repository.has_revision(revision_id)
@@ -90,18 +100,13 @@
     self.assertEquals(fetched, 3, "fetched %d instead of 3" % fetched)
     # InstallFailed should be raised if the branch is missing the revision
     # that was requested.
-    self.assertRaises(bzrlib.errors.InstallFailed, br_a3.fetch, br_a2, 'pizza')
-    # InstallFailed should be raised if the branch is missing a revision
-    # from its own revision history
-    br_a2.append_revision('a-b-c')
-    self.assertRaises(bzrlib.errors.InstallFailed, br_a3.fetch, br_a2)
+    self.assertRaises(errors.InstallFailed, br_a3.fetch, br_a2, 'pizza')
 
-    # TODO: ADHB 20070116 Perhaps set_last_revision shouldn't accept
-    #       revisions which are not present?  In that case, this test
-    #       must be rewritten.
-    #
-    #       RBC 20060403 the way to do this is to uncommit the revision from
-    #       the repository after the commit
+    # TODO: Test trying to fetch from a branch that points to a revision not
+    # actually present in its repository.  Not every branch format allows you
+    # to directly point to such revisions, so it's a bit complicated to
+    # construct.  One way would be to uncommit and gc the revision, but not
+    # every branch supports that.  -- mbp 20070814
 
    #TODO: test that fetch correctly does reweaving when needed. RBC 20051008
     # Note that this means - updating the weave when ghosts are filled in to
@@ -112,7 +117,7 @@
 
     def test_fetch(self):
         #highest indices a: 5, b: 7
-        br_a, br_b = make_branches(self)
+        br_a, br_b = make_branches(self, format='dirstate-tags')
         fetch_steps(self, br_a, br_b, br_a)
 
     def test_fetch_self(self):
@@ -128,7 +133,7 @@
         knit1_format = bzrdir.BzrDirMetaFormat1()
         knit1_format.repository_format = knitrepo.RepositoryFormatKnit1()
         knit2_format = bzrdir.BzrDirMetaFormat1()
-        knit2_format.repository_format = knitrepo.RepositoryFormatKnit2()
+        knit2_format.repository_format = knitrepo.RepositoryFormatKnit3()
         # we start with a knit1 repository because that causes the
         # root revision to change for each commit, even though the content,
         # parent, name, and other attributes are unchanged.
@@ -143,17 +148,35 @@
         branch = self.make_branch('branch', format=knit2_format)
         branch.pull(tree.branch, stop_revision='rev1')
         repo = branch.repository
-        root_knit = repo.weave_store.get_weave('tree-root',
-                                                repo.get_transaction())
-        # Make sure fetch retrieved only what we requested
-        self.assertTrue('rev1' in root_knit)
-        self.assertTrue('rev2' not in root_knit)
+        repo.lock_read()
+        try:
+            # Make sure fetch retrieved only what we requested
+            self.assertEqual({('tree-root', 'rev1'):()},
+                repo.texts.get_parent_map(
+                    [('tree-root', 'rev1'), ('tree-root', 'rev2')]))
+        finally:
+            repo.unlock()
         branch.pull(tree.branch)
-        root_knit = repo.weave_store.get_weave('tree-root',
-                                                repo.get_transaction())
         # Make sure that the next revision in the root knit was retrieved,
         # even though the text, name, parent_id, etc., were unchanged.
-        self.assertTrue('rev2' in root_knit)
+        repo.lock_read()
+        try:
+            # Make sure fetch retrieved only what we requested
+            self.assertEqual({('tree-root', 'rev2'):(('tree-root', 'rev1'),)},
+                repo.texts.get_parent_map([('tree-root', 'rev2')]))
+        finally:
+            repo.unlock()
+
+    def test_fetch_incompatible(self):
+        knit_tree = self.make_branch_and_tree('knit', format='knit')
+        knit3_tree = self.make_branch_and_tree('knit3',
+            format='dirstate-with-subtree')
+        knit3_tree.commit('blah')
+        e = self.assertRaises(errors.IncompatibleRepositories,
+                              knit_tree.branch.fetch, knit3_tree.branch)
+        self.assertContainsRe(str(e),
+            r"(?m).*/knit.*\nis not compatible with\n.*/knit3/.*\n"
+            r"different rich-root support")
 
 
 class TestMergeFetch(TestCaseWithTransport):
@@ -167,8 +190,7 @@
         wt2 = self.make_branch_and_tree('br2')
         br2 = wt2.branch
         wt2.commit(message='rev 2-1', rev_id='2-1')
-        merge(other_revision=['br1', -1], base_revision=['br1', 0],
-              this_dir='br2')
+        wt2.merge_from_branch(br1, from_revision='null:')
         self._check_revs_present(br2)
 
     def test_merge_fetches(self):
@@ -179,9 +201,9 @@
         dir_2 = br1.bzrdir.sprout('br2')
         br2 = dir_2.open_branch()
         wt1.commit(message='rev 1-2', rev_id='1-2')
-        dir_2.open_workingtree().commit(message='rev 2-1', rev_id='2-1')
-        merge(other_revision=['br1', -1], base_revision=[None, None],
-              this_dir='br2')
+        wt2 = dir_2.open_workingtree()
+        wt2.commit(message='rev 2-1', rev_id='2-1')
+        wt2.merge_from_branch(br1)
         self._check_revs_present(br2)
 
     def _check_revs_present(self, br2):
@@ -216,8 +238,10 @@
     def test_merge_fetches_file_history(self):
         """Merge brings across file histories"""
         br2 = Branch.open('br2')
-        merge(other_revision=['br1', -1], base_revision=[None, None],
-              this_dir='br2')
+        br1 = Branch.open('br1')
+        wt2 = WorkingTree.open('br2').merge_from_branch(br1)
+        br2.lock_read()
+        self.addCleanup(br2.unlock)
         for rev_id, text in [('1-2', 'original from 1\n'),
                              ('1-3', 'agreement\n'),
                              ('2-1', 'contents in 2\n'),
@@ -251,11 +275,12 @@
 
     def test_weaves_are_retrieved_once(self):
         self.build_tree(("source/", "source/file", "target/"))
-        wt = self.make_branch_and_tree('source')
+        # This test depends on knit dasta storage.
+        wt = self.make_branch_and_tree('source', format='dirstate-tags')
         branch = wt.branch
         wt.add(["file"], ["id"])
         wt.commit("added file")
-        print >>open("source/file", 'w'), "blah"
+        open("source/file", 'w').write("blah\n")
        wt.commit("changed file")
         target = BzrDir.create_branch_and_repo("target/")
         source = Branch.open(self.get_readonly_url("source/"))
@@ -269,6 +294,8 @@
         # unfortunately this log entry is branch format specific. We could
         # factor out the 'what files does this format use' to a method on the
         # repository, which would let us to this generically. RBC 20060419
+        # RBC 20080408: Or perhaps we can assert that no files are fully read
+        # twice?
         self.assertEqual(1, self._count_log_matches('/ce/id.kndx', http_logs))
         self.assertEqual(1, self._count_log_matches('/ce/id.knit', http_logs))
         self.assertEqual(1, self._count_log_matches('inventory.kndx', http_logs))
@@ -280,8 +307,14 @@
                                                      http_logs))
         # FIXME naughty poking in there.
         self.get_readonly_server().logs = []
-        # check there is nothing more to fetch
-        source = Branch.open(self.get_readonly_url("source/"))
+        # check there is nothing more to fetch.  We take care to re-use the
+        # existing transport so that the request logs we're about to examine
+        # aren't cluttered with redundant probes for a smart server.
+        # XXX: Perhaps this further parameterisation: test http with smart
+        # server, and test http without smart server?
+        source = Branch.open(
+            self.get_readonly_url("source/"),
+            possible_transports=[source.bzrdir.root_transport])
         self.assertEqual(target.fetch(source), (0, []))
         # should make just two requests
         http_logs = self.get_readonly_server().logs
@@ -289,9 +322,283 @@
         self.log('\n'.join(http_logs))
         self.assertEqual(1, self._count_log_matches('branch-format', http_logs))
         self.assertEqual(1, self._count_log_matches('branch/format', http_logs))
-        self.assertEqual(1, self._count_log_matches('repository/format', http_logs))
+        self.assertEqual(1, self._count_log_matches('repository/format',
+            http_logs))
         self.assertTrue(1 >= self._count_log_matches('revision-history',
                                                      http_logs))
         self.assertTrue(1 >= self._count_log_matches('last-revision',
                                                      http_logs))
         self.assertEqual(4, len(http_logs))
+
+
+class TestKnitToPackFetch(TestCaseWithTransport):
+
+    def find_get_record_stream(self, calls):
+        """In a list of calls, find 'get_record_stream' calls.
+
+        This also ensures that there is only one get_record_stream call.
+        """
+        get_record_call = None
+        for call in calls:
+            if call[0] == 'get_record_stream':
+                self.assertIs(None, get_record_call,
+                              "there should only be one call to"
+                              " get_record_stream")
+                get_record_call = call
+        self.assertIsNot(None, get_record_call,
+                         "there should be exactly one call to "
+                         " get_record_stream")
+        return get_record_call
+
+    def test_fetch_with_deltas_no_delta_closure(self):
+        tree = self.make_branch_and_tree('source', format='dirstate')
+        target = self.make_repository('target', format='pack-0.92')
+        self.build_tree(['source/file'])
+        tree.set_root_id('root-id')
+        tree.add('file', 'file-id')
+        tree.commit('one', rev_id='rev-one')
+        source = tree.branch.repository
+        source.texts = versionedfile.RecordingVersionedFilesDecorator(
+                        source.texts)
+        source.signatures = versionedfile.RecordingVersionedFilesDecorator(
+                        source.signatures)
+        source.revisions = versionedfile.RecordingVersionedFilesDecorator(
+                        source.revisions)
+        source.inventories = versionedfile.RecordingVersionedFilesDecorator(
+                        source.inventories)
+        # precondition
+        self.assertTrue(target._fetch_uses_deltas)
+        target.fetch(source, revision_id='rev-one')
+        self.assertEqual(('get_record_stream', [('file-id', 'rev-one')],
+                          target._fetch_order, False),
+                         self.find_get_record_stream(source.texts.calls))
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+                          target._fetch_order, False),
+                         self.find_get_record_stream(source.inventories.calls))
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+                          target._fetch_order, False),
+                         self.find_get_record_stream(source.revisions.calls))
+        # XXX: Signatures is special, and slightly broken. The
+        # standard item_keys_introduced_by actually does a lookup for every
+        # signature to see if it exists, rather than waiting to do them all at
+        # once at the end. The fetch code then does an all-at-once and just
+        # allows for some of them to be missing.
+        # So we know there will be extra calls, but the *last* one is the one
+        # we care about.
+        signature_calls = source.signatures.calls[-1:]
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+                          target._fetch_order, False),
+                         self.find_get_record_stream(signature_calls))
+
+    def test_fetch_no_deltas_with_delta_closure(self):
+        tree = self.make_branch_and_tree('source', format='dirstate')
+        target = self.make_repository('target', format='pack-0.92')
+        self.build_tree(['source/file'])
+        tree.set_root_id('root-id')
+        tree.add('file', 'file-id')
+        tree.commit('one', rev_id='rev-one')
+        source = tree.branch.repository
+        source.texts = versionedfile.RecordingVersionedFilesDecorator(
+                        source.texts)
+        source.signatures = versionedfile.RecordingVersionedFilesDecorator(
+                        source.signatures)
+        source.revisions = versionedfile.RecordingVersionedFilesDecorator(
+                        source.revisions)
+        source.inventories = versionedfile.RecordingVersionedFilesDecorator(
+                        source.inventories)
+        target._fetch_uses_deltas = False
+        target.fetch(source, revision_id='rev-one')
+        self.assertEqual(('get_record_stream', [('file-id', 'rev-one')],
+                          target._fetch_order, True),
+                         self.find_get_record_stream(source.texts.calls))
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+                          target._fetch_order, True),
+                         self.find_get_record_stream(source.inventories.calls))
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+                          target._fetch_order, True),
+                         self.find_get_record_stream(source.revisions.calls))
+        # XXX: Signatures is special, and slightly broken. The
+        # standard item_keys_introduced_by actually does a lookup for every
+        # signature to see if it exists, rather than waiting to do them all at
+        # once at the end. The fetch code then does an all-at-once and just
+        # allows for some of them to be missing.
+        # So we know there will be extra calls, but the *last* one is the one
+        # we care about.
+        signature_calls = source.signatures.calls[-1:]
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+                          target._fetch_order, True),
+                         self.find_get_record_stream(signature_calls))
+
+    def test_fetch_revisions_with_deltas_into_pack(self):
+        # See BUG #261339, dev versions of bzr could accidentally create deltas
+        # in revision texts in knit branches (when fetching from packs). So we
+        # ensure that *if* a knit repository has a delta in revisions, that it
+        # gets properly expanded back into a fulltext when stored in the pack
+        # file.
+        tree = self.make_branch_and_tree('source', format='dirstate')
+        target = self.make_repository('target', format='pack-0.92')
+        self.build_tree(['source/file'])
+        tree.set_root_id('root-id')
+        tree.add('file', 'file-id')
+        tree.commit('one', rev_id='rev-one')
+        # Hack the KVF for revisions so that it "accidentally" allows a delta
+        tree.branch.repository.revisions._max_delta_chain = 200
+        tree.commit('two', rev_id='rev-two')
+        source = tree.branch.repository
+        # Ensure that we stored a delta
+        source.lock_read()
+        self.addCleanup(source.unlock)
+        record = source.revisions.get_record_stream([('rev-two',)],
+            'unordered', False).next()
+        self.assertEqual('knit-delta-gz', record.storage_kind)
+        target.fetch(tree.branch.repository, revision_id='rev-two')
+        # The record should get expanded back to a fulltext
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        record = target.revisions.get_record_stream([('rev-two',)],
+            'unordered', False).next()
+        self.assertEqual('knit-ft-gz', record.storage_kind)
+
+    def test_fetch_with_fallback_and_merge(self):
+        builder = self.make_branch_builder('source', format='pack-0.92')
+        builder.start_series()
+        # graph
+        #   A
+        #   |\
+        #   B C
+        #   | |
+        #   | D
+        #   | |
+        #   | E
+        #    \|
+        #     F
+        # A & B are present in the base (stacked-on) repository, A-E are
+        # present in the source.
+        # This reproduces bug #304841
+        # We need a large enough inventory that total size of compressed deltas
+        # is shorter than the size of a compressed fulltext. We have to use
+        # random ids because otherwise the inventory fulltext compresses too
+        # well and the deltas get bigger.
+        to_add = [
+            ('add', ('', 'TREE_ROOT', 'directory', None))]
+        for i in xrange(10):
+            fname = 'file%03d' % (i,)
+            fileid = '%s-%s' % (fname, osutils.rand_chars(64))
+            to_add.append(('add', (fname, fileid, 'file', 'content\n')))
+        builder.build_snapshot('A', None, to_add)
+        builder.build_snapshot('B', ['A'], [])
+        builder.build_snapshot('C', ['A'], [])
+        builder.build_snapshot('D', ['C'], [])
+        builder.build_snapshot('E', ['D'], [])
+        builder.build_snapshot('F', ['E', 'B'], [])
+        builder.finish_series()
+        source_branch = builder.get_branch()
+        source_branch.bzrdir.sprout('base', revision_id='B')
+        target_branch = self.make_branch('target', format='1.6')
+        target_branch.set_stacked_on_url('../base')
+        source = source_branch.repository
+        source.lock_read()
+        self.addCleanup(source.unlock)
+        source.inventories = versionedfile.OrderingVersionedFilesDecorator(
+                        source.inventories,
+                        key_priority={('E',): 1, ('D',): 2, ('C',): 4,
+                                      ('F',): 3})
+        # Ensure that the content is yielded in the proper order, and given as
+        # the expected kinds
+        records = [(record.key, record.storage_kind)
+                   for record in source.inventories.get_record_stream(
+                        [('D',), ('C',), ('E',), ('F',)], 'unordered', False)]
+        self.assertEqual([(('E',), 'knit-delta-gz'), (('D',), 'knit-delta-gz'),
+                          (('F',), 'knit-delta-gz'), (('C',), 'knit-delta-gz')],
+                          records)
+
+        target_branch.lock_write()
+        self.addCleanup(target_branch.unlock)
+        target = target_branch.repository
+        target.fetch(source, revision_id='F')
+        # 'C' should be expanded to a fulltext, but D and E should still be
+        # deltas
+        stream = target.inventories.get_record_stream(
+            [('C',), ('D',), ('E',), ('F',)],
+            'unordered', False)
+        kinds = dict((record.key, record.storage_kind) for record in stream)
+        self.assertEqual({('C',): 'knit-ft-gz', ('D',): 'knit-delta-gz',
+                          ('E',): 'knit-delta-gz', ('F',): 'knit-delta-gz'},
+                         kinds)
+
+
+class Test1To2Fetch(TestCaseWithTransport):
+    """Tests for Model1To2 failure modes"""
+
+    def make_tree_and_repo(self):
+        self.tree = self.make_branch_and_tree('tree', format='pack-0.92')
+        self.repo = self.make_repository('rich-repo', format='rich-root-pack')
+        self.repo.lock_write()
+        self.addCleanup(self.repo.unlock)
+
+    def do_fetch_order_test(self, first, second):
+        """Test that fetch works no matter what the set order of revision is.
+
+        This test depends on the order of items in a set, which is
+        implementation-dependant, so we test A, B and then B, A.
+        """
+        self.make_tree_and_repo()
+        self.tree.commit('Commit 1', rev_id=first)
+        self.tree.commit('Commit 2', rev_id=second)
+        self.repo.fetch(self.tree.branch.repository, second)
+
+    def test_fetch_order_AB(self):
+        """See do_fetch_order_test"""
+        self.do_fetch_order_test('A', 'B')
+
+    def test_fetch_order_BA(self):
+        """See do_fetch_order_test"""
+        self.do_fetch_order_test('B', 'A')
+
+    def get_parents(self, file_id, revision_id):
+        self.repo.lock_read()
+        try:
+            parent_map = self.repo.texts.get_parent_map([(file_id, revision_id)])
+            return parent_map[(file_id, revision_id)]
+        finally:
+            self.repo.unlock()
+
+    def test_fetch_ghosts(self):
+        self.make_tree_and_repo()
+        self.tree.commit('first commit', rev_id='left-parent')
+        self.tree.add_parent_tree_id('ghost-parent')
+        fork = self.tree.bzrdir.sprout('fork', 'null:').open_workingtree()
+        fork.commit('not a ghost', rev_id='not-ghost-parent')
+        self.tree.branch.repository.fetch(fork.branch.repository,
+                                     'not-ghost-parent')
+        self.tree.add_parent_tree_id('not-ghost-parent')
+        self.tree.commit('second commit', rev_id='second-id')
+        self.repo.fetch(self.tree.branch.repository, 'second-id')
+        root_id = self.tree.get_root_id()
+        self.assertEqual(
+            ((root_id, 'left-parent'), (root_id, 'ghost-parent'),
+             (root_id, 'not-ghost-parent')),
+            self.get_parents(root_id, 'second-id'))
+
+    def make_two_commits(self, change_root, fetch_twice):
+        self.make_tree_and_repo()
+        self.tree.commit('first commit', rev_id='first-id')
+        if change_root:
+            self.tree.set_root_id('unique-id')
+        self.tree.commit('second commit', rev_id='second-id')
+        if fetch_twice:
+            self.repo.fetch(self.tree.branch.repository, 'first-id')
+        self.repo.fetch(self.tree.branch.repository, 'second-id')
+
+    def test_fetch_changed_root(self):
+        self.make_two_commits(change_root=True, fetch_twice=False)
+        self.assertEqual((), self.get_parents('unique-id', 'second-id'))
+
+    def test_two_fetch_changed_root(self):
+        self.make_two_commits(change_root=True, fetch_twice=True)
+        self.assertEqual((), self.get_parents('unique-id', 'second-id'))
+
+    def test_two_fetches(self):
+        self.make_two_commits(change_root=False, fetch_twice=True)
+        self.assertEqual((('TREE_ROOT', 'first-id'),),
+            self.get_parents('TREE_ROOT', 'second-id'))