~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/tests/test_fetch.py

  • Committer: Vincent Ladeuil
  • Date: 2013-10-04 09:56:23 UTC
  • mto: (6588.1.1 trunk)
  • mto: This revision was merged to the branch mainline in revision 6589.
  • Revision ID: v.ladeuil+lp@free.fr-20131004095623-xlan34vg0y51gdb5
Stricter checks on configuration option names
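As a quick illustration of what "stricter checks on configuration option names" can mean in practice, here is a minimal, hypothetical Python sketch. The check_option_name helper and its regular expression are assumptions made for illustration only, not bzrlib's actual API or the precise rule this revision enforces.

# Hypothetical sketch only, not bzrlib's implementation.  It assumes option
# names are restricted to a dotted-identifier-like pattern; the real rule
# introduced by this revision may differ.
import re

_OPTION_NAME_RE = re.compile(r'^[a-zA-Z][a-zA-Z0-9_.-]*$')


def check_option_name(name):
    """Return name unchanged if it is acceptable, else raise ValueError."""
    if not _OPTION_NAME_RE.match(name):
        raise ValueError("invalid configuration option name: %r" % (name,))
    return name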

--- bzrlib/tests/test_fetch.py (old)
+++ bzrlib/tests/test_fetch.py (new)
@@ -1,4 +1,4 @@
-# Copyright (C) 2005 Canonical Ltd
+# Copyright (C) 2005-2011 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -12,99 +12,105 @@
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
-
-import os
-import re
-import sys
-
-from bzrlib import bzrdir, repository
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+
+from bzrlib import (
+    bzrdir,
+    errors,
+    osutils,
+    revision as _mod_revision,
+    versionedfile,
+    )
 from bzrlib.branch import Branch
-from bzrlib.bzrdir import BzrDir
-from bzrlib.builtins import merge
-import bzrlib.errors
 from bzrlib.repofmt import knitrepo
 from bzrlib.tests import TestCaseWithTransport
-from bzrlib.tests.HTTPTestUtil import TestCaseWithWebserver
 from bzrlib.tests.test_revision import make_branches
-from bzrlib.trace import mutter
 from bzrlib.upgrade import Convert
 from bzrlib.workingtree import WorkingTree
 
+# These tests are a bit old; please instead add new tests into
+# per_interrepository/ so they'll run on all relevant
+# combinations.
+
 
 def has_revision(branch, revision_id):
     return branch.repository.has_revision(revision_id)
 
+
+def revision_history(branch):
+    branch.lock_read()
+    try:
+        graph = branch.repository.get_graph()
+        history = list(graph.iter_lefthand_ancestry(branch.last_revision(),
+            [_mod_revision.NULL_REVISION]))
+    finally:
+        branch.unlock()
+    history.reverse()
+    return history
+
+
 def fetch_steps(self, br_a, br_b, writable_a):
     """A foreign test method for testing fetch locally and remotely."""
-
+
     # TODO RBC 20060201 make this a repository test.
     repo_b = br_b.repository
-    self.assertFalse(repo_b.has_revision(br_a.revision_history()[3]))
-    self.assertTrue(repo_b.has_revision(br_a.revision_history()[2]))
-    self.assertEquals(len(br_b.revision_history()), 7)
-    self.assertEquals(br_b.fetch(br_a, br_a.revision_history()[2])[0], 0)
+    self.assertFalse(repo_b.has_revision(revision_history(br_a)[3]))
+    self.assertTrue(repo_b.has_revision(revision_history(br_a)[2]))
+    self.assertEquals(len(revision_history(br_b)), 7)
+    br_b.fetch(br_a, revision_history(br_a)[2])
     # branch.fetch is not supposed to alter the revision history
-    self.assertEquals(len(br_b.revision_history()), 7)
-    self.assertFalse(repo_b.has_revision(br_a.revision_history()[3]))
+    self.assertEquals(len(revision_history(br_b)), 7)
+    self.assertFalse(repo_b.has_revision(revision_history(br_a)[3]))
 
     # fetching the next revision up in sample data copies one revision
-    self.assertEquals(br_b.fetch(br_a, br_a.revision_history()[3])[0], 1)
-    self.assertTrue(repo_b.has_revision(br_a.revision_history()[3]))
-    self.assertFalse(has_revision(br_a, br_b.revision_history()[6]))
-    self.assertTrue(br_a.repository.has_revision(br_b.revision_history()[5]))
+    br_b.fetch(br_a, revision_history(br_a)[3])
+    self.assertTrue(repo_b.has_revision(revision_history(br_a)[3]))
+    self.assertFalse(has_revision(br_a, revision_history(br_b)[6]))
+    self.assertTrue(br_a.repository.has_revision(revision_history(br_b)[5]))
 
     # When a non-branch ancestor is missing, it should be unlisted...
     # as its not reference from the inventory weave.
     br_b4 = self.make_branch('br_4')
-    count, failures = br_b4.fetch(br_b)
-    self.assertEqual(count, 7)
-    self.assertEqual(failures, [])
-
-    self.assertEqual(writable_a.fetch(br_b)[0], 1)
-    self.assertTrue(has_revision(br_a, br_b.revision_history()[3]))
-    self.assertTrue(has_revision(br_a, br_b.revision_history()[4]))
-
+    br_b4.fetch(br_b)
+
+    writable_a.fetch(br_b)
+    self.assertTrue(has_revision(br_a, revision_history(br_b)[3]))
+    self.assertTrue(has_revision(br_a, revision_history(br_b)[4]))
+
     br_b2 = self.make_branch('br_b2')
-    self.assertEquals(br_b2.fetch(br_b)[0], 7)
-    self.assertTrue(has_revision(br_b2, br_b.revision_history()[4]))
-    self.assertTrue(has_revision(br_b2, br_a.revision_history()[2]))
-    self.assertFalse(has_revision(br_b2, br_a.revision_history()[3]))
+    br_b2.fetch(br_b)
+    self.assertTrue(has_revision(br_b2, revision_history(br_b)[4]))
+    self.assertTrue(has_revision(br_b2, revision_history(br_a)[2]))
+    self.assertFalse(has_revision(br_b2, revision_history(br_a)[3]))
 
     br_a2 = self.make_branch('br_a2')
-    self.assertEquals(br_a2.fetch(br_a)[0], 9)
-    self.assertTrue(has_revision(br_a2, br_b.revision_history()[4]))
-    self.assertTrue(has_revision(br_a2, br_a.revision_history()[3]))
-    self.assertTrue(has_revision(br_a2, br_a.revision_history()[2]))
+    br_a2.fetch(br_a)
+    self.assertTrue(has_revision(br_a2, revision_history(br_b)[4]))
+    self.assertTrue(has_revision(br_a2, revision_history(br_a)[3]))
+    self.assertTrue(has_revision(br_a2, revision_history(br_a)[2]))
 
     br_a3 = self.make_branch('br_a3')
-    # pulling a branch with no revisions grabs nothing, regardless of 
+    # pulling a branch with no revisions grabs nothing, regardless of
     # whats in the inventory.
-    self.assertEquals(br_a3.fetch(br_a2)[0], 0)
+    br_a3.fetch(br_a2)
     for revno in range(4):
         self.assertFalse(
-            br_a3.repository.has_revision(br_a.revision_history()[revno]))
-    self.assertEqual(br_a3.fetch(br_a2, br_a.revision_history()[2])[0], 3)
+            br_a3.repository.has_revision(revision_history(br_a)[revno]))
+    br_a3.fetch(br_a2, revision_history(br_a)[2])
     # pull the 3 revisions introduced by a@u-0-3
-    fetched = br_a3.fetch(br_a2, br_a.revision_history()[3])[0]
-    self.assertEquals(fetched, 3, "fetched %d instead of 3" % fetched)
-    # InstallFailed should be raised if the branch is missing the revision
+    br_a3.fetch(br_a2, revision_history(br_a)[3])
+    # NoSuchRevision should be raised if the branch is missing the revision
     # that was requested.
-    self.assertRaises(bzrlib.errors.InstallFailed, br_a3.fetch, br_a2, 'pizza')
-    # InstallFailed should be raised if the branch is missing a revision
-    # from its own revision history
-    br_a2.append_revision('a-b-c')
-    self.assertRaises(bzrlib.errors.InstallFailed, br_a3.fetch, br_a2)
+    self.assertRaises(errors.NoSuchRevision, br_a3.fetch, br_a2, 'pizza')
 
-    # TODO: ADHB 20070116 Perhaps set_last_revision shouldn't accept
-    #       revisions which are not present?  In that case, this test
-    #       must be rewritten.
-    #
-    #       RBC 20060403 the way to do this is to uncommit the revision from
-    #       the repository after the commit
+    # TODO: Test trying to fetch from a branch that points to a revision not
+    # actually present in its repository.  Not every branch format allows you
+    # to directly point to such revisions, so it's a bit complicated to
+    # construct.  One way would be to uncommit and gc the revision, but not
+    # every branch supports that.  -- mbp 20070814
 
     #TODO: test that fetch correctly does reweaving when needed. RBC 20051008
-    # Note that this means - updating the weave when ghosts are filled in to 
+    # Note that this means - updating the weave when ghosts are filled in to
    # add the right parents.
 
 
@@ -112,23 +118,23 @@
 
     def test_fetch(self):
         #highest indices a: 5, b: 7
-        br_a, br_b = make_branches(self)
+        br_a, br_b = make_branches(self, format='dirstate-tags')
         fetch_steps(self, br_a, br_b, br_a)
 
     def test_fetch_self(self):
         wt = self.make_branch_and_tree('br')
-        self.assertEqual(wt.branch.fetch(wt.branch), (0, []))
+        wt.branch.fetch(wt.branch)
 
     def test_fetch_root_knit(self):
         """Ensure that knit2.fetch() updates the root knit
-
+
         This tests the case where the root has a new revision, but there are no
         corresponding filename, parent, contents or other changes.
         """
         knit1_format = bzrdir.BzrDirMetaFormat1()
         knit1_format.repository_format = knitrepo.RepositoryFormatKnit1()
         knit2_format = bzrdir.BzrDirMetaFormat1()
-        knit2_format.repository_format = knitrepo.RepositoryFormatKnit2()
+        knit2_format.repository_format = knitrepo.RepositoryFormatKnit3()
         # we start with a knit1 repository because that causes the
         # root revision to change for each commit, even though the content,
         # parent, name, and other attributes are unchanged.
@@ -143,17 +149,35 @@
         branch = self.make_branch('branch', format=knit2_format)
         branch.pull(tree.branch, stop_revision='rev1')
         repo = branch.repository
-        root_knit = repo.weave_store.get_weave('tree-root',
-                                                repo.get_transaction())
-        # Make sure fetch retrieved only what we requested
-        self.assertTrue('rev1' in root_knit)
-        self.assertTrue('rev2' not in root_knit)
+        repo.lock_read()
+        try:
+            # Make sure fetch retrieved only what we requested
+            self.assertEqual({('tree-root', 'rev1'):()},
+                repo.texts.get_parent_map(
+                    [('tree-root', 'rev1'), ('tree-root', 'rev2')]))
+        finally:
+            repo.unlock()
         branch.pull(tree.branch)
-        root_knit = repo.weave_store.get_weave('tree-root',
-                                                repo.get_transaction())
         # Make sure that the next revision in the root knit was retrieved,
         # even though the text, name, parent_id, etc., were unchanged.
-        self.assertTrue('rev2' in root_knit)
+        repo.lock_read()
+        try:
+            # Make sure fetch retrieved only what we requested
+            self.assertEqual({('tree-root', 'rev2'):(('tree-root', 'rev1'),)},
+                repo.texts.get_parent_map([('tree-root', 'rev2')]))
+        finally:
+            repo.unlock()
+
+    def test_fetch_incompatible(self):
+        knit_tree = self.make_branch_and_tree('knit', format='knit')
+        knit3_tree = self.make_branch_and_tree('knit3',
+            format='dirstate-with-subtree')
+        knit3_tree.commit('blah')
+        e = self.assertRaises(errors.IncompatibleRepositories,
+                              knit_tree.branch.fetch, knit3_tree.branch)
+        self.assertContainsRe(str(e),
+            r"(?m).*/knit.*\nis not compatible with\n.*/knit3/.*\n"
+            r"different rich-root support")
 
 
 class TestMergeFetch(TestCaseWithTransport):
@@ -167,8 +191,7 @@
         wt2 = self.make_branch_and_tree('br2')
         br2 = wt2.branch
         wt2.commit(message='rev 2-1', rev_id='2-1')
-        merge(other_revision=['br1', -1], base_revision=['br1', 0],
-              this_dir='br2')
+        wt2.merge_from_branch(br1, from_revision='null:')
         self._check_revs_present(br2)
 
     def test_merge_fetches(self):
@@ -179,9 +202,9 @@
         dir_2 = br1.bzrdir.sprout('br2')
         br2 = dir_2.open_branch()
         wt1.commit(message='rev 1-2', rev_id='1-2')
-        dir_2.open_workingtree().commit(message='rev 2-1', rev_id='2-1')
-        merge(other_revision=['br1', -1], base_revision=[None, None], 
-              this_dir='br2')
+        wt2 = dir_2.open_workingtree()
+        wt2.commit(message='rev 2-1', rev_id='2-1')
+        wt2.merge_from_branch(br1)
         self._check_revs_present(br2)
 
     def _check_revs_present(self, br2):
@@ -216,8 +239,10 @@
     def test_merge_fetches_file_history(self):
         """Merge brings across file histories"""
         br2 = Branch.open('br2')
-        merge(other_revision=['br1', -1], base_revision=[None, None], 
-              this_dir='br2')
+        br1 = Branch.open('br1')
+        wt2 = WorkingTree.open('br2').merge_from_branch(br1)
+        br2.lock_read()
+        self.addCleanup(br2.unlock)
         for rev_id, text in [('1-2', 'original from 1\n'),
                              ('1-3', 'agreement\n'),
                              ('2-1', 'contents in 2\n'),
@@ -227,71 +252,272 @@
                     rev_id).get_file_text('this-file-id'), text)
 
 
-class TestHttpFetch(TestCaseWithWebserver):
-    # FIXME RBC 20060124 this really isn't web specific, perhaps an
-    # instrumented readonly transport? Can we do an instrumented
-    # adapter and use self.get_readonly_url ?
-
-    def test_fetch(self):
-        #highest indices a: 5, b: 7
-        br_a, br_b = make_branches(self)
-        br_rem_a = Branch.open(self.get_readonly_url('branch1'))
-        fetch_steps(self, br_rem_a, br_b, br_a)
-
-    def _count_log_matches(self, target, logs):
-        """Count the number of times the target file pattern was fetched in an http log"""
-        get_succeeds_re = re.compile(
-            '.*"GET .*%s HTTP/1.1" 20[06] - "-" "bzr/%s' %
-            (     target,                    bzrlib.__version__))
-        c = 0
-        for line in logs:
-            if get_succeeds_re.match(line):
-                c += 1
-        return c
-
-    def test_weaves_are_retrieved_once(self):
-        self.build_tree(("source/", "source/file", "target/"))
-        wt = self.make_branch_and_tree('source')
-        branch = wt.branch
-        wt.add(["file"], ["id"])
-        wt.commit("added file")
-        print >>open("source/file", 'w'), "blah"
-        wt.commit("changed file")
-        target = BzrDir.create_branch_and_repo("target/")
-        source = Branch.open(self.get_readonly_url("source/"))
-        self.assertEqual(target.fetch(source), (2, []))
-        # this is the path to the literal file. As format changes 
-        # occur it needs to be updated. FIXME: ask the store for the
-        # path.
-        self.log("web server logs are:")
-        http_logs = self.get_readonly_server().logs
-        self.log('\n'.join(http_logs))
-        # unfortunately this log entry is branch format specific. We could 
-        # factor out the 'what files does this format use' to a method on the 
-        # repository, which would let us to this generically. RBC 20060419
-        self.assertEqual(1, self._count_log_matches('/ce/id.kndx', http_logs))
-        self.assertEqual(1, self._count_log_matches('/ce/id.knit', http_logs))
-        self.assertEqual(1, self._count_log_matches('inventory.kndx', http_logs))
-        # this r-h check test will prevent regressions, but it currently already 
-        # passes, before the patch to cache-rh is applied :[
-        self.assertTrue(1 >= self._count_log_matches('revision-history',
-                                                     http_logs))
-        self.assertTrue(1 >= self._count_log_matches('last-revision',
-                                                     http_logs))
-        # FIXME naughty poking in there.
-        self.get_readonly_server().logs = []
-        # check there is nothing more to fetch
-        source = Branch.open(self.get_readonly_url("source/"))
-        self.assertEqual(target.fetch(source), (0, []))
-        # should make just two requests
-        http_logs = self.get_readonly_server().logs
-        self.log("web server logs are:")
-        self.log('\n'.join(http_logs))
-        self.assertEqual(1, self._count_log_matches('branch-format', http_logs))
-        self.assertEqual(1, self._count_log_matches('branch/format', http_logs))
-        self.assertEqual(1, self._count_log_matches('repository/format', http_logs))
-        self.assertTrue(1 >= self._count_log_matches('revision-history',
-                                                     http_logs))
-        self.assertTrue(1 >= self._count_log_matches('last-revision',
-                                                     http_logs))
-        self.assertEqual(4, len(http_logs))
+class TestKnitToPackFetch(TestCaseWithTransport):
+
+    def find_get_record_stream(self, calls, expected_count=1):
+        """In a list of calls, find the last 'get_record_stream'.
+
+        :param expected_count: The number of calls we should exepect to find.
+            If a different number is found, an assertion is raised.
+        """
+        get_record_call = None
+        call_count = 0
+        for call in calls:
+            if call[0] == 'get_record_stream':
+                call_count += 1
+                get_record_call = call
+        self.assertEqual(expected_count, call_count)
+        return get_record_call
+
+    def test_fetch_with_deltas_no_delta_closure(self):
+        tree = self.make_branch_and_tree('source', format='dirstate')
+        target = self.make_repository('target', format='pack-0.92')
+        self.build_tree(['source/file'])
+        tree.set_root_id('root-id')
+        tree.add('file', 'file-id')
+        tree.commit('one', rev_id='rev-one')
+        source = tree.branch.repository
+        source.texts = versionedfile.RecordingVersionedFilesDecorator(
+                        source.texts)
+        source.signatures = versionedfile.RecordingVersionedFilesDecorator(
+                        source.signatures)
+        source.revisions = versionedfile.RecordingVersionedFilesDecorator(
+                        source.revisions)
+        source.inventories = versionedfile.RecordingVersionedFilesDecorator(
+                        source.inventories)
+        # precondition
+        self.assertTrue(target._format._fetch_uses_deltas)
+        target.fetch(source, revision_id='rev-one')
+        self.assertEqual(('get_record_stream', [('file-id', 'rev-one')],
+                          target._format._fetch_order, False),
+                         self.find_get_record_stream(source.texts.calls))
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+          target._format._fetch_order, False),
+          self.find_get_record_stream(source.inventories.calls, 2))
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+                          target._format._fetch_order, False),
+                         self.find_get_record_stream(source.revisions.calls))
+        # XXX: Signatures is special, and slightly broken. The
+        # standard item_keys_introduced_by actually does a lookup for every
+        # signature to see if it exists, rather than waiting to do them all at
+        # once at the end. The fetch code then does an all-at-once and just
+        # allows for some of them to be missing.
+        # So we know there will be extra calls, but the *last* one is the one
+        # we care about.
+        signature_calls = source.signatures.calls[-1:]
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+                          target._format._fetch_order, False),
+                         self.find_get_record_stream(signature_calls))
+
+    def test_fetch_no_deltas_with_delta_closure(self):
+        tree = self.make_branch_and_tree('source', format='dirstate')
+        target = self.make_repository('target', format='pack-0.92')
+        self.build_tree(['source/file'])
+        tree.set_root_id('root-id')
+        tree.add('file', 'file-id')
+        tree.commit('one', rev_id='rev-one')
+        source = tree.branch.repository
+        source.texts = versionedfile.RecordingVersionedFilesDecorator(
+                        source.texts)
+        source.signatures = versionedfile.RecordingVersionedFilesDecorator(
+                        source.signatures)
+        source.revisions = versionedfile.RecordingVersionedFilesDecorator(
+                        source.revisions)
+        source.inventories = versionedfile.RecordingVersionedFilesDecorator(
+                        source.inventories)
+        # XXX: This won't work in general, but for the dirstate format it does.
+        self.overrideAttr(target._format, '_fetch_uses_deltas', False)
+        target.fetch(source, revision_id='rev-one')
+        self.assertEqual(('get_record_stream', [('file-id', 'rev-one')],
+                          target._format._fetch_order, True),
+                         self.find_get_record_stream(source.texts.calls))
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+            target._format._fetch_order, True),
+            self.find_get_record_stream(source.inventories.calls, 2))
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+                          target._format._fetch_order, True),
+                         self.find_get_record_stream(source.revisions.calls))
+        # XXX: Signatures is special, and slightly broken. The
+        # standard item_keys_introduced_by actually does a lookup for every
+        # signature to see if it exists, rather than waiting to do them all at
+        # once at the end. The fetch code then does an all-at-once and just
+        # allows for some of them to be missing.
+        # So we know there will be extra calls, but the *last* one is the one
+        # we care about.
+        signature_calls = source.signatures.calls[-1:]
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+                          target._format._fetch_order, True),
+                         self.find_get_record_stream(signature_calls))
+
+    def test_fetch_revisions_with_deltas_into_pack(self):
+        # See BUG #261339, dev versions of bzr could accidentally create deltas
+        # in revision texts in knit branches (when fetching from packs). So we
+        # ensure that *if* a knit repository has a delta in revisions, that it
+        # gets properly expanded back into a fulltext when stored in the pack
+        # file.
+        tree = self.make_branch_and_tree('source', format='dirstate')
+        target = self.make_repository('target', format='pack-0.92')
+        self.build_tree(['source/file'])
+        tree.set_root_id('root-id')
+        tree.add('file', 'file-id')
+        tree.commit('one', rev_id='rev-one')
+        # Hack the KVF for revisions so that it "accidentally" allows a delta
+        tree.branch.repository.revisions._max_delta_chain = 200
+        tree.commit('two', rev_id='rev-two')
+        source = tree.branch.repository
+        # Ensure that we stored a delta
+        source.lock_read()
+        self.addCleanup(source.unlock)
+        record = source.revisions.get_record_stream([('rev-two',)],
+            'unordered', False).next()
+        self.assertEqual('knit-delta-gz', record.storage_kind)
+        target.fetch(tree.branch.repository, revision_id='rev-two')
+        # The record should get expanded back to a fulltext
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        record = target.revisions.get_record_stream([('rev-two',)],
+            'unordered', False).next()
+        self.assertEqual('knit-ft-gz', record.storage_kind)
+
+    def test_fetch_with_fallback_and_merge(self):
+        builder = self.make_branch_builder('source', format='pack-0.92')
+        builder.start_series()
+        # graph
+        #   A
+        #   |\
+        #   B C
+        #   | |
+        #   | D
+        #   | |
+        #   | E
+        #    \|
+        #     F
+        # A & B are present in the base (stacked-on) repository, A-E are
+        # present in the source.
+        # This reproduces bug #304841
+        # We need a large enough inventory that total size of compressed deltas
+        # is shorter than the size of a compressed fulltext. We have to use
+        # random ids because otherwise the inventory fulltext compresses too
+        # well and the deltas get bigger.
+        to_add = [
+            ('add', ('', 'TREE_ROOT', 'directory', None))]
+        for i in xrange(10):
+            fname = 'file%03d' % (i,)
+            fileid = '%s-%s' % (fname, osutils.rand_chars(64))
+            to_add.append(('add', (fname, fileid, 'file', 'content\n')))
+        builder.build_snapshot('A', None, to_add)
+        builder.build_snapshot('B', ['A'], [])
+        builder.build_snapshot('C', ['A'], [])
+        builder.build_snapshot('D', ['C'], [])
+        builder.build_snapshot('E', ['D'], [])
+        builder.build_snapshot('F', ['E', 'B'], [])
+        builder.finish_series()
+        source_branch = builder.get_branch()
+        source_branch.bzrdir.sprout('base', revision_id='B')
+        target_branch = self.make_branch('target', format='1.6')
+        target_branch.set_stacked_on_url('../base')
+        source = source_branch.repository
+        source.lock_read()
+        self.addCleanup(source.unlock)
+        source.inventories = versionedfile.OrderingVersionedFilesDecorator(
+                        source.inventories,
+                        key_priority={('E',): 1, ('D',): 2, ('C',): 4,
+                                      ('F',): 3})
+        # Ensure that the content is yielded in the proper order, and given as
+        # the expected kinds
+        records = [(record.key, record.storage_kind)
+                   for record in source.inventories.get_record_stream(
+                        [('D',), ('C',), ('E',), ('F',)], 'unordered', False)]
+        self.assertEqual([(('E',), 'knit-delta-gz'), (('D',), 'knit-delta-gz'),
+                          (('F',), 'knit-delta-gz'), (('C',), 'knit-delta-gz')],
+                          records)
+
+        target_branch.lock_write()
+        self.addCleanup(target_branch.unlock)
+        target = target_branch.repository
+        target.fetch(source, revision_id='F')
+        # 'C' should be expanded to a fulltext, but D and E should still be
+        # deltas
+        stream = target.inventories.get_record_stream(
+            [('C',), ('D',), ('E',), ('F',)],
+            'unordered', False)
+        kinds = dict((record.key, record.storage_kind) for record in stream)
+        self.assertEqual({('C',): 'knit-ft-gz', ('D',): 'knit-delta-gz',
+                          ('E',): 'knit-delta-gz', ('F',): 'knit-delta-gz'},
+                         kinds)
+
+
+class Test1To2Fetch(TestCaseWithTransport):
+    """Tests for Model1To2 failure modes"""
+
+    def make_tree_and_repo(self):
+        self.tree = self.make_branch_and_tree('tree', format='pack-0.92')
+        self.repo = self.make_repository('rich-repo', format='rich-root-pack')
+        self.repo.lock_write()
+        self.addCleanup(self.repo.unlock)
+
+    def do_fetch_order_test(self, first, second):
+        """Test that fetch works no matter what the set order of revision is.
+
+        This test depends on the order of items in a set, which is
+        implementation-dependant, so we test A, B and then B, A.
+        """
+        self.make_tree_and_repo()
+        self.tree.commit('Commit 1', rev_id=first)
+        self.tree.commit('Commit 2', rev_id=second)
+        self.repo.fetch(self.tree.branch.repository, second)
+
+    def test_fetch_order_AB(self):
+        """See do_fetch_order_test"""
+        self.do_fetch_order_test('A', 'B')
+
+    def test_fetch_order_BA(self):
+        """See do_fetch_order_test"""
+        self.do_fetch_order_test('B', 'A')
+
+    def get_parents(self, file_id, revision_id):
+        self.repo.lock_read()
+        try:
+            parent_map = self.repo.texts.get_parent_map([(file_id, revision_id)])
+            return parent_map[(file_id, revision_id)]
+        finally:
+            self.repo.unlock()
+
+    def test_fetch_ghosts(self):
+        self.make_tree_and_repo()
+        self.tree.commit('first commit', rev_id='left-parent')
+        self.tree.add_parent_tree_id('ghost-parent')
+        fork = self.tree.bzrdir.sprout('fork', 'null:').open_workingtree()
+        fork.commit('not a ghost', rev_id='not-ghost-parent')
+        self.tree.branch.repository.fetch(fork.branch.repository,
+                                     'not-ghost-parent')
+        self.tree.add_parent_tree_id('not-ghost-parent')
+        self.tree.commit('second commit', rev_id='second-id')
+        self.repo.fetch(self.tree.branch.repository, 'second-id')
+        root_id = self.tree.get_root_id()
+        self.assertEqual(
+            ((root_id, 'left-parent'), (root_id, 'not-ghost-parent')),
+            self.get_parents(root_id, 'second-id'))
+
+    def make_two_commits(self, change_root, fetch_twice):
+        self.make_tree_and_repo()
+        self.tree.commit('first commit', rev_id='first-id')
+        if change_root:
+            self.tree.set_root_id('unique-id')
+        self.tree.commit('second commit', rev_id='second-id')
+        if fetch_twice:
+            self.repo.fetch(self.tree.branch.repository, 'first-id')
+        self.repo.fetch(self.tree.branch.repository, 'second-id')
+
+    def test_fetch_changed_root(self):
+        self.make_two_commits(change_root=True, fetch_twice=False)
+        self.assertEqual((), self.get_parents('unique-id', 'second-id'))
+
+    def test_two_fetch_changed_root(self):
+        self.make_two_commits(change_root=True, fetch_twice=True)
+        self.assertEqual((), self.get_parents('unique-id', 'second-id'))
+
+    def test_two_fetches(self):
+        self.make_two_commits(change_root=False, fetch_twice=True)
+        self.assertEqual((('TREE_ROOT', 'first-id'),),
+            self.get_parents('TREE_ROOT', 'second-id'))