~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/tests/test_fetch.py

  • Committer: Robert Collins
  • Date: 2007-05-07 16:48:14 UTC
  • mto: This revision was merged to the branch mainline in revision 2485.
  • Revision ID: robertc@robertcollins.net-20070507164814-wpagonutf4b5cf8s
Move HACKING to docs/developers/HACKING and adjust Makefile to accommodate this.

=== modified file 'bzrlib/tests/test_fetch.py'
@@ -1,4 +1,4 @@
-# Copyright (C) 2005 Canonical Ltd
+# Copyright (C) 2005-2011 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -12,103 +12,105 @@
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
-
-import os
-import re
-import sys
 
 from bzrlib import (
     bzrdir,
     errors,
-    repository,
+    osutils,
+    revision as _mod_revision,
+    versionedfile,
     )
 from bzrlib.branch import Branch
-from bzrlib.bzrdir import BzrDir
-from bzrlib.builtins import merge
-import bzrlib.errors
 from bzrlib.repofmt import knitrepo
 from bzrlib.tests import TestCaseWithTransport
-from bzrlib.tests.HTTPTestUtil import TestCaseWithWebserver
 from bzrlib.tests.test_revision import make_branches
-from bzrlib.trace import mutter
 from bzrlib.upgrade import Convert
 from bzrlib.workingtree import WorkingTree
 
+# These tests are a bit old; please instead add new tests into
+# per_interrepository/ so they'll run on all relevant
+# combinations.
+
 
 def has_revision(branch, revision_id):
     return branch.repository.has_revision(revision_id)
 
+
+def revision_history(branch):
+    branch.lock_read()
+    try:
+        graph = branch.repository.get_graph()
+        history = list(graph.iter_lefthand_ancestry(branch.last_revision(),
+            [_mod_revision.NULL_REVISION]))
+    finally:
+        branch.unlock()
+    history.reverse()
+    return history
+
+
 def fetch_steps(self, br_a, br_b, writable_a):
     """A foreign test method for testing fetch locally and remotely."""
-     
+
     # TODO RBC 20060201 make this a repository test.
     repo_b = br_b.repository
-    self.assertFalse(repo_b.has_revision(br_a.revision_history()[3]))
-    self.assertTrue(repo_b.has_revision(br_a.revision_history()[2]))
-    self.assertEquals(len(br_b.revision_history()), 7)
-    self.assertEquals(br_b.fetch(br_a, br_a.revision_history()[2])[0], 0)
+    self.assertFalse(repo_b.has_revision(revision_history(br_a)[3]))
+    self.assertTrue(repo_b.has_revision(revision_history(br_a)[2]))
+    self.assertEquals(len(revision_history(br_b)), 7)
+    br_b.fetch(br_a, revision_history(br_a)[2])
     # branch.fetch is not supposed to alter the revision history
-    self.assertEquals(len(br_b.revision_history()), 7)
-    self.assertFalse(repo_b.has_revision(br_a.revision_history()[3]))
+    self.assertEquals(len(revision_history(br_b)), 7)
+    self.assertFalse(repo_b.has_revision(revision_history(br_a)[3]))
 
     # fetching the next revision up in sample data copies one revision
-    self.assertEquals(br_b.fetch(br_a, br_a.revision_history()[3])[0], 1)
-    self.assertTrue(repo_b.has_revision(br_a.revision_history()[3]))
-    self.assertFalse(has_revision(br_a, br_b.revision_history()[6]))
-    self.assertTrue(br_a.repository.has_revision(br_b.revision_history()[5]))
+    br_b.fetch(br_a, revision_history(br_a)[3])
+    self.assertTrue(repo_b.has_revision(revision_history(br_a)[3]))
+    self.assertFalse(has_revision(br_a, revision_history(br_b)[6]))
+    self.assertTrue(br_a.repository.has_revision(revision_history(br_b)[5]))
 
     # When a non-branch ancestor is missing, it should be unlisted...
    # as its not reference from the inventory weave.
     br_b4 = self.make_branch('br_4')
-    count, failures = br_b4.fetch(br_b)
-    self.assertEqual(count, 7)
-    self.assertEqual(failures, [])
-
-    self.assertEqual(writable_a.fetch(br_b)[0], 1)
-    self.assertTrue(has_revision(br_a, br_b.revision_history()[3]))
-    self.assertTrue(has_revision(br_a, br_b.revision_history()[4]))
-        
+    br_b4.fetch(br_b)
+
+    writable_a.fetch(br_b)
+    self.assertTrue(has_revision(br_a, revision_history(br_b)[3]))
+    self.assertTrue(has_revision(br_a, revision_history(br_b)[4]))
+
     br_b2 = self.make_branch('br_b2')
-    self.assertEquals(br_b2.fetch(br_b)[0], 7)
-    self.assertTrue(has_revision(br_b2, br_b.revision_history()[4]))
-    self.assertTrue(has_revision(br_b2, br_a.revision_history()[2]))
-    self.assertFalse(has_revision(br_b2, br_a.revision_history()[3]))
+    br_b2.fetch(br_b)
+    self.assertTrue(has_revision(br_b2, revision_history(br_b)[4]))
+    self.assertTrue(has_revision(br_b2, revision_history(br_a)[2]))
+    self.assertFalse(has_revision(br_b2, revision_history(br_a)[3]))
 
     br_a2 = self.make_branch('br_a2')
-    self.assertEquals(br_a2.fetch(br_a)[0], 9)
-    self.assertTrue(has_revision(br_a2, br_b.revision_history()[4]))
-    self.assertTrue(has_revision(br_a2, br_a.revision_history()[3]))
-    self.assertTrue(has_revision(br_a2, br_a.revision_history()[2]))
+    br_a2.fetch(br_a)
+    self.assertTrue(has_revision(br_a2, revision_history(br_b)[4]))
+    self.assertTrue(has_revision(br_a2, revision_history(br_a)[3]))
+    self.assertTrue(has_revision(br_a2, revision_history(br_a)[2]))
 
     br_a3 = self.make_branch('br_a3')
-    # pulling a branch with no revisions grabs nothing, regardless of 
+    # pulling a branch with no revisions grabs nothing, regardless of
     # whats in the inventory.
-    self.assertEquals(br_a3.fetch(br_a2)[0], 0)
+    br_a3.fetch(br_a2)
     for revno in range(4):
         self.assertFalse(
-            br_a3.repository.has_revision(br_a.revision_history()[revno]))
-    self.assertEqual(br_a3.fetch(br_a2, br_a.revision_history()[2])[0], 3)
+            br_a3.repository.has_revision(revision_history(br_a)[revno]))
+    br_a3.fetch(br_a2, revision_history(br_a)[2])
     # pull the 3 revisions introduced by a@u-0-3
-    fetched = br_a3.fetch(br_a2, br_a.revision_history()[3])[0]
-    self.assertEquals(fetched, 3, "fetched %d instead of 3" % fetched)
-    # InstallFailed should be raised if the branch is missing the revision
+    br_a3.fetch(br_a2, revision_history(br_a)[3])
+    # NoSuchRevision should be raised if the branch is missing the revision
     # that was requested.
-    self.assertRaises(bzrlib.errors.InstallFailed, br_a3.fetch, br_a2, 'pizza')
-    # InstallFailed should be raised if the branch is missing a revision
-    # from its own revision history
-    br_a2.append_revision('a-b-c')
-    self.assertRaises(bzrlib.errors.InstallFailed, br_a3.fetch, br_a2)
+    self.assertRaises(errors.NoSuchRevision, br_a3.fetch, br_a2, 'pizza')
 
-    # TODO: ADHB 20070116 Perhaps set_last_revision shouldn't accept
-    #       revisions which are not present?  In that case, this test
-    #       must be rewritten.
-    #
-    #       RBC 20060403 the way to do this is to uncommit the revision from
-    #       the repository after the commit
+    # TODO: Test trying to fetch from a branch that points to a revision not
+    # actually present in its repository.  Not every branch format allows you
+    # to directly point to such revisions, so it's a bit complicated to
+    # construct.  One way would be to uncommit and gc the revision, but not
+    # every branch supports that.  -- mbp 20070814
 
     #TODO: test that fetch correctly does reweaving when needed. RBC 20051008
-    # Note that this means - updating the weave when ghosts are filled in to 
+    # Note that this means - updating the weave when ghosts are filled in to
    # add the right parents.
 
 
@@ -116,16 +118,16 @@
 
     def test_fetch(self):
         #highest indices a: 5, b: 7
-        br_a, br_b = make_branches(self)
+        br_a, br_b = make_branches(self, format='dirstate-tags')
         fetch_steps(self, br_a, br_b, br_a)
 
     def test_fetch_self(self):
         wt = self.make_branch_and_tree('br')
-        self.assertEqual(wt.branch.fetch(wt.branch), (0, []))
+        wt.branch.fetch(wt.branch)
 
     def test_fetch_root_knit(self):
         """Ensure that knit2.fetch() updates the root knit
-        
+
         This tests the case where the root has a new revision, but there are no
         corresponding filename, parent, contents or other changes.
         """
@@ -147,25 +149,35 @@
         branch = self.make_branch('branch', format=knit2_format)
         branch.pull(tree.branch, stop_revision='rev1')
         repo = branch.repository
-        root_knit = repo.weave_store.get_weave('tree-root',
-                                                repo.get_transaction())
-        # Make sure fetch retrieved only what we requested
-        self.assertTrue('rev1' in root_knit)
-        self.assertTrue('rev2' not in root_knit)
+        repo.lock_read()
+        try:
+            # Make sure fetch retrieved only what we requested
+            self.assertEqual({('tree-root', 'rev1'):()},
+                repo.texts.get_parent_map(
+                    [('tree-root', 'rev1'), ('tree-root', 'rev2')]))
+        finally:
+            repo.unlock()
         branch.pull(tree.branch)
-        root_knit = repo.weave_store.get_weave('tree-root',
-                                                repo.get_transaction())
         # Make sure that the next revision in the root knit was retrieved,
         # even though the text, name, parent_id, etc., were unchanged.
-        self.assertTrue('rev2' in root_knit)
+        repo.lock_read()
+        try:
+            # Make sure fetch retrieved only what we requested
+            self.assertEqual({('tree-root', 'rev2'):(('tree-root', 'rev1'),)},
+                repo.texts.get_parent_map([('tree-root', 'rev2')]))
+        finally:
+            repo.unlock()
 
     def test_fetch_incompatible(self):
         knit_tree = self.make_branch_and_tree('knit', format='knit')
         knit3_tree = self.make_branch_and_tree('knit3',
             format='dirstate-with-subtree')
         knit3_tree.commit('blah')
-        self.assertRaises(errors.IncompatibleRepositories,
-                          knit_tree.branch.fetch, knit3_tree.branch)
+        e = self.assertRaises(errors.IncompatibleRepositories,
+                              knit_tree.branch.fetch, knit3_tree.branch)
+        self.assertContainsRe(str(e),
+            r"(?m).*/knit.*\nis not compatible with\n.*/knit3/.*\n"
+            r"different rich-root support")
 
 
 class TestMergeFetch(TestCaseWithTransport):
@@ -179,8 +191,7 @@
         wt2 = self.make_branch_and_tree('br2')
         br2 = wt2.branch
         wt2.commit(message='rev 2-1', rev_id='2-1')
-        merge(other_revision=['br1', -1], base_revision=['br1', 0],
-              this_dir='br2')
+        wt2.merge_from_branch(br1, from_revision='null:')
         self._check_revs_present(br2)
 
     def test_merge_fetches(self):
@@ -191,9 +202,9 @@
         dir_2 = br1.bzrdir.sprout('br2')
         br2 = dir_2.open_branch()
         wt1.commit(message='rev 1-2', rev_id='1-2')
-        dir_2.open_workingtree().commit(message='rev 2-1', rev_id='2-1')
-        merge(other_revision=['br1', -1], base_revision=[None, None], 
-              this_dir='br2')
+        wt2 = dir_2.open_workingtree()
+        wt2.commit(message='rev 2-1', rev_id='2-1')
+        wt2.merge_from_branch(br1)
         self._check_revs_present(br2)
 
     def _check_revs_present(self, br2):
@@ -228,8 +239,10 @@
     def test_merge_fetches_file_history(self):
         """Merge brings across file histories"""
         br2 = Branch.open('br2')
-        merge(other_revision=['br1', -1], base_revision=[None, None], 
-              this_dir='br2')
+        br1 = Branch.open('br1')
+        wt2 = WorkingTree.open('br2').merge_from_branch(br1)
+        br2.lock_read()
+        self.addCleanup(br2.unlock)
         for rev_id, text in [('1-2', 'original from 1\n'),
                              ('1-3', 'agreement\n'),
                              ('2-1', 'contents in 2\n'),
@@ -239,71 +252,272 @@
                     rev_id).get_file_text('this-file-id'), text)
 
 
-class TestHttpFetch(TestCaseWithWebserver):
-    # FIXME RBC 20060124 this really isn't web specific, perhaps an
-    # instrumented readonly transport? Can we do an instrumented
-    # adapter and use self.get_readonly_url ?
-
-    def test_fetch(self):
-        #highest indices a: 5, b: 7
-        br_a, br_b = make_branches(self)
-        br_rem_a = Branch.open(self.get_readonly_url('branch1'))
-        fetch_steps(self, br_rem_a, br_b, br_a)
-
-    def _count_log_matches(self, target, logs):
-        """Count the number of times the target file pattern was fetched in an http log"""
-        get_succeeds_re = re.compile(
-            '.*"GET .*%s HTTP/1.1" 20[06] - "-" "bzr/%s' %
-            (     target,                    bzrlib.__version__))
-        c = 0
-        for line in logs:
-            if get_succeeds_re.match(line):
-                c += 1
-        return c
-
-    def test_weaves_are_retrieved_once(self):
-        self.build_tree(("source/", "source/file", "target/"))
-        wt = self.make_branch_and_tree('source')
-        branch = wt.branch
-        wt.add(["file"], ["id"])
-        wt.commit("added file")
-        print >>open("source/file", 'w'), "blah"
-        wt.commit("changed file")
-        target = BzrDir.create_branch_and_repo("target/")
-        source = Branch.open(self.get_readonly_url("source/"))
-        self.assertEqual(target.fetch(source), (2, []))
-        # this is the path to the literal file. As format changes 
-        # occur it needs to be updated. FIXME: ask the store for the
-        # path.
-        self.log("web server logs are:")
-        http_logs = self.get_readonly_server().logs
-        self.log('\n'.join(http_logs))
-        # unfortunately this log entry is branch format specific. We could 
-        # factor out the 'what files does this format use' to a method on the 
-        # repository, which would let us to this generically. RBC 20060419
-        self.assertEqual(1, self._count_log_matches('/ce/id.kndx', http_logs))
-        self.assertEqual(1, self._count_log_matches('/ce/id.knit', http_logs))
-        self.assertEqual(1, self._count_log_matches('inventory.kndx', http_logs))
-        # this r-h check test will prevent regressions, but it currently already 
-        # passes, before the patch to cache-rh is applied :[
-        self.assertTrue(1 >= self._count_log_matches('revision-history',
-                                                     http_logs))
-        self.assertTrue(1 >= self._count_log_matches('last-revision',
-                                                     http_logs))
-        # FIXME naughty poking in there.
-        self.get_readonly_server().logs = []
-        # check there is nothing more to fetch
-        source = Branch.open(self.get_readonly_url("source/"))
-        self.assertEqual(target.fetch(source), (0, []))
-        # should make just two requests
-        http_logs = self.get_readonly_server().logs
-        self.log("web server logs are:")
-        self.log('\n'.join(http_logs))
-        self.assertEqual(1, self._count_log_matches('branch-format', http_logs))
-        self.assertEqual(1, self._count_log_matches('branch/format', http_logs))
-        self.assertEqual(1, self._count_log_matches('repository/format', http_logs))
-        self.assertTrue(1 >= self._count_log_matches('revision-history',
-                                                     http_logs))
-        self.assertTrue(1 >= self._count_log_matches('last-revision',
-                                                     http_logs))
-        self.assertEqual(4, len(http_logs))
+class TestKnitToPackFetch(TestCaseWithTransport):
+
+    def find_get_record_stream(self, calls, expected_count=1):
+        """In a list of calls, find the last 'get_record_stream'.
+
+        :param expected_count: The number of calls we should exepect to find.
+            If a different number is found, an assertion is raised.
+        """
+        get_record_call = None
+        call_count = 0
+        for call in calls:
+            if call[0] == 'get_record_stream':
+                call_count += 1
+                get_record_call = call
+        self.assertEqual(expected_count, call_count)
+        return get_record_call
+
+    def test_fetch_with_deltas_no_delta_closure(self):
+        tree = self.make_branch_and_tree('source', format='dirstate')
+        target = self.make_repository('target', format='pack-0.92')
+        self.build_tree(['source/file'])
+        tree.set_root_id('root-id')
+        tree.add('file', 'file-id')
+        tree.commit('one', rev_id='rev-one')
+        source = tree.branch.repository
+        source.texts = versionedfile.RecordingVersionedFilesDecorator(
+                        source.texts)
+        source.signatures = versionedfile.RecordingVersionedFilesDecorator(
+                        source.signatures)
+        source.revisions = versionedfile.RecordingVersionedFilesDecorator(
+                        source.revisions)
+        source.inventories = versionedfile.RecordingVersionedFilesDecorator(
+                        source.inventories)
+        # precondition
+        self.assertTrue(target._format._fetch_uses_deltas)
+        target.fetch(source, revision_id='rev-one')
+        self.assertEqual(('get_record_stream', [('file-id', 'rev-one')],
+                          target._format._fetch_order, False),
+                         self.find_get_record_stream(source.texts.calls))
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+          target._format._fetch_order, False),
+          self.find_get_record_stream(source.inventories.calls, 2))
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+                          target._format._fetch_order, False),
+                         self.find_get_record_stream(source.revisions.calls))
+        # XXX: Signatures is special, and slightly broken. The
+        # standard item_keys_introduced_by actually does a lookup for every
+        # signature to see if it exists, rather than waiting to do them all at
+        # once at the end. The fetch code then does an all-at-once and just
+        # allows for some of them to be missing.
+        # So we know there will be extra calls, but the *last* one is the one
+        # we care about.
+        signature_calls = source.signatures.calls[-1:]
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+                          target._format._fetch_order, False),
+                         self.find_get_record_stream(signature_calls))
+
+    def test_fetch_no_deltas_with_delta_closure(self):
+        tree = self.make_branch_and_tree('source', format='dirstate')
+        target = self.make_repository('target', format='pack-0.92')
+        self.build_tree(['source/file'])
+        tree.set_root_id('root-id')
+        tree.add('file', 'file-id')
+        tree.commit('one', rev_id='rev-one')
+        source = tree.branch.repository
+        source.texts = versionedfile.RecordingVersionedFilesDecorator(
+                        source.texts)
+        source.signatures = versionedfile.RecordingVersionedFilesDecorator(
+                        source.signatures)
+        source.revisions = versionedfile.RecordingVersionedFilesDecorator(
+                        source.revisions)
+        source.inventories = versionedfile.RecordingVersionedFilesDecorator(
+                        source.inventories)
+        # XXX: This won't work in general, but for the dirstate format it does.
+        self.overrideAttr(target._format, '_fetch_uses_deltas', False)
+        target.fetch(source, revision_id='rev-one')
+        self.assertEqual(('get_record_stream', [('file-id', 'rev-one')],
+                          target._format._fetch_order, True),
+                         self.find_get_record_stream(source.texts.calls))
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+            target._format._fetch_order, True),
+            self.find_get_record_stream(source.inventories.calls, 2))
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+                          target._format._fetch_order, True),
+                         self.find_get_record_stream(source.revisions.calls))
+        # XXX: Signatures is special, and slightly broken. The
+        # standard item_keys_introduced_by actually does a lookup for every
+        # signature to see if it exists, rather than waiting to do them all at
+        # once at the end. The fetch code then does an all-at-once and just
+        # allows for some of them to be missing.
+        # So we know there will be extra calls, but the *last* one is the one
+        # we care about.
+        signature_calls = source.signatures.calls[-1:]
+        self.assertEqual(('get_record_stream', [('rev-one',)],
+                          target._format._fetch_order, True),
+                         self.find_get_record_stream(signature_calls))
+
+    def test_fetch_revisions_with_deltas_into_pack(self):
+        # See BUG #261339, dev versions of bzr could accidentally create deltas
+        # in revision texts in knit branches (when fetching from packs). So we
+        # ensure that *if* a knit repository has a delta in revisions, that it
+        # gets properly expanded back into a fulltext when stored in the pack
+        # file.
+        tree = self.make_branch_and_tree('source', format='dirstate')
+        target = self.make_repository('target', format='pack-0.92')
+        self.build_tree(['source/file'])
+        tree.set_root_id('root-id')
+        tree.add('file', 'file-id')
+        tree.commit('one', rev_id='rev-one')
+        # Hack the KVF for revisions so that it "accidentally" allows a delta
+        tree.branch.repository.revisions._max_delta_chain = 200
+        tree.commit('two', rev_id='rev-two')
+        source = tree.branch.repository
+        # Ensure that we stored a delta
+        source.lock_read()
+        self.addCleanup(source.unlock)
+        record = source.revisions.get_record_stream([('rev-two',)],
+            'unordered', False).next()
+        self.assertEqual('knit-delta-gz', record.storage_kind)
+        target.fetch(tree.branch.repository, revision_id='rev-two')
+        # The record should get expanded back to a fulltext
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        record = target.revisions.get_record_stream([('rev-two',)],
+            'unordered', False).next()
+        self.assertEqual('knit-ft-gz', record.storage_kind)
+
+    def test_fetch_with_fallback_and_merge(self):
+        builder = self.make_branch_builder('source', format='pack-0.92')
+        builder.start_series()
+        # graph
+        #   A
+        #   |\
+        #   B C
+        #   | |
+        #   | D
+        #   | |
+        #   | E
+        #    \|
+        #     F
+        # A & B are present in the base (stacked-on) repository, A-E are
+        # present in the source.
+        # This reproduces bug #304841
+        # We need a large enough inventory that total size of compressed deltas
+        # is shorter than the size of a compressed fulltext. We have to use
+        # random ids because otherwise the inventory fulltext compresses too
+        # well and the deltas get bigger.
+        to_add = [
+            ('add', ('', 'TREE_ROOT', 'directory', None))]
+        for i in xrange(10):
+            fname = 'file%03d' % (i,)
+            fileid = '%s-%s' % (fname, osutils.rand_chars(64))
+            to_add.append(('add', (fname, fileid, 'file', 'content\n')))
+        builder.build_snapshot('A', None, to_add)
+        builder.build_snapshot('B', ['A'], [])
+        builder.build_snapshot('C', ['A'], [])
+        builder.build_snapshot('D', ['C'], [])
+        builder.build_snapshot('E', ['D'], [])
+        builder.build_snapshot('F', ['E', 'B'], [])
+        builder.finish_series()
+        source_branch = builder.get_branch()
+        source_branch.bzrdir.sprout('base', revision_id='B')
+        target_branch = self.make_branch('target', format='1.6')
+        target_branch.set_stacked_on_url('../base')
+        source = source_branch.repository
+        source.lock_read()
+        self.addCleanup(source.unlock)
+        source.inventories = versionedfile.OrderingVersionedFilesDecorator(
+                        source.inventories,
+                        key_priority={('E',): 1, ('D',): 2, ('C',): 4,
+                                      ('F',): 3})
+        # Ensure that the content is yielded in the proper order, and given as
+        # the expected kinds
+        records = [(record.key, record.storage_kind)
+                   for record in source.inventories.get_record_stream(
+                        [('D',), ('C',), ('E',), ('F',)], 'unordered', False)]
+        self.assertEqual([(('E',), 'knit-delta-gz'), (('D',), 'knit-delta-gz'),
+                          (('F',), 'knit-delta-gz'), (('C',), 'knit-delta-gz')],
+                          records)
+
+        target_branch.lock_write()
+        self.addCleanup(target_branch.unlock)
+        target = target_branch.repository
+        target.fetch(source, revision_id='F')
+        # 'C' should be expanded to a fulltext, but D and E should still be
+        # deltas
+        stream = target.inventories.get_record_stream(
+            [('C',), ('D',), ('E',), ('F',)],
+            'unordered', False)
+        kinds = dict((record.key, record.storage_kind) for record in stream)
+        self.assertEqual({('C',): 'knit-ft-gz', ('D',): 'knit-delta-gz',
+                          ('E',): 'knit-delta-gz', ('F',): 'knit-delta-gz'},
+                         kinds)
+
+
+class Test1To2Fetch(TestCaseWithTransport):
+    """Tests for Model1To2 failure modes"""
+
+    def make_tree_and_repo(self):
+        self.tree = self.make_branch_and_tree('tree', format='pack-0.92')
+        self.repo = self.make_repository('rich-repo', format='rich-root-pack')
+        self.repo.lock_write()
+        self.addCleanup(self.repo.unlock)
+
+    def do_fetch_order_test(self, first, second):
+        """Test that fetch works no matter what the set order of revision is.
+
+        This test depends on the order of items in a set, which is
+        implementation-dependant, so we test A, B and then B, A.
+        """
+        self.make_tree_and_repo()
+        self.tree.commit('Commit 1', rev_id=first)
+        self.tree.commit('Commit 2', rev_id=second)
+        self.repo.fetch(self.tree.branch.repository, second)
+
+    def test_fetch_order_AB(self):
+        """See do_fetch_order_test"""
+        self.do_fetch_order_test('A', 'B')
+
+    def test_fetch_order_BA(self):
+        """See do_fetch_order_test"""
+        self.do_fetch_order_test('B', 'A')
+
+    def get_parents(self, file_id, revision_id):
+        self.repo.lock_read()
+        try:
+            parent_map = self.repo.texts.get_parent_map([(file_id, revision_id)])
+            return parent_map[(file_id, revision_id)]
+        finally:
+            self.repo.unlock()
+
+    def test_fetch_ghosts(self):
+        self.make_tree_and_repo()
+        self.tree.commit('first commit', rev_id='left-parent')
+        self.tree.add_parent_tree_id('ghost-parent')
+        fork = self.tree.bzrdir.sprout('fork', 'null:').open_workingtree()
+        fork.commit('not a ghost', rev_id='not-ghost-parent')
+        self.tree.branch.repository.fetch(fork.branch.repository,
+                                     'not-ghost-parent')
+        self.tree.add_parent_tree_id('not-ghost-parent')
+        self.tree.commit('second commit', rev_id='second-id')
+        self.repo.fetch(self.tree.branch.repository, 'second-id')
+        root_id = self.tree.get_root_id()
+        self.assertEqual(
+            ((root_id, 'left-parent'), (root_id, 'not-ghost-parent')),
+            self.get_parents(root_id, 'second-id'))
+
+    def make_two_commits(self, change_root, fetch_twice):
+        self.make_tree_and_repo()
+        self.tree.commit('first commit', rev_id='first-id')
+        if change_root:
+            self.tree.set_root_id('unique-id')
+        self.tree.commit('second commit', rev_id='second-id')
+        if fetch_twice:
+            self.repo.fetch(self.tree.branch.repository, 'first-id')
+        self.repo.fetch(self.tree.branch.repository, 'second-id')
+
+    def test_fetch_changed_root(self):
+        self.make_two_commits(change_root=True, fetch_twice=False)
+        self.assertEqual((), self.get_parents('unique-id', 'second-id'))
+
+    def test_two_fetch_changed_root(self):
+        self.make_two_commits(change_root=True, fetch_twice=True)
+        self.assertEqual((), self.get_parents('unique-id', 'second-id'))
+
+    def test_two_fetches(self):
+        self.make_two_commits(change_root=False, fetch_twice=True)
+        self.assertEqual((('TREE_ROOT', 'first-id'),),
+            self.get_parents('TREE_ROOT', 'second-id'))