~bzr-pqm/bzr/bzr.dev


Viewing changes to bzrlib/tests/test_fetch.py

  • Committer: Andrew Bennetts
  • Date: 2007-03-26 06:24:01 UTC
  • mto: This revision was merged to the branch mainline in revision 2376.
  • Revision ID: andrew.bennetts@canonical.com-20070326062401-k3nbefzje5332jaf
Deal with review comments from Robert:

  * Add my name to the NEWS file
  * Move the test case to a new module in branch_implementations
  * Remove revision_history cruft from identitymap and test_identitymap
  * Improve some docstrings

Also, this fixes a bug where revision_history was not returning a copy of the
cached data, allowing the cache to be corrupted.
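
The cache-copy bug mentioned above is worth spelling out: if an accessor returns its internal cached list, any caller that mutates the result silently corrupts the cache. A minimal sketch of the pattern follows; the class and attribute names are illustrative only, not bzrlib's actual Branch internals.

class CachingBranch(object):
    """Illustrative sketch only; not the real bzrlib Branch implementation."""

    def __init__(self, history):
        self._history = list(history)
        self._revision_history_cache = None

    def _gen_revision_history(self):
        # Stand-in for the expensive computation whose result is cached.
        return list(self._history)

    def revision_history(self):
        if self._revision_history_cache is None:
            self._revision_history_cache = self._gen_revision_history()
        # Returning the cached list itself would let a caller do, e.g.,
        # branch.revision_history().reverse() and corrupt the cache in place;
        # returning a copy keeps the cached data intact.
        return list(self._revision_history_cache)

With this, a caller that sorts, reverses, or appends to the returned list only affects its own copy.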

@@ -1 +1 @@
-# Copyright (C) 2005, 2007, 2010 Canonical Ltd
+# Copyright (C) 2005 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -12 +12 @@
 #
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 
 import os
 import re
 import sys
 
-import bzrlib
-from bzrlib import (
-    bzrdir,
-    errors,
-    osutils,
-    merge,
-    repository,
-    versionedfile,
-    )
+from bzrlib import bzrdir, repository
 from bzrlib.branch import Branch
 from bzrlib.bzrdir import BzrDir
+from bzrlib.builtins import merge
+import bzrlib.errors
 from bzrlib.repofmt import knitrepo
 from bzrlib.tests import TestCaseWithTransport
+from bzrlib.tests.HTTPTestUtil import TestCaseWithWebserver
 from bzrlib.tests.test_revision import make_branches
 from bzrlib.trace import mutter
 from bzrlib.upgrade import Convert
 from bzrlib.workingtree import WorkingTree
 
-# These tests are a bit old; please instead add new tests into
-# per_interrepository/ so they'll run on all relevant
-# combinations.
-
 
 def has_revision(branch, revision_id):
     return branch.repository.has_revision(revision_id)
 
 def fetch_steps(self, br_a, br_b, writable_a):
     """A foreign test method for testing fetch locally and remotely."""
-
+    
     # TODO RBC 20060201 make this a repository test.
     repo_b = br_b.repository
     self.assertFalse(repo_b.has_revision(br_a.revision_history()[3]))
     self.assertTrue(repo_b.has_revision(br_a.revision_history()[2]))
     self.assertEquals(len(br_b.revision_history()), 7)
-    br_b.fetch(br_a, br_a.revision_history()[2])
+    self.assertEquals(br_b.fetch(br_a, br_a.revision_history()[2])[0], 0)
     # branch.fetch is not supposed to alter the revision history
     self.assertEquals(len(br_b.revision_history()), 7)
     self.assertFalse(repo_b.has_revision(br_a.revision_history()[3]))
 
     # fetching the next revision up in sample data copies one revision
-    br_b.fetch(br_a, br_a.revision_history()[3])
+    self.assertEquals(br_b.fetch(br_a, br_a.revision_history()[3])[0], 1)
     self.assertTrue(repo_b.has_revision(br_a.revision_history()[3]))
     self.assertFalse(has_revision(br_a, br_b.revision_history()[6]))
     self.assertTrue(br_a.repository.has_revision(br_b.revision_history()[5]))
@@ -66 +57 @@
     # When a non-branch ancestor is missing, it should be unlisted...
     # as its not reference from the inventory weave.
     br_b4 = self.make_branch('br_4')
-    br_b4.fetch(br_b)
+    count, failures = br_b4.fetch(br_b)
+    self.assertEqual(count, 7)
+    self.assertEqual(failures, [])
 
-    writable_a.fetch(br_b)
+    self.assertEqual(writable_a.fetch(br_b)[0], 1)
     self.assertTrue(has_revision(br_a, br_b.revision_history()[3]))
     self.assertTrue(has_revision(br_a, br_b.revision_history()[4]))
-
+        
     br_b2 = self.make_branch('br_b2')
-    br_b2.fetch(br_b)
+    self.assertEquals(br_b2.fetch(br_b)[0], 7)
     self.assertTrue(has_revision(br_b2, br_b.revision_history()[4]))
     self.assertTrue(has_revision(br_b2, br_a.revision_history()[2]))
     self.assertFalse(has_revision(br_b2, br_a.revision_history()[3]))
 
     br_a2 = self.make_branch('br_a2')
-    br_a2.fetch(br_a)
+    self.assertEquals(br_a2.fetch(br_a)[0], 9)
     self.assertTrue(has_revision(br_a2, br_b.revision_history()[4]))
     self.assertTrue(has_revision(br_a2, br_a.revision_history()[3]))
     self.assertTrue(has_revision(br_a2, br_a.revision_history()[2]))
 
     br_a3 = self.make_branch('br_a3')
-    # pulling a branch with no revisions grabs nothing, regardless of
+    # pulling a branch with no revisions grabs nothing, regardless of 
     # whats in the inventory.
-    br_a3.fetch(br_a2)
+    self.assertEquals(br_a3.fetch(br_a2)[0], 0)
     for revno in range(4):
         self.assertFalse(
             br_a3.repository.has_revision(br_a.revision_history()[revno]))
-    br_a3.fetch(br_a2, br_a.revision_history()[2])
+    self.assertEqual(br_a3.fetch(br_a2, br_a.revision_history()[2])[0], 3)
     # pull the 3 revisions introduced by a@u-0-3
-    br_a3.fetch(br_a2, br_a.revision_history()[3])
-    # NoSuchRevision should be raised if the branch is missing the revision
+    fetched = br_a3.fetch(br_a2, br_a.revision_history()[3])[0]
+    self.assertEquals(fetched, 3, "fetched %d instead of 3" % fetched)
+    # InstallFailed should be raised if the branch is missing the revision
     # that was requested.
-    self.assertRaises(errors.NoSuchRevision, br_a3.fetch, br_a2, 'pizza')
+    self.assertRaises(bzrlib.errors.InstallFailed, br_a3.fetch, br_a2, 'pizza')
+    # InstallFailed should be raised if the branch is missing a revision
+    # from its own revision history
+    br_a2.append_revision('a-b-c')
+    self.assertRaises(bzrlib.errors.InstallFailed, br_a3.fetch, br_a2)
 
-    # TODO: Test trying to fetch from a branch that points to a revision not
-    # actually present in its repository.  Not every branch format allows you
-    # to directly point to such revisions, so it's a bit complicated to
-    # construct.  One way would be to uncommit and gc the revision, but not
-    # every branch supports that.  -- mbp 20070814
+    # TODO: ADHB 20070116 Perhaps set_last_revision shouldn't accept
+    #       revisions which are not present?  In that case, this test
+    #       must be rewritten.
+    #
+    #       RBC 20060403 the way to do this is to uncommit the revision from
+    #       the repository after the commit
 
     #TODO: test that fetch correctly does reweaving when needed. RBC 20051008
-    # Note that this means - updating the weave when ghosts are filled in to
+    # Note that this means - updating the weave when ghosts are filled in to 
     # add the right parents.
 
 
@@ -113 +112 @@
 
     def test_fetch(self):
         #highest indices a: 5, b: 7
-        br_a, br_b = make_branches(self, format='dirstate-tags')
+        br_a, br_b = make_branches(self)
         fetch_steps(self, br_a, br_b, br_a)
 
     def test_fetch_self(self):
         wt = self.make_branch_and_tree('br')
-        wt.branch.fetch(wt.branch)
+        self.assertEqual(wt.branch.fetch(wt.branch), (0, []))
 
     def test_fetch_root_knit(self):
         """Ensure that knit2.fetch() updates the root knit
-
+        
         This tests the case where the root has a new revision, but there are no
         corresponding filename, parent, contents or other changes.
         """
@@ -144 +143 @@
         branch = self.make_branch('branch', format=knit2_format)
         branch.pull(tree.branch, stop_revision='rev1')
         repo = branch.repository
-        repo.lock_read()
-        try:
-            # Make sure fetch retrieved only what we requested
-            self.assertEqual({('tree-root', 'rev1'):()},
-                repo.texts.get_parent_map(
-                    [('tree-root', 'rev1'), ('tree-root', 'rev2')]))
-        finally:
-            repo.unlock()
+        root_knit = repo.weave_store.get_weave('tree-root',
+                                                repo.get_transaction())
+        # Make sure fetch retrieved only what we requested
+        self.assertTrue('rev1' in root_knit)
+        self.assertTrue('rev2' not in root_knit)
         branch.pull(tree.branch)
+        root_knit = repo.weave_store.get_weave('tree-root',
+                                                repo.get_transaction())
         # Make sure that the next revision in the root knit was retrieved,
         # even though the text, name, parent_id, etc., were unchanged.
-        repo.lock_read()
-        try:
-            # Make sure fetch retrieved only what we requested
-            self.assertEqual({('tree-root', 'rev2'):(('tree-root', 'rev1'),)},
-                repo.texts.get_parent_map([('tree-root', 'rev2')]))
-        finally:
-            repo.unlock()
-
-    def test_fetch_incompatible(self):
-        knit_tree = self.make_branch_and_tree('knit', format='knit')
-        knit3_tree = self.make_branch_and_tree('knit3',
-            format='dirstate-with-subtree')
-        knit3_tree.commit('blah')
-        e = self.assertRaises(errors.IncompatibleRepositories,
-                              knit_tree.branch.fetch, knit3_tree.branch)
-        self.assertContainsRe(str(e),
-            r"(?m).*/knit.*\nis not compatible with\n.*/knit3/.*\n"
-            r"different rich-root support")
+        self.assertTrue('rev2' in root_knit)
 
 
 class TestMergeFetch(TestCaseWithTransport):
@@ -186 +167 @@
         wt2 = self.make_branch_and_tree('br2')
         br2 = wt2.branch
         wt2.commit(message='rev 2-1', rev_id='2-1')
-        wt2.merge_from_branch(br1, from_revision='null:')
+        merge(other_revision=['br1', -1], base_revision=['br1', 0],
+              this_dir='br2')
         self._check_revs_present(br2)
 
     def test_merge_fetches(self):
@@ -197 +179 @@
         dir_2 = br1.bzrdir.sprout('br2')
         br2 = dir_2.open_branch()
         wt1.commit(message='rev 1-2', rev_id='1-2')
-        wt2 = dir_2.open_workingtree()
-        wt2.commit(message='rev 2-1', rev_id='2-1')
-        wt2.merge_from_branch(br1)
+        dir_2.open_workingtree().commit(message='rev 2-1', rev_id='2-1')
+        merge(other_revision=['br1', -1], base_revision=[None, None], 
+              this_dir='br2')
         self._check_revs_present(br2)
 
     def _check_revs_present(self, br2):
@@ -234 +216 @@
     def test_merge_fetches_file_history(self):
         """Merge brings across file histories"""
         br2 = Branch.open('br2')
-        br1 = Branch.open('br1')
-        wt2 = WorkingTree.open('br2').merge_from_branch(br1)
-        br2.lock_read()
-        self.addCleanup(br2.unlock)
+        merge(other_revision=['br1', -1], base_revision=[None, None], 
+              this_dir='br2')
         for rev_id, text in [('1-2', 'original from 1\n'),
                              ('1-3', 'agreement\n'),
                              ('2-1', 'contents in 2\n'),
@@ -247 +227 @@
                     rev_id).get_file_text('this-file-id'), text)
 
 
-class TestKnitToPackFetch(TestCaseWithTransport):
-
-    def find_get_record_stream(self, calls, expected_count=1):
-        """In a list of calls, find the last 'get_record_stream'.
-
-        :param expected_count: The number of calls we should exepect to find.
-            If a different number is found, an assertion is raised.
-        """
-        get_record_call = None
-        call_count = 0
-        for call in calls:
-            if call[0] == 'get_record_stream':
-                call_count += 1
-                get_record_call = call
-        self.assertEqual(expected_count, call_count)
-        return get_record_call
-
-    def test_fetch_with_deltas_no_delta_closure(self):
-        tree = self.make_branch_and_tree('source', format='dirstate')
-        target = self.make_repository('target', format='pack-0.92')
-        self.build_tree(['source/file'])
-        tree.set_root_id('root-id')
-        tree.add('file', 'file-id')
-        tree.commit('one', rev_id='rev-one')
-        source = tree.branch.repository
-        source.texts = versionedfile.RecordingVersionedFilesDecorator(
-                        source.texts)
-        source.signatures = versionedfile.RecordingVersionedFilesDecorator(
-                        source.signatures)
-        source.revisions = versionedfile.RecordingVersionedFilesDecorator(
-                        source.revisions)
-        source.inventories = versionedfile.RecordingVersionedFilesDecorator(
-                        source.inventories)
-        # precondition
-        self.assertTrue(target._format._fetch_uses_deltas)
-        target.fetch(source, revision_id='rev-one')
-        self.assertEqual(('get_record_stream', [('file-id', 'rev-one')],
-                          target._format._fetch_order, False),
-                         self.find_get_record_stream(source.texts.calls))
-        self.assertEqual(('get_record_stream', [('rev-one',)],
-          target._format._fetch_order, False),
-          self.find_get_record_stream(source.inventories.calls, 2))
-        self.assertEqual(('get_record_stream', [('rev-one',)],
-                          target._format._fetch_order, False),
-                         self.find_get_record_stream(source.revisions.calls))
-        # XXX: Signatures is special, and slightly broken. The
-        # standard item_keys_introduced_by actually does a lookup for every
-        # signature to see if it exists, rather than waiting to do them all at
-        # once at the end. The fetch code then does an all-at-once and just
-        # allows for some of them to be missing.
-        # So we know there will be extra calls, but the *last* one is the one
-        # we care about.
-        signature_calls = source.signatures.calls[-1:]
-        self.assertEqual(('get_record_stream', [('rev-one',)],
-                          target._format._fetch_order, False),
-                         self.find_get_record_stream(signature_calls))
-
-    def test_fetch_no_deltas_with_delta_closure(self):
-        tree = self.make_branch_and_tree('source', format='dirstate')
-        target = self.make_repository('target', format='pack-0.92')
-        self.build_tree(['source/file'])
-        tree.set_root_id('root-id')
-        tree.add('file', 'file-id')
-        tree.commit('one', rev_id='rev-one')
-        source = tree.branch.repository
-        source.texts = versionedfile.RecordingVersionedFilesDecorator(
-                        source.texts)
-        source.signatures = versionedfile.RecordingVersionedFilesDecorator(
-                        source.signatures)
-        source.revisions = versionedfile.RecordingVersionedFilesDecorator(
-                        source.revisions)
-        source.inventories = versionedfile.RecordingVersionedFilesDecorator(
-                        source.inventories)
-        # XXX: This won't work in general, but for the dirstate format it does.
-        self.overrideAttr(target._format, '_fetch_uses_deltas', False)
-        target.fetch(source, revision_id='rev-one')
-        self.assertEqual(('get_record_stream', [('file-id', 'rev-one')],
-                          target._format._fetch_order, True),
-                         self.find_get_record_stream(source.texts.calls))
-        self.assertEqual(('get_record_stream', [('rev-one',)],
-            target._format._fetch_order, True),
-            self.find_get_record_stream(source.inventories.calls, 2))
-        self.assertEqual(('get_record_stream', [('rev-one',)],
-                          target._format._fetch_order, True),
-                         self.find_get_record_stream(source.revisions.calls))
-        # XXX: Signatures is special, and slightly broken. The
-        # standard item_keys_introduced_by actually does a lookup for every
-        # signature to see if it exists, rather than waiting to do them all at
-        # once at the end. The fetch code then does an all-at-once and just
-        # allows for some of them to be missing.
-        # So we know there will be extra calls, but the *last* one is the one
-        # we care about.
-        signature_calls = source.signatures.calls[-1:]
-        self.assertEqual(('get_record_stream', [('rev-one',)],
-                          target._format._fetch_order, True),
-                         self.find_get_record_stream(signature_calls))
-
-    def test_fetch_revisions_with_deltas_into_pack(self):
-        # See BUG #261339, dev versions of bzr could accidentally create deltas
-        # in revision texts in knit branches (when fetching from packs). So we
-        # ensure that *if* a knit repository has a delta in revisions, that it
-        # gets properly expanded back into a fulltext when stored in the pack
-        # file.
-        tree = self.make_branch_and_tree('source', format='dirstate')
-        target = self.make_repository('target', format='pack-0.92')
-        self.build_tree(['source/file'])
-        tree.set_root_id('root-id')
-        tree.add('file', 'file-id')
-        tree.commit('one', rev_id='rev-one')
-        # Hack the KVF for revisions so that it "accidentally" allows a delta
-        tree.branch.repository.revisions._max_delta_chain = 200
-        tree.commit('two', rev_id='rev-two')
-        source = tree.branch.repository
-        # Ensure that we stored a delta
-        source.lock_read()
-        self.addCleanup(source.unlock)
-        record = source.revisions.get_record_stream([('rev-two',)],
-            'unordered', False).next()
-        self.assertEqual('knit-delta-gz', record.storage_kind)
-        target.fetch(tree.branch.repository, revision_id='rev-two')
-        # The record should get expanded back to a fulltext
-        target.lock_read()
-        self.addCleanup(target.unlock)
-        record = target.revisions.get_record_stream([('rev-two',)],
-            'unordered', False).next()
-        self.assertEqual('knit-ft-gz', record.storage_kind)
-
-    def test_fetch_with_fallback_and_merge(self):
-        builder = self.make_branch_builder('source', format='pack-0.92')
-        builder.start_series()
-        # graph
-        #   A
-        #   |\
-        #   B C
-        #   | |
-        #   | D
-        #   | |
-        #   | E
-        #    \|
-        #     F
-        # A & B are present in the base (stacked-on) repository, A-E are
-        # present in the source.
-        # This reproduces bug #304841
-        # We need a large enough inventory that total size of compressed deltas
-        # is shorter than the size of a compressed fulltext. We have to use
-        # random ids because otherwise the inventory fulltext compresses too
-        # well and the deltas get bigger.
-        to_add = [
-            ('add', ('', 'TREE_ROOT', 'directory', None))]
-        for i in xrange(10):
-            fname = 'file%03d' % (i,)
-            fileid = '%s-%s' % (fname, osutils.rand_chars(64))
-            to_add.append(('add', (fname, fileid, 'file', 'content\n')))
-        builder.build_snapshot('A', None, to_add)
-        builder.build_snapshot('B', ['A'], [])
-        builder.build_snapshot('C', ['A'], [])
-        builder.build_snapshot('D', ['C'], [])
-        builder.build_snapshot('E', ['D'], [])
-        builder.build_snapshot('F', ['E', 'B'], [])
-        builder.finish_series()
-        source_branch = builder.get_branch()
-        source_branch.bzrdir.sprout('base', revision_id='B')
-        target_branch = self.make_branch('target', format='1.6')
-        target_branch.set_stacked_on_url('../base')
-        source = source_branch.repository
-        source.lock_read()
-        self.addCleanup(source.unlock)
-        source.inventories = versionedfile.OrderingVersionedFilesDecorator(
-                        source.inventories,
-                        key_priority={('E',): 1, ('D',): 2, ('C',): 4,
-                                      ('F',): 3})
-        # Ensure that the content is yielded in the proper order, and given as
-        # the expected kinds
-        records = [(record.key, record.storage_kind)
-                   for record in source.inventories.get_record_stream(
-                        [('D',), ('C',), ('E',), ('F',)], 'unordered', False)]
-        self.assertEqual([(('E',), 'knit-delta-gz'), (('D',), 'knit-delta-gz'),
-                          (('F',), 'knit-delta-gz'), (('C',), 'knit-delta-gz')],
-                          records)
-
-        target_branch.lock_write()
-        self.addCleanup(target_branch.unlock)
-        target = target_branch.repository
-        target.fetch(source, revision_id='F')
-        # 'C' should be expanded to a fulltext, but D and E should still be
-        # deltas
-        stream = target.inventories.get_record_stream(
-            [('C',), ('D',), ('E',), ('F',)],
-            'unordered', False)
-        kinds = dict((record.key, record.storage_kind) for record in stream)
-        self.assertEqual({('C',): 'knit-ft-gz', ('D',): 'knit-delta-gz',
-                          ('E',): 'knit-delta-gz', ('F',): 'knit-delta-gz'},
-                         kinds)
-
-
-class Test1To2Fetch(TestCaseWithTransport):
-    """Tests for Model1To2 failure modes"""
-
-    def make_tree_and_repo(self):
-        self.tree = self.make_branch_and_tree('tree', format='pack-0.92')
-        self.repo = self.make_repository('rich-repo', format='rich-root-pack')
-        self.repo.lock_write()
-        self.addCleanup(self.repo.unlock)
-
-    def do_fetch_order_test(self, first, second):
-        """Test that fetch works no matter what the set order of revision is.
-
-        This test depends on the order of items in a set, which is
-        implementation-dependant, so we test A, B and then B, A.
-        """
-        self.make_tree_and_repo()
-        self.tree.commit('Commit 1', rev_id=first)
-        self.tree.commit('Commit 2', rev_id=second)
-        self.repo.fetch(self.tree.branch.repository, second)
-
-    def test_fetch_order_AB(self):
-        """See do_fetch_order_test"""
-        self.do_fetch_order_test('A', 'B')
-
-    def test_fetch_order_BA(self):
-        """See do_fetch_order_test"""
-        self.do_fetch_order_test('B', 'A')
-
-    def get_parents(self, file_id, revision_id):
-        self.repo.lock_read()
-        try:
-            parent_map = self.repo.texts.get_parent_map([(file_id, revision_id)])
-            return parent_map[(file_id, revision_id)]
-        finally:
-            self.repo.unlock()
-
-    def test_fetch_ghosts(self):
-        self.make_tree_and_repo()
-        self.tree.commit('first commit', rev_id='left-parent')
-        self.tree.add_parent_tree_id('ghost-parent')
-        fork = self.tree.bzrdir.sprout('fork', 'null:').open_workingtree()
-        fork.commit('not a ghost', rev_id='not-ghost-parent')
-        self.tree.branch.repository.fetch(fork.branch.repository,
-                                     'not-ghost-parent')
-        self.tree.add_parent_tree_id('not-ghost-parent')
-        self.tree.commit('second commit', rev_id='second-id')
-        self.repo.fetch(self.tree.branch.repository, 'second-id')
-        root_id = self.tree.get_root_id()
-        self.assertEqual(
-            ((root_id, 'left-parent'), (root_id, 'not-ghost-parent')),
-            self.get_parents(root_id, 'second-id'))
-
-    def make_two_commits(self, change_root, fetch_twice):
-        self.make_tree_and_repo()
-        self.tree.commit('first commit', rev_id='first-id')
-        if change_root:
-            self.tree.set_root_id('unique-id')
-        self.tree.commit('second commit', rev_id='second-id')
-        if fetch_twice:
-            self.repo.fetch(self.tree.branch.repository, 'first-id')
-        self.repo.fetch(self.tree.branch.repository, 'second-id')
-
-    def test_fetch_changed_root(self):
-        self.make_two_commits(change_root=True, fetch_twice=False)
-        self.assertEqual((), self.get_parents('unique-id', 'second-id'))
-
-    def test_two_fetch_changed_root(self):
-        self.make_two_commits(change_root=True, fetch_twice=True)
-        self.assertEqual((), self.get_parents('unique-id', 'second-id'))
-
-    def test_two_fetches(self):
-        self.make_two_commits(change_root=False, fetch_twice=True)
-        self.assertEqual((('TREE_ROOT', 'first-id'),),
-            self.get_parents('TREE_ROOT', 'second-id'))
+class TestHttpFetch(TestCaseWithWebserver):
+    # FIXME RBC 20060124 this really isn't web specific, perhaps an
+    # instrumented readonly transport? Can we do an instrumented
+    # adapter and use self.get_readonly_url ?
+
+    def test_fetch(self):
+        #highest indices a: 5, b: 7
+        br_a, br_b = make_branches(self)
+        br_rem_a = Branch.open(self.get_readonly_url('branch1'))
+        fetch_steps(self, br_rem_a, br_b, br_a)
+
+    def _count_log_matches(self, target, logs):
+        """Count the number of times the target file pattern was fetched in an http log"""
+        get_succeeds_re = re.compile(
+            '.*"GET .*%s HTTP/1.1" 20[06] - "-" "bzr/%s' %
+            (     target,                    bzrlib.__version__))
+        c = 0
+        for line in logs:
+            if get_succeeds_re.match(line):
+                c += 1
+        return c
+
+    def test_weaves_are_retrieved_once(self):
+        self.build_tree(("source/", "source/file", "target/"))
+        wt = self.make_branch_and_tree('source')
+        branch = wt.branch
+        wt.add(["file"], ["id"])
+        wt.commit("added file")
+        print >>open("source/file", 'w'), "blah"
+        wt.commit("changed file")
+        target = BzrDir.create_branch_and_repo("target/")
+        source = Branch.open(self.get_readonly_url("source/"))
+        self.assertEqual(target.fetch(source), (2, []))
+        # this is the path to the literal file. As format changes 
+        # occur it needs to be updated. FIXME: ask the store for the
+        # path.
+        self.log("web server logs are:")
+        http_logs = self.get_readonly_server().logs
+        self.log('\n'.join(http_logs))
+        # unfortunately this log entry is branch format specific. We could 
+        # factor out the 'what files does this format use' to a method on the 
+        # repository, which would let us to this generically. RBC 20060419
+        self.assertEqual(1, self._count_log_matches('/ce/id.kndx', http_logs))
+        self.assertEqual(1, self._count_log_matches('/ce/id.knit', http_logs))
+        self.assertEqual(1, self._count_log_matches('inventory.kndx', http_logs))
+        # this r-h check test will prevent regressions, but it currently already 
+        # passes, before the patch to cache-rh is applied :[
+        self.assertTrue(1 >= self._count_log_matches('revision-history',
+                                                     http_logs))
+        self.assertTrue(1 >= self._count_log_matches('last-revision',
+                                                     http_logs))
+        # FIXME naughty poking in there.
+        self.get_readonly_server().logs = []
+        # check there is nothing more to fetch
+        source = Branch.open(self.get_readonly_url("source/"))
+        self.assertEqual(target.fetch(source), (0, []))
+        # should make just two requests
+        http_logs = self.get_readonly_server().logs
+        self.log("web server logs are:")
+        self.log('\n'.join(http_logs))
+        self.assertEqual(1, self._count_log_matches('branch-format', http_logs))
+        self.assertEqual(1, self._count_log_matches('branch/format', http_logs))
+        self.assertEqual(1, self._count_log_matches('repository/format', http_logs))
+        self.assertTrue(1 >= self._count_log_matches('revision-history',
+                                                     http_logs))
+        self.assertTrue(1 >= self._count_log_matches('last-revision',
+                                                     http_logs))
+        self.assertEqual(4, len(http_logs))