~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/tests/test_fetch.py

  • Committer: Vincent Ladeuil
  • Date: 2011-08-12 09:49:24 UTC
  • mfrom: (6015.9.10 2.4)
  • mto: This revision was merged to the branch mainline in revision 6066.
  • Revision ID: v.ladeuil+lp@free.fr-20110812094924-knc5s0g7vs31a2f1
Merge 2.4 into trunk

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
 
# Copyright (C) 2005, 2007 Canonical Ltd
 
1
# Copyright (C) 2005-2011 Canonical Ltd
2
2
#
3
3
# This program is free software; you can redistribute it and/or modify
4
4
# it under the terms of the GNU General Public License as published by
12
12
#
13
13
# You should have received a copy of the GNU General Public License
14
14
# along with this program; if not, write to the Free Software
15
 
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
16
 
 
17
 
import os
18
 
import re
19
 
import sys
20
 
 
21
 
import bzrlib
 
15
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 
16
 
22
17
from bzrlib import (
23
18
    bzrdir,
24
19
    errors,
25
 
    merge,
26
 
    repository,
 
20
    osutils,
27
21
    versionedfile,
28
22
    )
29
23
from bzrlib.branch import Branch
30
 
from bzrlib.bzrdir import BzrDir
31
24
from bzrlib.repofmt import knitrepo
32
25
from bzrlib.tests import TestCaseWithTransport
33
 
from bzrlib.tests.http_utils import TestCaseWithWebserver
34
26
from bzrlib.tests.test_revision import make_branches
35
 
from bzrlib.trace import mutter
36
27
from bzrlib.upgrade import Convert
37
28
from bzrlib.workingtree import WorkingTree
38
29
 
39
30
# These tests are a bit old; please instead add new tests into
40
 
# interrepository_implementations/ so they'll run on all relevant
 
31
# per_interrepository/ so they'll run on all relevant
41
32
# combinations.
42
33
 
43
34
 
46
37
 
47
38
def fetch_steps(self, br_a, br_b, writable_a):
48
39
    """A foreign test method for testing fetch locally and remotely."""
49
 
     
 
40
 
50
41
    # TODO RBC 20060201 make this a repository test.
51
42
    repo_b = br_b.repository
52
43
    self.assertFalse(repo_b.has_revision(br_a.revision_history()[3]))
53
44
    self.assertTrue(repo_b.has_revision(br_a.revision_history()[2]))
54
45
    self.assertEquals(len(br_b.revision_history()), 7)
55
 
    self.assertEquals(br_b.fetch(br_a, br_a.revision_history()[2])[0], 0)
 
46
    br_b.fetch(br_a, br_a.revision_history()[2])
56
47
    # branch.fetch is not supposed to alter the revision history
57
48
    self.assertEquals(len(br_b.revision_history()), 7)
58
49
    self.assertFalse(repo_b.has_revision(br_a.revision_history()[3]))
59
50
 
60
51
    # fetching the next revision up in sample data copies one revision
61
 
    self.assertEquals(br_b.fetch(br_a, br_a.revision_history()[3])[0], 1)
 
52
    br_b.fetch(br_a, br_a.revision_history()[3])
62
53
    self.assertTrue(repo_b.has_revision(br_a.revision_history()[3]))
63
54
    self.assertFalse(has_revision(br_a, br_b.revision_history()[6]))
64
55
    self.assertTrue(br_a.repository.has_revision(br_b.revision_history()[5]))
66
57
    # When a non-branch ancestor is missing, it should be unlisted...
67
58
    # as it's not referenced from the inventory weave.
68
59
    br_b4 = self.make_branch('br_4')
69
 
    count, failures = br_b4.fetch(br_b)
70
 
    self.assertEqual(count, 7)
71
 
    self.assertEqual(failures, [])
 
60
    br_b4.fetch(br_b)
72
61
 
73
 
    self.assertEqual(writable_a.fetch(br_b)[0], 1)
 
62
    writable_a.fetch(br_b)
74
63
    self.assertTrue(has_revision(br_a, br_b.revision_history()[3]))
75
64
    self.assertTrue(has_revision(br_a, br_b.revision_history()[4]))
76
 
        
 
65
 
77
66
    br_b2 = self.make_branch('br_b2')
78
 
    self.assertEquals(br_b2.fetch(br_b)[0], 7)
 
67
    br_b2.fetch(br_b)
79
68
    self.assertTrue(has_revision(br_b2, br_b.revision_history()[4]))
80
69
    self.assertTrue(has_revision(br_b2, br_a.revision_history()[2]))
81
70
    self.assertFalse(has_revision(br_b2, br_a.revision_history()[3]))
82
71
 
83
72
    br_a2 = self.make_branch('br_a2')
84
 
    self.assertEquals(br_a2.fetch(br_a)[0], 9)
 
73
    br_a2.fetch(br_a)
85
74
    self.assertTrue(has_revision(br_a2, br_b.revision_history()[4]))
86
75
    self.assertTrue(has_revision(br_a2, br_a.revision_history()[3]))
87
76
    self.assertTrue(has_revision(br_a2, br_a.revision_history()[2]))
88
77
 
89
78
    br_a3 = self.make_branch('br_a3')
90
 
    # pulling a branch with no revisions grabs nothing, regardless of 
 
79
    # pulling a branch with no revisions grabs nothing, regardless of
91
80
    # what's in the inventory.
92
 
    self.assertEquals(br_a3.fetch(br_a2)[0], 0)
 
81
    br_a3.fetch(br_a2)
93
82
    for revno in range(4):
94
83
        self.assertFalse(
95
84
            br_a3.repository.has_revision(br_a.revision_history()[revno]))
96
 
    self.assertEqual(br_a3.fetch(br_a2, br_a.revision_history()[2])[0], 3)
 
85
    br_a3.fetch(br_a2, br_a.revision_history()[2])
97
86
    # pull the 3 revisions introduced by a@u-0-3
98
 
    fetched = br_a3.fetch(br_a2, br_a.revision_history()[3])[0]
99
 
    self.assertEquals(fetched, 3, "fetched %d instead of 3" % fetched)
100
 
    # InstallFailed should be raised if the branch is missing the revision
 
87
    br_a3.fetch(br_a2, br_a.revision_history()[3])
 
88
    # NoSuchRevision should be raised if the branch is missing the revision
101
89
    # that was requested.
102
 
    self.assertRaises(errors.InstallFailed, br_a3.fetch, br_a2, 'pizza')
 
90
    self.assertRaises(errors.NoSuchRevision, br_a3.fetch, br_a2, 'pizza')
103
91
 
104
92
    # TODO: Test trying to fetch from a branch that points to a revision not
105
93
    # actually present in its repository.  Not every branch format allows you
108
96
    # every branch supports that.  -- mbp 20070814
109
97
 
110
98
    #TODO: test that fetch correctly does reweaving when needed. RBC 20051008
111
 
    # Note that this means - updating the weave when ghosts are filled in to 
 
99
    # Note that this means - updating the weave when ghosts are filled in to
112
100
    # add the right parents.
113
101
 
114
102
 
121
109
 
122
110
    def test_fetch_self(self):
123
111
        wt = self.make_branch_and_tree('br')
124
 
        self.assertEqual(wt.branch.fetch(wt.branch), (0, []))
 
112
        wt.branch.fetch(wt.branch)
125
113
 
126
114
    def test_fetch_root_knit(self):
127
115
        """Ensure that knit2.fetch() updates the root knit
128
 
        
 
116
 
129
117
        This tests the case where the root has a new revision, but there are no
130
118
        corresponding filename, parent, contents or other changes.
131
119
        """
250
238
                    rev_id).get_file_text('this-file-id'), text)
251
239
 
252
240
 
253
 
class TestHttpFetch(TestCaseWithWebserver):
254
 
    # FIXME RBC 20060124 this really isn't web specific, perhaps an
255
 
    # instrumented readonly transport? Can we do an instrumented
256
 
    # adapter and use self.get_readonly_url ?
257
 
 
258
 
    def test_fetch(self):
259
 
        #highest indices a: 5, b: 7
260
 
        br_a, br_b = make_branches(self)
261
 
        br_rem_a = Branch.open(self.get_readonly_url('branch1'))
262
 
        fetch_steps(self, br_rem_a, br_b, br_a)
263
 
 
264
 
    def _count_log_matches(self, target, logs):
265
 
        """Count the number of times the target file pattern was fetched in an http log"""
266
 
        get_succeeds_re = re.compile(
267
 
            '.*"GET .*%s HTTP/1.1" 20[06] - "-" "bzr/%s' %
268
 
            (     target,                    bzrlib.__version__))
269
 
        c = 0
270
 
        for line in logs:
271
 
            if get_succeeds_re.match(line):
272
 
                c += 1
273
 
        return c
274
 
 
275
 
    def test_weaves_are_retrieved_once(self):
276
 
        self.build_tree(("source/", "source/file", "target/"))
277
 
        # This test depends on knit data storage.
278
 
        wt = self.make_branch_and_tree('source', format='dirstate-tags')
279
 
        branch = wt.branch
280
 
        wt.add(["file"], ["id"])
281
 
        wt.commit("added file")
282
 
        open("source/file", 'w').write("blah\n")
283
 
        wt.commit("changed file")
284
 
        target = BzrDir.create_branch_and_repo("target/")
285
 
        source = Branch.open(self.get_readonly_url("source/"))
286
 
        self.assertEqual(target.fetch(source), (2, []))
287
 
        # this is the path to the literal file. As format changes 
288
 
        # occur it needs to be updated. FIXME: ask the store for the
289
 
        # path.
290
 
        self.log("web server logs are:")
291
 
        http_logs = self.get_readonly_server().logs
292
 
        self.log('\n'.join(http_logs))
293
 
        # unfortunately this log entry is branch format specific. We could 
294
 
        # factor out the 'what files does this format use' to a method on the 
295
 
        # repository, which would let us do this generically. RBC 20060419
296
 
        # RBC 20080408: Or perhaps we can assert that no files are fully read
297
 
        # twice?
298
 
        self.assertEqual(1, self._count_log_matches('/ce/id.kndx', http_logs))
299
 
        self.assertEqual(1, self._count_log_matches('/ce/id.knit', http_logs))
300
 
        self.assertEqual(1, self._count_log_matches('inventory.kndx', http_logs))
301
 
        # this r-h check test will prevent regressions, but it currently already 
302
 
        # passes, before the patch to cache-rh is applied :[
303
 
        self.assertTrue(1 >= self._count_log_matches('revision-history',
304
 
                                                     http_logs))
305
 
        self.assertTrue(1 >= self._count_log_matches('last-revision',
306
 
                                                     http_logs))
307
 
        # FIXME naughty poking in there.
308
 
        self.get_readonly_server().logs = []
309
 
        # check there is nothing more to fetch.  We take care to re-use the
310
 
        # existing transport so that the request logs we're about to examine
311
 
        # aren't cluttered with redundant probes for a smart server.
312
 
        # XXX: Perhaps this further parameterisation: test http with smart
313
 
        # server, and test http without smart server?
314
 
        source = Branch.open(
315
 
            self.get_readonly_url("source/"),
316
 
            possible_transports=[source.bzrdir.root_transport])
317
 
        self.assertEqual(target.fetch(source), (0, []))
318
 
        # should make just two requests
319
 
        http_logs = self.get_readonly_server().logs
320
 
        self.log("web server logs are:")
321
 
        self.log('\n'.join(http_logs))
322
 
        self.assertEqual(1, self._count_log_matches('branch-format', http_logs))
323
 
        self.assertEqual(1, self._count_log_matches('branch/format', http_logs))
324
 
        self.assertEqual(1, self._count_log_matches('repository/format',
325
 
            http_logs))
326
 
        self.assertTrue(1 >= self._count_log_matches('revision-history',
327
 
                                                     http_logs))
328
 
        self.assertTrue(1 >= self._count_log_matches('last-revision',
329
 
                                                     http_logs))
330
 
        self.assertEqual(4, len(http_logs))
331
 
 
332
 
 
333
241
class TestKnitToPackFetch(TestCaseWithTransport):
334
242
 
335
 
    def find_get_record_stream(self, calls):
336
 
        """In a list of calls, find 'get_record_stream' calls.
 
243
    def find_get_record_stream(self, calls, expected_count=1):
 
244
        """In a list of calls, find the last 'get_record_stream'.
337
245
 
338
 
        This also ensures that there is only one get_record_stream call.
 
246
        :param expected_count: The number of calls we should expect to find.
 
247
            If a different number is found, an assertion is raised.
339
248
        """
340
249
        get_record_call = None
 
250
        call_count = 0
341
251
        for call in calls:
342
252
            if call[0] == 'get_record_stream':
343
 
                self.assertIs(None, get_record_call,
344
 
                              "there should only be one call to"
345
 
                              " get_record_stream")
 
253
                call_count += 1
346
254
                get_record_call = call
347
 
        self.assertIsNot(None, get_record_call,
348
 
                         "there should be exactly one call to "
349
 
                         " get_record_stream")
 
255
        self.assertEqual(expected_count, call_count)
350
256
        return get_record_call
351
257
 
352
258
    def test_fetch_with_deltas_no_delta_closure(self):
366
272
        source.inventories = versionedfile.RecordingVersionedFilesDecorator(
367
273
                        source.inventories)
368
274
        # precondition
369
 
        self.assertTrue(target._fetch_uses_deltas)
 
275
        self.assertTrue(target._format._fetch_uses_deltas)
370
276
        target.fetch(source, revision_id='rev-one')
371
277
        self.assertEqual(('get_record_stream', [('file-id', 'rev-one')],
372
 
                          target._fetch_order, False),
 
278
                          target._format._fetch_order, False),
373
279
                         self.find_get_record_stream(source.texts.calls))
374
280
        self.assertEqual(('get_record_stream', [('rev-one',)],
375
 
                          target._fetch_order, False),
376
 
                         self.find_get_record_stream(source.inventories.calls))
377
 
        # Because of bugs in the old fetch code, revisions could accidentally
378
 
        # have deltas present in knits. However, it was never intended, so we
379
 
        # always for include_delta_closure=True, to make sure we get fulltexts.
380
 
        # bug #261339
 
281
          target._format._fetch_order, False),
 
282
          self.find_get_record_stream(source.inventories.calls, 2))
381
283
        self.assertEqual(('get_record_stream', [('rev-one',)],
382
 
                          target._fetch_order, True),
 
284
                          target._format._fetch_order, False),
383
285
                         self.find_get_record_stream(source.revisions.calls))
384
286
        # XXX: Signatures is special, and slightly broken. The
385
287
        # standard item_keys_introduced_by actually does a lookup for every
390
292
        # we care about.
391
293
        signature_calls = source.signatures.calls[-1:]
392
294
        self.assertEqual(('get_record_stream', [('rev-one',)],
393
 
                          target._fetch_order, True),
 
295
                          target._format._fetch_order, False),
394
296
                         self.find_get_record_stream(signature_calls))
395
297
 
396
298
    def test_fetch_no_deltas_with_delta_closure(self):
409
311
                        source.revisions)
410
312
        source.inventories = versionedfile.RecordingVersionedFilesDecorator(
411
313
                        source.inventories)
412
 
        target._fetch_uses_deltas = False
 
314
        # XXX: This won't work in general, but for the dirstate format it does.
 
315
        self.overrideAttr(target._format, '_fetch_uses_deltas', False)
413
316
        target.fetch(source, revision_id='rev-one')
414
317
        self.assertEqual(('get_record_stream', [('file-id', 'rev-one')],
415
 
                          target._fetch_order, True),
 
318
                          target._format._fetch_order, True),
416
319
                         self.find_get_record_stream(source.texts.calls))
417
320
        self.assertEqual(('get_record_stream', [('rev-one',)],
418
 
                          target._fetch_order, True),
419
 
                         self.find_get_record_stream(source.inventories.calls))
 
321
            target._format._fetch_order, True),
 
322
            self.find_get_record_stream(source.inventories.calls, 2))
420
323
        self.assertEqual(('get_record_stream', [('rev-one',)],
421
 
                          target._fetch_order, True),
 
324
                          target._format._fetch_order, True),
422
325
                         self.find_get_record_stream(source.revisions.calls))
423
326
        # XXX: Signatures is special, and slightly broken. The
424
327
        # standard item_keys_introduced_by actually does a lookup for every
429
332
        # we care about.
430
333
        signature_calls = source.signatures.calls[-1:]
431
334
        self.assertEqual(('get_record_stream', [('rev-one',)],
432
 
                          target._fetch_order, True),
 
335
                          target._format._fetch_order, True),
433
336
                         self.find_get_record_stream(signature_calls))
434
337
 
 
338
    def test_fetch_revisions_with_deltas_into_pack(self):
 
339
        # See BUG #261339, dev versions of bzr could accidentally create deltas
 
340
        # in revision texts in knit branches (when fetching from packs). So we
 
341
        # ensure that *if* a knit repository has a delta in revisions, that it
 
342
        # gets properly expanded back into a fulltext when stored in the pack
 
343
        # file.
 
344
        tree = self.make_branch_and_tree('source', format='dirstate')
 
345
        target = self.make_repository('target', format='pack-0.92')
 
346
        self.build_tree(['source/file'])
 
347
        tree.set_root_id('root-id')
 
348
        tree.add('file', 'file-id')
 
349
        tree.commit('one', rev_id='rev-one')
 
350
        # Hack the KVF for revisions so that it "accidentally" allows a delta
 
351
        tree.branch.repository.revisions._max_delta_chain = 200
 
352
        tree.commit('two', rev_id='rev-two')
 
353
        source = tree.branch.repository
 
354
        # Ensure that we stored a delta
 
355
        source.lock_read()
 
356
        self.addCleanup(source.unlock)
 
357
        record = source.revisions.get_record_stream([('rev-two',)],
 
358
            'unordered', False).next()
 
359
        self.assertEqual('knit-delta-gz', record.storage_kind)
 
360
        target.fetch(tree.branch.repository, revision_id='rev-two')
 
361
        # The record should get expanded back to a fulltext
 
362
        target.lock_read()
 
363
        self.addCleanup(target.unlock)
 
364
        record = target.revisions.get_record_stream([('rev-two',)],
 
365
            'unordered', False).next()
 
366
        self.assertEqual('knit-ft-gz', record.storage_kind)
 
367
 
 
368
    def test_fetch_with_fallback_and_merge(self):
 
369
        builder = self.make_branch_builder('source', format='pack-0.92')
 
370
        builder.start_series()
 
371
        # graph
 
372
        #   A
 
373
        #   |\
 
374
        #   B C
 
375
        #   | |
 
376
        #   | D
 
377
        #   | |
 
378
        #   | E
 
379
        #    \|
 
380
        #     F
 
381
        # A & B are present in the base (stacked-on) repository, A-E are
 
382
        # present in the source.
 
383
        # This reproduces bug #304841
 
384
        # We need a large enough inventory that total size of compressed deltas
 
385
        # is shorter than the size of a compressed fulltext. We have to use
 
386
        # random ids because otherwise the inventory fulltext compresses too
 
387
        # well and the deltas get bigger.
 
388
        to_add = [
 
389
            ('add', ('', 'TREE_ROOT', 'directory', None))]
 
390
        for i in xrange(10):
 
391
            fname = 'file%03d' % (i,)
 
392
            fileid = '%s-%s' % (fname, osutils.rand_chars(64))
 
393
            to_add.append(('add', (fname, fileid, 'file', 'content\n')))
 
394
        builder.build_snapshot('A', None, to_add)
 
395
        builder.build_snapshot('B', ['A'], [])
 
396
        builder.build_snapshot('C', ['A'], [])
 
397
        builder.build_snapshot('D', ['C'], [])
 
398
        builder.build_snapshot('E', ['D'], [])
 
399
        builder.build_snapshot('F', ['E', 'B'], [])
 
400
        builder.finish_series()
 
401
        source_branch = builder.get_branch()
 
402
        source_branch.bzrdir.sprout('base', revision_id='B')
 
403
        target_branch = self.make_branch('target', format='1.6')
 
404
        target_branch.set_stacked_on_url('../base')
 
405
        source = source_branch.repository
 
406
        source.lock_read()
 
407
        self.addCleanup(source.unlock)
 
408
        source.inventories = versionedfile.OrderingVersionedFilesDecorator(
 
409
                        source.inventories,
 
410
                        key_priority={('E',): 1, ('D',): 2, ('C',): 4,
 
411
                                      ('F',): 3})
 
412
        # Ensure that the content is yielded in the proper order, and given as
 
413
        # the expected kinds
 
414
        records = [(record.key, record.storage_kind)
 
415
                   for record in source.inventories.get_record_stream(
 
416
                        [('D',), ('C',), ('E',), ('F',)], 'unordered', False)]
 
417
        self.assertEqual([(('E',), 'knit-delta-gz'), (('D',), 'knit-delta-gz'),
 
418
                          (('F',), 'knit-delta-gz'), (('C',), 'knit-delta-gz')],
 
419
                          records)
 
420
 
 
421
        target_branch.lock_write()
 
422
        self.addCleanup(target_branch.unlock)
 
423
        target = target_branch.repository
 
424
        target.fetch(source, revision_id='F')
 
425
        # 'C' should be expanded to a fulltext, but D and E should still be
 
426
        # deltas
 
427
        stream = target.inventories.get_record_stream(
 
428
            [('C',), ('D',), ('E',), ('F',)],
 
429
            'unordered', False)
 
430
        kinds = dict((record.key, record.storage_kind) for record in stream)
 
431
        self.assertEqual({('C',): 'knit-ft-gz', ('D',): 'knit-delta-gz',
 
432
                          ('E',): 'knit-delta-gz', ('F',): 'knit-delta-gz'},
 
433
                         kinds)
 
434
 
435
435
 
436
436
class Test1To2Fetch(TestCaseWithTransport):
437
437
    """Tests for Model1To2 failure modes"""
482
482
        self.repo.fetch(self.tree.branch.repository, 'second-id')
483
483
        root_id = self.tree.get_root_id()
484
484
        self.assertEqual(
485
 
            ((root_id, 'left-parent'), (root_id, 'ghost-parent'),
486
 
             (root_id, 'not-ghost-parent')),
 
485
            ((root_id, 'left-parent'), (root_id, 'not-ghost-parent')),
487
486
            self.get_parents(root_id, 'second-id'))
488
487
 
489
488
    def make_two_commits(self, change_root, fetch_twice):