# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

import re

import bzrlib
from bzrlib import (
    errors,
    osutils,
    revision as _mod_revision,
    versionedfile,
    )
from bzrlib.branch import Branch
from bzrlib.bzrdir import BzrDir
from bzrlib.repofmt import knitrepo
from bzrlib.tests import TestCaseWithTransport
from bzrlib.tests.HTTPTestUtil import TestCaseWithWebserver
from bzrlib.tests.test_revision import make_branches
from bzrlib.trace import mutter
from bzrlib.upgrade import Convert
from bzrlib.workingtree import WorkingTree

# These tests are a bit old; please instead add new tests into
# per_interrepository/ so they'll run on all relevant
# combinations.


def has_revision(branch, revision_id):
    return branch.repository.has_revision(revision_id)


def revision_history(branch):
    graph = branch.repository.get_graph()
    history = list(graph.iter_lefthand_ancestry(branch.last_revision(),
        [_mod_revision.NULL_REVISION]))
    history.reverse()
    return history
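
# Note: revision_history() walks the left-hand ancestry newest-first and
# then reverses it, so indexing matches the old Branch.revision_history()
# convention used throughout these tests: revision_history(branch)[0] is
# the oldest revision and revision_history(branch)[-1] is the branch tip.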


def fetch_steps(self, br_a, br_b, writable_a):
    """A foreign test method for testing fetch locally and remotely."""
    # TODO RBC 20060201 make this a repository test.
    repo_b = br_b.repository
    self.assertFalse(repo_b.has_revision(revision_history(br_a)[3]))
    self.assertTrue(repo_b.has_revision(revision_history(br_a)[2]))
    self.assertEquals(len(revision_history(br_b)), 7)
    br_b.fetch(br_a, revision_history(br_a)[2])
    # branch.fetch is not supposed to alter the revision history
    self.assertEquals(len(revision_history(br_b)), 7)
    self.assertFalse(repo_b.has_revision(revision_history(br_a)[3]))

    # fetching the next revision up in sample data copies one revision
    br_b.fetch(br_a, revision_history(br_a)[3])
    self.assertTrue(repo_b.has_revision(revision_history(br_a)[3]))
    self.assertFalse(has_revision(br_a, revision_history(br_b)[6]))
    self.assertTrue(br_a.repository.has_revision(revision_history(br_b)[5]))

    # When a non-branch ancestor is missing, it should be unlisted...
    # as its not reference from the inventory weave.
    br_b4 = self.make_branch('br_4')
    br_b4.fetch(br_b)

    writable_a.fetch(br_b)
    self.assertTrue(has_revision(br_a, revision_history(br_b)[3]))
    self.assertTrue(has_revision(br_a, revision_history(br_b)[4]))

    br_b2 = self.make_branch('br_b2')
    br_b2.fetch(br_b)
    self.assertTrue(has_revision(br_b2, revision_history(br_b)[4]))
    self.assertTrue(has_revision(br_b2, revision_history(br_a)[2]))
    self.assertFalse(has_revision(br_b2, revision_history(br_a)[3]))

    br_a2 = self.make_branch('br_a2')
    br_a2.fetch(br_a)
    self.assertTrue(has_revision(br_a2, revision_history(br_b)[4]))
    self.assertTrue(has_revision(br_a2, revision_history(br_a)[3]))
    self.assertTrue(has_revision(br_a2, revision_history(br_a)[2]))

    br_a3 = self.make_branch('br_a3')
    # pulling a branch with no revisions grabs nothing, regardless of
    # whats in the inventory.
    br_a3.fetch(br_a2)
    for revno in range(4):
        self.assertFalse(
            br_a3.repository.has_revision(revision_history(br_a)[revno]))
    br_a3.fetch(br_a2, revision_history(br_a)[2])
    # pull the 3 revisions introduced by a@u-0-3
    br_a3.fetch(br_a2, revision_history(br_a)[3])
    # NoSuchRevision should be raised if the branch is missing the revision
    # that was requested.
    self.assertRaises(errors.NoSuchRevision, br_a3.fetch, br_a2, 'pizza')

    # TODO: Test trying to fetch from a branch that points to a revision not
    # actually present in its repository. Not every branch format allows you
    # to directly point to such revisions, so it's a bit complicated to
    # construct. One way would be to uncommit and gc the revision, but not
    # every branch supports that. -- mbp 20070814

    #TODO: test that fetch correctly does reweaving when needed. RBC 20051008
    # Note that this means - updating the weave when ghosts are filled in to
    # add the right parents.


class TestHttpFetch(TestCaseWithWebserver):
    # FIXME RBC 20060124 this really isn't web specific, perhaps an
    # instrumented readonly transport? Can we do an instrumented
    # adapter and use self.get_readonly_url ?

    def test_fetch(self):
        #highest indices a: 5, b: 7
        br_a, br_b = make_branches(self)
        br_rem_a = Branch.open(self.get_readonly_url('branch1'))
        fetch_steps(self, br_rem_a, br_b, br_a)

    def _count_log_matches(self, target, logs):
        """Count the number of times the target file pattern was fetched in an http log"""
        get_succeeds_re = re.compile(
            '.*"GET .*%s HTTP/1.1" 20[06] - "-" "bzr/%s' %
            (target, bzrlib.__version__))
        count = 0
        for line in logs:
            if get_succeeds_re.match(line):
                count += 1
        return count
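
    # For reference, a successful GET in the test web server's log looks
    # roughly like this (illustrative, not an actual captured line):
    #   '127.0.0.1 - - [...] "GET /source/.bzr/branch/format HTTP/1.1" 200 - "-" "bzr/..."'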

    def test_weaves_are_retrieved_once(self):
        self.build_tree(("source/", "source/file", "target/"))
        wt = self.make_branch_and_tree('source')
        wt.add(["file"], ["id"])
        wt.commit("added file")
        print >>open("source/file", 'w'), "blah"
        wt.commit("changed file")
        target = BzrDir.create_branch_and_repo("target/")
        source = Branch.open(self.get_readonly_url("source/"))
        self.assertEqual(target.fetch(source), (2, []))
        # this is the path to the literal file. As format changes
        # occur it needs to be updated. FIXME: ask the store for the
        # path.
        self.log("web server logs are:")
        http_logs = self.get_readonly_server().logs
        self.log('\n'.join(http_logs))
        # unfortunately this log entry is branch format specific. We could
        # factor out the 'what files does this format use' to a method on the
        # repository, which would let us do this generically. RBC 20060419
        self.assertEqual(1, self._count_log_matches('/ce/id.kndx', http_logs))
        self.assertEqual(1, self._count_log_matches('/ce/id.knit', http_logs))
        self.assertEqual(1, self._count_log_matches('inventory.kndx', http_logs))
        # this r-h check test will prevent regressions, but it currently already
        # passes, before the patch to cache-rh is applied :[
        self.assertTrue(1 >= self._count_log_matches('revision-history',
                                                     http_logs))
        self.assertTrue(1 >= self._count_log_matches('last-revision',
                                                     http_logs))
        # FIXME naughty poking in there.
        self.get_readonly_server().logs = []
        # check there is nothing more to fetch
        source = Branch.open(self.get_readonly_url("source/"))
        self.assertEqual(target.fetch(source), (0, []))
        # should make just two requests
        http_logs = self.get_readonly_server().logs
        self.log("web server logs are:")
        self.log('\n'.join(http_logs))
        self.assertEqual(1, self._count_log_matches('branch-format', http_logs))
        self.assertEqual(1, self._count_log_matches('branch/format', http_logs))
        self.assertEqual(1, self._count_log_matches('repository/format', http_logs))
        self.assertTrue(1 >= self._count_log_matches('revision-history',
                                                     http_logs))
        self.assertTrue(1 >= self._count_log_matches('last-revision',
                                                     http_logs))
        self.assertEqual(4, len(http_logs))
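

# The tests below wrap the source repository's versioned files in
# RecordingVersionedFilesDecorator so every get_record_stream() call is
# captured, then assert that a knit-to-pack fetch asks for each kind of
# data once, in the target format's declared fetch order and with the
# expected delta-closure flag.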
class TestKnitToPackFetch(TestCaseWithTransport):

    def find_get_record_stream(self, calls, expected_count=1):
        """In a list of calls, find the last 'get_record_stream'.

        :param expected_count: The number of calls we should expect to find.
            If a different number is found, an assertion is raised.
        """
        get_record_call = None
        call_count = 0
        for call in calls:
            if call[0] == 'get_record_stream':
                call_count += 1
                get_record_call = call
        self.assertEqual(expected_count, call_count)
        return get_record_call
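
    # Each recorded call is a tuple of the method name and its arguments,
    # so a matching entry looks something like:
    #   ('get_record_stream', [('file-id', 'rev-one')], 'unordered', False)
    # where the last two items are the requested ordering and the
    # include_delta_closure flag.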
    def test_fetch_with_deltas_no_delta_closure(self):
        tree = self.make_branch_and_tree('source', format='dirstate')
        target = self.make_repository('target', format='pack-0.92')
        self.build_tree(['source/file'])
        tree.set_root_id('root-id')
        tree.add('file', 'file-id')
        tree.commit('one', rev_id='rev-one')
        source = tree.branch.repository
        source.texts = versionedfile.RecordingVersionedFilesDecorator(
            source.texts)
        source.signatures = versionedfile.RecordingVersionedFilesDecorator(
            source.signatures)
        source.revisions = versionedfile.RecordingVersionedFilesDecorator(
            source.revisions)
        source.inventories = versionedfile.RecordingVersionedFilesDecorator(
            source.inventories)
        self.assertTrue(target._format._fetch_uses_deltas)
        target.fetch(source, revision_id='rev-one')
        self.assertEqual(('get_record_stream', [('file-id', 'rev-one')],
                          target._format._fetch_order, False),
                         self.find_get_record_stream(source.texts.calls))
        self.assertEqual(('get_record_stream', [('rev-one',)],
                          target._format._fetch_order, False),
                         self.find_get_record_stream(source.inventories.calls, 2))
        self.assertEqual(('get_record_stream', [('rev-one',)],
                          target._format._fetch_order, False),
                         self.find_get_record_stream(source.revisions.calls))
        # XXX: Signatures is special, and slightly broken. The
        # standard item_keys_introduced_by actually does a lookup for every
        # signature to see if it exists, rather than waiting to do them all at
        # once at the end. The fetch code then does an all-at-once and just
        # allows for some of them to be missing.
        # So we know there will be extra calls, but the *last* one is the one
        # we care about.
        signature_calls = source.signatures.calls[-1:]
        self.assertEqual(('get_record_stream', [('rev-one',)],
                          target._format._fetch_order, False),
                         self.find_get_record_stream(signature_calls))
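
    # The next test covers the inverse case: with _fetch_uses_deltas forced
    # off, the same fetch should request delta closures from
    # get_record_stream() (the final flag in each recorded call is True) so
    # that full texts can be reconstructed on the target side.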
    def test_fetch_no_deltas_with_delta_closure(self):
        tree = self.make_branch_and_tree('source', format='dirstate')
        target = self.make_repository('target', format='pack-0.92')
        self.build_tree(['source/file'])
        tree.set_root_id('root-id')
        tree.add('file', 'file-id')
        tree.commit('one', rev_id='rev-one')
        source = tree.branch.repository
        source.texts = versionedfile.RecordingVersionedFilesDecorator(
            source.texts)
        source.signatures = versionedfile.RecordingVersionedFilesDecorator(
            source.signatures)
        source.revisions = versionedfile.RecordingVersionedFilesDecorator(
            source.revisions)
        source.inventories = versionedfile.RecordingVersionedFilesDecorator(
            source.inventories)
        # XXX: This won't work in general, but for the dirstate format it does.
        self.overrideAttr(target._format, '_fetch_uses_deltas', False)
        target.fetch(source, revision_id='rev-one')
        self.assertEqual(('get_record_stream', [('file-id', 'rev-one')],
                          target._format._fetch_order, True),
                         self.find_get_record_stream(source.texts.calls))
        self.assertEqual(('get_record_stream', [('rev-one',)],
                          target._format._fetch_order, True),
                         self.find_get_record_stream(source.inventories.calls, 2))
        self.assertEqual(('get_record_stream', [('rev-one',)],
                          target._format._fetch_order, True),
                         self.find_get_record_stream(source.revisions.calls))
        # XXX: Signatures is special, and slightly broken. The
        # standard item_keys_introduced_by actually does a lookup for every
        # signature to see if it exists, rather than waiting to do them all at
        # once at the end. The fetch code then does an all-at-once and just
        # allows for some of them to be missing.
        # So we know there will be extra calls, but the *last* one is the one
        # we care about.
        signature_calls = source.signatures.calls[-1:]
        self.assertEqual(('get_record_stream', [('rev-one',)],
                          target._format._fetch_order, True),
                         self.find_get_record_stream(signature_calls))

    def test_fetch_revisions_with_deltas_into_pack(self):
        # See BUG #261339, dev versions of bzr could accidentally create deltas
        # in revision texts in knit branches (when fetching from packs). So we
        # ensure that *if* a knit repository has a delta in revisions, that it
        # gets properly expanded back into a fulltext when stored in the pack
        # file.
        tree = self.make_branch_and_tree('source', format='dirstate')
        target = self.make_repository('target', format='pack-0.92')
        self.build_tree(['source/file'])
        tree.set_root_id('root-id')
        tree.add('file', 'file-id')
        tree.commit('one', rev_id='rev-one')
        # Hack the KVF for revisions so that it "accidentally" allows a delta
        tree.branch.repository.revisions._max_delta_chain = 200
        tree.commit('two', rev_id='rev-two')
        source = tree.branch.repository
        # Ensure that we stored a delta
        source.lock_read()
        self.addCleanup(source.unlock)
        record = source.revisions.get_record_stream([('rev-two',)],
                                                    'unordered', False).next()
        self.assertEqual('knit-delta-gz', record.storage_kind)
        target.fetch(tree.branch.repository, revision_id='rev-two')
        # The record should get expanded back to a fulltext
        target.lock_read()
        self.addCleanup(target.unlock)
        record = target.revisions.get_record_stream([('rev-two',)],
                                                    'unordered', False).next()
        self.assertEqual('knit-ft-gz', record.storage_kind)
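
    # In the stacked fetch test below the graph is: B and C branch from A,
    # D follows C, E follows D, and F merges E and B.  The target branch is
    # stacked on 'base', which already has A and B, so fetching F only
    # copies C, D, E and F.  C's delta in the source is against A, which
    # lives only in the fallback repository, so the fetch is expected to
    # expand C to a fulltext while leaving D, E and F as deltas.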
    def test_fetch_with_fallback_and_merge(self):
        builder = self.make_branch_builder('source', format='pack-0.92')
        builder.start_series()
        # A & B are present in the base (stacked-on) repository, A-E are
        # present in the source.
        # This reproduces bug #304841
        # We need a large enough inventory that total size of compressed deltas
        # is shorter than the size of a compressed fulltext. We have to use
        # random ids because otherwise the inventory fulltext compresses too
        # well and the deltas get bigger.
        to_add = [
            ('add', ('', 'TREE_ROOT', 'directory', None))]
        for i in range(10):
            fname = 'file%03d' % (i,)
            fileid = '%s-%s' % (fname, osutils.rand_chars(64))
            to_add.append(('add', (fname, fileid, 'file', 'content\n')))
        builder.build_snapshot('A', None, to_add)
        builder.build_snapshot('B', ['A'], [])
        builder.build_snapshot('C', ['A'], [])
        builder.build_snapshot('D', ['C'], [])
        builder.build_snapshot('E', ['D'], [])
        builder.build_snapshot('F', ['E', 'B'], [])
        builder.finish_series()
        source_branch = builder.get_branch()
        source_branch.bzrdir.sprout('base', revision_id='B')
        target_branch = self.make_branch('target', format='1.6')
        target_branch.set_stacked_on_url('../base')
        source = source_branch.repository
        source.lock_read()
        self.addCleanup(source.unlock)
        source.inventories = versionedfile.OrderingVersionedFilesDecorator(
            source.inventories,
            key_priority={('E',): 1, ('D',): 2, ('C',): 4,
                          ('F',): 3})
        # Ensure that the content is yielded in the proper order, and given as
        # deltas
        records = [(record.key, record.storage_kind)
                   for record in source.inventories.get_record_stream(
                        [('D',), ('C',), ('E',), ('F',)], 'unordered', False)]
        self.assertEqual([(('E',), 'knit-delta-gz'), (('D',), 'knit-delta-gz'),
                          (('F',), 'knit-delta-gz'), (('C',), 'knit-delta-gz')],
                         records)

        target_branch.lock_write()
        self.addCleanup(target_branch.unlock)
        target = target_branch.repository
        target.fetch(source, revision_id='F')
        # 'C' should be expanded to a fulltext, but D and E should still be
        # deltas
        stream = target.inventories.get_record_stream(
            [('C',), ('D',), ('E',), ('F',)],
            'unordered', False)
        kinds = dict((record.key, record.storage_kind) for record in stream)
        self.assertEqual({('C',): 'knit-ft-gz', ('D',): 'knit-delta-gz',
                          ('E',): 'knit-delta-gz', ('F',): 'knit-delta-gz'},
                         kinds)
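

# Test1To2Fetch exercises fetching from a repository without rich roots
# (pack-0.92) into a rich-root repository, which has to generate text
# records for the tree root; the tests below inspect the root texts'
# parents via Repository.texts.get_parent_map().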
class Test1To2Fetch(TestCaseWithTransport):
    """Tests for Model1To2 failure modes"""

    def make_tree_and_repo(self):
        self.tree = self.make_branch_and_tree('tree', format='pack-0.92')
        self.repo = self.make_repository('rich-repo', format='rich-root-pack')
        self.repo.lock_write()
        self.addCleanup(self.repo.unlock)

    def do_fetch_order_test(self, first, second):
        """Test that fetch works no matter what the set order of revision is.

        This test depends on the order of items in a set, which is
        implementation-dependent, so we test A, B and then B, A.
        """
        self.make_tree_and_repo()
        self.tree.commit('Commit 1', rev_id=first)
        self.tree.commit('Commit 2', rev_id=second)
        self.repo.fetch(self.tree.branch.repository, second)

    def test_fetch_order_AB(self):
        """See do_fetch_order_test"""
        self.do_fetch_order_test('A', 'B')

    def test_fetch_order_BA(self):
        """See do_fetch_order_test"""
        self.do_fetch_order_test('B', 'A')

    def get_parents(self, file_id, revision_id):
        self.repo.lock_read()
        try:
            parent_map = self.repo.texts.get_parent_map([(file_id, revision_id)])
            return parent_map[(file_id, revision_id)]
        finally:
            self.repo.unlock()

    def test_fetch_ghosts(self):
        self.make_tree_and_repo()
        self.tree.commit('first commit', rev_id='left-parent')
        self.tree.add_parent_tree_id('ghost-parent')
        fork = self.tree.bzrdir.sprout('fork', 'null:').open_workingtree()
        fork.commit('not a ghost', rev_id='not-ghost-parent')
        self.tree.branch.repository.fetch(fork.branch.repository,
            'not-ghost-parent')
        self.tree.add_parent_tree_id('not-ghost-parent')
        self.tree.commit('second commit', rev_id='second-id')
        self.repo.fetch(self.tree.branch.repository, 'second-id')
        root_id = self.tree.get_root_id()
        self.assertEqual(
            ((root_id, 'left-parent'), (root_id, 'not-ghost-parent')),
            self.get_parents(root_id, 'second-id'))

    def make_two_commits(self, change_root, fetch_twice):
        self.make_tree_and_repo()
        self.tree.commit('first commit', rev_id='first-id')
        if change_root:
            self.tree.set_root_id('unique-id')
        self.tree.commit('second commit', rev_id='second-id')
        if fetch_twice:
            self.repo.fetch(self.tree.branch.repository, 'first-id')
        self.repo.fetch(self.tree.branch.repository, 'second-id')

    def test_fetch_changed_root(self):
        self.make_two_commits(change_root=True, fetch_twice=False)
        self.assertEqual((), self.get_parents('unique-id', 'second-id'))

    def test_two_fetch_changed_root(self):
        self.make_two_commits(change_root=True, fetch_twice=True)
        self.assertEqual((), self.get_parents('unique-id', 'second-id'))

    def test_two_fetches(self):
        self.make_two_commits(change_root=False, fetch_twice=True)
        self.assertEqual((('TREE_ROOT', 'first-id'),),
            self.get_parents('TREE_ROOT', 'second-id'))