# Copyright (C) 2005, 2007 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

import re

import bzrlib
from bzrlib import (
    bzrdir,
    errors,
    osutils,
    versionedfile,
    )
from bzrlib.branch import Branch
from bzrlib.bzrdir import BzrDir
from bzrlib.repofmt import knitrepo
from bzrlib.tests import TestCaseWithTransport
from bzrlib.tests.http_utils import TestCaseWithWebserver
from bzrlib.tests.test_revision import make_branches
from bzrlib.trace import mutter
from bzrlib.upgrade import Convert
from bzrlib.workingtree import WorkingTree

# These tests are a bit old; please instead add new tests into
# interrepository_implementations/ so they'll run on all relevant
# combinations.


def has_revision(branch, revision_id):
    return branch.repository.has_revision(revision_id)
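
# Note: has_revision() above is only a thin convenience wrapper so the
# assertions in fetch_steps() below read the same whether they are handed a
# branch or its repository; it is equivalent to calling
# branch.repository.has_revision(revision_id) directly.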

def fetch_steps(self, br_a, br_b, writable_a):
    """A foreign test method for testing fetch locally and remotely."""

    # TODO RBC 20060201 make this a repository test.
    repo_b = br_b.repository
    self.assertFalse(repo_b.has_revision(br_a.revision_history()[3]))
    self.assertTrue(repo_b.has_revision(br_a.revision_history()[2]))
    self.assertEquals(len(br_b.revision_history()), 7)
    br_b.fetch(br_a, br_a.revision_history()[2])
    # branch.fetch is not supposed to alter the revision history
    self.assertEquals(len(br_b.revision_history()), 7)
    self.assertFalse(repo_b.has_revision(br_a.revision_history()[3]))

    # fetching the next revision up in sample data copies one revision
    br_b.fetch(br_a, br_a.revision_history()[3])
    self.assertTrue(repo_b.has_revision(br_a.revision_history()[3]))
    self.assertFalse(has_revision(br_a, br_b.revision_history()[6]))
    self.assertTrue(br_a.repository.has_revision(br_b.revision_history()[5]))

    # When a non-branch ancestor is missing, it should be unlisted...
    # as it's not referenced from the inventory weave.
    br_b4 = self.make_branch('br_4')
    br_b4.fetch(br_b)

    writable_a.fetch(br_b)
    self.assertTrue(has_revision(br_a, br_b.revision_history()[3]))
    self.assertTrue(has_revision(br_a, br_b.revision_history()[4]))

    br_b2 = self.make_branch('br_b2')
    br_b2.fetch(br_b)
    self.assertTrue(has_revision(br_b2, br_b.revision_history()[4]))
    self.assertTrue(has_revision(br_b2, br_a.revision_history()[2]))
    self.assertFalse(has_revision(br_b2, br_a.revision_history()[3]))

    br_a2 = self.make_branch('br_a2')
    br_a2.fetch(br_a)
    self.assertTrue(has_revision(br_a2, br_b.revision_history()[4]))
    self.assertTrue(has_revision(br_a2, br_a.revision_history()[3]))
    self.assertTrue(has_revision(br_a2, br_a.revision_history()[2]))

    br_a3 = self.make_branch('br_a3')
    # pulling a branch with no revisions grabs nothing, regardless of
    # what's in the inventory.
    br_a3.fetch(br_a2)
    for revno in range(4):
        self.assertFalse(
            br_a3.repository.has_revision(br_a.revision_history()[revno]))
    br_a3.fetch(br_a2, br_a.revision_history()[2])
    # pull the 3 revisions introduced by a@u-0-3
    br_a3.fetch(br_a2, br_a.revision_history()[3])
    # NoSuchRevision should be raised if the branch is missing the revision
    # that was requested.
    self.assertRaises(errors.NoSuchRevision, br_a3.fetch, br_a2, 'pizza')

    # TODO: Test trying to fetch from a branch that points to a revision not
    # actually present in its repository.  Not every branch format allows you
    # to directly point to such revisions, so it's a bit complicated to
    # construct.  One way would be to uncommit and gc the revision, but not
    # every branch supports that.  -- mbp 20070814

    #TODO: test that fetch correctly does reweaving when needed. RBC 20051008
    # Note that this means - updating the weave when ghosts are filled in to
    # add the right parents.
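
# fetch_steps() is written as a plain function that takes the test case as
# `self` so it can be shared: TestFetch.test_fetch below runs it against two
# local branches, while TestHttpFetch.test_fetch re-runs the same steps with
# the source branch opened over the test HTTP server, so both transports
# exercise identical assertions.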

class TestFetch(TestCaseWithTransport):

    def test_fetch(self):
        #highest indices a: 5, b: 7
        br_a, br_b = make_branches(self, format='dirstate-tags')
        fetch_steps(self, br_a, br_b, br_a)

    def test_fetch_self(self):
        wt = self.make_branch_and_tree('br')
        wt.branch.fetch(wt.branch)

    def test_fetch_root_knit(self):
        """Ensure that knit2.fetch() updates the root knit

        This tests the case where the root has a new revision, but there are no
        corresponding filename, parent, contents or other changes.
        """
        knit1_format = bzrdir.BzrDirMetaFormat1()
        knit1_format.repository_format = knitrepo.RepositoryFormatKnit1()
        knit2_format = bzrdir.BzrDirMetaFormat1()
        knit2_format.repository_format = knitrepo.RepositoryFormatKnit3()
        # we start with a knit1 repository because that causes the
        # root revision to change for each commit, even though the content,
        # parent, name, and other attributes are unchanged.
        tree = self.make_branch_and_tree('tree', knit1_format)
        tree.set_root_id('tree-root')
        tree.commit('rev1', rev_id='rev1')
        tree.commit('rev2', rev_id='rev2')

        # Now we convert it to a knit2 repository so that it has a root knit
        Convert(tree.basedir, knit2_format)
        tree = WorkingTree.open(tree.basedir)
        branch = self.make_branch('branch', format=knit2_format)
        branch.pull(tree.branch, stop_revision='rev1')
        repo = branch.repository
        repo.lock_read()
        try:
            # Make sure fetch retrieved only what we requested
            self.assertEqual({('tree-root', 'rev1'):()},
                repo.texts.get_parent_map(
                    [('tree-root', 'rev1'), ('tree-root', 'rev2')]))
        finally:
            repo.unlock()
        branch.pull(tree.branch)
        # Make sure that the next revision in the root knit was retrieved,
        # even though the text, name, parent_id, etc., were unchanged.
        repo.lock_read()
        try:
            # Make sure fetch retrieved only what we requested
            self.assertEqual({('tree-root', 'rev2'):(('tree-root', 'rev1'),)},
                repo.texts.get_parent_map([('tree-root', 'rev2')]))
        finally:
            repo.unlock()

    def test_fetch_incompatible(self):
        knit_tree = self.make_branch_and_tree('knit', format='knit')
        knit3_tree = self.make_branch_and_tree('knit3',
            format='dirstate-with-subtree')
        knit3_tree.commit('blah')
        e = self.assertRaises(errors.IncompatibleRepositories,
                              knit_tree.branch.fetch, knit3_tree.branch)
        self.assertContainsRe(str(e),
            r"(?m).*/knit.*\nis not compatible with\n.*/knit3/.*\n"
            r"different rich-root support")

class TestMergeFetch(TestCaseWithTransport):

    def test_merge_fetches_unrelated(self):
        """Merge brings across history from unrelated source"""
        wt1 = self.make_branch_and_tree('br1')
        br1 = wt1.branch
        wt1.commit(message='rev 1-1', rev_id='1-1')
        wt1.commit(message='rev 1-2', rev_id='1-2')
        wt2 = self.make_branch_and_tree('br2')
        br2 = wt2.branch
        wt2.commit(message='rev 2-1', rev_id='2-1')
        wt2.merge_from_branch(br1, from_revision='null:')
        self._check_revs_present(br2)

    def test_merge_fetches(self):
        """Merge brings across history from source"""
        wt1 = self.make_branch_and_tree('br1')
        br1 = wt1.branch
        wt1.commit(message='rev 1-1', rev_id='1-1')
        dir_2 = br1.bzrdir.sprout('br2')
        br2 = dir_2.open_branch()
        wt1.commit(message='rev 1-2', rev_id='1-2')
        wt2 = dir_2.open_workingtree()
        wt2.commit(message='rev 2-1', rev_id='2-1')
        wt2.merge_from_branch(br1)
        self._check_revs_present(br2)

    def _check_revs_present(self, br2):
        for rev_id in '1-1', '1-2', '2-1':
            self.assertTrue(br2.repository.has_revision(rev_id))
            rev = br2.repository.get_revision(rev_id)
            self.assertEqual(rev.revision_id, rev_id)
            self.assertTrue(br2.repository.get_inventory(rev_id))

class TestMergeFileHistory(TestCaseWithTransport):

    def setUp(self):
        super(TestMergeFileHistory, self).setUp()
        wt1 = self.make_branch_and_tree('br1')
        br1 = wt1.branch
        self.build_tree_contents([('br1/file', 'original contents\n')])
        wt1.add('file', 'this-file-id')
        wt1.commit(message='rev 1-1', rev_id='1-1')
        dir_2 = br1.bzrdir.sprout('br2')
        br2 = dir_2.open_branch()
        wt2 = dir_2.open_workingtree()
        self.build_tree_contents([('br1/file', 'original from 1\n')])
        wt1.commit(message='rev 1-2', rev_id='1-2')
        self.build_tree_contents([('br1/file', 'agreement\n')])
        wt1.commit(message='rev 1-3', rev_id='1-3')
        self.build_tree_contents([('br2/file', 'contents in 2\n')])
        wt2.commit(message='rev 2-1', rev_id='2-1')
        self.build_tree_contents([('br2/file', 'agreement\n')])
        wt2.commit(message='rev 2-2', rev_id='2-2')

    def test_merge_fetches_file_history(self):
        """Merge brings across file histories"""
        br2 = Branch.open('br2')
        br1 = Branch.open('br1')
        wt2 = WorkingTree.open('br2').merge_from_branch(br1)
        br2.lock_read()
        self.addCleanup(br2.unlock)
        for rev_id, text in [('1-2', 'original from 1\n'),
                             ('1-3', 'agreement\n'),
                             ('2-1', 'contents in 2\n'),
                             ('2-2', 'agreement\n')]:
            self.assertEqualDiff(
                br2.repository.revision_tree(
                    rev_id).get_file_text('this-file-id'), text)

class TestHttpFetch(TestCaseWithWebserver):
    # FIXME RBC 20060124 this really isn't web specific, perhaps an
    # instrumented readonly transport? Can we do an instrumented
    # adapter and use self.get_readonly_url ?

    def test_fetch(self):
        #highest indices a: 5, b: 7
        br_a, br_b = make_branches(self)
        br_rem_a = Branch.open(self.get_readonly_url('branch1'))
        fetch_steps(self, br_rem_a, br_b, br_a)

    def _count_log_matches(self, target, logs):
        """Count the number of times the target file pattern was fetched in an http log"""
        get_succeeds_re = re.compile(
            '.*"GET .*%s HTTP/1.1" 20[06] - "-" "bzr/%s' %
            (target, bzrlib.__version__))
        c = 0
        for line in logs:
            if get_succeeds_re.match(line):
                c += 1
        return c
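
    # Illustrative only: a successful fetch of, say, the inventory index over
    # the test HTTP server produces a log line roughly like the following
    # (host, date and version shown here are hypothetical); the pattern above
    # matches on the "GET ... 20x" status and the bzr User-Agent string:
    #   127.0.0.1 - - [date] "GET /source/.bzr/repository/inventory.kndx
    #   HTTP/1.1" 200 - "-" "bzr/1.12"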

    def test_weaves_are_retrieved_once(self):
        self.build_tree(("source/", "source/file", "target/"))
        # This test depends on knit data storage.
        wt = self.make_branch_and_tree('source', format='dirstate-tags')
        wt.add(["file"], ["id"])
        wt.commit("added file")
        open("source/file", 'w').write("blah\n")
        wt.commit("changed file")
        target = BzrDir.create_branch_and_repo("target/")
        source = Branch.open(self.get_readonly_url("source/"))
        target.fetch(source)
        # this is the path to the literal file. As format changes
        # occur it needs to be updated. FIXME: ask the store for the
        # path.
        self.log("web server logs are:")
        http_logs = self.get_readonly_server().logs
        self.log('\n'.join(http_logs))
        # unfortunately this log entry is branch format specific. We could
        # factor out the 'what files does this format use' to a method on the
        # repository, which would let us do this generically. RBC 20060419
        # RBC 20080408: Or perhaps we can assert that no files are fully read
        # twice?
        self.assertEqual(1, self._count_log_matches('/ce/id.kndx', http_logs))
        self.assertEqual(1, self._count_log_matches('/ce/id.knit', http_logs))
        self.assertEqual(1, self._count_log_matches('inventory.kndx', http_logs))
        # this r-h check test will prevent regressions, but it currently already
        # passes, before the patch to cache-rh is applied :[
        self.assertTrue(1 >= self._count_log_matches('revision-history',
                                                     http_logs))
        self.assertTrue(1 >= self._count_log_matches('last-revision',
                                                     http_logs))
        # FIXME naughty poking in there.
        self.get_readonly_server().logs = []
        # check there is nothing more to fetch.  We take care to re-use the
        # existing transport so that the request logs we're about to examine
        # aren't cluttered with redundant probes for a smart server.
        # XXX: Perhaps this further parameterisation: test http with smart
        # server, and test http without smart server?
        source = Branch.open(
            self.get_readonly_url("source/"),
            possible_transports=[source.bzrdir.root_transport])
        target.fetch(source)
        # should make just two requests
        http_logs = self.get_readonly_server().logs
        self.log("web server logs are:")
        self.log('\n'.join(http_logs))
        self.assertEqual(1, self._count_log_matches('branch-format', http_logs))
        self.assertEqual(1, self._count_log_matches('branch/format', http_logs))
        self.assertEqual(1, self._count_log_matches('repository/format',
            http_logs))
        self.assertEqual(1, self._count_log_matches('revisions.kndx',
            http_logs))
        self.assertTrue(1 >= self._count_log_matches('revision-history',
                                                     http_logs))
        self.assertTrue(1 >= self._count_log_matches('last-revision',
                                                     http_logs))
        self.assertLength(5, http_logs)

class TestKnitToPackFetch(TestCaseWithTransport):

    def find_get_record_stream(self, calls, expected_count=1):
        """In a list of calls, find the last 'get_record_stream'.

        :param expected_count: The number of calls we should expect to find.
            If a different number is found, an assertion is raised.
        """
        get_record_call = None
        call_count = 0
        for call in calls:
            if call[0] == 'get_record_stream':
                call_count += 1
                get_record_call = call
        self.assertEqual(expected_count, call_count)
        return get_record_call
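
    # For reference: each call recorded by RecordingVersionedFilesDecorator
    # (used in the tests below) is a tuple of the form
    #   ('get_record_stream', keys, ordering, include_delta_closure),
    # which is why the assertions below compare against tuples such as
    # ('get_record_stream', [...], target._format._fetch_order, <bool>).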

    def test_fetch_with_deltas_no_delta_closure(self):
        tree = self.make_branch_and_tree('source', format='dirstate')
        target = self.make_repository('target', format='pack-0.92')
        self.build_tree(['source/file'])
        tree.set_root_id('root-id')
        tree.add('file', 'file-id')
        tree.commit('one', rev_id='rev-one')
        source = tree.branch.repository
        source.texts = versionedfile.RecordingVersionedFilesDecorator(
            source.texts)
        source.signatures = versionedfile.RecordingVersionedFilesDecorator(
            source.signatures)
        source.revisions = versionedfile.RecordingVersionedFilesDecorator(
            source.revisions)
        source.inventories = versionedfile.RecordingVersionedFilesDecorator(
            source.inventories)

        self.assertTrue(target._format._fetch_uses_deltas)
        target.fetch(source, revision_id='rev-one')
        self.assertEqual(('get_record_stream', [('file-id', 'rev-one')],
                          target._format._fetch_order, False),
                         self.find_get_record_stream(source.texts.calls))
        self.assertEqual(('get_record_stream', [('rev-one',)],
                          target._format._fetch_order, False),
                         self.find_get_record_stream(source.inventories.calls, 2))
        self.assertEqual(('get_record_stream', [('rev-one',)],
                          target._format._fetch_order, False),
                         self.find_get_record_stream(source.revisions.calls))
        # XXX: Signatures is special, and slightly broken. The
        # standard item_keys_introduced_by actually does a lookup for every
        # signature to see if it exists, rather than waiting to do them all at
        # once at the end. The fetch code then does an all-at-once and just
        # allows for some of them to be missing.
        # So we know there will be extra calls, but the *last* one is the one
        # we care about.
        signature_calls = source.signatures.calls[-1:]
        self.assertEqual(('get_record_stream', [('rev-one',)],
                          target._format._fetch_order, False),
                         self.find_get_record_stream(signature_calls))

    def test_fetch_no_deltas_with_delta_closure(self):
        tree = self.make_branch_and_tree('source', format='dirstate')
        target = self.make_repository('target', format='pack-0.92')
        self.build_tree(['source/file'])
        tree.set_root_id('root-id')
        tree.add('file', 'file-id')
        tree.commit('one', rev_id='rev-one')
        source = tree.branch.repository
        source.texts = versionedfile.RecordingVersionedFilesDecorator(
            source.texts)
        source.signatures = versionedfile.RecordingVersionedFilesDecorator(
            source.signatures)
        source.revisions = versionedfile.RecordingVersionedFilesDecorator(
            source.revisions)
        source.inventories = versionedfile.RecordingVersionedFilesDecorator(
            source.inventories)
        # XXX: This won't work in general, but for the dirstate format it does.
        old_fetch_uses_deltas_setting = target._format._fetch_uses_deltas
        def restore():
            target._format._fetch_uses_deltas = old_fetch_uses_deltas_setting
        self.addCleanup(restore)
        target._format._fetch_uses_deltas = False
        target.fetch(source, revision_id='rev-one')
        self.assertEqual(('get_record_stream', [('file-id', 'rev-one')],
                          target._format._fetch_order, True),
                         self.find_get_record_stream(source.texts.calls))
        self.assertEqual(('get_record_stream', [('rev-one',)],
                          target._format._fetch_order, True),
                         self.find_get_record_stream(source.inventories.calls, 2))
        self.assertEqual(('get_record_stream', [('rev-one',)],
                          target._format._fetch_order, True),
                         self.find_get_record_stream(source.revisions.calls))
        # XXX: Signatures is special, and slightly broken. The
        # standard item_keys_introduced_by actually does a lookup for every
        # signature to see if it exists, rather than waiting to do them all at
        # once at the end. The fetch code then does an all-at-once and just
        # allows for some of them to be missing.
        # So we know there will be extra calls, but the *last* one is the one
        # we care about.
        signature_calls = source.signatures.calls[-1:]
        self.assertEqual(('get_record_stream', [('rev-one',)],
                          target._format._fetch_order, True),
                         self.find_get_record_stream(signature_calls))

    def test_fetch_revisions_with_deltas_into_pack(self):
        # See BUG #261339, dev versions of bzr could accidentally create deltas
        # in revision texts in knit branches (when fetching from packs). So we
        # ensure that *if* a knit repository has a delta in revisions, that it
        # gets properly expanded back into a fulltext when stored in the pack
        # file.
        tree = self.make_branch_and_tree('source', format='dirstate')
        target = self.make_repository('target', format='pack-0.92')
        self.build_tree(['source/file'])
        tree.set_root_id('root-id')
        tree.add('file', 'file-id')
        tree.commit('one', rev_id='rev-one')
        # Hack the KVF for revisions so that it "accidentally" allows a delta
        tree.branch.repository.revisions._max_delta_chain = 200
        tree.commit('two', rev_id='rev-two')
        source = tree.branch.repository
        # Ensure that we stored a delta
        source.lock_read()
        self.addCleanup(source.unlock)
        record = source.revisions.get_record_stream([('rev-two',)],
                                                    'unordered', False).next()
        self.assertEqual('knit-delta-gz', record.storage_kind)
        target.fetch(tree.branch.repository, revision_id='rev-two')
        # The record should get expanded back to a fulltext
        target.lock_read()
        self.addCleanup(target.unlock)
        record = target.revisions.get_record_stream([('rev-two',)],
                                                    'unordered', False).next()
        self.assertEqual('knit-ft-gz', record.storage_kind)

    def test_fetch_with_fallback_and_merge(self):
        builder = self.make_branch_builder('source', format='pack-0.92')
        builder.start_series()
        # graph
        #   A
        #   |\
        #   B C
        #   | |
        #   | D
        #   | |
        #   | E
        #    \|
        #     F
        # A & B are present in the base (stacked-on) repository, A-E are
        # present in the source.
        # This reproduces bug #304841
        # We need a large enough inventory that total size of compressed deltas
        # is shorter than the size of a compressed fulltext. We have to use
        # random ids because otherwise the inventory fulltext compresses too
        # well and the deltas get bigger.
        to_add = [
            ('add', ('', 'TREE_ROOT', 'directory', None))]
        for i in xrange(10):
            fname = 'file%03d' % (i,)
            fileid = '%s-%s' % (fname, osutils.rand_chars(64))
            to_add.append(('add', (fname, fileid, 'file', 'content\n')))
        builder.build_snapshot('A', None, to_add)
        builder.build_snapshot('B', ['A'], [])
        builder.build_snapshot('C', ['A'], [])
        builder.build_snapshot('D', ['C'], [])
        builder.build_snapshot('E', ['D'], [])
        builder.build_snapshot('F', ['E', 'B'], [])
        builder.finish_series()
        source_branch = builder.get_branch()
        source_branch.bzrdir.sprout('base', revision_id='B')
        target_branch = self.make_branch('target', format='1.6')
        target_branch.set_stacked_on_url('../base')
        source = source_branch.repository
        source.lock_read()
        self.addCleanup(source.unlock)
        source.inventories = versionedfile.OrderingVersionedFilesDecorator(
            source.inventories,
            key_priority={('E',): 1, ('D',): 2, ('C',): 4,
                          ('F',): 3})
        # Ensure that the content is yielded in the proper order, and given as
        # deltas.
        records = [(record.key, record.storage_kind)
                   for record in source.inventories.get_record_stream(
                       [('D',), ('C',), ('E',), ('F',)], 'unordered', False)]
        self.assertEqual([(('E',), 'knit-delta-gz'), (('D',), 'knit-delta-gz'),
                          (('F',), 'knit-delta-gz'), (('C',), 'knit-delta-gz')],
                         records)

        target_branch.lock_write()
        self.addCleanup(target_branch.unlock)
        target = target_branch.repository
        target.fetch(source, revision_id='F')
        # 'C' should be expanded to a fulltext, but D and E should still be
        # deltas.
        stream = target.inventories.get_record_stream(
            [('C',), ('D',), ('E',), ('F',)],
            'unordered', False)
        kinds = dict((record.key, record.storage_kind) for record in stream)
        self.assertEqual({('C',): 'knit-ft-gz', ('D',): 'knit-delta-gz',
                          ('E',): 'knit-delta-gz', ('F',): 'knit-delta-gz'},
                         kinds)

class Test1To2Fetch(TestCaseWithTransport):
    """Tests for Model1To2 failure modes"""

    def make_tree_and_repo(self):
        self.tree = self.make_branch_and_tree('tree', format='pack-0.92')
        self.repo = self.make_repository('rich-repo', format='rich-root-pack')
        self.repo.lock_write()
        self.addCleanup(self.repo.unlock)

    def do_fetch_order_test(self, first, second):
        """Test that fetch works no matter what the set order of revision is.

        This test depends on the order of items in a set, which is
        implementation-dependent, so we test A, B and then B, A.
        """
        self.make_tree_and_repo()
        self.tree.commit('Commit 1', rev_id=first)
        self.tree.commit('Commit 2', rev_id=second)
        self.repo.fetch(self.tree.branch.repository, second)

    def test_fetch_order_AB(self):
        """See do_fetch_order_test"""
        self.do_fetch_order_test('A', 'B')

    def test_fetch_order_BA(self):
        """See do_fetch_order_test"""
        self.do_fetch_order_test('B', 'A')

    def get_parents(self, file_id, revision_id):
        self.repo.lock_read()
        try:
            parent_map = self.repo.texts.get_parent_map([(file_id, revision_id)])
            return parent_map[(file_id, revision_id)]
        finally:
            self.repo.unlock()
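
    # For reference: get_parents() returns the parent keys recorded for one
    # text in the target repository's texts index, e.g.
    # (('TREE_ROOT', 'first-id'),) in test_two_fetches below, or an empty
    # tuple when the fetched root text has no parents.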

    def test_fetch_ghosts(self):
        self.make_tree_and_repo()
        self.tree.commit('first commit', rev_id='left-parent')
        self.tree.add_parent_tree_id('ghost-parent')
        fork = self.tree.bzrdir.sprout('fork', 'null:').open_workingtree()
        fork.commit('not a ghost', rev_id='not-ghost-parent')
        self.tree.branch.repository.fetch(fork.branch.repository,
                                          'not-ghost-parent')
        self.tree.add_parent_tree_id('not-ghost-parent')
        self.tree.commit('second commit', rev_id='second-id')
        self.repo.fetch(self.tree.branch.repository, 'second-id')
        root_id = self.tree.get_root_id()
        self.assertEqual(
            ((root_id, 'left-parent'), (root_id, 'not-ghost-parent')),
            self.get_parents(root_id, 'second-id'))

    def make_two_commits(self, change_root, fetch_twice):
        self.make_tree_and_repo()
        self.tree.commit('first commit', rev_id='first-id')
        if change_root:
            self.tree.set_root_id('unique-id')
        self.tree.commit('second commit', rev_id='second-id')
        if fetch_twice:
            self.repo.fetch(self.tree.branch.repository, 'first-id')
        self.repo.fetch(self.tree.branch.repository, 'second-id')

    def test_fetch_changed_root(self):
        self.make_two_commits(change_root=True, fetch_twice=False)
        self.assertEqual((), self.get_parents('unique-id', 'second-id'))

    def test_two_fetch_changed_root(self):
        self.make_two_commits(change_root=True, fetch_twice=True)
        self.assertEqual((), self.get_parents('unique-id', 'second-id'))

    def test_two_fetches(self):
        self.make_two_commits(change_root=False, fetch_twice=True)
        self.assertEqual((('TREE_ROOT', 'first-id'),),
                         self.get_parents('TREE_ROOT', 'second-id'))