# Copyright (C) 2005 Canonical Ltd
# Johan Rydberg <jrydberg@gnu.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

# TODO: might be nice to create a versionedfile with some type of corruption
# considered typical and check that it can be detected/corrected.
24
from StringIO import StringIO
32
from bzrlib.errors import (
34
RevisionAlreadyPresent,
37
from bzrlib.knit import (
42
from bzrlib.symbol_versioning import one_four
43
from bzrlib.tests import TestCaseWithMemoryTransport, TestSkipped
44
from bzrlib.tests.http_utils import TestCaseWithWebserver
45
from bzrlib.trace import mutter
46
from bzrlib.transport import get_transport
47
from bzrlib.transport.memory import MemoryTransport
48
from bzrlib.tsort import topo_sort
49
import bzrlib.versionedfile as versionedfile
50
from bzrlib.weave import WeaveFile
51
from bzrlib.weavefile import read_weave, write_weave
54
class VersionedFileTestMixIn(object):
55
"""A mixin test class for testing VersionedFiles.
57
This is not an adaptor-style test at this point because
58
theres no dynamic substitution of versioned file implementations,
59
they are strictly controlled by their owning repositories.
64
f.add_lines('r0', [], ['a\n', 'b\n'])
65
f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
67
versions = f.versions()
68
self.assertTrue('r0' in versions)
69
self.assertTrue('r1' in versions)
70
self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
71
self.assertEquals(f.get_text('r0'), 'a\nb\n')
72
self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
73
self.assertEqual(2, len(f))
74
self.assertEqual(2, f.num_versions())
76
self.assertRaises(RevisionNotPresent,
77
f.add_lines, 'r2', ['foo'], [])
78
self.assertRaises(RevisionAlreadyPresent,
79
f.add_lines, 'r1', [], [])
81
# this checks that reopen with create=True does not break anything.
82
f = self.reopen_file(create=True)
85
def test_adds_with_parent_texts(self):
88
_, _, parent_texts['r0'] = f.add_lines('r0', [], ['a\n', 'b\n'])
90
_, _, parent_texts['r1'] = f.add_lines_with_ghosts('r1',
91
['r0', 'ghost'], ['b\n', 'c\n'], parent_texts=parent_texts)
92
except NotImplementedError:
93
# if the format doesn't support ghosts, just add normally.
94
_, _, parent_texts['r1'] = f.add_lines('r1',
95
['r0'], ['b\n', 'c\n'], parent_texts=parent_texts)
96
f.add_lines('r2', ['r1'], ['c\n', 'd\n'], parent_texts=parent_texts)
97
self.assertNotEqual(None, parent_texts['r0'])
98
self.assertNotEqual(None, parent_texts['r1'])
100
versions = f.versions()
101
self.assertTrue('r0' in versions)
102
self.assertTrue('r1' in versions)
103
self.assertTrue('r2' in versions)
104
self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
105
self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
106
self.assertEquals(f.get_lines('r2'), ['c\n', 'd\n'])
107
self.assertEqual(3, f.num_versions())
108
origins = f.annotate('r1')
109
self.assertEquals(origins[0][0], 'r0')
110
self.assertEquals(origins[1][0], 'r1')
111
origins = f.annotate('r2')
112
self.assertEquals(origins[0][0], 'r1')
113
self.assertEquals(origins[1][0], 'r2')
116
f = self.reopen_file()
119
def test_add_unicode_content(self):
120
# unicode content is not permitted in versioned files.
121
# versioned files version sequences of bytes only.
123
self.assertRaises(errors.BzrBadParameterUnicode,
124
vf.add_lines, 'a', [], ['a\n', u'b\n', 'c\n'])
126
(errors.BzrBadParameterUnicode, NotImplementedError),
127
vf.add_lines_with_ghosts, 'a', [], ['a\n', u'b\n', 'c\n'])
129
def test_add_follows_left_matching_blocks(self):
130
"""If we change left_matching_blocks, delta changes
132
Note: There are multiple correct deltas in this case, because
133
we start with 1 "a" and we get 3.
136
if isinstance(vf, WeaveFile):
137
raise TestSkipped("WeaveFile ignores left_matching_blocks")
138
vf.add_lines('1', [], ['a\n'])
139
vf.add_lines('2', ['1'], ['a\n', 'a\n', 'a\n'],
140
left_matching_blocks=[(0, 0, 1), (1, 3, 0)])
141
self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('2'))
142
vf.add_lines('3', ['1'], ['a\n', 'a\n', 'a\n'],
143
left_matching_blocks=[(0, 2, 1), (1, 3, 0)])
144
self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('3'))
146
def test_inline_newline_throws(self):
147
# \r characters are not permitted in lines being added
149
self.assertRaises(errors.BzrBadParameterContainsNewline,
150
vf.add_lines, 'a', [], ['a\n\n'])
152
(errors.BzrBadParameterContainsNewline, NotImplementedError),
153
vf.add_lines_with_ghosts, 'a', [], ['a\n\n'])
154
# but inline CR's are allowed
155
vf.add_lines('a', [], ['a\r\n'])
157
vf.add_lines_with_ghosts('b', [], ['a\r\n'])
158
except NotImplementedError:
161
def test_add_reserved(self):
163
self.assertRaises(errors.ReservedId,
164
vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])
166
def test_add_lines_nostoresha(self):
167
"""When nostore_sha is supplied using old content raises."""
169
empty_text = ('a', [])
170
sample_text_nl = ('b', ["foo\n", "bar\n"])
171
sample_text_no_nl = ('c', ["foo\n", "bar"])
173
for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
174
sha, _, _ = vf.add_lines(version, [], lines)
176
# we now have a copy of all the lines in the vf.
177
for sha, (version, lines) in zip(
178
shas, (empty_text, sample_text_nl, sample_text_no_nl)):
179
self.assertRaises(errors.ExistingContent,
180
vf.add_lines, version + "2", [], lines,
182
# and no new version should have been added.
183
self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
186
def test_add_lines_with_ghosts_nostoresha(self):
187
"""When nostore_sha is supplied using old content raises."""
189
empty_text = ('a', [])
190
sample_text_nl = ('b', ["foo\n", "bar\n"])
191
sample_text_no_nl = ('c', ["foo\n", "bar"])
193
for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
194
sha, _, _ = vf.add_lines(version, [], lines)
196
# we now have a copy of all the lines in the vf.
197
# is the test applicable to this vf implementation?
199
vf.add_lines_with_ghosts('d', [], [])
200
except NotImplementedError:
201
raise TestSkipped("add_lines_with_ghosts is optional")
202
for sha, (version, lines) in zip(
203
shas, (empty_text, sample_text_nl, sample_text_no_nl)):
204
self.assertRaises(errors.ExistingContent,
205
vf.add_lines_with_ghosts, version + "2", [], lines,
207
# and no new version should have been added.
208
self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
211
def test_add_lines_return_value(self):
212
# add_lines should return the sha1 and the text size.
214
empty_text = ('a', [])
215
sample_text_nl = ('b', ["foo\n", "bar\n"])
216
sample_text_no_nl = ('c', ["foo\n", "bar"])
217
# check results for the three cases:
218
for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
219
# the first two elements are the same for all versioned files:
220
# - the digest and the size of the text. For some versioned files
221
# additional data is returned in additional tuple elements.
222
result = vf.add_lines(version, [], lines)
223
self.assertEqual(3, len(result))
224
self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
226
# parents should not affect the result:
227
lines = sample_text_nl[1]
228
self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
229
vf.add_lines('d', ['b', 'c'], lines)[0:2])
231
def test_get_reserved(self):
233
self.assertRaises(errors.ReservedId, vf.get_texts, ['b:'])
234
self.assertRaises(errors.ReservedId, vf.get_lines, 'b:')
235
self.assertRaises(errors.ReservedId, vf.get_text, 'b:')
237
def test_make_mpdiffs(self):
238
from bzrlib import multiparent
239
vf = self.get_file('foo')
240
sha1s = self._setup_for_deltas(vf)
241
new_vf = self.get_file('bar')
242
for version in multiparent.topo_iter(vf):
243
mpdiff = vf.make_mpdiffs([version])[0]
244
new_vf.add_mpdiffs([(version, vf.get_parent_map([version])[version],
245
vf.get_sha1(version), mpdiff)])
246
self.assertEqualDiff(vf.get_text(version),
247
new_vf.get_text(version))
249
def _setup_for_deltas(self, f):
250
self.assertFalse(f.has_version('base'))
251
# add texts that should trip the knit maximum delta chain threshold
252
# as well as doing parallel chains of data in knits.
253
# this is done by two chains of 25 insertions
254
f.add_lines('base', [], ['line\n'])
255
f.add_lines('noeol', ['base'], ['line'])
256
# detailed eol tests:
257
# shared last line with parent no-eol
258
f.add_lines('noeolsecond', ['noeol'], ['line\n', 'line'])
259
# differing last line with parent, both no-eol
260
f.add_lines('noeolnotshared', ['noeolsecond'], ['line\n', 'phone'])
261
# add eol following a noneol parent, change content
262
f.add_lines('eol', ['noeol'], ['phone\n'])
263
# add eol following a noneol parent, no change content
264
f.add_lines('eolline', ['noeol'], ['line\n'])
265
# noeol with no parents:
266
f.add_lines('noeolbase', [], ['line'])
267
# noeol preceeding its leftmost parent in the output:
268
# this is done by making it a merge of two parents with no common
269
# anestry: noeolbase and noeol with the
270
# later-inserted parent the leftmost.
271
f.add_lines('eolbeforefirstparent', ['noeolbase', 'noeol'], ['line'])
272
# two identical eol texts
273
f.add_lines('noeoldup', ['noeol'], ['line'])
275
text_name = 'chain1-'
277
sha1s = {0 :'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
278
1 :'45e21ea146a81ea44a821737acdb4f9791c8abe7',
279
2 :'e1f11570edf3e2a070052366c582837a4fe4e9fa',
280
3 :'26b4b8626da827088c514b8f9bbe4ebf181edda1',
281
4 :'e28a5510be25ba84d31121cff00956f9970ae6f6',
282
5 :'d63ec0ce22e11dcf65a931b69255d3ac747a318d',
283
6 :'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
284
7 :'95c14da9cafbf828e3e74a6f016d87926ba234ab',
285
8 :'779e9a0b28f9f832528d4b21e17e168c67697272',
286
9 :'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
287
10:'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
288
11:'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
289
12:'31a2286267f24d8bedaa43355f8ad7129509ea85',
290
13:'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
291
14:'2c4b1736566b8ca6051e668de68650686a3922f2',
292
15:'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
293
16:'b0d2e18d3559a00580f6b49804c23fea500feab3',
294
17:'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
295
18:'5cf64a3459ae28efa60239e44b20312d25b253f3',
296
19:'1ebed371807ba5935958ad0884595126e8c4e823',
297
20:'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
298
21:'01edc447978004f6e4e962b417a4ae1955b6fe5d',
299
22:'d8d8dc49c4bf0bab401e0298bb5ad827768618bb',
300
23:'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
301
24:'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
302
25:'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
304
for depth in range(26):
305
new_version = text_name + '%s' % depth
306
text = text + ['line\n']
307
f.add_lines(new_version, [next_parent], text)
308
next_parent = new_version
310
text_name = 'chain2-'
312
for depth in range(26):
313
new_version = text_name + '%s' % depth
314
text = text + ['line\n']
315
f.add_lines(new_version, [next_parent], text)
316
next_parent = new_version
319
def test_ancestry(self):
321
self.assertEqual([], f.get_ancestry([]))
322
f.add_lines('r0', [], ['a\n', 'b\n'])
323
f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
324
f.add_lines('r2', ['r0'], ['b\n', 'c\n'])
325
f.add_lines('r3', ['r2'], ['b\n', 'c\n'])
326
f.add_lines('rM', ['r1', 'r2'], ['b\n', 'c\n'])
327
self.assertEqual([], f.get_ancestry([]))
328
versions = f.get_ancestry(['rM'])
329
# there are some possibilities:
333
# so we check indexes
334
r0 = versions.index('r0')
335
r1 = versions.index('r1')
336
r2 = versions.index('r2')
337
self.assertFalse('r3' in versions)
338
rM = versions.index('rM')
339
self.assertTrue(r0 < r1)
340
self.assertTrue(r0 < r2)
341
self.assertTrue(r1 < rM)
342
self.assertTrue(r2 < rM)
344
self.assertRaises(RevisionNotPresent,
345
f.get_ancestry, ['rM', 'rX'])
347
self.assertEqual(set(f.get_ancestry('rM')),
348
set(f.get_ancestry('rM', topo_sorted=False)))
350
def test_mutate_after_finish(self):
352
f.transaction_finished()
353
self.assertRaises(errors.OutSideTransaction, f.add_lines, '', [], [])
354
self.assertRaises(errors.OutSideTransaction, f.add_lines_with_ghosts, '', [], [])
355
self.assertRaises(errors.OutSideTransaction, f.join, '')
356
self.assertRaises(errors.OutSideTransaction, f.clone_text, 'base', 'bar', ['foo'])
358
def test_clear_cache(self):
360
# on a new file it should not error
362
# and after adding content, doing a clear_cache and a get should work.
363
f.add_lines('0', [], ['a'])
365
self.assertEqual(['a'], f.get_lines('0'))
367
def test_clone_text(self):
369
f.add_lines('r0', [], ['a\n', 'b\n'])
370
f.clone_text('r1', 'r0', ['r0'])
372
self.assertEquals(f.get_lines('r1'), f.get_lines('r0'))
373
self.assertEquals(f.get_lines('r1'), ['a\n', 'b\n'])
374
self.assertEqual({'r1':('r0',)}, f.get_parent_map(['r1']))
375
self.assertRaises(RevisionNotPresent,
376
f.clone_text, 'r2', 'rX', [])
377
self.assertRaises(RevisionAlreadyPresent,
378
f.clone_text, 'r1', 'r0', [])
380
verify_file(self.reopen_file())
382
def test_copy_to(self):
384
f.add_lines('0', [], ['a\n'])
385
t = MemoryTransport()
387
for suffix in f.__class__.get_suffixes():
388
self.assertTrue(t.has('foo' + suffix))
390
def test_get_suffixes(self):
393
self.assertEqual(f.__class__.get_suffixes(), f.__class__.get_suffixes())
394
# and should be a list
395
self.assertTrue(isinstance(f.__class__.get_suffixes(), list))
397
def build_graph(self, file, graph):
398
for node in topo_sort(graph.items()):
399
file.add_lines(node, graph[node], [])
401
def test_get_graph(self):
407
self.build_graph(f, graph)
408
self.assertEqual(graph, f.get_graph())
410
def test_get_graph_partial(self):
418
complex_graph.update(simple_a)
423
complex_graph.update(simple_b)
430
complex_graph.update(simple_gam)
432
simple_b_gam.update(simple_gam)
433
simple_b_gam.update(simple_b)
434
self.build_graph(f, complex_graph)
435
self.assertEqual(simple_a, f.get_graph(['a']))
436
self.assertEqual(simple_b, f.get_graph(['b']))
437
self.assertEqual(simple_gam, f.get_graph(['gam']))
438
self.assertEqual(simple_b_gam, f.get_graph(['b', 'gam']))
440
def test_get_parents(self):
442
f.add_lines('r0', [], ['a\n', 'b\n'])
443
f.add_lines('r1', [], ['a\n', 'b\n'])
444
f.add_lines('r2', [], ['a\n', 'b\n'])
445
f.add_lines('r3', [], ['a\n', 'b\n'])
446
f.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n'])
447
self.assertEqual(['r0', 'r1', 'r2', 'r3'],
448
self.applyDeprecated(one_four, f.get_parents, 'm'))
449
self.assertRaises(RevisionNotPresent,
450
self.applyDeprecated, one_four, f.get_parents, 'y')
452
def test_get_parent_map(self):
454
f.add_lines('r0', [], ['a\n', 'b\n'])
456
{'r0':()}, f.get_parent_map(['r0']))
457
f.add_lines('r1', ['r0'], ['a\n', 'b\n'])
459
{'r1':('r0',)}, f.get_parent_map(['r1']))
463
f.get_parent_map(['r0', 'r1']))
464
f.add_lines('r2', [], ['a\n', 'b\n'])
465
f.add_lines('r3', [], ['a\n', 'b\n'])
466
f.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n'])
468
{'m':('r0', 'r1', 'r2', 'r3')}, f.get_parent_map(['m']))
469
self.assertEqual({}, f.get_parent_map('y'))
473
f.get_parent_map(['r0', 'y', 'r1']))
475
def test_annotate(self):
477
f.add_lines('r0', [], ['a\n', 'b\n'])
478
f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
479
origins = f.annotate('r1')
480
self.assertEquals(origins[0][0], 'r1')
481
self.assertEquals(origins[1][0], 'r0')
483
self.assertRaises(RevisionNotPresent,
486
def test_detection(self):
487
# Test weaves detect corruption.
489
# Weaves contain a checksum of their texts.
490
# When a text is extracted, this checksum should be
493
w = self.get_file_corrupted_text()
495
self.assertEqual('hello\n', w.get_text('v1'))
496
self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
497
self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
498
self.assertRaises(errors.WeaveInvalidChecksum, w.check)
500
w = self.get_file_corrupted_checksum()
502
self.assertEqual('hello\n', w.get_text('v1'))
503
self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
504
self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
505
self.assertRaises(errors.WeaveInvalidChecksum, w.check)
507
def get_file_corrupted_text(self):
508
"""Return a versioned file with corrupt text but valid metadata."""
509
raise NotImplementedError(self.get_file_corrupted_text)
511
def reopen_file(self, name='foo'):
512
"""Open the versioned file from disk again."""
513
raise NotImplementedError(self.reopen_file)
515
def test_iter_parents(self):
516
"""iter_parents returns the parents for many nodes."""
520
f.add_lines('r0', [], ['a\n', 'b\n'])
522
f.add_lines('r1', ['r0'], ['a\n', 'b\n'])
524
f.add_lines('r2', ['r1', 'r0'], ['a\n', 'b\n'])
526
# cases: each sample data individually:
527
self.assertEqual(set([('r0', ())]),
528
set(f.iter_parents(['r0'])))
529
self.assertEqual(set([('r1', ('r0', ))]),
530
set(f.iter_parents(['r1'])))
531
self.assertEqual(set([('r2', ('r1', 'r0'))]),
532
set(f.iter_parents(['r2'])))
533
# no nodes returned for a missing node
534
self.assertEqual(set(),
535
set(f.iter_parents(['missing'])))
536
# 1 node returned with missing nodes skipped
537
self.assertEqual(set([('r1', ('r0', ))]),
538
set(f.iter_parents(['ghost1', 'r1', 'ghost'])))
540
self.assertEqual(set([('r0', ()), ('r1', ('r0', ))]),
541
set(f.iter_parents(['r0', 'r1'])))
542
# 2 nodes returned, missing skipped
543
self.assertEqual(set([('r0', ()), ('r1', ('r0', ))]),
544
set(f.iter_parents(['a', 'r0', 'b', 'r1', 'c'])))
546
def test_iter_lines_added_or_present_in_versions(self):
547
# test that we get at least an equalset of the lines added by
548
# versions in the weave
549
# the ordering here is to make a tree so that dumb searches have
550
# more changes to muck up.
552
class InstrumentedProgress(progress.DummyProgress):
556
progress.DummyProgress.__init__(self)
559
def update(self, msg=None, current=None, total=None):
560
self.updates.append((msg, current, total))
563
# add a base to get included
564
vf.add_lines('base', [], ['base\n'])
565
# add a ancestor to be included on one side
566
vf.add_lines('lancestor', [], ['lancestor\n'])
567
# add a ancestor to be included on the other side
568
vf.add_lines('rancestor', ['base'], ['rancestor\n'])
569
# add a child of rancestor with no eofile-nl
570
vf.add_lines('child', ['rancestor'], ['base\n', 'child\n'])
571
# add a child of lancestor and base to join the two roots
572
vf.add_lines('otherchild',
573
['lancestor', 'base'],
574
['base\n', 'lancestor\n', 'otherchild\n'])
575
def iter_with_versions(versions, expected):
576
# now we need to see what lines are returned, and how often.
578
progress = InstrumentedProgress()
579
# iterate over the lines
580
for line in vf.iter_lines_added_or_present_in_versions(versions,
582
lines.setdefault(line, 0)
584
if []!= progress.updates:
585
self.assertEqual(expected, progress.updates)
587
lines = iter_with_versions(['child', 'otherchild'],
588
[('Walking content.', 0, 2),
589
('Walking content.', 1, 2),
590
('Walking content.', 2, 2)])
591
# we must see child and otherchild
592
self.assertTrue(lines[('child\n', 'child')] > 0)
593
self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
594
# we dont care if we got more than that.
597
lines = iter_with_versions(None, [('Walking content.', 0, 5),
598
('Walking content.', 1, 5),
599
('Walking content.', 2, 5),
600
('Walking content.', 3, 5),
601
('Walking content.', 4, 5),
602
('Walking content.', 5, 5)])
603
# all lines must be seen at least once
604
self.assertTrue(lines[('base\n', 'base')] > 0)
605
self.assertTrue(lines[('lancestor\n', 'lancestor')] > 0)
606
self.assertTrue(lines[('rancestor\n', 'rancestor')] > 0)
607
self.assertTrue(lines[('child\n', 'child')] > 0)
608
self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
610
def test_add_lines_with_ghosts(self):
611
# some versioned file formats allow lines to be added with parent
612
# information that is > than that in the format. Formats that do
613
# not support this need to raise NotImplementedError on the
614
# add_lines_with_ghosts api.
616
# add a revision with ghost parents
617
# The preferred form is utf8, but we should translate when needed
618
parent_id_unicode = u'b\xbfse'
619
parent_id_utf8 = parent_id_unicode.encode('utf8')
621
vf.add_lines_with_ghosts('notbxbfse', [parent_id_utf8], [])
622
except NotImplementedError:
623
# check the other ghost apis are also not implemented
624
self.assertRaises(NotImplementedError, vf.get_ancestry_with_ghosts, ['foo'])
625
self.assertRaises(NotImplementedError, vf.get_parents_with_ghosts, 'foo')
627
vf = self.reopen_file()
628
# test key graph related apis: getncestry, _graph, get_parents
630
# - these are ghost unaware and must not be reflect ghosts
631
self.assertEqual(['notbxbfse'], vf.get_ancestry('notbxbfse'))
633
self.applyDeprecated(one_four, vf.get_parents, 'notbxbfse'))
634
self.assertEqual({'notbxbfse':()}, vf.get_graph())
635
self.assertFalse(vf.has_version(parent_id_utf8))
636
# we have _with_ghost apis to give us ghost information.
637
self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry_with_ghosts(['notbxbfse']))
638
self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
639
self.assertEqual({'notbxbfse':(parent_id_utf8,)},
640
self.applyDeprecated(one_four, vf.get_graph_with_ghosts))
641
self.assertTrue(self.applyDeprecated(one_four, vf.has_ghost,
643
# if we add something that is a ghost of another, it should correct the
644
# results of the prior apis
645
vf.add_lines(parent_id_utf8, [], [])
646
self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry(['notbxbfse']))
647
self.assertEqual({'notbxbfse':(parent_id_utf8,)},
648
vf.get_parent_map(['notbxbfse']))
649
self.assertEqual({parent_id_utf8:(),
650
'notbxbfse':(parent_id_utf8, ),
653
self.assertTrue(vf.has_version(parent_id_utf8))
654
# we have _with_ghost apis to give us ghost information.
655
self.assertEqual([parent_id_utf8, 'notbxbfse'],
656
vf.get_ancestry_with_ghosts(['notbxbfse']))
657
self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
658
self.assertEqual({parent_id_utf8:(),
659
'notbxbfse':(parent_id_utf8,),
661
self.applyDeprecated(one_four, vf.get_graph_with_ghosts))
662
self.assertFalse(self.applyDeprecated(one_four, vf.has_ghost,
665
def test_add_lines_with_ghosts_after_normal_revs(self):
666
# some versioned file formats allow lines to be added with parent
667
# information that is > than that in the format. Formats that do
668
# not support this need to raise NotImplementedError on the
669
# add_lines_with_ghosts api.
671
# probe for ghost support
673
vf.add_lines_with_ghosts('base', [], ['line\n', 'line_b\n'])
674
except NotImplementedError:
676
vf.add_lines_with_ghosts('references_ghost',
678
['line\n', 'line_b\n', 'line_c\n'])
679
origins = vf.annotate('references_ghost')
680
self.assertEquals(('base', 'line\n'), origins[0])
681
self.assertEquals(('base', 'line_b\n'), origins[1])
682
self.assertEquals(('references_ghost', 'line_c\n'), origins[2])
684
def test_readonly_mode(self):
685
transport = get_transport(self.get_url('.'))
686
factory = self.get_factory()
687
vf = factory('id', transport, 0777, create=True, access_mode='w')
688
vf = factory('id', transport, access_mode='r')
689
self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
690
self.assertRaises(errors.ReadOnlyError,
691
vf.add_lines_with_ghosts,
695
self.assertRaises(errors.ReadOnlyError, vf.join, 'base')
696
self.assertRaises(errors.ReadOnlyError, vf.clone_text, 'base', 'bar', ['foo'])
698
def test_get_sha1(self):
699
# check the sha1 data is available
702
vf.add_lines('a', [], ['a\n'])
703
# the same file, different metadata
704
vf.add_lines('b', ['a'], ['a\n'])
705
# a file differing only in last newline.
706
vf.add_lines('c', [], ['a'])
708
'3f786850e387550fdab836ed7e6dc881de23001b', vf.get_sha1('a'))
710
'3f786850e387550fdab836ed7e6dc881de23001b', vf.get_sha1('b'))
712
'86f7e437faa5a7fce15d1ddcb9eaeaea377667b8', vf.get_sha1('c'))
714
self.assertEqual(['3f786850e387550fdab836ed7e6dc881de23001b',
715
'86f7e437faa5a7fce15d1ddcb9eaeaea377667b8',
716
'3f786850e387550fdab836ed7e6dc881de23001b'],
717
vf.get_sha1s(['a', 'c', 'b']))
720
class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):
722
def get_file(self, name='foo'):
723
return WeaveFile(name, get_transport(self.get_url('.')), create=True)
725
def get_file_corrupted_text(self):
726
w = WeaveFile('foo', get_transport(self.get_url('.')), create=True)
727
w.add_lines('v1', [], ['hello\n'])
728
w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])
730
# We are going to invasively corrupt the text
731
# Make sure the internals of weave are the same
732
self.assertEqual([('{', 0)
740
self.assertEqual(['f572d396fae9206628714fb2ce00f72e94f2258f'
741
, '90f265c6e75f1c8f9ab76dcf85528352c5f215ef'
746
w._weave[4] = 'There\n'
749
def get_file_corrupted_checksum(self):
750
w = self.get_file_corrupted_text()
752
w._weave[4] = 'there\n'
753
self.assertEqual('hello\nthere\n', w.get_text('v2'))
755
#Invalid checksum, first digit changed
756
w._sha1s[1] = 'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
759
def reopen_file(self, name='foo', create=False):
760
return WeaveFile(name, get_transport(self.get_url('.')), create=create)
762
def test_no_implicit_create(self):
763
self.assertRaises(errors.NoSuchFile,
766
get_transport(self.get_url('.')))
768
def get_factory(self):
772
class TestKnit(TestCaseWithMemoryTransport, VersionedFileTestMixIn):
774
def get_file(self, name='foo'):
775
return self.get_factory()(name, get_transport(self.get_url('.')),
776
delta=True, create=True)
778
def get_factory(self):
779
return KnitVersionedFile
781
def get_file_corrupted_text(self):
782
knit = self.get_file()
783
knit.add_lines('v1', [], ['hello\n'])
784
knit.add_lines('v2', ['v1'], ['hello\n', 'there\n'])
787
def reopen_file(self, name='foo', create=False):
788
return self.get_factory()(name, get_transport(self.get_url('.')),
792
def test_detection(self):
793
knit = self.get_file()
796
def test_no_implicit_create(self):
797
self.assertRaises(errors.NoSuchFile,
800
get_transport(self.get_url('.')))
803
class TestPlaintextKnit(TestKnit):
804
"""Test a knit with no cached annotations"""
806
def _factory(self, name, transport, file_mode=None, access_mode=None,
807
delta=True, create=False):
808
return KnitVersionedFile(name, transport, file_mode, access_mode,
809
KnitPlainFactory(), delta=delta,
812
def get_factory(self):
816
class TestPlanMergeVersionedFile(TestCaseWithMemoryTransport):
819
TestCaseWithMemoryTransport.setUp(self)
820
self.vf1 = KnitVersionedFile('root', self.get_transport(), create=True)
821
self.vf2 = KnitVersionedFile('root', self.get_transport(), create=True)
822
self.plan_merge_vf = versionedfile._PlanMergeVersionedFile('root',
823
[self.vf1, self.vf2])
825
def test_add_lines(self):
826
self.plan_merge_vf.add_lines('a:', [], [])
827
self.assertRaises(ValueError, self.plan_merge_vf.add_lines, 'a', [],
829
self.assertRaises(ValueError, self.plan_merge_vf.add_lines, 'a:', None,
831
self.assertRaises(ValueError, self.plan_merge_vf.add_lines, 'a:', [],
834
def test_ancestry(self):
835
self.vf1.add_lines('A', [], [])
836
self.vf1.add_lines('B', ['A'], [])
837
self.plan_merge_vf.add_lines('C:', ['B'], [])
838
self.plan_merge_vf.add_lines('D:', ['C:'], [])
839
self.assertEqual(set(['A', 'B', 'C:', 'D:']),
840
self.plan_merge_vf.get_ancestry('D:', topo_sorted=False))
842
def setup_abcde(self):
843
self.vf1.add_lines('A', [], ['a'])
844
self.vf1.add_lines('B', ['A'], ['b'])
845
self.vf2.add_lines('C', [], ['c'])
846
self.vf2.add_lines('D', ['C'], ['d'])
847
self.plan_merge_vf.add_lines('E:', ['B', 'D'], ['e'])
849
def test_ancestry_uses_all_versionedfiles(self):
851
self.assertEqual(set(['A', 'B', 'C', 'D', 'E:']),
852
self.plan_merge_vf.get_ancestry('E:', topo_sorted=False))
854
def test_ancestry_raises_revision_not_present(self):
855
error = self.assertRaises(errors.RevisionNotPresent,
856
self.plan_merge_vf.get_ancestry, 'E:', False)
857
self.assertContainsRe(str(error), '{E:} not present in "root"')
859
def test_get_parents(self):
861
self.assertEqual({'B':('A',)}, self.plan_merge_vf.get_parent_map(['B']))
862
self.assertEqual({'D':('C',)}, self.plan_merge_vf.get_parent_map(['D']))
863
self.assertEqual({'E:':('B', 'D')},
864
self.plan_merge_vf.get_parent_map(['E:']))
865
self.assertEqual({}, self.plan_merge_vf.get_parent_map(['F']))
870
}, self.plan_merge_vf.get_parent_map(['B', 'D', 'E:', 'F']))
872
def test_get_lines(self):
874
self.assertEqual(['a'], self.plan_merge_vf.get_lines('A'))
875
self.assertEqual(['c'], self.plan_merge_vf.get_lines('C'))
876
self.assertEqual(['e'], self.plan_merge_vf.get_lines('E:'))
877
error = self.assertRaises(errors.RevisionNotPresent,
878
self.plan_merge_vf.get_lines, 'F')
879
self.assertContainsRe(str(error), '{F} not present in "root"')
882
class InterString(versionedfile.InterVersionedFile):
883
"""An inter-versionedfile optimised code path for strings.
885
This is for use during testing where we use strings as versionedfiles
886
so that none of the default regsitered interversionedfile classes will
887
match - which lets us test the match logic.
891
def is_compatible(source, target):
892
"""InterString is compatible with strings-as-versionedfiles."""
893
return isinstance(source, str) and isinstance(target, str)
896
# TODO this and the InterRepository core logic should be consolidatable
897
# if we make the registry a separate class though we still need to
898
# test the behaviour in the active registry to catch failure-to-handle-
900
class TestInterVersionedFile(TestCaseWithMemoryTransport):
902
def test_get_default_inter_versionedfile(self):
903
# test that the InterVersionedFile.get(a, b) probes
904
# for a class where is_compatible(a, b) returns
905
# true and returns a default interversionedfile otherwise.
906
# This also tests that the default registered optimised interversionedfile
907
# classes do not barf inappropriately when a surprising versionedfile type
909
dummy_a = "VersionedFile 1."
910
dummy_b = "VersionedFile 2."
911
self.assertGetsDefaultInterVersionedFile(dummy_a, dummy_b)
913
def assertGetsDefaultInterVersionedFile(self, a, b):
914
"""Asserts that InterVersionedFile.get(a, b) -> the default."""
915
inter = versionedfile.InterVersionedFile.get(a, b)
916
self.assertEqual(versionedfile.InterVersionedFile,
918
self.assertEqual(a, inter.source)
919
self.assertEqual(b, inter.target)
921
def test_register_inter_versionedfile_class(self):
922
# test that a optimised code path provider - a
923
# InterVersionedFile subclass can be registered and unregistered
924
# and that it is correctly selected when given a versionedfile
925
# pair that it returns true on for the is_compatible static method
927
dummy_a = "VersionedFile 1."
928
dummy_b = "VersionedFile 2."
929
versionedfile.InterVersionedFile.register_optimiser(InterString)
931
# we should get the default for something InterString returns False
933
self.assertFalse(InterString.is_compatible(dummy_a, None))
934
self.assertGetsDefaultInterVersionedFile(dummy_a, None)
935
# and we should get an InterString for a pair it 'likes'
936
self.assertTrue(InterString.is_compatible(dummy_a, dummy_b))
937
inter = versionedfile.InterVersionedFile.get(dummy_a, dummy_b)
938
self.assertEqual(InterString, inter.__class__)
939
self.assertEqual(dummy_a, inter.source)
940
self.assertEqual(dummy_b, inter.target)
942
versionedfile.InterVersionedFile.unregister_optimiser(InterString)
943
# now we should get the default InterVersionedFile object again.
944
self.assertGetsDefaultInterVersionedFile(dummy_a, dummy_b)
947
class TestReadonlyHttpMixin(object):
    """Mixin checking a versioned file is readable over readonly HTTP.

    Concrete subclasses supply get_file() (a writable versioned file on
    the test transport) and get_factory() (the versioned file class).
    """

    def test_readonly_http_works(self):
        # we should be able to read from http with a versioned file.
        vf = self.get_file()
        # try an empty file access
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
        self.assertEqual([], readonly_vf.versions())
        # now with some content
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
        self.assertEqual(['1', '2'], vf.versions())
        # every version must be retrievable through the readonly side
        for version in readonly_vf.versions():
            readonly_vf.get_lines(version)
964
class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):
    """Run the readonly-HTTP mixin tests against WeaveFile."""

    def get_file(self):
        return WeaveFile('foo', get_transport(self.get_url('.')), create=True)

    def get_factory(self):
        return WeaveFile
973
class TestKnitHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):
    """Run the readonly-HTTP mixin tests against KnitVersionedFile."""

    def get_file(self):
        return KnitVersionedFile('foo', get_transport(self.get_url('.')),
                                 delta=True, create=True)

    def get_factory(self):
        return KnitVersionedFile
983
class MergeCasesMixin(object):
985
def doMerge(self, base, a, b, mp):
    """Add base/a/b as three versions and check the weave merge result.

    :param base: list of lines (without newlines) for the base text.
    :param a: lines for the first derived text.
    :param b: lines for the second derived text.
    :param mp: lines the merge of a and b is expected to produce.
    """
    from cStringIO import StringIO
    from textwrap import dedent

    def addcrlf(x):
        # the fixtures are written without terminators; add them here
        return x + '\n'

    w = self.get_file()
    w.add_lines('text0', [], map(addcrlf, base))
    w.add_lines('text1', ['text0'], map(addcrlf, a))
    w.add_lines('text2', ['text0'], map(addcrlf, b))

    # log_contents is provided by the concrete test class
    self.log_contents(w)

    self.log('merge plan:')
    p = list(w.plan_merge('text1', 'text2'))
    for state, line in p:
        if line:
            self.log('%12s | %s' % (state, line[:-1]))
    self.log('merge:')
    mt = StringIO()
    mt.writelines(w.weave_merge(p))
    mt.seek(0)
    self.log(mt.getvalue())

    mp = map(addcrlf, mp)
    self.assertEqual(mt.readlines(), mp)
1015
def testOneInsert(self):
    """A single insertion on one side merges cleanly."""
    self.doMerge([],
                 ['aa'],
                 [],
                 ['aa'])
1021
def testSeparateInserts(self):
    """Inserts in distinct regions on each side both survive the merge."""
    self.doMerge(['aaa', 'bbb', 'ccc'],
                 ['aaa', 'xxx', 'bbb', 'ccc'],
                 ['aaa', 'bbb', 'yyy', 'ccc'],
                 ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])
1027
def testSameInsert(self):
    """An identical insert on both sides is not duplicated."""
    self.doMerge(['aaa', 'bbb', 'ccc'],
                 ['aaa', 'xxx', 'bbb', 'ccc'],
                 ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'],
                 ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])
1032
# expected result for the overlapped-insert case; TestWeaveMerge
# overrides this with a conflicted expectation
overlappedInsertExpected = ['aaa', 'xxx', 'yyy', 'bbb']

def testOverlappedInsert(self):
    """Overlapping inserts on both sides."""
    self.doMerge(['aaa', 'bbb'],
                 ['aaa', 'xxx', 'yyy', 'bbb'],
                 ['aaa', 'xxx', 'bbb'], self.overlappedInsertExpected)

    # really it ought to reduce this to
    # ['aaa', 'xxx', 'yyy', 'bbb']
1042
def testClashReplace(self):
    """Both sides replace the same line differently -> conflict."""
    self.doMerge(['aaa'],
                 ['xxx'],
                 ['yyy', 'zzz'],
                 ['<<<<<<< ', 'xxx', '=======', 'yyy', 'zzz',
                  '>>>>>>> '])
1049
def testNonClashInsert1(self):
    """One side inserts before a line the other side replaces."""
    self.doMerge(['aaa'],
                 ['xxx', 'aaa'],
                 ['yyy', 'zzz'],
                 ['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz',
                  '>>>>>>> '])
1056
def testNonClashInsert2(self):
    """Only one side changes the text -> clean merge, no conflict."""
    self.doMerge(['aaa'],
                 ['aaa'],
                 ['yyy', 'zzz'],
                 ['yyy', 'zzz'])
1063
def testDeleteAndModify(self):
    """Clashing delete and modification.

    If one side modifies a region and the other deletes it then
    there should be a conflict with one side blank.
    """

    #######################################
    # skipped, not working yet
    return

    self.doMerge(['aaa', 'bbb', 'ccc'],
                 ['aaa', 'ddd', 'ccc'],
                 ['aaa', 'ccc'],
                 ['<<<<<<<< ', 'aaa', '=======', '>>>>>>> ', 'ccc'])
1079
def _test_merge_from_strings(self, base, a, b, expected):
    """Merge a and b (each derived from base) and compare to expected.

    Unlike doMerge, the fixtures here are whole strings (with embedded
    newlines) rather than lists of terminator-less lines.
    """
    w = self.get_file()
    w.add_lines('text0', [], base.splitlines(True))
    w.add_lines('text1', ['text0'], a.splitlines(True))
    w.add_lines('text2', ['text0'], b.splitlines(True))
    self.log('merge plan:')
    p = list(w.plan_merge('text1', 'text2'))
    for state, line in p:
        if line:
            self.log('%12s | %s' % (state, line[:-1]))
    self.log('merge result:')
    result_text = ''.join(w.weave_merge(p))
    self.log(result_text)
    self.assertEqualDiff(result_text, expected)
1094
def test_weave_merge_conflicts(self):
    """A plan consisting of a single new line merges to that line."""
    # does weave merge properly handle plans that end with unchanged?
    result = ''.join(self.get_file().weave_merge([('new-a', 'hello\n')]))
    self.assertEqual(result, 'hello\n')
1099
def test_deletion_extended(self):
1100
"""One side deletes, the other deletes more.
1117
self._test_merge_from_strings(base, a, b, result)
1119
def test_deletion_overlap(self):
1120
"""Delete overlapping regions with no other conflict.
1122
Arguably it'd be better to treat these as agreement, rather than
1123
conflict, but for now conflict is safer.
1151
self._test_merge_from_strings(base, a, b, result)
1153
def test_agreement_deletion(self):
1154
"""Agree to delete some lines, without conflicts."""
1176
self._test_merge_from_strings(base, a, b, result)
1178
def test_sync_on_deletion(self):
1179
"""Specific case of merge where we can synchronize incorrectly.
1181
A previous version of the weave merge concluded that the two versions
1182
agreed on deleting line 2, and this could be a synchronization point.
1183
Line 1 was then considered in isolation, and thought to be deleted on
1186
It's better to consider the whole thing as a disagreement region.
1197
a's replacement line 2
1210
a's replacement line 2
1217
self._test_merge_from_strings(base, a, b, result)
1220
class TestKnitMerge(TestCaseWithMemoryTransport, MergeCasesMixin):
    """Run the MergeCasesMixin merge cases against knits."""

    def get_file(self, name='foo'):
        return KnitVersionedFile(name, get_transport(self.get_url('.')),
                                 delta=True, create=True)

    def log_contents(self, w):
        # knit internals are not interesting to log for these tests
        pass
1230
class TestWeaveMerge(TestCaseWithMemoryTransport, MergeCasesMixin):
    """Run the MergeCasesMixin merge cases against weaves."""

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True)

    def log_contents(self, w):
        # dump the raw weave so failures are easier to diagnose
        self.log('weave is:')
        tmpf = StringIO()
        write_weave(w, tmpf)
        self.log(tmpf.getvalue())

    # weave merge does not reduce the doubled 'xxx' insert, so the
    # overlapped-insert case expects conflict markers here
    overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======',
                                'xxx', '>>>>>>> ', 'bbb']
1245
class TestFormatSignatures(TestCaseWithMemoryTransport):
    """Knit format signature strings distinguish annotated/plain knits."""

    def get_knit_file(self, name, annotated):
        """Create a knit named name, annotated iff annotated is true."""
        if annotated:
            factory = KnitAnnotateFactory()
        else:
            factory = KnitPlainFactory()
        return KnitVersionedFile(
            name, get_transport(self.get_url('.')), create=True,
            factory=factory)

    def test_knit_format_signatures(self):
        """Different formats of knit have different signature strings."""
        knit = self.get_knit_file('a', True)
        self.assertEqual('knit-annotated', knit.get_format_signature())
        knit = self.get_knit_file('p', False)
        self.assertEqual('knit-plain', knit.get_format_signature())