# Copyright (C) 2005 Canonical Ltd
# Johan Rydberg <jrydberg@gnu.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

# TODO: might be nice to create a versionedfile with some type of corruption
# considered typical and check that it can be detected/corrected.
24
from StringIO import StringIO
32
from bzrlib.errors import (
34
RevisionAlreadyPresent,
37
from bzrlib.knit import KnitVersionedFile, \
39
from bzrlib.tests import TestCaseWithTransport
40
from bzrlib.tests.HTTPTestUtil import TestCaseWithWebserver
41
from bzrlib.trace import mutter
42
from bzrlib.transport import get_transport
43
from bzrlib.transport.memory import MemoryTransport
44
from bzrlib.tsort import topo_sort
45
import bzrlib.versionedfile as versionedfile
46
from bzrlib.weave import WeaveFile
47
from bzrlib.weavefile import read_weave, write_weave
50
class VersionedFileTestMixIn(object):
51
"""A mixin test class for testing VersionedFiles.
53
This is not an adaptor-style test at this point because
54
theres no dynamic substitution of versioned file implementations,
55
they are strictly controlled by their owning repositories.
60
f.add_lines('r0', [], ['a\n', 'b\n'])
61
f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
63
versions = f.versions()
64
self.assertTrue('r0' in versions)
65
self.assertTrue('r1' in versions)
66
self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
67
self.assertEquals(f.get_text('r0'), 'a\nb\n')
68
self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
69
self.assertEqual(2, len(f))
70
self.assertEqual(2, f.num_versions())
72
self.assertRaises(RevisionNotPresent,
73
f.add_lines, 'r2', ['foo'], [])
74
self.assertRaises(RevisionAlreadyPresent,
75
f.add_lines, 'r1', [], [])
77
# this checks that reopen with create=True does not break anything.
78
f = self.reopen_file(create=True)
81
def test_adds_with_parent_texts(self):
84
parent_texts['r0'] = f.add_lines('r0', [], ['a\n', 'b\n'])
86
parent_texts['r1'] = f.add_lines_with_ghosts('r1',
89
parent_texts=parent_texts)
90
except NotImplementedError:
91
# if the format doesn't support ghosts, just add normally.
92
parent_texts['r1'] = f.add_lines('r1',
95
parent_texts=parent_texts)
96
f.add_lines('r2', ['r1'], ['c\n', 'd\n'], parent_texts=parent_texts)
97
self.assertNotEqual(None, parent_texts['r0'])
98
self.assertNotEqual(None, parent_texts['r1'])
100
versions = f.versions()
101
self.assertTrue('r0' in versions)
102
self.assertTrue('r1' in versions)
103
self.assertTrue('r2' in versions)
104
self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
105
self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
106
self.assertEquals(f.get_lines('r2'), ['c\n', 'd\n'])
107
self.assertEqual(3, f.num_versions())
108
origins = f.annotate('r1')
109
self.assertEquals(origins[0][0], 'r0')
110
self.assertEquals(origins[1][0], 'r1')
111
origins = f.annotate('r2')
112
self.assertEquals(origins[0][0], 'r1')
113
self.assertEquals(origins[1][0], 'r2')
116
f = self.reopen_file()
119
def test_add_unicode_content(self):
120
# unicode content is not permitted in versioned files.
121
# versioned files version sequences of bytes only.
123
self.assertRaises(errors.BzrBadParameterUnicode,
124
vf.add_lines, 'a', [], ['a\n', u'b\n', 'c\n'])
126
(errors.BzrBadParameterUnicode, NotImplementedError),
127
vf.add_lines_with_ghosts, 'a', [], ['a\n', u'b\n', 'c\n'])
129
def test_inline_newline_throws(self):
130
# \r characters are not permitted in lines being added
132
self.assertRaises(errors.BzrBadParameterContainsNewline,
133
vf.add_lines, 'a', [], ['a\n\n'])
135
(errors.BzrBadParameterContainsNewline, NotImplementedError),
136
vf.add_lines_with_ghosts, 'a', [], ['a\n\n'])
137
# but inline CR's are allowed
138
vf.add_lines('a', [], ['a\r\n'])
140
vf.add_lines_with_ghosts('b', [], ['a\r\n'])
141
except NotImplementedError:
144
def test_add_reserved(self):
146
self.assertRaises(errors.ReservedId,
147
vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])
149
self.assertRaises(errors.ReservedId,
150
vf.add_delta, 'a:', [], None, 'sha1', False, ((0, 0, 0, []),))
152
def test_get_reserved(self):
154
self.assertRaises(errors.ReservedId, vf.get_delta, 'b:')
155
self.assertRaises(errors.ReservedId, vf.get_texts, ['b:'])
156
self.assertRaises(errors.ReservedId, vf.get_lines, 'b:')
157
self.assertRaises(errors.ReservedId, vf.get_text, 'b:')
159
def test_get_delta(self):
161
sha1s = self._setup_for_deltas(f)
162
expected_delta = (None, '6bfa09d82ce3e898ad4641ae13dd4fdb9cf0d76b', False,
163
[(0, 0, 1, [('base', 'line\n')])])
164
self.assertEqual(expected_delta, f.get_delta('base'))
166
text_name = 'chain1-'
167
for depth in range(26):
168
new_version = text_name + '%s' % depth
169
expected_delta = (next_parent, sha1s[depth],
171
[(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
172
self.assertEqual(expected_delta, f.get_delta(new_version))
173
next_parent = new_version
175
text_name = 'chain2-'
176
for depth in range(26):
177
new_version = text_name + '%s' % depth
178
expected_delta = (next_parent, sha1s[depth], False,
179
[(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
180
self.assertEqual(expected_delta, f.get_delta(new_version))
181
next_parent = new_version
182
# smoke test for eol support
183
expected_delta = ('base', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True, [])
184
self.assertEqual(['line'], f.get_lines('noeol'))
185
self.assertEqual(expected_delta, f.get_delta('noeol'))
187
def test_get_deltas(self):
189
sha1s = self._setup_for_deltas(f)
190
deltas = f.get_deltas(f.versions())
191
expected_delta = (None, '6bfa09d82ce3e898ad4641ae13dd4fdb9cf0d76b', False,
192
[(0, 0, 1, [('base', 'line\n')])])
193
self.assertEqual(expected_delta, deltas['base'])
195
text_name = 'chain1-'
196
for depth in range(26):
197
new_version = text_name + '%s' % depth
198
expected_delta = (next_parent, sha1s[depth],
200
[(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
201
self.assertEqual(expected_delta, deltas[new_version])
202
next_parent = new_version
204
text_name = 'chain2-'
205
for depth in range(26):
206
new_version = text_name + '%s' % depth
207
expected_delta = (next_parent, sha1s[depth], False,
208
[(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
209
self.assertEqual(expected_delta, deltas[new_version])
210
next_parent = new_version
211
# smoke tests for eol support
212
expected_delta = ('base', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True, [])
213
self.assertEqual(['line'], f.get_lines('noeol'))
214
self.assertEqual(expected_delta, deltas['noeol'])
215
# smoke tests for eol support - two noeol in a row same content
216
expected_deltas = (('noeol', '3ad7ee82dbd8f29ecba073f96e43e414b3f70a4d', True,
217
[(0, 1, 2, [('noeolsecond', 'line\n'), ('noeolsecond', 'line\n')])]),
218
('noeol', '3ad7ee82dbd8f29ecba073f96e43e414b3f70a4d', True,
219
[(0, 0, 1, [('noeolsecond', 'line\n')]), (1, 1, 0, [])]))
220
self.assertEqual(['line\n', 'line'], f.get_lines('noeolsecond'))
221
self.assertTrue(deltas['noeolsecond'] in expected_deltas)
222
# two no-eol in a row, different content
223
expected_delta = ('noeolsecond', '8bb553a84e019ef1149db082d65f3133b195223b', True,
224
[(1, 2, 1, [('noeolnotshared', 'phone\n')])])
225
self.assertEqual(['line\n', 'phone'], f.get_lines('noeolnotshared'))
226
self.assertEqual(expected_delta, deltas['noeolnotshared'])
227
# eol folling a no-eol with content change
228
expected_delta = ('noeol', 'a61f6fb6cfc4596e8d88c34a308d1e724caf8977', False,
229
[(0, 1, 1, [('eol', 'phone\n')])])
230
self.assertEqual(['phone\n'], f.get_lines('eol'))
231
self.assertEqual(expected_delta, deltas['eol'])
232
# eol folling a no-eol with content change
233
expected_delta = ('noeol', '6bfa09d82ce3e898ad4641ae13dd4fdb9cf0d76b', False,
234
[(0, 1, 1, [('eolline', 'line\n')])])
235
self.assertEqual(['line\n'], f.get_lines('eolline'))
236
self.assertEqual(expected_delta, deltas['eolline'])
237
# eol with no parents
238
expected_delta = (None, '264f39cab871e4cfd65b3a002f7255888bb5ed97', True,
239
[(0, 0, 1, [('noeolbase', 'line\n')])])
240
self.assertEqual(['line'], f.get_lines('noeolbase'))
241
self.assertEqual(expected_delta, deltas['noeolbase'])
242
# eol with two parents, in inverse insertion order
243
expected_deltas = (('noeolbase', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True,
244
[(0, 1, 1, [('eolbeforefirstparent', 'line\n')])]),
245
('noeolbase', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True,
246
[(0, 1, 1, [('eolbeforefirstparent', 'line\n')])]))
247
self.assertEqual(['line'], f.get_lines('eolbeforefirstparent'))
248
#self.assertTrue(deltas['eolbeforefirstparent'] in expected_deltas)
250
def test_make_mpdiff(self):
251
from bzrlib import multiparent
252
vf = self.get_file('foo')
253
sha1s = self._setup_for_deltas(vf)
254
new_vf = self.get_file('bar')
255
for version in multiparent.topo_iter(vf):
256
mpdiff = vf.make_mpdiff(version)
257
new_vf.add_mpdiff(version, vf.get_parents(version), mpdiff)
258
self.assertEqualDiff(vf.get_text(version),
259
new_vf.get_text(version))
261
def _setup_for_deltas(self, f):
262
self.assertRaises(errors.RevisionNotPresent, f.get_delta, 'base')
263
# add texts that should trip the knit maximum delta chain threshold
264
# as well as doing parallel chains of data in knits.
265
# this is done by two chains of 25 insertions
266
f.add_lines('base', [], ['line\n'])
267
f.add_lines('noeol', ['base'], ['line'])
268
# detailed eol tests:
269
# shared last line with parent no-eol
270
f.add_lines('noeolsecond', ['noeol'], ['line\n', 'line'])
271
# differing last line with parent, both no-eol
272
f.add_lines('noeolnotshared', ['noeolsecond'], ['line\n', 'phone'])
273
# add eol following a noneol parent, change content
274
f.add_lines('eol', ['noeol'], ['phone\n'])
275
# add eol following a noneol parent, no change content
276
f.add_lines('eolline', ['noeol'], ['line\n'])
277
# noeol with no parents:
278
f.add_lines('noeolbase', [], ['line'])
279
# noeol preceeding its leftmost parent in the output:
280
# this is done by making it a merge of two parents with no common
281
# anestry: noeolbase and noeol with the
282
# later-inserted parent the leftmost.
283
f.add_lines('eolbeforefirstparent', ['noeolbase', 'noeol'], ['line'])
284
# two identical eol texts
285
f.add_lines('noeoldup', ['noeol'], ['line'])
287
text_name = 'chain1-'
289
sha1s = {0 :'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
290
1 :'45e21ea146a81ea44a821737acdb4f9791c8abe7',
291
2 :'e1f11570edf3e2a070052366c582837a4fe4e9fa',
292
3 :'26b4b8626da827088c514b8f9bbe4ebf181edda1',
293
4 :'e28a5510be25ba84d31121cff00956f9970ae6f6',
294
5 :'d63ec0ce22e11dcf65a931b69255d3ac747a318d',
295
6 :'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
296
7 :'95c14da9cafbf828e3e74a6f016d87926ba234ab',
297
8 :'779e9a0b28f9f832528d4b21e17e168c67697272',
298
9 :'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
299
10:'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
300
11:'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
301
12:'31a2286267f24d8bedaa43355f8ad7129509ea85',
302
13:'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
303
14:'2c4b1736566b8ca6051e668de68650686a3922f2',
304
15:'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
305
16:'b0d2e18d3559a00580f6b49804c23fea500feab3',
306
17:'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
307
18:'5cf64a3459ae28efa60239e44b20312d25b253f3',
308
19:'1ebed371807ba5935958ad0884595126e8c4e823',
309
20:'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
310
21:'01edc447978004f6e4e962b417a4ae1955b6fe5d',
311
22:'d8d8dc49c4bf0bab401e0298bb5ad827768618bb',
312
23:'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
313
24:'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
314
25:'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
316
for depth in range(26):
317
new_version = text_name + '%s' % depth
318
text = text + ['line\n']
319
f.add_lines(new_version, [next_parent], text)
320
next_parent = new_version
322
text_name = 'chain2-'
324
for depth in range(26):
325
new_version = text_name + '%s' % depth
326
text = text + ['line\n']
327
f.add_lines(new_version, [next_parent], text)
328
next_parent = new_version
331
def test_add_delta(self):
332
# tests for the add-delta facility.
333
# at this point, optimising for speed, we assume no checks when deltas are inserted.
334
# this may need to be revisited.
335
source = self.get_file('source')
336
source.add_lines('base', [], ['line\n'])
338
text_name = 'chain1-'
340
for depth in range(26):
341
new_version = text_name + '%s' % depth
342
text = text + ['line\n']
343
source.add_lines(new_version, [next_parent], text)
344
next_parent = new_version
346
text_name = 'chain2-'
348
for depth in range(26):
349
new_version = text_name + '%s' % depth
350
text = text + ['line\n']
351
source.add_lines(new_version, [next_parent], text)
352
next_parent = new_version
353
source.add_lines('noeol', ['base'], ['line'])
355
target = self.get_file('target')
356
for version in source.versions():
357
parent, sha1, noeol, delta = source.get_delta(version)
358
target.add_delta(version,
359
source.get_parents(version),
364
self.assertRaises(RevisionAlreadyPresent,
365
target.add_delta, 'base', [], None, '', False, [])
366
for version in source.versions():
367
self.assertEqual(source.get_lines(version),
368
target.get_lines(version))
370
def test_ancestry(self):
372
self.assertEqual([], f.get_ancestry([]))
373
f.add_lines('r0', [], ['a\n', 'b\n'])
374
f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
375
f.add_lines('r2', ['r0'], ['b\n', 'c\n'])
376
f.add_lines('r3', ['r2'], ['b\n', 'c\n'])
377
f.add_lines('rM', ['r1', 'r2'], ['b\n', 'c\n'])
378
self.assertEqual([], f.get_ancestry([]))
379
versions = f.get_ancestry(['rM'])
380
# there are some possibilities:
384
# so we check indexes
385
r0 = versions.index('r0')
386
r1 = versions.index('r1')
387
r2 = versions.index('r2')
388
self.assertFalse('r3' in versions)
389
rM = versions.index('rM')
390
self.assertTrue(r0 < r1)
391
self.assertTrue(r0 < r2)
392
self.assertTrue(r1 < rM)
393
self.assertTrue(r2 < rM)
395
self.assertRaises(RevisionNotPresent,
396
f.get_ancestry, ['rM', 'rX'])
398
def test_mutate_after_finish(self):
400
f.transaction_finished()
401
self.assertRaises(errors.OutSideTransaction, f.add_delta, '', [], '', '', False, [])
402
self.assertRaises(errors.OutSideTransaction, f.add_lines, '', [], [])
403
self.assertRaises(errors.OutSideTransaction, f.add_lines_with_ghosts, '', [], [])
404
self.assertRaises(errors.OutSideTransaction, f.fix_parents, '', [])
405
self.assertRaises(errors.OutSideTransaction, f.join, '')
406
self.assertRaises(errors.OutSideTransaction, f.clone_text, 'base', 'bar', ['foo'])
408
def test_clear_cache(self):
410
# on a new file it should not error
412
# and after adding content, doing a clear_cache and a get should work.
413
f.add_lines('0', [], ['a'])
415
self.assertEqual(['a'], f.get_lines('0'))
417
def test_clone_text(self):
419
f.add_lines('r0', [], ['a\n', 'b\n'])
420
f.clone_text('r1', 'r0', ['r0'])
422
self.assertEquals(f.get_lines('r1'), f.get_lines('r0'))
423
self.assertEquals(f.get_lines('r1'), ['a\n', 'b\n'])
424
self.assertEquals(f.get_parents('r1'), ['r0'])
426
self.assertRaises(RevisionNotPresent,
427
f.clone_text, 'r2', 'rX', [])
428
self.assertRaises(RevisionAlreadyPresent,
429
f.clone_text, 'r1', 'r0', [])
431
verify_file(self.reopen_file())
433
def test_create_empty(self):
435
f.add_lines('0', [], ['a\n'])
436
new_f = f.create_empty('t', MemoryTransport())
437
# smoke test, specific types should check it is honoured correctly for
438
# non type attributes
439
self.assertEqual([], new_f.versions())
440
self.assertTrue(isinstance(new_f, f.__class__))
442
def test_copy_to(self):
444
f.add_lines('0', [], ['a\n'])
445
t = MemoryTransport()
447
for suffix in f.__class__.get_suffixes():
448
self.assertTrue(t.has('foo' + suffix))
450
def test_get_suffixes(self):
453
self.assertEqual(f.__class__.get_suffixes(), f.__class__.get_suffixes())
454
# and should be a list
455
self.assertTrue(isinstance(f.__class__.get_suffixes(), list))
457
def build_graph(self, file, graph):
458
for node in topo_sort(graph.items()):
459
file.add_lines(node, graph[node], [])
461
def test_get_graph(self):
467
self.build_graph(f, graph)
468
self.assertEqual(graph, f.get_graph())
470
def test_get_graph_partial(self):
478
complex_graph.update(simple_a)
483
complex_graph.update(simple_b)
490
complex_graph.update(simple_gam)
492
simple_b_gam.update(simple_gam)
493
simple_b_gam.update(simple_b)
494
self.build_graph(f, complex_graph)
495
self.assertEqual(simple_a, f.get_graph(['a']))
496
self.assertEqual(simple_b, f.get_graph(['b']))
497
self.assertEqual(simple_gam, f.get_graph(['gam']))
498
self.assertEqual(simple_b_gam, f.get_graph(['b', 'gam']))
500
def test_get_parents(self):
502
f.add_lines('r0', [], ['a\n', 'b\n'])
503
f.add_lines('r1', [], ['a\n', 'b\n'])
504
f.add_lines('r2', [], ['a\n', 'b\n'])
505
f.add_lines('r3', [], ['a\n', 'b\n'])
506
f.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n'])
507
self.assertEquals(f.get_parents('m'), ['r0', 'r1', 'r2', 'r3'])
509
self.assertRaises(RevisionNotPresent,
512
def test_annotate(self):
514
f.add_lines('r0', [], ['a\n', 'b\n'])
515
f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
516
origins = f.annotate('r1')
517
self.assertEquals(origins[0][0], 'r1')
518
self.assertEquals(origins[1][0], 'r0')
520
self.assertRaises(RevisionNotPresent,
524
# tests that walk returns all the inclusions for the requested
525
# revisions as well as the revisions changes themselves.
526
f = self.get_file('1')
527
f.add_lines('r0', [], ['a\n', 'b\n'])
528
f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
529
f.add_lines('rX', ['r1'], ['d\n', 'b\n'])
530
f.add_lines('rY', ['r1'], ['c\n', 'e\n'])
533
for lineno, insert, dset, text in f.walk(['rX', 'rY']):
534
lines[text] = (insert, dset)
536
self.assertTrue(lines['a\n'], ('r0', set(['r1'])))
537
self.assertTrue(lines['b\n'], ('r0', set(['rY'])))
538
self.assertTrue(lines['c\n'], ('r1', set(['rX'])))
539
self.assertTrue(lines['d\n'], ('rX', set([])))
540
self.assertTrue(lines['e\n'], ('rY', set([])))
542
def test_detection(self):
543
# Test weaves detect corruption.
545
# Weaves contain a checksum of their texts.
546
# When a text is extracted, this checksum should be
549
w = self.get_file_corrupted_text()
551
self.assertEqual('hello\n', w.get_text('v1'))
552
self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
553
self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
554
self.assertRaises(errors.WeaveInvalidChecksum, w.check)
556
w = self.get_file_corrupted_checksum()
558
self.assertEqual('hello\n', w.get_text('v1'))
559
self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
560
self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
561
self.assertRaises(errors.WeaveInvalidChecksum, w.check)
563
def get_file_corrupted_text(self):
564
"""Return a versioned file with corrupt text but valid metadata."""
565
raise NotImplementedError(self.get_file_corrupted_text)
567
def reopen_file(self, name='foo'):
568
"""Open the versioned file from disk again."""
569
raise NotImplementedError(self.reopen_file)
571
def test_iter_lines_added_or_present_in_versions(self):
572
# test that we get at least an equalset of the lines added by
573
# versions in the weave
574
# the ordering here is to make a tree so that dumb searches have
575
# more changes to muck up.
577
class InstrumentedProgress(progress.DummyProgress):
581
progress.DummyProgress.__init__(self)
584
def update(self, msg=None, current=None, total=None):
585
self.updates.append((msg, current, total))
588
# add a base to get included
589
vf.add_lines('base', [], ['base\n'])
590
# add a ancestor to be included on one side
591
vf.add_lines('lancestor', [], ['lancestor\n'])
592
# add a ancestor to be included on the other side
593
vf.add_lines('rancestor', ['base'], ['rancestor\n'])
594
# add a child of rancestor with no eofile-nl
595
vf.add_lines('child', ['rancestor'], ['base\n', 'child\n'])
596
# add a child of lancestor and base to join the two roots
597
vf.add_lines('otherchild',
598
['lancestor', 'base'],
599
['base\n', 'lancestor\n', 'otherchild\n'])
600
def iter_with_versions(versions, expected):
601
# now we need to see what lines are returned, and how often.
608
progress = InstrumentedProgress()
609
# iterate over the lines
610
for line in vf.iter_lines_added_or_present_in_versions(versions,
613
if []!= progress.updates:
614
self.assertEqual(expected, progress.updates)
616
lines = iter_with_versions(['child', 'otherchild'],
617
[('Walking content.', 0, 2),
618
('Walking content.', 1, 2),
619
('Walking content.', 2, 2)])
620
# we must see child and otherchild
621
self.assertTrue(lines['child\n'] > 0)
622
self.assertTrue(lines['otherchild\n'] > 0)
623
# we dont care if we got more than that.
626
lines = iter_with_versions(None, [('Walking content.', 0, 5),
627
('Walking content.', 1, 5),
628
('Walking content.', 2, 5),
629
('Walking content.', 3, 5),
630
('Walking content.', 4, 5),
631
('Walking content.', 5, 5)])
632
# all lines must be seen at least once
633
self.assertTrue(lines['base\n'] > 0)
634
self.assertTrue(lines['lancestor\n'] > 0)
635
self.assertTrue(lines['rancestor\n'] > 0)
636
self.assertTrue(lines['child\n'] > 0)
637
self.assertTrue(lines['otherchild\n'] > 0)
639
def test_fix_parents(self):
640
# some versioned files allow incorrect parents to be corrected after
641
# insertion - this may not fix ancestry..
642
# if they do not supported, they just do not implement it.
643
# we test this as an interface test to ensure that those that *do*
644
# implementent it get it right.
646
vf.add_lines('notbase', [], [])
647
vf.add_lines('base', [], [])
649
vf.fix_parents('notbase', ['base'])
650
except NotImplementedError:
652
self.assertEqual(['base'], vf.get_parents('notbase'))
653
# open again, check it stuck.
655
self.assertEqual(['base'], vf.get_parents('notbase'))
657
def test_fix_parents_with_ghosts(self):
658
# when fixing parents, ghosts that are listed should not be ghosts
663
vf.add_lines_with_ghosts('notbase', ['base', 'stillghost'], [])
664
except NotImplementedError:
666
vf.add_lines('base', [], [])
667
vf.fix_parents('notbase', ['base', 'stillghost'])
668
self.assertEqual(['base'], vf.get_parents('notbase'))
669
# open again, check it stuck.
671
self.assertEqual(['base'], vf.get_parents('notbase'))
672
# and check the ghosts
673
self.assertEqual(['base', 'stillghost'],
674
vf.get_parents_with_ghosts('notbase'))
676
def test_add_lines_with_ghosts(self):
677
# some versioned file formats allow lines to be added with parent
678
# information that is > than that in the format. Formats that do
679
# not support this need to raise NotImplementedError on the
680
# add_lines_with_ghosts api.
682
# add a revision with ghost parents
683
# The preferred form is utf8, but we should translate when needed
684
parent_id_unicode = u'b\xbfse'
685
parent_id_utf8 = parent_id_unicode.encode('utf8')
687
vf.add_lines_with_ghosts('notbxbfse', [parent_id_utf8], [])
688
except NotImplementedError:
689
# check the other ghost apis are also not implemented
690
self.assertRaises(NotImplementedError, vf.has_ghost, 'foo')
691
self.assertRaises(NotImplementedError, vf.get_ancestry_with_ghosts, ['foo'])
692
self.assertRaises(NotImplementedError, vf.get_parents_with_ghosts, 'foo')
693
self.assertRaises(NotImplementedError, vf.get_graph_with_ghosts)
695
vf = self.reopen_file()
696
# test key graph related apis: getncestry, _graph, get_parents
698
# - these are ghost unaware and must not be reflect ghosts
699
self.assertEqual(['notbxbfse'], vf.get_ancestry('notbxbfse'))
700
self.assertEqual([], vf.get_parents('notbxbfse'))
701
self.assertEqual({'notbxbfse':[]}, vf.get_graph())
702
self.assertFalse(self.callDeprecated([osutils._revision_id_warning],
703
vf.has_version, parent_id_unicode))
704
self.assertFalse(vf.has_version(parent_id_utf8))
705
# we have _with_ghost apis to give us ghost information.
706
self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry_with_ghosts(['notbxbfse']))
707
self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
708
self.assertEqual({'notbxbfse':[parent_id_utf8]}, vf.get_graph_with_ghosts())
709
self.assertTrue(self.callDeprecated([osutils._revision_id_warning],
710
vf.has_ghost, parent_id_unicode))
711
self.assertTrue(vf.has_ghost(parent_id_utf8))
712
# if we add something that is a ghost of another, it should correct the
713
# results of the prior apis
714
self.callDeprecated([osutils._revision_id_warning],
715
vf.add_lines, parent_id_unicode, [], [])
716
self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry(['notbxbfse']))
717
self.assertEqual([parent_id_utf8], vf.get_parents('notbxbfse'))
718
self.assertEqual({parent_id_utf8:[],
719
'notbxbfse':[parent_id_utf8],
722
self.assertTrue(self.callDeprecated([osutils._revision_id_warning],
723
vf.has_version, parent_id_unicode))
724
self.assertTrue(vf.has_version(parent_id_utf8))
725
# we have _with_ghost apis to give us ghost information.
726
self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry_with_ghosts(['notbxbfse']))
727
self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
728
self.assertEqual({parent_id_utf8:[],
729
'notbxbfse':[parent_id_utf8],
731
vf.get_graph_with_ghosts())
732
self.assertFalse(self.callDeprecated([osutils._revision_id_warning],
733
vf.has_ghost, parent_id_unicode))
734
self.assertFalse(vf.has_ghost(parent_id_utf8))
736
def test_add_lines_with_ghosts_after_normal_revs(self):
737
# some versioned file formats allow lines to be added with parent
738
# information that is > than that in the format. Formats that do
739
# not support this need to raise NotImplementedError on the
740
# add_lines_with_ghosts api.
742
# probe for ghost support
745
except NotImplementedError:
747
vf.add_lines_with_ghosts('base', [], ['line\n', 'line_b\n'])
748
vf.add_lines_with_ghosts('references_ghost',
750
['line\n', 'line_b\n', 'line_c\n'])
751
origins = vf.annotate('references_ghost')
752
self.assertEquals(('base', 'line\n'), origins[0])
753
self.assertEquals(('base', 'line_b\n'), origins[1])
754
self.assertEquals(('references_ghost', 'line_c\n'), origins[2])
756
def test_readonly_mode(self):
757
transport = get_transport(self.get_url('.'))
758
factory = self.get_factory()
759
vf = factory('id', transport, 0777, create=True, access_mode='w')
760
vf = factory('id', transport, access_mode='r')
761
self.assertRaises(errors.ReadOnlyError, vf.add_delta, '', [], '', '', False, [])
762
self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
763
self.assertRaises(errors.ReadOnlyError,
764
vf.add_lines_with_ghosts,
768
self.assertRaises(errors.ReadOnlyError, vf.fix_parents, 'base', [])
769
self.assertRaises(errors.ReadOnlyError, vf.join, 'base')
770
self.assertRaises(errors.ReadOnlyError, vf.clone_text, 'base', 'bar', ['foo'])
772
def test_get_sha1(self):
773
# check the sha1 data is available
776
vf.add_lines('a', [], ['a\n'])
777
# the same file, different metadata
778
vf.add_lines('b', ['a'], ['a\n'])
779
# a file differing only in last newline.
780
vf.add_lines('c', [], ['a'])
782
'3f786850e387550fdab836ed7e6dc881de23001b', vf.get_sha1('a'))
784
'3f786850e387550fdab836ed7e6dc881de23001b', vf.get_sha1('b'))
786
'86f7e437faa5a7fce15d1ddcb9eaeaea377667b8', vf.get_sha1('c'))
789
class TestWeave(TestCaseWithTransport, VersionedFileTestMixIn):
791
def get_file(self, name='foo'):
792
return WeaveFile(name, get_transport(self.get_url('.')), create=True)
794
def get_file_corrupted_text(self):
795
w = WeaveFile('foo', get_transport(self.get_url('.')), create=True)
796
w.add_lines('v1', [], ['hello\n'])
797
w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])
799
# We are going to invasively corrupt the text
800
# Make sure the internals of weave are the same
801
self.assertEqual([('{', 0)
809
self.assertEqual(['f572d396fae9206628714fb2ce00f72e94f2258f'
810
, '90f265c6e75f1c8f9ab76dcf85528352c5f215ef'
815
w._weave[4] = 'There\n'
818
def get_file_corrupted_checksum(self):
819
w = self.get_file_corrupted_text()
821
w._weave[4] = 'there\n'
822
self.assertEqual('hello\nthere\n', w.get_text('v2'))
824
#Invalid checksum, first digit changed
825
w._sha1s[1] = 'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
828
def reopen_file(self, name='foo', create=False):
829
return WeaveFile(name, get_transport(self.get_url('.')), create=create)
831
def test_no_implicit_create(self):
832
self.assertRaises(errors.NoSuchFile,
835
get_transport(self.get_url('.')))
837
def get_factory(self):
841
class TestKnit(TestCaseWithTransport, VersionedFileTestMixIn):
843
def get_file(self, name='foo'):
844
return KnitVersionedFile(name, get_transport(self.get_url('.')),
845
delta=True, create=True)
847
def get_factory(self):
848
return KnitVersionedFile
850
def get_file_corrupted_text(self):
851
knit = self.get_file()
852
knit.add_lines('v1', [], ['hello\n'])
853
knit.add_lines('v2', ['v1'], ['hello\n', 'there\n'])
856
def reopen_file(self, name='foo', create=False):
857
return KnitVersionedFile(name, get_transport(self.get_url('.')),
861
def test_detection(self):
862
knit = self.get_file()
865
def test_no_implicit_create(self):
866
self.assertRaises(errors.NoSuchFile,
869
get_transport(self.get_url('.')))
872
class InterString(versionedfile.InterVersionedFile):
873
"""An inter-versionedfile optimised code path for strings.
875
This is for use during testing where we use strings as versionedfiles
876
so that none of the default regsitered interversionedfile classes will
877
match - which lets us test the match logic.
881
def is_compatible(source, target):
882
"""InterString is compatible with strings-as-versionedfiles."""
883
return isinstance(source, str) and isinstance(target, str)
886
# TODO this and the InterRepository core logic should be consolidatable
887
# if we make the registry a separate class though we still need to
888
# test the behaviour in the active registry to catch failure-to-handle-
890
class TestInterVersionedFile(TestCaseWithTransport):
892
def test_get_default_inter_versionedfile(self):
893
# test that the InterVersionedFile.get(a, b) probes
894
# for a class where is_compatible(a, b) returns
895
# true and returns a default interversionedfile otherwise.
896
# This also tests that the default registered optimised interversionedfile
897
# classes do not barf inappropriately when a surprising versionedfile type
899
dummy_a = "VersionedFile 1."
900
dummy_b = "VersionedFile 2."
901
self.assertGetsDefaultInterVersionedFile(dummy_a, dummy_b)
903
def assertGetsDefaultInterVersionedFile(self, a, b):
904
"""Asserts that InterVersionedFile.get(a, b) -> the default."""
905
inter = versionedfile.InterVersionedFile.get(a, b)
906
self.assertEqual(versionedfile.InterVersionedFile,
908
self.assertEqual(a, inter.source)
909
self.assertEqual(b, inter.target)
911
def test_register_inter_versionedfile_class(self):
912
# test that a optimised code path provider - a
913
# InterVersionedFile subclass can be registered and unregistered
914
# and that it is correctly selected when given a versionedfile
915
# pair that it returns true on for the is_compatible static method
917
dummy_a = "VersionedFile 1."
918
dummy_b = "VersionedFile 2."
919
versionedfile.InterVersionedFile.register_optimiser(InterString)
921
# we should get the default for something InterString returns False
923
self.assertFalse(InterString.is_compatible(dummy_a, None))
924
self.assertGetsDefaultInterVersionedFile(dummy_a, None)
925
# and we should get an InterString for a pair it 'likes'
926
self.assertTrue(InterString.is_compatible(dummy_a, dummy_b))
927
inter = versionedfile.InterVersionedFile.get(dummy_a, dummy_b)
928
self.assertEqual(InterString, inter.__class__)
929
self.assertEqual(dummy_a, inter.source)
930
self.assertEqual(dummy_b, inter.target)
932
versionedfile.InterVersionedFile.unregister_optimiser(InterString)
933
# now we should get the default InterVersionedFile object again.
934
self.assertGetsDefaultInterVersionedFile(dummy_a, dummy_b)
937
class TestReadonlyHttpMixin(object):
    """Mixin checking a versioned file is readable over readonly HTTP.

    Concrete subclasses supply get_file() (a writable versioned file on the
    web server root) and get_factory() (the versioned file class to open
    read-only over HTTP).
    """

    def test_readonly_http_works(self):
        # we should be able to read from http with a versioned file.
        vf = self.get_file()
        # try an empty file access first
        readonly_vf = self.get_factory()('foo', get_transport(
            self.get_readonly_url('.')))
        self.assertEqual([], readonly_vf.versions())
        # now with some content
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
        # re-open to pick up the new versions over HTTP
        readonly_vf = self.get_factory()('foo', get_transport(
            self.get_readonly_url('.')))
        self.assertEqual(['1', '2'], vf.versions())
        # every version must be retrievable through the readonly transport
        for version in readonly_vf.versions():
            readonly_vf.get_lines(version)
954
class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):
    """Readonly-HTTP tests run against a WeaveFile."""

    def get_file(self):
        # create the weave in the web server's root so the readonly
        # transport can see it
        return WeaveFile('foo', get_transport(self.get_url('.')), create=True)

    def get_factory(self):
        return WeaveFile
963
class TestKnitHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):
    """Readonly-HTTP tests run against a KnitVersionedFile."""

    def get_file(self):
        return KnitVersionedFile('foo', get_transport(self.get_url('.')),
                                 delta=True, create=True)

    def get_factory(self):
        return KnitVersionedFile
973
class MergeCasesMixin(object):
    """Mixin providing weave-merge behaviour test cases.

    Concrete subclasses supply get_file() and log_contents(), and may
    override overlappedInsertExpected where their merge output differs.
    """

    def doMerge(self, base, a, b, mp):
        """Merge a and b (both derived from base) and check the result.

        base, a, b and mp are lists of lines WITHOUT trailing newlines;
        mp is the expected merged output.
        """
        from cStringIO import StringIO

        def addcrlf(x):
            # the versioned file stores complete lines, so append the
            # newline the fixture lists omit
            return x + '\n'

        w = self.get_file()
        w.add_lines('text0', [], map(addcrlf, base))
        w.add_lines('text1', ['text0'], map(addcrlf, a))
        w.add_lines('text2', ['text0'], map(addcrlf, b))

        self.log_contents(w)

        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            # line can be empty/None for some plan states; only log real
            # lines, stripping the trailing newline for readability
            if line:
                self.log('%12s | %s' % (state, line[:-1]))

        self.log('merge:')
        mt = StringIO()
        mt.writelines(w.weave_merge(p))
        mt.seek(0)
        self.log(mt.getvalue())

        mp = map(addcrlf, mp)
        self.assertEqual(mt.readlines(), mp)
1005
def testOneInsert(self):
1011
def testSeparateInserts(self):
1012
self.doMerge(['aaa', 'bbb', 'ccc'],
1013
['aaa', 'xxx', 'bbb', 'ccc'],
1014
['aaa', 'bbb', 'yyy', 'ccc'],
1015
['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])
1017
def testSameInsert(self):
1018
self.doMerge(['aaa', 'bbb', 'ccc'],
1019
['aaa', 'xxx', 'bbb', 'ccc'],
1020
['aaa', 'xxx', 'bbb', 'yyy', 'ccc'],
1021
['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])
1022
overlappedInsertExpected = ['aaa', 'xxx', 'yyy', 'bbb']
1023
def testOverlappedInsert(self):
1024
self.doMerge(['aaa', 'bbb'],
1025
['aaa', 'xxx', 'yyy', 'bbb'],
1026
['aaa', 'xxx', 'bbb'], self.overlappedInsertExpected)
1028
# really it ought to reduce this to
1029
# ['aaa', 'xxx', 'yyy', 'bbb']
1032
def testClashReplace(self):
1033
self.doMerge(['aaa'],
1036
['<<<<<<< ', 'xxx', '=======', 'yyy', 'zzz',
1039
def testNonClashInsert1(self):
1040
self.doMerge(['aaa'],
1043
['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz',
1046
def testNonClashInsert2(self):
1047
self.doMerge(['aaa'],
1053
def testDeleteAndModify(self):
1054
"""Clashing delete and modification.
1056
If one side modifies a region and the other deletes it then
1057
there should be a conflict with one side blank.
1060
#######################################
1061
# skippd, not working yet
1064
self.doMerge(['aaa', 'bbb', 'ccc'],
1065
['aaa', 'ddd', 'ccc'],
1067
['<<<<<<<< ', 'aaa', '=======', '>>>>>>> ', 'ccc'])
1069
def _test_merge_from_strings(self, base, a, b, expected):
1071
w.add_lines('text0', [], base.splitlines(True))
1072
w.add_lines('text1', ['text0'], a.splitlines(True))
1073
w.add_lines('text2', ['text0'], b.splitlines(True))
1074
self.log('merge plan:')
1075
p = list(w.plan_merge('text1', 'text2'))
1076
for state, line in p:
1078
self.log('%12s | %s' % (state, line[:-1]))
1079
self.log('merge result:')
1080
result_text = ''.join(w.weave_merge(p))
1081
self.log(result_text)
1082
self.assertEqualDiff(result_text, expected)
1084
def test_weave_merge_conflicts(self):
1085
# does weave merge properly handle plans that end with unchanged?
1086
result = ''.join(self.get_file().weave_merge([('new-a', 'hello\n')]))
1087
self.assertEqual(result, 'hello\n')
1089
def test_deletion_extended(self):
1090
"""One side deletes, the other deletes more.
1107
self._test_merge_from_strings(base, a, b, result)
1109
def test_deletion_overlap(self):
1110
"""Delete overlapping regions with no other conflict.
1112
Arguably it'd be better to treat these as agreement, rather than
1113
conflict, but for now conflict is safer.
1141
self._test_merge_from_strings(base, a, b, result)
1143
def test_agreement_deletion(self):
1144
"""Agree to delete some lines, without conflicts."""
1166
self._test_merge_from_strings(base, a, b, result)
1168
def test_sync_on_deletion(self):
1169
"""Specific case of merge where we can synchronize incorrectly.
1171
A previous version of the weave merge concluded that the two versions
1172
agreed on deleting line 2, and this could be a synchronization point.
1173
Line 1 was then considered in isolation, and thought to be deleted on
1176
It's better to consider the whole thing as a disagreement region.
1187
a's replacement line 2
1200
a's replacement line 2
1207
self._test_merge_from_strings(base, a, b, result)
1210
class TestKnitMerge(TestCaseWithTransport, MergeCasesMixin):
    """Run the MergeCasesMixin cases against a KnitVersionedFile."""

    def get_file(self, name='foo'):
        return KnitVersionedFile(name, get_transport(self.get_url('.')),
                                 delta=True, create=True)

    def log_contents(self, w):
        # knit contents are not usefully loggable as text
        pass
1220
class TestWeaveMerge(TestCaseWithTransport, MergeCasesMixin):
    """Run the MergeCasesMixin cases against a WeaveFile."""

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True)

    def log_contents(self, w):
        # serialize the weave to text so failures show its full state
        self.log('weave is:')
        tmpf = StringIO()
        write_weave(w, tmpf)
        self.log(tmpf.getvalue())

    # weave merge reports the overlapped insert as a conflict
    overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======',
                                'xxx', '>>>>>>> ', 'bbb']