1
# Copyright (C) 2005 Canonical Ltd
4
# Johan Rydberg <jrydberg@gnu.org>
6
# This program is free software; you can redistribute it and/or modify
7
# it under the terms of the GNU General Public License as published by
8
# the Free Software Foundation; either version 2 of the License, or
9
# (at your option) any later version.
11
# This program is distributed in the hope that it will be useful,
12
# but WITHOUT ANY WARRANTY; without even the implied warranty of
13
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14
# GNU General Public License for more details.
16
# You should have received a copy of the GNU General Public License
17
# along with this program; if not, write to the Free Software
18
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21
# TODO: might be nice to create a versionedfile with some type of corruption
22
# considered typical and check that it can be detected/corrected.
24
from StringIO import StringIO
32
from bzrlib.errors import (
34
RevisionAlreadyPresent,
37
from bzrlib.knit import KnitVersionedFile, \
39
from bzrlib.tests import TestCaseWithTransport
40
from bzrlib.tests.HTTPTestUtil import TestCaseWithWebserver
41
from bzrlib.trace import mutter
42
from bzrlib.transport import get_transport
43
from bzrlib.transport.memory import MemoryTransport
44
from bzrlib.tsort import topo_sort
45
import bzrlib.versionedfile as versionedfile
46
from bzrlib.weave import WeaveFile
47
from bzrlib.weavefile import read_weave, write_weave
50
class VersionedFileTestMixIn(object):
51
"""A mixin test class for testing VersionedFiles.
53
This is not an adaptor-style test at this point because
54
theres no dynamic substitution of versioned file implementations,
55
they are strictly controlled by their owning repositories.
60
f.add_lines('r0', [], ['a\n', 'b\n'])
61
f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
63
versions = f.versions()
64
self.assertTrue('r0' in versions)
65
self.assertTrue('r1' in versions)
66
self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
67
self.assertEquals(f.get_text('r0'), 'a\nb\n')
68
self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
69
self.assertEqual(2, len(f))
70
self.assertEqual(2, f.num_versions())
72
self.assertRaises(RevisionNotPresent,
73
f.add_lines, 'r2', ['foo'], [])
74
self.assertRaises(RevisionAlreadyPresent,
75
f.add_lines, 'r1', [], [])
77
# this checks that reopen with create=True does not break anything.
78
f = self.reopen_file(create=True)
81
def test_adds_with_parent_texts(self):
84
parent_texts['r0'] = f.add_lines('r0', [], ['a\n', 'b\n'])
86
parent_texts['r1'] = f.add_lines_with_ghosts('r1',
89
parent_texts=parent_texts)
90
except NotImplementedError:
91
# if the format doesn't support ghosts, just add normally.
92
parent_texts['r1'] = f.add_lines('r1',
95
parent_texts=parent_texts)
96
f.add_lines('r2', ['r1'], ['c\n', 'd\n'], parent_texts=parent_texts)
97
self.assertNotEqual(None, parent_texts['r0'])
98
self.assertNotEqual(None, parent_texts['r1'])
100
versions = f.versions()
101
self.assertTrue('r0' in versions)
102
self.assertTrue('r1' in versions)
103
self.assertTrue('r2' in versions)
104
self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
105
self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
106
self.assertEquals(f.get_lines('r2'), ['c\n', 'd\n'])
107
self.assertEqual(3, f.num_versions())
108
origins = f.annotate('r1')
109
self.assertEquals(origins[0][0], 'r0')
110
self.assertEquals(origins[1][0], 'r1')
111
origins = f.annotate('r2')
112
self.assertEquals(origins[0][0], 'r1')
113
self.assertEquals(origins[1][0], 'r2')
116
f = self.reopen_file()
119
def test_add_unicode_content(self):
120
# unicode content is not permitted in versioned files.
121
# versioned files version sequences of bytes only.
123
self.assertRaises(errors.BzrBadParameterUnicode,
124
vf.add_lines, 'a', [], ['a\n', u'b\n', 'c\n'])
126
(errors.BzrBadParameterUnicode, NotImplementedError),
127
vf.add_lines_with_ghosts, 'a', [], ['a\n', u'b\n', 'c\n'])
129
def test_inline_newline_throws(self):
130
# \r characters are not permitted in lines being added
132
self.assertRaises(errors.BzrBadParameterContainsNewline,
133
vf.add_lines, 'a', [], ['a\n\n'])
135
(errors.BzrBadParameterContainsNewline, NotImplementedError),
136
vf.add_lines_with_ghosts, 'a', [], ['a\n\n'])
137
# but inline CR's are allowed
138
vf.add_lines('a', [], ['a\r\n'])
140
vf.add_lines_with_ghosts('b', [], ['a\r\n'])
141
except NotImplementedError:
144
def test_add_reserved(self):
146
self.assertRaises(errors.ReservedId,
147
vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])
149
self.assertRaises(errors.ReservedId,
150
vf.add_delta, 'a:', [], None, 'sha1', False, ((0, 0, 0, []),))
152
def test_get_reserved(self):
154
self.assertRaises(errors.ReservedId, vf.get_delta, 'b:')
155
self.assertRaises(errors.ReservedId, vf.get_texts, ['b:'])
156
self.assertRaises(errors.ReservedId, vf.get_lines, 'b:')
157
self.assertRaises(errors.ReservedId, vf.get_text, 'b:')
159
def test_get_delta(self):
161
sha1s = self._setup_for_deltas(f)
162
expected_delta = (None, '6bfa09d82ce3e898ad4641ae13dd4fdb9cf0d76b', False,
163
[(0, 0, 1, [('base', 'line\n')])])
164
self.assertEqual(expected_delta, f.get_delta('base'))
166
text_name = 'chain1-'
167
for depth in range(26):
168
new_version = text_name + '%s' % depth
169
expected_delta = (next_parent, sha1s[depth],
171
[(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
172
self.assertEqual(expected_delta, f.get_delta(new_version))
173
next_parent = new_version
175
text_name = 'chain2-'
176
for depth in range(26):
177
new_version = text_name + '%s' % depth
178
expected_delta = (next_parent, sha1s[depth], False,
179
[(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
180
self.assertEqual(expected_delta, f.get_delta(new_version))
181
next_parent = new_version
182
# smoke test for eol support
183
expected_delta = ('base', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True, [])
184
self.assertEqual(['line'], f.get_lines('noeol'))
185
self.assertEqual(expected_delta, f.get_delta('noeol'))
187
def test_get_deltas(self):
189
sha1s = self._setup_for_deltas(f)
190
deltas = f.get_deltas(f.versions())
191
expected_delta = (None, '6bfa09d82ce3e898ad4641ae13dd4fdb9cf0d76b', False,
192
[(0, 0, 1, [('base', 'line\n')])])
193
self.assertEqual(expected_delta, deltas['base'])
195
text_name = 'chain1-'
196
for depth in range(26):
197
new_version = text_name + '%s' % depth
198
expected_delta = (next_parent, sha1s[depth],
200
[(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
201
self.assertEqual(expected_delta, deltas[new_version])
202
next_parent = new_version
204
text_name = 'chain2-'
205
for depth in range(26):
206
new_version = text_name + '%s' % depth
207
expected_delta = (next_parent, sha1s[depth], False,
208
[(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
209
self.assertEqual(expected_delta, deltas[new_version])
210
next_parent = new_version
211
# smoke tests for eol support
212
expected_delta = ('base', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True, [])
213
self.assertEqual(['line'], f.get_lines('noeol'))
214
self.assertEqual(expected_delta, deltas['noeol'])
215
# smoke tests for eol support - two noeol in a row same content
216
expected_deltas = (('noeol', '3ad7ee82dbd8f29ecba073f96e43e414b3f70a4d', True,
217
[(0, 1, 2, [('noeolsecond', 'line\n'), ('noeolsecond', 'line\n')])]),
218
('noeol', '3ad7ee82dbd8f29ecba073f96e43e414b3f70a4d', True,
219
[(0, 0, 1, [('noeolsecond', 'line\n')]), (1, 1, 0, [])]))
220
self.assertEqual(['line\n', 'line'], f.get_lines('noeolsecond'))
221
self.assertTrue(deltas['noeolsecond'] in expected_deltas)
222
# two no-eol in a row, different content
223
expected_delta = ('noeolsecond', '8bb553a84e019ef1149db082d65f3133b195223b', True,
224
[(1, 2, 1, [('noeolnotshared', 'phone\n')])])
225
self.assertEqual(['line\n', 'phone'], f.get_lines('noeolnotshared'))
226
self.assertEqual(expected_delta, deltas['noeolnotshared'])
227
# eol following a no-eol with content change
228
expected_delta = ('noeol', 'a61f6fb6cfc4596e8d88c34a308d1e724caf8977', False,
229
[(0, 1, 1, [('eol', 'phone\n')])])
230
self.assertEqual(['phone\n'], f.get_lines('eol'))
231
self.assertEqual(expected_delta, deltas['eol'])
232
# eol following a no-eol with content change
233
expected_delta = ('noeol', '6bfa09d82ce3e898ad4641ae13dd4fdb9cf0d76b', False,
234
[(0, 1, 1, [('eolline', 'line\n')])])
235
self.assertEqual(['line\n'], f.get_lines('eolline'))
236
self.assertEqual(expected_delta, deltas['eolline'])
237
# eol with no parents
238
expected_delta = (None, '264f39cab871e4cfd65b3a002f7255888bb5ed97', True,
239
[(0, 0, 1, [('noeolbase', 'line\n')])])
240
self.assertEqual(['line'], f.get_lines('noeolbase'))
241
self.assertEqual(expected_delta, deltas['noeolbase'])
242
# eol with two parents, in inverse insertion order
243
expected_deltas = (('noeolbase', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True,
244
[(0, 1, 1, [('eolbeforefirstparent', 'line\n')])]),
245
('noeolbase', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True,
246
[(0, 1, 1, [('eolbeforefirstparent', 'line\n')])]))
247
self.assertEqual(['line'], f.get_lines('eolbeforefirstparent'))
248
#self.assertTrue(deltas['eolbeforefirstparent'] in expected_deltas)
250
def _setup_for_deltas(self, f):
251
self.assertRaises(errors.RevisionNotPresent, f.get_delta, 'base')
252
# add texts that should trip the knit maximum delta chain threshold
253
# as well as doing parallel chains of data in knits.
254
# this is done by two chains of 25 insertions
255
f.add_lines('base', [], ['line\n'])
256
f.add_lines('noeol', ['base'], ['line'])
257
# detailed eol tests:
258
# shared last line with parent no-eol
259
f.add_lines('noeolsecond', ['noeol'], ['line\n', 'line'])
260
# differing last line with parent, both no-eol
261
f.add_lines('noeolnotshared', ['noeolsecond'], ['line\n', 'phone'])
262
# add eol following a noneol parent, change content
263
f.add_lines('eol', ['noeol'], ['phone\n'])
264
# add eol following a noneol parent, no change content
265
f.add_lines('eolline', ['noeol'], ['line\n'])
266
# noeol with no parents:
267
f.add_lines('noeolbase', [], ['line'])
268
# noeol preceding its leftmost parent in the output:
269
# this is done by making it a merge of two parents with no common
270
# ancestry: noeolbase and noeol with the
271
# later-inserted parent the leftmost.
272
f.add_lines('eolbeforefirstparent', ['noeolbase', 'noeol'], ['line'])
273
# two identical eol texts
274
f.add_lines('noeoldup', ['noeol'], ['line'])
276
text_name = 'chain1-'
278
sha1s = {0 :'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
279
1 :'45e21ea146a81ea44a821737acdb4f9791c8abe7',
280
2 :'e1f11570edf3e2a070052366c582837a4fe4e9fa',
281
3 :'26b4b8626da827088c514b8f9bbe4ebf181edda1',
282
4 :'e28a5510be25ba84d31121cff00956f9970ae6f6',
283
5 :'d63ec0ce22e11dcf65a931b69255d3ac747a318d',
284
6 :'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
285
7 :'95c14da9cafbf828e3e74a6f016d87926ba234ab',
286
8 :'779e9a0b28f9f832528d4b21e17e168c67697272',
287
9 :'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
288
10:'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
289
11:'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
290
12:'31a2286267f24d8bedaa43355f8ad7129509ea85',
291
13:'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
292
14:'2c4b1736566b8ca6051e668de68650686a3922f2',
293
15:'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
294
16:'b0d2e18d3559a00580f6b49804c23fea500feab3',
295
17:'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
296
18:'5cf64a3459ae28efa60239e44b20312d25b253f3',
297
19:'1ebed371807ba5935958ad0884595126e8c4e823',
298
20:'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
299
21:'01edc447978004f6e4e962b417a4ae1955b6fe5d',
300
22:'d8d8dc49c4bf0bab401e0298bb5ad827768618bb',
301
23:'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
302
24:'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
303
25:'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
305
for depth in range(26):
306
new_version = text_name + '%s' % depth
307
text = text + ['line\n']
308
f.add_lines(new_version, [next_parent], text)
309
next_parent = new_version
311
text_name = 'chain2-'
313
for depth in range(26):
314
new_version = text_name + '%s' % depth
315
text = text + ['line\n']
316
f.add_lines(new_version, [next_parent], text)
317
next_parent = new_version
320
def test_add_delta(self):
321
# tests for the add-delta facility.
322
# at this point, optimising for speed, we assume no checks when deltas are inserted.
323
# this may need to be revisited.
324
source = self.get_file('source')
325
source.add_lines('base', [], ['line\n'])
327
text_name = 'chain1-'
329
for depth in range(26):
330
new_version = text_name + '%s' % depth
331
text = text + ['line\n']
332
source.add_lines(new_version, [next_parent], text)
333
next_parent = new_version
335
text_name = 'chain2-'
337
for depth in range(26):
338
new_version = text_name + '%s' % depth
339
text = text + ['line\n']
340
source.add_lines(new_version, [next_parent], text)
341
next_parent = new_version
342
source.add_lines('noeol', ['base'], ['line'])
344
target = self.get_file('target')
345
for version in source.versions():
346
parent, sha1, noeol, delta = source.get_delta(version)
347
target.add_delta(version,
348
source.get_parents(version),
353
self.assertRaises(RevisionAlreadyPresent,
354
target.add_delta, 'base', [], None, '', False, [])
355
for version in source.versions():
356
self.assertEqual(source.get_lines(version),
357
target.get_lines(version))
359
def test_ancestry(self):
361
self.assertEqual([], f.get_ancestry([]))
362
f.add_lines('r0', [], ['a\n', 'b\n'])
363
f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
364
f.add_lines('r2', ['r0'], ['b\n', 'c\n'])
365
f.add_lines('r3', ['r2'], ['b\n', 'c\n'])
366
f.add_lines('rM', ['r1', 'r2'], ['b\n', 'c\n'])
367
self.assertEqual([], f.get_ancestry([]))
368
versions = f.get_ancestry(['rM'])
369
# there are some possibilities:
373
# so we check indexes
374
r0 = versions.index('r0')
375
r1 = versions.index('r1')
376
r2 = versions.index('r2')
377
self.assertFalse('r3' in versions)
378
rM = versions.index('rM')
379
self.assertTrue(r0 < r1)
380
self.assertTrue(r0 < r2)
381
self.assertTrue(r1 < rM)
382
self.assertTrue(r2 < rM)
384
self.assertRaises(RevisionNotPresent,
385
f.get_ancestry, ['rM', 'rX'])
387
self.assertEqual(set(f.get_ancestry('rM')),
388
set(f.get_ancestry('rM', topo_sorted=False)))
390
def test_mutate_after_finish(self):
392
f.transaction_finished()
393
self.assertRaises(errors.OutSideTransaction, f.add_delta, '', [], '', '', False, [])
394
self.assertRaises(errors.OutSideTransaction, f.add_lines, '', [], [])
395
self.assertRaises(errors.OutSideTransaction, f.add_lines_with_ghosts, '', [], [])
396
self.assertRaises(errors.OutSideTransaction, f.fix_parents, '', [])
397
self.assertRaises(errors.OutSideTransaction, f.join, '')
398
self.assertRaises(errors.OutSideTransaction, f.clone_text, 'base', 'bar', ['foo'])
400
def test_clear_cache(self):
402
# on a new file it should not error
404
# and after adding content, doing a clear_cache and a get should work.
405
f.add_lines('0', [], ['a'])
407
self.assertEqual(['a'], f.get_lines('0'))
409
def test_clone_text(self):
411
f.add_lines('r0', [], ['a\n', 'b\n'])
412
f.clone_text('r1', 'r0', ['r0'])
414
self.assertEquals(f.get_lines('r1'), f.get_lines('r0'))
415
self.assertEquals(f.get_lines('r1'), ['a\n', 'b\n'])
416
self.assertEquals(f.get_parents('r1'), ['r0'])
418
self.assertRaises(RevisionNotPresent,
419
f.clone_text, 'r2', 'rX', [])
420
self.assertRaises(RevisionAlreadyPresent,
421
f.clone_text, 'r1', 'r0', [])
423
verify_file(self.reopen_file())
425
def test_create_empty(self):
427
f.add_lines('0', [], ['a\n'])
428
new_f = f.create_empty('t', MemoryTransport())
429
# smoke test, specific types should check it is honoured correctly for
430
# non type attributes
431
self.assertEqual([], new_f.versions())
432
self.assertTrue(isinstance(new_f, f.__class__))
434
def test_copy_to(self):
436
f.add_lines('0', [], ['a\n'])
437
t = MemoryTransport()
439
for suffix in f.__class__.get_suffixes():
440
self.assertTrue(t.has('foo' + suffix))
442
def test_get_suffixes(self):
445
self.assertEqual(f.__class__.get_suffixes(), f.__class__.get_suffixes())
446
# and should be a list
447
self.assertTrue(isinstance(f.__class__.get_suffixes(), list))
449
def build_graph(self, file, graph):
    """Populate *file* with empty texts whose ancestry matches *graph*.

    :param file: the versioned file to add texts to.
    :param graph: dict mapping version-id -> list of parent version-ids.
        Versions are inserted in topological order so every parent is
        present before its children.
    """
    for version_id in topo_sort(graph.items()):
        parents = graph[version_id]
        file.add_lines(version_id, parents, [])
453
def test_get_graph(self):
459
self.build_graph(f, graph)
460
self.assertEqual(graph, f.get_graph())
462
def test_get_graph_partial(self):
470
complex_graph.update(simple_a)
475
complex_graph.update(simple_b)
482
complex_graph.update(simple_gam)
484
simple_b_gam.update(simple_gam)
485
simple_b_gam.update(simple_b)
486
self.build_graph(f, complex_graph)
487
self.assertEqual(simple_a, f.get_graph(['a']))
488
self.assertEqual(simple_b, f.get_graph(['b']))
489
self.assertEqual(simple_gam, f.get_graph(['gam']))
490
self.assertEqual(simple_b_gam, f.get_graph(['b', 'gam']))
492
def test_get_parents(self):
494
f.add_lines('r0', [], ['a\n', 'b\n'])
495
f.add_lines('r1', [], ['a\n', 'b\n'])
496
f.add_lines('r2', [], ['a\n', 'b\n'])
497
f.add_lines('r3', [], ['a\n', 'b\n'])
498
f.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n'])
499
self.assertEquals(f.get_parents('m'), ['r0', 'r1', 'r2', 'r3'])
501
self.assertRaises(RevisionNotPresent,
504
def test_annotate(self):
506
f.add_lines('r0', [], ['a\n', 'b\n'])
507
f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
508
origins = f.annotate('r1')
509
self.assertEquals(origins[0][0], 'r1')
510
self.assertEquals(origins[1][0], 'r0')
512
self.assertRaises(RevisionNotPresent,
516
# tests that walk returns all the inclusions for the requested
517
# revisions as well as the revisions changes themselves.
518
f = self.get_file('1')
519
f.add_lines('r0', [], ['a\n', 'b\n'])
520
f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
521
f.add_lines('rX', ['r1'], ['d\n', 'b\n'])
522
f.add_lines('rY', ['r1'], ['c\n', 'e\n'])
525
for lineno, insert, dset, text in f.walk(['rX', 'rY']):
526
lines[text] = (insert, dset)
528
self.assertTrue(lines['a\n'], ('r0', set(['r1'])))
529
self.assertTrue(lines['b\n'], ('r0', set(['rY'])))
530
self.assertTrue(lines['c\n'], ('r1', set(['rX'])))
531
self.assertTrue(lines['d\n'], ('rX', set([])))
532
self.assertTrue(lines['e\n'], ('rY', set([])))
534
def test_detection(self):
535
# Test weaves detect corruption.
537
# Weaves contain a checksum of their texts.
538
# When a text is extracted, this checksum should be
541
w = self.get_file_corrupted_text()
543
self.assertEqual('hello\n', w.get_text('v1'))
544
self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
545
self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
546
self.assertRaises(errors.WeaveInvalidChecksum, w.check)
548
w = self.get_file_corrupted_checksum()
550
self.assertEqual('hello\n', w.get_text('v1'))
551
self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
552
self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
553
self.assertRaises(errors.WeaveInvalidChecksum, w.check)
555
def get_file_corrupted_text(self):
    """Return a versioned file with corrupt text but valid metadata.

    Concrete test classes must override this for their storage format.
    """
    raise NotImplementedError(self.get_file_corrupted_text)
559
def reopen_file(self, name='foo'):
    """Open the versioned file from disk again.

    Concrete test classes must override this for their storage format.
    """
    raise NotImplementedError(self.reopen_file)
563
def test_iter_lines_added_or_present_in_versions(self):
564
# test that we get at least an equal set of the lines added by
565
# versions in the weave
566
# the ordering here is to make a tree so that dumb searches have
567
# more changes to muck up.
569
class InstrumentedProgress(progress.DummyProgress):
573
progress.DummyProgress.__init__(self)
576
def update(self, msg=None, current=None, total=None):
577
self.updates.append((msg, current, total))
580
# add a base to get included
581
vf.add_lines('base', [], ['base\n'])
582
# add a ancestor to be included on one side
583
vf.add_lines('lancestor', [], ['lancestor\n'])
584
# add a ancestor to be included on the other side
585
vf.add_lines('rancestor', ['base'], ['rancestor\n'])
586
# add a child of rancestor with no eofile-nl
587
vf.add_lines('child', ['rancestor'], ['base\n', 'child\n'])
588
# add a child of lancestor and base to join the two roots
589
vf.add_lines('otherchild',
590
['lancestor', 'base'],
591
['base\n', 'lancestor\n', 'otherchild\n'])
592
def iter_with_versions(versions, expected):
593
# now we need to see what lines are returned, and how often.
600
progress = InstrumentedProgress()
601
# iterate over the lines
602
for line in vf.iter_lines_added_or_present_in_versions(versions,
605
if []!= progress.updates:
606
self.assertEqual(expected, progress.updates)
608
lines = iter_with_versions(['child', 'otherchild'],
609
[('Walking content.', 0, 2),
610
('Walking content.', 1, 2),
611
('Walking content.', 2, 2)])
612
# we must see child and otherchild
613
self.assertTrue(lines['child\n'] > 0)
614
self.assertTrue(lines['otherchild\n'] > 0)
615
# we dont care if we got more than that.
618
lines = iter_with_versions(None, [('Walking content.', 0, 5),
619
('Walking content.', 1, 5),
620
('Walking content.', 2, 5),
621
('Walking content.', 3, 5),
622
('Walking content.', 4, 5),
623
('Walking content.', 5, 5)])
624
# all lines must be seen at least once
625
self.assertTrue(lines['base\n'] > 0)
626
self.assertTrue(lines['lancestor\n'] > 0)
627
self.assertTrue(lines['rancestor\n'] > 0)
628
self.assertTrue(lines['child\n'] > 0)
629
self.assertTrue(lines['otherchild\n'] > 0)
631
def test_fix_parents(self):
632
# some versioned files allow incorrect parents to be corrected after
633
# insertion - this may not fix ancestry..
634
# if they do not supported, they just do not implement it.
635
# we test this as an interface test to ensure that those that *do*
636
# implement it get it right.
638
vf.add_lines('notbase', [], [])
639
vf.add_lines('base', [], [])
641
vf.fix_parents('notbase', ['base'])
642
except NotImplementedError:
644
self.assertEqual(['base'], vf.get_parents('notbase'))
645
# open again, check it stuck.
647
self.assertEqual(['base'], vf.get_parents('notbase'))
649
def test_fix_parents_with_ghosts(self):
650
# when fixing parents, ghosts that are listed should not be ghosts
655
vf.add_lines_with_ghosts('notbase', ['base', 'stillghost'], [])
656
except NotImplementedError:
658
vf.add_lines('base', [], [])
659
vf.fix_parents('notbase', ['base', 'stillghost'])
660
self.assertEqual(['base'], vf.get_parents('notbase'))
661
# open again, check it stuck.
663
self.assertEqual(['base'], vf.get_parents('notbase'))
664
# and check the ghosts
665
self.assertEqual(['base', 'stillghost'],
666
vf.get_parents_with_ghosts('notbase'))
668
def test_add_lines_with_ghosts(self):
669
# some versioned file formats allow lines to be added with parent
670
# information that is > than that in the format. Formats that do
671
# not support this need to raise NotImplementedError on the
672
# add_lines_with_ghosts api.
674
# add a revision with ghost parents
675
# The preferred form is utf8, but we should translate when needed
676
parent_id_unicode = u'b\xbfse'
677
parent_id_utf8 = parent_id_unicode.encode('utf8')
679
vf.add_lines_with_ghosts('notbxbfse', [parent_id_utf8], [])
680
except NotImplementedError:
681
# check the other ghost apis are also not implemented
682
self.assertRaises(NotImplementedError, vf.has_ghost, 'foo')
683
self.assertRaises(NotImplementedError, vf.get_ancestry_with_ghosts, ['foo'])
684
self.assertRaises(NotImplementedError, vf.get_parents_with_ghosts, 'foo')
685
self.assertRaises(NotImplementedError, vf.get_graph_with_ghosts)
687
vf = self.reopen_file()
688
# test key graph related apis: get_ancestry, get_graph, get_parents
690
# - these are ghost unaware and must not reflect ghosts
691
self.assertEqual(['notbxbfse'], vf.get_ancestry('notbxbfse'))
692
self.assertEqual([], vf.get_parents('notbxbfse'))
693
self.assertEqual({'notbxbfse':[]}, vf.get_graph())
694
self.assertFalse(self.callDeprecated([osutils._revision_id_warning],
695
vf.has_version, parent_id_unicode))
696
self.assertFalse(vf.has_version(parent_id_utf8))
697
# we have _with_ghost apis to give us ghost information.
698
self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry_with_ghosts(['notbxbfse']))
699
self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
700
self.assertEqual({'notbxbfse':[parent_id_utf8]}, vf.get_graph_with_ghosts())
701
self.assertTrue(self.callDeprecated([osutils._revision_id_warning],
702
vf.has_ghost, parent_id_unicode))
703
self.assertTrue(vf.has_ghost(parent_id_utf8))
704
# if we add something that is a ghost of another, it should correct the
705
# results of the prior apis
706
self.callDeprecated([osutils._revision_id_warning],
707
vf.add_lines, parent_id_unicode, [], [])
708
self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry(['notbxbfse']))
709
self.assertEqual([parent_id_utf8], vf.get_parents('notbxbfse'))
710
self.assertEqual({parent_id_utf8:[],
711
'notbxbfse':[parent_id_utf8],
714
self.assertTrue(self.callDeprecated([osutils._revision_id_warning],
715
vf.has_version, parent_id_unicode))
716
self.assertTrue(vf.has_version(parent_id_utf8))
717
# we have _with_ghost apis to give us ghost information.
718
self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry_with_ghosts(['notbxbfse']))
719
self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
720
self.assertEqual({parent_id_utf8:[],
721
'notbxbfse':[parent_id_utf8],
723
vf.get_graph_with_ghosts())
724
self.assertFalse(self.callDeprecated([osutils._revision_id_warning],
725
vf.has_ghost, parent_id_unicode))
726
self.assertFalse(vf.has_ghost(parent_id_utf8))
728
def test_add_lines_with_ghosts_after_normal_revs(self):
729
# some versioned file formats allow lines to be added with parent
730
# information that is > than that in the format. Formats that do
731
# not support this need to raise NotImplementedError on the
732
# add_lines_with_ghosts api.
734
# probe for ghost support
737
except NotImplementedError:
739
vf.add_lines_with_ghosts('base', [], ['line\n', 'line_b\n'])
740
vf.add_lines_with_ghosts('references_ghost',
742
['line\n', 'line_b\n', 'line_c\n'])
743
origins = vf.annotate('references_ghost')
744
self.assertEquals(('base', 'line\n'), origins[0])
745
self.assertEquals(('base', 'line_b\n'), origins[1])
746
self.assertEquals(('references_ghost', 'line_c\n'), origins[2])
748
def test_readonly_mode(self):
749
transport = get_transport(self.get_url('.'))
750
factory = self.get_factory()
751
vf = factory('id', transport, 0777, create=True, access_mode='w')
752
vf = factory('id', transport, access_mode='r')
753
self.assertRaises(errors.ReadOnlyError, vf.add_delta, '', [], '', '', False, [])
754
self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
755
self.assertRaises(errors.ReadOnlyError,
756
vf.add_lines_with_ghosts,
760
self.assertRaises(errors.ReadOnlyError, vf.fix_parents, 'base', [])
761
self.assertRaises(errors.ReadOnlyError, vf.join, 'base')
762
self.assertRaises(errors.ReadOnlyError, vf.clone_text, 'base', 'bar', ['foo'])
764
def test_get_sha1(self):
765
# check the sha1 data is available
768
vf.add_lines('a', [], ['a\n'])
769
# the same file, different metadata
770
vf.add_lines('b', ['a'], ['a\n'])
771
# a file differing only in last newline.
772
vf.add_lines('c', [], ['a'])
774
'3f786850e387550fdab836ed7e6dc881de23001b', vf.get_sha1('a'))
776
'3f786850e387550fdab836ed7e6dc881de23001b', vf.get_sha1('b'))
778
'86f7e437faa5a7fce15d1ddcb9eaeaea377667b8', vf.get_sha1('c'))
781
class TestWeave(TestCaseWithTransport, VersionedFileTestMixIn):
783
def get_file(self, name='foo'):
    """Create and return a fresh WeaveFile called *name* on this test's transport."""
    transport = get_transport(self.get_url('.'))
    return WeaveFile(name, transport, create=True)
786
def get_file_corrupted_text(self):
787
w = WeaveFile('foo', get_transport(self.get_url('.')), create=True)
788
w.add_lines('v1', [], ['hello\n'])
789
w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])
791
# We are going to invasively corrupt the text
792
# Make sure the internals of weave are the same
793
self.assertEqual([('{', 0)
801
self.assertEqual(['f572d396fae9206628714fb2ce00f72e94f2258f'
802
, '90f265c6e75f1c8f9ab76dcf85528352c5f215ef'
807
w._weave[4] = 'There\n'
810
def get_file_corrupted_checksum(self):
811
w = self.get_file_corrupted_text()
813
w._weave[4] = 'there\n'
814
self.assertEqual('hello\nthere\n', w.get_text('v2'))
816
#Invalid checksum, first digit changed
817
w._sha1s[1] = 'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
820
def reopen_file(self, name='foo', create=False):
    """Re-open the weave file *name* from disk.

    :param create: if True, create the file rather than requiring it
        to already exist.
    """
    transport = get_transport(self.get_url('.'))
    return WeaveFile(name, transport, create=create)
823
def test_no_implicit_create(self):
824
self.assertRaises(errors.NoSuchFile,
827
get_transport(self.get_url('.')))
829
def get_factory(self):
833
class TestKnit(TestCaseWithTransport, VersionedFileTestMixIn):
835
def get_file(self, name='foo'):
    """Create and return a fresh delta-compressed knit called *name*."""
    transport = get_transport(self.get_url('.'))
    return KnitVersionedFile(name, transport, delta=True, create=True)
839
def get_factory(self):
    """Return the versioned-file class these tests construct."""
    return KnitVersionedFile
842
def get_file_corrupted_text(self):
843
knit = self.get_file()
844
knit.add_lines('v1', [], ['hello\n'])
845
knit.add_lines('v2', ['v1'], ['hello\n', 'there\n'])
848
def reopen_file(self, name='foo', create=False):
849
return KnitVersionedFile(name, get_transport(self.get_url('.')),
853
def test_detection(self):
854
knit = self.get_file()
857
def test_no_implicit_create(self):
858
self.assertRaises(errors.NoSuchFile,
861
get_transport(self.get_url('.')))
864
class InterString(versionedfile.InterVersionedFile):
865
"""An inter-versionedfile optimised code path for strings.
867
This is for use during testing where we use strings as versionedfiles
868
so that none of the default registered interversionedfile classes will
869
match - which lets us test the match logic.
873
def is_compatible(source, target):
874
"""InterString is compatible with strings-as-versionedfiles."""
875
return isinstance(source, str) and isinstance(target, str)
878
# TODO this and the InterRepository core logic should be consolidatable
879
# if we make the registry a separate class though we still need to
880
# test the behaviour in the active registry to catch failure-to-handle-
882
class TestInterVersionedFile(TestCaseWithTransport):
884
def test_get_default_inter_versionedfile(self):
885
# test that the InterVersionedFile.get(a, b) probes
886
# for a class where is_compatible(a, b) returns
887
# true and returns a default interversionedfile otherwise.
888
# This also tests that the default registered optimised interversionedfile
889
# classes do not barf inappropriately when a surprising versionedfile type
891
dummy_a = "VersionedFile 1."
892
dummy_b = "VersionedFile 2."
893
self.assertGetsDefaultInterVersionedFile(dummy_a, dummy_b)
895
def assertGetsDefaultInterVersionedFile(self, a, b):
896
"""Asserts that InterVersionedFile.get(a, b) -> the default."""
897
inter = versionedfile.InterVersionedFile.get(a, b)
898
self.assertEqual(versionedfile.InterVersionedFile,
900
self.assertEqual(a, inter.source)
901
self.assertEqual(b, inter.target)
903
def test_register_inter_versionedfile_class(self):
904
# test that a optimised code path provider - a
905
# InterVersionedFile subclass can be registered and unregistered
906
# and that it is correctly selected when given a versionedfile
907
# pair that it returns true on for the is_compatible static method
909
dummy_a = "VersionedFile 1."
910
dummy_b = "VersionedFile 2."
911
versionedfile.InterVersionedFile.register_optimiser(InterString)
913
# we should get the default for something InterString returns False
915
self.assertFalse(InterString.is_compatible(dummy_a, None))
916
self.assertGetsDefaultInterVersionedFile(dummy_a, None)
917
# and we should get an InterString for a pair it 'likes'
918
self.assertTrue(InterString.is_compatible(dummy_a, dummy_b))
919
inter = versionedfile.InterVersionedFile.get(dummy_a, dummy_b)
920
self.assertEqual(InterString, inter.__class__)
921
self.assertEqual(dummy_a, inter.source)
922
self.assertEqual(dummy_b, inter.target)
924
versionedfile.InterVersionedFile.unregister_optimiser(InterString)
925
# now we should get the default InterVersionedFile object again.
926
self.assertGetsDefaultInterVersionedFile(dummy_a, dummy_b)
class TestReadonlyHttpMixin(object):
    """Mixin exercising a versioned file over a readonly http transport.

    Users must provide get_file() and get_factory() (see the http test
    classes below).
    """

    def test_readonly_http_works(self):
        # we should be able to read from http with a versioned file.
        vf = self.get_file()
        # try an empty file access
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
        self.assertEqual([], readonly_vf.versions())
        # now with feeling.
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
        self.assertEqual(['1', '2'], vf.versions())
        for version in readonly_vf.versions():
            readonly_vf.get_lines(version)
class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):
    """Run the readonly-http tests against WeaveFile."""

    def get_file(self):
        return WeaveFile('foo', get_transport(self.get_url('.')), create=True)

    def get_factory(self):
        return WeaveFile
class TestKnitHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):
    """Run the readonly-http tests against KnitVersionedFile."""

    def get_file(self):
        return KnitVersionedFile('foo', get_transport(self.get_url('.')),
                                 delta=True, create=True)

    def get_factory(self):
        return KnitVersionedFile
class MergeCasesMixin(object):
    """Shared merge test cases; get_file()/log_contents() come from subclasses."""

    def doMerge(self, base, a, b, mp):
        """Merge texts a and b (both derived from base) and compare to mp.

        Each argument is a list of lines without terminators; '\\n' is
        appended to every line before use.
        """
        from cStringIO import StringIO
        from textwrap import dedent

        def addcrlf(x):
            return x + '\n'

        w = self.get_file()
        w.add_lines('text0', [], map(addcrlf, base))
        w.add_lines('text1', ['text0'], map(addcrlf, a))
        w.add_lines('text2', ['text0'], map(addcrlf, b))

        self.log_contents(w)

        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                # strip the trailing newline for readable log output
                self.log('%12s | %s' % (state, line[:-1]))

        self.log('merge:')
        mt = StringIO()
        mt.writelines(w.weave_merge(p))
        mt.seek(0)
        self.log(mt.getvalue())

        mp = map(addcrlf, mp)
        self.assertEqual(mt.readlines(), mp)

    def testOneInsert(self):
        self.doMerge([],
                     ['aa'],
                     [],
                     ['aa'])

    def testSeparateInserts(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    def testSameInsert(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    # expected result for testOverlappedInsert; WeaveFile overrides this
    # with a conflicted version (see TestWeaveMerge below).
    overlappedInsertExpected = ['aaa', 'xxx', 'yyy', 'bbb']

    def testOverlappedInsert(self):
        self.doMerge(['aaa', 'bbb'],
                     ['aaa', 'xxx', 'yyy', 'bbb'],
                     ['aaa', 'xxx', 'bbb'], self.overlappedInsertExpected)

        # really it ought to reduce this to
        # ['aaa', 'xxx', 'yyy', 'bbb']

    def testClashReplace(self):
        self.doMerge(['aaa'],
                     ['xxx'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert1(self):
        self.doMerge(['aaa'],
                     ['xxx', 'aaa'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert2(self):
        self.doMerge(['aaa'],
                     ['aaa'],
                     ['yyy', 'zzz'],
                     ['yyy', 'zzz'])

    def testDeleteAndModify(self):
        """Clashing delete and modification.

        If one side modifies a region and the other deletes it then
        there should be a conflict with one side blank.
        """

        #######################################
        # skipped, not working yet
        return

        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'ddd', 'ccc'],
                     ['aaa', 'ccc'],
                     ['<<<<<<<< ', 'aaa', '=======', '>>>>>>> ', 'ccc'])

    def _test_merge_from_strings(self, base, a, b, expected):
        """Merge multiline-string texts a and b against base; compare to expected."""
        w = self.get_file()
        w.add_lines('text0', [], base.splitlines(True))
        w.add_lines('text1', ['text0'], a.splitlines(True))
        w.add_lines('text2', ['text0'], b.splitlines(True))
        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))
        self.log('merge result:')
        result_text = ''.join(w.weave_merge(p))
        self.log(result_text)
        self.assertEqualDiff(result_text, expected)

    def test_weave_merge_conflicts(self):
        # does weave merge properly handle plans that end with unchanged?
        result = ''.join(self.get_file().weave_merge([('new-a', 'hello\n')]))
        self.assertEqual(result, 'hello\n')

    def test_deletion_extended(self):
        """One side deletes, the other deletes more.
        """
        base = """\
            line 1
            line 2
            line 3
            """
        a = """\
            line 1
            line 2
            """
        b = """\
            line 1
            """
        result = """\
            line 1
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_deletion_overlap(self):
        """Delete overlapping regions with no other conflict.

        Arguably it'd be better to treat these as agreement, rather than
        conflict, but for now conflict is safer.
        """
        base = """\
            start context
            int a() {}
            int b() {}
            int c() {}
            end context
            """
        a = """\
            start context
            int a() {}
            end context
            """
        b = """\
            start context
            int c() {}
            end context
            """
        result = """\
            start context
<<<<<<< 
            int a() {}
=======
            int c() {}
>>>>>>> 
            end context
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_agreement_deletion(self):
        """Agree to delete some lines, without conflicts."""
        base = """\
            start context
            base line 1
            base line 2
            end context
            """
        a = """\
            start context
            base line 1
            end context
            """
        b = """\
            start context
            base line 1
            end context
            """
        result = """\
            start context
            base line 1
            end context
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_sync_on_deletion(self):
        """Specific case of merge where we can synchronize incorrectly.

        A previous version of the weave merge concluded that the two versions
        agreed on deleting line 2, and this could be a synchronization point.
        Line 1 was then considered in isolation, and thought to be deleted on
        both sides.

        It's better to consider the whole thing as a disagreement region.
        """
        base = """\
            start context
            base line 1
            base line 2
            end context
            """
        a = """\
            start context
            base line 1
            a's replacement line 2
            end context
            """
        b = """\
            start context
            b replaces
            both lines
            end context
            """
        result = """\
            start context
<<<<<<< 
            base line 1
            a's replacement line 2
=======
            b replaces
            both lines
>>>>>>> 
            end context
            """
        self._test_merge_from_strings(base, a, b, result)
class TestKnitMerge(TestCaseWithTransport, MergeCasesMixin):
    """Run the merge test cases against KnitVersionedFile."""

    def get_file(self, name='foo'):
        return KnitVersionedFile(name, get_transport(self.get_url('.')),
                                 delta=True, create=True)

    def log_contents(self, w):
        # knits have no useful plain-text dump, so log nothing
        pass
class TestWeaveMerge(TestCaseWithTransport, MergeCasesMixin):
    """Run the merge test cases against WeaveFile."""

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True)

    def log_contents(self, w):
        self.log('weave is:')
        tmpf = StringIO()
        write_weave(w, tmpf)
        self.log(tmpf.getvalue())

    # weave merge does not reduce the overlapped insert to agreement, so
    # expect a conflict region here (overrides MergeCasesMixin's default).
    overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======',
                                'xxx', '>>>>>>> ', 'bbb']