# Copyright (C) 2005 Canonical Ltd
# Johan Rydberg <jrydberg@gnu.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

# TODO: might be nice to create a versionedfile with some type of corruption
# considered typical and check that it can be detected/corrected.
24
from StringIO import StringIO
32
from bzrlib.errors import (
34
RevisionAlreadyPresent,
37
from bzrlib.knit import KnitVersionedFile, \
39
from bzrlib.tests import TestCaseWithTransport
40
from bzrlib.tests.HTTPTestUtil import TestCaseWithWebserver
41
from bzrlib.trace import mutter
42
from bzrlib.transport import get_transport
43
from bzrlib.transport.memory import MemoryTransport
44
from bzrlib.tsort import topo_sort
45
import bzrlib.versionedfile as versionedfile
46
from bzrlib.weave import WeaveFile
47
from bzrlib.weavefile import read_weave, write_weave
50
class VersionedFileTestMixIn(object):
51
"""A mixin test class for testing VersionedFiles.
53
This is not an adaptor-style test at this point because
54
theres no dynamic substitution of versioned file implementations,
55
they are strictly controlled by their owning repositories.
60
f.add_lines('r0', [], ['a\n', 'b\n'])
61
f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
63
versions = f.versions()
64
self.assertTrue('r0' in versions)
65
self.assertTrue('r1' in versions)
66
self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
67
self.assertEquals(f.get_text('r0'), 'a\nb\n')
68
self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
69
self.assertEqual(2, len(f))
70
self.assertEqual(2, f.num_versions())
72
self.assertRaises(RevisionNotPresent,
73
f.add_lines, 'r2', ['foo'], [])
74
self.assertRaises(RevisionAlreadyPresent,
75
f.add_lines, 'r1', [], [])
77
# this checks that reopen with create=True does not break anything.
78
f = self.reopen_file(create=True)
81
def test_adds_with_parent_texts(self):
84
parent_texts['r0'] = f.add_lines('r0', [], ['a\n', 'b\n'])
86
parent_texts['r1'] = f.add_lines_with_ghosts('r1',
89
parent_texts=parent_texts)
90
except NotImplementedError:
91
# if the format doesn't support ghosts, just add normally.
92
parent_texts['r1'] = f.add_lines('r1',
95
parent_texts=parent_texts)
96
f.add_lines('r2', ['r1'], ['c\n', 'd\n'], parent_texts=parent_texts)
97
self.assertNotEqual(None, parent_texts['r0'])
98
self.assertNotEqual(None, parent_texts['r1'])
100
versions = f.versions()
101
self.assertTrue('r0' in versions)
102
self.assertTrue('r1' in versions)
103
self.assertTrue('r2' in versions)
104
self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
105
self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
106
self.assertEquals(f.get_lines('r2'), ['c\n', 'd\n'])
107
self.assertEqual(3, f.num_versions())
108
origins = f.annotate('r1')
109
self.assertEquals(origins[0][0], 'r0')
110
self.assertEquals(origins[1][0], 'r1')
111
origins = f.annotate('r2')
112
self.assertEquals(origins[0][0], 'r1')
113
self.assertEquals(origins[1][0], 'r2')
116
f = self.reopen_file()
119
def test_add_unicode_content(self):
120
# unicode content is not permitted in versioned files.
121
# versioned files version sequences of bytes only.
123
self.assertRaises(errors.BzrBadParameterUnicode,
124
vf.add_lines, 'a', [], ['a\n', u'b\n', 'c\n'])
126
(errors.BzrBadParameterUnicode, NotImplementedError),
127
vf.add_lines_with_ghosts, 'a', [], ['a\n', u'b\n', 'c\n'])
129
def test_inline_newline_throws(self):
130
# \r characters are not permitted in lines being added
132
self.assertRaises(errors.BzrBadParameterContainsNewline,
133
vf.add_lines, 'a', [], ['a\n\n'])
135
(errors.BzrBadParameterContainsNewline, NotImplementedError),
136
vf.add_lines_with_ghosts, 'a', [], ['a\n\n'])
137
# but inline CR's are allowed
138
vf.add_lines('a', [], ['a\r\n'])
140
vf.add_lines_with_ghosts('b', [], ['a\r\n'])
141
except NotImplementedError:
144
def test_add_reserved(self):
146
self.assertRaises(errors.ReservedId,
147
vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])
149
self.assertRaises(errors.ReservedId,
150
vf.add_delta, 'a:', [], None, 'sha1', False, ((0, 0, 0, []),))
152
def test_get_reserved(self):
154
self.assertRaises(errors.ReservedId, vf.get_delta, 'b:')
155
self.assertRaises(errors.ReservedId, vf.get_texts, ['b:'])
156
self.assertRaises(errors.ReservedId, vf.get_lines, 'b:')
157
self.assertRaises(errors.ReservedId, vf.get_text, 'b:')
159
def test_get_delta(self):
161
sha1s = self._setup_for_deltas(f)
162
expected_delta = (None, '6bfa09d82ce3e898ad4641ae13dd4fdb9cf0d76b', False,
163
[(0, 0, 1, [('base', 'line\n')])])
164
self.assertEqual(expected_delta, f.get_delta('base'))
166
text_name = 'chain1-'
167
for depth in range(26):
168
new_version = text_name + '%s' % depth
169
expected_delta = (next_parent, sha1s[depth],
171
[(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
172
self.assertEqual(expected_delta, f.get_delta(new_version))
173
next_parent = new_version
175
text_name = 'chain2-'
176
for depth in range(26):
177
new_version = text_name + '%s' % depth
178
expected_delta = (next_parent, sha1s[depth], False,
179
[(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
180
self.assertEqual(expected_delta, f.get_delta(new_version))
181
next_parent = new_version
182
# smoke test for eol support
183
expected_delta = ('base', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True, [])
184
self.assertEqual(['line'], f.get_lines('noeol'))
185
self.assertEqual(expected_delta, f.get_delta('noeol'))
187
def test_get_deltas(self):
189
sha1s = self._setup_for_deltas(f)
190
deltas = f.get_deltas(f.versions())
191
expected_delta = (None, '6bfa09d82ce3e898ad4641ae13dd4fdb9cf0d76b', False,
192
[(0, 0, 1, [('base', 'line\n')])])
193
self.assertEqual(expected_delta, deltas['base'])
195
text_name = 'chain1-'
196
for depth in range(26):
197
new_version = text_name + '%s' % depth
198
expected_delta = (next_parent, sha1s[depth],
200
[(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
201
self.assertEqual(expected_delta, deltas[new_version])
202
next_parent = new_version
204
text_name = 'chain2-'
205
for depth in range(26):
206
new_version = text_name + '%s' % depth
207
expected_delta = (next_parent, sha1s[depth], False,
208
[(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
209
self.assertEqual(expected_delta, deltas[new_version])
210
next_parent = new_version
211
# smoke tests for eol support
212
expected_delta = ('base', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True, [])
213
self.assertEqual(['line'], f.get_lines('noeol'))
214
self.assertEqual(expected_delta, deltas['noeol'])
215
# smoke tests for eol support - two noeol in a row same content
216
expected_deltas = (('noeol', '3ad7ee82dbd8f29ecba073f96e43e414b3f70a4d', True,
217
[(0, 1, 2, [('noeolsecond', 'line\n'), ('noeolsecond', 'line\n')])]),
218
('noeol', '3ad7ee82dbd8f29ecba073f96e43e414b3f70a4d', True,
219
[(0, 0, 1, [('noeolsecond', 'line\n')]), (1, 1, 0, [])]))
220
self.assertEqual(['line\n', 'line'], f.get_lines('noeolsecond'))
221
self.assertTrue(deltas['noeolsecond'] in expected_deltas)
222
# two no-eol in a row, different content
223
expected_delta = ('noeolsecond', '8bb553a84e019ef1149db082d65f3133b195223b', True,
224
[(1, 2, 1, [('noeolnotshared', 'phone\n')])])
225
self.assertEqual(['line\n', 'phone'], f.get_lines('noeolnotshared'))
226
self.assertEqual(expected_delta, deltas['noeolnotshared'])
227
# eol folling a no-eol with content change
228
expected_delta = ('noeol', 'a61f6fb6cfc4596e8d88c34a308d1e724caf8977', False,
229
[(0, 1, 1, [('eol', 'phone\n')])])
230
self.assertEqual(['phone\n'], f.get_lines('eol'))
231
self.assertEqual(expected_delta, deltas['eol'])
232
# eol folling a no-eol with content change
233
expected_delta = ('noeol', '6bfa09d82ce3e898ad4641ae13dd4fdb9cf0d76b', False,
234
[(0, 1, 1, [('eolline', 'line\n')])])
235
self.assertEqual(['line\n'], f.get_lines('eolline'))
236
self.assertEqual(expected_delta, deltas['eolline'])
237
# eol with no parents
238
expected_delta = (None, '264f39cab871e4cfd65b3a002f7255888bb5ed97', True,
239
[(0, 0, 1, [('noeolbase', 'line\n')])])
240
self.assertEqual(['line'], f.get_lines('noeolbase'))
241
self.assertEqual(expected_delta, deltas['noeolbase'])
242
# eol with two parents, in inverse insertion order
243
expected_deltas = (('noeolbase', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True,
244
[(0, 1, 1, [('eolbeforefirstparent', 'line\n')])]),
245
('noeolbase', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True,
246
[(0, 1, 1, [('eolbeforefirstparent', 'line\n')])]))
247
self.assertEqual(['line'], f.get_lines('eolbeforefirstparent'))
248
#self.assertTrue(deltas['eolbeforefirstparent'] in expected_deltas)
250
def _setup_for_deltas(self, f):
251
self.assertRaises(errors.RevisionNotPresent, f.get_delta, 'base')
252
# add texts that should trip the knit maximum delta chain threshold
253
# as well as doing parallel chains of data in knits.
254
# this is done by two chains of 25 insertions
255
f.add_lines('base', [], ['line\n'])
256
f.add_lines('noeol', ['base'], ['line'])
257
# detailed eol tests:
258
# shared last line with parent no-eol
259
f.add_lines('noeolsecond', ['noeol'], ['line\n', 'line'])
260
# differing last line with parent, both no-eol
261
f.add_lines('noeolnotshared', ['noeolsecond'], ['line\n', 'phone'])
262
# add eol following a noneol parent, change content
263
f.add_lines('eol', ['noeol'], ['phone\n'])
264
# add eol following a noneol parent, no change content
265
f.add_lines('eolline', ['noeol'], ['line\n'])
266
# noeol with no parents:
267
f.add_lines('noeolbase', [], ['line'])
268
# noeol preceeding its leftmost parent in the output:
269
# this is done by making it a merge of two parents with no common
270
# anestry: noeolbase and noeol with the
271
# later-inserted parent the leftmost.
272
f.add_lines('eolbeforefirstparent', ['noeolbase', 'noeol'], ['line'])
273
# two identical eol texts
274
f.add_lines('noeoldup', ['noeol'], ['line'])
276
text_name = 'chain1-'
278
sha1s = {0 :'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
279
1 :'45e21ea146a81ea44a821737acdb4f9791c8abe7',
280
2 :'e1f11570edf3e2a070052366c582837a4fe4e9fa',
281
3 :'26b4b8626da827088c514b8f9bbe4ebf181edda1',
282
4 :'e28a5510be25ba84d31121cff00956f9970ae6f6',
283
5 :'d63ec0ce22e11dcf65a931b69255d3ac747a318d',
284
6 :'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
285
7 :'95c14da9cafbf828e3e74a6f016d87926ba234ab',
286
8 :'779e9a0b28f9f832528d4b21e17e168c67697272',
287
9 :'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
288
10:'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
289
11:'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
290
12:'31a2286267f24d8bedaa43355f8ad7129509ea85',
291
13:'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
292
14:'2c4b1736566b8ca6051e668de68650686a3922f2',
293
15:'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
294
16:'b0d2e18d3559a00580f6b49804c23fea500feab3',
295
17:'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
296
18:'5cf64a3459ae28efa60239e44b20312d25b253f3',
297
19:'1ebed371807ba5935958ad0884595126e8c4e823',
298
20:'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
299
21:'01edc447978004f6e4e962b417a4ae1955b6fe5d',
300
22:'d8d8dc49c4bf0bab401e0298bb5ad827768618bb',
301
23:'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
302
24:'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
303
25:'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
305
for depth in range(26):
306
new_version = text_name + '%s' % depth
307
text = text + ['line\n']
308
f.add_lines(new_version, [next_parent], text)
309
next_parent = new_version
311
text_name = 'chain2-'
313
for depth in range(26):
314
new_version = text_name + '%s' % depth
315
text = text + ['line\n']
316
f.add_lines(new_version, [next_parent], text)
317
next_parent = new_version
320
def test_add_delta(self):
321
# tests for the add-delta facility.
322
# at this point, optimising for speed, we assume no checks when deltas are inserted.
323
# this may need to be revisited.
324
source = self.get_file('source')
325
source.add_lines('base', [], ['line\n'])
327
text_name = 'chain1-'
329
for depth in range(26):
330
new_version = text_name + '%s' % depth
331
text = text + ['line\n']
332
source.add_lines(new_version, [next_parent], text)
333
next_parent = new_version
335
text_name = 'chain2-'
337
for depth in range(26):
338
new_version = text_name + '%s' % depth
339
text = text + ['line\n']
340
source.add_lines(new_version, [next_parent], text)
341
next_parent = new_version
342
source.add_lines('noeol', ['base'], ['line'])
344
target = self.get_file('target')
345
for version in source.versions():
346
parent, sha1, noeol, delta = source.get_delta(version)
347
target.add_delta(version,
348
source.get_parents(version),
353
self.assertRaises(RevisionAlreadyPresent,
354
target.add_delta, 'base', [], None, '', False, [])
355
for version in source.versions():
356
self.assertEqual(source.get_lines(version),
357
target.get_lines(version))
359
def test_ancestry(self):
361
self.assertEqual([], f.get_ancestry([]))
362
f.add_lines('r0', [], ['a\n', 'b\n'])
363
f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
364
f.add_lines('r2', ['r0'], ['b\n', 'c\n'])
365
f.add_lines('r3', ['r2'], ['b\n', 'c\n'])
366
f.add_lines('rM', ['r1', 'r2'], ['b\n', 'c\n'])
367
self.assertEqual([], f.get_ancestry([]))
368
versions = f.get_ancestry(['rM'])
369
# there are some possibilities:
373
# so we check indexes
374
r0 = versions.index('r0')
375
r1 = versions.index('r1')
376
r2 = versions.index('r2')
377
self.assertFalse('r3' in versions)
378
rM = versions.index('rM')
379
self.assertTrue(r0 < r1)
380
self.assertTrue(r0 < r2)
381
self.assertTrue(r1 < rM)
382
self.assertTrue(r2 < rM)
384
self.assertRaises(RevisionNotPresent,
385
f.get_ancestry, ['rM', 'rX'])
387
def test_mutate_after_finish(self):
389
f.transaction_finished()
390
self.assertRaises(errors.OutSideTransaction, f.add_delta, '', [], '', '', False, [])
391
self.assertRaises(errors.OutSideTransaction, f.add_lines, '', [], [])
392
self.assertRaises(errors.OutSideTransaction, f.add_lines_with_ghosts, '', [], [])
393
self.assertRaises(errors.OutSideTransaction, f.fix_parents, '', [])
394
self.assertRaises(errors.OutSideTransaction, f.join, '')
395
self.assertRaises(errors.OutSideTransaction, f.clone_text, 'base', 'bar', ['foo'])
397
def test_clear_cache(self):
399
# on a new file it should not error
401
# and after adding content, doing a clear_cache and a get should work.
402
f.add_lines('0', [], ['a'])
404
self.assertEqual(['a'], f.get_lines('0'))
406
def test_clone_text(self):
408
f.add_lines('r0', [], ['a\n', 'b\n'])
409
f.clone_text('r1', 'r0', ['r0'])
411
self.assertEquals(f.get_lines('r1'), f.get_lines('r0'))
412
self.assertEquals(f.get_lines('r1'), ['a\n', 'b\n'])
413
self.assertEquals(f.get_parents('r1'), ['r0'])
415
self.assertRaises(RevisionNotPresent,
416
f.clone_text, 'r2', 'rX', [])
417
self.assertRaises(RevisionAlreadyPresent,
418
f.clone_text, 'r1', 'r0', [])
420
verify_file(self.reopen_file())
422
def test_create_empty(self):
424
f.add_lines('0', [], ['a\n'])
425
new_f = f.create_empty('t', MemoryTransport())
426
# smoke test, specific types should check it is honoured correctly for
427
# non type attributes
428
self.assertEqual([], new_f.versions())
429
self.assertTrue(isinstance(new_f, f.__class__))
431
def test_copy_to(self):
433
f.add_lines('0', [], ['a\n'])
434
t = MemoryTransport()
436
for suffix in f.__class__.get_suffixes():
437
self.assertTrue(t.has('foo' + suffix))
439
def test_get_suffixes(self):
442
self.assertEqual(f.__class__.get_suffixes(), f.__class__.get_suffixes())
443
# and should be a list
444
self.assertTrue(isinstance(f.__class__.get_suffixes(), list))
446
def build_graph(self, file, graph):
    """Populate *file* with empty-bodied versions wired up as *graph*.

    *graph* maps version id -> list of parent version ids.  Versions are
    inserted in topological order so every parent exists before any
    version that references it.
    """
    for version in topo_sort(graph.items()):
        file.add_lines(version, graph[version], [])
450
def test_get_graph(self):
456
self.build_graph(f, graph)
457
self.assertEqual(graph, f.get_graph())
459
def test_get_graph_partial(self):
467
complex_graph.update(simple_a)
472
complex_graph.update(simple_b)
479
complex_graph.update(simple_gam)
481
simple_b_gam.update(simple_gam)
482
simple_b_gam.update(simple_b)
483
self.build_graph(f, complex_graph)
484
self.assertEqual(simple_a, f.get_graph(['a']))
485
self.assertEqual(simple_b, f.get_graph(['b']))
486
self.assertEqual(simple_gam, f.get_graph(['gam']))
487
self.assertEqual(simple_b_gam, f.get_graph(['b', 'gam']))
489
def test_get_parents(self):
491
f.add_lines('r0', [], ['a\n', 'b\n'])
492
f.add_lines('r1', [], ['a\n', 'b\n'])
493
f.add_lines('r2', [], ['a\n', 'b\n'])
494
f.add_lines('r3', [], ['a\n', 'b\n'])
495
f.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n'])
496
self.assertEquals(f.get_parents('m'), ['r0', 'r1', 'r2', 'r3'])
498
self.assertRaises(RevisionNotPresent,
501
def test_annotate(self):
503
f.add_lines('r0', [], ['a\n', 'b\n'])
504
f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
505
origins = f.annotate('r1')
506
self.assertEquals(origins[0][0], 'r1')
507
self.assertEquals(origins[1][0], 'r0')
509
self.assertRaises(RevisionNotPresent,
513
# tests that walk returns all the inclusions for the requested
514
# revisions as well as the revisions changes themselves.
515
f = self.get_file('1')
516
f.add_lines('r0', [], ['a\n', 'b\n'])
517
f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
518
f.add_lines('rX', ['r1'], ['d\n', 'b\n'])
519
f.add_lines('rY', ['r1'], ['c\n', 'e\n'])
522
for lineno, insert, dset, text in f.walk(['rX', 'rY']):
523
lines[text] = (insert, dset)
525
self.assertTrue(lines['a\n'], ('r0', set(['r1'])))
526
self.assertTrue(lines['b\n'], ('r0', set(['rY'])))
527
self.assertTrue(lines['c\n'], ('r1', set(['rX'])))
528
self.assertTrue(lines['d\n'], ('rX', set([])))
529
self.assertTrue(lines['e\n'], ('rY', set([])))
531
def test_detection(self):
532
# Test weaves detect corruption.
534
# Weaves contain a checksum of their texts.
535
# When a text is extracted, this checksum should be
538
w = self.get_file_corrupted_text()
540
self.assertEqual('hello\n', w.get_text('v1'))
541
self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
542
self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
543
self.assertRaises(errors.WeaveInvalidChecksum, w.check)
545
w = self.get_file_corrupted_checksum()
547
self.assertEqual('hello\n', w.get_text('v1'))
548
self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
549
self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
550
self.assertRaises(errors.WeaveInvalidChecksum, w.check)
552
def get_file_corrupted_text(self):
    """Return a versioned file with corrupt text but valid metadata.

    Concrete test classes must override this; the mixin itself has no
    storage to corrupt.
    """
    raise NotImplementedError(self.get_file_corrupted_text)
556
def reopen_file(self, name='foo'):
    """Open the versioned file from disk again.

    Concrete test classes must override this to re-read their own
    storage format.
    """
    raise NotImplementedError(self.reopen_file)
560
def test_iter_lines_added_or_present_in_versions(self):
561
# test that we get at least an equalset of the lines added by
562
# versions in the weave
563
# the ordering here is to make a tree so that dumb searches have
564
# more changes to muck up.
566
class InstrumentedProgress(progress.DummyProgress):
570
progress.DummyProgress.__init__(self)
573
def update(self, msg=None, current=None, total=None):
574
self.updates.append((msg, current, total))
577
# add a base to get included
578
vf.add_lines('base', [], ['base\n'])
579
# add a ancestor to be included on one side
580
vf.add_lines('lancestor', [], ['lancestor\n'])
581
# add a ancestor to be included on the other side
582
vf.add_lines('rancestor', ['base'], ['rancestor\n'])
583
# add a child of rancestor with no eofile-nl
584
vf.add_lines('child', ['rancestor'], ['base\n', 'child\n'])
585
# add a child of lancestor and base to join the two roots
586
vf.add_lines('otherchild',
587
['lancestor', 'base'],
588
['base\n', 'lancestor\n', 'otherchild\n'])
589
def iter_with_versions(versions, expected):
590
# now we need to see what lines are returned, and how often.
597
progress = InstrumentedProgress()
598
# iterate over the lines
599
for line in vf.iter_lines_added_or_present_in_versions(versions,
602
if []!= progress.updates:
603
self.assertEqual(expected, progress.updates)
605
lines = iter_with_versions(['child', 'otherchild'],
606
[('Walking content.', 0, 2),
607
('Walking content.', 1, 2),
608
('Walking content.', 2, 2)])
609
# we must see child and otherchild
610
self.assertTrue(lines['child\n'] > 0)
611
self.assertTrue(lines['otherchild\n'] > 0)
612
# we dont care if we got more than that.
615
lines = iter_with_versions(None, [('Walking content.', 0, 5),
616
('Walking content.', 1, 5),
617
('Walking content.', 2, 5),
618
('Walking content.', 3, 5),
619
('Walking content.', 4, 5),
620
('Walking content.', 5, 5)])
621
# all lines must be seen at least once
622
self.assertTrue(lines['base\n'] > 0)
623
self.assertTrue(lines['lancestor\n'] > 0)
624
self.assertTrue(lines['rancestor\n'] > 0)
625
self.assertTrue(lines['child\n'] > 0)
626
self.assertTrue(lines['otherchild\n'] > 0)
628
def test_fix_parents(self):
629
# some versioned files allow incorrect parents to be corrected after
630
# insertion - this may not fix ancestry..
631
# if they do not supported, they just do not implement it.
632
# we test this as an interface test to ensure that those that *do*
633
# implementent it get it right.
635
vf.add_lines('notbase', [], [])
636
vf.add_lines('base', [], [])
638
vf.fix_parents('notbase', ['base'])
639
except NotImplementedError:
641
self.assertEqual(['base'], vf.get_parents('notbase'))
642
# open again, check it stuck.
644
self.assertEqual(['base'], vf.get_parents('notbase'))
646
def test_fix_parents_with_ghosts(self):
647
# when fixing parents, ghosts that are listed should not be ghosts
652
vf.add_lines_with_ghosts('notbase', ['base', 'stillghost'], [])
653
except NotImplementedError:
655
vf.add_lines('base', [], [])
656
vf.fix_parents('notbase', ['base', 'stillghost'])
657
self.assertEqual(['base'], vf.get_parents('notbase'))
658
# open again, check it stuck.
660
self.assertEqual(['base'], vf.get_parents('notbase'))
661
# and check the ghosts
662
self.assertEqual(['base', 'stillghost'],
663
vf.get_parents_with_ghosts('notbase'))
665
def test_add_lines_with_ghosts(self):
666
# some versioned file formats allow lines to be added with parent
667
# information that is > than that in the format. Formats that do
668
# not support this need to raise NotImplementedError on the
669
# add_lines_with_ghosts api.
671
# add a revision with ghost parents
672
# The preferred form is utf8, but we should translate when needed
673
parent_id_unicode = u'b\xbfse'
674
parent_id_utf8 = parent_id_unicode.encode('utf8')
676
vf.add_lines_with_ghosts('notbxbfse', [parent_id_utf8], [])
677
except NotImplementedError:
678
# check the other ghost apis are also not implemented
679
self.assertRaises(NotImplementedError, vf.has_ghost, 'foo')
680
self.assertRaises(NotImplementedError, vf.get_ancestry_with_ghosts, ['foo'])
681
self.assertRaises(NotImplementedError, vf.get_parents_with_ghosts, 'foo')
682
self.assertRaises(NotImplementedError, vf.get_graph_with_ghosts)
684
vf = self.reopen_file()
685
# test key graph related apis: getncestry, _graph, get_parents
687
# - these are ghost unaware and must not be reflect ghosts
688
self.assertEqual(['notbxbfse'], vf.get_ancestry('notbxbfse'))
689
self.assertEqual([], vf.get_parents('notbxbfse'))
690
self.assertEqual({'notbxbfse':[]}, vf.get_graph())
691
self.assertFalse(self.callDeprecated([osutils._revision_id_warning],
692
vf.has_version, parent_id_unicode))
693
self.assertFalse(vf.has_version(parent_id_utf8))
694
# we have _with_ghost apis to give us ghost information.
695
self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry_with_ghosts(['notbxbfse']))
696
self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
697
self.assertEqual({'notbxbfse':[parent_id_utf8]}, vf.get_graph_with_ghosts())
698
self.assertTrue(self.callDeprecated([osutils._revision_id_warning],
699
vf.has_ghost, parent_id_unicode))
700
self.assertTrue(vf.has_ghost(parent_id_utf8))
701
# if we add something that is a ghost of another, it should correct the
702
# results of the prior apis
703
self.callDeprecated([osutils._revision_id_warning],
704
vf.add_lines, parent_id_unicode, [], [])
705
self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry(['notbxbfse']))
706
self.assertEqual([parent_id_utf8], vf.get_parents('notbxbfse'))
707
self.assertEqual({parent_id_utf8:[],
708
'notbxbfse':[parent_id_utf8],
711
self.assertTrue(self.callDeprecated([osutils._revision_id_warning],
712
vf.has_version, parent_id_unicode))
713
self.assertTrue(vf.has_version(parent_id_utf8))
714
# we have _with_ghost apis to give us ghost information.
715
self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry_with_ghosts(['notbxbfse']))
716
self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
717
self.assertEqual({parent_id_utf8:[],
718
'notbxbfse':[parent_id_utf8],
720
vf.get_graph_with_ghosts())
721
self.assertFalse(self.callDeprecated([osutils._revision_id_warning],
722
vf.has_ghost, parent_id_unicode))
723
self.assertFalse(vf.has_ghost(parent_id_utf8))
725
def test_add_lines_with_ghosts_after_normal_revs(self):
726
# some versioned file formats allow lines to be added with parent
727
# information that is > than that in the format. Formats that do
728
# not support this need to raise NotImplementedError on the
729
# add_lines_with_ghosts api.
731
# probe for ghost support
734
except NotImplementedError:
736
vf.add_lines_with_ghosts('base', [], ['line\n', 'line_b\n'])
737
vf.add_lines_with_ghosts('references_ghost',
739
['line\n', 'line_b\n', 'line_c\n'])
740
origins = vf.annotate('references_ghost')
741
self.assertEquals(('base', 'line\n'), origins[0])
742
self.assertEquals(('base', 'line_b\n'), origins[1])
743
self.assertEquals(('references_ghost', 'line_c\n'), origins[2])
745
def test_readonly_mode(self):
746
transport = get_transport(self.get_url('.'))
747
factory = self.get_factory()
748
vf = factory('id', transport, 0777, create=True, access_mode='w')
749
vf = factory('id', transport, access_mode='r')
750
self.assertRaises(errors.ReadOnlyError, vf.add_delta, '', [], '', '', False, [])
751
self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
752
self.assertRaises(errors.ReadOnlyError,
753
vf.add_lines_with_ghosts,
757
self.assertRaises(errors.ReadOnlyError, vf.fix_parents, 'base', [])
758
self.assertRaises(errors.ReadOnlyError, vf.join, 'base')
759
self.assertRaises(errors.ReadOnlyError, vf.clone_text, 'base', 'bar', ['foo'])
761
def test_get_sha1(self):
762
# check the sha1 data is available
765
vf.add_lines('a', [], ['a\n'])
766
# the same file, different metadata
767
vf.add_lines('b', ['a'], ['a\n'])
768
# a file differing only in last newline.
769
vf.add_lines('c', [], ['a'])
771
'3f786850e387550fdab836ed7e6dc881de23001b', vf.get_sha1('a'))
773
'3f786850e387550fdab836ed7e6dc881de23001b', vf.get_sha1('b'))
775
'86f7e437faa5a7fce15d1ddcb9eaeaea377667b8', vf.get_sha1('c'))
778
class TestWeave(TestCaseWithTransport, VersionedFileTestMixIn):
780
def get_file(self, name='foo'):
    """Create (create=True) and return a WeaveFile at *name*."""
    transport = get_transport(self.get_url('.'))
    return WeaveFile(name, transport, create=True)
783
def get_file_corrupted_text(self):
784
w = WeaveFile('foo', get_transport(self.get_url('.')), create=True)
785
w.add_lines('v1', [], ['hello\n'])
786
w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])
788
# We are going to invasively corrupt the text
789
# Make sure the internals of weave are the same
790
self.assertEqual([('{', 0)
798
self.assertEqual(['f572d396fae9206628714fb2ce00f72e94f2258f'
799
, '90f265c6e75f1c8f9ab76dcf85528352c5f215ef'
804
w._weave[4] = 'There\n'
807
def get_file_corrupted_checksum(self):
808
w = self.get_file_corrupted_text()
810
w._weave[4] = 'there\n'
811
self.assertEqual('hello\nthere\n', w.get_text('v2'))
813
#Invalid checksum, first digit changed
814
w._sha1s[1] = 'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
817
def reopen_file(self, name='foo', create=False):
    """Re-open the weave file *name* from disk, optionally creating it."""
    transport = get_transport(self.get_url('.'))
    return WeaveFile(name, transport, create=create)
820
def test_no_implicit_create(self):
821
self.assertRaises(errors.NoSuchFile,
824
get_transport(self.get_url('.')))
826
def get_factory(self):
830
class TestKnit(TestCaseWithTransport, VersionedFileTestMixIn):
832
def get_file(self, name='foo'):
    """Create (create=True) and return a delta-compressed knit at *name*."""
    transport = get_transport(self.get_url('.'))
    return KnitVersionedFile(name, transport, delta=True, create=True)
836
def get_factory(self):
    """Return the versioned-file class under test (the knit constructor)."""
    return KnitVersionedFile
839
def get_file_corrupted_text(self):
840
knit = self.get_file()
841
knit.add_lines('v1', [], ['hello\n'])
842
knit.add_lines('v2', ['v1'], ['hello\n', 'there\n'])
845
def reopen_file(self, name='foo', create=False):
846
return KnitVersionedFile(name, get_transport(self.get_url('.')),
850
def test_detection(self):
851
knit = self.get_file()
854
def test_no_implicit_create(self):
855
self.assertRaises(errors.NoSuchFile,
858
get_transport(self.get_url('.')))
861
class InterString(versionedfile.InterVersionedFile):
862
"""An inter-versionedfile optimised code path for strings.
864
This is for use during testing where we use strings as versionedfiles
865
so that none of the default regsitered interversionedfile classes will
866
match - which lets us test the match logic.
870
def is_compatible(source, target):
    """InterString is compatible with strings-as-versionedfiles.

    Both endpoints must be plain strings; anything else falls through
    to the default InterVersionedFile.
    """
    if not isinstance(source, str):
        return False
    return isinstance(target, str)
875
# TODO this and the InterRepository core logic should be consolidatable
876
# if we make the registry a separate class though we still need to
877
# test the behaviour in the active registry to catch failure-to-handle-
879
class TestInterVersionedFile(TestCaseWithTransport):
881
def test_get_default_inter_versionedfile(self):
882
# test that the InterVersionedFile.get(a, b) probes
883
# for a class where is_compatible(a, b) returns
884
# true and returns a default interversionedfile otherwise.
885
# This also tests that the default registered optimised interversionedfile
886
# classes do not barf inappropriately when a surprising versionedfile type
888
dummy_a = "VersionedFile 1."
889
dummy_b = "VersionedFile 2."
890
self.assertGetsDefaultInterVersionedFile(dummy_a, dummy_b)
892
def assertGetsDefaultInterVersionedFile(self, a, b):
893
"""Asserts that InterVersionedFile.get(a, b) -> the default."""
894
inter = versionedfile.InterVersionedFile.get(a, b)
895
self.assertEqual(versionedfile.InterVersionedFile,
897
self.assertEqual(a, inter.source)
898
self.assertEqual(b, inter.target)
900
def test_register_inter_versionedfile_class(self):
901
# test that a optimised code path provider - a
902
# InterVersionedFile subclass can be registered and unregistered
903
# and that it is correctly selected when given a versionedfile
904
# pair that it returns true on for the is_compatible static method
906
dummy_a = "VersionedFile 1."
907
dummy_b = "VersionedFile 2."
908
versionedfile.InterVersionedFile.register_optimiser(InterString)
910
# we should get the default for something InterString returns False
912
self.assertFalse(InterString.is_compatible(dummy_a, None))
913
self.assertGetsDefaultInterVersionedFile(dummy_a, None)
914
# and we should get an InterString for a pair it 'likes'
915
self.assertTrue(InterString.is_compatible(dummy_a, dummy_b))
916
inter = versionedfile.InterVersionedFile.get(dummy_a, dummy_b)
917
self.assertEqual(InterString, inter.__class__)
918
self.assertEqual(dummy_a, inter.source)
919
self.assertEqual(dummy_b, inter.target)
921
versionedfile.InterVersionedFile.unregister_optimiser(InterString)
922
# now we should get the default InterVersionedFile object again.
923
self.assertGetsDefaultInterVersionedFile(dummy_a, dummy_b)
926
class TestReadonlyHttpMixin(object):
    """Mixin checking a versioned file can be read over readonly HTTP.

    Concrete subclasses supply get_file() (a writable versioned file) and
    get_factory() (the class used to reopen the same store readonly).
    """

    def test_readonly_http_works(self):
        # we should be able to read from http with a versioned file.
        vf = self.get_file()
        # try an empty file access
        readonly_vf = self.get_factory()('foo', get_transport(
            self.get_readonly_url('.')))
        self.assertEqual([], readonly_vf.versions())
        # populate via the writable transport, then reopen readonly
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
        readonly_vf = self.get_factory()('foo', get_transport(
            self.get_readonly_url('.')))
        self.assertEqual(['1', '2'], vf.versions())
        # every version must be retrievable through the readonly transport
        for version in readonly_vf.versions():
            readonly_vf.get_lines(version)
943
class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):
    """Run the readonly-HTTP tests against WeaveFile."""

    def get_file(self):
        # writable weave on the local (writable) transport
        return WeaveFile('foo', get_transport(self.get_url('.')), create=True)

    def get_factory(self):
        # class used to reopen the same store readonly
        return WeaveFile
952
class TestKnitHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):
    """Run the readonly-HTTP tests against KnitVersionedFile."""

    def get_file(self):
        # writable delta-compressed knit on the local transport
        return KnitVersionedFile('foo', get_transport(self.get_url('.')),
                                 delta=True, create=True)

    def get_factory(self):
        # class used to reopen the same store readonly
        return KnitVersionedFile
962
class MergeCasesMixin(object):
964
def doMerge(self, base, a, b, mp):
965
from cStringIO import StringIO
966
from textwrap import dedent
972
w.add_lines('text0', [], map(addcrlf, base))
973
w.add_lines('text1', ['text0'], map(addcrlf, a))
974
w.add_lines('text2', ['text0'], map(addcrlf, b))
978
self.log('merge plan:')
979
p = list(w.plan_merge('text1', 'text2'))
980
for state, line in p:
982
self.log('%12s | %s' % (state, line[:-1]))
986
mt.writelines(w.weave_merge(p))
988
self.log(mt.getvalue())
990
mp = map(addcrlf, mp)
991
self.assertEqual(mt.readlines(), mp)
994
def testOneInsert(self):
1000
def testSeparateInserts(self):
1001
self.doMerge(['aaa', 'bbb', 'ccc'],
1002
['aaa', 'xxx', 'bbb', 'ccc'],
1003
['aaa', 'bbb', 'yyy', 'ccc'],
1004
['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])
1006
def testSameInsert(self):
1007
self.doMerge(['aaa', 'bbb', 'ccc'],
1008
['aaa', 'xxx', 'bbb', 'ccc'],
1009
['aaa', 'xxx', 'bbb', 'yyy', 'ccc'],
1010
['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])
1011
overlappedInsertExpected = ['aaa', 'xxx', 'yyy', 'bbb']
1012
def testOverlappedInsert(self):
1013
self.doMerge(['aaa', 'bbb'],
1014
['aaa', 'xxx', 'yyy', 'bbb'],
1015
['aaa', 'xxx', 'bbb'], self.overlappedInsertExpected)
1017
# really it ought to reduce this to
1018
# ['aaa', 'xxx', 'yyy', 'bbb']
1021
def testClashReplace(self):
1022
self.doMerge(['aaa'],
1025
['<<<<<<< ', 'xxx', '=======', 'yyy', 'zzz',
1028
def testNonClashInsert1(self):
1029
self.doMerge(['aaa'],
1032
['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz',
1035
def testNonClashInsert2(self):
1036
self.doMerge(['aaa'],
1042
def testDeleteAndModify(self):
1043
"""Clashing delete and modification.
1045
If one side modifies a region and the other deletes it then
1046
there should be a conflict with one side blank.
1049
#######################################
1050
# skippd, not working yet
1053
self.doMerge(['aaa', 'bbb', 'ccc'],
1054
['aaa', 'ddd', 'ccc'],
1056
['<<<<<<<< ', 'aaa', '=======', '>>>>>>> ', 'ccc'])
1058
def _test_merge_from_strings(self, base, a, b, expected):
1060
w.add_lines('text0', [], base.splitlines(True))
1061
w.add_lines('text1', ['text0'], a.splitlines(True))
1062
w.add_lines('text2', ['text0'], b.splitlines(True))
1063
self.log('merge plan:')
1064
p = list(w.plan_merge('text1', 'text2'))
1065
for state, line in p:
1067
self.log('%12s | %s' % (state, line[:-1]))
1068
self.log('merge result:')
1069
result_text = ''.join(w.weave_merge(p))
1070
self.log(result_text)
1071
self.assertEqualDiff(result_text, expected)
1073
def test_weave_merge_conflicts(self):
1074
# does weave merge properly handle plans that end with unchanged?
1075
result = ''.join(self.get_file().weave_merge([('new-a', 'hello\n')]))
1076
self.assertEqual(result, 'hello\n')
1078
def test_deletion_extended(self):
# NOTE(review): the base/a/b/result fixture strings and the docstring
# terminator are missing from this extraction (original lines 1080-1095);
# restore this method from version control before running.
1078
"""One side deletes, the other deletes more.
1096
self._test_merge_from_strings(base, a, b, result)
1098
def test_deletion_overlap(self):
# NOTE(review): the fixture strings and docstring terminator are missing
# from this extraction (original lines 1103-1129); restore this method
# from version control before running.
1098
"""Delete overlapping regions with no other conflict.
1100
Arguably it'd be better to treat these as agreement, rather than
1101
conflict, but for now conflict is safer.
1130
self._test_merge_from_strings(base, a, b, result)
1132
def test_agreement_deletion(self):
# NOTE(review): the base/a/b/result fixture strings are missing from this
# extraction (original lines 1134-1154); restore this method from version
# control before running.
1132
"""Agree to delete some lines, without conflicts."""
1155
self._test_merge_from_strings(base, a, b, result)
1157
def test_sync_on_deletion(self):
# NOTE(review): most of this method's docstring and all fixture strings
# are missing from this extraction (gaps after original lines 1162, 1176
# and 1189); restore it from version control before running.
1157
"""Specific case of merge where we can synchronize incorrectly.
1159
A previous version of the weave merge concluded that the two versions
1160
agreed on deleting line 2, and this could be a synchronization point.
1161
Line 1 was then considered in isolation, and thought to be deleted on
1165
It's better to consider the whole thing as a disagreement region.
1175
a's replacement line 2
1188
a's replacement line 2
1196
self._test_merge_from_strings(base, a, b, result)
1199
class TestKnitMerge(TestCaseWithTransport, MergeCasesMixin):
    """Run the MergeCasesMixin cases against knit versioned files."""

    def get_file(self, name='foo'):
        return KnitVersionedFile(name, get_transport(self.get_url('.')),
                                 delta=True, create=True)

    def log_contents(self, w):
        # knit internals are not useful to dump for these merge tests
        pass
1209
class TestWeaveMerge(TestCaseWithTransport, MergeCasesMixin):
    """Run the MergeCasesMixin cases against weave versioned files."""

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True)

    def log_contents(self, w):
        # dump the raw weave so failures are easy to diagnose
        self.log('weave is:')
        tmpf = StringIO()
        write_weave(w, tmpf)
        self.log(tmpf.getvalue())

    # the weave merge reports a conflict for the overlapped-insert case,
    # so override the mixin's expected output
    overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======',
                                'xxx', '>>>>>>> ', 'bbb']