# Copyright (C) 2005 by Canonical Ltd
#
# Authors:
#   Johan Rydberg <jrydberg@gnu.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA


# TODO: might be nice to create a versionedfile with some type of corruption
# considered typical and check that it can be detected/corrected.
24
# Module imports, grouped stdlib first, then bzrlib.
from StringIO import StringIO

import bzrlib.errors as errors
from bzrlib.errors import (
                           RevisionNotPresent,
                           RevisionAlreadyPresent,
                           )
# NOTE(review): the second imported name after the line continuation was lost
# in transit; KnitAnnotateFactory matches upstream bzrlib -- confirm.
from bzrlib.knit import KnitVersionedFile, \
     KnitAnnotateFactory
from bzrlib.tests import TestCaseWithTransport
from bzrlib.tests.HTTPTestUtil import TestCaseWithWebserver
from bzrlib.trace import mutter
from bzrlib.transport import get_transport
from bzrlib.transport.memory import MemoryTransport
from bzrlib.tsort import topo_sort
import bzrlib.versionedfile as versionedfile
from bzrlib.weave import WeaveFile
from bzrlib.weavefile import read_weave, write_weave
46
class VersionedFileTestMixIn(object):
47
"""A mixin test class for testing VersionedFiles.
49
This is not an adaptor-style test at this point because
50
theres no dynamic substitution of versioned file implementations,
51
they are strictly controlled by their owning repositories.
56
f.add_lines('r0', [], ['a\n', 'b\n'])
57
f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
59
versions = f.versions()
60
self.assertTrue('r0' in versions)
61
self.assertTrue('r1' in versions)
62
self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
63
self.assertEquals(f.get_text('r0'), 'a\nb\n')
64
self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
65
self.assertEqual(2, len(f))
66
self.assertEqual(2, f.num_versions())
68
self.assertRaises(RevisionNotPresent,
69
f.add_lines, 'r2', ['foo'], [])
70
self.assertRaises(RevisionAlreadyPresent,
71
f.add_lines, 'r1', [], [])
73
# this checks that reopen with create=True does not break anything.
74
f = self.reopen_file(create=True)
77
def test_adds_with_parent_texts(self):
80
parent_texts['r0'] = f.add_lines('r0', [], ['a\n', 'b\n'])
82
parent_texts['r1'] = f.add_lines_with_ghosts('r1',
85
parent_texts=parent_texts)
86
except NotImplementedError:
87
# if the format doesn't support ghosts, just add normally.
88
parent_texts['r1'] = f.add_lines('r1',
91
parent_texts=parent_texts)
92
f.add_lines('r2', ['r1'], ['c\n', 'd\n'], parent_texts=parent_texts)
93
self.assertNotEqual(None, parent_texts['r0'])
94
self.assertNotEqual(None, parent_texts['r1'])
96
versions = f.versions()
97
self.assertTrue('r0' in versions)
98
self.assertTrue('r1' in versions)
99
self.assertTrue('r2' in versions)
100
self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
101
self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
102
self.assertEquals(f.get_lines('r2'), ['c\n', 'd\n'])
103
self.assertEqual(3, f.num_versions())
104
origins = f.annotate('r1')
105
self.assertEquals(origins[0][0], 'r0')
106
self.assertEquals(origins[1][0], 'r1')
107
origins = f.annotate('r2')
108
self.assertEquals(origins[0][0], 'r1')
109
self.assertEquals(origins[1][0], 'r2')
112
f = self.reopen_file()
115
def test_add_unicode_content(self):
116
# unicode content is not permitted in versioned files.
117
# versioned files version sequences of bytes only.
119
self.assertRaises(errors.BzrBadParameterUnicode,
120
vf.add_lines, 'a', [], ['a\n', u'b\n', 'c\n'])
122
(errors.BzrBadParameterUnicode, NotImplementedError),
123
vf.add_lines_with_ghosts, 'a', [], ['a\n', u'b\n', 'c\n'])
125
def test_inline_newline_throws(self):
126
# \r characters are not permitted in lines being added
128
self.assertRaises(errors.BzrBadParameterContainsNewline,
129
vf.add_lines, 'a', [], ['a\n\n'])
131
(errors.BzrBadParameterContainsNewline, NotImplementedError),
132
vf.add_lines_with_ghosts, 'a', [], ['a\n\n'])
133
# but inline CR's are allowed
134
vf.add_lines('a', [], ['a\r\n'])
136
vf.add_lines_with_ghosts('b', [], ['a\r\n'])
137
except NotImplementedError:
140
def test_get_delta(self):
142
sha1s = self._setup_for_deltas(f)
143
expected_delta = (None, '6bfa09d82ce3e898ad4641ae13dd4fdb9cf0d76b', False,
144
[(0, 0, 1, [('base', 'line\n')])])
145
self.assertEqual(expected_delta, f.get_delta('base'))
147
text_name = 'chain1-'
148
for depth in range(26):
149
new_version = text_name + '%s' % depth
150
expected_delta = (next_parent, sha1s[depth],
152
[(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
153
self.assertEqual(expected_delta, f.get_delta(new_version))
154
next_parent = new_version
156
text_name = 'chain2-'
157
for depth in range(26):
158
new_version = text_name + '%s' % depth
159
expected_delta = (next_parent, sha1s[depth], False,
160
[(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
161
self.assertEqual(expected_delta, f.get_delta(new_version))
162
next_parent = new_version
163
# smoke test for eol support
164
expected_delta = ('base', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True, [])
165
self.assertEqual(['line'], f.get_lines('noeol'))
166
self.assertEqual(expected_delta, f.get_delta('noeol'))
168
def test_get_deltas(self):
170
sha1s = self._setup_for_deltas(f)
171
deltas = f.get_deltas(f.versions())
172
expected_delta = (None, '6bfa09d82ce3e898ad4641ae13dd4fdb9cf0d76b', False,
173
[(0, 0, 1, [('base', 'line\n')])])
174
self.assertEqual(expected_delta, deltas['base'])
176
text_name = 'chain1-'
177
for depth in range(26):
178
new_version = text_name + '%s' % depth
179
expected_delta = (next_parent, sha1s[depth],
181
[(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
182
self.assertEqual(expected_delta, deltas[new_version])
183
next_parent = new_version
185
text_name = 'chain2-'
186
for depth in range(26):
187
new_version = text_name + '%s' % depth
188
expected_delta = (next_parent, sha1s[depth], False,
189
[(depth + 1, depth + 1, 1, [(new_version, 'line\n')])])
190
self.assertEqual(expected_delta, deltas[new_version])
191
next_parent = new_version
192
# smoke tests for eol support
193
expected_delta = ('base', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True, [])
194
self.assertEqual(['line'], f.get_lines('noeol'))
195
self.assertEqual(expected_delta, deltas['noeol'])
196
# smoke tests for eol support - two noeol in a row same content
197
expected_deltas = (('noeol', '3ad7ee82dbd8f29ecba073f96e43e414b3f70a4d', True,
198
[(0, 1, 2, [(u'noeolsecond', 'line\n'), (u'noeolsecond', 'line\n')])]),
199
('noeol', '3ad7ee82dbd8f29ecba073f96e43e414b3f70a4d', True,
200
[(0, 0, 1, [('noeolsecond', 'line\n')]), (1, 1, 0, [])]))
201
self.assertEqual(['line\n', 'line'], f.get_lines('noeolsecond'))
202
self.assertTrue(deltas['noeolsecond'] in expected_deltas)
203
# two no-eol in a row, different content
204
expected_delta = ('noeolsecond', '8bb553a84e019ef1149db082d65f3133b195223b', True,
205
[(1, 2, 1, [(u'noeolnotshared', 'phone\n')])])
206
self.assertEqual(['line\n', 'phone'], f.get_lines('noeolnotshared'))
207
self.assertEqual(expected_delta, deltas['noeolnotshared'])
208
# eol folling a no-eol with content change
209
expected_delta = ('noeol', 'a61f6fb6cfc4596e8d88c34a308d1e724caf8977', False,
210
[(0, 1, 1, [(u'eol', 'phone\n')])])
211
self.assertEqual(['phone\n'], f.get_lines('eol'))
212
self.assertEqual(expected_delta, deltas['eol'])
213
# eol folling a no-eol with content change
214
expected_delta = ('noeol', '6bfa09d82ce3e898ad4641ae13dd4fdb9cf0d76b', False,
215
[(0, 1, 1, [(u'eolline', 'line\n')])])
216
self.assertEqual(['line\n'], f.get_lines('eolline'))
217
self.assertEqual(expected_delta, deltas['eolline'])
218
# eol with no parents
219
expected_delta = (None, '264f39cab871e4cfd65b3a002f7255888bb5ed97', True,
220
[(0, 0, 1, [(u'noeolbase', 'line\n')])])
221
self.assertEqual(['line'], f.get_lines('noeolbase'))
222
self.assertEqual(expected_delta, deltas['noeolbase'])
223
# eol with two parents, in inverse insertion order
224
expected_deltas = (('noeolbase', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True,
225
[(0, 1, 1, [(u'eolbeforefirstparent', 'line\n')])]),
226
('noeolbase', '264f39cab871e4cfd65b3a002f7255888bb5ed97', True,
227
[(0, 1, 1, [(u'eolbeforefirstparent', 'line\n')])]))
228
self.assertEqual(['line'], f.get_lines('eolbeforefirstparent'))
229
#self.assertTrue(deltas['eolbeforefirstparent'] in expected_deltas)
231
def _setup_for_deltas(self, f):
232
self.assertRaises(errors.RevisionNotPresent, f.get_delta, 'base')
233
# add texts that should trip the knit maximum delta chain threshold
234
# as well as doing parallel chains of data in knits.
235
# this is done by two chains of 25 insertions
236
f.add_lines('base', [], ['line\n'])
237
f.add_lines('noeol', ['base'], ['line'])
238
# detailed eol tests:
239
# shared last line with parent no-eol
240
f.add_lines('noeolsecond', ['noeol'], ['line\n', 'line'])
241
# differing last line with parent, both no-eol
242
f.add_lines('noeolnotshared', ['noeolsecond'], ['line\n', 'phone'])
243
# add eol following a noneol parent, change content
244
f.add_lines('eol', ['noeol'], ['phone\n'])
245
# add eol following a noneol parent, no change content
246
f.add_lines('eolline', ['noeol'], ['line\n'])
247
# noeol with no parents:
248
f.add_lines('noeolbase', [], ['line'])
249
# noeol preceeding its leftmost parent in the output:
250
# this is done by making it a merge of two parents with no common
251
# anestry: noeolbase and noeol with the
252
# later-inserted parent the leftmost.
253
f.add_lines('eolbeforefirstparent', ['noeolbase', 'noeol'], ['line'])
254
# two identical eol texts
255
f.add_lines('noeoldup', ['noeol'], ['line'])
257
text_name = 'chain1-'
259
sha1s = {0 :'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
260
1 :'45e21ea146a81ea44a821737acdb4f9791c8abe7',
261
2 :'e1f11570edf3e2a070052366c582837a4fe4e9fa',
262
3 :'26b4b8626da827088c514b8f9bbe4ebf181edda1',
263
4 :'e28a5510be25ba84d31121cff00956f9970ae6f6',
264
5 :'d63ec0ce22e11dcf65a931b69255d3ac747a318d',
265
6 :'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
266
7 :'95c14da9cafbf828e3e74a6f016d87926ba234ab',
267
8 :'779e9a0b28f9f832528d4b21e17e168c67697272',
268
9 :'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
269
10:'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
270
11:'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
271
12:'31a2286267f24d8bedaa43355f8ad7129509ea85',
272
13:'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
273
14:'2c4b1736566b8ca6051e668de68650686a3922f2',
274
15:'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
275
16:'b0d2e18d3559a00580f6b49804c23fea500feab3',
276
17:'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
277
18:'5cf64a3459ae28efa60239e44b20312d25b253f3',
278
19:'1ebed371807ba5935958ad0884595126e8c4e823',
279
20:'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
280
21:'01edc447978004f6e4e962b417a4ae1955b6fe5d',
281
22:'d8d8dc49c4bf0bab401e0298bb5ad827768618bb',
282
23:'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
283
24:'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
284
25:'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
286
for depth in range(26):
287
new_version = text_name + '%s' % depth
288
text = text + ['line\n']
289
f.add_lines(new_version, [next_parent], text)
290
next_parent = new_version
292
text_name = 'chain2-'
294
for depth in range(26):
295
new_version = text_name + '%s' % depth
296
text = text + ['line\n']
297
f.add_lines(new_version, [next_parent], text)
298
next_parent = new_version
301
def test_add_delta(self):
302
# tests for the add-delta facility.
303
# at this point, optimising for speed, we assume no checks when deltas are inserted.
304
# this may need to be revisited.
305
source = self.get_file('source')
306
source.add_lines('base', [], ['line\n'])
308
text_name = 'chain1-'
310
for depth in range(26):
311
new_version = text_name + '%s' % depth
312
text = text + ['line\n']
313
source.add_lines(new_version, [next_parent], text)
314
next_parent = new_version
316
text_name = 'chain2-'
318
for depth in range(26):
319
new_version = text_name + '%s' % depth
320
text = text + ['line\n']
321
source.add_lines(new_version, [next_parent], text)
322
next_parent = new_version
323
source.add_lines('noeol', ['base'], ['line'])
325
target = self.get_file('target')
326
for version in source.versions():
327
parent, sha1, noeol, delta = source.get_delta(version)
328
target.add_delta(version,
329
source.get_parents(version),
334
self.assertRaises(RevisionAlreadyPresent,
335
target.add_delta, 'base', [], None, '', False, [])
336
for version in source.versions():
337
self.assertEqual(source.get_lines(version),
338
target.get_lines(version))
340
def test_ancestry(self):
342
self.assertEqual([], f.get_ancestry([]))
343
f.add_lines('r0', [], ['a\n', 'b\n'])
344
f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
345
f.add_lines('r2', ['r0'], ['b\n', 'c\n'])
346
f.add_lines('r3', ['r2'], ['b\n', 'c\n'])
347
f.add_lines('rM', ['r1', 'r2'], ['b\n', 'c\n'])
348
self.assertEqual([], f.get_ancestry([]))
349
versions = f.get_ancestry(['rM'])
350
# there are some possibilities:
354
# so we check indexes
355
r0 = versions.index('r0')
356
r1 = versions.index('r1')
357
r2 = versions.index('r2')
358
self.assertFalse('r3' in versions)
359
rM = versions.index('rM')
360
self.assertTrue(r0 < r1)
361
self.assertTrue(r0 < r2)
362
self.assertTrue(r1 < rM)
363
self.assertTrue(r2 < rM)
365
self.assertRaises(RevisionNotPresent,
366
f.get_ancestry, ['rM', 'rX'])
368
def test_mutate_after_finish(self):
370
f.transaction_finished()
371
self.assertRaises(errors.OutSideTransaction, f.add_delta, '', [], '', '', False, [])
372
self.assertRaises(errors.OutSideTransaction, f.add_lines, '', [], [])
373
self.assertRaises(errors.OutSideTransaction, f.add_lines_with_ghosts, '', [], [])
374
self.assertRaises(errors.OutSideTransaction, f.fix_parents, '', [])
375
self.assertRaises(errors.OutSideTransaction, f.join, '')
376
self.assertRaises(errors.OutSideTransaction, f.clone_text, 'base', 'bar', ['foo'])
378
def test_clear_cache(self):
380
# on a new file it should not error
382
# and after adding content, doing a clear_cache and a get should work.
383
f.add_lines('0', [], ['a'])
385
self.assertEqual(['a'], f.get_lines('0'))
387
def test_clone_text(self):
389
f.add_lines('r0', [], ['a\n', 'b\n'])
390
f.clone_text('r1', 'r0', ['r0'])
392
self.assertEquals(f.get_lines('r1'), f.get_lines('r0'))
393
self.assertEquals(f.get_lines('r1'), ['a\n', 'b\n'])
394
self.assertEquals(f.get_parents('r1'), ['r0'])
396
self.assertRaises(RevisionNotPresent,
397
f.clone_text, 'r2', 'rX', [])
398
self.assertRaises(RevisionAlreadyPresent,
399
f.clone_text, 'r1', 'r0', [])
401
verify_file(self.reopen_file())
403
def test_create_empty(self):
405
f.add_lines('0', [], ['a\n'])
406
new_f = f.create_empty('t', MemoryTransport())
407
# smoke test, specific types should check it is honoured correctly for
408
# non type attributes
409
self.assertEqual([], new_f.versions())
410
self.assertTrue(isinstance(new_f, f.__class__))
412
def test_copy_to(self):
414
f.add_lines('0', [], ['a\n'])
415
t = MemoryTransport()
417
for suffix in f.__class__.get_suffixes():
418
self.assertTrue(t.has('foo' + suffix))
420
def test_get_suffixes(self):
423
self.assertEqual(f.__class__.get_suffixes(), f.__class__.get_suffixes())
424
# and should be a list
425
self.assertTrue(isinstance(f.__class__.get_suffixes(), list))
427
def build_graph(self, file, graph):
    """Populate *file* with empty texts wired together per *graph*.

    *graph* maps version id -> list of parent version ids; versions are
    added in a topologically sorted order so parents always exist first.
    """
    ordering = topo_sort(graph.items())
    for version in ordering:
        parents = graph[version]
        file.add_lines(version, parents, [])
431
def test_get_graph(self):
437
self.build_graph(f, graph)
438
self.assertEqual(graph, f.get_graph())
440
def test_get_graph_partial(self):
448
complex_graph.update(simple_a)
453
complex_graph.update(simple_b)
460
complex_graph.update(simple_gam)
462
simple_b_gam.update(simple_gam)
463
simple_b_gam.update(simple_b)
464
self.build_graph(f, complex_graph)
465
self.assertEqual(simple_a, f.get_graph(['a']))
466
self.assertEqual(simple_b, f.get_graph(['b']))
467
self.assertEqual(simple_gam, f.get_graph(['gam']))
468
self.assertEqual(simple_b_gam, f.get_graph(['b', 'gam']))
470
def test_get_parents(self):
472
f.add_lines('r0', [], ['a\n', 'b\n'])
473
f.add_lines('r1', [], ['a\n', 'b\n'])
474
f.add_lines('r2', [], ['a\n', 'b\n'])
475
f.add_lines('r3', [], ['a\n', 'b\n'])
476
f.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n'])
477
self.assertEquals(f.get_parents('m'), ['r0', 'r1', 'r2', 'r3'])
479
self.assertRaises(RevisionNotPresent,
482
def test_annotate(self):
484
f.add_lines('r0', [], ['a\n', 'b\n'])
485
f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
486
origins = f.annotate('r1')
487
self.assertEquals(origins[0][0], 'r1')
488
self.assertEquals(origins[1][0], 'r0')
490
self.assertRaises(RevisionNotPresent,
494
# tests that walk returns all the inclusions for the requested
495
# revisions as well as the revisions changes themselves.
496
f = self.get_file('1')
497
f.add_lines('r0', [], ['a\n', 'b\n'])
498
f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
499
f.add_lines('rX', ['r1'], ['d\n', 'b\n'])
500
f.add_lines('rY', ['r1'], ['c\n', 'e\n'])
503
for lineno, insert, dset, text in f.walk(['rX', 'rY']):
504
lines[text] = (insert, dset)
506
self.assertTrue(lines['a\n'], ('r0', set(['r1'])))
507
self.assertTrue(lines['b\n'], ('r0', set(['rY'])))
508
self.assertTrue(lines['c\n'], ('r1', set(['rX'])))
509
self.assertTrue(lines['d\n'], ('rX', set([])))
510
self.assertTrue(lines['e\n'], ('rY', set([])))
512
def test_detection(self):
513
# Test weaves detect corruption.
515
# Weaves contain a checksum of their texts.
516
# When a text is extracted, this checksum should be
519
w = self.get_file_corrupted_text()
521
self.assertEqual('hello\n', w.get_text('v1'))
522
self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
523
self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
524
self.assertRaises(errors.WeaveInvalidChecksum, w.check)
526
w = self.get_file_corrupted_checksum()
528
self.assertEqual('hello\n', w.get_text('v1'))
529
self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
530
self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
531
self.assertRaises(errors.WeaveInvalidChecksum, w.check)
533
def get_file_corrupted_text(self):
    """Return a versioned file with corrupt text but valid metadata.

    Concrete test classes must override this; the mixin cannot know how
    to damage any particular storage format.
    """
    raise NotImplementedError(self.get_file_corrupted_text)
537
def reopen_file(self, name='foo'):
    """Open the versioned file from disk again.

    Concrete test classes must override this with their storage format's
    open logic.
    """
    raise NotImplementedError(self.reopen_file)
541
def test_iter_lines_added_or_present_in_versions(self):
542
# test that we get at least an equalset of the lines added by
543
# versions in the weave
544
# the ordering here is to make a tree so that dumb searches have
545
# more changes to muck up.
547
# add a base to get included
548
vf.add_lines('base', [], ['base\n'])
549
# add a ancestor to be included on one side
550
vf.add_lines('lancestor', [], ['lancestor\n'])
551
# add a ancestor to be included on the other side
552
vf.add_lines('rancestor', ['base'], ['rancestor\n'])
553
# add a child of rancestor with no eofile-nl
554
vf.add_lines('child', ['rancestor'], ['base\n', 'child\n'])
555
# add a child of lancestor and base to join the two roots
556
vf.add_lines('otherchild',
557
['lancestor', 'base'],
558
['base\n', 'lancestor\n', 'otherchild\n'])
559
def iter_with_versions(versions):
560
# now we need to see what lines are returned, and how often.
567
# iterate over the lines
568
for line in vf.iter_lines_added_or_present_in_versions(versions):
571
lines = iter_with_versions(['child', 'otherchild'])
572
# we must see child and otherchild
573
self.assertTrue(lines['child\n'] > 0)
574
self.assertTrue(lines['otherchild\n'] > 0)
575
# we dont care if we got more than that.
578
lines = iter_with_versions(None)
579
# all lines must be seen at least once
580
self.assertTrue(lines['base\n'] > 0)
581
self.assertTrue(lines['lancestor\n'] > 0)
582
self.assertTrue(lines['rancestor\n'] > 0)
583
self.assertTrue(lines['child\n'] > 0)
584
self.assertTrue(lines['otherchild\n'] > 0)
586
def test_fix_parents(self):
587
# some versioned files allow incorrect parents to be corrected after
588
# insertion - this may not fix ancestry..
589
# if they do not supported, they just do not implement it.
590
# we test this as an interface test to ensure that those that *do*
591
# implementent it get it right.
593
vf.add_lines('notbase', [], [])
594
vf.add_lines('base', [], [])
596
vf.fix_parents('notbase', ['base'])
597
except NotImplementedError:
599
self.assertEqual(['base'], vf.get_parents('notbase'))
600
# open again, check it stuck.
602
self.assertEqual(['base'], vf.get_parents('notbase'))
604
def test_fix_parents_with_ghosts(self):
605
# when fixing parents, ghosts that are listed should not be ghosts
610
vf.add_lines_with_ghosts('notbase', ['base', 'stillghost'], [])
611
except NotImplementedError:
613
vf.add_lines('base', [], [])
614
vf.fix_parents('notbase', ['base', 'stillghost'])
615
self.assertEqual(['base'], vf.get_parents('notbase'))
616
# open again, check it stuck.
618
self.assertEqual(['base'], vf.get_parents('notbase'))
619
# and check the ghosts
620
self.assertEqual(['base', 'stillghost'],
621
vf.get_parents_with_ghosts('notbase'))
623
def test_add_lines_with_ghosts(self):
624
# some versioned file formats allow lines to be added with parent
625
# information that is > than that in the format. Formats that do
626
# not support this need to raise NotImplementedError on the
627
# add_lines_with_ghosts api.
629
# add a revision with ghost parents
631
vf.add_lines_with_ghosts(u'notbxbfse', [u'b\xbfse'], [])
632
except NotImplementedError:
633
# check the other ghost apis are also not implemented
634
self.assertRaises(NotImplementedError, vf.has_ghost, 'foo')
635
self.assertRaises(NotImplementedError, vf.get_ancestry_with_ghosts, ['foo'])
636
self.assertRaises(NotImplementedError, vf.get_parents_with_ghosts, 'foo')
637
self.assertRaises(NotImplementedError, vf.get_graph_with_ghosts)
639
# test key graph related apis: getncestry, _graph, get_parents
641
# - these are ghost unaware and must not be reflect ghosts
642
self.assertEqual([u'notbxbfse'], vf.get_ancestry(u'notbxbfse'))
643
self.assertEqual([], vf.get_parents(u'notbxbfse'))
644
self.assertEqual({u'notbxbfse':[]}, vf.get_graph())
645
self.assertFalse(vf.has_version(u'b\xbfse'))
646
# we have _with_ghost apis to give us ghost information.
647
self.assertEqual([u'b\xbfse', u'notbxbfse'], vf.get_ancestry_with_ghosts([u'notbxbfse']))
648
self.assertEqual([u'b\xbfse'], vf.get_parents_with_ghosts(u'notbxbfse'))
649
self.assertEqual({u'notbxbfse':[u'b\xbfse']}, vf.get_graph_with_ghosts())
650
self.assertTrue(vf.has_ghost(u'b\xbfse'))
651
# if we add something that is a ghost of another, it should correct the
652
# results of the prior apis
653
vf.add_lines(u'b\xbfse', [], [])
654
self.assertEqual([u'b\xbfse', u'notbxbfse'], vf.get_ancestry([u'notbxbfse']))
655
self.assertEqual([u'b\xbfse'], vf.get_parents(u'notbxbfse'))
656
self.assertEqual({u'b\xbfse':[],
657
u'notbxbfse':[u'b\xbfse'],
660
self.assertTrue(vf.has_version(u'b\xbfse'))
661
# we have _with_ghost apis to give us ghost information.
662
self.assertEqual([u'b\xbfse', u'notbxbfse'], vf.get_ancestry_with_ghosts([u'notbxbfse']))
663
self.assertEqual([u'b\xbfse'], vf.get_parents_with_ghosts(u'notbxbfse'))
664
self.assertEqual({u'b\xbfse':[],
665
u'notbxbfse':[u'b\xbfse'],
667
vf.get_graph_with_ghosts())
668
self.assertFalse(vf.has_ghost(u'b\xbfse'))
670
def test_add_lines_with_ghosts_after_normal_revs(self):
671
# some versioned file formats allow lines to be added with parent
672
# information that is > than that in the format. Formats that do
673
# not support this need to raise NotImplementedError on the
674
# add_lines_with_ghosts api.
676
# probe for ghost support
679
except NotImplementedError:
681
vf.add_lines_with_ghosts('base', [], ['line\n', 'line_b\n'])
682
vf.add_lines_with_ghosts('references_ghost',
684
['line\n', 'line_b\n', 'line_c\n'])
685
origins = vf.annotate('references_ghost')
686
self.assertEquals(('base', 'line\n'), origins[0])
687
self.assertEquals(('base', 'line_b\n'), origins[1])
688
self.assertEquals(('references_ghost', 'line_c\n'), origins[2])
690
def test_readonly_mode(self):
691
transport = get_transport(self.get_url('.'))
692
factory = self.get_factory()
693
vf = factory('id', transport, 0777, create=True, access_mode='w')
694
vf = factory('id', transport, access_mode='r')
695
self.assertRaises(errors.ReadOnlyError, vf.add_delta, '', [], '', '', False, [])
696
self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
697
self.assertRaises(errors.ReadOnlyError,
698
vf.add_lines_with_ghosts,
702
self.assertRaises(errors.ReadOnlyError, vf.fix_parents, 'base', [])
703
self.assertRaises(errors.ReadOnlyError, vf.join, 'base')
704
self.assertRaises(errors.ReadOnlyError, vf.clone_text, 'base', 'bar', ['foo'])
706
def test_get_sha1(self):
707
# check the sha1 data is available
710
vf.add_lines('a', [], ['a\n'])
711
# the same file, different metadata
712
vf.add_lines('b', ['a'], ['a\n'])
713
# a file differing only in last newline.
714
vf.add_lines('c', [], ['a'])
716
'3f786850e387550fdab836ed7e6dc881de23001b', vf.get_sha1('a'))
718
'3f786850e387550fdab836ed7e6dc881de23001b', vf.get_sha1('b'))
720
'86f7e437faa5a7fce15d1ddcb9eaeaea377667b8', vf.get_sha1('c'))
723
class TestWeave(TestCaseWithTransport, VersionedFileTestMixIn):
725
def get_file(self, name='foo'):
    """Create and return a fresh weave stored on this test's transport."""
    transport = get_transport(self.get_url('.'))
    return WeaveFile(name, transport, create=True)
728
def get_file_corrupted_text(self):
729
w = WeaveFile('foo', get_transport(self.get_url('.')), create=True)
730
w.add_lines('v1', [], ['hello\n'])
731
w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])
733
# We are going to invasively corrupt the text
734
# Make sure the internals of weave are the same
735
self.assertEqual([('{', 0)
743
self.assertEqual(['f572d396fae9206628714fb2ce00f72e94f2258f'
744
, '90f265c6e75f1c8f9ab76dcf85528352c5f215ef'
749
w._weave[4] = 'There\n'
752
def get_file_corrupted_checksum(self):
753
w = self.get_file_corrupted_text()
755
w._weave[4] = 'there\n'
756
self.assertEqual('hello\nthere\n', w.get_text('v2'))
758
#Invalid checksum, first digit changed
759
w._sha1s[1] = 'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
762
def reopen_file(self, name='foo', create=False):
    """Reopen the named weave from disk, optionally creating it."""
    transport = get_transport(self.get_url('.'))
    return WeaveFile(name, transport, create=create)
765
def test_no_implicit_create(self):
766
self.assertRaises(errors.NoSuchFile,
769
get_transport(self.get_url('.')))
771
def get_factory(self):
775
class TestKnit(TestCaseWithTransport, VersionedFileTestMixIn):
777
def get_file(self, name='foo'):
    """Create and return a fresh delta-compressed knit on this test's transport."""
    transport = get_transport(self.get_url('.'))
    return KnitVersionedFile(name, transport, delta=True, create=True)
781
def get_factory(self):
    """Return the versioned-file class under test."""
    return KnitVersionedFile
784
def get_file_corrupted_text(self):
785
knit = self.get_file()
786
knit.add_lines('v1', [], ['hello\n'])
787
knit.add_lines('v2', ['v1'], ['hello\n', 'there\n'])
790
def reopen_file(self, name='foo', create=False):
791
return KnitVersionedFile(name, get_transport(self.get_url('.')),
795
def test_detection(self):
796
knit = self.get_file()
799
def test_no_implicit_create(self):
800
self.assertRaises(errors.NoSuchFile,
803
get_transport(self.get_url('.')))
806
class InterString(versionedfile.InterVersionedFile):
807
"""An inter-versionedfile optimised code path for strings.
809
This is for use during testing where we use strings as versionedfiles
810
so that none of the default regsitered interversionedfile classes will
811
match - which lets us test the match logic.
815
def is_compatible(source, target):
816
"""InterString is compatible with strings-as-versionedfiles."""
817
return isinstance(source, str) and isinstance(target, str)
820
# TODO this and the InterRepository core logic should be consolidatable
821
# if we make the registry a separate class though we still need to
822
# test the behaviour in the active registry to catch failure-to-handle-
824
class TestInterVersionedFile(TestCaseWithTransport):
826
def test_get_default_inter_versionedfile(self):
    """InterVersionedFile.get falls back to the default implementation.

    get(a, b) probes registered classes for one whose is_compatible(a, b)
    is true and otherwise returns a plain InterVersionedFile.  Using
    strings as the 'versionedfiles' also checks that the registered
    optimised classes do not barf on a surprising versionedfile type.
    """
    dummy_a = "VersionedFile 1."
    dummy_b = "VersionedFile 2."
    self.assertGetsDefaultInterVersionedFile(dummy_a, dummy_b)
837
def assertGetsDefaultInterVersionedFile(self, a, b):
    """Assert that InterVersionedFile.get(a, b) returns the default class."""
    inter = versionedfile.InterVersionedFile.get(a, b)
    # exactly the base class, not a registered optimiser
    self.assertEqual(versionedfile.InterVersionedFile, inter.__class__)
    # and the pair must be carried through unchanged
    self.assertEqual(a, inter.source)
    self.assertEqual(b, inter.target)
845
def test_register_inter_versionedfile_class(self):
    """An InterVersionedFile optimiser can be registered and unregistered.

    While registered it is selected for any pair its is_compatible static
    method accepts; after unregistering, the default comes back.
    """
    dummy_a = "VersionedFile 1."
    dummy_b = "VersionedFile 2."
    versionedfile.InterVersionedFile.register_optimiser(InterString)
    try:
        # we should get the default for something InterString returns False
        # to
        self.assertFalse(InterString.is_compatible(dummy_a, None))
        self.assertGetsDefaultInterVersionedFile(dummy_a, None)
        # and we should get an InterString for a pair it 'likes'
        self.assertTrue(InterString.is_compatible(dummy_a, dummy_b))
        inter = versionedfile.InterVersionedFile.get(dummy_a, dummy_b)
        self.assertEqual(InterString, inter.__class__)
        self.assertEqual(dummy_a, inter.source)
        self.assertEqual(dummy_b, inter.target)
    finally:
        # always deregister so other tests see a clean registry
        versionedfile.InterVersionedFile.unregister_optimiser(InterString)
    # now we should get the default InterVersionedFile object again.
    self.assertGetsDefaultInterVersionedFile(dummy_a, dummy_b)
871
class TestReadonlyHttpMixin(object):
873
def test_readonly_http_works(self):
    """A versioned file is readable over a readonly http transport."""
    vf = self.get_file()
    # try an empty file access
    readonly_vf = self.get_factory()('foo', get_transport(
        self.get_readonly_url('.')))
    self.assertEqual([], readonly_vf.versions())
    # now with some content
    vf.add_lines('1', [], ['a\n'])
    vf.add_lines('2', ['1'], ['b\n', 'a\n'])
    readonly_vf = self.get_factory()('foo', get_transport(
        self.get_readonly_url('.')))
    self.assertEqual(['1', '2'], vf.versions())
    # every version must be retrievable through the readonly copy
    for version in readonly_vf.versions():
        readonly_vf.get_lines(version)
888
class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):
891
return WeaveFile('foo', get_transport(self.get_url('.')), create=True)
893
def get_factory(self):
897
class TestKnitHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):
900
return KnitVersionedFile('foo', get_transport(self.get_url('.')),
901
delta=True, create=True)
903
def get_factory(self):
    """Return the versioned-file class served over http in this test."""
    return KnitVersionedFile
907
class MergeCasesMixin(object):
909
def doMerge(self, base, a, b, mp):
910
from cStringIO import StringIO
911
from textwrap import dedent
917
w.add_lines('text0', [], map(addcrlf, base))
918
w.add_lines('text1', ['text0'], map(addcrlf, a))
919
w.add_lines('text2', ['text0'], map(addcrlf, b))
923
self.log('merge plan:')
924
p = list(w.plan_merge('text1', 'text2'))
925
for state, line in p:
927
self.log('%12s | %s' % (state, line[:-1]))
931
mt.writelines(w.weave_merge(p))
933
self.log(mt.getvalue())
935
mp = map(addcrlf, mp)
936
self.assertEqual(mt.readlines(), mp)
939
def testOneInsert(self):
945
def testSeparateInserts(self):
    """Insertions at distinct points on each side both survive the merge."""
    base = ['aaa', 'bbb', 'ccc']
    side_a = ['aaa', 'xxx', 'bbb', 'ccc']
    side_b = ['aaa', 'bbb', 'yyy', 'ccc']
    self.doMerge(base, side_a, side_b,
                 ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])
951
def testSameInsert(self):
    """An insertion made identically on both sides appears only once."""
    base = ['aaa', 'bbb', 'ccc']
    side_a = ['aaa', 'xxx', 'bbb', 'ccc']
    side_b = ['aaa', 'xxx', 'bbb', 'yyy', 'ccc']
    self.doMerge(base, side_a, side_b,
                 ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])
956
# Expected result for testOverlappedInsert; subclasses override this to
# encode their format's conflict markup.
overlappedInsertExpected = ['aaa', 'xxx', 'yyy', 'bbb']

def testOverlappedInsert(self):
    """Overlapping insertions merge per the class's expected markup."""
    base = ['aaa', 'bbb']
    side_a = ['aaa', 'xxx', 'yyy', 'bbb']
    side_b = ['aaa', 'xxx', 'bbb']
    self.doMerge(base, side_a, side_b, self.overlappedInsertExpected)
962
# really it ought to reduce this to
963
# ['aaa', 'xxx', 'yyy', 'bbb']
966
def testClashReplace(self):
967
self.doMerge(['aaa'],
970
['<<<<<<< ', 'xxx', '=======', 'yyy', 'zzz',
973
def testNonClashInsert1(self):
974
self.doMerge(['aaa'],
977
['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz',
980
def testNonClashInsert2(self):
981
self.doMerge(['aaa'],
987
def testDeleteAndModify(self):
988
"""Clashing delete and modification.
990
If one side modifies a region and the other deletes it then
991
there should be a conflict with one side blank.
994
#######################################
995
# skippd, not working yet
998
self.doMerge(['aaa', 'bbb', 'ccc'],
999
['aaa', 'ddd', 'ccc'],
1001
['<<<<<<<< ', 'aaa', '=======', '>>>>>>> ', 'ccc'])
1003
def _test_merge_from_strings(self, base, a, b, expected):
    """Merge texts given as whole strings and diff against *expected*."""
    w = self.get_file()
    w.add_lines('text0', [], base.splitlines(True))
    w.add_lines('text1', ['text0'], a.splitlines(True))
    w.add_lines('text2', ['text0'], b.splitlines(True))
    self.log('merge plan:')
    p = list(w.plan_merge('text1', 'text2'))
    for state, line in p:
        if line:
            # strip the trailing newline for readable log output
            self.log('%12s | %s' % (state, line[:-1]))
    self.log('merge result:')
    result_text = ''.join(w.weave_merge(p))
    self.log(result_text)
    self.assertEqualDiff(result_text, expected)
1018
def test_weave_merge_conflicts(self):
    # does weave merge properly handle plans that end with unchanged?
    plan = [('new-a', 'hello\n')]
    merged = self.get_file().weave_merge(plan)
    self.assertEqual(''.join(merged), 'hello\n')
1023
def test_deletion_extended(self):
1024
"""One side deletes, the other deletes more.
1041
self._test_merge_from_strings(base, a, b, result)
1043
def test_deletion_overlap(self):
1044
"""Delete overlapping regions with no other conflict.
1046
Arguably it'd be better to treat these as agreement, rather than
1047
conflict, but for now conflict is safer.
1075
self._test_merge_from_strings(base, a, b, result)
1077
def test_agreement_deletion(self):
1078
"""Agree to delete some lines, without conflicts."""
1100
self._test_merge_from_strings(base, a, b, result)
1102
def test_sync_on_deletion(self):
1103
"""Specific case of merge where we can synchronize incorrectly.
1105
A previous version of the weave merge concluded that the two versions
1106
agreed on deleting line 2, and this could be a synchronization point.
1107
Line 1 was then considered in isolation, and thought to be deleted on
1110
It's better to consider the whole thing as a disagreement region.
1121
a's replacement line 2
1134
a's replacement line 2
1141
self._test_merge_from_strings(base, a, b, result)
1144
class TestKnitMerge(TestCaseWithTransport, MergeCasesMixin):
1146
def get_file(self, name='foo'):
    """Create a fresh delta-compressed knit for the merge tests."""
    transport = get_transport(self.get_url('.'))
    return KnitVersionedFile(name, transport, delta=True, create=True)
1150
def log_contents(self, w):
1154
class TestWeaveMerge(TestCaseWithTransport, MergeCasesMixin):
1156
def get_file(self, name='foo'):
    """Create a fresh weave for the merge tests."""
    transport = get_transport(self.get_url('.'))
    return WeaveFile(name, transport, create=True)
1159
def log_contents(self, w):
1160
self.log('weave is:')
1162
write_weave(w, tmpf)
1163
self.log(tmpf.getvalue())
1165
overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======',
1166
'xxx', '>>>>>>> ', 'bbb']