# Copyright (C) 2005, 2009 Canonical Ltd
# Johan Rydberg <jrydberg@gnu.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

# TODO: might be nice to create a versionedfile with some type of corruption
# considered typical and check that it can be detected/corrected.

from itertools import chain, izip
from StringIO import StringIO
from bzrlib import (
    errors,
    groupcompress,
    knit as _mod_knit,
    osutils,
    progress,
    ui,
    )
from bzrlib.errors import (
    RevisionNotPresent,
    RevisionAlreadyPresent,
    )
from bzrlib.knit import (
    cleanup_pack_knit,
    make_file_factory,
    make_pack_factory,
    )
from bzrlib.tests import (
    TestCaseWithMemoryTransport,
    TestSkipped,
    condition_isinstance,
    split_suite_by_condition,
    multiply_tests,
    )
from bzrlib.tests.http_utils import TestCaseWithWebserver
from bzrlib.trace import mutter
from bzrlib.transport import get_transport
from bzrlib.transport.memory import MemoryTransport
from bzrlib.tsort import topo_sort
from bzrlib.tuned_gzip import GzipFile
import bzrlib.versionedfile as versionedfile
from bzrlib.versionedfile import (
    ConstantMapper,
    HashEscapedPrefixMapper,
    PrefixMapper,
    VirtualVersionedFiles,
    make_versioned_files_factory,
    )
from bzrlib.weave import WeaveFile
from bzrlib.weavefile import read_weave, write_weave


def load_tests(standard_tests, module, loader):
    """Parameterize VersionedFiles tests for different implementations."""
    to_adapt, result = split_suite_by_condition(
        standard_tests, condition_isinstance(TestVersionedFiles))
    # We want to be sure of behaviour for:
    # weaves prefix layout (weave texts)
    # individually named weaves (weave inventories)
    # annotated knits - prefix|hash|hash-escape layout, we test the third only
    #                   as it is the most complex mapper.
    # individually named knits
    # individual no-graph knits in packs (signatures)
    # individual graph knits in packs (inventories)
    # individual graph nocompression knits in packs (revisions)
    # plain text knits in packs (texts)
    len_one_scenarios = [
        ('weave-named', {
            'cleanup':None,
            'factory':make_versioned_files_factory(WeaveFile,
                ConstantMapper('inventory')),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-knit', {
            'cleanup':None,
            'factory':make_file_factory(False, ConstantMapper('revisions')),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-nograph-nodelta-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(False, False, 1),
            'graph':False,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-graph-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, True, 1),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': True,
            }),
        ('named-graph-nodelta-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, False, 1),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('groupcompress-nograph', {
            'cleanup':groupcompress.cleanup_pack_group,
            'factory':groupcompress.make_pack_factory(False, False, 1),
            'graph':False,
            'key_length':1,
            'support_partial_insertion':False,
            }),
        ]
    len_two_scenarios = [
        ('weave-prefix', {
            'cleanup':None,
            'factory':make_versioned_files_factory(WeaveFile,
                PrefixMapper()),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': False,
            }),
        ('annotated-knit-escape', {
            'cleanup':None,
            'factory':make_file_factory(True, HashEscapedPrefixMapper()),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': False,
            }),
        ('plain-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, True, 2),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': True,
            }),
        ('groupcompress', {
            'cleanup':groupcompress.cleanup_pack_group,
            'factory':groupcompress.make_pack_factory(True, False, 1),
            'graph':True,
            'key_length':1,
            'support_partial_insertion':False,
            }),
        ]
    scenarios = len_one_scenarios + len_two_scenarios
    return multiply_tests(to_adapt, scenarios, result)
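
# Each (name, attributes) scenario above is applied to TestVersionedFiles by
# multiply_tests: every test is cloned once per scenario and the dict entries
# become instance attributes (self.factory, self.graph, self.key_length and
# so on). A rough sketch of what the bzrlib.tests helper does (illustrative
# only, not the real implementation):
#
#   for name, attrs in scenarios:
#       for test in to_adapt:
#           clone = copy.copy(test)
#           for attr, value in attrs.items():
#               setattr(clone, attr, value)
#           result.addTest(clone)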


def get_diamond_vf(f, trailing_eol=True, left_only=False):
    """Get a diamond graph to exercise deltas and merges.

    :param trailing_eol: If True end the last line with \n.
    """
    parents = {
        'origin': (),
        'base': (('origin',),),
        'left': (('base',),),
        'right': (('base',),),
        'merged': (('left',), ('right',)),
        }
    # insert a diamond graph to exercise deltas and merges.
    if trailing_eol:
        last_char = '\n'
    else:
        last_char = ''
    f.add_lines('origin', [], ['origin' + last_char])
    f.add_lines('base', ['origin'], ['base' + last_char])
    f.add_lines('left', ['base'], ['base\n', 'left' + last_char])
    if not left_only:
        f.add_lines('right', ['base'],
            ['base\n', 'right' + last_char])
        f.add_lines('merged', ['left', 'right'],
            ['base\n', 'left\n', 'right\n', 'merged' + last_char])
    return f, parents
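
# The ancestry graph built by get_diamond_vf above (and get_diamond_files
# below), for reference:
#
#           origin
#             |
#            base
#            /  \
#         left  right
#            \  /
#           merged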


def get_diamond_files(files, key_length, trailing_eol=True, left_only=False,
    nograph=False, nokeys=False):
    """Get a diamond graph to exercise deltas and merges.

    This creates a 5-node graph in files. If files supports 2-length keys two
    graphs are made to exercise the support for multiple ids.

    :param trailing_eol: If True end the last line with \n.
    :param key_length: The length of keys in files. Currently supports length
        1 and 2 keys.
    :param left_only: If True do not add the right and merged nodes.
    :param nograph: If True, do not provide parents to the add_lines calls;
        this is useful for tests that need inserted data but have graphless
        stores.
    :param nokeys: If True, pass None as the key for all insertions.
        Currently implies nograph.
    :return: The results of the add_lines calls.
    """
    if nokeys:
        nograph = True
    if key_length == 1:
        prefixes = [()]
    else:
        prefixes = [('FileA',), ('FileB',)]
    # insert a diamond graph to exercise deltas and merges.
    if trailing_eol:
        last_char = '\n'
    else:
        last_char = ''
    result = []
    def get_parents(suffix_list):
        if nograph:
            return ()
        else:
            result = [prefix + suffix for suffix in suffix_list]
            return result
    def get_key(suffix):
        if nokeys:
            return (None,)
        else:
            return (suffix,)
    # we loop over each key because that spreads the inserts across prefixes,
    # which is how commit operates.
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('origin'), (),
            ['origin' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('base'),
            get_parents([('origin',)]), ['base' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('left'),
            get_parents([('base',)]),
            ['base\n', 'left' + last_char]))
    if not left_only:
        for prefix in prefixes:
            result.append(files.add_lines(prefix + get_key('right'),
                get_parents([('base',)]),
                ['base\n', 'right' + last_char]))
        for prefix in prefixes:
            result.append(files.add_lines(prefix + get_key('merged'),
                get_parents([('left',), ('right',)]),
                ['base\n', 'left\n', 'right\n', 'merged' + last_char]))
    return result
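
# A sketch of how this helper is typically driven (illustrative): with
# key_length 2 the diamond is inserted once per prefix, so the files object
# ends up holding ('FileA', 'origin') ... ('FileB', 'merged'), which is what
# the parameterised TestVersionedFiles scenarios rely on:
#
#   files = make_pack_factory(True, True, 2)(transport)
#   get_diamond_files(files, 2)   # populates both the FileA and FileB graphs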


class VersionedFileTestMixIn(object):
    """A mixin test class for testing VersionedFiles.

    This is not an adaptor-style test at this point because there's no
    dynamic substitution of versioned file implementations; they are
    strictly controlled by their owning repositories.
    """

    def get_transaction(self):
        if not hasattr(self, '_transaction'):
            self._transaction = None
        return self._transaction

    def test_add(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
        versions = f.versions()
        self.assertTrue('r0' in versions)
        self.assertTrue('r1' in versions)
        self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
        self.assertEquals(f.get_text('r0'), 'a\nb\n')
        self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
        self.assertEqual(2, len(f))
        self.assertEqual(2, f.num_versions())
        self.assertRaises(RevisionNotPresent,
            f.add_lines, 'r2', ['foo'], [])
        self.assertRaises(RevisionAlreadyPresent,
            f.add_lines, 'r1', [], [])
        # this checks that reopen with create=True does not break anything.
        f = self.reopen_file(create=True)

    def test_adds_with_parent_texts(self):
        f = self.get_file()
        parent_texts = {}
        _, _, parent_texts['r0'] = f.add_lines('r0', [], ['a\n', 'b\n'])
        try:
            _, _, parent_texts['r1'] = f.add_lines_with_ghosts('r1',
                ['r0', 'ghost'], ['b\n', 'c\n'], parent_texts=parent_texts)
        except NotImplementedError:
            # if the format doesn't support ghosts, just add normally.
            _, _, parent_texts['r1'] = f.add_lines('r1',
                ['r0'], ['b\n', 'c\n'], parent_texts=parent_texts)
        f.add_lines('r2', ['r1'], ['c\n', 'd\n'], parent_texts=parent_texts)
        self.assertNotEqual(None, parent_texts['r0'])
        self.assertNotEqual(None, parent_texts['r1'])
        versions = f.versions()
        self.assertTrue('r0' in versions)
        self.assertTrue('r1' in versions)
        self.assertTrue('r2' in versions)
        self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
        self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
        self.assertEquals(f.get_lines('r2'), ['c\n', 'd\n'])
        self.assertEqual(3, f.num_versions())
        origins = f.annotate('r1')
        self.assertEquals(origins[0][0], 'r0')
        self.assertEquals(origins[1][0], 'r1')
        origins = f.annotate('r2')
        self.assertEquals(origins[0][0], 'r1')
        self.assertEquals(origins[1][0], 'r2')
        f = self.reopen_file()

    def test_add_unicode_content(self):
        # unicode content is not permitted in versioned files.
        # versioned files version sequences of bytes only.
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterUnicode,
            vf.add_lines, 'a', [], ['a\n', u'b\n', 'c\n'])
        self.assertRaises(
            (errors.BzrBadParameterUnicode, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n', u'b\n', 'c\n'])

    def test_add_follows_left_matching_blocks(self):
        """If we change left_matching_blocks, the delta changes.

        Note: There are multiple correct deltas in this case, because
        we start with 1 "a" and we get 3.
        """
        vf = self.get_file()
        if isinstance(vf, WeaveFile):
            raise TestSkipped("WeaveFile ignores left_matching_blocks")
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['a\n', 'a\n', 'a\n'],
                     left_matching_blocks=[(0, 0, 1), (1, 3, 0)])
        self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('2'))
        vf.add_lines('3', ['1'], ['a\n', 'a\n', 'a\n'],
                     left_matching_blocks=[(0, 2, 1), (1, 3, 0)])
        self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('3'))
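
        # left_matching_blocks uses difflib-style matching blocks: each
        # (i, j, n) triple asserts parent_lines[i:i+n] == new_lines[j:j+n]
        # (the same triple semantics as
        # difflib.SequenceMatcher.get_matching_blocks). So (0, 0, 1) above
        # pins the parent's single 'a\n' to the first output line, while
        # (0, 2, 1) pins it to the third, producing two different but equally
        # valid deltas for identical results.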

    def test_inline_newline_throws(self):
        # \r characters are not permitted in lines being added
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterContainsNewline,
            vf.add_lines, 'a', [], ['a\n\n'])
        self.assertRaises(
            (errors.BzrBadParameterContainsNewline, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n\n'])
        # but inline CR's are allowed
        vf.add_lines('a', [], ['a\r\n'])
        try:
            vf.add_lines_with_ghosts('b', [], ['a\r\n'])
        except NotImplementedError:
            pass

    def test_add_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId,
            vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])

    def test_add_lines_nostoresha(self):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                              version + "2")

    def test_add_lines_with_ghosts_nostoresha(self):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        # is the test applicable to this vf implementation?
        try:
            vf.add_lines_with_ghosts('d', [], [])
        except NotImplementedError:
            raise TestSkipped("add_lines_with_ghosts is optional")
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines_with_ghosts, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                              version + "2")

    def test_add_lines_return_value(self):
        # add_lines should return the sha1 and the text size.
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        # check results for the three cases:
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            # the first two elements are the same for all versioned files:
            # - the digest and the size of the text. For some versioned files
            #   additional data is returned in additional tuple elements.
            result = vf.add_lines(version, [], lines)
            self.assertEqual(3, len(result))
            self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
                result[0:2])
        # parents should not affect the result:
        lines = sample_text_nl[1]
        self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
            vf.add_lines('d', ['b', 'c'], lines)[0:2])
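
        # For reference, the expected digest is just the sha1 of the
        # concatenated lines, e.g. (illustrative):
        #   osutils.sha_strings(["foo\n", "bar\n"])
        #   == hashlib.sha1("foo\nbar\n").hexdigest()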

    def test_get_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId, vf.get_texts, ['b:'])
        self.assertRaises(errors.ReservedId, vf.get_lines, 'b:')
        self.assertRaises(errors.ReservedId, vf.get_text, 'b:')

    def test_add_unchanged_last_line_noeol_snapshot(self):
        """Add a text with an unchanged last line with no eol should work."""
        # Test adding this in a number of chain lengths; because the interface
        # for VersionedFile does not allow forcing a specific chain length, we
        # just use a small base to get the first snapshot, then a much longer
        # first line for the next add (which will make the third add snapshot)
        # and so on. 20 has been chosen as an arbitrary figure - knits use 200
        # as a capped delta length, but ideally we would have some way of
        # tuning the test to the store (e.g. keep going until a snapshot
        # happens).
        for length in range(20):
            version_lines = {}
            vf = self.get_file('case-%d' % length)
            prefix = 'step-%d'
            parents = []
            for step in range(length):
                version = prefix % step
                lines = (['prelude \n'] * step) + ['line']
                vf.add_lines(version, parents, lines)
                version_lines[version] = lines
                parents = [version]
            vf.add_lines('no-eol', parents, ['line'])
            vf.get_texts(version_lines.keys())
            self.assertEqualDiff('line', vf.get_text('no-eol'))

    def test_get_texts_eol_variation(self):
        # similar to the failure in <http://bugs.launchpad.net/234748>
        vf = self.get_file()
        sample_text_nl = ["line\n"]
        sample_text_no_nl = ["line"]
        versions = []
        version_lines = {}
        parents = []
        for i in range(4):
            version = 'v%d' % i
            if i % 2:
                lines = sample_text_nl
            else:
                lines = sample_text_no_nl
            # left_matching blocks is an internal api; it operates on the
            # *internal* representation for a knit, which is with *all* lines
            # being normalised to end with \n - even the final line in a no_nl
            # file. Using it here ensures that a broken internal implementation
            # (which is what this test tests) will generate a correct line
            # delta (which is to say, an empty delta).
            vf.add_lines(version, parents, lines,
                left_matching_blocks=[(0, 0, 1)])
            parents = [version]
            versions.append(version)
            version_lines[version] = lines
        vf.get_texts(versions)
        vf.get_texts(reversed(versions))

    def test_add_lines_with_matching_blocks_noeol_last_line(self):
        """Add a text with an unchanged last line with no eol should work."""
        from bzrlib import multiparent
        # Hand verified sha1 of the text we're adding.
        sha1 = '6a1d115ec7b60afb664dc14890b5af5ce3c827a4'
        # Create a mpdiff which adds a new line before the trailing line, and
        # reuse the last line unaltered (which can cause annotation reuse).
        # Test adding this in two situations:
        # On top of a new insertion
        vf = self.get_file('fulltext')
        vf.add_lines('noeol', [], ['line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
                     left_matching_blocks=[(0, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
        # On top of a delta
        vf = self.get_file('delta')
        vf.add_lines('base', [], ['line'])
        vf.add_lines('noeol', ['base'], ['prelude\n', 'line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
                     left_matching_blocks=[(1, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))

    def test_make_mpdiffs(self):
        from bzrlib import multiparent
        vf = self.get_file('foo')
        sha1s = self._setup_for_deltas(vf)
        new_vf = self.get_file('bar')
        for version in multiparent.topo_iter(vf):
            mpdiff = vf.make_mpdiffs([version])[0]
            new_vf.add_mpdiffs([(version, vf.get_parent_map([version])[version],
                                 vf.get_sha1s([version])[version], mpdiff)])
            self.assertEqualDiff(vf.get_text(version),
                                 new_vf.get_text(version))

    def test_make_mpdiffs_with_ghosts(self):
        vf = self.get_file('foo')
        try:
            vf.add_lines_with_ghosts('text', ['ghost'], ['line\n'])
        except NotImplementedError:
            # old Weave formats do not allow ghosts
            return
        self.assertRaises(errors.RevisionNotPresent, vf.make_mpdiffs, ['ghost'])

    def _setup_for_deltas(self, f):
        self.assertFalse(f.has_version('base'))
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 26 insertions
        f.add_lines('base', [], ['line\n'])
        f.add_lines('noeol', ['base'], ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        f.add_lines('noeolsecond', ['noeol'], ['line\n', 'line'])
        # differing last line with parent, both no-eol
        f.add_lines('noeolnotshared', ['noeolsecond'], ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        f.add_lines('eol', ['noeol'], ['phone\n'])
        # add eol following a noneol parent, no change content
        f.add_lines('eolline', ['noeol'], ['line\n'])
        # noeol with no parents:
        f.add_lines('noeolbase', [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol, with the
        # later-inserted parent the leftmost.
        f.add_lines('eolbeforefirstparent', ['noeolbase', 'noeol'], ['line'])
        # two identical no-eol texts
        f.add_lines('noeoldup', ['noeol'], ['line'])
        next_parent = 'base'
        text_name = 'chain1-'
        text = ['line\n']
        sha1s = {0 :'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1 :'45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2 :'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3 :'26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4 :'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5 :'d63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6 :'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7 :'95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8 :'779e9a0b28f9f832528d4b21e17e168c67697272',
                 9 :'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10:'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11:'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12:'31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13:'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14:'2c4b1736566b8ca6051e668de68650686a3922f2',
                 15:'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16:'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17:'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18:'5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19:'1ebed371807ba5935958ad0884595126e8c4e823',
                 20:'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21:'01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22:'d8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23:'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24:'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25:'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = text_name + '%s' % depth
            text = text + ['line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        next_parent = 'base'
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = text_name + '%s' % depth
            text = text + ['line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        return sha1s

    def test_ancestry(self):
        f = self.get_file()
        self.assertEqual([], f.get_ancestry([]))
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
        f.add_lines('r2', ['r0'], ['b\n', 'c\n'])
        f.add_lines('r3', ['r2'], ['b\n', 'c\n'])
        f.add_lines('rM', ['r1', 'r2'], ['b\n', 'c\n'])
        self.assertEqual([], f.get_ancestry([]))
        versions = f.get_ancestry(['rM'])
        # there are some possibilities:
        # r0 r1 r2 rM
        # r0 r2 r1 rM
        # so we check indexes
        r0 = versions.index('r0')
        r1 = versions.index('r1')
        r2 = versions.index('r2')
        self.assertFalse('r3' in versions)
        rM = versions.index('rM')
        self.assertTrue(r0 < r1)
        self.assertTrue(r0 < r2)
        self.assertTrue(r1 < rM)
        self.assertTrue(r2 < rM)

        self.assertRaises(RevisionNotPresent,
            f.get_ancestry, ['rM', 'rX'])

        self.assertEqual(set(f.get_ancestry('rM')),
            set(f.get_ancestry('rM', topo_sorted=False)))
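
        # get_ancestry's ordering contract matches bzrlib.tsort.topo_sort
        # (imported above): parents always sort before children. Illustrative
        # use of that helper on the same graph:
        #   topo_sort({'r0': [], 'r1': ['r0'], 'r2': ['r0'],
        #              'rM': ['r1', 'r2']}.items())
        #   could yield ['r0', 'r1', 'r2', 'rM'] (r1 and r2 may swap).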

    def test_mutate_after_finish(self):
        self._transaction = 'before'
        f = self.get_file()
        self._transaction = 'after'
        self.assertRaises(errors.OutSideTransaction, f.add_lines, '', [], [])
        self.assertRaises(errors.OutSideTransaction, f.add_lines_with_ghosts, '', [], [])

    def test_copy_to(self):
        f = self.get_file()
        f.add_lines('0', [], ['a\n'])
        t = MemoryTransport()
        f.copy_to('foo', t)
        for suffix in self.get_factory().get_suffixes():
            self.assertTrue(t.has('foo' + suffix))

    def test_get_suffixes(self):
        f = self.get_file()
        # and should be a list
        self.assertTrue(isinstance(self.get_factory().get_suffixes(), list))

    def test_get_parent_map(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        self.assertEqual(
            {'r0':()}, f.get_parent_map(['r0']))
        f.add_lines('r1', ['r0'], ['a\n', 'b\n'])
        self.assertEqual(
            {'r1':('r0',)}, f.get_parent_map(['r1']))
        self.assertEqual(
            {'r0':(),
             'r1':('r0',)},
            f.get_parent_map(['r0', 'r1']))
        f.add_lines('r2', [], ['a\n', 'b\n'])
        f.add_lines('r3', [], ['a\n', 'b\n'])
        f.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n'])
        self.assertEqual(
            {'m':('r0', 'r1', 'r2', 'r3')}, f.get_parent_map(['m']))
        self.assertEqual({}, f.get_parent_map(['y']))
        self.assertEqual(
            {'r0':(),
             'r1':('r0',)},
            f.get_parent_map(['r0', 'y', 'r1']))

    def test_annotate(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
        origins = f.annotate('r1')
        self.assertEquals(origins[0][0], 'r1')
        self.assertEquals(origins[1][0], 'r0')

        self.assertRaises(RevisionNotPresent,
            f.annotate, 'foo')

    def test_detection(self):
        # Test weaves detect corruption.
        #
        # Weaves contain a checksum of their texts.
        # When a text is extracted, this checksum should be
        # verified.
        w = self.get_file_corrupted_text()
        self.assertEqual('hello\n', w.get_text('v1'))
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.check)

        w = self.get_file_corrupted_checksum()
        self.assertEqual('hello\n', w.get_text('v1'))
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.check)
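
        # The two fixtures differ in what they corrupt: get_file_corrupted_text
        # changes a stored line while the recorded sha1 still describes the
        # old text, and get_file_corrupted_checksum changes the recorded sha1
        # itself (see the TestWeave implementations below); w.check()
        # recomputes the text checksums, so both cases must fail.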

    def get_file_corrupted_text(self):
        """Return a versioned file with corrupt text but valid metadata."""
        raise NotImplementedError(self.get_file_corrupted_text)

    def reopen_file(self, name='foo'):
        """Open the versioned file from disk again."""
        raise NotImplementedError(self.reopen_file)

    def test_iter_lines_added_or_present_in_versions(self):
        # test that we get at least an equal set of the lines added by
        # versions in the weave
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.DummyProgress):

            def __init__(self):
                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        vf = self.get_file()
        # add a base to get included
        vf.add_lines('base', [], ['base\n'])
        # add an ancestor to be included on one side
        vf.add_lines('lancestor', [], ['lancestor\n'])
        # add an ancestor to be included on the other side
        vf.add_lines('rancestor', ['base'], ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        vf.add_lines('child', ['rancestor'], ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        vf.add_lines('otherchild',
                     ['lancestor', 'base'],
                     ['base\n', 'lancestor\n', 'otherchild\n'])

        def iter_with_versions(versions, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in vf.iter_lines_added_or_present_in_versions(versions,
                    pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines

        lines = iter_with_versions(['child', 'otherchild'],
                                   [('Walking content', 0, 2),
                                    ('Walking content', 1, 2),
                                    ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
        # we don't care if we got more than that.

        lines = iter_with_versions(None, [('Walking content', 0, 5),
                                          ('Walking content', 1, 5),
                                          ('Walking content', 2, 5),
                                          ('Walking content', 3, 5),
                                          ('Walking content', 4, 5),
                                          ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', 'base')] > 0)
        self.assertTrue(lines[('lancestor\n', 'lancestor')] > 0)
        self.assertTrue(lines[('rancestor\n', 'rancestor')] > 0)
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)

    def test_add_lines_with_ghosts(self):
        # some versioned file formats allow lines to be added with parent
        # information that is richer than that stored in the format. Formats
        # that do not support this need to raise NotImplementedError on the
        # add_lines_with_ghosts api.
        vf = self.get_file()
        # add a revision with ghost parents
        # The preferred form is utf8, but we should translate when needed
        parent_id_unicode = u'b\xbfse'
        parent_id_utf8 = parent_id_unicode.encode('utf8')
        try:
            vf.add_lines_with_ghosts('notbxbfse', [parent_id_utf8], [])
        except NotImplementedError:
            # check the other ghost apis are also not implemented
            self.assertRaises(NotImplementedError, vf.get_ancestry_with_ghosts, ['foo'])
            self.assertRaises(NotImplementedError, vf.get_parents_with_ghosts, 'foo')
            return
        vf = self.reopen_file()
        # test key graph related apis: get_ancestry, _graph, get_parents
        # - these are ghost unaware and must not reflect ghosts
        self.assertEqual(['notbxbfse'], vf.get_ancestry('notbxbfse'))
        self.assertFalse(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
        # if we add something that is a ghost of another, it should correct the
        # results of the prior apis
        vf.add_lines(parent_id_utf8, [], [])
        self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry(['notbxbfse']))
        self.assertEqual({'notbxbfse':(parent_id_utf8,)},
            vf.get_parent_map(['notbxbfse']))
        self.assertTrue(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'],
            vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))

    def test_add_lines_with_ghosts_after_normal_revs(self):
        # some versioned file formats allow lines to be added with parent
        # information that is richer than that stored in the format. Formats
        # that do not support this need to raise NotImplementedError on the
        # add_lines_with_ghosts api.
        vf = self.get_file()
        # probe for ghost support
        try:
            vf.add_lines_with_ghosts('base', [], ['line\n', 'line_b\n'])
        except NotImplementedError:
            return
        vf.add_lines_with_ghosts('references_ghost',
                                 ['base', 'a_ghost'],
                                 ['line\n', 'line_b\n', 'line_c\n'])
        origins = vf.annotate('references_ghost')
        self.assertEquals(('base', 'line\n'), origins[0])
        self.assertEquals(('base', 'line_b\n'), origins[1])
        self.assertEquals(('references_ghost', 'line_c\n'), origins[2])

    def test_readonly_mode(self):
        transport = get_transport(self.get_url('.'))
        factory = self.get_factory()
        vf = factory('id', transport, 0777, create=True, access_mode='w')
        vf = factory('id', transport, access_mode='r')
        self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
        self.assertRaises(errors.ReadOnlyError,
                          vf.add_lines_with_ghosts,
                          'base',
                          [],
                          [])

    def test_get_sha1s(self):
        # check the sha1 data is available
        vf = self.get_file()
        # a simple file
        vf.add_lines('a', [], ['a\n'])
        # the same file, different metadata
        vf.add_lines('b', ['a'], ['a\n'])
        # a file differing only in last newline.
        vf.add_lines('c', [], ['a'])
        self.assertEqual({
            'a': '3f786850e387550fdab836ed7e6dc881de23001b',
            'c': '86f7e437faa5a7fce15d1ddcb9eaeaea377667b8',
            'b': '3f786850e387550fdab836ed7e6dc881de23001b',
            },
            vf.get_sha1s(['a', 'c', 'b']))


class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)

    def get_file_corrupted_text(self):
        w = WeaveFile('foo', get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)
        w.add_lines('v1', [], ['hello\n'])
        w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])

        # We are going to invasively corrupt the text
        # Make sure the internals of weave are the same
        self.assertEqual([('{', 0),
                          'hello\n',
                          ('}', None),
                          ('{', 1),
                          'there\n',
                          ('}', None),
                          ], w._weave)

        self.assertEqual(['f572d396fae9206628714fb2ce00f72e94f2258f',
                          '90f265c6e75f1c8f9ab76dcf85528352c5f215ef',
                          ], w._sha1s)
        w.check()

        # Corrupted
        w._weave[4] = 'There\n'
        return w

    def get_file_corrupted_checksum(self):
        w = self.get_file_corrupted_text()
        # Corrected
        w._weave[4] = 'there\n'
        self.assertEqual('hello\nthere\n', w.get_text('v2'))

        # Invalid checksum, first digit changed
        w._sha1s[1] = 'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
        return w

    def reopen_file(self, name='foo', create=False):
        return WeaveFile(name, get_transport(self.get_url('.')), create=create,
            get_scope=self.get_transaction)

    def test_no_implicit_create(self):
        self.assertRaises(errors.NoSuchFile,
                          WeaveFile,
                          'foo',
                          get_transport(self.get_url('.')),
                          get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile


class TestPlanMergeVersionedFile(TestCaseWithMemoryTransport):

    def setUp(self):
        TestCaseWithMemoryTransport.setUp(self)
        mapper = PrefixMapper()
        factory = make_file_factory(True, mapper)
        self.vf1 = factory(self.get_transport('root-1'))
        self.vf2 = factory(self.get_transport('root-2'))
        self.plan_merge_vf = versionedfile._PlanMergeVersionedFile('root')
        self.plan_merge_vf.fallback_versionedfiles.extend([self.vf1, self.vf2])

    def test_add_lines(self):
        self.plan_merge_vf.add_lines(('root', 'a:'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          ('root', 'a'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          ('root', 'a:'), None, [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          ('root', 'a:'), [], None)

    def setup_abcde(self):
        self.vf1.add_lines(('root', 'A'), [], ['a'])
        self.vf1.add_lines(('root', 'B'), [('root', 'A')], ['b'])
        self.vf2.add_lines(('root', 'C'), [], ['c'])
        self.vf2.add_lines(('root', 'D'), [('root', 'C')], ['d'])
        self.plan_merge_vf.add_lines(('root', 'E:'),
            [('root', 'B'), ('root', 'D')], ['e'])

    def test_get_parents(self):
        self.setup_abcde()
        self.assertEqual({('root', 'B'):(('root', 'A'),)},
            self.plan_merge_vf.get_parent_map([('root', 'B')]))
        self.assertEqual({('root', 'D'):(('root', 'C'),)},
            self.plan_merge_vf.get_parent_map([('root', 'D')]))
        self.assertEqual({('root', 'E:'):(('root', 'B'), ('root', 'D'))},
            self.plan_merge_vf.get_parent_map([('root', 'E:')]))
        self.assertEqual({},
            self.plan_merge_vf.get_parent_map([('root', 'F')]))
        self.assertEqual({
            ('root', 'B'):(('root', 'A'),),
            ('root', 'D'):(('root', 'C'),),
            ('root', 'E:'):(('root', 'B'), ('root', 'D')),
            },
            self.plan_merge_vf.get_parent_map(
                [('root', 'B'), ('root', 'D'), ('root', 'E:'), ('root', 'F')]))

    def test_get_record_stream(self):
        self.setup_abcde()
        def get_record(suffix):
            return self.plan_merge_vf.get_record_stream(
                [('root', suffix)], 'unordered', True).next()
        self.assertEqual('a', get_record('A').get_bytes_as('fulltext'))
        self.assertEqual('c', get_record('C').get_bytes_as('fulltext'))
        self.assertEqual('e', get_record('E:').get_bytes_as('fulltext'))
        self.assertEqual('absent', get_record('F').storage_kind)


class TestReadonlyHttpMixin(object):

    def get_transaction(self):
        return 1

    def test_readonly_http_works(self):
        # we should be able to read from http with a versioned file.
        vf = self.get_file()
        # try an empty file access
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
        self.assertEqual([], readonly_vf.versions())
        # now with feeling.
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
        self.assertEqual(['1', '2'], vf.versions())
        for version in readonly_vf.versions():
            readonly_vf.get_lines(version)


class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):

    def get_file(self):
        return WeaveFile('foo', get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile


class MergeCasesMixin(object):

    def doMerge(self, base, a, b, mp):
        from cStringIO import StringIO
        from textwrap import dedent

        def addcrlf(x):
            return x + '\n'

        w = self.get_file()
        w.add_lines('text0', [], map(addcrlf, base))
        w.add_lines('text1', ['text0'], map(addcrlf, a))
        w.add_lines('text2', ['text0'], map(addcrlf, b))

        self.log_contents(w)

        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))

        self.log('merge:')
        mt = StringIO()
        mt.writelines(w.weave_merge(p))
        mt.seek(0)
        self.log(mt.getvalue())

        mp = map(addcrlf, mp)
        self.assertEqual(mt.readlines(), mp)

    def testOneInsert(self):
        self.doMerge([],
                     ['aa'],
                     [],
                     ['aa'])

    def testSeparateInserts(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    def testSameInsert(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    overlappedInsertExpected = ['aaa', 'xxx', 'yyy', 'bbb']

    def testOverlappedInsert(self):
        self.doMerge(['aaa', 'bbb'],
                     ['aaa', 'xxx', 'yyy', 'bbb'],
                     ['aaa', 'xxx', 'bbb'], self.overlappedInsertExpected)

        # really it ought to reduce this to
        # ['aaa', 'xxx', 'yyy', 'bbb']

    def testClashReplace(self):
        self.doMerge(['aaa'],
                     ['xxx'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert1(self):
        self.doMerge(['aaa'],
                     ['xxx', 'aaa'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert2(self):
        self.doMerge(['aaa'],
                     ['aaa'],
                     ['yyy', 'zzz'],
                     ['yyy', 'zzz'])

    def testDeleteAndModify(self):
        """Clashing delete and modification.

        If one side modifies a region and the other deletes it then
        there should be a conflict with one side blank.
        """

        #######################################
        # skipped, not working yet
        return

        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'ddd', 'ccc'],
                     ['aaa', 'ccc'],
                     ['<<<<<<<< ', 'aaa', '=======', '>>>>>>> ', 'ccc'])

    def _test_merge_from_strings(self, base, a, b, expected):
        w = self.get_file()
        w.add_lines('text0', [], base.splitlines(True))
        w.add_lines('text1', ['text0'], a.splitlines(True))
        w.add_lines('text2', ['text0'], b.splitlines(True))
        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))
        self.log('merge result:')
        result_text = ''.join(w.weave_merge(p))
        self.log(result_text)
        self.assertEqualDiff(result_text, expected)
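
    # plan_merge yields (state, line) pairs, with states such as 'unchanged',
    # 'new-a', 'new-b', 'killed-a' and 'killed-b' (test_weave_merge_conflicts
    # below feeds a literal ('new-a', ...) plan straight in). weave_merge then
    # folds the plan back into merged lines, emitting conflict markers where
    # the two sides disagree.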

    def test_weave_merge_conflicts(self):
        # does weave merge properly handle plans that end with unchanged?
        result = ''.join(self.get_file().weave_merge([('new-a', 'hello\n')]))
        self.assertEqual(result, 'hello\n')

    def test_deletion_extended(self):
        """One side deletes, the other deletes more.
        """
        self._test_merge_from_strings(base, a, b, result)

    def test_deletion_overlap(self):
        """Delete overlapping regions with no other conflict.

        Arguably it'd be better to treat these as agreement, rather than
        conflict, but for now conflict is safer.
        """
        self._test_merge_from_strings(base, a, b, result)

    def test_agreement_deletion(self):
        """Agree to delete some lines, without conflicts."""
        self._test_merge_from_strings(base, a, b, result)

    def test_sync_on_deletion(self):
        """Specific case of merge where we can synchronize incorrectly.

        A previous version of the weave merge concluded that the two versions
        agreed on deleting line 2, and this could be a synchronization point.
        Line 1 was then considered in isolation, and thought to be deleted on
        both sides.

        It's better to consider the whole thing as a disagreement region.
        """
        a's replacement line 2
        a's replacement line 2
        self._test_merge_from_strings(base, a, b, result)


class TestWeaveMerge(TestCaseWithMemoryTransport, MergeCasesMixin):

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True)

    def log_contents(self, w):
        self.log('weave is:')
        tmpf = StringIO()
        write_weave(w, tmpf)
        self.log(tmpf.getvalue())

    overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======',
                                'xxx', '>>>>>>> ', 'bbb']


class TestContentFactoryAdaption(TestCaseWithMemoryTransport):

    def test_select_adaptor(self):
        """Test expected adapters exist."""
        # One scenario for each lookup combination we expect to use.
        # Each is source_kind, requested_kind, adapter class
        scenarios = [
            ('knit-delta-gz', 'fulltext', _mod_knit.DeltaPlainToFullText),
            ('knit-ft-gz', 'fulltext', _mod_knit.FTPlainToFullText),
            ('knit-annotated-delta-gz', 'knit-delta-gz',
                _mod_knit.DeltaAnnotatedToUnannotated),
            ('knit-annotated-delta-gz', 'fulltext',
                _mod_knit.DeltaAnnotatedToFullText),
            ('knit-annotated-ft-gz', 'knit-ft-gz',
                _mod_knit.FTAnnotatedToUnannotated),
            ('knit-annotated-ft-gz', 'fulltext',
                _mod_knit.FTAnnotatedToFullText),
            ]
        for source, requested, klass in scenarios:
            adapter_factory = versionedfile.adapter_registry.get(
                (source, requested))
            adapter = adapter_factory(None)
            self.assertIsInstance(adapter, klass)

    def get_knit(self, annotated=True):
        mapper = ConstantMapper('knit')
        transport = self.get_transport()
        return make_file_factory(annotated, mapper)(transport)
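
    # versionedfile.adapter_registry is keyed by (source_kind, target_kind)
    # pairs; a consumer looks the adapter factory up and feeds it records,
    # roughly (a sketch of the dance helpGetBytes below performs):
    #   adapter_factory = versionedfile.adapter_registry.get(
    #       ('knit-annotated-ft-gz', 'fulltext'))
    #   adapter = adapter_factory(None)
    #   text = adapter.get_bytes(record)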

    def helpGetBytes(self, f, ft_adapter, delta_adapter):
        """Grab the interested adapted texts for tests."""
        # origin is a fulltext
        entries = f.get_record_stream([('origin',)], 'unordered', False)
        base = entries.next()
        ft_data = ft_adapter.get_bytes(base)
        # merged is both a delta and multiple parents.
        entries = f.get_record_stream([('merged',)], 'unordered', False)
        merged = entries.next()
        delta_data = delta_adapter.get_bytes(merged)
        return ft_data, delta_data

    def test_deannotation_noeol(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 4 32c2e79763b3f90e8ccde37f9710b6629c25a796\n'
            '1,2,3\nleft\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())

    def test_deannotation(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 3 ed8bce375198ea62444dc71952b22cfc2b09226d\n'
            '2,2,2\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())

    def test_annotated_to_fulltext_no_eol(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_annotated_to_fulltext(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_unannotated_to_fulltext(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_unannotated_to_fulltext_no_eol(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)


class TestKeyMapper(TestCaseWithMemoryTransport):
    """Tests for various key mapping logic."""

    def test_identity_mapper(self):
        mapper = versionedfile.ConstantMapper("inventory")
        self.assertEqual("inventory", mapper.map(('foo@ar',)))
        self.assertEqual("inventory", mapper.map(('quux',)))

    def test_prefix_mapper(self):
        # format5: plain
        mapper = versionedfile.PrefixMapper()
        self.assertEqual("file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("file-id"))
        self.assertEqual(('new-id',), mapper.unmap("new-id"))

    def test_hash_prefix_mapper(self):
        # format6: hash + plain
        mapper = versionedfile.HashPrefixMapper()
        self.assertEqual("9b/file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("45/new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("9b/file-id"))
        self.assertEqual(('new-id',), mapper.unmap("45/new-id"))
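
        # The '9b'/'45' directory names above are the mapper's two-hex-digit
        # hash prefix computed from the first key element (the file id);
        # unmap simply strips that prefix again, so map and unmap round-trip.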

    def test_hash_escaped_mapper(self):
        # knit1: hash + escaped
        mapper = versionedfile.HashEscapedPrefixMapper()
        self.assertEqual("88/%2520", mapper.map((" ", "revision-id")))
        self.assertEqual("ed/fil%2545-%2549d", mapper.map(("filE-Id",
            "revision-id")))
        self.assertEqual("88/ne%2557-%2549d", mapper.map(("neW-Id",
            "revision-id")))
        self.assertEqual(('filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
        self.assertEqual(('neW-Id',), mapper.unmap("88/ne%2557-%2549d"))


class TestVersionedFiles(TestCaseWithMemoryTransport):
    """Tests for the multiple-file variant of VersionedFile."""

    def get_versionedfiles(self, relpath='files'):
        transport = self.get_transport(relpath)
        if relpath != '.':
            transport.mkdir('.')
        files = self.factory(transport)
        if self.cleanup is not None:
            self.addCleanup(lambda:self.cleanup(files))
        return files

    def get_simple_key(self, suffix):
        """Return a key for the object under test."""
        if self.key_length == 1:
            return (suffix,)
        else:
            return ('FileA',) + (suffix,)

    def test_add_lines(self):
        f = self.get_versionedfiles()
        key0 = self.get_simple_key('r0')
        key1 = self.get_simple_key('r1')
        key2 = self.get_simple_key('r2')
        keyf = self.get_simple_key('foo')
        f.add_lines(key0, [], ['a\n', 'b\n'])
        if self.graph:
            f.add_lines(key1, [key0], ['b\n', 'c\n'])
        else:
            f.add_lines(key1, [], ['b\n', 'c\n'])
        keys = f.keys()
        self.assertTrue(key0 in keys)
        self.assertTrue(key1 in keys)
        records = []
        for record in f.get_record_stream([key0, key1], 'unordered', True):
            records.append((record.key, record.get_bytes_as('fulltext')))
        records.sort()
        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)

    def test__add_text(self):
        f = self.get_versionedfiles()
        key0 = self.get_simple_key('r0')
        key1 = self.get_simple_key('r1')
        key2 = self.get_simple_key('r2')
        keyf = self.get_simple_key('foo')
        f._add_text(key0, [], 'a\nb\n')
        if self.graph:
            f._add_text(key1, [key0], 'b\nc\n')
        else:
            f._add_text(key1, [], 'b\nc\n')
        keys = f.keys()
        self.assertTrue(key0 in keys)
        self.assertTrue(key1 in keys)
        records = []
        for record in f.get_record_stream([key0, key1], 'unordered', True):
            records.append((record.key, record.get_bytes_as('fulltext')))
        records.sort()
        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)

    def test_annotate(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            prefix = ()
        else:
            prefix = ('FileA',)
        # introduced full text
        origins = files.annotate(prefix + ('origin',))
        self.assertEqual([
            (prefix + ('origin',), 'origin\n')],
            origins)
        # a delta
        origins = files.annotate(prefix + ('base',))
        self.assertEqual([
            (prefix + ('base',), 'base\n')],
            origins)
        # a merge
        origins = files.annotate(prefix + ('merged',))
        if self.graph:
            self.assertEqual([
                (prefix + ('base',), 'base\n'),
                (prefix + ('left',), 'left\n'),
                (prefix + ('right',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        else:
            # Without a graph everything is new.
            self.assertEqual([
                (prefix + ('merged',), 'base\n'),
                (prefix + ('merged',), 'left\n'),
                (prefix + ('merged',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        self.assertRaises(RevisionNotPresent,
            files.annotate, prefix + ('missing-key',))

    def test_check_no_parameters(self):
        files = self.get_versionedfiles()
        files.check()

    def test_check_progressbar_parameter(self):
        """A progress bar can be supplied because check can be a generator."""
        pb = ui.ui_factory.nested_progress_bar()
        self.addCleanup(pb.finished)
        files = self.get_versionedfiles()
        files.check(progress_bar=pb)

    def test_check_with_keys_becomes_generator(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys = files.keys()
        entries = files.check(keys=keys)
        seen = set()
        # Texts output should be fulltexts.
        self.capture_stream(files, entries, seen.add,
            files.get_parent_map(keys), require_fulltext=True)
        # All texts should be output.
        self.assertEqual(set(keys), seen)

    def test_construct(self):
        """Each parameterised test can be constructed on a transport."""
        files = self.get_versionedfiles()

    def get_diamond_files(self, files, trailing_eol=True, left_only=False,
        nokeys=False):
        return get_diamond_files(files, self.key_length,
            trailing_eol=trailing_eol, nograph=not self.graph,
            left_only=left_only, nokeys=nokeys)

    def _add_content_nostoresha(self, add_lines):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_versionedfiles()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            if add_lines:
                sha, _, _ = vf.add_lines(self.get_simple_key(version), [],
                                         lines)
            else:
                sha, _, _ = vf._add_text(self.get_simple_key(version), [],
                                         ''.join(lines))
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            new_key = self.get_simple_key(version + "2")
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, new_key, [], lines,
                nostore_sha=sha)
            self.assertRaises(errors.ExistingContent,
                vf._add_text, new_key, [], ''.join(lines),
                nostore_sha=sha)
            # and no new version should have been added.
            record = vf.get_record_stream([new_key], 'unordered', True).next()
            self.assertEqual('absent', record.storage_kind)

    def test_add_lines_nostoresha(self):
        self._add_content_nostoresha(add_lines=True)

    def test__add_text_nostoresha(self):
        self._add_content_nostoresha(add_lines=False)

    def test_add_lines_return(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
        elif self.key_length == 2:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
    def test_add_lines_no_key_generates_chk_key(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files, nokeys=True)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
            # Check the added items got CHK keys.
            self.assertEqual(set([
                ('sha1:00e364d235126be43292ab09cb4686cf703ddc17',),
                ('sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',),
                ('sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',),
                ('sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',),
                ('sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',),
                ]),
                files.keys())
        elif self.key_length == 2:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
            # Check the added items got CHK keys.
            self.assertEqual(set([
                ('FileA', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                ('FileA', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                ('FileA', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                ('FileA', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                ('FileA', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
                ('FileB', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                ('FileB', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                ('FileB', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                ('FileB', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                ('FileB', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
                ]),
                files.keys())

    def test_empty_lines(self):
        """Empty files can be stored."""
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], [])
        self.assertEqual('',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), [])
        self.assertEqual('',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))

    def test_newline_only(self):
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))

    def test_get_record_stream_empty(self):
        """An empty stream can be requested without error."""
        f = self.get_versionedfiles()
        entries = f.get_record_stream([], 'unordered', False)
        self.assertEqual([], list(entries))

    def assertValidStorageKind(self, storage_kind):
        """Assert that storage_kind is a valid storage_kind."""
        self.assertSubset([storage_kind],
            ['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
             'knit-ft', 'knit-delta', 'chunked', 'fulltext',
             'knit-annotated-ft-gz', 'knit-annotated-delta-gz', 'knit-ft-gz',
             'knit-delta-gz',
             'knit-delta-closure', 'knit-delta-closure-ref',
             'groupcompress-block', 'groupcompress-block-ref'])

    def capture_stream(self, f, entries, on_seen, parents,
        require_fulltext=False):
        """Capture a stream for testing."""
        for factory in entries:
            on_seen(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            if factory.sha1 is not None:
                self.assertEqual(f.get_sha1s([factory.key])[factory.key],
                    factory.sha1)
            self.assertEqual(parents[factory.key], factory.parents)
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
            if require_fulltext:
                factory.get_bytes_as('fulltext')

    def test_get_record_stream_interface(self):
        """each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, entries, seen.add, parent_map)
        self.assertEqual(set(keys), seen)

    def get_keys_and_sort_order(self):
        """Get diamond test keys list, and their sort ordering."""
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
            sort_order = {('merged',):2, ('left',):1, ('right',):1, ('base',):0}
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
            sort_order = {
                ('FileA', 'merged'):2, ('FileA', 'left'):1, ('FileA', 'right'):1,
                ('FileA', 'base'):0,
                ('FileB', 'merged'):2, ('FileB', 'left'):1, ('FileB', 'right'):1,
                ('FileB', 'base'):0,
                }
        return keys, sort_order

    def get_keys_and_groupcompress_sort_order(self):
        """Get diamond test keys list, and their groupcompress sort ordering."""
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
            sort_order = {('merged',):0, ('left',):1, ('right',):1, ('base',):2}
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
            sort_order = {
                ('FileA', 'merged'):0, ('FileA', 'left'):1, ('FileA', 'right'):1,
                ('FileA', 'base'):2,
                ('FileB', 'merged'):3, ('FileB', 'left'):4, ('FileB', 'right'):4,
                ('FileB', 'base'):5,
                }
        return keys, sort_order

    def test_get_record_stream_interface_ordered(self):
        """Each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', False)
        seen = []
        self.capture_stream(files, entries, seen.append, parent_map)
        self.assertStreamOrder(sort_order, seen, keys)

    def test_get_record_stream_interface_ordered_with_delta_closure(self):
        """Each item must be accessible as a fulltext."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', True)
        seen = []
        for factory in entries:
            seen.append(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertSubset([factory.sha1],
                [None, files.get_sha1s([factory.key])[factory.key]])
            self.assertEqual(parent_map[factory.key], factory.parents)
            ft_bytes = factory.get_bytes_as('fulltext')
            self.assertIsInstance(ft_bytes, str)
            chunked_bytes = factory.get_bytes_as('chunked')
            self.assertEqualDiff(ft_bytes, ''.join(chunked_bytes))

        self.assertStreamOrder(sort_order, seen, keys)

    def test_get_record_stream_interface_groupcompress(self):
        """Each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_groupcompress_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'groupcompress', False)
        seen = []
        self.capture_stream(files, entries, seen.append, parent_map)
        self.assertStreamOrder(sort_order, seen, keys)

    def assertStreamOrder(self, sort_order, seen, keys):
        self.assertEqual(len(set(seen)), len(keys))
        if self.key_length == 1:
            lows = {():0}
        else:
            lows = {('FileA',):0, ('FileB',):0}
        if not self.graph:
            self.assertEqual(set(keys), set(seen))
        else:
            for key in seen:
                sort_pos = sort_order[key]
                self.assertTrue(sort_pos >= lows[key[:-1]],
                    "Out of order in sorted stream: %r, %r" % (key, seen))
                lows[key[:-1]] = sort_pos
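
    # The check above keeps a per-prefix low-water mark; in sketch:
    #     lows = {}
    #     for key in seen:
    #         assert sort_order[key] >= lows.get(key[:-1], 0)
    #         lows[key[:-1]] = sort_order[key]
    # i.e. prefixes may interleave in the stream, but sort positions within
    # any one prefix must be non-decreasing.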

    def test_get_record_stream_unknown_storage_kind_raises(self):
        """Asking for a storage kind that the stream cannot supply raises."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        # We track the contents because we should be able to try, fail a
        # particular kind and then ask for one that works and continue.
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            if factory.sha1 is not None:
                self.assertEqual(files.get_sha1s([factory.key])[factory.key],
                    factory.sha1)
            self.assertEqual(parent_map[factory.key], factory.parents)
            # currently no stream emits mpdiff
            self.assertRaises(errors.UnavailableRepresentation,
                factory.get_bytes_as, 'mpdiff')
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
        self.assertEqual(set(keys), seen)

    def test_get_record_stream_missing_records_are_absent(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('absent',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'absent'), ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'absent'), ('FileB', 'base'),
                ('absent', 'absent'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)
        entries = files.get_record_stream(keys, 'topological', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)

    def assertRecordHasContent(self, record, bytes):
        """Assert that record has the content ``bytes``."""
        self.assertEqual(bytes, record.get_bytes_as('fulltext'))
        self.assertEqual(bytes, ''.join(record.get_bytes_as('chunked')))
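
    # Note: 'chunked' yields a list of byte strings whose concatenation must
    # equal the 'fulltext' bytes, e.g. ['my text\n', 'content'] joins to
    # 'my text\ncontent'.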

    def test_get_record_stream_native_formats_are_wire_ready_one_ft(self):
        files = self.get_versionedfiles()
        key = self.get_simple_key('foo')
        files.add_lines(key, (), ['my text\n', 'content'])
        stream = files.get_record_stream([key], 'unordered', False)
        record = stream.next()
        if record.storage_kind in ('chunked', 'fulltext'):
            # chunked and fulltext representations are for direct use not wire
            # serialisation: check they are able to be used directly. To send
            # such records over the wire translation will be needed.
            self.assertRecordHasContent(record, "my text\ncontent")
        else:
            bytes = [record.get_bytes_as(record.storage_kind)]
            network_stream = versionedfile.NetworkRecordStream(bytes).read()
            source_record = record
            records = []
            for record in network_stream:
                records.append(record)
                self.assertEqual(source_record.storage_kind,
                    record.storage_kind)
                self.assertEqual(source_record.parents, record.parents)
                self.assertEqual(
                    source_record.get_bytes_as(source_record.storage_kind),
                    record.get_bytes_as(record.storage_kind))
            self.assertEqual(1, len(records))
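
    # The wire round-trip exercised above, in sketch (assuming 'record' is a
    # non-fulltext record from get_record_stream):
    #     payload = [record.get_bytes_as(record.storage_kind)]
    #     for r in versionedfile.NetworkRecordStream(payload).read():
    #         # r preserves key, parents and storage_kind across the wire
    #         ...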

    def assertStreamMetaEqual(self, records, expected, stream):
        """Assert that streams expected and stream have the same records.

        :param records: A list to collect the seen records.
        :return: A generator of the records in stream.
        """
        # We make assertions during copying to catch things early for easier
        # debugging.
        for record, ref_record in izip(stream, expected):
            records.append(record)
            self.assertEqual(ref_record.key, record.key)
            self.assertEqual(ref_record.storage_kind, record.storage_kind)
            self.assertEqual(ref_record.parents, record.parents)
            yield record

    def stream_to_bytes_or_skip_counter(self, skipped_records, full_texts,
        stream):
        """Convert a stream to a bytes iterator.

        :param skipped_records: A list with one element to increment when a
            record is skipped.
        :param full_texts: A dict from key->fulltext representation, for
            checking chunked or fulltext stored records.
        :param stream: A record_stream.
        :return: An iterator over the bytes of each record.
        """
        for record in stream:
            if record.storage_kind in ('chunked', 'fulltext'):
                skipped_records[0] += 1
                # check the content is correct for direct use.
                self.assertRecordHasContent(record, full_texts[record.key])
            else:
                yield record.get_bytes_as(record.storage_kind)
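
    # Typical use, as in the wire-ready tests below: the yielded bytes feed
    # straight into a NetworkRecordStream, e.g.
    #     byte_stream = self.stream_to_bytes_or_skip_counter(
    #         skipped_records, full_texts, local)
    #     network_stream = versionedfile.NetworkRecordStream(byte_stream).read()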

    def test_get_record_stream_native_formats_are_wire_ready_ft_delta(self):
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key, key_delta], 'unordered', False)
        ref = files.get_record_stream([key, key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key: "my text\ncontent",
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object so
        # we can check the content was carried across correctly without doing
        # delta inspection during check_stream.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire thank you!
        self.assertEqual(2, len(records) + skipped_records[0])
        if len(records):
            # if any content was copied it must all have been.
            self.assertIdenticalVersionedFile(files, target_files)

    def test_get_record_stream_native_formats_are_wire_ready_delta(self):
        # copy a delta over the wire
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        # Copy the basis text across so we can reconstruct the delta during
        # insertion into target.
        target_files.insert_record_stream(files.get_record_stream([key],
            'unordered', False))
        local = files.get_record_stream([key_delta], 'unordered', False)
        ref = files.get_record_stream([key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object so
        # we can check the content was carried across correctly without doing
        # delta inspection during check_stream.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire thank you!
        self.assertEqual(1, len(records) + skipped_records[0])
        if len(records):
            # if any content was copied it must all have been.
            self.assertIdenticalVersionedFile(files, target_files)

    def test_get_record_stream_wire_ready_delta_closure_included(self):
        # copy a delta over the wire with the ability to get its full text.
        files = self.get_versionedfiles()
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key_delta], 'unordered', True)
        ref = files.get_record_stream([key_delta], 'unordered', True)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # check the stream from the network carried the content across
        # correctly without needing delta inspection during check_stream.
        for record in self.assertStreamMetaEqual(records, ref, network_stream):
            # we have to be able to get the full text out:
            self.assertRecordHasContent(record, full_texts[record.key])
        # No duplicates on the wire thank you!
        self.assertEqual(1, len(records) + skipped_records[0])

    def assertAbsentRecord(self, files, keys, parents, entries):
        """Helper for test_get_record_stream_missing_records_are_absent."""
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            if factory.key[-1] == 'absent':
                self.assertEqual('absent', factory.storage_kind)
                self.assertEqual(None, factory.sha1)
                self.assertEqual(None, factory.parents)
            else:
                self.assertValidStorageKind(factory.storage_kind)
                if factory.sha1 is not None:
                    sha1 = files.get_sha1s([factory.key])[factory.key]
                    self.assertEqual(sha1, factory.sha1)
                self.assertEqual(parents[factory.key], factory.parents)
                self.assertIsInstance(
                    factory.get_bytes_as(factory.storage_kind), str)
        self.assertEqual(set(keys), seen)

    def test_filter_absent_records(self):
        """Requested missing records can be filtered trivially."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        # Add an absent record in the middle of the present keys. (We don't
        # ask for just absent keys to ensure that content before and after
        # the absent keys is still delivered).
        present_keys = list(keys)
        if self.key_length == 1:
            keys.insert(2, ('extra',))
        else:
            keys.insert(2, ('extra', 'extra'))
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, versionedfile.filter_absent(entries),
            seen.add, parent_map)
        self.assertEqual(set(present_keys), seen)
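
    # filter_absent simply drops 'absent' records from a stream; in sketch:
    #     present = (f for f in entries if f.storage_kind != 'absent')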

    def get_mapper(self):
        """Get a mapper suitable for the key length of the test interface."""
        if self.key_length == 1:
            return ConstantMapper('source')
        else:
            return HashEscapedPrefixMapper()

    def get_parents(self, parents):
        """Get parents, taking self.graph into consideration."""
        if self.graph:
            return parents
        else:
            return None

    def test_get_annotator(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        origin_key = self.get_simple_key('origin')
        base_key = self.get_simple_key('base')
        left_key = self.get_simple_key('left')
        right_key = self.get_simple_key('right')
        merged_key = self.get_simple_key('merged')
        # introduced full text
        origins, lines = files.get_annotator().annotate(origin_key)
        self.assertEqual([(origin_key,)], origins)
        self.assertEqual(['origin\n'], lines)
        # a delta
        origins, lines = files.get_annotator().annotate(base_key)
        self.assertEqual([(base_key,)], origins)
        # a merge
        origins, lines = files.get_annotator().annotate(merged_key)
        if self.graph:
            self.assertEqual([
                (base_key,),
                (left_key,),
                (right_key,),
                (merged_key,),
                ], origins)
        else:
            # Without a graph everything is new.
            self.assertEqual([
                (merged_key,),
                (merged_key,),
                (merged_key,),
                (merged_key,),
                ], origins)
        self.assertRaises(RevisionNotPresent,
            files.get_annotator().annotate, self.get_simple_key('missing-key'))

    def test_get_parent_map(self):
        files = self.get_versionedfiles()
        if self.key_length == 1:
            parent_details = [
                (('r0',), self.get_parents(())),
                (('r1',), self.get_parents((('r0',),))),
                (('r2',), self.get_parents(())),
                (('r3',), self.get_parents(())),
                (('m',), self.get_parents((('r0',),('r1',),('r2',),('r3',)))),
                ]
        else:
            parent_details = [
                (('FileA', 'r0'), self.get_parents(())),
                (('FileA', 'r1'), self.get_parents((('FileA', 'r0'),))),
                (('FileA', 'r2'), self.get_parents(())),
                (('FileA', 'r3'), self.get_parents(())),
                (('FileA', 'm'), self.get_parents((('FileA', 'r0'),
                    ('FileA', 'r1'), ('FileA', 'r2'), ('FileA', 'r3')))),
                ]
        for key, parents in parent_details:
            files.add_lines(key, parents, [])
            # immediately after adding it should be queryable.
            self.assertEqual({key:parents}, files.get_parent_map([key]))
        # We can ask for an empty set
        self.assertEqual({}, files.get_parent_map([]))
        # We can ask for many keys
        all_parents = dict(parent_details)
        self.assertEqual(all_parents, files.get_parent_map(all_parents.keys()))
        # Absent keys are just not included in the result.
        keys = all_parents.keys()
        if self.key_length == 1:
            keys.insert(1, ('missing',))
        else:
            keys.insert(1, ('missing', 'missing'))
        # Absent keys are just ignored
        self.assertEqual(all_parents, files.get_parent_map(keys))
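
    # The contract exercised above, in sketch (hypothetical keys):
    #     files.get_parent_map([present_key, missing_key])
    #     # => {present_key: parents} -- missing_key is silently omitted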

    def test_get_sha1s(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('base',), ('origin',), ('left',), ('merged',), ('right',)]
        else:
            # ask for shas from different prefixes.
            keys = [
                ('FileA', 'base'), ('FileB', 'origin'), ('FileA', 'left'),
                ('FileA', 'merged'), ('FileB', 'right'),
                ]
        self.assertEqual({
            keys[0]: '51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
            keys[1]: '00e364d235126be43292ab09cb4686cf703ddc17',
            keys[2]: 'a8478686da38e370e32e42e8a0c220e33ee9132f',
            keys[3]: 'ed8bce375198ea62444dc71952b22cfc2b09226d',
            keys[4]: '9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
            },
            files.get_sha1s(keys))

    def test_insert_record_stream_empty(self):
        """Inserting an empty record stream should work."""
        files = self.get_versionedfiles()
        files.insert_record_stream([])

    def assertIdenticalVersionedFile(self, expected, actual):
        """Assert that expected and actual have the same contents."""
        self.assertEqual(set(actual.keys()), set(expected.keys()))
        actual_parents = actual.get_parent_map(actual.keys())
        if self.graph:
            self.assertEqual(actual_parents,
                expected.get_parent_map(expected.keys()))
        else:
            for key, parents in actual_parents.items():
                self.assertEqual(None, parents)
        for key in actual.keys():
            actual_text = actual.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            expected_text = expected.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            self.assertEqual(actual_text, expected_text)

    def test_insert_record_stream_fulltexts(self):
        """Any file should accept a stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_fulltexts_noeol(self):
        """Any file should accept a stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_annotated_knits(self):
        """Any file should accept a stream from annotated knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_annotated_knits_noeol(self):
        """Any file should accept a stream from annotated knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_plain_knits(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_plain_knits_noeol(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_existing_keys(self):
        """Inserting keys already in a file should not error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        # insert some keys into files.
        self.get_diamond_files(files, left_only=True)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_missing_keys(self):
        """Inserting a stream with absent keys should raise an error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        stream = source.get_record_stream([('missing',) * self.key_length],
            'topological', False)
        self.assertRaises(errors.RevisionNotPresent,
            files.insert_record_stream, stream)

    def test_insert_record_stream_out_of_order(self):
        """An out of order stream can either error or work."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        if self.key_length == 1:
            origin_keys = [('origin',)]
            end_keys = [('merged',), ('left',)]
            start_keys = [('right',), ('base',)]
        else:
            origin_keys = [('FileA', 'origin'), ('FileB', 'origin')]
            end_keys = [('FileA', 'merged',), ('FileA', 'left',),
                ('FileB', 'merged',), ('FileB', 'left',)]
            start_keys = [('FileA', 'right',), ('FileA', 'base',),
                ('FileB', 'right',), ('FileB', 'base',)]
        origin_entries = source.get_record_stream(origin_keys, 'unordered', False)
        end_entries = source.get_record_stream(end_keys, 'topological', False)
        start_entries = source.get_record_stream(start_keys, 'topological', False)
        entries = chain(origin_entries, end_entries, start_entries)
        try:
            files.insert_record_stream(entries)
        except RevisionNotPresent:
            # Must not have corrupted the file.
            files.check()
        else:
            self.assertIdenticalVersionedFile(source, files)

    def get_knit_delta_source(self):
        """Get a source that can produce a stream with knit delta records,
        regardless of this test's scenario.
        """
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        get_diamond_files(source, self.key_length, trailing_eol=True,
            nograph=False, left_only=False)
        return source

    def test_insert_record_stream_delta_missing_basis_no_corruption(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis. In the meantime a record missing its basis is
        not added.
        """
        source = self.get_knit_delta_source()
        keys = [self.get_simple_key('origin'), self.get_simple_key('merged')]
        entries = source.get_record_stream(keys, 'unordered', False)
        files = self.get_versionedfiles()
        if self.support_partial_insertion:
            self.assertEqual([],
                list(files.get_missing_compression_parent_keys()))
            files.insert_record_stream(entries)
            missing_bases = files.get_missing_compression_parent_keys()
            self.assertEqual(set([self.get_simple_key('left')]),
                set(missing_bases))
            self.assertEqual(set(keys), set(files.get_parent_map(keys)))
        else:
            self.assertRaises(
                errors.RevisionNotPresent, files.insert_record_stream, entries)
            files.check()

    def test_insert_record_stream_delta_missing_basis_can_be_added_later(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis. That basis can be added in a second
        insert_record_stream call that does not need to repeat records present
        in the previous stream. The record(s) that required that basis are
        fully inserted once their basis is no longer missing.
        """
        if not self.support_partial_insertion:
            raise TestNotApplicable(
                'versioned file scenario does not support partial insertion')
        source = self.get_knit_delta_source()
        entries = source.get_record_stream([self.get_simple_key('origin'),
            self.get_simple_key('merged')], 'unordered', False)
        files = self.get_versionedfiles()
        files.insert_record_stream(entries)
        missing_bases = files.get_missing_compression_parent_keys()
        self.assertEqual(set([self.get_simple_key('left')]),
            set(missing_bases))
        # 'merged' is inserted (although a commit of a write group involving
        # this versionedfiles would fail).
        merged_key = self.get_simple_key('merged')
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())
        # Add the full delta closure of the missing records
        missing_entries = source.get_record_stream(
            missing_bases, 'unordered', True)
        files.insert_record_stream(missing_entries)
        # Now 'merged' is fully inserted (and a commit would succeed).
        self.assertEqual([], list(files.get_missing_compression_parent_keys()))
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())
        files.check()
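
    # The two-phase insertion pattern exercised above, in sketch:
    #     files.insert_record_stream(deltas)          # basis 'left' missing
    #     missing = files.get_missing_compression_parent_keys()
    #     files.insert_record_stream(source.get_record_stream(
    #         missing, 'unordered', True))            # now fully inserted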

    def test_iter_lines_added_or_present_in_keys(self):
        # test that we get at least an equalset of the lines added by
        # versions in the store.
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.DummyProgress):

            def __init__(self):
                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        files = self.get_versionedfiles()
        # add a base to get included
        files.add_lines(self.get_simple_key('base'), (), ['base\n'])
        # add an ancestor to be included on one side
        files.add_lines(self.get_simple_key('lancestor'), (), ['lancestor\n'])
        # add an ancestor to be included on the other side
        files.add_lines(self.get_simple_key('rancestor'),
            self.get_parents([self.get_simple_key('base')]), ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        files.add_lines(self.get_simple_key('child'),
            self.get_parents([self.get_simple_key('rancestor')]),
            ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        files.add_lines(self.get_simple_key('otherchild'),
            self.get_parents([self.get_simple_key('lancestor'),
                self.get_simple_key('base')]),
            ['base\n', 'lancestor\n', 'otherchild\n'])

        def iter_with_keys(keys, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in files.iter_lines_added_or_present_in_keys(keys,
                pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines
        lines = iter_with_keys(
            [self.get_simple_key('child'), self.get_simple_key('otherchild')],
            [('Walking content', 0, 2),
             ('Walking content', 1, 2),
             ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
        # we don't care if we got more than that.

        # test all lines
        lines = iter_with_keys(files.keys(),
            [('Walking content', 0, 5),
             ('Walking content', 1, 5),
             ('Walking content', 2, 5),
             ('Walking content', 3, 5),
             ('Walking content', 4, 5),
             ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', self.get_simple_key('base'))] > 0)
        self.assertTrue(
            lines[('lancestor\n', self.get_simple_key('lancestor'))] > 0)
        self.assertTrue(
            lines[('rancestor\n', self.get_simple_key('rancestor'))] > 0)
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)

    def test_make_mpdiffs(self):
        from bzrlib import multiparent
        files = self.get_versionedfiles('source')
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 25 insertions
        files.add_lines(self.get_simple_key('base'), [], ['line\n'])
        files.add_lines(self.get_simple_key('noeol'),
            self.get_parents([self.get_simple_key('base')]), ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        files.add_lines(self.get_simple_key('noeolsecond'),
            self.get_parents([self.get_simple_key('noeol')]),
            ['line\n', 'line'])
        # differing last line with parent, both no-eol
        files.add_lines(self.get_simple_key('noeolnotshared'),
            self.get_parents([self.get_simple_key('noeolsecond')]),
            ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        files.add_lines(self.get_simple_key('eol'),
            self.get_parents([self.get_simple_key('noeol')]), ['phone\n'])
        # add eol following a noneol parent, no change content
        files.add_lines(self.get_simple_key('eolline'),
            self.get_parents([self.get_simple_key('noeol')]), ['line\n'])
        # noeol with no parents:
        files.add_lines(self.get_simple_key('noeolbase'), [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        files.add_lines(self.get_simple_key('eolbeforefirstparent'),
            self.get_parents([self.get_simple_key('noeolbase'),
                self.get_simple_key('noeol')]),
            ['line'])
        # two identical no-eol texts
        files.add_lines(self.get_simple_key('noeoldup'),
            self.get_parents([self.get_simple_key('noeol')]), ['line'])
        next_parent = self.get_simple_key('base')
        text_name = 'chain1-'
        text = ['line\n']
        sha1s = {0: 'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1: '45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2: 'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3: '26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4: 'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5: 'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6: '2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7: '95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8: '779e9a0b28f9f832528d4b21e17e168c67697272',
                 9: '1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10: '131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11: 'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12: '31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13: 'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14: '2c4b1736566b8ca6051e668de68650686a3922f2',
                 15: '5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16: 'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17: '8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18: '5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19: '1ebed371807ba5935958ad0884595126e8c4e823',
                 20: '2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21: '01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22: 'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23: 'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24: 'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25: 'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        next_parent = self.get_simple_key('base')
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        target = self.get_versionedfiles('target')
        for key in multiparent.topo_iter_keys(files, files.keys()):
            mpdiff = files.make_mpdiffs([key])[0]
            parents = files.get_parent_map([key])[key] or []
            target.add_mpdiffs(
                [(key, parents, files.get_sha1s([key])[key], mpdiff)])
            self.assertEqualDiff(
                files.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'),
                target.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'))
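
    # The mpdiff round-trip exercised above, in sketch:
    #     mpdiff = files.make_mpdiffs([key])[0]
    #     target.add_mpdiffs([(key, parents, expected_sha1, mpdiff)])
    # after which both stores must yield byte-identical fulltexts for key.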

    def test_keys(self):
        # While use is discouraged, versions() is still needed by aspects of
        # bzr.
        files = self.get_versionedfiles()
        self.assertEqual(set(), set(files.keys()))
        if self.key_length == 1:
            key = ('foo',)
        else:
            key = ('foo', 'bar',)
        files.add_lines(key, (), [])
        self.assertEqual(set([key]), set(files.keys()))


class VirtualVersionedFilesTests(TestCase):
    """Basic tests for the VirtualVersionedFiles implementations."""

    def _get_parent_map(self, keys):
        ret = {}
        for k in keys:
            if k in self._parent_map:
                ret[k] = self._parent_map[k]
        return ret

    def setUp(self):
        TestCase.setUp(self)
        self._lines = {}
        self._parent_map = {}
        self.texts = VirtualVersionedFiles(self._get_parent_map,
                                           self._lines.get)

    def test_add_lines(self):
        self.assertRaises(NotImplementedError,
                self.texts.add_lines, "foo", [], [])

    def test_add_mpdiffs(self):
        self.assertRaises(NotImplementedError,
                self.texts.add_mpdiffs, [])

    def test_check_noerrors(self):
        self.texts.check()

    def test_insert_record_stream(self):
        self.assertRaises(NotImplementedError, self.texts.insert_record_stream,
                          [])

    def test_get_sha1s_nonexistent(self):
        self.assertEquals({}, self.texts.get_sha1s([("NONEXISTENT",)]))

    def test_get_sha1s(self):
        self._lines["key"] = ["dataline1", "dataline2"]
        self.assertEquals({("key",): osutils.sha_strings(self._lines["key"])},
                          self.texts.get_sha1s([("key",)]))

    def test_get_parent_map(self):
        self._parent_map = {"G": ("A", "B")}
        self.assertEquals({("G",): (("A",),("B",))},
                          self.texts.get_parent_map([("G",), ("L",)]))

    def test_get_record_stream(self):
        self._lines["A"] = ["FOO", "BAR"]
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("chunked", record.storage_kind)
        self.assertEquals("FOOBAR", record.get_bytes_as("fulltext"))
        self.assertEquals(["FOO", "BAR"], record.get_bytes_as("chunked"))

    def test_get_record_stream_absent(self):
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("absent", record.storage_kind)

    def test_iter_lines_added_or_present_in_keys(self):
        self._lines["A"] = ["FOO", "BAR"]
        self._lines["B"] = ["HEY"]
        self._lines["C"] = ["Alberta"]
        it = self.texts.iter_lines_added_or_present_in_keys([("A",), ("B",)])
        self.assertEquals(sorted([("FOO", "A"), ("BAR", "A"), ("HEY", "B")]),
                          sorted(list(it)))


class TestOrderingVersionedFilesDecorator(TestCaseWithMemoryTransport):

    def get_ordering_vf(self, key_priority):
        builder = self.make_branch_builder('test')
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'TREE_ROOT', 'directory', None))])
        builder.build_snapshot('B', ['A'], [])
        builder.build_snapshot('C', ['B'], [])
        builder.build_snapshot('D', ['C'], [])
        builder.finish_series()
        b = builder.get_branch()
        b.lock_read()
        self.addCleanup(b.unlock)
        vf = b.repository.inventories
        return versionedfile.OrderingVersionedFilesDecorator(vf, key_priority)
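
    # A minimal sketch of the decorator's behaviour (priority map values sort
    # ascending for 'unordered' requests; 'real_vf' is hypothetical):
    #     vf = versionedfile.OrderingVersionedFilesDecorator(
    #         real_vf, {('A',): 2, ('B',): 1})
    #     # an 'unordered' stream now yields ('B',) before ('A',)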

    def test_get_empty(self):
        vf = self.get_ordering_vf({})
        self.assertEqual([], vf.calls)

    def test_get_record_stream_topological(self):
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'topological', False)]
        # We should have gotten the keys in topological order
        self.assertEqual([('A',), ('B',), ('C',), ('D',)], keys)
        # And recorded that the request was made
        self.assertEqual([('get_record_stream', request_keys, 'topological',
                           False)], vf.calls)

    def test_get_record_stream_ordered(self):
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'unordered', False)]
        # They should be returned based on their priority
        self.assertEqual([('D',), ('B',), ('A',), ('C',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)

    def test_get_record_stream_implicit_order(self):
        vf = self.get_ordering_vf({('B',): 2, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'unordered', False)]
        # A and C are not in the map, so they get sorted to the front. A comes
        # before C alphabetically, so it comes back first
        self.assertEqual([('A',), ('C',), ('D',), ('B',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)