# Copyright (C) 2006-2010 Canonical Ltd
#
# Johan Rydberg <jrydberg@gnu.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

# TODO: might be nice to create a versionedfile with some type of corruption
# considered typical and check that it can be detected/corrected.

from itertools import chain, izip
from StringIO import StringIO

from bzrlib import (
    errors,
    graph as _mod_graph,
    groupcompress,
    knit as _mod_knit,
    osutils,
    progress,
    ui,
    )
from bzrlib.errors import (
    RevisionNotPresent,
    RevisionAlreadyPresent,
    )
from bzrlib.knit import (
    cleanup_pack_knit,
    make_file_factory,
    make_pack_factory,
    )
from bzrlib.tests import (
    TestCaseWithMemoryTransport,
    TestNotApplicable,
    TestSkipped,
    condition_isinstance,
    multiply_tests,
    split_suite_by_condition,
    )
from bzrlib.tests.http_utils import TestCaseWithWebserver
from bzrlib.trace import mutter
from bzrlib.transport import get_transport
from bzrlib.transport.memory import MemoryTransport
from bzrlib.tsort import topo_sort
from bzrlib.tuned_gzip import GzipFile
import bzrlib.versionedfile as versionedfile
from bzrlib.versionedfile import (
    ConstantMapper,
    HashEscapedPrefixMapper,
    PrefixMapper,
    VirtualVersionedFiles,
    make_versioned_files_factory,
    )
from bzrlib.weave import WeaveFile
from bzrlib.weavefile import read_weave, write_weave


def load_tests(standard_tests, module, loader):
    """Parameterize VersionedFiles tests for different implementations."""
    to_adapt, result = split_suite_by_condition(
        standard_tests, condition_isinstance(TestVersionedFiles))
    # We want to be sure of behaviour for:
    # weaves prefix layout (weave texts)
    # individually named weaves (weave inventories)
    # annotated knits - prefix|hash|hash-escape layout, we test the third only
    #                   as it is the most complex mapper.
    # individually named knits
    # individual no-graph knits in packs (signatures)
    # individual graph knits in packs (inventories)
    # individual graph nocompression knits in packs (revisions)
    # plain text knits in packs (texts)
    len_one_scenarios = [
        ('weave-named', {
            'cleanup':None,
            'factory':make_versioned_files_factory(WeaveFile,
                ConstantMapper('inventory')),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-knit', {
            'cleanup':None,
            'factory':make_file_factory(False, ConstantMapper('revisions')),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-nograph-nodelta-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(False, False, 1),
            'graph':False,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-graph-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, True, 1),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': True,
            }),
        ('named-graph-nodelta-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, False, 1),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('groupcompress-nograph', {
            'cleanup':groupcompress.cleanup_pack_group,
            'factory':groupcompress.make_pack_factory(False, False, 1),
            'graph':False,
            'key_length':1,
            'support_partial_insertion':False,
            }),
        ]
    len_two_scenarios = [
        ('weave-prefix', {
            'cleanup':None,
            'factory':make_versioned_files_factory(WeaveFile,
                PrefixMapper()),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': False,
            }),
        ('annotated-knit-escape', {
            'cleanup':None,
            'factory':make_file_factory(True, HashEscapedPrefixMapper()),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': False,
            }),
        ('plain-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, True, 2),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': True,
            }),
        ('groupcompress', {
            'cleanup':groupcompress.cleanup_pack_group,
            'factory':groupcompress.make_pack_factory(True, False, 1),
            'graph':True,
            'key_length':1,
            'support_partial_insertion':False,
            }),
        ]
    scenarios = len_one_scenarios + len_two_scenarios
    return multiply_tests(to_adapt, scenarios, result)
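
# Each scenario above is a (name, attribute-dict) pair: multiply_tests clones
# every TestVersionedFiles test once per scenario and sets the attributes on
# the cloned test, so self.factory, self.cleanup, self.graph, self.key_length
# and self.support_partial_insertion vary per run. A minimal sketch of what a
# hypothetical extra scenario would look like (the scenario name and mapped
# file name here are illustrative only, not part of this module):
#
#   len_one_scenarios.append(('my-plain-knit', {
#       'cleanup': None,
#       'factory': make_file_factory(False, ConstantMapper('my-file')),
#       'graph': True,
#       'key_length': 1,
#       'support_partial_insertion': False,
#       }))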


def get_diamond_vf(f, trailing_eol=True, left_only=False):
    """Get a diamond graph to exercise deltas and merges.

    :param trailing_eol: If True end the last line with \n.
    :param left_only: If True do not add the right and merged nodes.
    :return: The versioned file and the parents dict for its nodes.
    """
    parents = {
        'origin': (),
        'base': (('origin',),),
        'left': (('base',),),
        'right': (('base',),),
        'merged': (('left',), ('right',)),
        }
    # insert a diamond graph to exercise deltas and merges.
    if trailing_eol:
        last_char = '\n'
    else:
        last_char = ''
    f.add_lines('origin', [], ['origin' + last_char])
    f.add_lines('base', ['origin'], ['base' + last_char])
    f.add_lines('left', ['base'], ['base\n', 'left' + last_char])
    if not left_only:
        f.add_lines('right', ['base'],
            ['base\n', 'right' + last_char])
        f.add_lines('merged', ['left', 'right'],
            ['base\n', 'left\n', 'right\n', 'merged' + last_char])
    return f, parents
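
# For reference, the diamond built by get_diamond_vf (and, once per prefix,
# by get_diamond_files below) has this shape, with ancestry flowing downward:
#
#            origin
#              |
#             base
#             /  \
#          left  right
#             \  /
#            merged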


def get_diamond_files(files, key_length, trailing_eol=True, left_only=False,
    nograph=False, nokeys=False):
    """Get a diamond graph to exercise deltas and merges.

    This creates a 5-node graph in files. If files supports 2-length keys two
    graphs are made to exercise the support for multiple ids.

    :param trailing_eol: If True end the last line with \n.
    :param key_length: The length of keys in files. Currently supports length 1
        and 2 keys.
    :param left_only: If True do not add the right and merged nodes.
    :param nograph: If True, do not provide parents to the add_lines calls;
        this is useful for tests that need inserted data but have graphless
        stores.
    :param nokeys: If True, pass None as the key for all insertions.
        Currently implies nograph.
    :return: The results of the add_lines calls.
    """
    if key_length == 1:
        prefixes = [()]
    else:
        prefixes = [('FileA',), ('FileB',)]
    # insert a diamond graph to exercise deltas and merges.
    if trailing_eol:
        last_char = '\n'
    else:
        last_char = ''
    result = []
    def get_parents(suffix_list):
        if nograph:
            return ()
        else:
            result = [prefix + suffix for suffix in suffix_list]
            return result
    def get_key(suffix):
        if nokeys:
            return (None,)
        else:
            return (suffix,)
    # we loop over each key because that spreads the inserts across prefixes,
    # which is how commit operates.
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('origin'), (),
            ['origin' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('base'),
            get_parents([('origin',)]), ['base' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('left'),
            get_parents([('base',)]),
            ['base\n', 'left' + last_char]))
    if not left_only:
        for prefix in prefixes:
            result.append(files.add_lines(prefix + get_key('right'),
                get_parents([('base',)]),
                ['base\n', 'right' + last_char]))
        for prefix in prefixes:
            result.append(files.add_lines(prefix + get_key('merged'),
                get_parents([('left',), ('right',)]),
                ['base\n', 'left\n', 'right\n', 'merged' + last_char]))
    return result


class VersionedFileTestMixIn(object):
    """A mixin test class for testing VersionedFiles.

    This is not an adaptor-style test at this point because
    there's no dynamic substitution of versioned file implementations,
    they are strictly controlled by their owning repositories.
    """

    def get_transaction(self):
        if not hasattr(self, '_transaction'):
            self._transaction = None
        return self._transaction

    def test_add(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
        versions = f.versions()
        self.assertTrue('r0' in versions)
        self.assertTrue('r1' in versions)
        self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
        self.assertEquals(f.get_text('r0'), 'a\nb\n')
        self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
        self.assertEqual(2, len(f))
        self.assertEqual(2, f.num_versions())
        self.assertRaises(RevisionNotPresent,
            f.add_lines, 'r2', ['foo'], [])
        self.assertRaises(RevisionAlreadyPresent,
            f.add_lines, 'r1', [], [])

        # this checks that reopen with create=True does not break anything.
        f = self.reopen_file(create=True)

    def test_adds_with_parent_texts(self):
        f = self.get_file()
        parent_texts = {}
        _, _, parent_texts['r0'] = f.add_lines('r0', [], ['a\n', 'b\n'])
        try:
            _, _, parent_texts['r1'] = f.add_lines_with_ghosts('r1',
                ['r0', 'ghost'], ['b\n', 'c\n'], parent_texts=parent_texts)
        except NotImplementedError:
            # if the format doesn't support ghosts, just add normally.
            _, _, parent_texts['r1'] = f.add_lines('r1',
                ['r0'], ['b\n', 'c\n'], parent_texts=parent_texts)
        f.add_lines('r2', ['r1'], ['c\n', 'd\n'], parent_texts=parent_texts)
        self.assertNotEqual(None, parent_texts['r0'])
        self.assertNotEqual(None, parent_texts['r1'])

        versions = f.versions()
        self.assertTrue('r0' in versions)
        self.assertTrue('r1' in versions)
        self.assertTrue('r2' in versions)
        self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
        self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
        self.assertEquals(f.get_lines('r2'), ['c\n', 'd\n'])
        self.assertEqual(3, f.num_versions())
        origins = f.annotate('r1')
        self.assertEquals(origins[0][0], 'r0')
        self.assertEquals(origins[1][0], 'r1')
        origins = f.annotate('r2')
        self.assertEquals(origins[0][0], 'r1')
        self.assertEquals(origins[1][0], 'r2')

        f = self.reopen_file()

    def test_add_unicode_content(self):
        # unicode content is not permitted in versioned files.
        # versioned files version sequences of bytes only.
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterUnicode,
            vf.add_lines, 'a', [], ['a\n', u'b\n', 'c\n'])
        self.assertRaises(
            (errors.BzrBadParameterUnicode, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n', u'b\n', 'c\n'])

    def test_add_follows_left_matching_blocks(self):
        """If we change left_matching_blocks, the delta changes.

        Note: There are multiple correct deltas in this case, because
        we start with 1 "a" and we get 3.
        """
        vf = self.get_file()
        if isinstance(vf, WeaveFile):
            raise TestSkipped("WeaveFile ignores left_matching_blocks")
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['a\n', 'a\n', 'a\n'],
                     left_matching_blocks=[(0, 0, 1), (1, 3, 0)])
        self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('2'))
        vf.add_lines('3', ['1'], ['a\n', 'a\n', 'a\n'],
                     left_matching_blocks=[(0, 2, 1), (1, 3, 0)])
        self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('3'))
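        # The left_matching_blocks tuples follow the same
        # (parent_index, new_index, length) convention as difflib's
        # get_matching_blocks(): (0, 0, 1) maps parent line 0 onto new line 0
        # for one line, and (1, 3, 0) is the zero-length terminator sitting
        # at the end of both texts (1 parent line, 3 new lines).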

    def test_inline_newline_throws(self):
        # newline characters are not permitted inside lines being added;
        # they may only terminate a line.
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterContainsNewline,
            vf.add_lines, 'a', [], ['a\n\n'])
        self.assertRaises(
            (errors.BzrBadParameterContainsNewline, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n\n'])
        # but inline CR's are allowed
        vf.add_lines('a', [], ['a\r\n'])
        try:
            vf.add_lines_with_ghosts('b', [], ['a\r\n'])
        except NotImplementedError:
            pass

    def test_add_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId,
            vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])

    def test_add_lines_nostoresha(self):
        """Adding pre-existing content with nostore_sha set raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                              version + "2")

    def test_add_lines_with_ghosts_nostoresha(self):
        """Adding pre-existing content with nostore_sha set raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        # is the test applicable to this vf implementation?
        try:
            vf.add_lines_with_ghosts('d', [], [])
        except NotImplementedError:
            raise TestSkipped("add_lines_with_ghosts is optional")
        for sha, (version, lines) in zip(
            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines_with_ghosts, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                              version + "2")

    def test_add_lines_return_value(self):
        # add_lines should return the sha1 and the text size.
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        # check results for the three cases:
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            # the first two elements are the same for all versioned files:
            # - the digest and the size of the text. For some versioned files
            #   additional data is returned in additional tuple elements.
            result = vf.add_lines(version, [], lines)
            self.assertEqual(3, len(result))
            self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
                result[0:2])
        # parents should not affect the result:
        lines = sample_text_nl[1]
        self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
            vf.add_lines('d', ['b', 'c'], lines)[0:2])
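        # For sample_text_nl, for example, the first two elements are the
        # sha1 of "foo\nbar\n" and its size in bytes, 8 (4 + 4); as asserted
        # above, the parent list plays no part in either value.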

    def test_get_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId, vf.get_texts, ['b:'])
        self.assertRaises(errors.ReservedId, vf.get_lines, 'b:')
        self.assertRaises(errors.ReservedId, vf.get_text, 'b:')

    def test_add_unchanged_last_line_noeol_snapshot(self):
        """Adding a text whose unchanged last line has no eol should work."""
        # Test adding this in a number of chain lengths; because the interface
        # for VersionedFile does not allow forcing a specific chain length, we
        # just use a small base to get the first snapshot, then a much longer
        # first line for the next add (which will make the third add snapshot)
        # and so on. 20 has been chosen as an arbitrary figure - knits use 200
        # as a capped delta length, but ideally we would have some way of
        # tuning the test to the store (e.g. keep going until a snapshot
        # happens).
        for length in range(20):
            version_lines = {}
            vf = self.get_file('case-%d' % length)
            prefix = 'step-%d'
            parents = []
            for step in range(length):
                version = prefix % step
                lines = (['prelude \n'] * step) + ['line']
                vf.add_lines(version, parents, lines)
                version_lines[version] = lines
                parents = [version]
            vf.add_lines('no-eol', parents, ['line'])
            vf.get_texts(version_lines.keys())
            self.assertEqualDiff('line', vf.get_text('no-eol'))

    def test_get_texts_eol_variation(self):
        # similar to the failure in <http://bugs.launchpad.net/234748>
        vf = self.get_file()
        sample_text_nl = ["line\n"]
        sample_text_no_nl = ["line"]
        versions = []
        version_lines = {}
        parents = []
        for i in range(4):
            version = 'v%d' % i
            if i % 2:
                lines = sample_text_nl
            else:
                lines = sample_text_no_nl
            # left_matching blocks is an internal api; it operates on the
            # *internal* representation for a knit, which is with *all* lines
            # being normalised to end with \n - even the final line in a no_nl
            # file. Using it here ensures that a broken internal implementation
            # (which is what this test tests) will generate a correct line
            # delta (which is to say, an empty delta).
            vf.add_lines(version, parents, lines,
                left_matching_blocks=[(0, 0, 1)])
            parents = [version]
            versions.append(version)
            version_lines[version] = lines
        vf.check()
        vf.get_texts(versions)
        vf.get_texts(reversed(versions))

    def test_add_lines_with_matching_blocks_noeol_last_line(self):
        """Adding a text whose unchanged last line has no eol should work."""
        from bzrlib import multiparent
        # Hand verified sha1 of the text we're adding.
        sha1 = '6a1d115ec7b60afb664dc14890b5af5ce3c827a4'
        # Create a mpdiff which adds a new line before the trailing line, and
        # reuses the last line unaltered (which can cause annotation reuse).
        # Test adding this in two situations:
        # On top of a new insertion
        vf = self.get_file('fulltext')
        vf.add_lines('noeol', [], ['line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(0, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
        # On top of a delta
        vf = self.get_file('delta')
        vf.add_lines('base', [], ['line'])
        vf.add_lines('noeol', ['base'], ['prelude\n', 'line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(1, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))

    def test_make_mpdiffs(self):
        from bzrlib import multiparent
        vf = self.get_file('foo')
        sha1s = self._setup_for_deltas(vf)
        new_vf = self.get_file('bar')
        for version in multiparent.topo_iter(vf):
            mpdiff = vf.make_mpdiffs([version])[0]
            new_vf.add_mpdiffs([(version, vf.get_parent_map([version])[version],
                                 vf.get_sha1s([version])[version], mpdiff)])
            self.assertEqualDiff(vf.get_text(version),
                                 new_vf.get_text(version))

    def test_make_mpdiffs_with_ghosts(self):
        vf = self.get_file('foo')
        try:
            vf.add_lines_with_ghosts('text', ['ghost'], ['line\n'])
        except NotImplementedError:
            # old Weave formats do not allow ghosts
            return
        self.assertRaises(errors.RevisionNotPresent, vf.make_mpdiffs, ['ghost'])

    def _setup_for_deltas(self, f):
        self.assertFalse(f.has_version('base'))
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 25 insertions
        f.add_lines('base', [], ['line\n'])
        f.add_lines('noeol', ['base'], ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        f.add_lines('noeolsecond', ['noeol'], ['line\n', 'line'])
        # differing last line with parent, both no-eol
        f.add_lines('noeolnotshared', ['noeolsecond'], ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        f.add_lines('eol', ['noeol'], ['phone\n'])
        # add eol following a noneol parent, no change content
        f.add_lines('eolline', ['noeol'], ['line\n'])
        # noeol with no parents:
        f.add_lines('noeolbase', [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        f.add_lines('eolbeforefirstparent', ['noeolbase', 'noeol'], ['line'])
        # two identical eol texts
        f.add_lines('noeoldup', ['noeol'], ['line'])
        next_parent = 'base'
        text_name = 'chain1-'
        text = ['line\n']
        sha1s = {
            0: 'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
            1: '45e21ea146a81ea44a821737acdb4f9791c8abe7',
            2: 'e1f11570edf3e2a070052366c582837a4fe4e9fa',
            3: '26b4b8626da827088c514b8f9bbe4ebf181edda1',
            4: 'e28a5510be25ba84d31121cff00956f9970ae6f6',
            5: 'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
            6: '2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
            7: '95c14da9cafbf828e3e74a6f016d87926ba234ab',
            8: '779e9a0b28f9f832528d4b21e17e168c67697272',
            9: '1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
            10: '131a2ae712cf51ed62f143e3fbac3d4206c25a05',
            11: 'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
            12: '31a2286267f24d8bedaa43355f8ad7129509ea85',
            13: 'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
            14: '2c4b1736566b8ca6051e668de68650686a3922f2',
            15: '5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
            16: 'b0d2e18d3559a00580f6b49804c23fea500feab3',
            17: '8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
            18: '5cf64a3459ae28efa60239e44b20312d25b253f3',
            19: '1ebed371807ba5935958ad0884595126e8c4e823',
            20: '2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
            21: '01edc447978004f6e4e962b417a4ae1955b6fe5d',
            22: 'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
            23: 'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
            24: 'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
            25: 'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
            }
        for depth in range(26):
            new_version = text_name + '%s' % depth
            text = text + ['line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        next_parent = 'base'
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = text_name + '%s' % depth
            text = text + ['line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        return sha1s

    def test_ancestry(self):
        f = self.get_file()
        self.assertEqual([], f.get_ancestry([]))
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
        f.add_lines('r2', ['r0'], ['b\n', 'c\n'])
        f.add_lines('r3', ['r2'], ['b\n', 'c\n'])
        f.add_lines('rM', ['r1', 'r2'], ['b\n', 'c\n'])
        self.assertEqual([], f.get_ancestry([]))
        versions = f.get_ancestry(['rM'])
        # there are some possibilities:
        # r0 r1 r2 rM
        # r0 r2 r1 rM
        # etc
        # so we check indexes
        r0 = versions.index('r0')
        r1 = versions.index('r1')
        r2 = versions.index('r2')
        self.assertFalse('r3' in versions)
        rM = versions.index('rM')
        self.assertTrue(r0 < r1)
        self.assertTrue(r0 < r2)
        self.assertTrue(r1 < rM)
        self.assertTrue(r2 < rM)

        self.assertRaises(RevisionNotPresent,
            f.get_ancestry, ['rM', 'rX'])

        self.assertEqual(set(f.get_ancestry('rM')),
            set(f.get_ancestry('rM', topo_sorted=False)))

    def test_mutate_after_finish(self):
        self._transaction = 'before'
        f = self.get_file()
        self._transaction = 'after'
        self.assertRaises(errors.OutSideTransaction, f.add_lines, '', [], [])
        self.assertRaises(errors.OutSideTransaction, f.add_lines_with_ghosts, '', [], [])

    def test_copy_to(self):
        f = self.get_file()
        f.add_lines('0', [], ['a\n'])
        t = MemoryTransport()
        f.copy_to('foo', t)
        for suffix in self.get_factory().get_suffixes():
            self.assertTrue(t.has('foo' + suffix))

    def test_get_suffixes(self):
        f = self.get_file()
        # the suffixes should be returned as a list
        self.assertTrue(isinstance(self.get_factory().get_suffixes(), list))

    def test_get_parent_map(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        self.assertEqual(
            {'r0':()}, f.get_parent_map(['r0']))
        f.add_lines('r1', ['r0'], ['a\n', 'b\n'])
        self.assertEqual(
            {'r1':('r0',)}, f.get_parent_map(['r1']))
        self.assertEqual(
            {'r0':(),
             'r1':('r0',)},
            f.get_parent_map(['r0', 'r1']))
        f.add_lines('r2', [], ['a\n', 'b\n'])
        f.add_lines('r3', [], ['a\n', 'b\n'])
        f.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n'])
        self.assertEqual(
            {'m':('r0', 'r1', 'r2', 'r3')}, f.get_parent_map(['m']))
        self.assertEqual({}, f.get_parent_map('y'))
        self.assertEqual(
            {'r0':(),
             'r1':('r0',)},
            f.get_parent_map(['r0', 'y', 'r1']))

    def test_annotate(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
        origins = f.annotate('r1')
        self.assertEquals(origins[0][0], 'r1')
        self.assertEquals(origins[1][0], 'r0')

        self.assertRaises(RevisionNotPresent,
            f.annotate, 'foo')

    def test_detection(self):
        # Test weaves detect corruption.
        #
        # Weaves contain a checksum of their texts.
        # When a text is extracted, this checksum should be
        # verified.

        w = self.get_file_corrupted_text()

        self.assertEqual('hello\n', w.get_text('v1'))
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.check)

        w = self.get_file_corrupted_checksum()

        self.assertEqual('hello\n', w.get_text('v1'))
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.check)
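        # get_file_corrupted_text hands back a weave whose stored text no
        # longer matches its recorded sha1; get_file_corrupted_checksum one
        # whose text is intact but whose recorded sha1 is wrong. Both kinds
        # of damage must be caught on extraction and by check().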

    def get_file_corrupted_text(self):
        """Return a versioned file with corrupt text but valid metadata."""
        raise NotImplementedError(self.get_file_corrupted_text)

    def reopen_file(self, name='foo'):
        """Open the versioned file from disk again."""
        raise NotImplementedError(self.reopen_file)

    def test_iter_lines_added_or_present_in_versions(self):
        # test that we get at least an equal set of the lines added by
        # versions in the weave
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.ProgressTask):

            def __init__(self):
                progress.ProgressTask.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        vf = self.get_file()
        # add a base to get included
        vf.add_lines('base', [], ['base\n'])
        # add an ancestor to be included on one side
        vf.add_lines('lancestor', [], ['lancestor\n'])
        # add an ancestor to be included on the other side
        vf.add_lines('rancestor', ['base'], ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        vf.add_lines('child', ['rancestor'], ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        vf.add_lines('otherchild',
                     ['lancestor', 'base'],
                     ['base\n', 'lancestor\n', 'otherchild\n'])
        def iter_with_versions(versions, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in vf.iter_lines_added_or_present_in_versions(versions,
                pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines
        lines = iter_with_versions(['child', 'otherchild'],
                                   [('Walking content', 0, 2),
                                    ('Walking content', 1, 2),
                                    ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
        # we don't care if we got more than that.

        # test all lines
        lines = iter_with_versions(None, [('Walking content', 0, 5),
                                          ('Walking content', 1, 5),
                                          ('Walking content', 2, 5),
                                          ('Walking content', 3, 5),
                                          ('Walking content', 4, 5),
                                          ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', 'base')] > 0)
        self.assertTrue(lines[('lancestor\n', 'lancestor')] > 0)
        self.assertTrue(lines[('rancestor\n', 'rancestor')] > 0)
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)

    def test_add_lines_with_ghosts(self):
        # some versioned file formats allow lines to be added that name
        # parents which are not yet present in the file (ghosts). Formats
        # that do not support this need to raise NotImplementedError on the
        # add_lines_with_ghosts api.
        vf = self.get_file()
        # add a revision with ghost parents
        # The preferred form is utf8, but we should translate when needed
        parent_id_unicode = u'b\xbfse'
        parent_id_utf8 = parent_id_unicode.encode('utf8')
        try:
            vf.add_lines_with_ghosts('notbxbfse', [parent_id_utf8], [])
        except NotImplementedError:
            # check the other ghost apis are also not implemented
            self.assertRaises(NotImplementedError, vf.get_ancestry_with_ghosts, ['foo'])
            self.assertRaises(NotImplementedError, vf.get_parents_with_ghosts, 'foo')
            return
        vf = self.reopen_file()
        # test key graph related apis: get_ancestry, _graph, get_parents,
        # has_version
        # - these are ghost unaware and must not reflect ghosts
        self.assertEqual(['notbxbfse'], vf.get_ancestry('notbxbfse'))
        self.assertFalse(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
        # if we add something that is a ghost of another, it should correct the
        # results of the prior apis
        vf.add_lines(parent_id_utf8, [], [])
        self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry(['notbxbfse']))
        self.assertEqual({'notbxbfse':(parent_id_utf8,)},
            vf.get_parent_map(['notbxbfse']))
        self.assertTrue(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'],
            vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
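        # (A "ghost" is a parent that a version references but that is not
        # itself present in the store; the plain graph apis pretend it does
        # not exist, while the _with_ghosts apis report it.)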

    def test_add_lines_with_ghosts_after_normal_revs(self):
        # some versioned file formats allow lines to be added that name
        # parents which are not yet present in the file (ghosts). Formats
        # that do not support this need to raise NotImplementedError on the
        # add_lines_with_ghosts api.
        vf = self.get_file()
        # probe for ghost support
        try:
            vf.add_lines_with_ghosts('base', [], ['line\n', 'line_b\n'])
        except NotImplementedError:
            return
        vf.add_lines_with_ghosts('references_ghost',
                                 ['base', 'a_ghost'],
                                 ['line\n', 'line_b\n', 'line_c\n'])
        origins = vf.annotate('references_ghost')
        self.assertEquals(('base', 'line\n'), origins[0])
        self.assertEquals(('base', 'line_b\n'), origins[1])
        self.assertEquals(('references_ghost', 'line_c\n'), origins[2])

    def test_readonly_mode(self):
        transport = get_transport(self.get_url('.'))
        factory = self.get_factory()
        vf = factory('id', transport, 0777, create=True, access_mode='w')
        vf = factory('id', transport, access_mode='r')
        self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
        self.assertRaises(errors.ReadOnlyError,
                          vf.add_lines_with_ghosts,
                          'base', [], [])

    def test_get_sha1s(self):
        # check the sha1 data is available
        vf = self.get_file()
        vf.add_lines('a', [], ['a\n'])
        # the same file, different metadata
        vf.add_lines('b', ['a'], ['a\n'])
        # a file differing only in last newline.
        vf.add_lines('c', [], ['a'])
        self.assertEqual({
            'a': '3f786850e387550fdab836ed7e6dc881de23001b',
            'c': '86f7e437faa5a7fce15d1ddcb9eaeaea377667b8',
            'b': '3f786850e387550fdab836ed7e6dc881de23001b',
            },
            vf.get_sha1s(['a', 'c', 'b']))
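        # 'a' and 'b' share a sha1 despite having different parents: the
        # digest covers the text only ("a\n"), not the graph metadata, while
        # 'c' ("a" with no trailing newline) hashes differently.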


class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)

    def get_file_corrupted_text(self):
        w = WeaveFile('foo', get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)
        w.add_lines('v1', [], ['hello\n'])
        w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])

        # We are going to invasively corrupt the text
        # Make sure the internals of weave are the same
        self.assertEqual([('{', 0)
                        , 'hello\n'
                        , ('}', None)
                        , ('{', 1)
                        , 'there\n'
                        , ('}', None)
                        ], w._weave)

        self.assertEqual(['f572d396fae9206628714fb2ce00f72e94f2258f'
                        , '90f265c6e75f1c8f9ab76dcf85528352c5f215ef'
                        ], w._sha1s)
        w.check()

        # Corrupted
        w._weave[4] = 'There\n'
        return w

    def get_file_corrupted_checksum(self):
        w = self.get_file_corrupted_text()
        # Corrected
        w._weave[4] = 'there\n'
        self.assertEqual('hello\nthere\n', w.get_text('v2'))
        # Invalid checksum, first digit changed
        w._sha1s[1] = 'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
        return w

    def reopen_file(self, name='foo', create=False):
        return WeaveFile(name, get_transport(self.get_url('.')), create=create,
            get_scope=self.get_transaction)

    def test_no_implicit_create(self):
        self.assertRaises(errors.NoSuchFile,
                          WeaveFile,
                          'foo',
                          get_transport(self.get_url('.')),
                          get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile


class TestPlanMergeVersionedFile(TestCaseWithMemoryTransport):

    def setUp(self):
        TestCaseWithMemoryTransport.setUp(self)
        mapper = PrefixMapper()
        factory = make_file_factory(True, mapper)
        self.vf1 = factory(self.get_transport('root-1'))
        self.vf2 = factory(self.get_transport('root-2'))
        self.plan_merge_vf = versionedfile._PlanMergeVersionedFile('root')
        self.plan_merge_vf.fallback_versionedfiles.extend([self.vf1, self.vf2])

    def test_add_lines(self):
        self.plan_merge_vf.add_lines(('root', 'a:'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
            ('root', 'a'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
            ('root', 'a:'), None, [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
            ('root', 'a:'), [], None)
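        # _PlanMergeVersionedFile holds temporary texts built during merge
        # planning, hence the checks above: revision ids must end in ':' to
        # mark them as temporary, and parents and lines must both be given.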

    def setup_abcde(self):
        self.vf1.add_lines(('root', 'A'), [], ['a'])
        self.vf1.add_lines(('root', 'B'), [('root', 'A')], ['b'])
        self.vf2.add_lines(('root', 'C'), [], ['c'])
        self.vf2.add_lines(('root', 'D'), [('root', 'C')], ['d'])
        self.plan_merge_vf.add_lines(('root', 'E:'),
            [('root', 'B'), ('root', 'D')], ['e'])

    def test_get_parents(self):
        self.setup_abcde()
        self.assertEqual({('root', 'B'):(('root', 'A'),)},
            self.plan_merge_vf.get_parent_map([('root', 'B')]))
        self.assertEqual({('root', 'D'):(('root', 'C'),)},
            self.plan_merge_vf.get_parent_map([('root', 'D')]))
        self.assertEqual({('root', 'E:'):(('root', 'B'),('root', 'D'))},
            self.plan_merge_vf.get_parent_map([('root', 'E:')]))
        self.assertEqual({},
            self.plan_merge_vf.get_parent_map([('root', 'F')]))
        self.assertEqual({
            ('root', 'B'):(('root', 'A'),),
            ('root', 'D'):(('root', 'C'),),
            ('root', 'E:'):(('root', 'B'),('root', 'D')),
            },
            self.plan_merge_vf.get_parent_map(
                [('root', 'B'), ('root', 'D'), ('root', 'E:'), ('root', 'F')]))

    def test_get_record_stream(self):
        self.setup_abcde()
        def get_record(suffix):
            return self.plan_merge_vf.get_record_stream(
                [('root', suffix)], 'unordered', True).next()
        self.assertEqual('a', get_record('A').get_bytes_as('fulltext'))
        self.assertEqual('c', get_record('C').get_bytes_as('fulltext'))
        self.assertEqual('e', get_record('E:').get_bytes_as('fulltext'))
        self.assertEqual('absent', get_record('F').storage_kind)


class TestReadonlyHttpMixin(object):

    def get_transaction(self):
        return 1

    def test_readonly_http_works(self):
        # we should be able to read from http with a versioned file.
        vf = self.get_file()
        # try an empty file access
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
        self.assertEqual([], readonly_vf.versions())
        # now with feeling.
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
        self.assertEqual(['1', '2'], vf.versions())
        for version in readonly_vf.versions():
            readonly_vf.get_lines(version)


class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):

    def get_file(self):
        return WeaveFile('foo', get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile


class MergeCasesMixin(object):

    def doMerge(self, base, a, b, mp):
        from cStringIO import StringIO
        from textwrap import dedent

        def addcrlf(x):
            return x + '\n'

        w = self.get_file()
        w.add_lines('text0', [], map(addcrlf, base))
        w.add_lines('text1', ['text0'], map(addcrlf, a))
        w.add_lines('text2', ['text0'], map(addcrlf, b))

        self.log_contents(w)

        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))

        self.log('merge result:')
        mt = StringIO()
        mt.writelines(w.weave_merge(p))
        mt.seek(0)
        self.log(mt.getvalue())

        mp = map(addcrlf, mp)
        self.assertEqual(mt.readlines(), mp)

    def testOneInsert(self):
        self.doMerge([],
                     ['aa'],
                     [],
                     ['aa'])

    def testSeparateInserts(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    def testSameInsert(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    overlappedInsertExpected = ['aaa', 'xxx', 'yyy', 'bbb']

    def testOverlappedInsert(self):
        self.doMerge(['aaa', 'bbb'],
                     ['aaa', 'xxx', 'yyy', 'bbb'],
                     ['aaa', 'xxx', 'bbb'], self.overlappedInsertExpected)

        # really it ought to reduce this to
        # ['aaa', 'xxx', 'yyy', 'bbb']

    def testClashReplace(self):
        self.doMerge(['aaa'],
                     ['xxx'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert1(self):
        self.doMerge(['aaa'],
                     ['xxx', 'aaa'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert2(self):
        self.doMerge(['aaa'],
                     ['aaa'],
                     ['yyy', 'zzz'],
                     ['yyy', 'zzz'])

    def testDeleteAndModify(self):
        """Clashing delete and modification.

        If one side modifies a region and the other deletes it then
        there should be a conflict with one side blank.
        """

        #######################################
        # skipped, not working yet
        return

        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'ddd', 'ccc'],
                     ['aaa', 'ccc'],
                     ['<<<<<<<< ', 'aaa', '=======', '>>>>>>> ', 'ccc'])

    def _test_merge_from_strings(self, base, a, b, expected):
        w = self.get_file()
        w.add_lines('text0', [], base.splitlines(True))
        w.add_lines('text1', ['text0'], a.splitlines(True))
        w.add_lines('text2', ['text0'], b.splitlines(True))
        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))
        self.log('merge result:')
        result_text = ''.join(w.weave_merge(p))
        self.log(result_text)
        self.assertEqualDiff(result_text, expected)

    def test_weave_merge_conflicts(self):
        # does weave merge properly handle plans that end with unchanged?
        result = ''.join(self.get_file().weave_merge([('new-a', 'hello\n')]))
        self.assertEqual(result, 'hello\n')

    def test_deletion_extended(self):
        """One side deletes, the other deletes more."""
        self._test_merge_from_strings(base, a, b, result)

    def test_deletion_overlap(self):
        """Delete overlapping regions with no other conflict.

        Arguably it'd be better to treat these as agreement, rather than
        conflict, but for now conflict is safer.
        """
        self._test_merge_from_strings(base, a, b, result)

    def test_agreement_deletion(self):
        """Agree to delete some lines, without conflicts."""
        self._test_merge_from_strings(base, a, b, result)

    def test_sync_on_deletion(self):
        """Specific case of merge where we can synchronize incorrectly.

        A previous version of the weave merge concluded that the two versions
        agreed on deleting line 2, and this could be a synchronization point.
        Line 1 was then considered in isolation, and thought to be deleted on
        both sides.

        It's better to consider the whole thing as a disagreement region.
        """
        base = """\
            start context
            base line 1
            base line 2
            end context
            """
        a = """\
            start context
            base line 1
            a's replacement line 2
            end context
            """
        b = """\
            start context
            b replaces
            both lines
            end context
            """
        result = """\
            start context
<<<<<<< 
            base line 1
            a's replacement line 2
=======
            b replaces
            both lines
>>>>>>> 
            end context
            """
        self._test_merge_from_strings(base, a, b, result)


class TestWeaveMerge(TestCaseWithMemoryTransport, MergeCasesMixin):

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True)

    def log_contents(self, w):
        self.log('weave is:')
        tmpf = StringIO()
        write_weave(w, tmpf)
        self.log(tmpf.getvalue())

    overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======',
                                'xxx', '>>>>>>> ', 'bbb']


class TestContentFactoryAdaption(TestCaseWithMemoryTransport):

    def test_select_adaptor(self):
        """Test expected adapters exist."""
        # One scenario for each lookup combination we expect to use.
        # Each is source_kind, requested_kind, adapter class
        scenarios = [
            ('knit-delta-gz', 'fulltext', _mod_knit.DeltaPlainToFullText),
            ('knit-ft-gz', 'fulltext', _mod_knit.FTPlainToFullText),
            ('knit-annotated-delta-gz', 'knit-delta-gz',
                _mod_knit.DeltaAnnotatedToUnannotated),
            ('knit-annotated-delta-gz', 'fulltext',
                _mod_knit.DeltaAnnotatedToFullText),
            ('knit-annotated-ft-gz', 'knit-ft-gz',
                _mod_knit.FTAnnotatedToUnannotated),
            ('knit-annotated-ft-gz', 'fulltext',
                _mod_knit.FTAnnotatedToFullText),
            ]
        for source, requested, klass in scenarios:
            adapter_factory = versionedfile.adapter_registry.get(
                (source, requested))
            adapter = adapter_factory(None)
            self.assertIsInstance(adapter, klass)

    def get_knit(self, annotated=True):
        mapper = ConstantMapper('knit')
        transport = self.get_transport()
        return make_file_factory(annotated, mapper)(transport)

    def helpGetBytes(self, f, ft_adapter, delta_adapter):
        """Grab the adapted texts we are interested in for the tests."""
        # origin is a fulltext
        entries = f.get_record_stream([('origin',)], 'unordered', False)
        base = entries.next()
        ft_data = ft_adapter.get_bytes(base)
        # merged is both a delta and has multiple parents.
        entries = f.get_record_stream([('merged',)], 'unordered', False)
        merged = entries.next()
        delta_data = delta_adapter.get_bytes(merged)
        return ft_data, delta_data

    def test_deannotation_noeol(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 4 32c2e79763b3f90e8ccde37f9710b6629c25a796\n'
            '1,2,3\nleft\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())

    def test_deannotation(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 3 ed8bce375198ea62444dc71952b22cfc2b09226d\n'
            '2,2,2\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())

    def test_annotated_to_fulltext_no_eol(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_annotated_to_fulltext(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_unannotated_to_fulltext(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_unannotated_to_fulltext_no_eol(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)


class TestKeyMapper(TestCaseWithMemoryTransport):
    """Tests for various key mapping logic."""

    def test_identity_mapper(self):
        mapper = versionedfile.ConstantMapper("inventory")
        self.assertEqual("inventory", mapper.map(('foo@ar',)))
        self.assertEqual("inventory", mapper.map(('quux',)))

    def test_prefix_mapper(self):
        mapper = versionedfile.PrefixMapper()
        self.assertEqual("file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("file-id"))
        self.assertEqual(('new-id',), mapper.unmap("new-id"))

    def test_hash_prefix_mapper(self):
        #format6: hash + plain
        mapper = versionedfile.HashPrefixMapper()
        self.assertEqual("9b/file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("45/new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("9b/file-id"))
        self.assertEqual(('new-id',), mapper.unmap("45/new-id"))

    def test_hash_escaped_mapper(self):
        #knit1: hash + escaped
        mapper = versionedfile.HashEscapedPrefixMapper()
        self.assertEqual("88/%2520", mapper.map((" ", "revision-id")))
        self.assertEqual("ed/fil%2545-%2549d", mapper.map(("filE-Id",
            "revision-id")))
        self.assertEqual("88/ne%2557-%2549d", mapper.map(("neW-Id",
            "revision-id")))
        self.assertEqual(('filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
        self.assertEqual(('neW-Id',), mapper.unmap("88/ne%2557-%2549d"))
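        # The two-character directory prefix prepended by the hash mappers is
        # a small hash of the escaped file id (adler32 & 0xff: for example
        # adler32('file-id') & 0xff == 0x9b), spreading keys across 256
        # buckets; unmap simply discards it.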


class TestVersionedFiles(TestCaseWithMemoryTransport):
    """Tests for the multiple-file variant of VersionedFile."""

    def get_versionedfiles(self, relpath='files'):
        transport = self.get_transport(relpath)
        if relpath != '.':
            transport.mkdir('.')
        files = self.factory(transport)
        if self.cleanup is not None:
            self.addCleanup(self.cleanup, files)
        return files

    def get_simple_key(self, suffix):
        """Return a key for the object under test."""
        if self.key_length == 1:
            return (suffix,)
        else:
            return ('FileA',) + (suffix,)

    def test_add_lines(self):
        f = self.get_versionedfiles()
        key0 = self.get_simple_key('r0')
        key1 = self.get_simple_key('r1')
        key2 = self.get_simple_key('r2')
        keyf = self.get_simple_key('foo')
        f.add_lines(key0, [], ['a\n', 'b\n'])
        if self.graph:
            f.add_lines(key1, [key0], ['b\n', 'c\n'])
        else:
            f.add_lines(key1, [], ['b\n', 'c\n'])
        keys = f.keys()
        self.assertTrue(key0 in keys)
        self.assertTrue(key1 in keys)
        records = []
        for record in f.get_record_stream([key0, key1], 'unordered', True):
            records.append((record.key, record.get_bytes_as('fulltext')))
        records.sort()
        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)

    def test__add_text(self):
        f = self.get_versionedfiles()
        key0 = self.get_simple_key('r0')
        key1 = self.get_simple_key('r1')
        key2 = self.get_simple_key('r2')
        keyf = self.get_simple_key('foo')
        f._add_text(key0, [], 'a\nb\n')
        if self.graph:
            f._add_text(key1, [key0], 'b\nc\n')
        else:
            f._add_text(key1, [], 'b\nc\n')
        keys = f.keys()
        self.assertTrue(key0 in keys)
        self.assertTrue(key1 in keys)
        records = []
        for record in f.get_record_stream([key0, key1], 'unordered', True):
            records.append((record.key, record.get_bytes_as('fulltext')))
        records.sort()
        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)

    def test_annotate(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            prefix = ()
        else:
            prefix = ('FileA',)
        # introduced full text
        origins = files.annotate(prefix + ('origin',))
        self.assertEqual([
            (prefix + ('origin',), 'origin\n')],
            origins)
        origins = files.annotate(prefix + ('base',))
        self.assertEqual([
            (prefix + ('base',), 'base\n')],
            origins)
        origins = files.annotate(prefix + ('merged',))
        if self.graph:
            self.assertEqual([
                (prefix + ('base',), 'base\n'),
                (prefix + ('left',), 'left\n'),
                (prefix + ('right',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        else:
            # Without a graph everything is new.
            self.assertEqual([
                (prefix + ('merged',), 'base\n'),
                (prefix + ('merged',), 'left\n'),
                (prefix + ('merged',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        self.assertRaises(RevisionNotPresent,
            files.annotate, prefix + ('missing-key',))

    def test_check_no_parameters(self):
        files = self.get_versionedfiles()
        files.check()

    def test_check_progressbar_parameter(self):
        """A progress bar can be supplied because check can be a generator."""
        pb = ui.ui_factory.nested_progress_bar()
        self.addCleanup(pb.finished)
        files = self.get_versionedfiles()
        files.check(progress_bar=pb)

    def test_check_with_keys_becomes_generator(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys = files.keys()
        entries = files.check(keys=keys)
        seen = set()
        # Texts output should be fulltexts.
        self.capture_stream(files, entries, seen.add,
            files.get_parent_map(keys), require_fulltext=True)
        # All texts should be output.
        self.assertEqual(set(keys), seen)

    def test_clear_cache(self):
        files = self.get_versionedfiles()
        files.clear_cache()

    def test_construct(self):
        """Each parameterised test can be constructed on a transport."""
        files = self.get_versionedfiles()

    def get_diamond_files(self, files, trailing_eol=True, left_only=False,
        nokeys=False):
        return get_diamond_files(files, self.key_length,
            trailing_eol=trailing_eol, nograph=not self.graph,
            left_only=left_only, nokeys=nokeys)

    def _add_content_nostoresha(self, add_lines):
        """Adding pre-existing content with nostore_sha set raises."""
        vf = self.get_versionedfiles()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            if add_lines:
                sha, _, _ = vf.add_lines(self.get_simple_key(version), [],
                                         lines)
            else:
                sha, _, _ = vf._add_text(self.get_simple_key(version), [],
                                         ''.join(lines))
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            new_key = self.get_simple_key(version + "2")
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, new_key, [], lines,
                nostore_sha=sha)
            self.assertRaises(errors.ExistingContent,
                vf._add_text, new_key, [], ''.join(lines),
                nostore_sha=sha)
            # and no new version should have been added.
            record = vf.get_record_stream([new_key], 'unordered', True).next()
            self.assertEqual('absent', record.storage_kind)

    def test_add_lines_nostoresha(self):
        self._add_content_nostoresha(add_lines=True)

    def test__add_text_nostoresha(self):
        self._add_content_nostoresha(add_lines=False)

    def test_add_lines_return(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
        elif self.key_length == 2:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)

    def test_add_lines_no_key_generates_chk_key(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files, nokeys=True)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
            # Check the added items got CHK keys.
            self.assertEqual(set([
                ('sha1:00e364d235126be43292ab09cb4686cf703ddc17',),
                ('sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',),
                ('sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',),
                ('sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',),
                ('sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',),
                ]),
                files.keys())
        elif self.key_length == 2:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
            # Check the added items got CHK keys.
            self.assertEqual(set([
                ('FileA', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                ('FileA', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                ('FileA', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                ('FileA', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                ('FileA', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
                ('FileB', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                ('FileB', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                ('FileB', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                ('FileB', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                ('FileB', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
                ]),
                files.keys())

    def test_empty_lines(self):
        """Empty files can be stored."""
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], [])
        self.assertEqual('',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), [])
        self.assertEqual('',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))

    def test_newline_only(self):
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))

    def test_get_known_graph_ancestry(self):
        f = self.get_versionedfiles()
        if not self.graph:
            raise TestNotApplicable('ancestry info only relevant with graph.')
        key_a = self.get_simple_key('a')
        key_b = self.get_simple_key('b')
        key_c = self.get_simple_key('c')
        # A
        # |\
        # | B
        # |/
        # C
        f.add_lines(key_a, [], ['\n'])
        f.add_lines(key_b, [key_a], ['\n'])
        f.add_lines(key_c, [key_a, key_b], ['\n'])
        kg = f.get_known_graph_ancestry([key_c])
        self.assertIsInstance(kg, _mod_graph.KnownGraph)
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))

    def test_known_graph_with_fallbacks(self):
        f = self.get_versionedfiles('files')
        if not self.graph:
            raise TestNotApplicable('ancestry info only relevant with graph.')
        if getattr(f, 'add_fallback_versioned_files', None) is None:
            raise TestNotApplicable("%s doesn't support fallbacks"
                                    % (f.__class__.__name__,))
        key_a = self.get_simple_key('a')
        key_b = self.get_simple_key('b')
        key_c = self.get_simple_key('c')
        # A     only in fallback
        # |\
        # | B
        # |/
        # C
        g = self.get_versionedfiles('fallback')
        g.add_lines(key_a, [], ['\n'])
        f.add_fallback_versioned_files(g)
        f.add_lines(key_b, [key_a], ['\n'])
        f.add_lines(key_c, [key_a, key_b], ['\n'])
        kg = f.get_known_graph_ancestry([key_c])
        self.assertEqual([key_a, key_b, key_c], list(kg.topo_sort()))
1786
def test_get_record_stream_empty(self):
1787
"""An empty stream can be requested without error."""
1788
f = self.get_versionedfiles()
1789
entries = f.get_record_stream([], 'unordered', False)
1790
self.assertEqual([], list(entries))

    def assertValidStorageKind(self, storage_kind):
        """Assert that storage_kind is a valid storage_kind."""
        self.assertSubset([storage_kind],
            ['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
             'knit-ft', 'knit-delta', 'chunked', 'fulltext',
             'knit-annotated-ft-gz', 'knit-annotated-delta-gz', 'knit-ft-gz',
             'knit-delta-gz',
             'knit-delta-closure', 'knit-delta-closure-ref',
             'groupcompress-block', 'groupcompress-block-ref'])
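
    # A quick gloss on these kinds (as exercised throughout this class):
    # 'fulltext' is the text as one byte string and 'chunked' the same
    # content as a list of byte strings; the 'knit-*' and
    # 'groupcompress-block*' kinds are serialised native formats that can go
    # over the wire as-is; 'mpdiff' is a multi-parent diff which, per the
    # tests below, no record stream currently emits. A consumer that only
    # understands fulltexts can do, for example:
    #
    #   for record in files.get_record_stream(keys, 'unordered', True):
    #       if record.storage_kind != 'absent':
    #           text = record.get_bytes_as('fulltext')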

    def capture_stream(self, f, entries, on_seen, parents,
        require_fulltext=False):
        """Capture a stream for testing."""
        for factory in entries:
            on_seen(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            if factory.sha1 is not None:
                self.assertEqual(f.get_sha1s([factory.key])[factory.key],
                    factory.sha1)
            self.assertEqual(parents[factory.key], factory.parents)
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
            if require_fulltext:
                factory.get_bytes_as('fulltext')

    def test_get_record_stream_interface(self):
        """Each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, entries, seen.add, parent_map)
        self.assertEqual(set(keys), seen)

    def get_keys_and_sort_order(self):
        """Get diamond test keys list, and their sort ordering."""
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
            sort_order = {('merged',):2, ('left',):1, ('right',):1, ('base',):0}
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
            sort_order = {
                ('FileA', 'merged'):2, ('FileA', 'left'):1, ('FileA', 'right'):1,
                ('FileA', 'base'):0,
                ('FileB', 'merged'):2, ('FileB', 'left'):1, ('FileB', 'right'):1,
                ('FileB', 'base'):0,
                }
        return keys, sort_order

    def get_keys_and_groupcompress_sort_order(self):
        """Get diamond test keys list, and their groupcompress sort ordering."""
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
            sort_order = {('merged',):0, ('left',):1, ('right',):1, ('base',):2}
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
            sort_order = {
                ('FileA', 'merged'):0, ('FileA', 'left'):1, ('FileA', 'right'):1,
                ('FileA', 'base'):2,
                ('FileB', 'merged'):3, ('FileB', 'left'):4, ('FileB', 'right'):4,
                ('FileB', 'base'):5,
                }
        return keys, sort_order
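
    # For reference, the diamond shape these helpers describe (per prefix,
    # with an unrelated 'origin' text alongside):
    #
    #       base
    #      /    \
    #   left    right
    #      \    /
    #      merged
    #
    # Topological order only requires base before left/right before merged;
    # groupcompress order is roughly newest-first per prefix, hence the
    # near-reversed positions above.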

    def test_get_record_stream_interface_ordered(self):
        """Each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', False)
        seen = []
        self.capture_stream(files, entries, seen.append, parent_map)
        self.assertStreamOrder(sort_order, seen, keys)

    def test_get_record_stream_interface_ordered_with_delta_closure(self):
        """Each item must be accessible as a fulltext."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', True)
        seen = []
        for factory in entries:
            seen.append(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertSubset([factory.sha1],
                [None, files.get_sha1s([factory.key])[factory.key]])
            self.assertEqual(parent_map[factory.key], factory.parents)
            ft_bytes = factory.get_bytes_as('fulltext')
            self.assertIsInstance(ft_bytes, str)
            chunked_bytes = factory.get_bytes_as('chunked')
            self.assertEqualDiff(ft_bytes, ''.join(chunked_bytes))

        self.assertStreamOrder(sort_order, seen, keys)

    def test_get_record_stream_interface_groupcompress(self):
        """Each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_groupcompress_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'groupcompress', False)
        seen = []
        self.capture_stream(files, entries, seen.append, parent_map)
        self.assertStreamOrder(sort_order, seen, keys)

    def assertStreamOrder(self, sort_order, seen, keys):
        self.assertEqual(len(set(seen)), len(keys))
        if self.key_length == 1:
            lows = {():0}
        else:
            lows = {('FileA',):0, ('FileB',):0}
        if not self.graph:
            self.assertEqual(set(keys), set(seen))
        else:
            for key in seen:
                sort_pos = sort_order[key]
                self.assertTrue(sort_pos >= lows[key[:-1]],
                    "Out of order in sorted stream: %r, %r" % (key, seen))
                lows[key[:-1]] = sort_pos

    def test_get_record_stream_unknown_storage_kind_raises(self):
        """Asking for a storage kind that the stream cannot supply raises."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        # We track the contents because we should be able to try, fail a
        # particular kind and then ask for one that works and continue.
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            if factory.sha1 is not None:
                self.assertEqual(files.get_sha1s([factory.key])[factory.key],
                    factory.sha1)
            self.assertEqual(parent_map[factory.key], factory.parents)
            # currently no stream emits mpdiff
            self.assertRaises(errors.UnavailableRepresentation,
                factory.get_bytes_as, 'mpdiff')
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
        self.assertEqual(set(keys), seen)

    def test_get_record_stream_missing_records_are_absent(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('absent',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'absent'), ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'absent'), ('FileB', 'base'),
                ('absent', 'absent'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)
        entries = files.get_record_stream(keys, 'topological', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)

    def assertRecordHasContent(self, record, bytes):
        """Assert that record's content is exactly the byte string bytes."""
        self.assertEqual(bytes, record.get_bytes_as('fulltext'))
        self.assertEqual(bytes, ''.join(record.get_bytes_as('chunked')))

    def test_get_record_stream_native_formats_are_wire_ready_one_ft(self):
        files = self.get_versionedfiles()
        key = self.get_simple_key('foo')
        files.add_lines(key, (), ['my text\n', 'content'])
        stream = files.get_record_stream([key], 'unordered', False)
        record = stream.next()
        if record.storage_kind in ('chunked', 'fulltext'):
            # chunked and fulltext representations are for direct use not wire
            # serialisation: check they are able to be used directly. To send
            # such records over the wire translation will be needed.
            self.assertRecordHasContent(record, "my text\ncontent")
        else:
            bytes = [record.get_bytes_as(record.storage_kind)]
            network_stream = versionedfile.NetworkRecordStream(bytes).read()
            source_record = record
            records = []
            for record in network_stream:
                records.append(record)
                self.assertEqual(source_record.storage_kind,
                    record.storage_kind)
                self.assertEqual(source_record.parents, record.parents)
                self.assertEqual(
                    source_record.get_bytes_as(source_record.storage_kind),
                    record.get_bytes_as(record.storage_kind))
            self.assertEqual(1, len(records))
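
    # The wire round-trip above, in brief: a record in a native storage kind
    # serialises with record.get_bytes_as(record.storage_kind), and
    # versionedfile.NetworkRecordStream(bytes).read() yields equivalent
    # record factories on the far side; only chunked/fulltext records need
    # translating before transmission.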

    def assertStreamMetaEqual(self, records, expected, stream):
        """Assert that streams expected and stream have the same records.

        :param records: A list to collect the seen records.
        :return: A generator of the records in stream.
        """
        # We make assertions during copying to catch things early for
        # easier debugging.
        for record, ref_record in izip(stream, expected):
            records.append(record)
            self.assertEqual(ref_record.key, record.key)
            self.assertEqual(ref_record.storage_kind, record.storage_kind)
            self.assertEqual(ref_record.parents, record.parents)
            yield record

    def stream_to_bytes_or_skip_counter(self, skipped_records, full_texts,
        stream):
        """Convert a stream to a bytes iterator.

        :param skipped_records: A list with one element to increment when a
            record is skipped.
        :param full_texts: A dict from key->fulltext representation, for
            checking chunked or fulltext stored records.
        :param stream: A record_stream.
        :return: An iterator over the bytes of each record.
        """
        for record in stream:
            if record.storage_kind in ('chunked', 'fulltext'):
                skipped_records[0] += 1
                # check the content is correct for direct use.
                self.assertRecordHasContent(record, full_texts[record.key])
            else:
                yield record.get_bytes_as(record.storage_kind)
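
    # Why skip chunked/fulltext here: as noted in the tests above, those
    # kinds are for direct use rather than wire serialisation, so this helper
    # verifies their content locally and counts them instead of yielding
    # bytes for them.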

    def test_get_record_stream_native_formats_are_wire_ready_ft_delta(self):
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key, key_delta], 'unordered', False)
        ref = files.get_record_stream([key, key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key: "my text\ncontent",
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object so
        # we can check the content was carried across correctly without doing
        # delta inspection.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire thank you!
        self.assertEqual(2, len(records) + skipped_records[0])
        if len(records):
            # if any content was copied, it must all have been copied.
            self.assertIdenticalVersionedFile(files, target_files)

    def test_get_record_stream_native_formats_are_wire_ready_delta(self):
        # copy a delta over the wire
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        # Copy the basis text across so we can reconstruct the delta during
        # insertion into target.
        target_files.insert_record_stream(files.get_record_stream([key],
            'unordered', False))
        local = files.get_record_stream([key_delta], 'unordered', False)
        ref = files.get_record_stream([key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object so
        # we can check the content was carried across correctly without doing
        # delta inspection during check_stream.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire thank you!
        self.assertEqual(1, len(records) + skipped_records[0])
        if len(records):
            # if any content was copied, it must all have been copied.
            self.assertIdenticalVersionedFile(files, target_files)

    def test_get_record_stream_wire_ready_delta_closure_included(self):
        # copy a delta over the wire with the ability to get its full text.
        files = self.get_versionedfiles()
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key_delta], 'unordered', True)
        ref = files.get_record_stream([key_delta], 'unordered', True)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # pull the records back off the network and check the content came
        # across correctly, without doing delta inspection.
        for record in self.assertStreamMetaEqual(records, ref, network_stream):
            # we have to be able to get the full text out:
            self.assertRecordHasContent(record, full_texts[record.key])
        # No duplicates on the wire thank you!
        self.assertEqual(1, len(records) + skipped_records[0])

    def assertAbsentRecord(self, files, keys, parents, entries):
        """Helper for test_get_record_stream_missing_records_are_absent."""
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            if factory.key[-1] == 'absent':
                self.assertEqual('absent', factory.storage_kind)
                self.assertEqual(None, factory.sha1)
                self.assertEqual(None, factory.parents)
            else:
                self.assertValidStorageKind(factory.storage_kind)
                if factory.sha1 is not None:
                    sha1 = files.get_sha1s([factory.key])[factory.key]
                    self.assertEqual(sha1, factory.sha1)
                self.assertEqual(parents[factory.key], factory.parents)
                self.assertIsInstance(
                    factory.get_bytes_as(factory.storage_kind), str)
        self.assertEqual(set(keys), seen)

    def test_filter_absent_records(self):
        """Requested missing records can be filtered out trivially."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        # Add an absent record in the middle of the present keys. (We don't ask
        # for just absent keys to ensure that content before and after the
        # absent keys is still delivered).
        present_keys = list(keys)
        if self.key_length == 1:
            keys.insert(2, ('extra',))
        else:
            keys.insert(2, ('extra', 'extra'))
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, versionedfile.filter_absent(entries),
            seen.add, parent_map)
        self.assertEqual(set(present_keys), seen)

    def get_mapper(self):
        """Get a mapper suitable for the key length of the test interface."""
        if self.key_length == 1:
            return ConstantMapper('source')
        else:
            return HashEscapedPrefixMapper()

    def get_parents(self, parents):
        """Get parents, taking self.graph into consideration."""
        if self.graph:
            return parents
        else:
            return None

    def test_get_annotator(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        origin_key = self.get_simple_key('origin')
        base_key = self.get_simple_key('base')
        left_key = self.get_simple_key('left')
        right_key = self.get_simple_key('right')
        merged_key = self.get_simple_key('merged')
        # introduced full text
        origins, lines = files.get_annotator().annotate(origin_key)
        self.assertEqual([(origin_key,)], origins)
        self.assertEqual(['origin\n'], lines)
        # a delta
        origins, lines = files.get_annotator().annotate(base_key)
        self.assertEqual([(base_key,)], origins)
        self.assertEqual(['base\n'], lines)
        # a merge
        origins, lines = files.get_annotator().annotate(merged_key)
        if self.graph:
            self.assertEqual([
                (base_key,),
                (left_key,),
                (right_key,),
                (merged_key,),
                ], origins)
        else:
            # Without a graph everything is new.
            self.assertEqual([
                (merged_key,),
                (merged_key,),
                (merged_key,),
                (merged_key,),
                ], origins)
        self.assertRaises(RevisionNotPresent,
            files.get_annotator().annotate, self.get_simple_key('missing-key'))

    def test_get_parent_map(self):
        files = self.get_versionedfiles()
        if self.key_length == 1:
            parent_details = [
                (('r0',), self.get_parents(())),
                (('r1',), self.get_parents((('r0',),))),
                (('r2',), self.get_parents(())),
                (('r3',), self.get_parents(())),
                (('m',), self.get_parents((('r0',),('r1',),('r2',),('r3',)))),
                ]
        else:
            parent_details = [
                (('FileA', 'r0'), self.get_parents(())),
                (('FileA', 'r1'), self.get_parents((('FileA', 'r0'),))),
                (('FileA', 'r2'), self.get_parents(())),
                (('FileA', 'r3'), self.get_parents(())),
                (('FileA', 'm'), self.get_parents((('FileA', 'r0'),
                    ('FileA', 'r1'), ('FileA', 'r2'), ('FileA', 'r3')))),
                ]
        for key, parents in parent_details:
            files.add_lines(key, parents, [])
            # immediately after adding it should be queryable.
            self.assertEqual({key:parents}, files.get_parent_map([key]))
        # We can ask for an empty set
        self.assertEqual({}, files.get_parent_map([]))
        # We can ask for many keys
        all_parents = dict(parent_details)
        self.assertEqual(all_parents, files.get_parent_map(all_parents.keys()))
        # Absent keys are just not included in the result.
        keys = all_parents.keys()
        if self.key_length == 1:
            keys.insert(1, ('missing',))
        else:
            keys.insert(1, ('missing', 'missing'))
        # Absent keys are just ignored
        self.assertEqual(all_parents, files.get_parent_map(keys))
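
    # get_parent_map in one line: it answers what it can and silently omits
    # unknown keys rather than raising, so callers detect absence with e.g.:
    #
    #   present = files.get_parent_map(candidate_keys)
    #   missing = [k for k in candidate_keys if k not in present]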

    def test_get_sha1s(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('base',), ('origin',), ('left',), ('merged',), ('right',)]
        else:
            # ask for shas from different prefixes.
            keys = [
                ('FileA', 'base'), ('FileB', 'origin'), ('FileA', 'left'),
                ('FileA', 'merged'), ('FileB', 'right'),
                ]
        self.assertEqual({
            keys[0]: '51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
            keys[1]: '00e364d235126be43292ab09cb4686cf703ddc17',
            keys[2]: 'a8478686da38e370e32e42e8a0c220e33ee9132f',
            keys[3]: 'ed8bce375198ea62444dc71952b22cfc2b09226d',
            keys[4]: '9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
            },
            files.get_sha1s(keys))

    def test_insert_record_stream_empty(self):
        """Inserting an empty record stream should work."""
        files = self.get_versionedfiles()
        files.insert_record_stream([])

    def assertIdenticalVersionedFile(self, expected, actual):
        """Assert that left and right have the same contents."""
        self.assertEqual(set(actual.keys()), set(expected.keys()))
        actual_parents = actual.get_parent_map(actual.keys())
        if self.graph:
            self.assertEqual(actual_parents,
                expected.get_parent_map(expected.keys()))
        else:
            for key, parents in actual_parents.items():
                self.assertEqual(None, parents)
        for key in actual.keys():
            actual_text = actual.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            expected_text = expected.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            self.assertEqual(actual_text, expected_text)

    def test_insert_record_stream_fulltexts(self):
        """Any file should accept a stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_fulltexts_noeol(self):
        """Any file should accept a stream of fulltexts without trailing EOLs."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_annotated_knits(self):
        """Any file should accept a stream from annotated knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_annotated_knits_noeol(self):
        """Any file should accept a stream from annotated knits without
        trailing EOLs."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_plain_knits(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_plain_knits_noeol(self):
        """Any file should accept a stream from plain knits without
        trailing EOLs."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_existing_keys(self):
        """Inserting keys already in a file should not error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        # insert some keys into files.
        self.get_diamond_files(files, left_only=True)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_missing_keys(self):
        """Inserting a stream with absent keys should raise an error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        stream = source.get_record_stream([('missing',) * self.key_length],
            'topological', False)
        self.assertRaises(errors.RevisionNotPresent, files.insert_record_stream,
            stream)

    def test_insert_record_stream_out_of_order(self):
        """An out of order stream can either error or work."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        if self.key_length == 1:
            origin_keys = [('origin',)]
            end_keys = [('merged',), ('left',)]
            start_keys = [('right',), ('base',)]
        else:
            origin_keys = [('FileA', 'origin'), ('FileB', 'origin')]
            end_keys = [('FileA', 'merged',), ('FileA', 'left',),
                ('FileB', 'merged',), ('FileB', 'left',)]
            start_keys = [('FileA', 'right',), ('FileA', 'base',),
                ('FileB', 'right',), ('FileB', 'base',)]
        origin_entries = source.get_record_stream(origin_keys, 'unordered', False)
        end_entries = source.get_record_stream(end_keys, 'topological', False)
        start_entries = source.get_record_stream(start_keys, 'topological', False)
        entries = chain(origin_entries, end_entries, start_entries)
        try:
            files.insert_record_stream(entries)
        except RevisionNotPresent:
            # Must not have corrupted the file.
            files.check()
        else:
            self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_long_parent_chain_out_of_order(self):
        """An out of order stream can either error or work."""
        if not self.graph:
            raise TestNotApplicable('ancestry info only relevant with graph.')
        # Create a reasonably long chain of records based on each other, where
        # most will be deltas.
        source = self.get_versionedfiles('source')
        parents = ()
        keys = []
        content = [('same same %d\n' % n) for n in range(500)]
        for letter in 'abcdefghijklmnopqrstuvwxyz':
            key = ('key-' + letter,)
            if self.key_length == 2:
                key = ('prefix',) + key
            content.append('content for ' + letter + '\n')
            source.add_lines(key, parents, content)
            keys.append(key)
            parents = (key,)
        # Create a stream of these records, excluding the first record that the
        # rest ultimately depend upon, and insert it into a new vf.
        streams = []
        for key in reversed(keys):
            streams.append(source.get_record_stream([key], 'unordered', False))
        deltas = chain(*streams[:-1])
        files = self.get_versionedfiles()
        try:
            files.insert_record_stream(deltas)
        except RevisionNotPresent:
            # Must not have corrupted the file.
            files.check()
        else:
            # Must only report either just the first key as a missing parent,
            # or no key as missing (for nodelta scenarios).
            missing = set(files.get_missing_compression_parent_keys())
            missing.discard(keys[0])
            self.assertEqual(set(), missing)

    def get_knit_delta_source(self):
        """Get a source that can produce a stream with knit delta records,
        regardless of this test's scenario.
        """
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        get_diamond_files(source, self.key_length, trailing_eol=True,
            nograph=False, left_only=False)
        return source

    def test_insert_record_stream_delta_missing_basis_no_corruption(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis. In the meantime a record missing its basis is
        not added.
        """
        source = self.get_knit_delta_source()
        keys = [self.get_simple_key('origin'), self.get_simple_key('merged')]
        entries = source.get_record_stream(keys, 'unordered', False)
        files = self.get_versionedfiles()
        if self.support_partial_insertion:
            self.assertEqual([],
                list(files.get_missing_compression_parent_keys()))
            files.insert_record_stream(entries)
            missing_bases = files.get_missing_compression_parent_keys()
            self.assertEqual(set([self.get_simple_key('left')]),
                set(missing_bases))
            self.assertEqual(set(keys), set(files.get_parent_map(keys)))
        else:
            self.assertRaises(
                errors.RevisionNotPresent, files.insert_record_stream, entries)
            files.check()

    def test_insert_record_stream_delta_missing_basis_can_be_added_later(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis. That basis can be added in a second
        insert_record_stream call that does not need to repeat records present
        in the previous stream. The record(s) that required that basis are
        fully inserted once their basis is no longer missing.
        """
        if not self.support_partial_insertion:
            raise TestNotApplicable(
                'versioned file scenario does not support partial insertion')
        source = self.get_knit_delta_source()
        entries = source.get_record_stream([self.get_simple_key('origin'),
            self.get_simple_key('merged')], 'unordered', False)
        files = self.get_versionedfiles()
        files.insert_record_stream(entries)
        missing_bases = files.get_missing_compression_parent_keys()
        self.assertEqual(set([self.get_simple_key('left')]),
            set(missing_bases))
        # 'merged' is inserted (although a commit of a write group involving
        # this versionedfiles would fail).
        merged_key = self.get_simple_key('merged')
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())
        # Add the full delta closure of the missing records
        missing_entries = source.get_record_stream(
            missing_bases, 'unordered', True)
        files.insert_record_stream(missing_entries)
        # Now 'merged' is fully inserted (and a commit would succeed).
        self.assertEqual([], list(files.get_missing_compression_parent_keys()))
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())
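
    # Partial insertion in brief: a delta arriving before its compression
    # basis is buffered and reported by get_missing_compression_parent_keys();
    # streaming in the delta closure of those bases later completes the
    # buffered records without resending anything already sent.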

    def test_iter_lines_added_or_present_in_keys(self):
        # test that we get at least an equal set of the lines added by
        # versions in the store.
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.ProgressTask):

            def __init__(self):
                progress.ProgressTask.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        files = self.get_versionedfiles()
        # add a base to get included
        files.add_lines(self.get_simple_key('base'), (), ['base\n'])
        # add an ancestor to be included on one side
        files.add_lines(self.get_simple_key('lancestor'), (), ['lancestor\n'])
        # add an ancestor to be included on the other side
        files.add_lines(self.get_simple_key('rancestor'),
            self.get_parents([self.get_simple_key('base')]), ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        files.add_lines(self.get_simple_key('child'),
            self.get_parents([self.get_simple_key('rancestor')]),
            ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        files.add_lines(self.get_simple_key('otherchild'),
            self.get_parents([self.get_simple_key('lancestor'),
                self.get_simple_key('base')]),
            ['base\n', 'lancestor\n', 'otherchild\n'])

        def iter_with_keys(keys, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in files.iter_lines_added_or_present_in_keys(keys,
                    pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines

        lines = iter_with_keys(
            [self.get_simple_key('child'), self.get_simple_key('otherchild')],
            [('Walking content', 0, 2),
             ('Walking content', 1, 2),
             ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
        # we don't care if we got more than that.

        # test all lines
        lines = iter_with_keys(files.keys(),
            [('Walking content', 0, 5),
             ('Walking content', 1, 5),
             ('Walking content', 2, 5),
             ('Walking content', 3, 5),
             ('Walking content', 4, 5),
             ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', self.get_simple_key('base'))] > 0)
        self.assertTrue(
            lines[('lancestor\n', self.get_simple_key('lancestor'))] > 0)
        self.assertTrue(
            lines[('rancestor\n', self.get_simple_key('rancestor'))] > 0)
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)

    def test_make_mpdiffs(self):
        from bzrlib import multiparent
        files = self.get_versionedfiles('source')
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 26 insertions
        files.add_lines(self.get_simple_key('base'), [], ['line\n'])
        files.add_lines(self.get_simple_key('noeol'),
            self.get_parents([self.get_simple_key('base')]), ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        files.add_lines(self.get_simple_key('noeolsecond'),
            self.get_parents([self.get_simple_key('noeol')]),
            ['line\n', 'line'])
        # differing last line with parent, both no-eol
        files.add_lines(self.get_simple_key('noeolnotshared'),
            self.get_parents([self.get_simple_key('noeolsecond')]),
            ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        files.add_lines(self.get_simple_key('eol'),
            self.get_parents([self.get_simple_key('noeol')]), ['phone\n'])
        # add eol following a noneol parent, no change content
        files.add_lines(self.get_simple_key('eolline'),
            self.get_parents([self.get_simple_key('noeol')]), ['line\n'])
        # noeol with no parents:
        files.add_lines(self.get_simple_key('noeolbase'), [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        files.add_lines(self.get_simple_key('eolbeforefirstparent'),
            self.get_parents([self.get_simple_key('noeolbase'),
                self.get_simple_key('noeol')]),
            ['line'])
        # two identical no-eol texts
        files.add_lines(self.get_simple_key('noeoldup'),
            self.get_parents([self.get_simple_key('noeol')]), ['line'])
        next_parent = self.get_simple_key('base')
        text_name = 'chain1-'
        text = ['line\n']
        sha1s = {0 :'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1 :'45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2 :'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3 :'26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4 :'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5 :'d63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6 :'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7 :'95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8 :'779e9a0b28f9f832528d4b21e17e168c67697272',
                 9 :'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10:'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11:'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12:'31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13:'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14:'2c4b1736566b8ca6051e668de68650686a3922f2',
                 15:'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16:'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17:'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18:'5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19:'1ebed371807ba5935958ad0884595126e8c4e823',
                 20:'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21:'01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22:'d8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23:'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24:'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25:'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        next_parent = self.get_simple_key('base')
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        target = self.get_versionedfiles('target')
        for key in multiparent.topo_iter_keys(files, files.keys()):
            mpdiff = files.make_mpdiffs([key])[0]
            parents = files.get_parent_map([key])[key] or []
            target.add_mpdiffs(
                [(key, parents, files.get_sha1s([key])[key], mpdiff)])
            self.assertEqualDiff(
                files.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'),
                target.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'))
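
    # The mpdiff round-trip above, in brief: walking keys in topological
    # order,
    #
    #   mpdiff = files.make_mpdiffs([key])[0]
    #   target.add_mpdiffs([(key, parents, sha1, mpdiff)])
    #
    # must reproduce a byte-identical fulltext for key in target.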

    def test_keys(self):
        # While use is discouraged, versions() is still needed by aspects of
        # bzr.
        files = self.get_versionedfiles()
        self.assertEqual(set(), set(files.keys()))
        if self.key_length == 1:
            key = ('foo',)
        else:
            key = ('foo', 'bar',)
        files.add_lines(key, (), [])
        self.assertEqual(set([key]), set(files.keys()))


class VirtualVersionedFilesTests(TestCase):
    """Basic tests for the VirtualVersionedFiles implementations."""

    def _get_parent_map(self, keys):
        ret = {}
        for k in keys:
            if k in self._parent_map:
                ret[k] = self._parent_map[k]
        return ret

    def setUp(self):
        TestCase.setUp(self)
        self._lines = {}
        self._parent_map = {}
        self.texts = VirtualVersionedFiles(self._get_parent_map,
                                           self._lines.get)

    def test_add_lines(self):
        self.assertRaises(NotImplementedError,
                self.texts.add_lines, "foo", [], [])

    def test_add_mpdiffs(self):
        self.assertRaises(NotImplementedError,
                self.texts.add_mpdiffs, [])

    def test_check_noerrors(self):
        self.texts.check()

    def test_insert_record_stream(self):
        self.assertRaises(NotImplementedError, self.texts.insert_record_stream,
                          [])

    def test_get_sha1s_nonexistent(self):
        self.assertEquals({}, self.texts.get_sha1s([("NONEXISTENT",)]))

    def test_get_sha1s(self):
        self._lines["key"] = ["dataline1", "dataline2"]
        self.assertEquals({("key",): osutils.sha_strings(self._lines["key"])},
                          self.texts.get_sha1s([("key",)]))

    def test_get_parent_map(self):
        self._parent_map = {"G": ("A", "B")}
        self.assertEquals({("G",): (("A",),("B",))},
                          self.texts.get_parent_map([("G",), ("L",)]))

    def test_get_record_stream(self):
        self._lines["A"] = ["FOO", "BAR"]
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("chunked", record.storage_kind)
        self.assertEquals("FOOBAR", record.get_bytes_as("fulltext"))
        self.assertEquals(["FOO", "BAR"], record.get_bytes_as("chunked"))

    def test_get_record_stream_absent(self):
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("absent", record.storage_kind)

    def test_iter_lines_added_or_present_in_keys(self):
        self._lines["A"] = ["FOO", "BAR"]
        self._lines["B"] = ["HEY"]
        self._lines["C"] = ["Alberta"]
        it = self.texts.iter_lines_added_or_present_in_keys([("A",), ("B",)])
        self.assertEquals(sorted([("FOO", "A"), ("BAR", "A"), ("HEY", "B")]),
                          sorted(list(it)))
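
    # A note on the class under test: VirtualVersionedFiles is a read-only
    # view built from a parent-map callable and a line-lookup callable (here
    # self._get_parent_map and self._lines.get); the mutating operations all
    # raise NotImplementedError by design, as the tests above assert.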


class TestOrderingVersionedFilesDecorator(TestCaseWithMemoryTransport):

    def get_ordering_vf(self, key_priority):
        builder = self.make_branch_builder('test')
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'TREE_ROOT', 'directory', None))])
        builder.build_snapshot('B', ['A'], [])
        builder.build_snapshot('C', ['B'], [])
        builder.build_snapshot('D', ['C'], [])
        builder.finish_series()
        b = builder.get_branch()
        b.lock_read()
        self.addCleanup(b.unlock)
        vf = b.repository.inventories
        return versionedfile.OrderingVersionedFilesDecorator(vf, key_priority)

    def test_get_empty(self):
        vf = self.get_ordering_vf({})
        self.assertEqual([], vf.calls)

    def test_get_record_stream_topological(self):
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'topological', False)]
        # We should have gotten the keys in topological order
        self.assertEqual([('A',), ('B',), ('C',), ('D',)], keys)
        # And recorded that the request was made
        self.assertEqual([('get_record_stream', request_keys, 'topological',
                           False)], vf.calls)

    def test_get_record_stream_ordered(self):
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'unordered', False)]
        # They should be returned based on their priority
        self.assertEqual([('D',), ('B',), ('A',), ('C',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)

    def test_get_record_stream_implicit_order(self):
        vf = self.get_ordering_vf({('B',): 2, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'unordered', False)]
        # A and C are not in the map, so they get sorted to the front. A comes
        # before C alphabetically, so it comes back first.
        self.assertEqual([('A',), ('C',), ('D',), ('B',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)