# Copyright (C) 2005, 2009 Canonical Ltd
#
# Author: Johan Rydberg <jrydberg@gnu.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

# TODO: might be nice to create a versionedfile with some type of corruption
# considered typical and check that it can be detected/corrected.

from itertools import chain, izip
from StringIO import StringIO

from bzrlib import (
    errors,
    knit as _mod_knit,
    osutils,
    progress,
    )
from bzrlib.errors import (
    RevisionNotPresent,
    RevisionAlreadyPresent,
    )
from bzrlib.knit import (
    cleanup_pack_knit,
    make_file_factory,
    make_pack_factory,
    )
from bzrlib.tests import (
    TestCaseWithMemoryTransport,
    TestSkipped,
    condition_isinstance,
    multiply_tests,
    split_suite_by_condition,
    )
from bzrlib.tests.http_utils import TestCaseWithWebserver
from bzrlib.trace import mutter
from bzrlib.transport import get_transport
from bzrlib.transport.memory import MemoryTransport
from bzrlib.tsort import topo_sort
from bzrlib.tuned_gzip import GzipFile
import bzrlib.versionedfile as versionedfile
from bzrlib.versionedfile import (
    ConstantMapper,
    HashEscapedPrefixMapper,
    PrefixMapper,
    VirtualVersionedFiles,
    make_versioned_files_factory,
    )
from bzrlib.weave import WeaveFile
from bzrlib.weavefile import read_weave, write_weave


def load_tests(standard_tests, module, loader):
    """Parameterize VersionedFiles tests for different implementations."""
    to_adapt, result = split_suite_by_condition(
        standard_tests, condition_isinstance(TestVersionedFiles))
    # We want to be sure of behaviour for:
    # weaves prefix layout (weave texts)
    # individually named weaves (weave inventories)
    # annotated knits - prefix|hash|hash-escape layout, we test the third only
    #        as it is the most complex mapper.
    # individually named knits
    # individual no-graph knits in packs (signatures)
    # individual graph knits in packs (inventories)
    # individual graph nocompression knits in packs (revisions)
    # plain text knits in packs (texts)
    len_one_scenarios = [
        ('weave-named', {
            'cleanup':None,
            'factory':make_versioned_files_factory(WeaveFile,
                ConstantMapper('inventory')),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-knit', {
            'cleanup':None,
            'factory':make_file_factory(False, ConstantMapper('revisions')),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-nograph-nodelta-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(False, False, 1),
            'graph':False,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-graph-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, True, 1),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': True,
            }),
        ('named-graph-nodelta-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, False, 1),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ]
    len_two_scenarios = [
        ('weave-prefix', {
            'cleanup':None,
            'factory':make_versioned_files_factory(WeaveFile,
                PrefixMapper()),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': False,
            }),
        ('annotated-knit-escape', {
            'cleanup':None,
            'factory':make_file_factory(True, HashEscapedPrefixMapper()),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': False,
            }),
        ('plain-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, True, 2),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': True,
            }),
        ]
    scenarios = len_one_scenarios + len_two_scenarios
    return multiply_tests(to_adapt, scenarios, result)
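
# Each scenario dict above is applied to a copy of every TestVersionedFiles
# test by multiply_tests, so attributes such as self.factory, self.cleanup,
# self.graph, self.key_length and self.support_partial_insertion vary per
# parameterised test run.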


def get_diamond_vf(f, trailing_eol=True, left_only=False):
    """Get a diamond graph to exercise deltas and merges.

    :param trailing_eol: If True end the last line with \n.
    :param left_only: If True do not add the right and merged nodes.
    """
    parents = {
        'origin': (),
        'base': (('origin',),),
        'left': (('base',),),
        'right': (('base',),),
        'merged': (('left',), ('right',)),
        }
    # insert a diamond graph to exercise deltas and merges.
    if trailing_eol:
        last_char = '\n'
    else:
        last_char = ''
    f.add_lines('origin', [], ['origin' + last_char])
    f.add_lines('base', ['origin'], ['base' + last_char])
    f.add_lines('left', ['base'], ['base\n', 'left' + last_char])
    if not left_only:
        f.add_lines('right', ['base'],
            ['base\n', 'right' + last_char])
        f.add_lines('merged', ['left', 'right'],
            ['base\n', 'left\n', 'right\n', 'merged' + last_char])
    return f, parents
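
# The diamond graph built by the helpers above and below:
#
#       origin
#         |
#        base
#        /  \
#     left  right
#        \  /
#       merged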


def get_diamond_files(files, key_length, trailing_eol=True, left_only=False,
    nograph=False, nokeys=False):
    """Get a diamond graph to exercise deltas and merges.

    This creates a 5-node graph in files. If files supports 2-length keys two
    graphs are made to exercise the support for multiple ids.

    :param trailing_eol: If True end the last line with \n.
    :param key_length: The length of keys in files. Currently supports length 1
        and 2 keys.
    :param left_only: If True do not add the right and merged nodes.
    :param nograph: If True, do not provide parents to the add_lines calls;
        this is useful for tests that need inserted data but have graphless
        stores.
    :param nokeys: If True, pass None in as the key for all insertions.
        Currently implies nograph.
    :return: The results of the add_lines calls.
    """
    if nokeys:
        nograph = True
    if key_length == 1:
        prefixes = [()]
    else:
        prefixes = [('FileA',), ('FileB',)]
    # insert a diamond graph to exercise deltas and merges.
    if trailing_eol:
        last_char = '\n'
    else:
        last_char = ''
    result = []
    def get_parents(suffix_list):
        if nograph:
            return ()
        else:
            result = [prefix + suffix for suffix in suffix_list]
            return result
    def get_key(suffix):
        if nokeys:
            return (None,)
        else:
            return (suffix,)
    # we loop over each key because that spreads the inserts across prefixes,
    # which is how commit operates.
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('origin'), (),
            ['origin' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('base'),
            get_parents([('origin',)]), ['base' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('left'),
            get_parents([('base',)]),
            ['base\n', 'left' + last_char]))
    if not left_only:
        for prefix in prefixes:
            result.append(files.add_lines(prefix + get_key('right'),
                get_parents([('base',)]),
                ['base\n', 'right' + last_char]))
        for prefix in prefixes:
            result.append(files.add_lines(prefix + get_key('merged'),
                get_parents([('left',), ('right',)]),
                ['base\n', 'left\n', 'right\n', 'merged' + last_char]))
    return result
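
# For example, with key_length=2 the loops above insert each node once per
# prefix, producing two parallel diamonds keyed ('FileA', 'origin'),
# ('FileB', 'origin') and so on, over identical line contents.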


class VersionedFileTestMixIn(object):
    """A mixin test class for testing VersionedFiles.

    This is not an adaptor-style test at this point because
    there's no dynamic substitution of versioned file implementations;
    they are strictly controlled by their owning repositories.
    """

    def get_transaction(self):
        if not hasattr(self, '_transaction'):
            self._transaction = None
        return self._transaction

    def test_add(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
        def verify_file(f):
            versions = f.versions()
            self.assertTrue('r0' in versions)
            self.assertTrue('r1' in versions)
            self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
            self.assertEquals(f.get_text('r0'), 'a\nb\n')
            self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
            self.assertEqual(2, len(f))
            self.assertEqual(2, f.num_versions())
            self.assertRaises(RevisionNotPresent,
                f.add_lines, 'r2', ['foo'], [])
            self.assertRaises(RevisionAlreadyPresent,
                f.add_lines, 'r1', [], [])
        verify_file(f)
        # this checks that reopen with create=True does not break anything.
        f = self.reopen_file(create=True)
        verify_file(f)

    def test_adds_with_parent_texts(self):
        f = self.get_file()
        parent_texts = {}
        _, _, parent_texts['r0'] = f.add_lines('r0', [], ['a\n', 'b\n'])
        try:
            _, _, parent_texts['r1'] = f.add_lines_with_ghosts('r1',
                ['r0', 'ghost'], ['b\n', 'c\n'], parent_texts=parent_texts)
        except NotImplementedError:
            # if the format doesn't support ghosts, just add normally.
            _, _, parent_texts['r1'] = f.add_lines('r1',
                ['r0'], ['b\n', 'c\n'], parent_texts=parent_texts)
        f.add_lines('r2', ['r1'], ['c\n', 'd\n'], parent_texts=parent_texts)
        self.assertNotEqual(None, parent_texts['r0'])
        self.assertNotEqual(None, parent_texts['r1'])
        def verify_file(f):
            versions = f.versions()
            self.assertTrue('r0' in versions)
            self.assertTrue('r1' in versions)
            self.assertTrue('r2' in versions)
            self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
            self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
            self.assertEquals(f.get_lines('r2'), ['c\n', 'd\n'])
            self.assertEqual(3, f.num_versions())
            origins = f.annotate('r1')
            self.assertEquals(origins[0][0], 'r0')
            self.assertEquals(origins[1][0], 'r1')
            origins = f.annotate('r2')
            self.assertEquals(origins[0][0], 'r1')
            self.assertEquals(origins[1][0], 'r2')
        verify_file(f)
        f = self.reopen_file()
        verify_file(f)

    def test_add_unicode_content(self):
        # unicode content is not permitted in versioned files.
        # versioned files version sequences of bytes only.
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterUnicode,
            vf.add_lines, 'a', [], ['a\n', u'b\n', 'c\n'])
        self.assertRaises(
            (errors.BzrBadParameterUnicode, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n', u'b\n', 'c\n'])

    def test_add_follows_left_matching_blocks(self):
        """If we change left_matching_blocks, delta changes

        Note: There are multiple correct deltas in this case, because
        we start with 1 "a" and we get 3.
        """
        vf = self.get_file()
        if isinstance(vf, WeaveFile):
            raise TestSkipped("WeaveFile ignores left_matching_blocks")
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['a\n', 'a\n', 'a\n'],
                     left_matching_blocks=[(0, 0, 1), (1, 3, 0)])
        self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('2'))
        vf.add_lines('3', ['1'], ['a\n', 'a\n', 'a\n'],
                     left_matching_blocks=[(0, 2, 1), (1, 3, 0)])
        self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('3'))

    def test_inline_newline_throws(self):
        # \n characters are only permitted as the final character of a line
        # being added; an embedded newline raises.
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterContainsNewline,
            vf.add_lines, 'a', [], ['a\n\n'])
        self.assertRaises(
            (errors.BzrBadParameterContainsNewline, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n\n'])
        # but inline CRs are allowed
        vf.add_lines('a', [], ['a\r\n'])
        try:
            vf.add_lines_with_ghosts('b', [], ['a\r\n'])
        except NotImplementedError:
            pass

    def test_add_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId,
            vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])

    def test_add_lines_nostoresha(self):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                version + "2")

    def test_add_lines_with_ghosts_nostoresha(self):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        # is the test applicable to this vf implementation?
        try:
            vf.add_lines_with_ghosts('d', [], [])
        except NotImplementedError:
            raise TestSkipped("add_lines_with_ghosts is optional")
        for sha, (version, lines) in zip(
            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines_with_ghosts, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                version + "2")

    def test_add_lines_return_value(self):
        # add_lines should return the sha1 and the text size.
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        # check results for the three cases:
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            # the first two elements are the same for all versioned files:
            # - the digest and the size of the text. For some versioned files
            #   additional data is returned in additional tuple elements.
            result = vf.add_lines(version, [], lines)
            self.assertEqual(3, len(result))
            self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
                result[0:2])
        # parents should not affect the result:
        lines = sample_text_nl[1]
        self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
            vf.add_lines('d', ['b', 'c'], lines)[0:2])

    def test_get_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId, vf.get_texts, ['b:'])
        self.assertRaises(errors.ReservedId, vf.get_lines, 'b:')
        self.assertRaises(errors.ReservedId, vf.get_text, 'b:')

    def test_add_unchanged_last_line_noeol_snapshot(self):
        """Add a text with an unchanged last line with no eol should work."""
        # Test adding this in a number of chain lengths; because the interface
        # for VersionedFile does not allow forcing a specific chain length, we
        # just use a small base to get the first snapshot, then a much longer
        # first line for the next add (which will make the third add snapshot)
        # and so on. 20 has been chosen as an arbitrary figure - knits use 200
        # as a capped delta length, but ideally we would have some way of
        # tuning the test to the store (e.g. keep going until a snapshot
        # happens).
        for length in range(20):
            version_lines = {}
            vf = self.get_file('case-%d' % length)
            prefix = 'step-%d'  # any distinct per-step name pattern works
            parents = []
            for step in range(length):
                version = prefix % step
                lines = (['prelude \n'] * step) + ['line']
                vf.add_lines(version, parents, lines)
                version_lines[version] = lines
                parents = [version]
            vf.add_lines('no-eol', parents, ['line'])
            vf.get_texts(version_lines.keys())
            self.assertEqualDiff('line', vf.get_text('no-eol'))
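
    # Note: each iteration of the outer loop above lengthens the delta chain
    # by one version, so a store that snapshots every N deltas is exercised
    # both just before and just after its snapshot points.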

    def test_get_texts_eol_variation(self):
        # similar to the failure in <http://bugs.launchpad.net/234748>
        vf = self.get_file()
        sample_text_nl = ["line\n"]
        sample_text_no_nl = ["line"]
        versions = []
        version_lines = {}
        parents = []
        for i in range(4):
            version = 'v%d' % i
            if i % 2:
                lines = sample_text_nl
            else:
                lines = sample_text_no_nl
            # left_matching blocks is an internal api; it operates on the
            # *internal* representation for a knit, which is with *all* lines
            # being normalised to end with \n - even the final line in a no_nl
            # file. Using it here ensures that a broken internal implementation
            # (which is what this test tests) will generate a correct line
            # delta (which is to say, an empty delta).
            vf.add_lines(version, parents, lines,
                left_matching_blocks=[(0, 0, 1)])
            parents = [version]
            versions.append(version)
            version_lines[version] = lines

        vf.get_texts(versions)
        vf.get_texts(reversed(versions))

    def test_add_lines_with_matching_blocks_noeol_last_line(self):
        """Add a text with an unchanged last line with no eol should work."""
        from bzrlib import multiparent
        # Hand verified sha1 of the text we're adding.
        sha1 = '6a1d115ec7b60afb664dc14890b5af5ce3c827a4'
        # Create a mpdiff which adds a new line before the trailing line, and
        # reuse the last line unaltered (which can cause annotation reuse).
        # Test adding this in two situations:
        # On top of a new insertion
        vf = self.get_file('fulltext')
        vf.add_lines('noeol', [], ['line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(0, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
        # On top of a delta
        vf = self.get_file('delta')
        vf.add_lines('base', [], ['line'])
        vf.add_lines('noeol', ['base'], ['prelude\n', 'line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(1, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))

    def test_make_mpdiffs(self):
        from bzrlib import multiparent
        vf = self.get_file('foo')
        sha1s = self._setup_for_deltas(vf)
        new_vf = self.get_file('bar')
        for version in multiparent.topo_iter(vf):
            mpdiff = vf.make_mpdiffs([version])[0]
            new_vf.add_mpdiffs([(version, vf.get_parent_map([version])[version],
                                 vf.get_sha1s([version])[version], mpdiff)])
            self.assertEqualDiff(vf.get_text(version),
                                 new_vf.get_text(version))
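
    # (make_mpdiffs expresses each version as a multi-parent diff - hunks
    # copied from any parent plus newly-introduced lines - so the add/extract
    # round-trip above must reproduce every text byte for byte.)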

    def test_make_mpdiffs_with_ghosts(self):
        vf = self.get_file('foo')
        try:
            vf.add_lines_with_ghosts('text', ['ghost'], ['line\n'])
        except NotImplementedError:
            # old Weave formats do not allow ghosts
            return
        self.assertRaises(errors.RevisionNotPresent, vf.make_mpdiffs, ['ghost'])

    def _setup_for_deltas(self, f):
        self.assertFalse(f.has_version('base'))
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 26 insertions
        f.add_lines('base', [], ['line\n'])
        f.add_lines('noeol', ['base'], ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        f.add_lines('noeolsecond', ['noeol'], ['line\n', 'line'])
        # differing last line with parent, both no-eol
        f.add_lines('noeolnotshared', ['noeolsecond'], ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        f.add_lines('eol', ['noeol'], ['phone\n'])
        # add eol following a noneol parent, no change content
        f.add_lines('eolline', ['noeol'], ['line\n'])
        # noeol with no parents:
        f.add_lines('noeolbase', [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        f.add_lines('eolbeforefirstparent', ['noeolbase', 'noeol'], ['line'])
        # two identical no-eol texts
        f.add_lines('noeoldup', ['noeol'], ['line'])
        next_parent = 'base'
        text_name = 'chain1-'
        text = ['line\n']
        sha1s = {0 :'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1 :'45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2 :'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3 :'26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4 :'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5 :'d63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6 :'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7 :'95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8 :'779e9a0b28f9f832528d4b21e17e168c67697272',
                 9 :'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10:'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11:'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12:'31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13:'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14:'2c4b1736566b8ca6051e668de68650686a3922f2',
                 15:'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16:'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17:'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18:'5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19:'1ebed371807ba5935958ad0884595126e8c4e823',
                 20:'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21:'01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22:'d8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23:'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24:'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25:'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = text_name + '%s' % depth
            text = text + ['line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        next_parent = 'base'
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = text_name + '%s' % depth
            text = text + ['line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        return sha1s

    def test_ancestry(self):
        f = self.get_file()
        self.assertEqual([], f.get_ancestry([]))
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
        f.add_lines('r2', ['r0'], ['b\n', 'c\n'])
        f.add_lines('r3', ['r2'], ['b\n', 'c\n'])
        f.add_lines('rM', ['r1', 'r2'], ['b\n', 'c\n'])
        self.assertEqual([], f.get_ancestry([]))
        versions = f.get_ancestry(['rM'])
        # there are some possibilities:
        # r0 r1 r2 rM
        # r0 r2 r1 rM
        # etc
        # so we check indexes
        r0 = versions.index('r0')
        r1 = versions.index('r1')
        r2 = versions.index('r2')
        self.assertFalse('r3' in versions)
        rM = versions.index('rM')
        self.assertTrue(r0 < r1)
        self.assertTrue(r0 < r2)
        self.assertTrue(r1 < rM)
        self.assertTrue(r2 < rM)

        self.assertRaises(RevisionNotPresent,
            f.get_ancestry, ['rM', 'rX'])

        self.assertEqual(set(f.get_ancestry('rM')),
            set(f.get_ancestry('rM', topo_sorted=False)))

    def test_mutate_after_finish(self):
        self._transaction = 'before'
        f = self.get_file()
        self._transaction = 'after'
        self.assertRaises(errors.OutSideTransaction, f.add_lines, '', [], [])
        self.assertRaises(errors.OutSideTransaction, f.add_lines_with_ghosts, '', [], [])

    def test_copy_to(self):
        f = self.get_file()
        f.add_lines('0', [], ['a\n'])
        t = MemoryTransport()
        f.copy_to('foo', t)
        for suffix in self.get_factory().get_suffixes():
            self.assertTrue(t.has('foo' + suffix))

    def test_get_suffixes(self):
        f = self.get_file()
        # and should be a list
        self.assertTrue(isinstance(self.get_factory().get_suffixes(), list))

    def test_get_parent_map(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        self.assertEqual(
            {'r0':()}, f.get_parent_map(['r0']))
        f.add_lines('r1', ['r0'], ['a\n', 'b\n'])
        self.assertEqual(
            {'r1':('r0',)}, f.get_parent_map(['r1']))
        self.assertEqual(
            {'r0':(),
             'r1':('r0',)},
            f.get_parent_map(['r0', 'r1']))
        f.add_lines('r2', [], ['a\n', 'b\n'])
        f.add_lines('r3', [], ['a\n', 'b\n'])
        f.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n'])
        self.assertEqual(
            {'m':('r0', 'r1', 'r2', 'r3')}, f.get_parent_map(['m']))
        self.assertEqual({}, f.get_parent_map('y'))
        self.assertEqual(
            {'r0':(),
             'r1':('r0',)},
            f.get_parent_map(['r0', 'y', 'r1']))

    def test_annotate(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
        origins = f.annotate('r1')
        self.assertEquals(origins[0][0], 'r1')
        self.assertEquals(origins[1][0], 'r0')

        self.assertRaises(RevisionNotPresent,
            f.annotate, 'foo')

    def test_detection(self):
        # Test weaves detect corruption.
        #
        # Weaves contain a checksum of their texts.
        # When a text is extracted, this checksum should be
        # verified.
        w = self.get_file_corrupted_text()
        self.assertEqual('hello\n', w.get_text('v1'))
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.check)

        w = self.get_file_corrupted_checksum()
        self.assertEqual('hello\n', w.get_text('v1'))
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.check)

    def get_file_corrupted_text(self):
        """Return a versioned file with corrupt text but valid metadata."""
        raise NotImplementedError(self.get_file_corrupted_text)

    def reopen_file(self, name='foo'):
        """Open the versioned file from disk again."""
        raise NotImplementedError(self.reopen_file)

    def test_iter_lines_added_or_present_in_versions(self):
        # test that we get at least an equal set of the lines added by
        # versions in the weave
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.DummyProgress):

            def __init__(self):
                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        vf = self.get_file()
        # add a base to get included
        vf.add_lines('base', [], ['base\n'])
        # add an ancestor to be included on one side
        vf.add_lines('lancestor', [], ['lancestor\n'])
        # add an ancestor to be included on the other side
        vf.add_lines('rancestor', ['base'], ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        vf.add_lines('child', ['rancestor'], ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        vf.add_lines('otherchild',
                     ['lancestor', 'base'],
                     ['base\n', 'lancestor\n', 'otherchild\n'])
        def iter_with_versions(versions, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in vf.iter_lines_added_or_present_in_versions(versions,
                pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines
        lines = iter_with_versions(['child', 'otherchild'],
                                   [('Walking content', 0, 2),
                                    ('Walking content', 1, 2),
                                    ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
        # we don't care if we got more than that.

        # test all lines
        lines = iter_with_versions(None, [('Walking content', 0, 5),
                                          ('Walking content', 1, 5),
                                          ('Walking content', 2, 5),
                                          ('Walking content', 3, 5),
                                          ('Walking content', 4, 5),
                                          ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', 'base')] > 0)
        self.assertTrue(lines[('lancestor\n', 'lancestor')] > 0)
        self.assertTrue(lines[('rancestor\n', 'rancestor')] > 0)
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
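
    # The returned mapping is keyed by (line, version-that-introduced-it)
    # pairs and counts occurrences, which is why the assertions above index
    # it with tuples such as ('child\n', 'child').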

    def test_add_lines_with_ghosts(self):
        # some versioned file formats allow lines to be added with parent
        # information that is > than that in the format. Formats that do
        # not support this need to raise NotImplementedError on the
        # add_lines_with_ghosts api.
        vf = self.get_file()
        # add a revision with ghost parents
        # The preferred form is utf8, but we should translate when needed
        parent_id_unicode = u'b\xbfse'
        parent_id_utf8 = parent_id_unicode.encode('utf8')
        try:
            vf.add_lines_with_ghosts('notbxbfse', [parent_id_utf8], [])
        except NotImplementedError:
            # check the other ghost apis are also not implemented
            self.assertRaises(NotImplementedError, vf.get_ancestry_with_ghosts, ['foo'])
            self.assertRaises(NotImplementedError, vf.get_parents_with_ghosts, 'foo')
            return
        vf = self.reopen_file()
        # test key graph related apis: get_ancestry, _graph, get_parents,
        # has_version
        # - these are ghost unaware and must not reflect ghosts
        self.assertEqual(['notbxbfse'], vf.get_ancestry('notbxbfse'))
        self.assertFalse(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
        # if we add something that is a ghost of another, it should correct the
        # results of the prior apis
        vf.add_lines(parent_id_utf8, [], [])
        self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry(['notbxbfse']))
        self.assertEqual({'notbxbfse':(parent_id_utf8,)},
            vf.get_parent_map(['notbxbfse']))
        self.assertTrue(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'],
            vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))

    def test_add_lines_with_ghosts_after_normal_revs(self):
        # some versioned file formats allow lines to be added with parent
        # information that is > than that in the format. Formats that do
        # not support this need to raise NotImplementedError on the
        # add_lines_with_ghosts api.
        vf = self.get_file()
        # probe for ghost support
        try:
            vf.add_lines_with_ghosts('base', [], ['line\n', 'line_b\n'])
        except NotImplementedError:
            return
        vf.add_lines_with_ghosts('references_ghost',
                                 ['base', 'a_ghost'],
                                 ['line\n', 'line_b\n', 'line_c\n'])
        origins = vf.annotate('references_ghost')
        self.assertEquals(('base', 'line\n'), origins[0])
        self.assertEquals(('base', 'line_b\n'), origins[1])
        self.assertEquals(('references_ghost', 'line_c\n'), origins[2])

    def test_readonly_mode(self):
        transport = get_transport(self.get_url('.'))
        factory = self.get_factory()
        vf = factory('id', transport, 0777, create=True, access_mode='w')
        vf = factory('id', transport, access_mode='r')
        self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
        self.assertRaises(errors.ReadOnlyError,
                          vf.add_lines_with_ghosts,
                          'base',
                          [],
                          [])

    def test_get_sha1s(self):
        # check the sha1 data is available
        vf = self.get_file()
        # a simple file
        vf.add_lines('a', [], ['a\n'])
        # the same file, different metadata
        vf.add_lines('b', ['a'], ['a\n'])
        # a file differing only in last newline.
        vf.add_lines('c', [], ['a'])
        self.assertEqual({
            'a': '3f786850e387550fdab836ed7e6dc881de23001b',
            'c': '86f7e437faa5a7fce15d1ddcb9eaeaea377667b8',
            'b': '3f786850e387550fdab836ed7e6dc881de23001b',
            },
            vf.get_sha1s(['a', 'c', 'b']))
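
    # 'a' and 'b' hash identically because get_sha1s digests content only,
    # not metadata; 'c' differs solely by the missing trailing newline.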


class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)

    def get_file_corrupted_text(self):
        w = WeaveFile('foo', get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)
        w.add_lines('v1', [], ['hello\n'])
        w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])

        # We are going to invasively corrupt the text
        # Make sure the internals of weave are the same
        self.assertEqual([('{', 0)
                        , 'hello\n'
                        , ('}', None)
                        , ('{', 1)
                        , 'there\n'
                        , ('}', None)
                        ], w._weave)

        self.assertEqual(['f572d396fae9206628714fb2ce00f72e94f2258f'
                        , '90f265c6e75f1c8f9ab76dcf85528352c5f215ef'
                        ], w._sha1s)
        w.check()

        # Corrupted
        w._weave[4] = 'There\n'
        return w

    def get_file_corrupted_checksum(self):
        w = self.get_file_corrupted_text()
        # Corrected
        w._weave[4] = 'there\n'
        self.assertEqual('hello\nthere\n', w.get_text('v2'))

        # Invalid checksum, first digit changed
        w._sha1s[1] = 'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
        return w

    def reopen_file(self, name='foo', create=False):
        return WeaveFile(name, get_transport(self.get_url('.')), create=create,
            get_scope=self.get_transaction)

    def test_no_implicit_create(self):
        self.assertRaises(errors.NoSuchFile,
                          WeaveFile,
                          'foo',
                          get_transport(self.get_url('.')),
                          get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile


class TestPlanMergeVersionedFile(TestCaseWithMemoryTransport):

    def setUp(self):
        TestCaseWithMemoryTransport.setUp(self)
        mapper = PrefixMapper()
        factory = make_file_factory(True, mapper)
        self.vf1 = factory(self.get_transport('root-1'))
        self.vf2 = factory(self.get_transport('root-2'))
        self.plan_merge_vf = versionedfile._PlanMergeVersionedFile('root')
        self.plan_merge_vf.fallback_versionedfiles.extend([self.vf1, self.vf2])

    def test_add_lines(self):
        self.plan_merge_vf.add_lines(('root', 'a:'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
            ('root', 'a'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
            ('root', 'a:'), None, [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
            ('root', 'a:'), [], None)

    def setup_abcde(self):
        self.vf1.add_lines(('root', 'A'), [], ['a'])
        self.vf1.add_lines(('root', 'B'), [('root', 'A')], ['b'])
        self.vf2.add_lines(('root', 'C'), [], ['c'])
        self.vf2.add_lines(('root', 'D'), [('root', 'C')], ['d'])
        self.plan_merge_vf.add_lines(('root', 'E:'),
            [('root', 'B'), ('root', 'D')], ['e'])

    def test_get_parents(self):
        self.setup_abcde()
        self.assertEqual({('root', 'B'):(('root', 'A'),)},
            self.plan_merge_vf.get_parent_map([('root', 'B')]))
        self.assertEqual({('root', 'D'):(('root', 'C'),)},
            self.plan_merge_vf.get_parent_map([('root', 'D')]))
        self.assertEqual({('root', 'E:'):(('root', 'B'),('root', 'D'))},
            self.plan_merge_vf.get_parent_map([('root', 'E:')]))
        self.assertEqual({},
            self.plan_merge_vf.get_parent_map([('root', 'F')]))
        self.assertEqual({
                ('root', 'B'):(('root', 'A'),),
                ('root', 'D'):(('root', 'C'),),
                ('root', 'E:'):(('root', 'B'),('root', 'D')),
                },
            self.plan_merge_vf.get_parent_map(
                [('root', 'B'), ('root', 'D'), ('root', 'E:'), ('root', 'F')]))

    def test_get_record_stream(self):
        self.setup_abcde()
        def get_record(suffix):
            return self.plan_merge_vf.get_record_stream(
                [('root', suffix)], 'unordered', True).next()
        self.assertEqual('a', get_record('A').get_bytes_as('fulltext'))
        self.assertEqual('c', get_record('C').get_bytes_as('fulltext'))
        self.assertEqual('e', get_record('E:').get_bytes_as('fulltext'))
        self.assertEqual('absent', get_record('F').storage_kind)


class TestReadonlyHttpMixin(object):

    def get_transaction(self):
        return 1

    def test_readonly_http_works(self):
        # we should be able to read from http with a versioned file.
        vf = self.get_file()
        # try an empty file access
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
        self.assertEqual([], readonly_vf.versions())
        # now with feeling.
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
        self.assertEqual(['1', '2'], vf.versions())
        for version in readonly_vf.versions():
            readonly_vf.get_lines(version)


class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):

    def get_file(self):
        return WeaveFile('foo', get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile


class MergeCasesMixin(object):

    def doMerge(self, base, a, b, mp):
        from cStringIO import StringIO
        from textwrap import dedent

        def addcrlf(x):
            return x + '\n'

        w = self.get_file()
        w.add_lines('text0', [], map(addcrlf, base))
        w.add_lines('text1', ['text0'], map(addcrlf, a))
        w.add_lines('text2', ['text0'], map(addcrlf, b))

        self.log_contents(w)

        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))

        self.log('merge:')
        mt = StringIO()
        mt.writelines(w.weave_merge(p))
        mt.seek(0)
        self.log(mt.getvalue())

        mp = map(addcrlf, mp)
        self.assertEqual(mt.readlines(), mp)
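
    # plan_merge yields (state, line) pairs (states such as 'unchanged',
    # 'new-a', 'new-b', 'killed-a', 'killed-b'); weave_merge consumes that
    # plan to emit merged lines, inserting conflict markers where needed.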

    def testOneInsert(self):
        self.doMerge([],
                     ['aa'],
                     [],
                     ['aa'])

    def testSeparateInserts(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    def testSameInsert(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    overlappedInsertExpected = ['aaa', 'xxx', 'yyy', 'bbb']

    def testOverlappedInsert(self):
        self.doMerge(['aaa', 'bbb'],
                     ['aaa', 'xxx', 'yyy', 'bbb'],
                     ['aaa', 'xxx', 'bbb'], self.overlappedInsertExpected)

        # really it ought to reduce this to
        # ['aaa', 'xxx', 'yyy', 'bbb']

    def testClashReplace(self):
        self.doMerge(['aaa'],
                     ['xxx'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert1(self):
        self.doMerge(['aaa'],
                     ['xxx', 'aaa'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert2(self):
        self.doMerge(['aaa'],
                     ['aaa'],
                     ['yyy', 'zzz'],
                     ['yyy', 'zzz'])

    def testDeleteAndModify(self):
        """Clashing delete and modification.

        If one side modifies a region and the other deletes it then
        there should be a conflict with one side blank.
        """

        #######################################
        # skipped, not working yet
        #######################################
        return

        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'ddd', 'ccc'],
                     ['aaa', 'ccc'],
                     ['<<<<<<<< ', 'aaa', '=======', '>>>>>>> ', 'ccc'])

    def _test_merge_from_strings(self, base, a, b, expected):
        w = self.get_file()
        w.add_lines('text0', [], base.splitlines(True))
        w.add_lines('text1', ['text0'], a.splitlines(True))
        w.add_lines('text2', ['text0'], b.splitlines(True))
        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))
        self.log('merge result:')
        result_text = ''.join(w.weave_merge(p))
        self.log(result_text)
        self.assertEqualDiff(result_text, expected)

    def test_weave_merge_conflicts(self):
        # does weave merge properly handle plans that end with unchanged?
        result = ''.join(self.get_file().weave_merge([('new-a', 'hello\n')]))
        self.assertEqual(result, 'hello\n')

    def test_deletion_extended(self):
        """One side deletes, the other deletes more.
        """
        self._test_merge_from_strings(base, a, b, result)

    def test_deletion_overlap(self):
        """Delete overlapping regions with no other conflict.

        Arguably it'd be better to treat these as agreement, rather than
        conflict, but for now conflict is safer.
        """
        self._test_merge_from_strings(base, a, b, result)

    def test_agreement_deletion(self):
        """Agree to delete some lines, without conflicts."""
        self._test_merge_from_strings(base, a, b, result)

    def test_sync_on_deletion(self):
        """Specific case of merge where we can synchronize incorrectly.

        A previous version of the weave merge concluded that the two versions
        agreed on deleting line 2, and this could be a synchronization point.
        Line 1 was then considered in isolation, and thought to be deleted on
        both sides.

        It's better to consider the whole thing as a disagreement region.
        """
        base = """\
            start context
            base line 1
            base line 2
            end context
            """
        a = """\
            start context
            base line 1
            a's replacement line 2
            end context
            """
        b = """\
            start context
            b replaces
            both lines
            end context
            """
        result = """\
            start context
<<<<<<< 
            base line 1
            a's replacement line 2
=======
            b replaces
            both lines
>>>>>>> 
            end context
            """
        self._test_merge_from_strings(base, a, b, result)


class TestWeaveMerge(TestCaseWithMemoryTransport, MergeCasesMixin):

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True)

    def log_contents(self, w):
        self.log('weave is:')
        tmpf = StringIO()
        write_weave(w, tmpf)
        self.log(tmpf.getvalue())

    overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======',
                                'xxx', '>>>>>>> ', 'bbb']


class TestContentFactoryAdaption(TestCaseWithMemoryTransport):

    def test_select_adaptor(self):
        """Test expected adapters exist."""
        # One scenario for each lookup combination we expect to use.
        # Each is source_kind, requested_kind, adapter class
        scenarios = [
            ('knit-delta-gz', 'fulltext', _mod_knit.DeltaPlainToFullText),
            ('knit-ft-gz', 'fulltext', _mod_knit.FTPlainToFullText),
            ('knit-annotated-delta-gz', 'knit-delta-gz',
                _mod_knit.DeltaAnnotatedToUnannotated),
            ('knit-annotated-delta-gz', 'fulltext',
                _mod_knit.DeltaAnnotatedToFullText),
            ('knit-annotated-ft-gz', 'knit-ft-gz',
                _mod_knit.FTAnnotatedToUnannotated),
            ('knit-annotated-ft-gz', 'fulltext',
                _mod_knit.FTAnnotatedToFullText),
            ]
        for source, requested, klass in scenarios:
            adapter_factory = versionedfile.adapter_registry.get(
                (source, requested))
            adapter = adapter_factory(None)
            self.assertIsInstance(adapter, klass)

    def get_knit(self, annotated=True):
        mapper = ConstantMapper('knit')
        transport = self.get_transport()
        return make_file_factory(annotated, mapper)(transport)

    def helpGetBytes(self, f, ft_adapter, delta_adapter):
        """Grab the interested adapted texts for tests."""
        # origin is a fulltext
        entries = f.get_record_stream([('origin',)], 'unordered', False)
        base = entries.next()
        ft_data = ft_adapter.get_bytes(base)
        # merged is both a delta and multiple parents.
        entries = f.get_record_stream([('merged',)], 'unordered', False)
        merged = entries.next()
        delta_data = delta_adapter.get_bytes(merged)
        return ft_data, delta_data
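
    # The knit records decompressed in the tests below have the layout:
    #   version <name> <line-count> <sha1>
    #   <content lines; annotated records prefix each line with its origin>
    #   end <name>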

    def test_deannotation_noeol(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 4 32c2e79763b3f90e8ccde37f9710b6629c25a796\n'
            '1,2,3\nleft\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())

    def test_deannotation(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 3 ed8bce375198ea62444dc71952b22cfc2b09226d\n'
            '2,2,2\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())

    def test_annotated_to_fulltext_no_eol(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_annotated_to_fulltext(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_unannotated_to_fulltext(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_unannotated_to_fulltext_no_eol(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)


class TestKeyMapper(TestCaseWithMemoryTransport):
    """Tests for various key mapping logic."""

    def test_identity_mapper(self):
        mapper = versionedfile.ConstantMapper("inventory")
        self.assertEqual("inventory", mapper.map(('foo@ar',)))
        self.assertEqual("inventory", mapper.map(('quux',)))

    def test_prefix_mapper(self):
        #format5: plain
        mapper = versionedfile.PrefixMapper()
        self.assertEqual("file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("file-id"))
        self.assertEqual(('new-id',), mapper.unmap("new-id"))

    def test_hash_prefix_mapper(self):
        #format6: hash + plain
        mapper = versionedfile.HashPrefixMapper()
        self.assertEqual("9b/file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("45/new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("9b/file-id"))
        self.assertEqual(('new-id',), mapper.unmap("45/new-id"))

    def test_hash_escaped_mapper(self):
        #knit1: hash + escaped
        mapper = versionedfile.HashEscapedPrefixMapper()
        self.assertEqual("88/%2520", mapper.map((" ", "revision-id")))
        self.assertEqual("ed/fil%2545-%2549d", mapper.map(("filE-Id",
            "revision-id")))
        self.assertEqual("88/ne%2557-%2549d", mapper.map(("neW-Id",
            "revision-id")))
        self.assertEqual(('filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
        self.assertEqual(('neW-Id',), mapper.unmap("88/ne%2557-%2549d"))
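
    # Note the double escaping above: unsafe characters are %-quoted first
    # (' ' -> '%20', 'E' -> '%45') and the '%' itself is then escaped,
    # yielding '%2520' and '%2545' in the mapped paths.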


class TestVersionedFiles(TestCaseWithMemoryTransport):
    """Tests for the multiple-file variant of VersionedFile."""

    def get_versionedfiles(self, relpath='files'):
        transport = self.get_transport(relpath)
        if relpath != '.':
            transport.mkdir('.')
        files = self.factory(transport)
        if self.cleanup is not None:
            self.addCleanup(lambda:self.cleanup(files))
        return files

    def test_annotate(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            prefix = ()
        else:
            prefix = ('FileA',)
        # introduced full text
        origins = files.annotate(prefix + ('origin',))
        self.assertEqual([
            (prefix + ('origin',), 'origin\n')],
            origins)
        # a delta
        origins = files.annotate(prefix + ('base',))
        self.assertEqual([
            (prefix + ('base',), 'base\n')],
            origins)
        # a merge
        origins = files.annotate(prefix + ('merged',))
        if self.graph:
            self.assertEqual([
                (prefix + ('base',), 'base\n'),
                (prefix + ('left',), 'left\n'),
                (prefix + ('right',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        else:
            # Without a graph everything is new.
            self.assertEqual([
                (prefix + ('merged',), 'base\n'),
                (prefix + ('merged',), 'left\n'),
                (prefix + ('merged',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        self.assertRaises(RevisionNotPresent,
            files.annotate, prefix + ('missing-key',))

    def test_construct(self):
        """Each parameterised test can be constructed on a transport."""
        files = self.get_versionedfiles()

    def get_diamond_files(self, files, trailing_eol=True, left_only=False,
        nokeys=False):
        return get_diamond_files(files, self.key_length,
            trailing_eol=trailing_eol, nograph=not self.graph,
            left_only=left_only, nokeys=nokeys)

    def test_add_lines_nostoresha(self):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_versionedfiles()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(self.get_simple_key(version), [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            new_key = self.get_simple_key(version + "2")
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, new_key, [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            record = vf.get_record_stream([new_key], 'unordered', True).next()
            self.assertEqual('absent', record.storage_kind)

    def test_add_lines_return(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
        elif self.key_length == 2:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)

    def test_add_lines_no_key_generates_chk_key(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files, nokeys=True)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
            # Check the added items got CHK keys.
            self.assertEqual(set([
                ('sha1:00e364d235126be43292ab09cb4686cf703ddc17',),
                ('sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',),
                ('sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',),
                ('sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',),
                ('sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',),
                ]),
                files.keys())
        elif self.key_length == 2:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
            # Check the added items got CHK keys.
            self.assertEqual(set([
                ('FileA', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                ('FileA', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                ('FileA', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                ('FileA', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                ('FileA', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
                ('FileB', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                ('FileB', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                ('FileB', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                ('FileB', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                ('FileB', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
                ]),
                files.keys())

    def test_empty_lines(self):
        """Empty files can be stored."""
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], [])
        self.assertEqual('',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), [])
        self.assertEqual('',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))

    def test_newline_only(self):
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))

    def test_get_record_stream_empty(self):
        """An empty stream can be requested without error."""
        f = self.get_versionedfiles()
        entries = f.get_record_stream([], 'unordered', False)
        self.assertEqual([], list(entries))

    def assertValidStorageKind(self, storage_kind):
        """Assert that storage_kind is a valid storage_kind."""
        self.assertSubset([storage_kind],
            ['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
             'knit-ft', 'knit-delta', 'chunked', 'fulltext',
             'knit-annotated-ft-gz', 'knit-annotated-delta-gz', 'knit-ft-gz',
             'knit-delta-gz',
             'knit-delta-closure', 'knit-delta-closure-ref'])

    def capture_stream(self, f, entries, on_seen, parents):
        """Capture a stream for testing."""
        for factory in entries:
            on_seen(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            if factory.sha1 is not None:
                self.assertEqual(f.get_sha1s([factory.key])[factory.key],
                    factory.sha1)
            self.assertEqual(parents[factory.key], factory.parents)
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)

    def test_get_record_stream_interface(self):
        """each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, entries, seen.add, parent_map)
        self.assertEqual(set(keys), seen)

    def get_simple_key(self, suffix):
        """Return a key for the object under test."""
        if self.key_length == 1:
            return (suffix,)
        else:
            return ('FileA',) + (suffix,)

    def get_keys_and_sort_order(self):
        """Get diamond test keys list, and their sort ordering."""
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
            sort_order = {('merged',):2, ('left',):1, ('right',):1, ('base',):0}
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
            sort_order = {
                ('FileA', 'merged'):2, ('FileA', 'left'):1, ('FileA', 'right'):1,
                ('FileA', 'base'):0,
                ('FileB', 'merged'):2, ('FileB', 'left'):1, ('FileB', 'right'):1,
                ('FileB', 'base'):0,
                }
        return keys, sort_order

    def get_keys_and_groupcompress_sort_order(self):
        """Get diamond test keys list, and their groupcompress sort ordering."""
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
            sort_order = {('merged',):0, ('left',):1, ('right',):1, ('base',):2}
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
            sort_order = {
                ('FileA', 'merged'):0, ('FileA', 'left'):1, ('FileA', 'right'):1,
                ('FileA', 'base'):2,
                ('FileB', 'merged'):3, ('FileB', 'left'):4, ('FileB', 'right'):4,
                ('FileB', 'base'):5,
                }
        return keys, sort_order

    def test_get_record_stream_interface_ordered(self):
        """each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', False)
        seen = []
        self.capture_stream(files, entries, seen.append, parent_map)
        self.assertStreamOrder(sort_order, seen, keys)

    def test_get_record_stream_interface_ordered_with_delta_closure(self):
        """each item must be accessible as a fulltext."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', True)
        seen = []
        for factory in entries:
            seen.append(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertSubset([factory.sha1],
                [None, files.get_sha1s([factory.key])[factory.key]])
            self.assertEqual(parent_map[factory.key], factory.parents)
            # self.assertEqual(files.get_text(factory.key),
            ft_bytes = factory.get_bytes_as('fulltext')
            self.assertIsInstance(ft_bytes, str)
            chunked_bytes = factory.get_bytes_as('chunked')
            self.assertEqualDiff(ft_bytes, ''.join(chunked_bytes))

        self.assertStreamOrder(sort_order, seen, keys)

    def test_get_record_stream_interface_groupcompress(self):
        """each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_groupcompress_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'groupcompress', False)
        seen = []
        self.capture_stream(files, entries, seen.append, parent_map)
        self.assertStreamOrder(sort_order, seen, keys)
1766

    def assertStreamOrder(self, sort_order, seen, keys):
        self.assertEqual(len(set(seen)), len(keys))
        if self.key_length == 1:
            lows = {():0}
        else:
            lows = {('FileA',):0, ('FileB',):0}
        if not self.graph:
            self.assertEqual(set(keys), set(seen))
        else:
            for key in seen:
                sort_pos = sort_order[key]
                self.assertTrue(sort_pos >= lows[key[:-1]],
                    "Out of order in sorted stream: %r, %r" % (key, seen))
                lows[key[:-1]] = sort_pos
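
    # lows tracks, per key prefix, the sort position of the record most
    # recently seen; a correctly ordered stream may interleave prefixes but
    # must never step backwards within a single prefix.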

    def test_get_record_stream_unknown_storage_kind_raises(self):
        """Asking for a storage kind that the stream cannot supply raises."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        # We track the contents because we should be able to try, fail a
        # particular kind and then ask for one that works and continue.
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            if factory.sha1 is not None:
                self.assertEqual(files.get_sha1s([factory.key])[factory.key],
                                 factory.sha1)
            self.assertEqual(parent_map[factory.key], factory.parents)
            # currently no stream emits mpdiff
            self.assertRaises(errors.UnavailableRepresentation,
                factory.get_bytes_as, 'mpdiff')
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
        self.assertEqual(set(keys), seen)

    def test_get_record_stream_missing_records_are_absent(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('absent',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'absent'), ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'absent'), ('FileB', 'base'),
                ('absent', 'absent'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)
        entries = files.get_record_stream(keys, 'topological', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)

    def assertRecordHasContent(self, record, bytes):
        """Assert that record's content is exactly ``bytes``."""
        self.assertEqual(bytes, record.get_bytes_as('fulltext'))
        self.assertEqual(bytes, ''.join(record.get_bytes_as('chunked')))

    def test_get_record_stream_native_formats_are_wire_ready_one_ft(self):
        files = self.get_versionedfiles()
        key = self.get_simple_key('foo')
        files.add_lines(key, (), ['my text\n', 'content'])
        stream = files.get_record_stream([key], 'unordered', False)
        record = stream.next()
        if record.storage_kind in ('chunked', 'fulltext'):
            # chunked and fulltext representations are for direct use, not
            # wire serialisation: check they are able to be used directly.
            # To send such records over the wire translation will be needed.
            self.assertRecordHasContent(record, "my text\ncontent")
        else:
            bytes = [record.get_bytes_as(record.storage_kind)]
            network_stream = versionedfile.NetworkRecordStream(bytes).read()
            source_record = record
            records = []
            for record in network_stream:
                records.append(record)
                self.assertEqual(source_record.storage_kind,
                    record.storage_kind)
                self.assertEqual(source_record.parents, record.parents)
                self.assertEqual(
                    source_record.get_bytes_as(source_record.storage_kind),
                    record.get_bytes_as(record.storage_kind))
            self.assertEqual(1, len(records))
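
    # A rough sketch of the wire round-trip exercised above: the native
    # serialised bytes are sent unmodified and parsed back into equivalent
    # record objects on the receiving side, e.g.
    #   payload = [record.get_bytes_as(record.storage_kind)]
    #   for net_record in versionedfile.NetworkRecordStream(payload).read():
    #       net_record.get_bytes_as(net_record.storage_kind)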

    def assertStreamMetaEqual(self, records, expected, stream):
        """Assert that streams expected and stream have the same records.

        :param records: A list to collect the seen records.
        :return: A generator of the records in stream.
        """
        # We make assertions during copying to catch things early for
        # easier debugging.
        for record, ref_record in izip(stream, expected):
            records.append(record)
            self.assertEqual(ref_record.key, record.key)
            self.assertEqual(ref_record.storage_kind, record.storage_kind)
            self.assertEqual(ref_record.parents, record.parents)
            yield record

    def stream_to_bytes_or_skip_counter(self, skipped_records, full_texts,
        stream):
        """Convert a stream to a bytes iterator.

        :param skipped_records: A list with one element to increment when a
            record is skipped.
        :param full_texts: A dict from key->fulltext representation, for
            checking chunked or fulltext stored records.
        :param stream: A record_stream.
        :return: An iterator over the bytes of each record.
        """
        for record in stream:
            if record.storage_kind in ('chunked', 'fulltext'):
                skipped_records[0] += 1
                # check the content is correct for direct use.
                self.assertRecordHasContent(record, full_texts[record.key])
            else:
                yield record.get_bytes_as(record.storage_kind)
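
    # 'chunked' and 'fulltext' are in-memory representations with no wire
    # encoding, so the helper above counts and content-checks them instead
    # of yielding bytes; every other storage kind is already wire-ready.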

    def test_get_record_stream_native_formats_are_wire_ready_ft_delta(self):
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key, key_delta], 'unordered', False)
        ref = files.get_record_stream([key, key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key: "my text\ncontent",
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object so
        # we can check the content was carried across correctly without doing
        # delta inspection.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire thank you!
        self.assertEqual(2, len(records) + skipped_records[0])
        if len(records):
            # if any content was copied, all of it must have been.
            self.assertIdenticalVersionedFile(files, target_files)

    def test_get_record_stream_native_formats_are_wire_ready_delta(self):
        # copy a delta over the wire
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        # Copy the basis text across so we can reconstruct the delta during
        # insertion into target.
        target_files.insert_record_stream(files.get_record_stream([key],
            'unordered', False))
        local = files.get_record_stream([key_delta], 'unordered', False)
        ref = files.get_record_stream([key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object so
        # we can check the content was carried across correctly without doing
        # delta inspection during check_stream.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire thank you!
        self.assertEqual(1, len(records) + skipped_records[0])
        if len(records):
            # if any content was copied, all of it must have been.
            self.assertIdenticalVersionedFile(files, target_files)

    def test_get_record_stream_wire_ready_delta_closure_included(self):
        # copy a delta over the wire with the ability to get its full text.
        files = self.get_versionedfiles()
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key_delta], 'unordered', True)
        ref = files.get_record_stream([key_delta], 'unordered', True)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # read the stream from the network back out of a record stream so we
        # can check the content was carried across correctly without doing
        # delta inspection during check_stream.
        for record in self.assertStreamMetaEqual(records, ref, network_stream):
            # we have to be able to get the full text out:
            self.assertRecordHasContent(record, full_texts[record.key])
        # No duplicates on the wire thank you!
        self.assertEqual(1, len(records) + skipped_records[0])

    def assertAbsentRecord(self, files, keys, parents, entries):
        """Helper for test_get_record_stream_missing_records_are_absent."""
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            if factory.key[-1] == 'absent':
                self.assertEqual('absent', factory.storage_kind)
                self.assertEqual(None, factory.sha1)
                self.assertEqual(None, factory.parents)
            else:
                self.assertValidStorageKind(factory.storage_kind)
                if factory.sha1 is not None:
                    sha1 = files.get_sha1s([factory.key])[factory.key]
                    self.assertEqual(sha1, factory.sha1)
                self.assertEqual(parents[factory.key], factory.parents)
                self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                    str)
        self.assertEqual(set(keys), seen)

    def test_filter_absent_records(self):
        """Requested missing records can be filtered out trivially."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        # Add an absent record in the middle of the present keys. (We don't
        # ask for just absent keys to ensure that content before and after
        # the absent keys is still delivered.)
        present_keys = list(keys)
        if self.key_length == 1:
            keys.insert(2, ('extra',))
        else:
            keys.insert(2, ('extra', 'extra'))
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, versionedfile.filter_absent(entries),
            seen.add, parent_map)
        self.assertEqual(set(present_keys), seen)
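
    # filter_absent simply drops records whose storage_kind is 'absent'
    # from a record stream, e.g.:
    #   present = versionedfile.filter_absent(
    #       files.get_record_stream(keys, 'unordered', False))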

    def get_mapper(self):
        """Get a mapper suitable for the key length of the test interface."""
        if self.key_length == 1:
            return ConstantMapper('source')
        else:
            return HashEscapedPrefixMapper()

    def get_parents(self, parents):
        """Get parents, taking self.graph into consideration."""
        if self.graph:
            return parents
        else:
            return None

    def test_get_parent_map(self):
        files = self.get_versionedfiles()
        if self.key_length == 1:
            parent_details = [
                (('r0',), self.get_parents(())),
                (('r1',), self.get_parents((('r0',),))),
                (('r2',), self.get_parents(())),
                (('r3',), self.get_parents(())),
                (('m',), self.get_parents((('r0',),('r1',),('r2',),('r3',)))),
                ]
        else:
            parent_details = [
                (('FileA', 'r0'), self.get_parents(())),
                (('FileA', 'r1'), self.get_parents((('FileA', 'r0'),))),
                (('FileA', 'r2'), self.get_parents(())),
                (('FileA', 'r3'), self.get_parents(())),
                (('FileA', 'm'), self.get_parents((('FileA', 'r0'),
                    ('FileA', 'r1'), ('FileA', 'r2'), ('FileA', 'r3')))),
                ]
        for key, parents in parent_details:
            files.add_lines(key, parents, [])
            # immediately after adding it should be queryable.
            self.assertEqual({key:parents}, files.get_parent_map([key]))
        # We can ask for an empty set
        self.assertEqual({}, files.get_parent_map([]))
        # We can ask for many keys
        all_parents = dict(parent_details)
        self.assertEqual(all_parents, files.get_parent_map(all_parents.keys()))
        # Absent keys are just not included in the result.
        keys = all_parents.keys()
        if self.key_length == 1:
            keys.insert(1, ('missing',))
        else:
            keys.insert(1, ('missing', 'missing'))
        # Absent keys are just ignored
        self.assertEqual(all_parents, files.get_parent_map(keys))
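
    # get_parent_map is dict-like by design: unknown keys are silently
    # omitted from the result rather than raising, which the tail of the
    # test above relies on.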

    def test_get_sha1s(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('base',), ('origin',), ('left',), ('merged',), ('right',)]
        else:
            # ask for shas from different prefixes.
            keys = [
                ('FileA', 'base'), ('FileB', 'origin'), ('FileA', 'left'),
                ('FileA', 'merged'), ('FileB', 'right'),
                ]
        self.assertEqual({
            keys[0]: '51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
            keys[1]: '00e364d235126be43292ab09cb4686cf703ddc17',
            keys[2]: 'a8478686da38e370e32e42e8a0c220e33ee9132f',
            keys[3]: 'ed8bce375198ea62444dc71952b22cfc2b09226d',
            keys[4]: '9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
            },
            files.get_sha1s(keys))
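
    # The hex digests above are the sha1s of the diamond fixture's
    # fulltexts; get_sha1s returns a dict keyed by the requested keys, so
    # the same texts must hash identically whatever the storage format.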

    def test_insert_record_stream_empty(self):
        """Inserting an empty record stream should work."""
        files = self.get_versionedfiles()
        files.insert_record_stream([])

    def assertIdenticalVersionedFile(self, expected, actual):
        """Assert that expected and actual have the same contents."""
        self.assertEqual(set(actual.keys()), set(expected.keys()))
        actual_parents = actual.get_parent_map(actual.keys())
        if self.graph:
            self.assertEqual(actual_parents,
                expected.get_parent_map(expected.keys()))
        else:
            for key, parents in actual_parents.items():
                self.assertEqual(None, parents)
        for key in actual.keys():
            actual_text = actual.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            expected_text = expected.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            self.assertEqual(actual_text, expected_text)

    def test_insert_record_stream_fulltexts(self):
        """Any file should accept a stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_fulltexts_noeol(self):
        """Any file should accept a stream of fulltexts without trailing EOLs."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_annotated_knits(self):
        """Any file should accept a stream from annotated knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_annotated_knits_noeol(self):
        """Any file should accept a stream from annotated knits without EOLs."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_plain_knits(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_plain_knits_noeol(self):
        """Any file should accept a stream from plain knits without EOLs."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_existing_keys(self):
        """Inserting keys already in a file should not error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        # insert some keys into files.
        self.get_diamond_files(files, left_only=True)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_missing_keys(self):
        """Inserting a stream with absent keys should raise an error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        stream = source.get_record_stream([('missing',) * self.key_length],
            'topological', False)
        self.assertRaises(errors.RevisionNotPresent,
            files.insert_record_stream, stream)

    def test_insert_record_stream_out_of_order(self):
        """An out of order stream can either error or work."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        if self.key_length == 1:
            origin_keys = [('origin',)]
            end_keys = [('merged',), ('left',)]
            start_keys = [('right',), ('base',)]
        else:
            origin_keys = [('FileA', 'origin'), ('FileB', 'origin')]
            end_keys = [('FileA', 'merged',), ('FileA', 'left',),
                ('FileB', 'merged',), ('FileB', 'left',)]
            start_keys = [('FileA', 'right',), ('FileA', 'base',),
                ('FileB', 'right',), ('FileB', 'base',)]
        origin_entries = source.get_record_stream(origin_keys, 'unordered', False)
        end_entries = source.get_record_stream(end_keys, 'topological', False)
        start_entries = source.get_record_stream(start_keys, 'topological', False)
        entries = chain(origin_entries, end_entries, start_entries)
        try:
            files.insert_record_stream(entries)
        except RevisionNotPresent:
            # Must not have corrupted the file.
            files.check()
        else:
            self.assertIdenticalVersionedFile(source, files)

    def get_knit_delta_source(self):
        """Get a source that can produce a stream with knit delta records,
        regardless of this test's scenario.
        """
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        get_diamond_files(source, self.key_length, trailing_eol=True,
            nograph=False, left_only=False)
        return source

    def test_insert_record_stream_delta_missing_basis_no_corruption(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis.  In the meantime a record missing its basis is
        not added.
        """
        source = self.get_knit_delta_source()
        keys = [self.get_simple_key('origin'), self.get_simple_key('merged')]
        entries = source.get_record_stream(keys, 'unordered', False)
        files = self.get_versionedfiles()
        if self.support_partial_insertion:
            self.assertEqual([],
                list(files.get_missing_compression_parent_keys()))
            files.insert_record_stream(entries)
            missing_bases = files.get_missing_compression_parent_keys()
            self.assertEqual(set([self.get_simple_key('left')]),
                set(missing_bases))
            self.assertEqual(set(keys), set(files.get_parent_map(keys)))
        else:
            self.assertRaises(
                errors.RevisionNotPresent, files.insert_record_stream, entries)
            files.check()
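
    # Scenarios with support_partial_insertion buffer records whose
    # compression basis is absent and report them via
    # get_missing_compression_parent_keys(); other scenarios must refuse
    # the stream outright, as exercised above.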

    def test_insert_record_stream_delta_missing_basis_can_be_added_later(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis.  That basis can be added in a second
        insert_record_stream call that does not need to repeat records present
        in the previous stream.  The record(s) that required that basis are
        fully inserted once their basis is no longer missing.
        """
        if not self.support_partial_insertion:
            raise TestNotApplicable(
                'versioned file scenario does not support partial insertion')
        source = self.get_knit_delta_source()
        entries = source.get_record_stream([self.get_simple_key('origin'),
            self.get_simple_key('merged')], 'unordered', False)
        files = self.get_versionedfiles()
        files.insert_record_stream(entries)
        missing_bases = files.get_missing_compression_parent_keys()
        self.assertEqual(set([self.get_simple_key('left')]),
            set(missing_bases))
        # 'merged' is inserted (although a commit of a write group involving
        # this versionedfiles would fail).
        merged_key = self.get_simple_key('merged')
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())
        # Add the full delta closure of the missing records
        missing_entries = source.get_record_stream(
            missing_bases, 'unordered', True)
        files.insert_record_stream(missing_entries)
        # Now 'merged' is fully inserted (and a commit would succeed).
        self.assertEqual([], list(files.get_missing_compression_parent_keys()))
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())
        files.check()

    def test_iter_lines_added_or_present_in_keys(self):
        # test that we get at least an equalset of the lines added by
        # versions in the store.
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.DummyProgress):

            def __init__(self):
                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        files = self.get_versionedfiles()
        # add a base to get included
        files.add_lines(self.get_simple_key('base'), (), ['base\n'])
        # add an ancestor to be included on one side
        files.add_lines(self.get_simple_key('lancestor'), (), ['lancestor\n'])
        # add an ancestor to be included on the other side
        files.add_lines(self.get_simple_key('rancestor'),
            self.get_parents([self.get_simple_key('base')]), ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        files.add_lines(self.get_simple_key('child'),
            self.get_parents([self.get_simple_key('rancestor')]),
            ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        files.add_lines(self.get_simple_key('otherchild'),
            self.get_parents([self.get_simple_key('lancestor'),
                self.get_simple_key('base')]),
            ['base\n', 'lancestor\n', 'otherchild\n'])

        def iter_with_keys(keys, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in files.iter_lines_added_or_present_in_keys(keys,
                pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines

        lines = iter_with_keys(
            [self.get_simple_key('child'), self.get_simple_key('otherchild')],
            [('Walking content', 0, 2),
             ('Walking content', 1, 2),
             ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
        # we dont care if we got more than that.

        # test all lines
        lines = iter_with_keys(files.keys(),
            [('Walking content', 0, 5),
             ('Walking content', 1, 5),
             ('Walking content', 2, 5),
             ('Walking content', 3, 5),
             ('Walking content', 4, 5),
             ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', self.get_simple_key('base'))] > 0)
        self.assertTrue(
            lines[('lancestor\n', self.get_simple_key('lancestor'))] > 0)
        self.assertTrue(
            lines[('rancestor\n', self.get_simple_key('rancestor'))] > 0)
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
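
    # iter_lines_added_or_present_in_keys yields (line, key) pairs and may
    # visit a line more than once, so the checks above only assert a count
    # greater than zero rather than an exact figure.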

    def test_make_mpdiffs(self):
        from bzrlib import multiparent
        files = self.get_versionedfiles('source')
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 26 insertions
        files.add_lines(self.get_simple_key('base'), [], ['line\n'])
        files.add_lines(self.get_simple_key('noeol'),
            self.get_parents([self.get_simple_key('base')]), ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        files.add_lines(self.get_simple_key('noeolsecond'),
            self.get_parents([self.get_simple_key('noeol')]),
            ['line\n', 'line'])
        # differing last line with parent, both no-eol
        files.add_lines(self.get_simple_key('noeolnotshared'),
            self.get_parents([self.get_simple_key('noeolsecond')]),
            ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        files.add_lines(self.get_simple_key('eol'),
            self.get_parents([self.get_simple_key('noeol')]), ['phone\n'])
        # add eol following a noneol parent, no change content
        files.add_lines(self.get_simple_key('eolline'),
            self.get_parents([self.get_simple_key('noeol')]), ['line\n'])
        # noeol with no parents:
        files.add_lines(self.get_simple_key('noeolbase'), [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        files.add_lines(self.get_simple_key('eolbeforefirstparent'),
            self.get_parents([self.get_simple_key('noeolbase'),
                self.get_simple_key('noeol')]),
            ['line'])
        # two identical no-eol texts
        files.add_lines(self.get_simple_key('noeoldup'),
            self.get_parents([self.get_simple_key('noeol')]), ['line'])
        next_parent = self.get_simple_key('base')
        text_name = 'chain1-'
        text = ['line\n']
        sha1s = {0: 'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1: '45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2: 'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3: '26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4: 'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5: 'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6: '2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7: '95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8: '779e9a0b28f9f832528d4b21e17e168c67697272',
                 9: '1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10: '131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11: 'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12: '31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13: 'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14: '2c4b1736566b8ca6051e668de68650686a3922f2',
                 15: '5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16: 'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17: '8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18: '5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19: '1ebed371807ba5935958ad0884595126e8c4e823',
                 20: '2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21: '01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22: 'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23: 'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24: 'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25: 'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        next_parent = self.get_simple_key('base')
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        target = self.get_versionedfiles('target')
        for key in multiparent.topo_iter_keys(files, files.keys()):
            mpdiff = files.make_mpdiffs([key])[0]
            parents = files.get_parent_map([key])[key] or []
            target.add_mpdiffs(
                [(key, parents, files.get_sha1s([key])[key], mpdiff)])
            self.assertEqualDiff(
                files.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'),
                target.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'))
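
    # A condensed sketch of the mpdiff round-trip checked above, for one key:
    #   mpdiff = files.make_mpdiffs([key])[0]
    #   parents = files.get_parent_map([key])[key] or []
    #   target.add_mpdiffs(
    #       [(key, parents, files.get_sha1s([key])[key], mpdiff)])
    # after which target must reproduce the identical fulltext.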

    def test_keys(self):
        # While use is discouraged, keys() is still needed by aspects of bzr.
        files = self.get_versionedfiles()
        self.assertEqual(set(), set(files.keys()))
        if self.key_length == 1:
            key = ('foo',)
        else:
            key = ('foo', 'bar',)
        files.add_lines(key, (), [])
        self.assertEqual(set([key]), set(files.keys()))


class VirtualVersionedFilesTests(TestCase):
    """Basic tests for the VirtualVersionedFiles implementations."""

    def _get_parent_map(self, keys):
        ret = {}
        for k in keys:
            if k in self._parent_map:
                ret[k] = self._parent_map[k]
        return ret

    def setUp(self):
        TestCase.setUp(self)
        self._lines = {}
        self._parent_map = {}
        self.texts = VirtualVersionedFiles(self._get_parent_map,
                                           self._lines.get)
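
    # VirtualVersionedFiles adapts a get_parent_map callable and a
    # key->lines callable into the read-only side of the VersionedFiles
    # API; the mutating methods are expected to raise NotImplementedError,
    # as the next tests check.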

    def test_add_lines(self):
        self.assertRaises(NotImplementedError,
                self.texts.add_lines, "foo", [], [])

    def test_add_mpdiffs(self):
        self.assertRaises(NotImplementedError,
                self.texts.add_mpdiffs, [])

    def test_check(self):
        self.assertTrue(self.texts.check())

    def test_insert_record_stream(self):
        self.assertRaises(NotImplementedError, self.texts.insert_record_stream,
                          [])

    def test_get_sha1s_nonexistent(self):
        self.assertEquals({}, self.texts.get_sha1s([("NONEXISTENT",)]))

    def test_get_sha1s(self):
        self._lines["key"] = ["dataline1", "dataline2"]
        self.assertEquals({("key",): osutils.sha_strings(self._lines["key"])},
                          self.texts.get_sha1s([("key",)]))

    def test_get_parent_map(self):
        self._parent_map = {"G": ("A", "B")}
        self.assertEquals({("G",): (("A",), ("B",))},
                          self.texts.get_parent_map([("G",), ("L",)]))

    def test_get_record_stream(self):
        self._lines["A"] = ["FOO", "BAR"]
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("chunked", record.storage_kind)
        self.assertEquals("FOOBAR", record.get_bytes_as("fulltext"))
        self.assertEquals(["FOO", "BAR"], record.get_bytes_as("chunked"))

    def test_get_record_stream_absent(self):
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("absent", record.storage_kind)

    def test_iter_lines_added_or_present_in_keys(self):
        self._lines["A"] = ["FOO", "BAR"]
        self._lines["B"] = ["HEY"]
        self._lines["C"] = ["Alberta"]
        it = self.texts.iter_lines_added_or_present_in_keys([("A",), ("B",)])
        self.assertEquals(sorted([("FOO", "A"), ("BAR", "A"), ("HEY", "B")]),
                          sorted(list(it)))


class TestOrderingVersionedFilesDecorator(TestCaseWithMemoryTransport):

    def get_ordering_vf(self, key_priority):
        builder = self.make_branch_builder('test')
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'TREE_ROOT', 'directory', None))])
        builder.build_snapshot('B', ['A'], [])
        builder.build_snapshot('C', ['B'], [])
        builder.build_snapshot('D', ['C'], [])
        builder.finish_series()
        b = builder.get_branch()
        b.lock_read()
        self.addCleanup(b.unlock)
        vf = b.repository.inventories
        return versionedfile.OrderingVersionedFilesDecorator(vf, key_priority)
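
    # OrderingVersionedFilesDecorator records every get_record_stream call
    # in .calls and, for 'unordered' requests, re-sorts the underlying
    # records by the supplied key_priority map: lowest priority value first,
    # with keys missing from the map sorting to the front in key order.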

    def test_get_empty(self):
        vf = self.get_ordering_vf({})
        self.assertEqual([], vf.calls)

    def test_get_record_stream_topological(self):
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
            'topological', False)]
        # We should have gotten the keys in topological order
        self.assertEqual([('A',), ('B',), ('C',), ('D',)], keys)
        # And recorded that the request was made
        self.assertEqual([('get_record_stream', request_keys, 'topological',
            False)], vf.calls)

    def test_get_record_stream_ordered(self):
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
            'unordered', False)]
        # They should be returned based on their priority
        self.assertEqual([('D',), ('B',), ('A',), ('C',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
            False)], vf.calls)

    def test_get_record_stream_implicit_order(self):
        vf = self.get_ordering_vf({('B',): 2, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
            'unordered', False)]
        # A and C are not in the map, so they get sorted to the front. A comes
        # before C alphabetically, so it comes back first
        self.assertEqual([('A',), ('C',), ('D',), ('B',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',