# Copyright (C) 2005, 2009 Canonical Ltd
#
# Authors:
# Johan Rydberg <jrydberg@gnu.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA


# TODO: might be nice to create a versionedfile with some type of corruption
# considered typical and check that it can be detected/corrected.

from itertools import chain, izip
from StringIO import StringIO

from bzrlib import (
    errors,
    groupcompress,
    knit as _mod_knit,
    osutils,
    progress,
    )
from bzrlib.errors import (
    RevisionNotPresent,
    RevisionAlreadyPresent,
    WeaveParentMismatch,
    )
from bzrlib.knit import (
    cleanup_pack_knit,
    make_file_factory,
    make_pack_factory,
    KnitAnnotateFactory,
    KnitPlainFactory,
    )
from bzrlib.tests import (
    TestCase,
    TestCaseWithMemoryTransport,
    TestNotApplicable,
    TestSkipped,
    condition_isinstance,
    split_suite_by_condition,
    multiply_tests,
    )
from bzrlib.tests.http_utils import TestCaseWithWebserver
from bzrlib.trace import mutter
from bzrlib.transport import get_transport
from bzrlib.transport.memory import MemoryTransport
from bzrlib.tsort import topo_sort
from bzrlib.tuned_gzip import GzipFile
import bzrlib.versionedfile as versionedfile
from bzrlib.versionedfile import (
    ConstantMapper,
    HashEscapedPrefixMapper,
    PrefixMapper,
    VirtualVersionedFiles,
    make_versioned_files_factory,
    )
from bzrlib.weave import WeaveFile
from bzrlib.weavefile import read_weave, write_weave


def load_tests(standard_tests, module, loader):
    """Parameterize VersionedFiles tests for different implementations."""
    to_adapt, result = split_suite_by_condition(
        standard_tests, condition_isinstance(TestVersionedFiles))
    # We want to be sure of behaviour for:
    # weaves prefix layout (weave texts)
    # individually named weaves (weave inventories)
    # annotated knits - prefix|hash|hash-escape layout, we test the third only
    #     as it is the most complex mapper.
    # individually named knits
    # individual no-graph knits in packs (signatures)
    # individual graph knits in packs (inventories)
    # individual graph nocompression knits in packs (revisions)
    # plain text knits in packs (texts)
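    # Each scenario dict below supplies the attributes the parameterised
    # TestVersionedFiles tests read back: 'factory' builds the store on a
    # transport, 'cleanup' optionally tears it down, 'graph' says whether the
    # store records a parent graph, 'key_length' gives the key width, and
    # 'support_partial_insertion' flags resumable insertion support.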
    len_one_scenarios = [
        ('weave-named', {
            'cleanup':None,
            'factory':make_versioned_files_factory(WeaveFile,
                ConstantMapper('inventory')),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-knit', {
            'cleanup':None,
            'factory':make_file_factory(False, ConstantMapper('revisions')),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-nograph-nodelta-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(False, False, 1),
            'graph':False,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-graph-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, True, 1),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': True,
            }),
        ('named-graph-nodelta-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, False, 1),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('groupcompress-nograph', {
            'cleanup':groupcompress.cleanup_pack_group,
            'factory':groupcompress.make_pack_factory(False, False, 1),
            'graph': False,
            'key_length':1,
            'support_partial_insertion':False,
            }),
        ]
    len_two_scenarios = [
        ('weave-prefix', {
            'cleanup':None,
            'factory':make_versioned_files_factory(WeaveFile,
                PrefixMapper()),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': False,
            }),
        ('annotated-knit-escape', {
            'cleanup':None,
            'factory':make_file_factory(True, HashEscapedPrefixMapper()),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': False,
            }),
        ('plain-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, True, 2),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': True,
            }),
        ('groupcompress', {
            'cleanup':groupcompress.cleanup_pack_group,
            'factory':groupcompress.make_pack_factory(True, False, 1),
            'graph': True,
            'key_length':1,
            'support_partial_insertion':False,
            }),
        ]
    scenarios = len_one_scenarios + len_two_scenarios
    return multiply_tests(to_adapt, scenarios, result)


def get_diamond_vf(f, trailing_eol=True, left_only=False):
    """Get a diamond graph to exercise deltas and merges.

    :param trailing_eol: If True end the last line with \n.
    """
    parents = {
        'origin': (),
        'base': (('origin',),),
        'left': (('base',),),
        'right': (('base',),),
        'merged': (('left',), ('right',)),
        }
    # insert a diamond graph to exercise deltas and merges.
    if trailing_eol:
        last_char = '\n'
    else:
        last_char = ''
    f.add_lines('origin', [], ['origin' + last_char])
    f.add_lines('base', ['origin'], ['base' + last_char])
    f.add_lines('left', ['base'], ['base\n', 'left' + last_char])
    if not left_only:
        f.add_lines('right', ['base'],
            ['base\n', 'right' + last_char])
        f.add_lines('merged', ['left', 'right'],
            ['base\n', 'left\n', 'right\n', 'merged' + last_char])
    return f, parents
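
# The diamond helpers above and below build this ancestry ('merged' has two
# parents, exercising both delta chains and merges):
#
#      origin
#        |
#      base
#      /  \
#   left  right
#      \  /
#     merged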


def get_diamond_files(files, key_length, trailing_eol=True, left_only=False,
    nograph=False, nokeys=False):
    """Get a diamond graph to exercise deltas and merges.

    This creates a 5-node graph in files. If files supports 2-length keys two
    graphs are made to exercise the support for multiple ids.

    :param trailing_eol: If True end the last line with \n.
    :param key_length: The length of keys in files. Currently supports length 1
        and 2 keys.
    :param left_only: If True do not add the right and merged nodes.
    :param nograph: If True, do not provide parents to the add_lines calls;
        this is useful for tests that need inserted data but have graphless
        stores.
    :param nokeys: If True, pass None in as the key for all insertions.
        Currently implies nograph.
    :return: The results of the add_lines calls.
    """
    if nokeys:
        nograph = True
    if key_length == 1:
        prefixes = [()]
    else:
        prefixes = [('FileA',), ('FileB',)]
    # insert a diamond graph to exercise deltas and merges.
    if trailing_eol:
        last_char = '\n'
    else:
        last_char = ''
    result = []
    def get_parents(suffix_list):
        if nograph:
            return ()
        else:
            result = [prefix + suffix for suffix in suffix_list]
            return result
    def get_key(suffix):
        if nokeys:
            return (None, )
        else:
            return (suffix,)
    # we loop over each key because that spreads the inserts across prefixes,
    # which is how commit operates.
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('origin'), (),
            ['origin' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('base'),
            get_parents([('origin',)]), ['base' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('left'),
            get_parents([('base',)]),
            ['base\n', 'left' + last_char]))
    if not left_only:
        for prefix in prefixes:
            result.append(files.add_lines(prefix + get_key('right'),
                get_parents([('base',)]),
                ['base\n', 'right' + last_char]))
        for prefix in prefixes:
            result.append(files.add_lines(prefix + get_key('merged'),
                get_parents([('left',), ('right',)]),
                ['base\n', 'left\n', 'right\n', 'merged' + last_char]))
    return result


class VersionedFileTestMixIn(object):
    """A mixin test class for testing VersionedFiles.

    This is not an adaptor-style test at this point because
    there's no dynamic substitution of versioned file implementations,
    they are strictly controlled by their owning repositories.
    """

    def get_transaction(self):
        if not hasattr(self, '_transaction'):
            self._transaction = None
        return self._transaction

    def test_add(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
        def verify_file(f):
            versions = f.versions()
            self.assertTrue('r0' in versions)
            self.assertTrue('r1' in versions)
            self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
            self.assertEquals(f.get_text('r0'), 'a\nb\n')
            self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
            self.assertEqual(2, len(f))
            self.assertEqual(2, f.num_versions())

            self.assertRaises(RevisionNotPresent,
                f.add_lines, 'r2', ['foo'], [])
            self.assertRaises(RevisionAlreadyPresent,
                f.add_lines, 'r1', [], [])
        verify_file(f)
        # this checks that reopen with create=True does not break anything.
        f = self.reopen_file(create=True)
        verify_file(f)

    def test_adds_with_parent_texts(self):
        f = self.get_file()
        parent_texts = {}
        _, _, parent_texts['r0'] = f.add_lines('r0', [], ['a\n', 'b\n'])
        try:
            _, _, parent_texts['r1'] = f.add_lines_with_ghosts('r1',
                ['r0', 'ghost'], ['b\n', 'c\n'], parent_texts=parent_texts)
        except NotImplementedError:
            # if the format doesn't support ghosts, just add normally.
            _, _, parent_texts['r1'] = f.add_lines('r1',
                ['r0'], ['b\n', 'c\n'], parent_texts=parent_texts)
        f.add_lines('r2', ['r1'], ['c\n', 'd\n'], parent_texts=parent_texts)
        self.assertNotEqual(None, parent_texts['r0'])
        self.assertNotEqual(None, parent_texts['r1'])
        versions = f.versions()
        self.assertTrue('r0' in versions)
        self.assertTrue('r1' in versions)
        self.assertTrue('r2' in versions)
        self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
        self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
        self.assertEquals(f.get_lines('r2'), ['c\n', 'd\n'])
        self.assertEqual(3, f.num_versions())
        origins = f.annotate('r1')
        self.assertEquals(origins[0][0], 'r0')
        self.assertEquals(origins[1][0], 'r1')
        origins = f.annotate('r2')
        self.assertEquals(origins[0][0], 'r1')
        self.assertEquals(origins[1][0], 'r2')

        f = self.reopen_file()

    def test_add_unicode_content(self):
        # unicode content is not permitted in versioned files.
        # versioned files version sequences of bytes only.
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterUnicode,
            vf.add_lines, 'a', [], ['a\n', u'b\n', 'c\n'])
        self.assertRaises(
            (errors.BzrBadParameterUnicode, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n', u'b\n', 'c\n'])

    def test_add_follows_left_matching_blocks(self):
        """If we change left_matching_blocks, delta changes

        Note: There are multiple correct deltas in this case, because
        we start with 1 "a" and we get 3.
        """
        vf = self.get_file()
        if isinstance(vf, WeaveFile):
            raise TestSkipped("WeaveFile ignores left_matching_blocks")
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['a\n', 'a\n', 'a\n'],
                     left_matching_blocks=[(0, 0, 1), (1, 3, 0)])
        self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('2'))
        vf.add_lines('3', ['1'], ['a\n', 'a\n', 'a\n'],
                     left_matching_blocks=[(0, 2, 1), (1, 3, 0)])
        self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('3'))
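    # A note on the blocks above: each (i, j, n) triple says "n lines starting
    # at line i of the left parent match n lines starting at line j of the new
    # text" (the same convention as difflib's get_matching_blocks), with a
    # trailing zero-length sentinel.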

    def test_inline_newline_throws(self):
        # inline \n characters are not permitted in lines being added
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterContainsNewline,
            vf.add_lines, 'a', [], ['a\n\n'])
        self.assertRaises(
            (errors.BzrBadParameterContainsNewline, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n\n'])
        # but inline CR's are allowed
        vf.add_lines('a', [], ['a\r\n'])
        try:
            vf.add_lines_with_ghosts('b', [], ['a\r\n'])
        except NotImplementedError:
            pass

    def test_add_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId,
            vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])

    def test_add_lines_nostoresha(self):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                version + "2")

    def test_add_lines_with_ghosts_nostoresha(self):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        # is the test applicable to this vf implementation?
        try:
            vf.add_lines_with_ghosts('d', [], [])
        except NotImplementedError:
            raise TestSkipped("add_lines_with_ghosts is optional")
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines_with_ghosts, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                version + "2")

    def test_add_lines_return_value(self):
        # add_lines should return the sha1 and the text size.
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        # check results for the three cases:
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            # the first two elements are the same for all versioned files:
            # - the digest and the size of the text. For some versioned files
            #   additional data is returned in additional tuple elements.
            result = vf.add_lines(version, [], lines)
            self.assertEqual(3, len(result))
            self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
                result[0:2])
        # parents should not affect the result:
        lines = sample_text_nl[1]
        self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
            vf.add_lines('d', ['b', 'c'], lines)[0:2])

    def test_get_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId, vf.get_texts, ['b:'])
        self.assertRaises(errors.ReservedId, vf.get_lines, 'b:')
        self.assertRaises(errors.ReservedId, vf.get_text, 'b:')

    def test_add_unchanged_last_line_noeol_snapshot(self):
        """Add a text with an unchanged last line with no eol should work."""
        # Test adding this in a number of chain lengths; because the interface
        # for VersionedFile does not allow forcing a specific chain length, we
        # just use a small base to get the first snapshot, then a much longer
        # first line for the next add (which will make the third add snapshot)
        # and so on. 20 has been chosen as an arbitrary figure - knits use 200
        # as a capped delta length, but ideally we would have some way of
        # tuning the test to the store (e.g. keep going until a snapshot
        # happens).
        for length in range(20):
            version_lines = {}
            vf = self.get_file('case-%d' % length)
            prefix = 'step-%d'
            parents = []
            for step in range(length):
                version = prefix % step
                lines = (['prelude \n'] * step) + ['line']
                vf.add_lines(version, parents, lines)
                version_lines[version] = lines
                parents = [version]
            vf.add_lines('no-eol', parents, ['line'])
            vf.get_texts(version_lines.keys())
            self.assertEqualDiff('line', vf.get_text('no-eol'))

    def test_get_texts_eol_variation(self):
        # similar to the failure in <http://bugs.launchpad.net/234748>
        vf = self.get_file()
        sample_text_nl = ["line\n"]
        sample_text_no_nl = ["line"]
        versions = []
        version_lines = {}
        parents = []
        for i in range(4):
            version = 'v%d' % i
            if i % 2:
                lines = sample_text_nl
            else:
                lines = sample_text_no_nl
            # left_matching blocks is an internal api; it operates on the
            # *internal* representation for a knit, which is with *all* lines
            # being normalised to end with \n - even the final line in a no_nl
            # file. Using it here ensures that a broken internal implementation
            # (which is what this test tests) will generate a correct line
            # delta (which is to say, an empty delta).
            vf.add_lines(version, parents, lines,
                left_matching_blocks=[(0, 0, 1)])
            parents = [version]
            versions.append(version)
            version_lines[version] = lines

        vf.get_texts(versions)
        vf.get_texts(reversed(versions))

    def test_add_lines_with_matching_blocks_noeol_last_line(self):
        """Add a text with an unchanged last line with no eol should work."""
        from bzrlib import multiparent
        # Hand verified sha1 of the text we're adding.
        sha1 = '6a1d115ec7b60afb664dc14890b5af5ce3c827a4'
        # Create a mpdiff which adds a new line before the trailing line, and
        # reuse the last line unaltered (which can cause annotation reuse).
        # Test adding this in two situations:
        # On top of a new insertion
        vf = self.get_file('fulltext')
        vf.add_lines('noeol', [], ['line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
                     left_matching_blocks=[(0, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
        # On top of a delta
        vf = self.get_file('delta')
        vf.add_lines('base', [], ['line'])
        vf.add_lines('noeol', ['base'], ['prelude\n', 'line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
                     left_matching_blocks=[(1, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))

    def test_make_mpdiffs(self):
        from bzrlib import multiparent
        vf = self.get_file('foo')
        sha1s = self._setup_for_deltas(vf)
        new_vf = self.get_file('bar')
        for version in multiparent.topo_iter(vf):
            mpdiff = vf.make_mpdiffs([version])[0]
            new_vf.add_mpdiffs([(version, vf.get_parent_map([version])[version],
                                 vf.get_sha1s([version])[version], mpdiff)])
            self.assertEqualDiff(vf.get_text(version),
                                 new_vf.get_text(version))

    def test_make_mpdiffs_with_ghosts(self):
        vf = self.get_file('foo')
        try:
            vf.add_lines_with_ghosts('text', ['ghost'], ['line\n'])
        except NotImplementedError:
            # old Weave formats do not allow ghosts
            return
        self.assertRaises(errors.RevisionNotPresent, vf.make_mpdiffs, ['ghost'])

    def _setup_for_deltas(self, f):
        self.assertFalse(f.has_version('base'))
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 25 insertions
        f.add_lines('base', [], ['line\n'])
        f.add_lines('noeol', ['base'], ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        f.add_lines('noeolsecond', ['noeol'], ['line\n', 'line'])
        # differing last line with parent, both no-eol
        f.add_lines('noeolnotshared', ['noeolsecond'], ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        f.add_lines('eol', ['noeol'], ['phone\n'])
        # add eol following a noneol parent, no change content
        f.add_lines('eolline', ['noeol'], ['line\n'])
        # noeol with no parents:
        f.add_lines('noeolbase', [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        f.add_lines('eolbeforefirstparent', ['noeolbase', 'noeol'], ['line'])
        # two identical eol texts
        f.add_lines('noeoldup', ['noeol'], ['line'])
        next_parent = 'base'
        text_name = 'chain1-'
        text = ['line\n']
        sha1s = {0 :'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1 :'45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2 :'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3 :'26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4 :'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5 :'d63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6 :'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7 :'95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8 :'779e9a0b28f9f832528d4b21e17e168c67697272',
                 9 :'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10:'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11:'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12:'31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13:'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14:'2c4b1736566b8ca6051e668de68650686a3922f2',
                 15:'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16:'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17:'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18:'5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19:'1ebed371807ba5935958ad0884595126e8c4e823',
                 20:'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21:'01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22:'d8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23:'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24:'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25:'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = text_name + '%s' % depth
            text = text + ['line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        next_parent = 'base'
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = text_name + '%s' % depth
            text = text + ['line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        return sha1s

    def test_ancestry(self):
        f = self.get_file()
        self.assertEqual([], f.get_ancestry([]))
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
        f.add_lines('r2', ['r0'], ['b\n', 'c\n'])
        f.add_lines('r3', ['r2'], ['b\n', 'c\n'])
        f.add_lines('rM', ['r1', 'r2'], ['b\n', 'c\n'])
        self.assertEqual([], f.get_ancestry([]))
        versions = f.get_ancestry(['rM'])
        # there are some possibilities:
        # r0 r1 r2 rM
        # r0 r2 r1 rM
        # etc
        # so we check indexes
        r0 = versions.index('r0')
        r1 = versions.index('r1')
        r2 = versions.index('r2')
        self.assertFalse('r3' in versions)
        rM = versions.index('rM')
        self.assertTrue(r0 < r1)
        self.assertTrue(r0 < r2)
        self.assertTrue(r1 < rM)
        self.assertTrue(r2 < rM)

        self.assertRaises(RevisionNotPresent,
            f.get_ancestry, ['rM', 'rX'])

        self.assertEqual(set(f.get_ancestry('rM')),
            set(f.get_ancestry('rM', topo_sorted=False)))

    def test_mutate_after_finish(self):
        self._transaction = 'before'
        f = self.get_file()
        self._transaction = 'after'
        self.assertRaises(errors.OutSideTransaction, f.add_lines, '', [], [])
        self.assertRaises(errors.OutSideTransaction, f.add_lines_with_ghosts, '', [], [])

    def test_copy_to(self):
        f = self.get_file()
        f.add_lines('0', [], ['a\n'])
        t = MemoryTransport()
        f.copy_to('foo', t)
        for suffix in self.get_factory().get_suffixes():
            self.assertTrue(t.has('foo' + suffix))

    def test_get_suffixes(self):
        f = self.get_file()
        # and should be a list
        self.assertTrue(isinstance(self.get_factory().get_suffixes(), list))

    def test_get_parent_map(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        self.assertEqual(
            {'r0':()}, f.get_parent_map(['r0']))
        f.add_lines('r1', ['r0'], ['a\n', 'b\n'])
        self.assertEqual(
            {'r1':('r0',)}, f.get_parent_map(['r1']))
        self.assertEqual(
            {'r0':(),
             'r1':('r0',)},
            f.get_parent_map(['r0', 'r1']))
        f.add_lines('r2', [], ['a\n', 'b\n'])
        f.add_lines('r3', [], ['a\n', 'b\n'])
        f.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n'])
        self.assertEqual(
            {'m':('r0', 'r1', 'r2', 'r3')}, f.get_parent_map(['m']))
        self.assertEqual({}, f.get_parent_map('y'))
        self.assertEqual(
            {'r0':(),
             'r1':('r0',)},
            f.get_parent_map(['r0', 'y', 'r1']))

    def test_annotate(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
        origins = f.annotate('r1')
        self.assertEquals(origins[0][0], 'r1')
        self.assertEquals(origins[1][0], 'r0')

        self.assertRaises(RevisionNotPresent,
            f.annotate, 'foo')

    def test_detection(self):
        # Test weaves detect corruption.
        #
        # Weaves contain a checksum of their texts.
        # When a text is extracted, this checksum should be
        # verified.

        w = self.get_file_corrupted_text()

        self.assertEqual('hello\n', w.get_text('v1'))
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.check)

        w = self.get_file_corrupted_checksum()

        self.assertEqual('hello\n', w.get_text('v1'))
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.check)

    def get_file_corrupted_text(self):
        """Return a versioned file with corrupt text but valid metadata."""
        raise NotImplementedError(self.get_file_corrupted_text)

    def reopen_file(self, name='foo'):
        """Open the versioned file from disk again."""
        raise NotImplementedError(self.reopen_file)

    def test_iter_lines_added_or_present_in_versions(self):
        # test that we get at least an equalset of the lines added by
        # versions in the weave
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.DummyProgress):

            def __init__(self):
                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))
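        # Each update() call is recorded in self.updates, so the assertions
        # below can verify exactly how the progress bar was driven.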

        vf = self.get_file()
        # add a base to get included
        vf.add_lines('base', [], ['base\n'])
        # add an ancestor to be included on one side
        vf.add_lines('lancestor', [], ['lancestor\n'])
        # add an ancestor to be included on the other side
        vf.add_lines('rancestor', ['base'], ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        vf.add_lines('child', ['rancestor'], ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        vf.add_lines('otherchild',
                     ['lancestor', 'base'],
                     ['base\n', 'lancestor\n', 'otherchild\n'])

        def iter_with_versions(versions, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in vf.iter_lines_added_or_present_in_versions(versions,
                    pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines

        lines = iter_with_versions(['child', 'otherchild'],
                                   [('Walking content', 0, 2),
                                    ('Walking content', 1, 2),
                                    ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
        # we don't care if we got more than that.

        # test all lines
        lines = iter_with_versions(None, [('Walking content', 0, 5),
                                          ('Walking content', 1, 5),
                                          ('Walking content', 2, 5),
                                          ('Walking content', 3, 5),
                                          ('Walking content', 4, 5),
                                          ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', 'base')] > 0)
        self.assertTrue(lines[('lancestor\n', 'lancestor')] > 0)
        self.assertTrue(lines[('rancestor\n', 'rancestor')] > 0)
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)

    def test_add_lines_with_ghosts(self):
        # some versioned file formats allow lines to be added with parent
        # information that is > than that in the format. Formats that do
        # not support this need to raise NotImplementedError on the
        # add_lines_with_ghosts api.
        vf = self.get_file()
        # add a revision with ghost parents
        # The preferred form is utf8, but we should translate when needed
        parent_id_unicode = u'b\xbfse'
        parent_id_utf8 = parent_id_unicode.encode('utf8')
        try:
            vf.add_lines_with_ghosts('notbxbfse', [parent_id_utf8], [])
        except NotImplementedError:
            # check the other ghost apis are also not implemented
            self.assertRaises(NotImplementedError, vf.get_ancestry_with_ghosts, ['foo'])
            self.assertRaises(NotImplementedError, vf.get_parents_with_ghosts, 'foo')
            return
        vf = self.reopen_file()
        # test key graph related apis: get_ancestry, _graph, get_parents
        # - these are ghost unaware and must not reflect ghosts
        self.assertEqual(['notbxbfse'], vf.get_ancestry('notbxbfse'))
        self.assertFalse(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
        # if we add something that is a ghost of another, it should correct the
        # results of the prior apis
        vf.add_lines(parent_id_utf8, [], [])
        self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry(['notbxbfse']))
        self.assertEqual({'notbxbfse':(parent_id_utf8,)},
            vf.get_parent_map(['notbxbfse']))
        self.assertTrue(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'],
            vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))

    def test_add_lines_with_ghosts_after_normal_revs(self):
        # some versioned file formats allow lines to be added with parent
        # information that is > than that in the format. Formats that do
        # not support this need to raise NotImplementedError on the
        # add_lines_with_ghosts api.
        vf = self.get_file()
        # probe for ghost support
        try:
            vf.add_lines_with_ghosts('base', [], ['line\n', 'line_b\n'])
        except NotImplementedError:
            return
        vf.add_lines_with_ghosts('references_ghost',
                                 ['base', 'a_ghost'],
                                 ['line\n', 'line_b\n', 'line_c\n'])
        origins = vf.annotate('references_ghost')
        self.assertEquals(('base', 'line\n'), origins[0])
        self.assertEquals(('base', 'line_b\n'), origins[1])
        self.assertEquals(('references_ghost', 'line_c\n'), origins[2])

    def test_readonly_mode(self):
        transport = get_transport(self.get_url('.'))
        factory = self.get_factory()
        vf = factory('id', transport, 0777, create=True, access_mode='w')
        vf = factory('id', transport, access_mode='r')
        self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
        self.assertRaises(errors.ReadOnlyError,
                          vf.add_lines_with_ghosts,
                          'base',
                          [],
                          [])

    def test_get_sha1s(self):
        # check the sha1 data is available
        vf = self.get_file()
        # a simple file
        vf.add_lines('a', [], ['a\n'])
        # the same file, different metadata
        vf.add_lines('b', ['a'], ['a\n'])
        # a file differing only in last newline.
        vf.add_lines('c', [], ['a'])
        self.assertEqual({
            'a': '3f786850e387550fdab836ed7e6dc881de23001b',
            'c': '86f7e437faa5a7fce15d1ddcb9eaeaea377667b8',
            'b': '3f786850e387550fdab836ed7e6dc881de23001b',
            },
            vf.get_sha1s(['a', 'c', 'b']))
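        # Note that 'a' and 'b' share a digest above: sha1s are computed from
        # the text alone, so the same content with different parents hashes
        # identically.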


class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)

    def get_file_corrupted_text(self):
        w = WeaveFile('foo', get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)
        w.add_lines('v1', [], ['hello\n'])
        w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])

        # We are going to invasively corrupt the text
        # Make sure the internals of weave are the same
        self.assertEqual([('{', 0)
                        , 'hello\n'
                        , ('}', None)
                        , ('{', 1)
                        , 'there\n'
                        , ('}', None)
                        ], w._weave)

        self.assertEqual(['f572d396fae9206628714fb2ce00f72e94f2258f'
                        , '90f265c6e75f1c8f9ab76dcf85528352c5f215ef'
                        ], w._sha1s)
        w.check()

        # Corrupted
        w._weave[4] = 'There\n'
        return w

    def get_file_corrupted_checksum(self):
        w = self.get_file_corrupted_text()
        # Corrected
        w._weave[4] = 'there\n'
        self.assertEqual('hello\nthere\n', w.get_text('v2'))

        # Invalid checksum, first digit changed
        w._sha1s[1] = 'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
        return w

    def reopen_file(self, name='foo', create=False):
        return WeaveFile(name, get_transport(self.get_url('.')), create=create,
            get_scope=self.get_transaction)

    def test_no_implicit_create(self):
        self.assertRaises(errors.NoSuchFile,
                          WeaveFile,
                          'foo',
                          get_transport(self.get_url('.')),
                          get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile


class TestPlanMergeVersionedFile(TestCaseWithMemoryTransport):

    def setUp(self):
        TestCaseWithMemoryTransport.setUp(self)
        mapper = PrefixMapper()
        factory = make_file_factory(True, mapper)
        self.vf1 = factory(self.get_transport('root-1'))
        self.vf2 = factory(self.get_transport('root-2'))
        self.plan_merge_vf = versionedfile._PlanMergeVersionedFile('root')
        self.plan_merge_vf.fallback_versionedfiles.extend([self.vf1, self.vf2])

    def test_add_lines(self):
        self.plan_merge_vf.add_lines(('root', 'a:'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          ('root', 'a'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          ('root', 'a:'), None, [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          ('root', 'a:'), [], None)
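        # _PlanMergeVersionedFile reserves keys whose revision part ends with
        # ':' for temporary plan-merge results, and requires real parent and
        # line sequences - hence the ValueErrors above.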

    def setup_abcde(self):
        self.vf1.add_lines(('root', 'A'), [], ['a'])
        self.vf1.add_lines(('root', 'B'), [('root', 'A')], ['b'])
        self.vf2.add_lines(('root', 'C'), [], ['c'])
        self.vf2.add_lines(('root', 'D'), [('root', 'C')], ['d'])
        self.plan_merge_vf.add_lines(('root', 'E:'),
            [('root', 'B'), ('root', 'D')], ['e'])

    def test_get_parents(self):
        self.setup_abcde()
        self.assertEqual({('root', 'B'):(('root', 'A'),)},
            self.plan_merge_vf.get_parent_map([('root', 'B')]))
        self.assertEqual({('root', 'D'):(('root', 'C'),)},
            self.plan_merge_vf.get_parent_map([('root', 'D')]))
        self.assertEqual({('root', 'E:'):(('root', 'B'),('root', 'D'))},
            self.plan_merge_vf.get_parent_map([('root', 'E:')]))
        self.assertEqual({},
            self.plan_merge_vf.get_parent_map([('root', 'F')]))
        self.assertEqual({
                ('root', 'B'):(('root', 'A'),),
                ('root', 'D'):(('root', 'C'),),
                ('root', 'E:'):(('root', 'B'),('root', 'D')),
                },
            self.plan_merge_vf.get_parent_map(
                [('root', 'B'), ('root', 'D'), ('root', 'E:'), ('root', 'F')]))

    def test_get_record_stream(self):
        self.setup_abcde()
        def get_record(suffix):
            return self.plan_merge_vf.get_record_stream(
                [('root', suffix)], 'unordered', True).next()
        self.assertEqual('a', get_record('A').get_bytes_as('fulltext'))
        self.assertEqual('c', get_record('C').get_bytes_as('fulltext'))
        self.assertEqual('e', get_record('E:').get_bytes_as('fulltext'))
        self.assertEqual('absent', get_record('F').storage_kind)


class TestReadonlyHttpMixin(object):

    def get_transaction(self):
        return 1

    def test_readonly_http_works(self):
        # we should be able to read from http with a versioned file.
        vf = self.get_file()
        # try an empty file access
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
        self.assertEqual([], readonly_vf.versions())
        # now with feeling.
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
        self.assertEqual(['1', '2'], vf.versions())
        for version in readonly_vf.versions():
            readonly_vf.get_lines(version)


class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):

    def get_file(self):
        return WeaveFile('foo', get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile


class MergeCasesMixin(object):

    def doMerge(self, base, a, b, mp):
        from cStringIO import StringIO
        from textwrap import dedent

        def addcrlf(x):
            return x + '\n'

        w = self.get_file()
        w.add_lines('text0', [], map(addcrlf, base))
        w.add_lines('text1', ['text0'], map(addcrlf, a))
        w.add_lines('text2', ['text0'], map(addcrlf, b))

        self.log_contents(w)

        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))

        self.log('merge:')
        mt = StringIO()
        mt.writelines(w.weave_merge(p))
        mt.seek(0)
        self.log(mt.getvalue())

        mp = map(addcrlf, mp)
        self.assertEqual(mt.readlines(), mp)
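    # plan_merge yields (state, line) pairs, with states such as 'unchanged',
    # 'new-a', 'new-b', 'killed-a' and 'killed-b'; weave_merge folds the plan
    # back into text, emitting conflict markers where the sides disagree.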

    def testOneInsert(self):
        self.doMerge([],
                     ['aa'],
                     [],
                     ['aa'])

    def testSeparateInserts(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    def testSameInsert(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    overlappedInsertExpected = ['aaa', 'xxx', 'yyy', 'bbb']
    def testOverlappedInsert(self):
        self.doMerge(['aaa', 'bbb'],
                     ['aaa', 'xxx', 'yyy', 'bbb'],
                     ['aaa', 'xxx', 'bbb'], self.overlappedInsertExpected)

        # really it ought to reduce this to
        # ['aaa', 'xxx', 'yyy', 'bbb']

    def testClashReplace(self):
        self.doMerge(['aaa'],
                     ['xxx'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert1(self):
        self.doMerge(['aaa'],
                     ['xxx', 'aaa'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert2(self):
        self.doMerge(['aaa'],
                     ['aaa'],
                     ['xxx', 'aaa'],
                     ['xxx', 'aaa'])

    def testDeleteAndModify(self):
        """Clashing delete and modification.

        If one side modifies a region and the other deletes it then
        there should be a conflict with one side blank.
        """

        #######################################
        # skipped, not working yet
        return

        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'ddd', 'ccc'],
                     ['aaa', 'ccc'],
                     ['<<<<<<<< ', 'aaa', '=======', '>>>>>>> ', 'ccc'])

    def _test_merge_from_strings(self, base, a, b, expected):
        w = self.get_file()
        w.add_lines('text0', [], base.splitlines(True))
        w.add_lines('text1', ['text0'], a.splitlines(True))
        w.add_lines('text2', ['text0'], b.splitlines(True))
        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))
        self.log('merge result:')
        result_text = ''.join(w.weave_merge(p))
        self.log(result_text)
        self.assertEqualDiff(result_text, expected)

    def test_weave_merge_conflicts(self):
        # does weave merge properly handle plans that end with unchanged?
        result = ''.join(self.get_file().weave_merge([('new-a', 'hello\n')]))
        self.assertEqual(result, 'hello\n')

    def test_deletion_extended(self):
        """One side deletes, the other deletes more.
        """
        self._test_merge_from_strings(base, a, b, result)

    def test_deletion_overlap(self):
        """Delete overlapping regions with no other conflict.

        Arguably it'd be better to treat these as agreement, rather than
        conflict, but for now conflict is safer.
        """
        self._test_merge_from_strings(base, a, b, result)

    def test_agreement_deletion(self):
        """Agree to delete some lines, without conflicts."""
        self._test_merge_from_strings(base, a, b, result)

    def test_sync_on_deletion(self):
        """Specific case of merge where we can synchronize incorrectly.

        A previous version of the weave merge concluded that the two versions
        agreed on deleting line 2, and this could be a synchronization point.
        Line 1 was then considered in isolation, and thought to be deleted on
        both sides.

        It's better to consider the whole thing as a disagreement region.
        """
        base = """\
            start context
            base line 1
            base line 2
            end context
            """
        a = """\
            start context
            base line 1
            a's replacement line 2
            end context
            """
        b = """\
            start context
            b replaces
            both lines
            end context
            """
        result = """\
            start context
<<<<<<< 
            base line 1
            a's replacement line 2
=======
            b replaces
            both lines
>>>>>>> 
            end context
            """
        self._test_merge_from_strings(base, a, b, result)


class TestWeaveMerge(TestCaseWithMemoryTransport, MergeCasesMixin):

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True)

    def log_contents(self, w):
        self.log('weave is:')
        tmpf = StringIO()
        write_weave(w, tmpf)
        self.log(tmpf.getvalue())

    overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======',
                                'xxx', '>>>>>>> ', 'bbb']


class TestContentFactoryAdaption(TestCaseWithMemoryTransport):

    def test_select_adaptor(self):
        """Test expected adapters exist."""
        # One scenario for each lookup combination we expect to use.
        # Each is source_kind, requested_kind, adapter class
        scenarios = [
            ('knit-delta-gz', 'fulltext', _mod_knit.DeltaPlainToFullText),
            ('knit-ft-gz', 'fulltext', _mod_knit.FTPlainToFullText),
            ('knit-annotated-delta-gz', 'knit-delta-gz',
                _mod_knit.DeltaAnnotatedToUnannotated),
            ('knit-annotated-delta-gz', 'fulltext',
                _mod_knit.DeltaAnnotatedToFullText),
            ('knit-annotated-ft-gz', 'knit-ft-gz',
                _mod_knit.FTAnnotatedToUnannotated),
            ('knit-annotated-ft-gz', 'fulltext',
                _mod_knit.FTAnnotatedToFullText),
            ]
        for source, requested, klass in scenarios:
            adapter_factory = versionedfile.adapter_registry.get(
                (source, requested))
            adapter = adapter_factory(None)
            self.assertIsInstance(adapter, klass)
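        # The adapter registry is keyed by (source kind, requested kind)
        # pairs; each entry is a factory returning an adapter that converts
        # records from the source representation to the requested one.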

    def get_knit(self, annotated=True):
        mapper = ConstantMapper('knit')
        transport = self.get_transport()
        return make_file_factory(annotated, mapper)(transport)

    def helpGetBytes(self, f, ft_adapter, delta_adapter):
        """Grab the interesting adapted texts for tests."""
        # origin is a fulltext
        entries = f.get_record_stream([('origin',)], 'unordered', False)
        base = entries.next()
        ft_data = ft_adapter.get_bytes(base)
        # merged is both a delta and multiple parents.
        entries = f.get_record_stream([('merged',)], 'unordered', False)
        merged = entries.next()
        delta_data = delta_adapter.get_bytes(merged)
        return ft_data, delta_data

    def test_deannotation_noeol(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 4 32c2e79763b3f90e8ccde37f9710b6629c25a796\n'
            '1,2,3\nleft\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())

    def test_deannotation(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 3 ed8bce375198ea62444dc71952b22cfc2b09226d\n'
            '2,2,2\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())

    def test_annotated_to_fulltext_no_eol(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_annotated_to_fulltext(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_unannotated_to_fulltext(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_unannotated_to_fulltext_no_eol(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)


class TestKeyMapper(TestCaseWithMemoryTransport):
    """Tests for various key mapping logic."""

    def test_identity_mapper(self):
        mapper = versionedfile.ConstantMapper("inventory")
        self.assertEqual("inventory", mapper.map(('foo@ar',)))
        self.assertEqual("inventory", mapper.map(('quux',)))

    def test_prefix_mapper(self):
        #format5: plain
        mapper = versionedfile.PrefixMapper()
        self.assertEqual("file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("file-id"))
        self.assertEqual(('new-id',), mapper.unmap("new-id"))

    def test_hash_prefix_mapper(self):
        #format6: hash + plain
        mapper = versionedfile.HashPrefixMapper()
        self.assertEqual("9b/file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("45/new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("9b/file-id"))
        self.assertEqual(('new-id',), mapper.unmap("45/new-id"))
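        # The two-character prefix ('9b', '45') is a small hash of the file
        # id, used to spread files across subdirectories so that no single
        # directory grows too large.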

    def test_hash_escaped_mapper(self):
        #knit1: hash + escaped
        mapper = versionedfile.HashEscapedPrefixMapper()
        self.assertEqual("88/%2520", mapper.map((" ", "revision-id")))
        self.assertEqual("ed/fil%2545-%2549d", mapper.map(("filE-Id",
            "revision-id")))
        self.assertEqual("88/ne%2557-%2549d", mapper.map(("neW-Id",
            "revision-id")))
        self.assertEqual(('filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
        self.assertEqual(('neW-Id',), mapper.unmap("88/ne%2557-%2549d"))
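        # The escaping happens twice: unsafe characters are %-quoted first
        # ('E' -> '%45', ' ' -> '%20'), and the '%' itself is then escaped as
        # '%25', which is why 'filE-Id' maps to 'fil%2545-%2549d'.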


class TestVersionedFiles(TestCaseWithMemoryTransport):
    """Tests for the multiple-file variant of VersionedFile."""

    def get_versionedfiles(self, relpath='files'):
        transport = self.get_transport(relpath)
        if relpath != '.':
            transport.mkdir('.')
        files = self.factory(transport)
        if self.cleanup is not None:
            self.addCleanup(lambda:self.cleanup(files))
        return files

    def get_simple_key(self, suffix):
        """Return a key for the object under test."""
        if self.key_length == 1:
            return (suffix,)
        else:
            return ('FileA',) + (suffix,)

    def test_add_lines(self):
        f = self.get_versionedfiles()
        key0 = self.get_simple_key('r0')
        key1 = self.get_simple_key('r1')
        key2 = self.get_simple_key('r2')
        keyf = self.get_simple_key('foo')
        f.add_lines(key0, [], ['a\n', 'b\n'])
        if self.graph:
            f.add_lines(key1, [key0], ['b\n', 'c\n'])
        else:
            f.add_lines(key1, [], ['b\n', 'c\n'])
        keys = f.keys()
        self.assertTrue(key0 in keys)
        self.assertTrue(key1 in keys)
        records = []
        for record in f.get_record_stream([key0, key1], 'unordered', True):
            records.append((record.key, record.get_bytes_as('fulltext')))
        records.sort()
        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)

    def test__add_text(self):
        f = self.get_versionedfiles()
        key0 = self.get_simple_key('r0')
        key1 = self.get_simple_key('r1')
        key2 = self.get_simple_key('r2')
        keyf = self.get_simple_key('foo')
        f._add_text(key0, [], 'a\nb\n')
        if self.graph:
            f._add_text(key1, [key0], 'b\nc\n')
        else:
            f._add_text(key1, [], 'b\nc\n')
        keys = f.keys()
        self.assertTrue(key0 in keys)
        self.assertTrue(key1 in keys)
        records = []
        for record in f.get_record_stream([key0, key1], 'unordered', True):
            records.append((record.key, record.get_bytes_as('fulltext')))
        records.sort()
        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)

    def test_annotate(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            prefix = ()
        else:
            prefix = ('FileA',)
        # introduced full text
        origins = files.annotate(prefix + ('origin',))
        self.assertEqual([
            (prefix + ('origin',), 'origin\n')],
            origins)
        # a delta
        origins = files.annotate(prefix + ('base',))
        self.assertEqual([
            (prefix + ('base',), 'base\n')],
            origins)
        # a merge
        origins = files.annotate(prefix + ('merged',))
        if self.graph:
            self.assertEqual([
                (prefix + ('base',), 'base\n'),
                (prefix + ('left',), 'left\n'),
                (prefix + ('right',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        else:
            # Without a graph everything is new.
            self.assertEqual([
                (prefix + ('merged',), 'base\n'),
                (prefix + ('merged',), 'left\n'),
                (prefix + ('merged',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        self.assertRaises(RevisionNotPresent,
            files.annotate, prefix + ('missing-key',))

    def test_construct(self):
        """Each parameterised test can be constructed on a transport."""
        files = self.get_versionedfiles()

    def get_diamond_files(self, files, trailing_eol=True, left_only=False,
        nokeys=False):
        return get_diamond_files(files, self.key_length,
            trailing_eol=trailing_eol, nograph=not self.graph,
            left_only=left_only, nokeys=nokeys)

    def _add_content_nostoresha(self, add_lines):
        """When nostore_sha is supplied using old content raises."""
        vf = self.get_versionedfiles()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            if add_lines:
                sha, _, _ = vf.add_lines(self.get_simple_key(version), [],
                                         lines)
            else:
                sha, _, _ = vf._add_text(self.get_simple_key(version), [],
                                         ''.join(lines))
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            new_key = self.get_simple_key(version + "2")
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, new_key, [], lines,
                nostore_sha=sha)
            self.assertRaises(errors.ExistingContent,
                vf._add_text, new_key, [], ''.join(lines),
                nostore_sha=sha)
            # and no new version should have been added.
            record = vf.get_record_stream([new_key], 'unordered', True).next()
            self.assertEqual('absent', record.storage_kind)

    def test_add_lines_nostoresha(self):
        self._add_content_nostoresha(add_lines=True)

    def test__add_text_nostoresha(self):
        self._add_content_nostoresha(add_lines=False)

    def test_add_lines_return(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
        elif self.key_length == 2:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)

    def test_add_lines_no_key_generates_chk_key(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files, nokeys=True)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
            # Check the added items got CHK keys.
            self.assertEqual(set([
                ('sha1:00e364d235126be43292ab09cb4686cf703ddc17',),
                ('sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',),
                ('sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',),
                ('sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',),
                ('sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',),
                ]),
                files.keys())
        elif self.key_length == 2:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
            # Check the added items got CHK keys.
            self.assertEqual(set([
                ('FileA', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                ('FileA', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                ('FileA', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                ('FileA', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                ('FileA', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
                ('FileB', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                ('FileB', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                ('FileB', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                ('FileB', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                ('FileB', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
                ]),
                files.keys())

    def test_empty_lines(self):
        """Empty files can be stored."""
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], [])
        self.assertEqual('',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), [])
        self.assertEqual('',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))

    def test_newline_only(self):
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))

    def test_get_record_stream_empty(self):
        """An empty stream can be requested without error."""
        f = self.get_versionedfiles()
        entries = f.get_record_stream([], 'unordered', False)
        self.assertEqual([], list(entries))

    def assertValidStorageKind(self, storage_kind):
        """Assert that storage_kind is a valid storage_kind."""
        self.assertSubset([storage_kind],
            ['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
             'knit-ft', 'knit-delta', 'chunked', 'fulltext',
             'knit-annotated-ft-gz', 'knit-annotated-delta-gz', 'knit-ft-gz',
             'knit-delta-gz',
             'knit-delta-closure', 'knit-delta-closure-ref',
             'groupcompress-block', 'groupcompress-block-ref'])
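        # A storage kind names the serialized form a record can be delivered
        # in; consumers ask for the cheapest representation they understand
        # (e.g. a compressed knit delta, a chunked list, or a fulltext).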
1733
def capture_stream(self, f, entries, on_seen, parents):
1734
"""Capture a stream for testing."""
1735
for factory in entries:
1736
on_seen(factory.key)
1737
self.assertValidStorageKind(factory.storage_kind)
1738
if factory.sha1 is not None:
1739
self.assertEqual(f.get_sha1s([factory.key])[factory.key],
1741
self.assertEqual(parents[factory.key], factory.parents)
1742
self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
1745
def test_get_record_stream_interface(self):
1746
"""each item in a stream has to provide a regular interface."""
1747
files = self.get_versionedfiles()
1748
self.get_diamond_files(files)
1749
keys, _ = self.get_keys_and_sort_order()
1750
parent_map = files.get_parent_map(keys)
1751
entries = files.get_record_stream(keys, 'unordered', False)
1753
self.capture_stream(files, entries, seen.add, parent_map)
1754
self.assertEqual(set(keys), seen)
1756
def get_keys_and_sort_order(self):
1757
"""Get diamond test keys list, and their sort ordering."""
1758
if self.key_length == 1:
1759
keys = [('merged',), ('left',), ('right',), ('base',)]
1760
sort_order = {('merged',):2, ('left',):1, ('right',):1, ('base',):0}
1763
('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
1765
('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
1769
('FileA', 'merged'):2, ('FileA', 'left'):1, ('FileA', 'right'):1,
1770
('FileA', 'base'):0,
1771
('FileB', 'merged'):2, ('FileB', 'left'):1, ('FileB', 'right'):1,
1772
('FileB', 'base'):0,
1774
return keys, sort_order

    def get_keys_and_groupcompress_sort_order(self):
        """Get diamond test keys list, and their groupcompress sort ordering."""
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
            sort_order = {('merged',):0, ('left',):1, ('right',):1, ('base',):2}
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
            sort_order = {
                ('FileA', 'merged'):0, ('FileA', 'left'):1, ('FileA', 'right'):1,
                ('FileA', 'base'):2,
                ('FileB', 'merged'):3, ('FileB', 'left'):4, ('FileB', 'right'):4,
                ('FileB', 'base'):5,
                }
        return keys, sort_order
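
    # The "diamond" fixture these helpers describe has this shape per file:
    #
    #        base
    #        /  \
    #     left  right
    #        \  /
    #       merged
    #
    # so topological order emits 'base' first and 'merged' last, while
    # groupcompress order is roughly newest-first ('merged' before 'base'),
    # grouped by prefix when key_length > 1.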

    def test_get_record_stream_interface_ordered(self):
        """Each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', False)
        seen = []
        self.capture_stream(files, entries, seen.append, parent_map)
        self.assertStreamOrder(sort_order, seen, keys)

    def test_get_record_stream_interface_ordered_with_delta_closure(self):
        """Each item must be accessible as a fulltext."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', True)
        seen = []
        for factory in entries:
            seen.append(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertSubset([factory.sha1],
                [None, files.get_sha1s([factory.key])[factory.key]])
            self.assertEqual(parent_map[factory.key], factory.parents)
            # the fulltext and its chunked form must agree byte-for-byte.
            ft_bytes = factory.get_bytes_as('fulltext')
            self.assertIsInstance(ft_bytes, str)
            chunked_bytes = factory.get_bytes_as('chunked')
            self.assertEqualDiff(ft_bytes, ''.join(chunked_bytes))

        self.assertStreamOrder(sort_order, seen, keys)

    def test_get_record_stream_interface_groupcompress(self):
        """Each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_groupcompress_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'groupcompress', False)
        seen = []
        self.capture_stream(files, entries, seen.append, parent_map)
        self.assertStreamOrder(sort_order, seen, keys)

    def assertStreamOrder(self, sort_order, seen, keys):
        self.assertEqual(len(set(seen)), len(keys))
        if self.key_length == 1:
            lows = {():0}
        else:
            lows = {('FileA',):0, ('FileB',):0}
        if not self.graph:
            self.assertEqual(set(keys), set(seen))
        else:
            for key in seen:
                sort_pos = sort_order[key]
                self.assertTrue(sort_pos >= lows[key[:-1]],
                    "Out of order in sorted stream: %r, %r" % (key, seen))
                lows[key[:-1]] = sort_pos
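
    # For illustration: 'topological' only promises that a key is emitted
    # after its parents, so keys with equal sort positions may legally appear
    # in any relative order. The helper above therefore tracks a low-water
    # mark per prefix instead of asserting one exact sequence.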

    def test_get_record_stream_unknown_storage_kind_raises(self):
        """Asking for a storage kind that the stream cannot supply raises."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        # We track the contents because we should be able to try, fail a
        # particular kind and then ask for one that works and continue.
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            if factory.sha1 is not None:
                self.assertEqual(files.get_sha1s([factory.key])[factory.key],
                    factory.sha1)
            self.assertEqual(parent_map[factory.key], factory.parents)
            # currently no stream emits mpdiff
            self.assertRaises(errors.UnavailableRepresentation,
                factory.get_bytes_as, 'mpdiff')
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
        self.assertEqual(set(keys), seen)

    def test_get_record_stream_missing_records_are_absent(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('absent',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'absent'), ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'absent'), ('FileB', 'base'),
                ('absent', 'absent'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)
        entries = files.get_record_stream(keys, 'topological', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)

    def assertRecordHasContent(self, record, bytes):
        """Assert that record has the given bytes as its content."""
        self.assertEqual(bytes, record.get_bytes_as('fulltext'))
        self.assertEqual(bytes, ''.join(record.get_bytes_as('chunked')))
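
    # For illustration, 'chunked' is a list of byte strings whose
    # concatenation equals the 'fulltext' representation, so a consumer can
    # stream a large text without building one giant string:
    #
    #   chunks = record.get_bytes_as('chunked')
    #   assert ''.join(chunks) == record.get_bytes_as('fulltext')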

    def test_get_record_stream_native_formats_are_wire_ready_one_ft(self):
        files = self.get_versionedfiles()
        key = self.get_simple_key('foo')
        files.add_lines(key, (), ['my text\n', 'content'])
        stream = files.get_record_stream([key], 'unordered', False)
        record = stream.next()
        if record.storage_kind in ('chunked', 'fulltext'):
            # chunked and fulltext representations are for direct use, not
            # wire serialisation: check they can be used directly. To send
            # such records over the wire a translation will be needed.
            self.assertRecordHasContent(record, "my text\ncontent")
        else:
            bytes = [record.get_bytes_as(record.storage_kind)]
            network_stream = versionedfile.NetworkRecordStream(bytes).read()
            source_record = record
            records = []
            for record in network_stream:
                records.append(record)
                self.assertEqual(source_record.storage_kind,
                    record.storage_kind)
                self.assertEqual(source_record.parents, record.parents)
                self.assertEqual(
                    source_record.get_bytes_as(source_record.storage_kind),
                    record.get_bytes_as(record.storage_kind))
            self.assertEqual(1, len(records))

    def assertStreamMetaEqual(self, records, expected, stream):
        """Assert that streams expected and stream have the same records.

        :param records: A list to collect the seen records.
        :return: A generator of the records in stream.
        """
        # We make assertions during copying to catch things early for
        # easier debugging.
        for record, ref_record in izip(stream, expected):
            records.append(record)
            self.assertEqual(ref_record.key, record.key)
            self.assertEqual(ref_record.storage_kind, record.storage_kind)
            self.assertEqual(ref_record.parents, record.parents)
            yield record

    def stream_to_bytes_or_skip_counter(self, skipped_records, full_texts,
        stream):
        """Convert a stream to a bytes iterator.

        :param skipped_records: A list with one element to increment when a
            record is skipped.
        :param full_texts: A dict from key->fulltext representation, for
            checking chunked or fulltext stored records.
        :param stream: A record_stream.
        :return: An iterator over the bytes of each record.
        """
        for record in stream:
            if record.storage_kind in ('chunked', 'fulltext'):
                skipped_records[0] += 1
                # check the content is correct for direct use.
                self.assertRecordHasContent(record, full_texts[record.key])
            else:
                yield record.get_bytes_as(record.storage_kind)
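
    # For illustration, the wire round-trip exercised by the next few tests
    # is, in sketch:
    #
    #   payload = [r.get_bytes_as(r.storage_kind) for r in source_stream]
    #   rebuilt = versionedfile.NetworkRecordStream(payload).read()
    #
    # i.e. a record's native bytes can be shipped as-is and reconstituted on
    # the far side with the same key, parents and storage_kind.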

    def test_get_record_stream_native_formats_are_wire_ready_ft_delta(self):
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key, key_delta], 'unordered', False)
        ref = files.get_record_stream([key, key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key: "my text\ncontent",
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object so
        # we can check the content was carried across correctly without doing
        # delta inspection.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire thank you!
        self.assertEqual(2, len(records) + skipped_records[0])
        if len(records):
            # if any content was copied, it all must have been.
            self.assertIdenticalVersionedFile(files, target_files)

    def test_get_record_stream_native_formats_are_wire_ready_delta(self):
        # copy a delta over the wire
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        # Copy the basis text across so we can reconstruct the delta during
        # insertion into target.
        target_files.insert_record_stream(files.get_record_stream([key],
            'unordered', False))
        local = files.get_record_stream([key_delta], 'unordered', False)
        ref = files.get_record_stream([key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object so
        # we can check the content was carried across correctly without doing
        # delta inspection during check_stream.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire thank you!
        self.assertEqual(1, len(records) + skipped_records[0])
        if len(records):
            # if any content was copied, it all must have been.
            self.assertIdenticalVersionedFile(files, target_files)

    def test_get_record_stream_wire_ready_delta_closure_included(self):
        # copy a delta over the wire with the ability to get its full text.
        files = self.get_versionedfiles()
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key_delta], 'unordered', True)
        ref = files.get_record_stream([key_delta], 'unordered', True)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object so
        # we can check the content was carried across correctly without doing
        # delta inspection during check_stream.
        for record in self.assertStreamMetaEqual(records, ref, network_stream):
            # we have to be able to get the full text out:
            self.assertRecordHasContent(record, full_texts[record.key])
        # No duplicates on the wire thank you!
        self.assertEqual(1, len(records) + skipped_records[0])

    def assertAbsentRecord(self, files, keys, parents, entries):
        """Helper for test_get_record_stream_missing_records_are_absent."""
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            if factory.key[-1] == 'absent':
                self.assertEqual('absent', factory.storage_kind)
                self.assertEqual(None, factory.sha1)
                self.assertEqual(None, factory.parents)
            else:
                self.assertValidStorageKind(factory.storage_kind)
                if factory.sha1 is not None:
                    sha1 = files.get_sha1s([factory.key])[factory.key]
                    self.assertEqual(sha1, factory.sha1)
                self.assertEqual(parents[factory.key], factory.parents)
                self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                    str)
        self.assertEqual(set(keys), seen)
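
    # For illustration, missing keys are reported in-band rather than by
    # raising: the stream still yields one factory per requested key, with
    # storage_kind 'absent' and sha1/parents of None, so a caller can filter
    # them out (see versionedfile.filter_absent below) instead of wrapping
    # every stream in try/except.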

    def test_filter_absent_records(self):
        """Requested missing records can be filtered trivially."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        # Add an absent record in the middle of the present keys. (We don't
        # ask for just absent keys, to ensure that content before and after
        # the absent keys is still delivered.)
        present_keys = list(keys)
        if self.key_length == 1:
            keys.insert(2, ('extra',))
        else:
            keys.insert(2, ('extra', 'extra'))
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, versionedfile.filter_absent(entries),
            seen.add, parent_map)
        self.assertEqual(set(present_keys), seen)

    def get_mapper(self):
        """Get a mapper suitable for the key length of the test interface."""
        if self.key_length == 1:
            return ConstantMapper('source')
        else:
            return HashEscapedPrefixMapper()

    def get_parents(self, parents):
        """Get parents, taking self.graph into consideration."""
        if self.graph:
            return parents
        else:
            return None

    def test_get_parent_map(self):
        files = self.get_versionedfiles()
        if self.key_length == 1:
            parent_details = [
                (('r0',), self.get_parents(())),
                (('r1',), self.get_parents((('r0',),))),
                (('r2',), self.get_parents(())),
                (('r3',), self.get_parents(())),
                (('m',), self.get_parents((('r0',),('r1',),('r2',),('r3',)))),
                ]
        else:
            parent_details = [
                (('FileA', 'r0'), self.get_parents(())),
                (('FileA', 'r1'), self.get_parents((('FileA', 'r0'),))),
                (('FileA', 'r2'), self.get_parents(())),
                (('FileA', 'r3'), self.get_parents(())),
                (('FileA', 'm'), self.get_parents((('FileA', 'r0'),
                    ('FileA', 'r1'), ('FileA', 'r2'), ('FileA', 'r3')))),
                ]
        for key, parents in parent_details:
            files.add_lines(key, parents, [])
            # immediately after adding it should be queryable.
            self.assertEqual({key:parents}, files.get_parent_map([key]))
        # We can ask for an empty set
        self.assertEqual({}, files.get_parent_map([]))
        # We can ask for many keys
        all_parents = dict(parent_details)
        self.assertEqual(all_parents, files.get_parent_map(all_parents.keys()))
        # Absent keys are just not included in the result.
        keys = all_parents.keys()
        if self.key_length == 1:
            keys.insert(1, ('missing',))
        else:
            keys.insert(1, ('missing', 'missing'))
        # Absent keys are just ignored
        self.assertEqual(all_parents, files.get_parent_map(keys))
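
    # For illustration, get_parent_map() behaves like a dict lookup: absent
    # keys simply do not appear in the result, so membership doubles as the
    # presence check:
    #
    #   if key not in files.get_parent_map([key]):
    #       pass  # key is not stored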

    def test_get_sha1s(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('base',), ('origin',), ('left',), ('merged',), ('right',)]
        else:
            # ask for shas from different prefixes.
            keys = [
                ('FileA', 'base'), ('FileB', 'origin'), ('FileA', 'left'),
                ('FileA', 'merged'), ('FileB', 'right'),
                ]
        self.assertEqual({
            keys[0]: '51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
            keys[1]: '00e364d235126be43292ab09cb4686cf703ddc17',
            keys[2]: 'a8478686da38e370e32e42e8a0c220e33ee9132f',
            keys[3]: 'ed8bce375198ea62444dc71952b22cfc2b09226d',
            keys[4]: '9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
            },
            files.get_sha1s(keys))

    def test_insert_record_stream_empty(self):
        """Inserting an empty record stream should work."""
        files = self.get_versionedfiles()
        files.insert_record_stream([])

    def assertIdenticalVersionedFile(self, expected, actual):
        """Assert that left and right have the same contents."""
        self.assertEqual(set(actual.keys()), set(expected.keys()))
        actual_parents = actual.get_parent_map(actual.keys())
        if self.graph:
            self.assertEqual(actual_parents,
                expected.get_parent_map(expected.keys()))
        else:
            for key, parents in actual_parents.items():
                self.assertEqual(None, parents)
        for key in actual.keys():
            actual_text = actual.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            expected_text = expected.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            self.assertEqual(actual_text, expected_text)

    def test_insert_record_stream_fulltexts(self):
        """Any file should accept a stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_fulltexts_noeol(self):
        """Any file should accept a stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_annotated_knits(self):
        """Any file should accept a stream from annotated knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_annotated_knits_noeol(self):
        """Any file should accept a stream from annotated knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_plain_knits(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_plain_knits_noeol(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_existing_keys(self):
        """Inserting keys already in a file should not error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        # insert some keys into files.
        self.get_diamond_files(files, left_only=True)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_missing_keys(self):
        """Inserting a stream with absent keys should raise an error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        stream = source.get_record_stream([('missing',) * self.key_length],
            'topological', False)
        self.assertRaises(errors.RevisionNotPresent,
            files.insert_record_stream, stream)

    def test_insert_record_stream_out_of_order(self):
        """An out of order stream can either error or work."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        if self.key_length == 1:
            origin_keys = [('origin',)]
            end_keys = [('merged',), ('left',)]
            start_keys = [('right',), ('base',)]
        else:
            origin_keys = [('FileA', 'origin'), ('FileB', 'origin')]
            end_keys = [('FileA', 'merged',), ('FileA', 'left',),
                ('FileB', 'merged',), ('FileB', 'left',)]
            start_keys = [('FileA', 'right',), ('FileA', 'base',),
                ('FileB', 'right',), ('FileB', 'base',)]
        origin_entries = source.get_record_stream(origin_keys, 'unordered', False)
        end_entries = source.get_record_stream(end_keys, 'topological', False)
        start_entries = source.get_record_stream(start_keys, 'topological', False)
        entries = chain(origin_entries, end_entries, start_entries)
        try:
            files.insert_record_stream(entries)
        except RevisionNotPresent:
            # Must not have corrupted the file.
            files.check()
        else:
            self.assertIdenticalVersionedFile(source, files)

    def get_knit_delta_source(self):
        """Get a source that can produce a stream with knit delta records,
        regardless of this test's scenario.
        """
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        get_diamond_files(source, self.key_length, trailing_eol=True,
            nograph=False, left_only=False)
        return source

    def test_insert_record_stream_delta_missing_basis_no_corruption(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis.  In the meantime a record missing its basis is
        not added.
        """
        source = self.get_knit_delta_source()
        keys = [self.get_simple_key('origin'), self.get_simple_key('merged')]
        entries = source.get_record_stream(keys, 'unordered', False)
        files = self.get_versionedfiles()
        if self.support_partial_insertion:
            self.assertEqual([],
                list(files.get_missing_compression_parent_keys()))
            files.insert_record_stream(entries)
            missing_bases = files.get_missing_compression_parent_keys()
            self.assertEqual(set([self.get_simple_key('left')]),
                set(missing_bases))
            self.assertEqual(set(keys), set(files.get_parent_map(keys)))
        else:
            self.assertRaises(
                errors.RevisionNotPresent, files.insert_record_stream, entries)
            files.check()
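
    # For illustration, the partial-insertion protocol exercised by this test
    # and the next one is, in sketch:
    #
    #   files.insert_record_stream(entries)   # some deltas may lack a basis
    #   missing = files.get_missing_compression_parent_keys()
    #   files.insert_record_stream(           # supply the bases later
    #       source.get_record_stream(missing, 'unordered', True))
    #
    # after which get_missing_compression_parent_keys() is empty again.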

    def test_insert_record_stream_delta_missing_basis_can_be_added_later(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis.  That basis can be added in a second
        insert_record_stream call that does not need to repeat records present
        in the previous stream.  The record(s) that required that basis are
        fully inserted once their basis is no longer missing.
        """
        if not self.support_partial_insertion:
            raise TestNotApplicable(
                'versioned file scenario does not support partial insertion')
        source = self.get_knit_delta_source()
        entries = source.get_record_stream([self.get_simple_key('origin'),
            self.get_simple_key('merged')], 'unordered', False)
        files = self.get_versionedfiles()
        files.insert_record_stream(entries)
        missing_bases = files.get_missing_compression_parent_keys()
        self.assertEqual(set([self.get_simple_key('left')]),
            set(missing_bases))
        # 'merged' is inserted (although a commit of a write group involving
        # this versionedfiles would fail).
        merged_key = self.get_simple_key('merged')
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())
        # Add the full delta closure of the missing records
        missing_entries = source.get_record_stream(
            missing_bases, 'unordered', True)
        files.insert_record_stream(missing_entries)
        # Now 'merged' is fully inserted (and a commit would succeed).
        self.assertEqual([], list(files.get_missing_compression_parent_keys()))
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())
        files.check()

    def test_iter_lines_added_or_present_in_keys(self):
        # test that we get at least an equalset of the lines added by
        # versions in the store.
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.DummyProgress):

            def __init__(self):
                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        files = self.get_versionedfiles()
        # add a base to get included
        files.add_lines(self.get_simple_key('base'), (), ['base\n'])
        # add an ancestor to be included on one side
        files.add_lines(self.get_simple_key('lancestor'), (), ['lancestor\n'])
        # add an ancestor to be included on the other side
        files.add_lines(self.get_simple_key('rancestor'),
            self.get_parents([self.get_simple_key('base')]), ['rancestor\n'])
        # add a child of rancestor with no end-of-file newline
        files.add_lines(self.get_simple_key('child'),
            self.get_parents([self.get_simple_key('rancestor')]),
            ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        files.add_lines(self.get_simple_key('otherchild'),
            self.get_parents([self.get_simple_key('lancestor'),
                self.get_simple_key('base')]),
            ['base\n', 'lancestor\n', 'otherchild\n'])
        def iter_with_keys(keys, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in files.iter_lines_added_or_present_in_keys(keys,
                pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines
        lines = iter_with_keys(
            [self.get_simple_key('child'), self.get_simple_key('otherchild')],
            [('Walking content', 0, 2),
             ('Walking content', 1, 2),
             ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
        # we don't care if we got more than that.

        # test all lines
        lines = iter_with_keys(files.keys(),
            [('Walking content', 0, 5),
             ('Walking content', 1, 5),
             ('Walking content', 2, 5),
             ('Walking content', 3, 5),
             ('Walking content', 4, 5),
             ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', self.get_simple_key('base'))] > 0)
        self.assertTrue(
            lines[('lancestor\n', self.get_simple_key('lancestor'))] > 0)
        self.assertTrue(
            lines[('rancestor\n', self.get_simple_key('rancestor'))] > 0)
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)

    def test_make_mpdiffs(self):
        from bzrlib import multiparent
        files = self.get_versionedfiles('source')
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 26 insertions
        files.add_lines(self.get_simple_key('base'), [], ['line\n'])
        files.add_lines(self.get_simple_key('noeol'),
            self.get_parents([self.get_simple_key('base')]), ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        files.add_lines(self.get_simple_key('noeolsecond'),
            self.get_parents([self.get_simple_key('noeol')]),
            ['line\n', 'line'])
        # differing last line with parent, both no-eol
        files.add_lines(self.get_simple_key('noeolnotshared'),
            self.get_parents([self.get_simple_key('noeolsecond')]),
            ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        files.add_lines(self.get_simple_key('eol'),
            self.get_parents([self.get_simple_key('noeol')]), ['phone\n'])
        # add eol following a noneol parent, no change content
        files.add_lines(self.get_simple_key('eolline'),
            self.get_parents([self.get_simple_key('noeol')]), ['line\n'])
        # noeol with no parents:
        files.add_lines(self.get_simple_key('noeolbase'), [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        files.add_lines(self.get_simple_key('eolbeforefirstparent'),
            self.get_parents([self.get_simple_key('noeolbase'),
                self.get_simple_key('noeol')]),
            ['line'])
        # two identical no-eol texts
        files.add_lines(self.get_simple_key('noeoldup'),
            self.get_parents([self.get_simple_key('noeol')]), ['line'])
        next_parent = self.get_simple_key('base')
        text_name = 'chain1-'
        text = ['line\n']
        sha1s = {0 :'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1 :'45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2 :'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3 :'26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4 :'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5 :'d63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6 :'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7 :'95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8 :'779e9a0b28f9f832528d4b21e17e168c67697272',
                 9 :'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10:'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11:'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12:'31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13:'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14:'2c4b1736566b8ca6051e668de68650686a3922f2',
                 15:'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16:'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17:'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18:'5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19:'1ebed371807ba5935958ad0884595126e8c4e823',
                 20:'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21:'01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22:'d8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23:'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24:'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25:'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        next_parent = self.get_simple_key('base')
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        target = self.get_versionedfiles('target')
        for key in multiparent.topo_iter_keys(files, files.keys()):
            mpdiff = files.make_mpdiffs([key])[0]
            parents = files.get_parent_map([key])[key] or []
            target.add_mpdiffs(
                [(key, parents, files.get_sha1s([key])[key], mpdiff)])
            self.assertEqualDiff(
                files.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'),
                target.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext')
                )
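
    # For illustration, the mpdiff round-trip exercised above is, in sketch:
    #
    #   mpdiff = files.make_mpdiffs([key])[0]
    #   target.add_mpdiffs(
    #       [(key, parents, files.get_sha1s([key])[key], mpdiff)])
    #
    # i.e. a multi-parent diff plus the expected sha1 is enough for another
    # store (whose parents are already present) to reconstruct the identical
    # fulltext.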

    def test_keys(self):
        # While use is discouraged, versions() is still needed by aspects of
        # bzr.
        files = self.get_versionedfiles()
        self.assertEqual(set(), set(files.keys()))
        if self.key_length == 1:
            key = ('foo',)
        else:
            key = ('foo', 'bar',)
        files.add_lines(key, (), [])
        self.assertEqual(set([key]), set(files.keys()))


class VirtualVersionedFilesTests(TestCase):
    """Basic tests for the VirtualVersionedFiles implementations."""

    def _get_parent_map(self, keys):
        ret = {}
        for k in keys:
            if k in self._parent_map:
                ret[k] = self._parent_map[k]
        return ret

    def setUp(self):
        TestCase.setUp(self)
        self._lines = {}
        self._parent_map = {}
        self.texts = VirtualVersionedFiles(self._get_parent_map,
                                           self._lines.get)

    def test_add_lines(self):
        self.assertRaises(NotImplementedError,
            self.texts.add_lines, "foo", [], [])

    def test_add_mpdiffs(self):
        self.assertRaises(NotImplementedError,
            self.texts.add_mpdiffs, [])

    def test_check(self):
        self.assertTrue(self.texts.check())

    def test_insert_record_stream(self):
        self.assertRaises(NotImplementedError,
            self.texts.insert_record_stream, [])

    def test_get_sha1s_nonexistent(self):
        self.assertEquals({}, self.texts.get_sha1s([("NONEXISTENT",)]))

    def test_get_sha1s(self):
        self._lines["key"] = ["dataline1", "dataline2"]
        self.assertEquals({("key",): osutils.sha_strings(self._lines["key"])},
            self.texts.get_sha1s([("key",)]))

    def test_get_parent_map(self):
        self._parent_map = {"G": ("A", "B")}
        self.assertEquals({("G",): (("A",), ("B",))},
            self.texts.get_parent_map([("G",), ("L",)]))

    def test_get_record_stream(self):
        self._lines["A"] = ["FOO", "BAR"]
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("chunked", record.storage_kind)
        self.assertEquals("FOOBAR", record.get_bytes_as("fulltext"))
        self.assertEquals(["FOO", "BAR"], record.get_bytes_as("chunked"))

    def test_get_record_stream_absent(self):
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("absent", record.storage_kind)

    def test_iter_lines_added_or_present_in_keys(self):
        self._lines["A"] = ["FOO", "BAR"]
        self._lines["B"] = ["HEY"]
        self._lines["C"] = ["Alberta"]
        it = self.texts.iter_lines_added_or_present_in_keys([("A",), ("B",)])
        self.assertEquals(sorted([("FOO", "A"), ("BAR", "A"), ("HEY", "B")]),
            sorted(list(it)))


class TestOrderingVersionedFilesDecorator(TestCaseWithMemoryTransport):

    def get_ordering_vf(self, key_priority):
        builder = self.make_branch_builder('test')
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'TREE_ROOT', 'directory', None))])
        builder.build_snapshot('B', ['A'], [])
        builder.build_snapshot('C', ['B'], [])
        builder.build_snapshot('D', ['C'], [])
        builder.finish_series()
        b = builder.get_branch()
        b.lock_read()
        self.addCleanup(b.unlock)
        vf = b.repository.inventories
        return versionedfile.OrderingVersionedFilesDecorator(vf, key_priority)
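
    # For illustration, the decorator built above wraps a real VersionedFiles
    # (the repository's inventories) so that 'unordered' get_record_stream
    # requests come back sorted by the supplied key_priority map (lowest
    # priority first, unknown keys sorted to the front), while vf.calls
    # records each request for later assertions.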

    def test_get_empty(self):
        vf = self.get_ordering_vf({})
        self.assertEqual([], vf.calls)

    def test_get_record_stream_topological(self):
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
            'topological', False)]
        # We should have gotten the keys in topological order
        self.assertEqual([('A',), ('B',), ('C',), ('D',)], keys)
        # And recorded that the request was made
        self.assertEqual([('get_record_stream', request_keys, 'topological',
            False)], vf.calls)

    def test_get_record_stream_ordered(self):
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
            'unordered', False)]
        # They should be returned based on their priority
        self.assertEqual([('D',), ('B',), ('A',), ('C',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
            False)], vf.calls)

    def test_get_record_stream_implicit_order(self):
        vf = self.get_ordering_vf({('B',): 2, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
            'unordered', False)]
        # A and C are not in the map, so they get sorted to the front. A comes
        # before C alphabetically, so it comes back first.
        self.assertEqual([('A',), ('C',), ('D',), ('B',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
            False)], vf.calls)