# Copyright (C) 2005, 2009 Canonical Ltd
#
# Authors:
#   Johan Rydberg <jrydberg@gnu.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

# TODO: might be nice to create a versionedfile with some type of corruption
# considered typical and check that it can be detected/corrected.

from itertools import chain, izip
from StringIO import StringIO

from bzrlib import (
    errors,
    graph as _mod_graph,
    groupcompress,
    knit as _mod_knit,
    osutils,
    progress,
    )
from bzrlib.errors import (
    RevisionNotPresent,
    RevisionAlreadyPresent,
    WeaveParentMismatch
    )
from bzrlib.knit import (
    cleanup_pack_knit,
    make_file_factory,
    make_pack_factory,
    KnitAnnotateFactory,
    KnitPlainFactory,
    )
from bzrlib.tests import (
    TestCase,
    TestCaseWithMemoryTransport,
    TestNotApplicable,
    TestSkipped,
    condition_isinstance,
    split_suite_by_condition,
    multiply_tests,
    )
from bzrlib.tests.http_utils import TestCaseWithWebserver
from bzrlib.trace import mutter
from bzrlib.transport import get_transport
from bzrlib.transport.memory import MemoryTransport
from bzrlib.tsort import topo_sort
from bzrlib.tuned_gzip import GzipFile
import bzrlib.versionedfile as versionedfile
from bzrlib.versionedfile import (
    ConstantMapper,
    HashEscapedPrefixMapper,
    PrefixMapper,
    VirtualVersionedFiles,
    make_versioned_files_factory,
    )
from bzrlib.weave import WeaveFile
from bzrlib.weavefile import read_weave, write_weave


def load_tests(standard_tests, module, loader):
    """Parameterize VersionedFiles tests for different implementations."""
    to_adapt, result = split_suite_by_condition(
        standard_tests, condition_isinstance(TestVersionedFiles))
    # We want to be sure of behaviour for:
    # weaves prefix layout (weave texts)
    # individually named weaves (weave inventories)
    # annotated knits - prefix|hash|hash-escape layout, we test the third only
    #   as it is the most complex mapper.
    # individually named knits
    # individual no-graph knits in packs (signatures)
    # individual graph knits in packs (inventories)
    # individual graph nocompression knits in packs (revisions)
    # plain text knits in packs (texts)
    len_one_scenarios = [
        ('weave-named', {
            'cleanup':None,
            'factory':make_versioned_files_factory(WeaveFile,
                ConstantMapper('inventory')),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-knit', {
            'cleanup':None,
            'factory':make_file_factory(False, ConstantMapper('revisions')),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-nograph-nodelta-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(False, False, 1),
            'graph':False,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-graph-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, True, 1),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': True,
            }),
        ('named-graph-nodelta-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, False, 1),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('groupcompress-nograph', {
            'cleanup':groupcompress.cleanup_pack_group,
            'factory':groupcompress.make_pack_factory(False, False, 1),
            'graph': False,
            'key_length':1,
            'support_partial_insertion':False,
            }),
        ]
    len_two_scenarios = [
        ('weave-prefix', {
            'cleanup':None,
            'factory':make_versioned_files_factory(WeaveFile,
                PrefixMapper()),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': False,
            }),
        ('annotated-knit-escape', {
            'cleanup':None,
            'factory':make_file_factory(True, HashEscapedPrefixMapper()),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': False,
            }),
        ('plain-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, True, 2),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': True,
            }),
        ('groupcompress', {
            'cleanup':groupcompress.cleanup_pack_group,
            'factory':groupcompress.make_pack_factory(True, False, 1),
            'graph': True,
            'key_length':1,
            'support_partial_insertion':False,
            }),
        ]
    scenarios = len_one_scenarios + len_two_scenarios
    return multiply_tests(to_adapt, scenarios, result)
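
# A note on the parameterisation above (assuming multiply_tests behaves as in
# bzrlib.tests): each (name, dict) scenario is applied to every
# TestVersionedFiles test, with the dict keys becoming instance attributes,
# so test bodies can consult e.g. self.factory, self.cleanup, self.graph,
# self.key_length and self.support_partial_insertion for the variant under
# test.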


def get_diamond_vf(f, trailing_eol=True, left_only=False):
    """Get a diamond graph to exercise deltas and merges.

    :param trailing_eol: If True end the last line with \n.
    """
    parents = {
        'origin': (),
        'base': (('origin',),),
        'left': (('base',),),
        'right': (('base',),),
        'merged': (('left',), ('right',)),
        }
    # insert a diamond graph to exercise deltas and merges.
    if trailing_eol:
        last_char = '\n'
    else:
        last_char = ''
    f.add_lines('origin', [], ['origin' + last_char])
    f.add_lines('base', ['origin'], ['base' + last_char])
    f.add_lines('left', ['base'], ['base\n', 'left' + last_char])
    if not left_only:
        f.add_lines('right', ['base'],
            ['base\n', 'right' + last_char])
        f.add_lines('merged', ['left', 'right'],
            ['base\n', 'left\n', 'right\n', 'merged' + last_char])
    return f, parents
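
# Both diamond helpers build this ancestry graph (read off the add_lines
# calls above; 'origin' is the root):
#
#      origin
#        |
#       base
#       /  \
#    left  right
#       \  /
#      merged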


def get_diamond_files(files, key_length, trailing_eol=True, left_only=False,
    nograph=False, nokeys=False):
    """Get a diamond graph to exercise deltas and merges.

    This creates a 5-node graph in files. If files supports 2-length keys two
    graphs are made to exercise the support for multiple ids.

    :param trailing_eol: If True end the last line with \n.
    :param key_length: The length of keys in files. Currently supports length 1
        and 2 keys.
    :param left_only: If True do not add the right and merged nodes.
    :param nograph: If True, do not provide parents to the add_lines calls;
        this is useful for tests that need inserted data but have graphless
        stores.
    :param nokeys: If True, pass None in as the key for all insertions.
        Currently implies nograph.
    :return: The results of the add_lines calls.
    """
    if nokeys:
        nograph = True
    if key_length == 1:
        prefixes = [()]
    else:
        prefixes = [('FileA',), ('FileB',)]
    # insert a diamond graph to exercise deltas and merges.
    if trailing_eol:
        last_char = '\n'
    else:
        last_char = ''
    result = []
    def get_parents(suffix_list):
        if nograph:
            return ()
        else:
            result = [prefix + suffix for suffix in suffix_list]
            return result
    def get_key(suffix):
        if nokeys:
            return (None, )
        else:
            return (suffix,)
    # we loop over each key because that spreads the inserts across prefixes,
    # which is how commit operates.
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('origin'), (),
            ['origin' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('base'),
            get_parents([('origin',)]), ['base' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('left'),
            get_parents([('base',)]),
            ['base\n', 'left' + last_char]))
    if not left_only:
        for prefix in prefixes:
            result.append(files.add_lines(prefix + get_key('right'),
                get_parents([('base',)]),
                ['base\n', 'right' + last_char]))
        for prefix in prefixes:
            result.append(files.add_lines(prefix + get_key('merged'),
                get_parents([('left',), ('right',)]),
                ['base\n', 'left\n', 'right\n', 'merged' + last_char]))
    return result


class VersionedFileTestMixIn(object):
    """A mixin test class for testing VersionedFiles.

    This is not an adaptor-style test at this point because
    there's no dynamic substitution of versioned file implementations,
    they are strictly controlled by their owning repositories.
    """

    def get_transaction(self):
        if not hasattr(self, '_transaction'):
            self._transaction = None
        return self._transaction

    def test_add(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
        def verify_file(f):
            versions = f.versions()
            self.assertTrue('r0' in versions)
            self.assertTrue('r1' in versions)
            self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
            self.assertEquals(f.get_text('r0'), 'a\nb\n')
            self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
            self.assertEqual(2, len(f))
            self.assertEqual(2, f.num_versions())

            self.assertRaises(RevisionNotPresent,
                f.add_lines, 'r2', ['foo'], [])
            self.assertRaises(RevisionAlreadyPresent,
                f.add_lines, 'r1', [], [])
        verify_file(f)
        # this checks that reopen with create=True does not break anything.
        f = self.reopen_file(create=True)
        verify_file(f)

    def test_adds_with_parent_texts(self):
        f = self.get_file()
        parent_texts = {}
        _, _, parent_texts['r0'] = f.add_lines('r0', [], ['a\n', 'b\n'])
        try:
            _, _, parent_texts['r1'] = f.add_lines_with_ghosts('r1',
                ['r0', 'ghost'], ['b\n', 'c\n'], parent_texts=parent_texts)
        except NotImplementedError:
            # if the format doesn't support ghosts, just add normally.
            _, _, parent_texts['r1'] = f.add_lines('r1',
                ['r0'], ['b\n', 'c\n'], parent_texts=parent_texts)
        f.add_lines('r2', ['r1'], ['c\n', 'd\n'], parent_texts=parent_texts)
        self.assertNotEqual(None, parent_texts['r0'])
        self.assertNotEqual(None, parent_texts['r1'])
        def verify_file(f):
            versions = f.versions()
            self.assertTrue('r0' in versions)
            self.assertTrue('r1' in versions)
            self.assertTrue('r2' in versions)
            self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
            self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
            self.assertEquals(f.get_lines('r2'), ['c\n', 'd\n'])
            self.assertEqual(3, f.num_versions())
            origins = f.annotate('r1')
            self.assertEquals(origins[0][0], 'r0')
            self.assertEquals(origins[1][0], 'r1')
            origins = f.annotate('r2')
            self.assertEquals(origins[0][0], 'r1')
            self.assertEquals(origins[1][0], 'r2')
        verify_file(f)
        f = self.reopen_file()
        verify_file(f)

    def test_add_unicode_content(self):
        # unicode content is not permitted in versioned files.
        # versioned files version sequences of bytes only.
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterUnicode,
            vf.add_lines, 'a', [], ['a\n', u'b\n', 'c\n'])
        self.assertRaises(
            (errors.BzrBadParameterUnicode, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n', u'b\n', 'c\n'])

    def test_add_follows_left_matching_blocks(self):
        """If we change left_matching_blocks, the delta changes.

        Note: There are multiple correct deltas in this case, because
        we start with 1 "a" and we get 3.
        """
        vf = self.get_file()
        if isinstance(vf, WeaveFile):
            raise TestSkipped("WeaveFile ignores left_matching_blocks")
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['a\n', 'a\n', 'a\n'],
                     left_matching_blocks=[(0, 0, 1), (1, 3, 0)])
        self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('2'))
        vf.add_lines('3', ['1'], ['a\n', 'a\n', 'a\n'],
                     left_matching_blocks=[(0, 2, 1), (1, 3, 0)])
        self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('3'))
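
    # A note on the left_matching_blocks values used above: they are assumed
    # to follow the difflib SequenceMatcher convention of (left_start,
    # right_start, length) triples, so (0, 2, 1) claims that line 0 of the
    # parent text matches line 2 of the new text for a run of one line, with
    # the zero-length (1, 3, 0) block terminating the list. The test itself
    # only relies on two different block lists yielding the same stored text.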

    def test_inline_newline_throws(self):
        # \n characters are not permitted mid-line in lines being added
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterContainsNewline,
            vf.add_lines, 'a', [], ['a\n\n'])
        self.assertRaises(
            (errors.BzrBadParameterContainsNewline, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n\n'])
        # but inline CR's are allowed
        vf.add_lines('a', [], ['a\r\n'])
        try:
            vf.add_lines_with_ghosts('b', [], ['a\r\n'])
        except NotImplementedError:
            pass

    def test_add_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId,
            vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])

    def test_add_lines_nostoresha(self):
        """When nostore_sha is supplied, adding old content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                              version + "2")
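
    # nostore_sha semantics, as the assertions above read: the caller passes
    # the sha1 of a text it already holds elsewhere; if the store finds it
    # already has matching content it raises ExistingContent instead of
    # storing a duplicate, and the new version name is never recorded.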

    def test_add_lines_with_ghosts_nostoresha(self):
        """When nostore_sha is supplied, adding old content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        # is the test applicable to this vf implementation?
        try:
            vf.add_lines_with_ghosts('d', [], [])
        except NotImplementedError:
            raise TestSkipped("add_lines_with_ghosts is optional")
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines_with_ghosts, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                              version + "2")

    def test_add_lines_return_value(self):
        # add_lines should return the sha1 and the text size.
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        # check results for the three cases:
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            # the first two elements are the same for all versioned files:
            # - the digest and the size of the text. For some versioned files
            #   additional data is returned in additional tuple elements.
            result = vf.add_lines(version, [], lines)
            self.assertEqual(3, len(result))
            self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
                result[0:2])
        # parents should not affect the result:
        lines = sample_text_nl[1]
        self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
            vf.add_lines('d', ['b', 'c'], lines)[0:2])

    def test_get_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId, vf.get_texts, ['b:'])
        self.assertRaises(errors.ReservedId, vf.get_lines, 'b:')
        self.assertRaises(errors.ReservedId, vf.get_text, 'b:')

    def test_add_unchanged_last_line_noeol_snapshot(self):
        """Add a text with an unchanged last line with no eol should work."""
        # Test adding this in a number of chain lengths; because the interface
        # for VersionedFile does not allow forcing a specific chain length, we
        # just use a small base to get the first snapshot, then a much longer
        # first line for the next add (which will make the third add snapshot)
        # and so on. 20 has been chosen as an arbitrary figure - knits use 200
        # as a capped delta length, but ideally we would have some way of
        # tuning the test to the store (e.g. keep going until a snapshot
        # happens).
        for length in range(20):
            version_lines = {}
            vf = self.get_file('case-%d' % length)
            prefix = 'step-%d'
            parents = []
            for step in range(length):
                version = prefix % step
                lines = (['prelude \n'] * step) + ['line']
                vf.add_lines(version, parents, lines)
                version_lines[version] = lines
                parents = [version]
            vf.add_lines('no-eol', parents, ['line'])
            vf.get_texts(version_lines.keys())
            self.assertEqualDiff('line', vf.get_text('no-eol'))

    def test_get_texts_eol_variation(self):
        # similar to the failure in <http://bugs.launchpad.net/234748>
        vf = self.get_file()
        sample_text_nl = ["line\n"]
        sample_text_no_nl = ["line"]
        versions = []
        version_lines = {}
        parents = []
        for i in range(4):
            version = 'v%d' % i
            if i % 2:
                lines = sample_text_nl
            else:
                lines = sample_text_no_nl
            # left_matching_blocks is an internal api; it operates on the
            # *internal* representation for a knit, which is with *all* lines
            # being normalised to end with \n - even the final line in a no_nl
            # file. Using it here ensures that a broken internal implementation
            # (which is what this test tests) will generate a correct line
            # delta (which is to say, an empty delta).
            vf.add_lines(version, parents, lines,
                left_matching_blocks=[(0, 0, 1)])
            parents = [version]
            versions.append(version)
            version_lines[version] = lines
        vf.check()
        vf.get_texts(versions)
        vf.get_texts(reversed(versions))

    def test_add_lines_with_matching_blocks_noeol_last_line(self):
        """Add a text with an unchanged last line with no eol should work."""
        from bzrlib import multiparent
        # Hand verified sha1 of the text we're adding.
        sha1 = '6a1d115ec7b60afb664dc14890b5af5ce3c827a4'
        # Create a mpdiff which adds a new line before the trailing line, and
        # reuse the last line unaltered (which can cause annotation reuse).
        # Test adding this in two situations:
        # On top of a new insertion
        vf = self.get_file('fulltext')
        vf.add_lines('noeol', [], ['line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
                     left_matching_blocks=[(0, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
        # On top of a delta
        vf = self.get_file('delta')
        vf.add_lines('base', [], ['line'])
        vf.add_lines('noeol', ['base'], ['prelude\n', 'line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
                     left_matching_blocks=[(1, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))

    def test_make_mpdiffs(self):
        from bzrlib import multiparent
        vf = self.get_file('foo')
        sha1s = self._setup_for_deltas(vf)
        new_vf = self.get_file('bar')
        for version in multiparent.topo_iter(vf):
            mpdiff = vf.make_mpdiffs([version])[0]
            new_vf.add_mpdiffs([(version, vf.get_parent_map([version])[version],
                                 vf.get_sha1s([version])[version], mpdiff)])
            self.assertEqualDiff(vf.get_text(version),
                                 new_vf.get_text(version))
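
    # The round-trip above leans on multiparent diffs (mpdiffs): each text is
    # expressed relative to all of its parents at once, so add_mpdiffs needs
    # (version, parents, expected sha1, mpdiff) tuples to regenerate and
    # verify fulltexts, and multiparent.topo_iter is used so parents are
    # always inserted before the children that refer to them.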

    def test_make_mpdiffs_with_ghosts(self):
        vf = self.get_file('foo')
        try:
            vf.add_lines_with_ghosts('text', ['ghost'], ['line\n'])
        except NotImplementedError:
            # old Weave formats do not allow ghosts
            return
        self.assertRaises(errors.RevisionNotPresent, vf.make_mpdiffs, ['ghost'])

    def _setup_for_deltas(self, f):
        self.assertFalse(f.has_version('base'))
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 25 insertions
        f.add_lines('base', [], ['line\n'])
        f.add_lines('noeol', ['base'], ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        f.add_lines('noeolsecond', ['noeol'], ['line\n', 'line'])
        # differing last line with parent, both no-eol
        f.add_lines('noeolnotshared', ['noeolsecond'], ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        f.add_lines('eol', ['noeol'], ['phone\n'])
        # add eol following a noneol parent, no change content
        f.add_lines('eolline', ['noeol'], ['line\n'])
        # noeol with no parents:
        f.add_lines('noeolbase', [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        f.add_lines('eolbeforefirstparent', ['noeolbase', 'noeol'], ['line'])
        # two identical eol texts
        f.add_lines('noeoldup', ['noeol'], ['line'])
        next_parent = 'base'
        text_name = 'chain1-'
        text = ['line\n']
        sha1s = {0 :'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1 :'45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2 :'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3 :'26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4 :'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5 :'d63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6 :'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7 :'95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8 :'779e9a0b28f9f832528d4b21e17e168c67697272',
                 9 :'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10:'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11:'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12:'31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13:'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14:'2c4b1736566b8ca6051e668de68650686a3922f2',
                 15:'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16:'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17:'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18:'5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19:'1ebed371807ba5935958ad0884595126e8c4e823',
                 20:'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21:'01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22:'d8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23:'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24:'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25:'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = text_name + '%s' % depth
            text = text + ['line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        next_parent = 'base'
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = text_name + '%s' % depth
            text = text + ['line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        return sha1s

    def test_ancestry(self):
        f = self.get_file()
        self.assertEqual([], f.get_ancestry([]))
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
        f.add_lines('r2', ['r0'], ['b\n', 'c\n'])
        f.add_lines('r3', ['r2'], ['b\n', 'c\n'])
        f.add_lines('rM', ['r1', 'r2'], ['b\n', 'c\n'])
        self.assertEqual([], f.get_ancestry([]))
        versions = f.get_ancestry(['rM'])
        # there are some possibilities:
        # r0 r1 r2 rM
        # r0 r2 r1 rM
        # etc
        # so we check indexes
        r0 = versions.index('r0')
        r1 = versions.index('r1')
        r2 = versions.index('r2')
        self.assertFalse('r3' in versions)
        rM = versions.index('rM')
        self.assertTrue(r0 < r1)
        self.assertTrue(r0 < r2)
        self.assertTrue(r1 < rM)
        self.assertTrue(r2 < rM)

        self.assertRaises(RevisionNotPresent,
            f.get_ancestry, ['rM', 'rX'])

        self.assertEqual(set(f.get_ancestry('rM')),
            set(f.get_ancestry('rM', topo_sorted=False)))

    def test_mutate_after_finish(self):
        self._transaction = 'before'
        f = self.get_file()
        self._transaction = 'after'
        self.assertRaises(errors.OutSideTransaction, f.add_lines, '', [], [])
        self.assertRaises(errors.OutSideTransaction, f.add_lines_with_ghosts, '', [], [])

    def test_copy_to(self):
        f = self.get_file()
        f.add_lines('0', [], ['a\n'])
        t = MemoryTransport()
        f.copy_to('foo', t)
        for suffix in self.get_factory().get_suffixes():
            self.assertTrue(t.has('foo' + suffix))

    def test_get_suffixes(self):
        f = self.get_file()
        # get_suffixes should return a list
        self.assertTrue(isinstance(self.get_factory().get_suffixes(), list))

    def test_get_parent_map(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        self.assertEqual(
            {'r0':()}, f.get_parent_map(['r0']))
        f.add_lines('r1', ['r0'], ['a\n', 'b\n'])
        self.assertEqual(
            {'r1':('r0',)}, f.get_parent_map(['r1']))
        self.assertEqual(
            {'r0':(),
             'r1':('r0',)},
            f.get_parent_map(['r0', 'r1']))
        f.add_lines('r2', [], ['a\n', 'b\n'])
        f.add_lines('r3', [], ['a\n', 'b\n'])
        f.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n'])
        self.assertEqual(
            {'m':('r0', 'r1', 'r2', 'r3')}, f.get_parent_map(['m']))
        self.assertEqual({}, f.get_parent_map('y'))
        self.assertEqual(
            {'r0':(),
             'r1':('r0',)},
            f.get_parent_map(['r0', 'y', 'r1']))

    def test_annotate(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
        origins = f.annotate('r1')
        self.assertEquals(origins[0][0], 'r1')
        self.assertEquals(origins[1][0], 'r0')

        self.assertRaises(RevisionNotPresent,
            f.annotate, 'foo')

    def test_detection(self):
        # Test weaves detect corruption.
        #
        # Weaves contain a checksum of their texts.
        # When a text is extracted, this checksum should be
        # verified.

        w = self.get_file_corrupted_text()

        self.assertEqual('hello\n', w.get_text('v1'))
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.check)

        w = self.get_file_corrupted_checksum()

        self.assertEqual('hello\n', w.get_text('v1'))
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.check)

    def get_file_corrupted_text(self):
        """Return a versioned file with corrupt text but valid metadata."""
        raise NotImplementedError(self.get_file_corrupted_text)

    def reopen_file(self, name='foo'):
        """Open the versioned file from disk again."""
        raise NotImplementedError(self.reopen_file)

    def test_iter_lines_added_or_present_in_versions(self):
        # test that we get at least an equal set of the lines added by
        # versions in the weave
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.DummyProgress):

            def __init__(self):
                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        vf = self.get_file()
        # add a base to get included
        vf.add_lines('base', [], ['base\n'])
        # add an ancestor to be included on one side
        vf.add_lines('lancestor', [], ['lancestor\n'])
        # add an ancestor to be included on the other side
        vf.add_lines('rancestor', ['base'], ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        vf.add_lines('child', ['rancestor'], ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        vf.add_lines('otherchild',
                     ['lancestor', 'base'],
                     ['base\n', 'lancestor\n', 'otherchild\n'])
        def iter_with_versions(versions, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in vf.iter_lines_added_or_present_in_versions(versions,
                                                                   pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines
        lines = iter_with_versions(['child', 'otherchild'],
                                   [('Walking content', 0, 2),
                                    ('Walking content', 1, 2),
                                    ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
        # we don't care if we got more than that.

        # test all lines
        lines = iter_with_versions(None, [('Walking content', 0, 5),
                                          ('Walking content', 1, 5),
                                          ('Walking content', 2, 5),
                                          ('Walking content', 3, 5),
                                          ('Walking content', 4, 5),
                                          ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', 'base')] > 0)
        self.assertTrue(lines[('lancestor\n', 'lancestor')] > 0)
        self.assertTrue(lines[('rancestor\n', 'rancestor')] > 0)
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)

    def test_add_lines_with_ghosts(self):
        # some versioned file formats allow lines to be added with parent
        # information that references versions not present in the store.
        # Formats that do not support this need to raise
        # NotImplementedError on the add_lines_with_ghosts api.
        vf = self.get_file()
        # add a revision with ghost parents
        # The preferred form is utf8, but we should translate when needed
        parent_id_unicode = u'b\xbfse'
        parent_id_utf8 = parent_id_unicode.encode('utf8')
        try:
            vf.add_lines_with_ghosts('notbxbfse', [parent_id_utf8], [])
        except NotImplementedError:
            # check the other ghost apis are also not implemented
            self.assertRaises(NotImplementedError, vf.get_ancestry_with_ghosts, ['foo'])
            self.assertRaises(NotImplementedError, vf.get_parents_with_ghosts, 'foo')
            return
        vf = self.reopen_file()
        # test key graph related apis: get_ancestry, _graph, get_parents,
        # has_version
        # - these are ghost unaware and must not reflect ghosts
        self.assertEqual(['notbxbfse'], vf.get_ancestry('notbxbfse'))
        self.assertFalse(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
        # if we add something that is a ghost of another, it should correct the
        # results of the prior apis
        vf.add_lines(parent_id_utf8, [], [])
        self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry(['notbxbfse']))
        self.assertEqual({'notbxbfse':(parent_id_utf8,)},
            vf.get_parent_map(['notbxbfse']))
        self.assertTrue(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'],
            vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))

    def test_add_lines_with_ghosts_after_normal_revs(self):
        # some versioned file formats allow lines to be added with parent
        # information that references versions not present in the store.
        # Formats that do not support this need to raise
        # NotImplementedError on the add_lines_with_ghosts api.
        vf = self.get_file()
        # probe for ghost support
        try:
            vf.add_lines_with_ghosts('base', [], ['line\n', 'line_b\n'])
        except NotImplementedError:
            return
        vf.add_lines_with_ghosts('references_ghost',
                                 ['base', 'a_ghost'],
                                 ['line\n', 'line_b\n', 'line_c\n'])
        origins = vf.annotate('references_ghost')
        self.assertEquals(('base', 'line\n'), origins[0])
        self.assertEquals(('base', 'line_b\n'), origins[1])
        self.assertEquals(('references_ghost', 'line_c\n'), origins[2])

    def test_readonly_mode(self):
        transport = get_transport(self.get_url('.'))
        factory = self.get_factory()
        vf = factory('id', transport, 0777, create=True, access_mode='w')
        vf = factory('id', transport, access_mode='r')
        self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
        self.assertRaises(errors.ReadOnlyError,
                          vf.add_lines_with_ghosts,
                          'base',
                          [],
                          [])

    def test_get_sha1s(self):
        # check the sha1 data is available
        vf = self.get_file()
        # a simple file
        vf.add_lines('a', [], ['a\n'])
        # the same file, different metadata
        vf.add_lines('b', ['a'], ['a\n'])
        # a file differing only in last newline.
        vf.add_lines('c', [], ['a'])
        self.assertEqual({
            'a': '3f786850e387550fdab836ed7e6dc881de23001b',
            'c': '86f7e437faa5a7fce15d1ddcb9eaeaea377667b8',
            'b': '3f786850e387550fdab836ed7e6dc881de23001b',
            },
            vf.get_sha1s(['a', 'c', 'b']))


class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)

    def get_file_corrupted_text(self):
        w = WeaveFile('foo', get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)
        w.add_lines('v1', [], ['hello\n'])
        w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])

        # We are going to invasively corrupt the text
        # Make sure the internals of weave are the same
        self.assertEqual([('{', 0)
                         , 'hello\n'
                         , ('}', None)
                         , ('{', 1)
                         , 'there\n'
                         , ('}', None)
                         ], w._weave)

        self.assertEqual(['f572d396fae9206628714fb2ce00f72e94f2258f'
                         , '90f265c6e75f1c8f9ab76dcf85528352c5f215ef'
                         ], w._sha1s)
        w.check()

        # Corrupted
        w._weave[4] = 'There\n'
        return w

    def get_file_corrupted_checksum(self):
        w = self.get_file_corrupted_text()
        # Corrected
        w._weave[4] = 'there\n'
        self.assertEqual('hello\nthere\n', w.get_text('v2'))

        # Invalid checksum, first digit changed
        w._sha1s[1] = 'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
        return w

    def reopen_file(self, name='foo', create=False):
        return WeaveFile(name, get_transport(self.get_url('.')), create=create,
            get_scope=self.get_transaction)

    def test_no_implicit_create(self):
        self.assertRaises(errors.NoSuchFile,
                          WeaveFile,
                          'foo',
                          get_transport(self.get_url('.')),
                          get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile


class TestPlanMergeVersionedFile(TestCaseWithMemoryTransport):

    def setUp(self):
        TestCaseWithMemoryTransport.setUp(self)
        mapper = PrefixMapper()
        factory = make_file_factory(True, mapper)
        self.vf1 = factory(self.get_transport('root-1'))
        self.vf2 = factory(self.get_transport('root-2'))
        self.plan_merge_vf = versionedfile._PlanMergeVersionedFile('root')
        self.plan_merge_vf.fallback_versionedfiles.extend([self.vf1, self.vf2])

    def test_add_lines(self):
        self.plan_merge_vf.add_lines(('root', 'a:'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          ('root', 'a'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          ('root', 'a:'), None, [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          ('root', 'a:'), [], None)

    def setup_abcde(self):
        self.vf1.add_lines(('root', 'A'), [], ['a'])
        self.vf1.add_lines(('root', 'B'), [('root', 'A')], ['b'])
        self.vf2.add_lines(('root', 'C'), [], ['c'])
        self.vf2.add_lines(('root', 'D'), [('root', 'C')], ['d'])
        self.plan_merge_vf.add_lines(('root', 'E:'),
            [('root', 'B'), ('root', 'D')], ['e'])

    def test_get_parents(self):
        self.setup_abcde()
        self.assertEqual({('root', 'B'):(('root', 'A'),)},
            self.plan_merge_vf.get_parent_map([('root', 'B')]))
        self.assertEqual({('root', 'D'):(('root', 'C'),)},
            self.plan_merge_vf.get_parent_map([('root', 'D')]))
        self.assertEqual({('root', 'E:'):(('root', 'B'),('root', 'D'))},
            self.plan_merge_vf.get_parent_map([('root', 'E:')]))
        self.assertEqual({},
            self.plan_merge_vf.get_parent_map([('root', 'F')]))
        self.assertEqual({
            ('root', 'B'):(('root', 'A'),),
            ('root', 'D'):(('root', 'C'),),
            ('root', 'E:'):(('root', 'B'),('root', 'D')),
            },
            self.plan_merge_vf.get_parent_map(
                [('root', 'B'), ('root', 'D'), ('root', 'E:'), ('root', 'F')]))

    def test_get_record_stream(self):
        self.setup_abcde()
        def get_record(suffix):
            return self.plan_merge_vf.get_record_stream(
                [('root', suffix)], 'unordered', True).next()
        self.assertEqual('a', get_record('A').get_bytes_as('fulltext'))
        self.assertEqual('c', get_record('C').get_bytes_as('fulltext'))
        self.assertEqual('e', get_record('E:').get_bytes_as('fulltext'))
        self.assertEqual('absent', get_record('F').storage_kind)
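
    # A reading of the behaviour exercised above (judging by test_add_lines):
    # a _PlanMergeVersionedFile holds temporary merge texts locally, and only
    # keys whose last element ends with ':' may be added to it directly;
    # everything else is resolved through the fallback versioned files.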


class TestReadonlyHttpMixin(object):

    def get_transaction(self):
        return 1

    def test_readonly_http_works(self):
        # we should be able to read from http with a versioned file.
        vf = self.get_file()
        # try an empty file access
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
        self.assertEqual([], readonly_vf.versions())
        # now with feeling.
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
        readonly_vf = self.get_factory()('foo', get_transport(self.get_readonly_url('.')))
        self.assertEqual(['1', '2'], vf.versions())
        for version in readonly_vf.versions():
            readonly_vf.get_lines(version)


class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):

    def get_file(self):
        return WeaveFile('foo', get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile


class MergeCasesMixin(object):

    def doMerge(self, base, a, b, mp):
        from cStringIO import StringIO
        from textwrap import dedent

        def addcrlf(x):
            return x + '\n'

        w = self.get_file()
        w.add_lines('text0', [], map(addcrlf, base))
        w.add_lines('text1', ['text0'], map(addcrlf, a))
        w.add_lines('text2', ['text0'], map(addcrlf, b))

        self.log_contents(w)

        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))

        self.log('merge:')
        mt = StringIO()
        mt.writelines(w.weave_merge(p))
        mt.seek(0)
        self.log(mt.getvalue())

        mp = map(addcrlf, mp)
        self.assertEqual(mt.readlines(), mp)
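
    # plan_merge yields (state, line) pairs; the states exercised by these
    # tests include 'unchanged', 'new-a' and 'new-b' (test_weave_merge_conflicts
    # below feeds weave_merge a literal [('new-a', 'hello\n')] plan), and
    # weave_merge renders disagreement regions as the <<<<<<< / ======= /
    # >>>>>>> blocks the expected outputs show.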

    def testOneInsert(self):
        self.doMerge([],
                     ['aa'],
                     [],
                     ['aa'])

    def testSeparateInserts(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    def testSameInsert(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    overlappedInsertExpected = ['aaa', 'xxx', 'yyy', 'bbb']

    def testOverlappedInsert(self):
        self.doMerge(['aaa', 'bbb'],
                     ['aaa', 'xxx', 'yyy', 'bbb'],
                     ['aaa', 'xxx', 'bbb'], self.overlappedInsertExpected)

        # really it ought to reduce this to
        # ['aaa', 'xxx', 'yyy', 'bbb']

    def testClashReplace(self):
        self.doMerge(['aaa'],
                     ['xxx'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert1(self):
        self.doMerge(['aaa'],
                     ['xxx', 'aaa'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert2(self):
        self.doMerge(['aaa'],
                     ['xxx', 'aaa'],
                     ['aaa', 'yyy', 'zzz'],
                     ['xxx', 'aaa', 'yyy', 'zzz'])

    def testDeleteAndModify(self):
        """Clashing delete and modification.

        If one side modifies a region and the other deletes it then
        there should be a conflict with one side blank.
        """

        #######################################
        # skipped, not working yet
        return

        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'ddd', 'ccc'],
                     ['aaa', 'ccc'],
                     ['<<<<<<<< ', 'aaa', '=======', '>>>>>>> ', 'ccc'])

    def _test_merge_from_strings(self, base, a, b, expected):
        w = self.get_file()
        w.add_lines('text0', [], base.splitlines(True))
        w.add_lines('text1', ['text0'], a.splitlines(True))
        w.add_lines('text2', ['text0'], b.splitlines(True))
        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))
        self.log('merge result:')
        result_text = ''.join(w.weave_merge(p))
        self.log(result_text)
        self.assertEqualDiff(result_text, expected)

    def test_weave_merge_conflicts(self):
        # does weave merge properly handle plans that end with unchanged?
        result = ''.join(self.get_file().weave_merge([('new-a', 'hello\n')]))
        self.assertEqual(result, 'hello\n')

    def test_deletion_extended(self):
        """One side deletes, the other deletes more.
        """
        base = """\
            line 1
            line 2
            line 3
            """
        a = """\
            line 1
            line 2
            """
        b = """\
            line 1
            """
        result = """\
            line 1
<<<<<<<\x20
            line 2
=======
>>>>>>>\x20
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_deletion_overlap(self):
        """Delete overlapping regions with no other conflict.

        Arguably it'd be better to treat these as agreement, rather than
        conflict, but for now conflict is safer.
        """
        base = """\
            start context
            int a() {}
            int b() {}
            int c() {}
            end context
            """
        a = """\
            start context
            int a() {}
            end context
            """
        b = """\
            start context
            int c() {}
            end context
            """
        result = """\
            start context
<<<<<<<\x20
            int a() {}
=======
            int c() {}
>>>>>>>\x20
            end context
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_agreement_deletion(self):
        """Agree to delete some lines, without conflicts."""
        base = """\
            start context
            base line 1
            base line 2
            end context
            """
        a = """\
            start context
            base line 1
            end context
            """
        b = """\
            start context
            base line 1
            end context
            """
        result = """\
            start context
            base line 1
            end context
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_sync_on_deletion(self):
        """Specific case of merge where we can synchronize incorrectly.

        A previous version of the weave merge concluded that the two versions
        agreed on deleting line 2, and this could be a synchronization point.
        Line 1 was then considered in isolation, and thought to be deleted on
        both sides.

        It's better to consider the whole thing as a disagreement region.
        """
        base = """\
            start context
            base line 1
            base line 2
            end context
            """
        a = """\
            start context
            base line 1
            a's replacement line 2
            end context
            """
        b = """\
            start context
            b replaces
            both lines
            end context
            """
        result = """\
            start context
<<<<<<<\x20
            base line 1
            a's replacement line 2
=======
            b replaces
            both lines
>>>>>>>\x20
            end context
            """
        self._test_merge_from_strings(base, a, b, result)


class TestWeaveMerge(TestCaseWithMemoryTransport, MergeCasesMixin):

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True)

    def log_contents(self, w):
        self.log('weave is:')
        tmpf = StringIO()
        write_weave(w, tmpf)
        self.log(tmpf.getvalue())

    overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======',
                                'xxx', '>>>>>>> ', 'bbb']


class TestContentFactoryAdaption(TestCaseWithMemoryTransport):

    def test_select_adaptor(self):
        """Test expected adapters exist."""
        # One scenario for each lookup combination we expect to use.
        # Each is source_kind, requested_kind, adapter class
        scenarios = [
            ('knit-delta-gz', 'fulltext', _mod_knit.DeltaPlainToFullText),
            ('knit-ft-gz', 'fulltext', _mod_knit.FTPlainToFullText),
            ('knit-annotated-delta-gz', 'knit-delta-gz',
                _mod_knit.DeltaAnnotatedToUnannotated),
            ('knit-annotated-delta-gz', 'fulltext',
                _mod_knit.DeltaAnnotatedToFullText),
            ('knit-annotated-ft-gz', 'knit-ft-gz',
                _mod_knit.FTAnnotatedToUnannotated),
            ('knit-annotated-ft-gz', 'fulltext',
                _mod_knit.FTAnnotatedToFullText),
            ]
        for source, requested, klass in scenarios:
            adapter_factory = versionedfile.adapter_registry.get(
                (source, requested))
            adapter = adapter_factory(None)
            self.assertIsInstance(adapter, klass)
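
    # The registry lookups above are keyed by (source_kind, target_kind)
    # pairs, so converting e.g. a 'knit-annotated-delta-gz' record to
    # 'fulltext' is one registered adapter rather than a chain. The factory
    # takes a backing versioned file (or None when no base text is needed for
    # reconstruction), as adapter_factory(None) shows.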

    def get_knit(self, annotated=True):
        mapper = ConstantMapper('knit')
        transport = self.get_transport()
        return make_file_factory(annotated, mapper)(transport)

    def helpGetBytes(self, f, ft_adapter, delta_adapter):
        """Grab the adapted texts of interest for tests."""
        # origin is a fulltext
        entries = f.get_record_stream([('origin',)], 'unordered', False)
        base = entries.next()
        ft_data = ft_adapter.get_bytes(base)
        # merged is both a delta and multiple parents.
        entries = f.get_record_stream([('merged',)], 'unordered', False)
        merged = entries.next()
        delta_data = delta_adapter.get_bytes(merged)
        return ft_data, delta_data

    def test_deannotation_noeol(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 4 32c2e79763b3f90e8ccde37f9710b6629c25a796\n'
            '1,2,3\nleft\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())

    def test_deannotation(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 3 ed8bce375198ea62444dc71952b22cfc2b09226d\n'
            '2,2,2\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())

    def test_annotated_to_fulltext_no_eol(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_annotated_to_fulltext(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_unannotated_to_fulltext(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_unannotated_to_fulltext_no_eol(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)


class TestKeyMapper(TestCaseWithMemoryTransport):
    """Tests for various key mapping logic."""

    def test_identity_mapper(self):
        mapper = versionedfile.ConstantMapper("inventory")
        self.assertEqual("inventory", mapper.map(('foo@ar',)))
        self.assertEqual("inventory", mapper.map(('quux',)))

    def test_prefix_mapper(self):
        #format5: plain
        mapper = versionedfile.PrefixMapper()
        self.assertEqual("file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("file-id"))
        self.assertEqual(('new-id',), mapper.unmap("new-id"))

    def test_hash_prefix_mapper(self):
        #format6: hash + plain
        mapper = versionedfile.HashPrefixMapper()
        self.assertEqual("9b/file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("45/new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("9b/file-id"))
        self.assertEqual(('new-id',), mapper.unmap("45/new-id"))

    def test_hash_escaped_mapper(self):
        #knit1: hash + escaped
        mapper = versionedfile.HashEscapedPrefixMapper()
        self.assertEqual("88/%2520", mapper.map((" ", "revision-id")))
        self.assertEqual("ed/fil%2545-%2549d", mapper.map(("filE-Id",
            "revision-id")))
        self.assertEqual("88/ne%2557-%2549d", mapper.map(("neW-Id",
            "revision-id")))
        self.assertEqual(('filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
        self.assertEqual(('neW-Id',), mapper.unmap("88/ne%2557-%2549d"))
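
    # A worked reading of the expectations above: HashEscapedPrefixMapper
    # escapes the file id first - ' ' becomes '%20' and the '%' is itself
    # escaped, giving '%2520'; uppercase letters are escaped the same way
    # ('E' -> '%45' -> '%2545'). The two-hex-digit directory prefix ('88/',
    # 'ed/') is derived from a hash of the id, spreading files across
    # subdirectories; unmap reverses the escaping and drops the prefix.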


class TestVersionedFiles(TestCaseWithMemoryTransport):
    """Tests for the multiple-file variant of VersionedFile."""

    def get_versionedfiles(self, relpath='files'):
        transport = self.get_transport(relpath)
        if relpath != '.':
            transport.mkdir('.')
        files = self.factory(transport)
        if self.cleanup is not None:
            self.addCleanup(lambda:self.cleanup(files))
        return files

    def get_simple_key(self, suffix):
        """Return a key for the object under test."""
        if self.key_length == 1:
            return (suffix,)
        else:
            return ('FileA',) + (suffix,)

    def test_add_lines(self):
        f = self.get_versionedfiles()
        key0 = self.get_simple_key('r0')
        key1 = self.get_simple_key('r1')
        key2 = self.get_simple_key('r2')
        keyf = self.get_simple_key('foo')
        f.add_lines(key0, [], ['a\n', 'b\n'])
        if self.graph:
            f.add_lines(key1, [key0], ['b\n', 'c\n'])
        else:
            f.add_lines(key1, [], ['b\n', 'c\n'])
        keys = f.keys()
        self.assertTrue(key0 in keys)
        self.assertTrue(key1 in keys)
        records = []
        for record in f.get_record_stream([key0, key1], 'unordered', True):
            records.append((record.key, record.get_bytes_as('fulltext')))
        records.sort()
        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)

    def test__add_text(self):
        f = self.get_versionedfiles()
        key0 = self.get_simple_key('r0')
        key1 = self.get_simple_key('r1')
        key2 = self.get_simple_key('r2')
        keyf = self.get_simple_key('foo')
        f._add_text(key0, [], 'a\nb\n')
        if self.graph:
            f._add_text(key1, [key0], 'b\nc\n')
        else:
            f._add_text(key1, [], 'b\nc\n')
        keys = f.keys()
        self.assertTrue(key0 in keys)
        self.assertTrue(key1 in keys)
        records = []
        for record in f.get_record_stream([key0, key1], 'unordered', True):
            records.append((record.key, record.get_bytes_as('fulltext')))
        records.sort()
        self.assertEqual([(key0, 'a\nb\n'), (key1, 'b\nc\n')], records)

    def test_annotate(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            prefix = ()
        else:
            prefix = ('FileA',)
        # introduced full text
        origins = files.annotate(prefix + ('origin',))
        self.assertEqual([
            (prefix + ('origin',), 'origin\n')],
            origins)
        # a delta
        origins = files.annotate(prefix + ('base',))
        self.assertEqual([
            (prefix + ('base',), 'base\n')],
            origins)
        # a merge
        origins = files.annotate(prefix + ('merged',))
        if self.graph:
            self.assertEqual([
                (prefix + ('base',), 'base\n'),
                (prefix + ('left',), 'left\n'),
                (prefix + ('right',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        else:
            # Without a graph everything is new.
            self.assertEqual([
                (prefix + ('merged',), 'base\n'),
                (prefix + ('merged',), 'left\n'),
                (prefix + ('merged',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        self.assertRaises(RevisionNotPresent,
            files.annotate, prefix + ('missing-key',))

    def test_get_annotator(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        origin_key = self.get_simple_key('origin')
        base_key = self.get_simple_key('base')
        left_key = self.get_simple_key('left')
        right_key = self.get_simple_key('right')
        merged_key = self.get_simple_key('merged')
        # annotator = files.get_annotator()
        # introduced full text
        origins, lines = files.get_annotator().annotate(origin_key)
        self.assertEqual([(origin_key,)], origins)
        self.assertEqual(['origin\n'], lines)
        # a delta
        origins, lines = files.get_annotator().annotate(base_key)
        self.assertEqual([(base_key,)], origins)
        # a merge
        origins, lines = files.get_annotator().annotate(merged_key)
        if self.graph:
            self.assertEqual([
                (base_key,),
                (left_key,),
                (right_key,),
                (merged_key,),
                ], origins)
        else:
            # Without a graph everything is new.
            self.assertEqual([
                (merged_key,),
                (merged_key,),
                (merged_key,),
                (merged_key,),
                ], origins)
        self.assertRaises(RevisionNotPresent,
            files.get_annotator().annotate, self.get_simple_key('missing-key'))

    def test_construct(self):
        """Each parameterised test can be constructed on a transport."""
        files = self.get_versionedfiles()

    def get_diamond_files(self, files, trailing_eol=True, left_only=False,
        nokeys=False):
        return get_diamond_files(files, self.key_length,
            trailing_eol=trailing_eol, nograph=not self.graph,
            left_only=left_only, nokeys=nokeys)

    def _add_content_nostoresha(self, add_lines):
        """When nostore_sha is supplied, adding old content raises."""
        vf = self.get_versionedfiles()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            if add_lines:
                sha, _, _ = vf.add_lines(self.get_simple_key(version), [],
                                         lines)
            else:
                sha, _, _ = vf._add_text(self.get_simple_key(version), [],
                                         ''.join(lines))
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
                shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            new_key = self.get_simple_key(version + "2")
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, new_key, [], lines,
                nostore_sha=sha)
            self.assertRaises(errors.ExistingContent,
                vf._add_text, new_key, [], ''.join(lines),
                nostore_sha=sha)
            # and no new version should have been added.
            record = vf.get_record_stream([new_key], 'unordered', True).next()
            self.assertEqual('absent', record.storage_kind)

    def test_add_lines_nostoresha(self):
        self._add_content_nostoresha(add_lines=True)

    def test__add_text_nostoresha(self):
        self._add_content_nostoresha(add_lines=False)

    def test_add_lines_return(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
        elif self.key_length == 2:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)

    def test_add_lines_no_key_generates_chk_key(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files, nokeys=True)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
            # Check the added items got CHK keys.
            self.assertEqual(set([
                ('sha1:00e364d235126be43292ab09cb4686cf703ddc17',),
                ('sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',),
                ('sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',),
                ('sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',),
                ('sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',),
                ]),
                files.keys())
        elif self.key_length == 2:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
            # Check the added items got CHK keys.
            self.assertEqual(set([
                ('FileA', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                ('FileA', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                ('FileA', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                ('FileA', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                ('FileA', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
                ('FileB', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                ('FileB', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                ('FileB', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                ('FileB', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                ('FileB', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
                ]),
                files.keys())

    def test_empty_lines(self):
        """Empty files can be stored."""
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], [])
        self.assertEqual('',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), [])
        self.assertEqual('',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))

    def test_newline_only(self):
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))

    def test_get_record_stream_empty(self):
        """An empty stream can be requested without error."""
        f = self.get_versionedfiles()
        entries = f.get_record_stream([], 'unordered', False)
        self.assertEqual([], list(entries))

    def assertValidStorageKind(self, storage_kind):
        """Assert that storage_kind is a valid storage_kind."""
        self.assertSubset([storage_kind],
            ['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
             'knit-ft', 'knit-delta', 'chunked', 'fulltext',
             'knit-annotated-ft-gz', 'knit-annotated-delta-gz', 'knit-ft-gz',
             'knit-delta-gz',
             'knit-delta-closure', 'knit-delta-closure-ref',
             'groupcompress-block', 'groupcompress-block-ref'])

    def capture_stream(self, f, entries, on_seen, parents):
        """Capture a stream for testing."""
        for factory in entries:
            on_seen(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            if factory.sha1 is not None:
                self.assertEqual(f.get_sha1s([factory.key])[factory.key],
                    factory.sha1)
            self.assertEqual(parents[factory.key], factory.parents)
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)

    def test_get_record_stream_interface(self):
        """each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, entries, seen.add, parent_map)
        self.assertEqual(set(keys), seen)

    def get_keys_and_sort_order(self):
        """Get diamond test keys list, and their sort ordering."""
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
            sort_order = {('merged',):2, ('left',):1, ('right',):1, ('base',):0}
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
            sort_order = {
                ('FileA', 'merged'):2, ('FileA', 'left'):1, ('FileA', 'right'):1,
                ('FileA', 'base'):0,
                ('FileB', 'merged'):2, ('FileB', 'left'):1, ('FileB', 'right'):1,
                ('FileB', 'base'):0,
                }
        return keys, sort_order
1812
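    # groupcompress order is roughly the reverse of topological order:
    # 'merged' sorts first and 'base' last within each prefix, presumably so
    # that the newest (most frequently fetched) texts sit at the front of a
    # compression group.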
    def get_keys_and_groupcompress_sort_order(self):
        """Get diamond test keys list, and their groupcompress sort ordering."""
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
            sort_order = {('merged',):0, ('left',):1, ('right',):1, ('base',):2}
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
            sort_order = {
                ('FileA', 'merged'):0, ('FileA', 'left'):1, ('FileA', 'right'):1,
                ('FileA', 'base'):2,
                ('FileB', 'merged'):3, ('FileB', 'left'):4, ('FileB', 'right'):4,
                ('FileB', 'base'):5,
                }
        return keys, sort_order

    def test_get_record_stream_interface_ordered(self):
        """Each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', False)
        seen = []
        self.capture_stream(files, entries, seen.append, parent_map)
        self.assertStreamOrder(sort_order, seen, keys)

    def test_get_record_stream_interface_ordered_with_delta_closure(self):
        """Each item must be accessible as a fulltext."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', True)
        seen = []
        for factory in entries:
            seen.append(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertSubset([factory.sha1],
                [None, files.get_sha1s([factory.key])[factory.key]])
            self.assertEqual(parent_map[factory.key], factory.parents)
            ft_bytes = factory.get_bytes_as('fulltext')
            self.assertIsInstance(ft_bytes, str)
            chunked_bytes = factory.get_bytes_as('chunked')
            self.assertEqualDiff(ft_bytes, ''.join(chunked_bytes))
        self.assertStreamOrder(sort_order, seen, keys)

    def test_get_record_stream_interface_groupcompress(self):
        """Each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_groupcompress_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'groupcompress', False)
        seen = []
        self.capture_stream(files, entries, seen.append, parent_map)
        self.assertStreamOrder(sort_order, seen, keys)

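    # A sorted stream only promises relative ordering per key prefix: the
    # sort position assigned to each key must never decrease as the stream
    # is consumed. Ungraphed stores promise no ordering at all, so for them
    # only set equality is checked.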
    def assertStreamOrder(self, sort_order, seen, keys):
        self.assertEqual(len(set(seen)), len(keys))
        if self.key_length == 1:
            lows = {():0}
        else:
            lows = {('FileA',):0, ('FileB',):0}
        if not self.graph:
            self.assertEqual(set(keys), set(seen))
        else:
            for key in seen:
                sort_pos = sort_order[key]
                self.assertTrue(sort_pos >= lows[key[:-1]],
                    "Out of order in sorted stream: %r, %r" % (key, seen))
                lows[key[:-1]] = sort_pos

    def test_get_record_stream_unknown_storage_kind_raises(self):
        """Asking for a storage kind that the stream cannot supply raises."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        # We track the contents because we should be able to try, fail a
        # particular kind and then ask for one that works and continue.
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            if factory.sha1 is not None:
                self.assertEqual(files.get_sha1s([factory.key])[factory.key],
                    factory.sha1)
            self.assertEqual(parent_map[factory.key], factory.parents)
            # currently no stream emits mpdiff
            self.assertRaises(errors.UnavailableRepresentation,
                factory.get_bytes_as, 'mpdiff')
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
        self.assertEqual(set(keys), seen)

    def test_get_record_stream_missing_records_are_absent(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('absent',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'absent'), ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'absent'), ('FileB', 'base'),
                ('absent', 'absent'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)
        entries = files.get_record_stream(keys, 'topological', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)

    def assertRecordHasContent(self, record, bytes):
        """Assert that record has the given bytes as its content."""
        self.assertEqual(bytes, record.get_bytes_as('fulltext'))
        self.assertEqual(bytes, ''.join(record.get_bytes_as('chunked')))

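    # The wire-readiness tests below share one round-trip pattern: serialise
    # each record with get_bytes_as(record.storage_kind), rebuild records
    # from the bytes with versionedfile.NetworkRecordStream, and assert that
    # keys, parents, storage kinds and content survive the trip unchanged.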
    def test_get_record_stream_native_formats_are_wire_ready_one_ft(self):
        files = self.get_versionedfiles()
        key = self.get_simple_key('foo')
        files.add_lines(key, (), ['my text\n', 'content'])
        stream = files.get_record_stream([key], 'unordered', False)
        record = stream.next()
        if record.storage_kind in ('chunked', 'fulltext'):
            # chunked and fulltext representations are for direct use not wire
            # serialisation: check they are able to be used directly. To send
            # such records over the wire translation will be needed.
            self.assertRecordHasContent(record, "my text\ncontent")
        else:
            bytes = [record.get_bytes_as(record.storage_kind)]
            network_stream = versionedfile.NetworkRecordStream(bytes).read()
            source_record = record
            records = []
            for record in network_stream:
                records.append(record)
                self.assertEqual(source_record.storage_kind,
                    record.storage_kind)
                self.assertEqual(source_record.parents, record.parents)
                self.assertEqual(
                    source_record.get_bytes_as(source_record.storage_kind),
                    record.get_bytes_as(record.storage_kind))
            self.assertEqual(1, len(records))

    def assertStreamMetaEqual(self, records, expected, stream):
        """Assert that streams expected and stream have the same records.

        :param records: A list to collect the seen records.
        :return: A generator of the records in stream.
        """
        # We make assertions during copying to catch things early for
        # easier debugging.
        for record, ref_record in izip(stream, expected):
            records.append(record)
            self.assertEqual(ref_record.key, record.key)
            self.assertEqual(ref_record.storage_kind, record.storage_kind)
            self.assertEqual(ref_record.parents, record.parents)
            yield record

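    # 'chunked' and 'fulltext' records have no wire encoding, so the helper
    # below checks their content directly and counts them in skipped_records
    # rather than forwarding them; callers then assert on
    # len(records) + skipped_records[0] to cover both paths.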
    def stream_to_bytes_or_skip_counter(self, skipped_records, full_texts,
        stream):
        """Convert a stream to a bytes iterator.

        :param skipped_records: A list with one element to increment when a
            record is skipped.
        :param full_texts: A dict from key->fulltext representation, for
            checking chunked or fulltext stored records.
        :param stream: A record_stream.
        :return: An iterator over the bytes of each record.
        """
        for record in stream:
            if record.storage_kind in ('chunked', 'fulltext'):
                skipped_records[0] += 1
                # check the content is correct for direct use.
                self.assertRecordHasContent(record, full_texts[record.key])
            else:
                yield record.get_bytes_as(record.storage_kind)

    def test_get_record_stream_native_formats_are_wire_ready_ft_delta(self):
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key, key_delta], 'unordered', False)
        ref = files.get_record_stream([key, key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key: "my text\ncontent",
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object so
        # we can check the content was carried across correctly without doing
        # delta inspection.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire, thank you!
        self.assertEqual(2, len(records) + skipped_records[0])
        if len(records):
            # if any content was copied it must all have been.
            self.assertIdenticalVersionedFile(files, target_files)

    def test_get_record_stream_native_formats_are_wire_ready_delta(self):
        # copy a delta over the wire
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        # Copy the basis text across so we can reconstruct the delta during
        # insertion into target.
        target_files.insert_record_stream(files.get_record_stream([key],
            'unordered', False))
        local = files.get_record_stream([key_delta], 'unordered', False)
        ref = files.get_record_stream([key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object so
        # we can check the content was carried across correctly without doing
        # delta inspection during check_stream.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire, thank you!
        self.assertEqual(1, len(records) + skipped_records[0])
        if len(records):
            # if any content was copied it must all have been.
            self.assertIdenticalVersionedFile(files, target_files)

    def test_get_record_stream_wire_ready_delta_closure_included(self):
        # copy a delta over the wire with the ability to get its full text.
        files = self.get_versionedfiles()
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key_delta], 'unordered', True)
        ref = files.get_record_stream([key_delta], 'unordered', True)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # read the stream from the network and check the content was carried
        # across correctly without doing delta inspection.
        for record in self.assertStreamMetaEqual(records, ref, network_stream):
            # we have to be able to get the full text out:
            self.assertRecordHasContent(record, full_texts[record.key])
        # No duplicates on the wire, thank you!
        self.assertEqual(1, len(records) + skipped_records[0])

    def assertAbsentRecord(self, files, keys, parents, entries):
        """Helper for test_get_record_stream_missing_records_are_absent."""
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            if factory.key[-1] == 'absent':
                self.assertEqual('absent', factory.storage_kind)
                self.assertEqual(None, factory.sha1)
                self.assertEqual(None, factory.parents)
            else:
                self.assertValidStorageKind(factory.storage_kind)
                if factory.sha1 is not None:
                    sha1 = files.get_sha1s([factory.key])[factory.key]
                    self.assertEqual(sha1, factory.sha1)
                self.assertEqual(parents[factory.key], factory.parents)
                self.assertIsInstance(
                    factory.get_bytes_as(factory.storage_kind), str)
        self.assertEqual(set(keys), seen)

    def test_filter_absent_records(self):
        """Requested missing records can be filtered out trivially."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        # Add an absent record in the middle of the present keys. (We don't
        # ask for just absent keys to ensure that content before and after
        # the absent keys is still delivered).
        present_keys = list(keys)
        if self.key_length == 1:
            keys.insert(2, ('extra',))
        else:
            keys.insert(2, ('extra', 'extra'))
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, versionedfile.filter_absent(entries),
            seen.add, parent_map)
        self.assertEqual(set(present_keys), seen)

    def get_mapper(self):
        """Get a mapper suitable for the key length of the test interface."""
        if self.key_length == 1:
            return ConstantMapper('source')
        else:
            return HashEscapedPrefixMapper()

    def get_parents(self, parents):
        """Get parents, taking self.graph into consideration."""
        if self.graph:
            return parents
        else:
            return None

    def test_get_parent_map(self):
        files = self.get_versionedfiles()
        if self.key_length == 1:
            parent_details = [
                (('r0',), self.get_parents(())),
                (('r1',), self.get_parents((('r0',),))),
                (('r2',), self.get_parents(())),
                (('r3',), self.get_parents(())),
                (('m',), self.get_parents((('r0',), ('r1',), ('r2',), ('r3',)))),
                ]
        else:
            parent_details = [
                (('FileA', 'r0'), self.get_parents(())),
                (('FileA', 'r1'), self.get_parents((('FileA', 'r0'),))),
                (('FileA', 'r2'), self.get_parents(())),
                (('FileA', 'r3'), self.get_parents(())),
                (('FileA', 'm'), self.get_parents((('FileA', 'r0'),
                    ('FileA', 'r1'), ('FileA', 'r2'), ('FileA', 'r3')))),
                ]
        for key, parents in parent_details:
            files.add_lines(key, parents, [])
            # immediately after adding it should be queryable.
            self.assertEqual({key: parents}, files.get_parent_map([key]))
        # We can ask for an empty set
        self.assertEqual({}, files.get_parent_map([]))
        # We can ask for many keys
        all_parents = dict(parent_details)
        self.assertEqual(all_parents, files.get_parent_map(all_parents.keys()))
        # Absent keys are simply not included in the result.
        keys = all_parents.keys()
        if self.key_length == 1:
            keys.insert(1, ('missing',))
        else:
            keys.insert(1, ('missing', 'missing'))
        # Absent keys are just ignored
        self.assertEqual(all_parents, files.get_parent_map(keys))

    def test_get_sha1s(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('base',), ('origin',), ('left',), ('merged',), ('right',)]
        else:
            # ask for shas from different prefixes.
            keys = [
                ('FileA', 'base'), ('FileB', 'origin'), ('FileA', 'left'),
                ('FileA', 'merged'), ('FileB', 'right'),
                ]
        self.assertEqual({
            keys[0]: '51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
            keys[1]: '00e364d235126be43292ab09cb4686cf703ddc17',
            keys[2]: 'a8478686da38e370e32e42e8a0c220e33ee9132f',
            keys[3]: 'ed8bce375198ea62444dc71952b22cfc2b09226d',
            keys[4]: '9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
            },
            files.get_sha1s(keys))

    def test_insert_record_stream_empty(self):
        """Inserting an empty record stream should work."""
        files = self.get_versionedfiles()
        files.insert_record_stream([])

    def assertIdenticalVersionedFile(self, expected, actual):
        """Assert that expected and actual have the same contents."""
        self.assertEqual(set(actual.keys()), set(expected.keys()))
        actual_parents = actual.get_parent_map(actual.keys())
        if self.graph:
            self.assertEqual(actual_parents,
                expected.get_parent_map(expected.keys()))
        else:
            for key, parents in actual_parents.items():
                self.assertEqual(None, parents)
        for key in actual.keys():
            actual_text = actual.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            expected_text = expected.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            self.assertEqual(actual_text, expected_text)

    def test_insert_record_stream_fulltexts(self):
        """Any file should accept a stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_fulltexts_noeol(self):
        """Any file should accept a stream of fulltexts with no trailing EOL."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_annotated_knits(self):
        """Any file should accept a stream from annotated knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_annotated_knits_noeol(self):
        """Any file should accept a stream from annotated knits with no EOL."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_plain_knits(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_plain_knits_noeol(self):
        """Any file should accept a stream from plain knits with no EOL."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_existing_keys(self):
        """Inserting keys already in a file should not error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        # insert some keys into files.
        self.get_diamond_files(files, left_only=True)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_missing_keys(self):
        """Inserting a stream with absent keys should raise an error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        stream = source.get_record_stream([('missing',) * self.key_length],
            'topological', False)
        self.assertRaises(errors.RevisionNotPresent,
            files.insert_record_stream, stream)

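    # An implementation may reject an out-of-order stream with
    # RevisionNotPresent, but if it does it must leave the store intact
    # (hence the check() call below); if it accepts the stream, the result
    # must be identical to the source.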
    def test_insert_record_stream_out_of_order(self):
        """An out of order stream can either error or work."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        if self.key_length == 1:
            origin_keys = [('origin',)]
            end_keys = [('merged',), ('left',)]
            start_keys = [('right',), ('base',)]
        else:
            origin_keys = [('FileA', 'origin'), ('FileB', 'origin')]
            end_keys = [('FileA', 'merged',), ('FileA', 'left',),
                ('FileB', 'merged',), ('FileB', 'left',)]
            start_keys = [('FileA', 'right',), ('FileA', 'base',),
                ('FileB', 'right',), ('FileB', 'base',)]
        origin_entries = source.get_record_stream(origin_keys, 'unordered', False)
        end_entries = source.get_record_stream(end_keys, 'topological', False)
        start_entries = source.get_record_stream(start_keys, 'topological', False)
        entries = chain(origin_entries, end_entries, start_entries)
        try:
            files.insert_record_stream(entries)
        except RevisionNotPresent:
            # Must not have corrupted the file.
            files.check()
        else:
            self.assertIdenticalVersionedFile(source, files)

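    # In a plain knit the 'merged' text is stored as a delta whose
    # compression parent is 'left', so streaming 'origin' and 'merged'
    # without 'left' yields a record that cannot be expanded until its basis
    # arrives; that is exactly what the partial-insertion tests below need.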
    def get_knit_delta_source(self):
        """Get a source that can produce a stream with knit delta records,
        regardless of this test's scenario.
        """
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        get_diamond_files(source, self.key_length, trailing_eol=True,
            nograph=False, left_only=False)
        return source

    def test_insert_record_stream_delta_missing_basis_no_corruption(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis.  In the meantime a record missing its basis is
        not added.
        """
        source = self.get_knit_delta_source()
        keys = [self.get_simple_key('origin'), self.get_simple_key('merged')]
        entries = source.get_record_stream(keys, 'unordered', False)
        files = self.get_versionedfiles()
        if self.support_partial_insertion:
            self.assertEqual([],
                list(files.get_missing_compression_parent_keys()))
            files.insert_record_stream(entries)
            missing_bases = files.get_missing_compression_parent_keys()
            self.assertEqual(set([self.get_simple_key('left')]),
                set(missing_bases))
            self.assertEqual(set(keys), set(files.get_parent_map(keys)))
        else:
            self.assertRaises(
                errors.RevisionNotPresent, files.insert_record_stream, entries)
            files.check()

    def test_insert_record_stream_delta_missing_basis_can_be_added_later(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis.  That basis can be added in a second
        insert_record_stream call that does not need to repeat records present
        in the previous stream.  The record(s) that required that basis are
        fully inserted once their basis is no longer missing.
        """
        if not self.support_partial_insertion:
            raise TestNotApplicable(
                'versioned file scenario does not support partial insertion')
        source = self.get_knit_delta_source()
        entries = source.get_record_stream([self.get_simple_key('origin'),
            self.get_simple_key('merged')], 'unordered', False)
        files = self.get_versionedfiles()
        files.insert_record_stream(entries)
        missing_bases = files.get_missing_compression_parent_keys()
        self.assertEqual(set([self.get_simple_key('left')]),
            set(missing_bases))
        # 'merged' is inserted (although a commit of a write group involving
        # this versionedfiles would fail).
        merged_key = self.get_simple_key('merged')
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())
        # Add the full delta closure of the missing records
        missing_entries = source.get_record_stream(
            missing_bases, 'unordered', True)
        files.insert_record_stream(missing_entries)
        # Now 'merged' is fully inserted (and a commit would succeed).
        self.assertEqual([], list(files.get_missing_compression_parent_keys()))
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())
        files.check()

    def test_iter_lines_added_or_present_in_keys(self):
        # test that we get at least an equal set of the lines added by
        # versions in the store.
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.DummyProgress):

            def __init__(self):
                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        files = self.get_versionedfiles()
        # add a base to get included
        files.add_lines(self.get_simple_key('base'), (), ['base\n'])
        # add an ancestor to be included on one side
        files.add_lines(self.get_simple_key('lancestor'), (), ['lancestor\n'])
        # add an ancestor to be included on the other side
        files.add_lines(self.get_simple_key('rancestor'),
            self.get_parents([self.get_simple_key('base')]), ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        files.add_lines(self.get_simple_key('child'),
            self.get_parents([self.get_simple_key('rancestor')]),
            ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        files.add_lines(self.get_simple_key('otherchild'),
            self.get_parents([self.get_simple_key('lancestor'),
                self.get_simple_key('base')]),
            ['base\n', 'lancestor\n', 'otherchild\n'])

        def iter_with_keys(keys, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in files.iter_lines_added_or_present_in_keys(keys,
                    pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            # progress reporting is optional; only check the updates when
            # some were made.
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines

        lines = iter_with_keys(
            [self.get_simple_key('child'), self.get_simple_key('otherchild')],
            [('Walking content', 0, 2),
             ('Walking content', 1, 2),
             ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
        # we don't care if we got more than that.

        # test all lines
        lines = iter_with_keys(files.keys(),
            [('Walking content', 0, 5),
             ('Walking content', 1, 5),
             ('Walking content', 2, 5),
             ('Walking content', 3, 5),
             ('Walking content', 4, 5),
             ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', self.get_simple_key('base'))] > 0)
        self.assertTrue(
            lines[('lancestor\n', self.get_simple_key('lancestor'))] > 0)
        self.assertTrue(
            lines[('rancestor\n', self.get_simple_key('rancestor'))] > 0)
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)

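    # make_mpdiffs/add_mpdiffs must round-trip exactly: for every key the
    # multi-parent diff produced by the source, applied in topological order
    # in the target, has to reproduce a byte-identical fulltext. The two
    # 26-deep chains exercise long delta chains on top of the detailed
    # end-of-line cases.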
    def test_make_mpdiffs(self):
        from bzrlib import multiparent
        files = self.get_versionedfiles('source')
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 26 insertions
        files.add_lines(self.get_simple_key('base'), [], ['line\n'])
        files.add_lines(self.get_simple_key('noeol'),
            self.get_parents([self.get_simple_key('base')]), ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        files.add_lines(self.get_simple_key('noeolsecond'),
            self.get_parents([self.get_simple_key('noeol')]),
            ['line\n', 'line'])
        # differing last line with parent, both no-eol
        files.add_lines(self.get_simple_key('noeolnotshared'),
            self.get_parents([self.get_simple_key('noeolsecond')]),
            ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        files.add_lines(self.get_simple_key('eol'),
            self.get_parents([self.get_simple_key('noeol')]), ['phone\n'])
        # add eol following a noneol parent, no change content
        files.add_lines(self.get_simple_key('eolline'),
            self.get_parents([self.get_simple_key('noeol')]), ['line\n'])
        # noeol with no parents:
        files.add_lines(self.get_simple_key('noeolbase'), [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        files.add_lines(self.get_simple_key('eolbeforefirstparent'),
            self.get_parents([self.get_simple_key('noeolbase'),
                self.get_simple_key('noeol')]),
            ['line'])
        # two identical no-eol texts
        files.add_lines(self.get_simple_key('noeoldup'),
            self.get_parents([self.get_simple_key('noeol')]), ['line'])
        next_parent = self.get_simple_key('base')
        text_name = 'chain1-'
        text = ['line\n']
        sha1s = {0: 'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1: '45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2: 'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3: '26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4: 'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5: 'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6: '2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7: '95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8: '779e9a0b28f9f832528d4b21e17e168c67697272',
                 9: '1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10: '131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11: 'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12: '31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13: 'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14: '2c4b1736566b8ca6051e668de68650686a3922f2',
                 15: '5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16: 'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17: '8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18: '5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19: '1ebed371807ba5935958ad0884595126e8c4e823',
                 20: '2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21: '01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22: 'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23: 'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24: 'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25: 'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        next_parent = self.get_simple_key('base')
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        target = self.get_versionedfiles('target')
        for key in multiparent.topo_iter_keys(files, files.keys()):
            mpdiff = files.make_mpdiffs([key])[0]
            parents = files.get_parent_map([key])[key] or []
            target.add_mpdiffs(
                [(key, parents, files.get_sha1s([key])[key], mpdiff)])
            self.assertEqualDiff(
                files.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'),
                target.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'))

    def test_keys(self):
        # While use is discouraged, versions() is still needed by aspects of
        # bzr.
        files = self.get_versionedfiles()
        self.assertEqual(set(), set(files.keys()))
        if self.key_length == 1:
            key = ('foo',)
        else:
            key = ('foo', 'bar',)
        files.add_lines(key, (), [])
        self.assertEqual(set([key]), set(files.keys()))


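# VirtualVersionedFiles adapts a parent-map callable and a get-lines
# callable into a read-only VersionedFiles: the query methods work, while
# the mutating methods raise NotImplementedError.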
class VirtualVersionedFilesTests(TestCase):
    """Basic tests for the VirtualVersionedFiles implementations."""

    def _get_parent_map(self, keys):
        ret = {}
        for k in keys:
            if k in self._parent_map:
                ret[k] = self._parent_map[k]
        return ret

    def setUp(self):
        TestCase.setUp(self)
        self._lines = {}
        self._parent_map = {}
        self.texts = VirtualVersionedFiles(self._get_parent_map,
                                           self._lines.get)

    def test_add_lines(self):
        self.assertRaises(NotImplementedError,
                self.texts.add_lines, "foo", [], [])

    def test_add_mpdiffs(self):
        self.assertRaises(NotImplementedError,
                self.texts.add_mpdiffs, [])

    def test_check(self):
        self.assertTrue(self.texts.check())

    def test_insert_record_stream(self):
        self.assertRaises(NotImplementedError, self.texts.insert_record_stream,
                          [])

    def test_get_sha1s_nonexistent(self):
        self.assertEquals({}, self.texts.get_sha1s([("NONEXISTENT",)]))

    def test_get_sha1s(self):
        self._lines["key"] = ["dataline1", "dataline2"]
        self.assertEquals({("key",): osutils.sha_strings(self._lines["key"])},
                          self.texts.get_sha1s([("key",)]))

    def test_get_parent_map(self):
        self._parent_map = {"G": ("A", "B")}
        self.assertEquals({("G",): (("A",), ("B",))},
                          self.texts.get_parent_map([("G",), ("L",)]))

    def test_get_record_stream(self):
        self._lines["A"] = ["FOO", "BAR"]
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("chunked", record.storage_kind)
        self.assertEquals("FOOBAR", record.get_bytes_as("fulltext"))
        self.assertEquals(["FOO", "BAR"], record.get_bytes_as("chunked"))

    def test_get_record_stream_absent(self):
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("absent", record.storage_kind)

    def test_iter_lines_added_or_present_in_keys(self):
        self._lines["A"] = ["FOO", "BAR"]
        self._lines["B"] = ["HEY"]
        self._lines["C"] = ["Alberta"]
        it = self.texts.iter_lines_added_or_present_in_keys([("A",), ("B",)])
        self.assertEquals(sorted([("FOO", "A"), ("BAR", "A"), ("HEY", "B")]),
                          sorted(list(it)))


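# OrderingVersionedFilesDecorator records every get_record_stream call in
# .calls and, for 'unordered' requests, re-yields records sorted by the
# key_priority map (lowest value first, with keys missing from the map
# sorting to the front by key); 'topological' requests pass through in
# topological order.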
class TestOrderingVersionedFilesDecorator(TestCaseWithMemoryTransport):

    def get_ordering_vf(self, key_priority):
        builder = self.make_branch_builder('test')
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'TREE_ROOT', 'directory', None))])
        builder.build_snapshot('B', ['A'], [])
        builder.build_snapshot('C', ['B'], [])
        builder.build_snapshot('D', ['C'], [])
        builder.finish_series()
        b = builder.get_branch()
        b.lock_read()
        self.addCleanup(b.unlock)
        vf = b.repository.inventories
        return versionedfile.OrderingVersionedFilesDecorator(vf, key_priority)

    def test_get_empty(self):
        vf = self.get_ordering_vf({})
        self.assertEqual([], vf.calls)

    def test_get_record_stream_topological(self):
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'topological', False)]
        # We should have gotten the keys in topological order
        self.assertEqual([('A',), ('B',), ('C',), ('D',)], keys)
        # And recorded that the request was made
        self.assertEqual([('get_record_stream', request_keys, 'topological',
                           False)], vf.calls)

    def test_get_record_stream_ordered(self):
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'unordered', False)]
        # They should be returned based on their priority
        self.assertEqual([('D',), ('B',), ('A',), ('C',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)

    def test_get_record_stream_implicit_order(self):
        vf = self.get_ordering_vf({('B',): 2, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'unordered', False)]
        # A and C are not in the map, so they get sorted to the front. A comes
        # before C alphabetically, so it comes back first
        self.assertEqual([('A',), ('C',), ('D',), ('B',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)