# Copyright (C) 2005, 2009 Canonical Ltd
#
# Johan Rydberg <jrydberg@gnu.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

# TODO: might be nice to create a versionedfile with some type of corruption
# considered typical and check that it can be detected/corrected.

from itertools import chain, izip
from StringIO import StringIO

from bzrlib import (
    errors,
    groupcompress,
    knit as _mod_knit,
    osutils,
    progress,
    )
from bzrlib.errors import (
    RevisionAlreadyPresent,
    RevisionNotPresent,
    )
from bzrlib.knit import (
    cleanup_pack_knit,
    make_file_factory,
    make_pack_factory,
    )
from bzrlib.tests import (
    TestCaseWithMemoryTransport,
    TestSkipped,
    condition_isinstance,
    multiply_tests,
    split_suite_by_condition,
    )
from bzrlib.tests.http_utils import TestCaseWithWebserver
from bzrlib.trace import mutter
from bzrlib.transport import get_transport
from bzrlib.transport.memory import MemoryTransport
from bzrlib.tsort import topo_sort
from bzrlib.tuned_gzip import GzipFile
import bzrlib.versionedfile as versionedfile
from bzrlib.versionedfile import (
    ConstantMapper,
    HashEscapedPrefixMapper,
    PrefixMapper,
    VirtualVersionedFiles,
    make_versioned_files_factory,
    )
from bzrlib.weave import WeaveFile
from bzrlib.weavefile import read_weave, write_weave


def load_tests(standard_tests, module, loader):
    """Parameterize VersionedFiles tests for different implementations."""
    to_adapt, result = split_suite_by_condition(
        standard_tests, condition_isinstance(TestVersionedFiles))
    # We want to be sure of behaviour for:
    # weaves prefix layout (weave texts)
    # individually named weaves (weave inventories)
    # annotated knits - prefix|hash|hash-escape layout, we test the third only
    #                   as it is the most complex mapper.
    # individually named knits
    # individual no-graph knits in packs (signatures)
    # individual graph knits in packs (inventories)
    # individual graph nocompression knits in packs (revisions)
    # plain text knits in packs (texts)
    len_one_scenarios = [
            'factory': make_versioned_files_factory(WeaveFile,
                ConstantMapper('inventory')),
            'support_partial_insertion': False,
            'factory': make_file_factory(False, ConstantMapper('revisions')),
            'support_partial_insertion': False,
        ('named-nograph-nodelta-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(False, False, 1),
            'support_partial_insertion': False,
        ('named-graph-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(True, True, 1),
            'support_partial_insertion': True,
        ('named-graph-nodelta-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(True, False, 1),
            'support_partial_insertion': False,
        ('groupcompress-nograph', {
            'cleanup': groupcompress.cleanup_pack_group,
            'factory': groupcompress.make_pack_factory(False, False, 1),
            'support_partial_insertion': False,
        ]
    len_two_scenarios = [
            'factory': make_versioned_files_factory(WeaveFile,
            'support_partial_insertion': False,
        ('annotated-knit-escape', {
            'factory': make_file_factory(True, HashEscapedPrefixMapper()),
            'support_partial_insertion': False,
        ('plain-knit-pack', {
            'cleanup': cleanup_pack_knit,
            'factory': make_pack_factory(True, True, 2),
            'support_partial_insertion': True,
            'cleanup': groupcompress.cleanup_pack_group,
            'factory': groupcompress.make_pack_factory(True, False, 1),
            'support_partial_insertion': False,
        ]
    scenarios = len_one_scenarios + len_two_scenarios
    return multiply_tests(to_adapt, scenarios, result)
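
# Each scenario above is a (name, attributes) pair consumed by multiply_tests;
# the attribute dicts are abbreviated in this excerpt. For illustration only,
# a hypothetical complete entry would look roughly like:
#
#     ('named-knit', {
#         'cleanup': None,
#         'factory': make_file_factory(False, ConstantMapper('revisions')),
#         'graph': True,
#         'key_length': 1,
#         'support_partial_insertion': False,
#         }),
#
# 'graph' and 'key_length' are assumptions inferred from how the generated
# TestVersionedFiles tests use self.graph and self.key_length below.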


def get_diamond_vf(f, trailing_eol=True, left_only=False):
    """Get a diamond graph to exercise deltas and merges.

    :param trailing_eol: If True end the last line with \n.
    """
    parents = {
        'origin': (),
        'base': (('origin',),),
        'left': (('base',),),
        'right': (('base',),),
        'merged': (('left',), ('right',)),
        }
    # insert a diamond graph to exercise deltas and merges.
    if trailing_eol:
        last_char = '\n'
    else:
        last_char = ''
    f.add_lines('origin', [], ['origin' + last_char])
    f.add_lines('base', ['origin'], ['base' + last_char])
    f.add_lines('left', ['base'], ['base\n', 'left' + last_char])
    if not left_only:
        f.add_lines('right', ['base'],
            ['base\n', 'right' + last_char])
        f.add_lines('merged', ['left', 'right'],
            ['base\n', 'left\n', 'right\n', 'merged' + last_char])
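
# The graph built by get_diamond_vf (and get_diamond_files below),
# reconstructed from the add_lines calls above:
#
#            origin
#              |
#             base
#             /  \
#          left  right
#             \  /
#            merged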


def get_diamond_files(files, key_length, trailing_eol=True, left_only=False,
    nograph=False, nokeys=False):
    """Get a diamond graph to exercise deltas and merges.

    This creates a 5-node graph in files. If files supports 2-length keys two
    graphs are made to exercise the support for multiple ids.

    :param trailing_eol: If True end the last line with \n.
    :param key_length: The length of keys in files. Currently supports length 1
        and 2.
    :param left_only: If True do not add the right and merged nodes.
    :param nograph: If True, do not provide parents to the add_lines calls;
        this is useful for tests that need inserted data but have graphless
        stores.
    :param nokeys: If True, pass None in as the key for all insertions.
        Currently implies nograph.
    :return: The results of the add_lines calls.
    """
    prefixes = [('FileA',), ('FileB',)]
    # insert a diamond graph to exercise deltas and merges.
    def get_parents(suffix_list):
            result = [prefix + suffix for suffix in suffix_list]
    # we loop over each key because that spreads the inserts across prefixes,
    # which is how commit operates.
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('origin'), (),
            ['origin' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('base'),
            get_parents([('origin',)]), ['base' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('left'),
            get_parents([('base',)]),
            ['base\n', 'left' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('right'),
            get_parents([('base',)]),
            ['base\n', 'right' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('merged'),
            get_parents([('left',), ('right',)]),
            ['base\n', 'left\n', 'right\n', 'merged' + last_char]))
    return result
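
# For key_length 1 the keys above are simple 1-tuples such as ('origin',);
# for key_length 2 each text is inserted once per prefix, giving keys such as
# ('FileA', 'origin') and ('FileB', 'origin'). A sketch of the shapes:
#
#     get_key('origin')           # -> ('origin',)
#     ('FileA',) + ('origin',)    # -> ('FileA', 'origin')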


class VersionedFileTestMixIn(object):
    """A mixin test class for testing VersionedFiles.

    This is not an adaptor-style test at this point because
    there's no dynamic substitution of versioned file implementations,
    they are strictly controlled by their owning repositories.
    """

    def get_transaction(self):
        if not hasattr(self, '_transaction'):
            self._transaction = None
        return self._transaction

    def test_add(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
        versions = f.versions()
        self.assertTrue('r0' in versions)
        self.assertTrue('r1' in versions)
        self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
        self.assertEquals(f.get_text('r0'), 'a\nb\n')
        self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
        self.assertEqual(2, len(f))
        self.assertEqual(2, f.num_versions())
        self.assertRaises(RevisionNotPresent,
            f.add_lines, 'r2', ['foo'], [])
        self.assertRaises(RevisionAlreadyPresent,
            f.add_lines, 'r1', [], [])
        # this checks that reopen with create=True does not break anything.
        f = self.reopen_file(create=True)

    def test_adds_with_parent_texts(self):
        f = self.get_file()
        parent_texts = {}
        _, _, parent_texts['r0'] = f.add_lines('r0', [], ['a\n', 'b\n'])
        try:
            _, _, parent_texts['r1'] = f.add_lines_with_ghosts('r1',
                ['r0', 'ghost'], ['b\n', 'c\n'], parent_texts=parent_texts)
        except NotImplementedError:
            # if the format doesn't support ghosts, just add normally.
            _, _, parent_texts['r1'] = f.add_lines('r1',
                ['r0'], ['b\n', 'c\n'], parent_texts=parent_texts)
        f.add_lines('r2', ['r1'], ['c\n', 'd\n'], parent_texts=parent_texts)
        self.assertNotEqual(None, parent_texts['r0'])
        self.assertNotEqual(None, parent_texts['r1'])
        versions = f.versions()
        self.assertTrue('r0' in versions)
        self.assertTrue('r1' in versions)
        self.assertTrue('r2' in versions)
        self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
        self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
        self.assertEquals(f.get_lines('r2'), ['c\n', 'd\n'])
        self.assertEqual(3, f.num_versions())
        origins = f.annotate('r1')
        self.assertEquals(origins[0][0], 'r0')
        self.assertEquals(origins[1][0], 'r1')
        origins = f.annotate('r2')
        self.assertEquals(origins[0][0], 'r1')
        self.assertEquals(origins[1][0], 'r2')

        f = self.reopen_file()

    def test_add_unicode_content(self):
        # unicode content is not permitted in versioned files.
        # versioned files version sequences of bytes only.
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterUnicode,
            vf.add_lines, 'a', [], ['a\n', u'b\n', 'c\n'])
        self.assertRaises(
            (errors.BzrBadParameterUnicode, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n', u'b\n', 'c\n'])

    def test_add_follows_left_matching_blocks(self):
        """If we change left_matching_blocks, the delta changes.

        Note: There are multiple correct deltas in this case, because
        we start with 1 "a" and we get 3.
        """
        vf = self.get_file()
        if isinstance(vf, WeaveFile):
            raise TestSkipped("WeaveFile ignores left_matching_blocks")
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['a\n', 'a\n', 'a\n'],
                     left_matching_blocks=[(0, 0, 1), (1, 3, 0)])
        self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('2'))
        vf.add_lines('3', ['1'], ['a\n', 'a\n', 'a\n'],
                     left_matching_blocks=[(0, 2, 1), (1, 3, 0)])
        self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('3'))
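
# Reading the calls above: left_matching_blocks appears to use difflib-style
# matching blocks, each a (left_start, new_start, length) triple with a
# zero-length sentinel at the end. So (0, 0, 1) aligns line 0 of version '1'
# with line 0 of the new text, while (0, 2, 1) aligns it with line 2 instead.
# The exact triple semantics are an assumption inferred from these tests.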

    def test_inline_newline_throws(self):
        # newline characters are not permitted within a line being added
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterContainsNewline,
            vf.add_lines, 'a', [], ['a\n\n'])
        self.assertRaises(
            (errors.BzrBadParameterContainsNewline, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n\n'])
        # but inline CRs are allowed
        vf.add_lines('a', [], ['a\r\n'])
        try:
            vf.add_lines_with_ghosts('b', [], ['a\r\n'])
        except NotImplementedError:
            pass

    def test_add_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId,
            vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])

    def test_add_lines_nostoresha(self):
        """When nostore_sha is supplied, adding old content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                version + "2")

    def test_add_lines_with_ghosts_nostoresha(self):
        """When nostore_sha is supplied, adding old content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        # is the test applicable to this vf implementation?
        try:
            vf.add_lines_with_ghosts('d', [], [])
        except NotImplementedError:
            raise TestSkipped("add_lines_with_ghosts is optional")
        for sha, (version, lines) in zip(
            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines_with_ghosts, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                version + "2")

    def test_add_lines_return_value(self):
        # add_lines should return the sha1 and the text size.
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        # check results for the three cases:
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            # the first two elements are the same for all versioned files:
            # - the digest and the size of the text. For some versioned files
            #   additional data is returned in additional tuple elements.
            result = vf.add_lines(version, [], lines)
            self.assertEqual(3, len(result))
            self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
                result[0:2])
        # parents should not affect the result:
        lines = sample_text_nl[1]
        self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
            vf.add_lines('d', ['b', 'c'], lines)[0:2])

    def test_get_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId, vf.get_texts, ['b:'])
        self.assertRaises(errors.ReservedId, vf.get_lines, 'b:')
        self.assertRaises(errors.ReservedId, vf.get_text, 'b:')

    def test_add_unchanged_last_line_noeol_snapshot(self):
        """Adding a text with an unchanged last line with no eol should work."""
        # Test adding this in a number of chain lengths; because the interface
        # for VersionedFile does not allow forcing a specific chain length, we
        # just use a small base to get the first snapshot, then a much longer
        # first line for the next add (which will make the third add snapshot)
        # and so on. 20 has been chosen as an arbitrary figure - knits use 200
        # as a capped delta length, but ideally we would have some way of
        # tuning the test to the store (e.g. keep going until a snapshot
        # happens).
        for length in range(20):
            version_lines = {}
            vf = self.get_file('case-%d' % length)
            for step in range(length):
                version = prefix % step
                lines = (['prelude \n'] * step) + ['line']
                vf.add_lines(version, parents, lines)
                version_lines[version] = lines
                parents = [version]
            vf.add_lines('no-eol', parents, ['line'])
            vf.get_texts(version_lines.keys())
            self.assertEqualDiff('line', vf.get_text('no-eol'))

    def test_get_texts_eol_variation(self):
        # similar to the failure in <http://bugs.launchpad.net/234748>
        vf = self.get_file()
        sample_text_nl = ["line\n"]
        sample_text_no_nl = ["line"]
                lines = sample_text_nl
                lines = sample_text_no_nl
            # left_matching_blocks is an internal api; it operates on the
            # *internal* representation for a knit, which is with *all* lines
            # being normalised to end with \n - even the final line in a no_nl
            # file. Using it here ensures that a broken internal implementation
            # (which is what this test tests) will generate a correct line
            # delta (which is to say, an empty delta).
            vf.add_lines(version, parents, lines,
                left_matching_blocks=[(0, 0, 1)])
            parents = [version]
            versions.append(version)
            version_lines[version] = lines
        vf.get_texts(versions)
        vf.get_texts(reversed(versions))

    def test_add_lines_with_matching_blocks_noeol_last_line(self):
        """Adding a text with an unchanged last line with no eol should work."""
        from bzrlib import multiparent
        # Hand verified sha1 of the text we're adding.
        sha1 = '6a1d115ec7b60afb664dc14890b5af5ce3c827a4'
        # Create a mpdiff which adds a new line before the trailing line, and
        # reuses the last line unaltered (which can cause annotation reuse).
        # Test adding this in two situations:
        # On top of a new insertion
        vf = self.get_file('fulltext')
        vf.add_lines('noeol', [], ['line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(0, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
        # On top of a delta
        vf = self.get_file('delta')
        vf.add_lines('base', [], ['line'])
        vf.add_lines('noeol', ['base'], ['prelude\n', 'line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(1, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))

    def test_make_mpdiffs(self):
        from bzrlib import multiparent
        vf = self.get_file('foo')
        sha1s = self._setup_for_deltas(vf)
        new_vf = self.get_file('bar')
        for version in multiparent.topo_iter(vf):
            mpdiff = vf.make_mpdiffs([version])[0]
            new_vf.add_mpdiffs([(version, vf.get_parent_map([version])[version],
                                 vf.get_sha1s([version])[version], mpdiff)])
            self.assertEqualDiff(vf.get_text(version),
                                 new_vf.get_text(version))
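
# A sketch of the mpdiff round-trip exercised above: make_mpdiffs produces one
# multiparent diff per requested version, and add_mpdiffs consumes
# (key, parents, expected_sha1, mpdiff) tuples, recreating identical texts:
#
#     mpdiff = vf.make_mpdiffs(['some-version'])[0]
#     new_vf.add_mpdiffs([('some-version', parents, sha1, mpdiff)])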

    def test_make_mpdiffs_with_ghosts(self):
        vf = self.get_file('foo')
        try:
            vf.add_lines_with_ghosts('text', ['ghost'], ['line\n'])
        except NotImplementedError:
            # old Weave formats do not allow ghosts
            return
        self.assertRaises(errors.RevisionNotPresent, vf.make_mpdiffs, ['ghost'])

    def _setup_for_deltas(self, f):
        self.assertFalse(f.has_version('base'))
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 25 insertions
        f.add_lines('base', [], ['line\n'])
        f.add_lines('noeol', ['base'], ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        f.add_lines('noeolsecond', ['noeol'], ['line\n', 'line'])
        # differing last line with parent, both no-eol
        f.add_lines('noeolnotshared', ['noeolsecond'], ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        f.add_lines('eol', ['noeol'], ['phone\n'])
        # add eol following a noneol parent, no change content
        f.add_lines('eolline', ['noeol'], ['line\n'])
        # noeol with no parents:
        f.add_lines('noeolbase', [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol, with the
        # later-inserted parent the leftmost.
        f.add_lines('eolbeforefirstparent', ['noeolbase', 'noeol'], ['line'])
        # two identical eol texts
        f.add_lines('noeoldup', ['noeol'], ['line'])
        next_parent = 'base'
        text_name = 'chain1-'
        text = ['line\n']
        sha1s = {0: 'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1: '45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2: 'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3: '26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4: 'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5: 'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6: '2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7: '95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8: '779e9a0b28f9f832528d4b21e17e168c67697272',
                 9: '1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10: '131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11: 'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12: '31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13: 'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14: '2c4b1736566b8ca6051e668de68650686a3922f2',
                 15: '5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16: 'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17: '8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18: '5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19: '1ebed371807ba5935958ad0884595126e8c4e823',
                 20: '2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21: '01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22: 'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23: 'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24: 'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25: 'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = text_name + '%s' % depth
            text = text + ['line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        next_parent = 'base'
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = text_name + '%s' % depth
            text = text + ['line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        return sha1s

    def test_ancestry(self):
        f = self.get_file()
        self.assertEqual([], f.get_ancestry([]))
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
        f.add_lines('r2', ['r0'], ['b\n', 'c\n'])
        f.add_lines('r3', ['r2'], ['b\n', 'c\n'])
        f.add_lines('rM', ['r1', 'r2'], ['b\n', 'c\n'])
        self.assertEqual([], f.get_ancestry([]))
        versions = f.get_ancestry(['rM'])
        # there are some possibilities:
        # r0 r1 r2 rM
        # r0 r2 r1 rM
        # so we check indexes
        r0 = versions.index('r0')
        r1 = versions.index('r1')
        r2 = versions.index('r2')
        self.assertFalse('r3' in versions)
        rM = versions.index('rM')
        self.assertTrue(r0 < r1)
        self.assertTrue(r0 < r2)
        self.assertTrue(r1 < rM)
        self.assertTrue(r2 < rM)

        self.assertRaises(RevisionNotPresent,
            f.get_ancestry, ['rM', 'rX'])

        self.assertEqual(set(f.get_ancestry('rM')),
            set(f.get_ancestry('rM', topo_sorted=False)))

    def test_mutate_after_finish(self):
        self._transaction = 'before'
        f = self.get_file()
        self._transaction = 'after'
        self.assertRaises(errors.OutSideTransaction, f.add_lines, '', [], [])
        self.assertRaises(errors.OutSideTransaction, f.add_lines_with_ghosts, '', [], [])

    def test_copy_to(self):
        f = self.get_file()
        f.add_lines('0', [], ['a\n'])
        t = MemoryTransport()
        f.copy_to('foo', t)
        for suffix in self.get_factory().get_suffixes():
            self.assertTrue(t.has('foo' + suffix))

    def test_get_suffixes(self):
        f = self.get_file()
        # and should be a list
        self.assertTrue(isinstance(self.get_factory().get_suffixes(), list))

    def test_get_parent_map(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        self.assertEqual(
            {'r0':()}, f.get_parent_map(['r0']))
        f.add_lines('r1', ['r0'], ['a\n', 'b\n'])
        self.assertEqual(
            {'r1':('r0',)}, f.get_parent_map(['r1']))
        self.assertEqual(
            {'r0':(),
             'r1':('r0',)},
            f.get_parent_map(['r0', 'r1']))
        f.add_lines('r2', [], ['a\n', 'b\n'])
        f.add_lines('r3', [], ['a\n', 'b\n'])
        f.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n'])
        self.assertEqual(
            {'m':('r0', 'r1', 'r2', 'r3')}, f.get_parent_map(['m']))
        self.assertEqual({}, f.get_parent_map('y'))
        self.assertEqual(
            {'r0':(),
             'r1':('r0',)},
            f.get_parent_map(['r0', 'y', 'r1']))

    def test_annotate(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
        origins = f.annotate('r1')
        self.assertEquals(origins[0][0], 'r1')
        self.assertEquals(origins[1][0], 'r0')

        self.assertRaises(RevisionNotPresent,
            f.annotate, 'foo')

    def test_detection(self):
        # Test weaves detect corruption.
        #
        # Weaves contain a checksum of their texts.
        # When a text is extracted, this checksum should be
        # verified.
        w = self.get_file_corrupted_text()
        self.assertEqual('hello\n', w.get_text('v1'))
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.check)

        w = self.get_file_corrupted_checksum()
        self.assertEqual('hello\n', w.get_text('v1'))
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.check)

    def get_file_corrupted_text(self):
        """Return a versioned file with corrupt text but valid metadata."""
        raise NotImplementedError(self.get_file_corrupted_text)

    def reopen_file(self, name='foo'):
        """Open the versioned file from disk again."""
        raise NotImplementedError(self.reopen_file)

    def test_iter_lines_added_or_present_in_versions(self):
        # test that we get at least an equal set of the lines added by
        # versions in the weave
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.DummyProgress):

            def __init__(self):
                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        vf = self.get_file()
        # add a base to get included
        vf.add_lines('base', [], ['base\n'])
        # add an ancestor to be included on one side
        vf.add_lines('lancestor', [], ['lancestor\n'])
        # add an ancestor to be included on the other side
        vf.add_lines('rancestor', ['base'], ['rancestor\n'])
        # add a child of rancestor with no eof newline
        vf.add_lines('child', ['rancestor'], ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        vf.add_lines('otherchild',
                     ['lancestor', 'base'],
                     ['base\n', 'lancestor\n', 'otherchild\n'])

        def iter_with_versions(versions, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in vf.iter_lines_added_or_present_in_versions(versions,
                    pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines

        lines = iter_with_versions(['child', 'otherchild'],
                                   [('Walking content', 0, 2),
                                    ('Walking content', 1, 2),
                                    ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
        # we don't care if we got more than that.

        lines = iter_with_versions(None, [('Walking content', 0, 5),
                                          ('Walking content', 1, 5),
                                          ('Walking content', 2, 5),
                                          ('Walking content', 3, 5),
                                          ('Walking content', 4, 5),
                                          ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', 'base')] > 0)
        self.assertTrue(lines[('lancestor\n', 'lancestor')] > 0)
        self.assertTrue(lines[('rancestor\n', 'rancestor')] > 0)
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)

    def test_add_lines_with_ghosts(self):
        # some versioned file formats allow lines to be added with parent
        # information that is more than that in the format. Formats that do
        # not support this need to raise NotImplementedError on the
        # add_lines_with_ghosts api.
        vf = self.get_file()
        # add a revision with ghost parents
        # The preferred form is utf8, but we should translate when needed
        parent_id_unicode = u'b\xbfse'
        parent_id_utf8 = parent_id_unicode.encode('utf8')
        try:
            vf.add_lines_with_ghosts('notbxbfse', [parent_id_utf8], [])
        except NotImplementedError:
            # check the other ghost apis are also not implemented
            self.assertRaises(NotImplementedError, vf.get_ancestry_with_ghosts, ['foo'])
            self.assertRaises(NotImplementedError, vf.get_parents_with_ghosts, 'foo')
            return
        vf = self.reopen_file()
        # test key graph related apis: get_ancestry, _graph, get_parents
        # - these are ghost unaware and must not reflect ghosts
        self.assertEqual(['notbxbfse'], vf.get_ancestry('notbxbfse'))
        self.assertFalse(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
        # if we add something that is a ghost of another, it should correct the
        # results of the prior apis
        vf.add_lines(parent_id_utf8, [], [])
        self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry(['notbxbfse']))
        self.assertEqual({'notbxbfse':(parent_id_utf8,)},
            vf.get_parent_map(['notbxbfse']))
        self.assertTrue(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'],
            vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))

    def test_add_lines_with_ghosts_after_normal_revs(self):
        # some versioned file formats allow lines to be added with parent
        # information that is more than that in the format. Formats that do
        # not support this need to raise NotImplementedError on the
        # add_lines_with_ghosts api.
        vf = self.get_file()
        # probe for ghost support
        try:
            vf.add_lines_with_ghosts('base', [], ['line\n', 'line_b\n'])
        except NotImplementedError:
            return
        vf.add_lines_with_ghosts('references_ghost',
                                 ['base', 'a_ghost'],
                                 ['line\n', 'line_b\n', 'line_c\n'])
        origins = vf.annotate('references_ghost')
        self.assertEquals(('base', 'line\n'), origins[0])
        self.assertEquals(('base', 'line_b\n'), origins[1])
        self.assertEquals(('references_ghost', 'line_c\n'), origins[2])

    def test_readonly_mode(self):
        transport = get_transport(self.get_url('.'))
        factory = self.get_factory()
        vf = factory('id', transport, 0777, create=True, access_mode='w')
        vf = factory('id', transport, access_mode='r')
        self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
        self.assertRaises(errors.ReadOnlyError,
                          vf.add_lines_with_ghosts,
                          'base', [], [])

    def test_get_sha1s(self):
        # check the sha1 data is available
        vf = self.get_file()
        vf.add_lines('a', [], ['a\n'])
        # the same file, different metadata
        vf.add_lines('b', ['a'], ['a\n'])
        # a file differing only in last newline.
        vf.add_lines('c', [], ['a'])
        self.assertEqual({
            'a': '3f786850e387550fdab836ed7e6dc881de23001b',
            'c': '86f7e437faa5a7fce15d1ddcb9eaeaea377667b8',
            'b': '3f786850e387550fdab836ed7e6dc881de23001b',
            },
            vf.get_sha1s(['a', 'c', 'b']))


class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)

    def get_file_corrupted_text(self):
        w = WeaveFile('foo', get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)
        w.add_lines('v1', [], ['hello\n'])
        w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])

        # We are going to invasively corrupt the text
        # Make sure the internals of weave are the same
        self.assertEqual([('{', 0)
            , 'hello\n'
            , ('}', None)
            , ('{', 1)
            , 'there\n'
            , ('}', None)
            ], w._weave)

        self.assertEqual(['f572d396fae9206628714fb2ce00f72e94f2258f'
            , '90f265c6e75f1c8f9ab76dcf85528352c5f215ef'
            ], w._sha1s)

        # Corrupted
        w._weave[4] = 'There\n'
        return w

    def get_file_corrupted_checksum(self):
        w = self.get_file_corrupted_text()
        # Corrected
        w._weave[4] = 'there\n'
        self.assertEqual('hello\nthere\n', w.get_text('v2'))

        # Invalid checksum, first digit changed
        w._sha1s[1] = 'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
        return w

    def reopen_file(self, name='foo', create=False):
        return WeaveFile(name, get_transport(self.get_url('.')), create=create,
            get_scope=self.get_transaction)

    def test_no_implicit_create(self):
        self.assertRaises(errors.NoSuchFile,
                          WeaveFile,
                          'foo',
                          get_transport(self.get_url('.')),
                          get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile


class TestPlanMergeVersionedFile(TestCaseWithMemoryTransport):

    def setUp(self):
        TestCaseWithMemoryTransport.setUp(self)
        mapper = PrefixMapper()
        factory = make_file_factory(True, mapper)
        self.vf1 = factory(self.get_transport('root-1'))
        self.vf2 = factory(self.get_transport('root-2'))
        self.plan_merge_vf = versionedfile._PlanMergeVersionedFile('root')
        self.plan_merge_vf.fallback_versionedfiles.extend([self.vf1, self.vf2])

    def test_add_lines(self):
        self.plan_merge_vf.add_lines(('root', 'a:'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          ('root', 'a'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          ('root', 'a:'), None, [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
                          ('root', 'a:'), [], None)

    def setup_abcde(self):
        self.vf1.add_lines(('root', 'A'), [], ['a'])
        self.vf1.add_lines(('root', 'B'), [('root', 'A')], ['b'])
        self.vf2.add_lines(('root', 'C'), [], ['c'])
        self.vf2.add_lines(('root', 'D'), [('root', 'C')], ['d'])
        self.plan_merge_vf.add_lines(('root', 'E:'),
            [('root', 'B'), ('root', 'D')], ['e'])

    def test_get_parents(self):
        self.setup_abcde()
        self.assertEqual({('root', 'B'):(('root', 'A'),)},
            self.plan_merge_vf.get_parent_map([('root', 'B')]))
        self.assertEqual({('root', 'D'):(('root', 'C'),)},
            self.plan_merge_vf.get_parent_map([('root', 'D')]))
        self.assertEqual({('root', 'E:'):(('root', 'B'),('root', 'D'))},
            self.plan_merge_vf.get_parent_map([('root', 'E:')]))
        self.assertEqual({},
            self.plan_merge_vf.get_parent_map([('root', 'F')]))
        self.assertEqual({
            ('root', 'B'):(('root', 'A'),),
            ('root', 'D'):(('root', 'C'),),
            ('root', 'E:'):(('root', 'B'),('root', 'D')),
            },
            self.plan_merge_vf.get_parent_map(
                [('root', 'B'), ('root', 'D'), ('root', 'E:'), ('root', 'F')]))

    def test_get_record_stream(self):
        self.setup_abcde()
        def get_record(suffix):
            return self.plan_merge_vf.get_record_stream(
                [('root', suffix)], 'unordered', True).next()
        self.assertEqual('a', get_record('A').get_bytes_as('fulltext'))
        self.assertEqual('c', get_record('C').get_bytes_as('fulltext'))
        self.assertEqual('e', get_record('E:').get_bytes_as('fulltext'))
        self.assertEqual('absent', get_record('F').storage_kind)


class TestReadonlyHttpMixin(object):

    def get_transaction(self):
        return 1

    def test_readonly_http_works(self):
        # we should be able to read from http with a versioned file.
        vf = self.get_file()
        # try an empty file access
        readonly_vf = self.get_factory()('foo', get_transport(
            self.get_readonly_url('.')))
        self.assertEqual([], readonly_vf.versions())

        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
        readonly_vf = self.get_factory()('foo', get_transport(
            self.get_readonly_url('.')))
        self.assertEqual(['1', '2'], vf.versions())
        for version in readonly_vf.versions():
            readonly_vf.get_lines(version)


class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):

    def get_file(self):
        return WeaveFile('foo', get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile


class MergeCasesMixin(object):

    def doMerge(self, base, a, b, mp):
        from cStringIO import StringIO
        from textwrap import dedent

        def addcrlf(x):
            return x + '\n'

        w = self.get_file()
        w.add_lines('text0', [], map(addcrlf, base))
        w.add_lines('text1', ['text0'], map(addcrlf, a))
        w.add_lines('text2', ['text0'], map(addcrlf, b))

        self.log_contents(w)

        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))

        mt = StringIO()
        mt.writelines(w.weave_merge(p))
        mt.seek(0)
        self.log(mt.getvalue())

        mp = map(addcrlf, mp)
        self.assertEqual(mt.readlines(), mp)

    def testOneInsert(self):

    def testSeparateInserts(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    def testSameInsert(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    overlappedInsertExpected = ['aaa', 'xxx', 'yyy', 'bbb']

    def testOverlappedInsert(self):
        self.doMerge(['aaa', 'bbb'],
                     ['aaa', 'xxx', 'yyy', 'bbb'],
                     ['aaa', 'xxx', 'bbb'], self.overlappedInsertExpected)

        # really it ought to reduce this to
        # ['aaa', 'xxx', 'yyy', 'bbb']

    def testClashReplace(self):
        self.doMerge(['aaa'],
                     ['<<<<<<< ', 'xxx', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert1(self):
        self.doMerge(['aaa'],
                     ['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert2(self):
        self.doMerge(['aaa'],

    def testDeleteAndModify(self):
        """Clashing delete and modification.

        If one side modifies a region and the other deletes it then
        there should be a conflict with one side blank.
        """
        #######################################
        # skipped, not working yet
        return

        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'ddd', 'ccc'],
                     ['aaa', 'ccc'],
                     ['<<<<<<<< ', 'aaa', '=======', '>>>>>>> ', 'ccc'])

    def _test_merge_from_strings(self, base, a, b, expected):
        w = self.get_file()
        w.add_lines('text0', [], base.splitlines(True))
        w.add_lines('text1', ['text0'], a.splitlines(True))
        w.add_lines('text2', ['text0'], b.splitlines(True))
        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))
        self.log('merge result:')
        result_text = ''.join(w.weave_merge(p))
        self.log(result_text)
        self.assertEqualDiff(result_text, expected)

    def test_weave_merge_conflicts(self):
        # does weave merge properly handle plans that end with unchanged?
        result = ''.join(self.get_file().weave_merge([('new-a', 'hello\n')]))
        self.assertEqual(result, 'hello\n')

    def test_deletion_extended(self):
        """One side deletes, the other deletes more.
        """
        self._test_merge_from_strings(base, a, b, result)

    def test_deletion_overlap(self):
        """Delete overlapping regions with no other conflict.

        Arguably it'd be better to treat these as agreement, rather than
        conflict, but for now conflict is safer.
        """
        self._test_merge_from_strings(base, a, b, result)

    def test_agreement_deletion(self):
        """Agree to delete some lines, without conflicts."""
        self._test_merge_from_strings(base, a, b, result)

    def test_sync_on_deletion(self):
        """Specific case of merge where we can synchronize incorrectly.

        A previous version of the weave merge concluded that the two versions
        agreed on deleting line 2, and this could be a synchronization point.
        Line 1 was then considered in isolation, and thought to be deleted on
        both sides.

        It's better to consider the whole thing as a disagreement region.
        """
            a's replacement line 2
            a's replacement line 2
        self._test_merge_from_strings(base, a, b, result)


class TestWeaveMerge(TestCaseWithMemoryTransport, MergeCasesMixin):

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True)

    def log_contents(self, w):
        self.log('weave is:')
        tmpf = StringIO()
        write_weave(w, tmpf)
        self.log(tmpf.getvalue())

    overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======',
                                'xxx', '>>>>>>> ', 'bbb']


class TestContentFactoryAdaption(TestCaseWithMemoryTransport):

    def test_select_adaptor(self):
        """Test expected adapters exist."""
        # One scenario for each lookup combination we expect to use.
        # Each is source_kind, requested_kind, adapter class
        scenarios = [
            ('knit-delta-gz', 'fulltext', _mod_knit.DeltaPlainToFullText),
            ('knit-ft-gz', 'fulltext', _mod_knit.FTPlainToFullText),
            ('knit-annotated-delta-gz', 'knit-delta-gz',
                _mod_knit.DeltaAnnotatedToUnannotated),
            ('knit-annotated-delta-gz', 'fulltext',
                _mod_knit.DeltaAnnotatedToFullText),
            ('knit-annotated-ft-gz', 'knit-ft-gz',
                _mod_knit.FTAnnotatedToUnannotated),
            ('knit-annotated-ft-gz', 'fulltext',
                _mod_knit.FTAnnotatedToFullText),
            ]
        for source, requested, klass in scenarios:
            adapter_factory = versionedfile.adapter_registry.get(
                (source, requested))
            adapter = adapter_factory(None)
            self.assertIsInstance(adapter, klass)
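
# For illustration, the same registry lookup can be used to adapt a single
# record (a sketch following the pattern exercised above; 'record' would come
# from get_record_stream on an annotated knit):
#
#     adapter_factory = versionedfile.adapter_registry.get(
#         ('knit-annotated-ft-gz', 'fulltext'))
#     adapter = adapter_factory(None)
#     text = adapter.get_bytes(record)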

    def get_knit(self, annotated=True):
        mapper = ConstantMapper('knit')
        transport = self.get_transport()
        return make_file_factory(annotated, mapper)(transport)

    def helpGetBytes(self, f, ft_adapter, delta_adapter):
        """Grab the interesting adapted texts for tests."""
        # origin is a fulltext
        entries = f.get_record_stream([('origin',)], 'unordered', False)
        base = entries.next()
        ft_data = ft_adapter.get_bytes(base)
        # merged is both a delta and multiple parents.
        entries = f.get_record_stream([('merged',)], 'unordered', False)
        merged = entries.next()
        delta_data = delta_adapter.get_bytes(merged)
        return ft_data, delta_data

    def test_deannotation_noeol(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 4 32c2e79763b3f90e8ccde37f9710b6629c25a796\n'
            '1,2,3\nleft\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())

    def test_deannotation(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 3 ed8bce375198ea62444dc71952b22cfc2b09226d\n'
            '2,2,2\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())

    def test_annotated_to_fulltext_no_eol(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_annotated_to_fulltext(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_unannotated_to_fulltext(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_unannotated_to_fulltext_no_eol(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)


class TestKeyMapper(TestCaseWithMemoryTransport):
    """Tests for various key mapping logic."""

    def test_identity_mapper(self):
        mapper = versionedfile.ConstantMapper("inventory")
        self.assertEqual("inventory", mapper.map(('foo@ar',)))
        self.assertEqual("inventory", mapper.map(('quux',)))

    def test_prefix_mapper(self):
        mapper = versionedfile.PrefixMapper()
        self.assertEqual("file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("file-id"))
        self.assertEqual(('new-id',), mapper.unmap("new-id"))

    def test_hash_prefix_mapper(self):
        # format6: hash + plain
        mapper = versionedfile.HashPrefixMapper()
        self.assertEqual("9b/file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("45/new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("9b/file-id"))
        self.assertEqual(('new-id',), mapper.unmap("45/new-id"))

    def test_hash_escaped_mapper(self):
        # knit1: hash + escaped
        mapper = versionedfile.HashEscapedPrefixMapper()
        self.assertEqual("88/%2520", mapper.map((" ", "revision-id")))
        self.assertEqual("ed/fil%2545-%2549d", mapper.map(("filE-Id",
            "revision-id")))
        self.assertEqual("88/ne%2557-%2549d", mapper.map(("neW-Id",
            "revision-id")))
        self.assertEqual(('filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
        self.assertEqual(('neW-Id',), mapper.unmap("88/ne%2557-%2549d"))
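
# Reading the assertions above: HashEscapedPrefixMapper quotes characters that
# are unsafe in file names (including uppercase letters), and the '%' of the
# escape is itself quoted, so 'E' -> '%45' -> '%2545' and ' ' -> '%2520'. The
# two-character directory prefix ('88/', 'ed/', ...) is a hash of the file id,
# as with HashPrefixMapper. This summary is inferred from the expected values
# in the tests, not from the mapper implementation itself.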


class TestVersionedFiles(TestCaseWithMemoryTransport):
    """Tests for the multiple-file variant of VersionedFile."""

    def get_versionedfiles(self, relpath='files'):
        transport = self.get_transport(relpath)
            transport.mkdir('.')
        files = self.factory(transport)
        if self.cleanup is not None:
            self.addCleanup(lambda: self.cleanup(files))
        return files

    def test_annotate(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            prefix = ()
        else:
            prefix = ('FileA',)
        # introduced full text
        origins = files.annotate(prefix + ('origin',))
        self.assertEqual([
            (prefix + ('origin',), 'origin\n')],
            origins)
        # a delta
        origins = files.annotate(prefix + ('base',))
        self.assertEqual([
            (prefix + ('base',), 'base\n')],
            origins)
        # a merge
        origins = files.annotate(prefix + ('merged',))
        if self.graph:
            self.assertEqual([
                (prefix + ('base',), 'base\n'),
                (prefix + ('left',), 'left\n'),
                (prefix + ('right',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        else:
            # Without a graph everything is new.
            self.assertEqual([
                (prefix + ('merged',), 'base\n'),
                (prefix + ('merged',), 'left\n'),
                (prefix + ('merged',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        self.assertRaises(RevisionNotPresent,
            files.annotate, prefix + ('missing-key',))

    def test_construct(self):
        """Each parameterised test can be constructed on a transport."""
        files = self.get_versionedfiles()

    def get_diamond_files(self, files, trailing_eol=True, left_only=False,
        nokeys=False):
        return get_diamond_files(files, self.key_length,
            trailing_eol=trailing_eol, nograph=not self.graph,
            left_only=left_only, nokeys=nokeys)

    def test_add_lines_nostoresha(self):
        """When nostore_sha is supplied, adding old content raises."""
        vf = self.get_versionedfiles()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(self.get_simple_key(version), [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            new_key = self.get_simple_key(version + "2")
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, new_key, [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            record = vf.get_record_stream([new_key], 'unordered', True).next()
            self.assertEqual('absent', record.storage_kind)

    def test_add_lines_return(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
        elif self.key_length == 2:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)

    def test_add_lines_no_key_generates_chk_key(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files, nokeys=True)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
            # Check the added items got CHK keys.
            self.assertEqual(set([
                ('sha1:00e364d235126be43292ab09cb4686cf703ddc17',),
                ('sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',),
                ('sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',),
                ('sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',),
                ('sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',),
                ]),
                files.keys())
        elif self.key_length == 2:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
            # Check the added items got CHK keys.
            self.assertEqual(set([
                ('FileA', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                ('FileA', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                ('FileA', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                ('FileA', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                ('FileA', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
                ('FileB', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                ('FileB', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                ('FileB', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                ('FileB', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                ('FileB', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
                ]),
                files.keys())

    def test_empty_lines(self):
        """Empty files can be stored."""
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], [])
        self.assertEqual('',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), [])
        self.assertEqual('',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))

    def test_newline_only(self):
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))

    def test_get_record_stream_empty(self):
        """An empty stream can be requested without error."""
        f = self.get_versionedfiles()
        entries = f.get_record_stream([], 'unordered', False)
        self.assertEqual([], list(entries))

    def assertValidStorageKind(self, storage_kind):
        """Assert that storage_kind is a valid storage_kind."""
        self.assertSubset([storage_kind],
            ['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
             'knit-ft', 'knit-delta', 'chunked', 'fulltext',
             'knit-annotated-ft-gz', 'knit-annotated-delta-gz', 'knit-ft-gz',
             'knit-delta-gz',
             'knit-delta-closure', 'knit-delta-closure-ref',
             'groupcompress-block', 'groupcompress-block-ref'])

    def capture_stream(self, f, entries, on_seen, parents):
        """Capture a stream for testing."""
        for factory in entries:
            on_seen(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            if factory.sha1 is not None:
                self.assertEqual(f.get_sha1s([factory.key])[factory.key],
                    factory.sha1)
            self.assertEqual(parents[factory.key], factory.parents)
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)

    def test_get_record_stream_interface(self):
        """Each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, entries, seen.add, parent_map)
        self.assertEqual(set(keys), seen)

    def get_simple_key(self, suffix):
        """Return a key for the object under test."""
        if self.key_length == 1:
            return (suffix,)
        else:
            return ('FileA',) + (suffix,)

    def get_keys_and_sort_order(self):
        """Get diamond test keys list, and their sort ordering."""
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
            sort_order = {('merged',):2, ('left',):1, ('right',):1, ('base',):0}
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
            sort_order = {
                ('FileA', 'merged'):2, ('FileA', 'left'):1, ('FileA', 'right'):1,
                ('FileA', 'base'):0,
                ('FileB', 'merged'):2, ('FileB', 'left'):1, ('FileB', 'right'):1,
                ('FileB', 'base'):0,
                }
        return keys, sort_order

    def get_keys_and_groupcompress_sort_order(self):
        """Get diamond test keys list, and their groupcompress sort ordering."""
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
            sort_order = {('merged',):0, ('left',):1, ('right',):1, ('base',):2}
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
            sort_order = {
                ('FileA', 'merged'):0, ('FileA', 'left'):1, ('FileA', 'right'):1,
                ('FileA', 'base'):2,
                ('FileB', 'merged'):3, ('FileB', 'left'):4, ('FileB', 'right'):4,
                ('FileB', 'base'):5,
                }
        return keys, sort_order

    def test_get_record_stream_interface_ordered(self):
        """Each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', False)
        seen = []
        self.capture_stream(files, entries, seen.append, parent_map)
        self.assertStreamOrder(sort_order, seen, keys)

    def test_get_record_stream_interface_ordered_with_delta_closure(self):
        """Each item must be accessible as a fulltext."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', True)
        seen = []
        for factory in entries:
            seen.append(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertSubset([factory.sha1],
                [None, files.get_sha1s([factory.key])[factory.key]])
            self.assertEqual(parent_map[factory.key], factory.parents)
            # self.assertEqual(files.get_text(factory.key),
            ft_bytes = factory.get_bytes_as('fulltext')
            self.assertIsInstance(ft_bytes, str)
            chunked_bytes = factory.get_bytes_as('chunked')
            self.assertEqualDiff(ft_bytes, ''.join(chunked_bytes))

        self.assertStreamOrder(sort_order, seen, keys)

    def test_get_record_stream_interface_groupcompress(self):
        """Each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_groupcompress_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'groupcompress', False)
        seen = []
        self.capture_stream(files, entries, seen.append, parent_map)
        self.assertStreamOrder(sort_order, seen, keys)

    def assertStreamOrder(self, sort_order, seen, keys):
        self.assertEqual(len(set(seen)), len(keys))
        if self.key_length == 1:
            lows = {():0}
        else:
            lows = {('FileA',):0, ('FileB',):0}
        if not self.graph:
            self.assertEqual(set(keys), set(seen))
        else:
            for key in seen:
                sort_pos = sort_order[key]
                self.assertTrue(sort_pos >= lows[key[:-1]],
                    "Out of order in sorted stream: %r, %r" % (key, seen))
                lows[key[:-1]] = sort_pos
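
    # The check is per-prefix monotonicity: within each file's key prefix the
    # observed sort positions must never decrease.  Graphless scenarios
    # promise no ordering at all, so only set equality is asserted for them.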

    def test_get_record_stream_unknown_storage_kind_raises(self):
        """Asking for a storage kind that the stream cannot supply raises."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        # We track the contents because we should be able to try and fail a
        # particular kind, then ask for one that works and continue.
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            if factory.sha1 is not None:
                self.assertEqual(files.get_sha1s([factory.key])[factory.key],
                    factory.sha1)
            self.assertEqual(parent_map[factory.key], factory.parents)
            # currently no stream emits mpdiff
            self.assertRaises(errors.UnavailableRepresentation,
                factory.get_bytes_as, 'mpdiff')
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
        self.assertEqual(set(keys), seen)
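
    # A failed get_bytes_as() call must leave the record factory usable:
    # after errors.UnavailableRepresentation is raised for 'mpdiff', the loop
    # still reads each record in its native storage kind and the whole stream
    # is still consumed.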

    def test_get_record_stream_missing_records_are_absent(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('absent',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'absent'), ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'absent'), ('FileB', 'base'),
                ('absent', 'absent'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)
        entries = files.get_record_stream(keys, 'topological', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)

    def assertRecordHasContent(self, record, bytes):
        """Assert that record's content, as fulltext and as chunks, is bytes."""
        self.assertEqual(bytes, record.get_bytes_as('fulltext'))
        self.assertEqual(bytes, ''.join(record.get_bytes_as('chunked')))

    def test_get_record_stream_native_formats_are_wire_ready_one_ft(self):
        files = self.get_versionedfiles()
        key = self.get_simple_key('foo')
        files.add_lines(key, (), ['my text\n', 'content'])
        stream = files.get_record_stream([key], 'unordered', False)
        record = stream.next()
        if record.storage_kind in ('chunked', 'fulltext'):
            # chunked and fulltext representations are for direct use, not
            # wire serialisation: check they are able to be used directly.
            # To send such records over the wire translation will be needed.
            self.assertRecordHasContent(record, "my text\ncontent")
        else:
            bytes = [record.get_bytes_as(record.storage_kind)]
            network_stream = versionedfile.NetworkRecordStream(bytes).read()
            source_record = record
            records = []
            for record in network_stream:
                records.append(record)
                self.assertEqual(source_record.storage_kind,
                    record.storage_kind)
                self.assertEqual(source_record.parents, record.parents)
                self.assertEqual(
                    source_record.get_bytes_as(source_record.storage_kind),
                    record.get_bytes_as(record.storage_kind))
            self.assertEqual(1, len(records))
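
    # NetworkRecordStream.read() is the inverse of get_bytes_as(storage_kind)
    # for the wire-ready native kinds: feeding the raw bytes back through it
    # must reproduce a record with the same key, parents, storage kind and
    # bytes.  The chunked/fulltext branch exists because those two kinds are
    # in-memory representations with no wire encoding of their own.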

    def assertStreamMetaEqual(self, records, expected, stream):
        """Assert that streams expected and stream have the same records.

        :param records: A list to collect the seen records.
        :return: A generator of the records in stream.
        """
        # We make assertions during copying to catch things early for
        # easier debugging.
        for record, ref_record in izip(stream, expected):
            records.append(record)
            self.assertEqual(ref_record.key, record.key)
            self.assertEqual(ref_record.storage_kind, record.storage_kind)
            self.assertEqual(ref_record.parents, record.parents)
            yield record

    def stream_to_bytes_or_skip_counter(self, skipped_records, full_texts,
        stream):
        """Convert a stream to a bytes iterator.

        :param skipped_records: A list with one element to increment when a
            record is skipped.
        :param full_texts: A dict from key->fulltext representation, for
            checking chunked or fulltext stored records.
        :param stream: A record_stream.
        :return: An iterator over the bytes of each record.
        """
        for record in stream:
            if record.storage_kind in ('chunked', 'fulltext'):
                skipped_records[0] += 1
                # check the content is correct for direct use.
                self.assertRecordHasContent(record, full_texts[record.key])
            else:
                yield record.get_bytes_as(record.storage_kind)
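
    # skipped_records is a one-element list purely so this generator can
    # mutate the counter in place (this code predates 'nonlocal'); callers
    # add the skip count back in when asserting how many records crossed the
    # wire.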

    def test_get_record_stream_native_formats_are_wire_ready_ft_delta(self):
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key, key_delta], 'unordered', False)
        ref = files.get_record_stream([key, key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key: "my text\ncontent",
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # Insert the stream from the network into a versioned files object so
        # we can check the content was carried across correctly without doing
        # delta inspection while checking the stream.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire thank you!
        self.assertEqual(2, len(records) + skipped_records[0])
        if len(records):
            # if any content was copied it must all have been.
            self.assertIdenticalVersionedFile(files, target_files)

    def test_get_record_stream_native_formats_are_wire_ready_delta(self):
        # copy a delta over the wire
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        # Copy the basis text across so we can reconstruct the delta during
        # insertion into target.
        target_files.insert_record_stream(files.get_record_stream([key],
            'unordered', False))
        local = files.get_record_stream([key_delta], 'unordered', False)
        ref = files.get_record_stream([key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # Insert the stream from the network into a versioned files object so
        # we can check the content was carried across correctly without doing
        # delta inspection while checking the stream.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire thank you!
        self.assertEqual(1, len(records) + skipped_records[0])
        if len(records):
            # if any content was copied it must all have been.
            self.assertIdenticalVersionedFile(files, target_files)

    def test_get_record_stream_wire_ready_delta_closure_included(self):
        # copy a delta over the wire with the ability to get its full text.
        files = self.get_versionedfiles()
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key_delta], 'unordered', True)
        ref = files.get_record_stream([key_delta], 'unordered', True)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # Insert the stream from the network into a versioned files object so
        # we can check the content was carried across correctly without doing
        # delta inspection while checking the stream.
        for record in self.assertStreamMetaEqual(records, ref, network_stream):
            # we have to be able to get the full text out:
            self.assertRecordHasContent(record, full_texts[record.key])
        # No duplicates on the wire thank you!
        self.assertEqual(1, len(records) + skipped_records[0])
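
    # The three wire-ready tests above differ only in what travels: a lone
    # fulltext, a delta whose basis is already present in the target, and a
    # delta sent with include_delta_closure=True so the full text can be
    # recovered on the far side without the basis.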

    def assertAbsentRecord(self, files, keys, parents, entries):
        """Helper for test_get_record_stream_missing_records_are_absent."""
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            if factory.key[-1] == 'absent':
                self.assertEqual('absent', factory.storage_kind)
                self.assertEqual(None, factory.sha1)
                self.assertEqual(None, factory.parents)
            else:
                self.assertValidStorageKind(factory.storage_kind)
                if factory.sha1 is not None:
                    sha1 = files.get_sha1s([factory.key])[factory.key]
                    self.assertEqual(sha1, factory.sha1)
                self.assertEqual(parents[factory.key], factory.parents)
                self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                    str)
        self.assertEqual(set(keys), seen)
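
    # Absent records are real stream entries, not errors: their storage_kind
    # is the string 'absent' and sha1/parents are None.  That lets a caller
    # mix present and missing keys in one request and decide per record what
    # to do -- or strip them wholesale, as test_filter_absent_records does.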

    def test_filter_absent_records(self):
        """Requested missing records can be filtered out trivially."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        # Add an absent record in the middle of the present keys. (We don't
        # ask for just absent keys to ensure that content before and after
        # the absent keys is still delivered).
        present_keys = list(keys)
        if self.key_length == 1:
            keys.insert(2, ('extra',))
        else:
            keys.insert(2, ('extra', 'extra'))
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, versionedfile.filter_absent(entries),
            seen.add, parent_map)
        self.assertEqual(set(present_keys), seen)

    def get_mapper(self):
        """Get a mapper suitable for the key length of the test interface."""
        if self.key_length == 1:
            return ConstantMapper('source')
        else:
            return HashEscapedPrefixMapper()

    def get_parents(self, parents):
        """Get parents, taking self.graph into consideration."""
        if self.graph:
            return parents
        else:
            return None

    def test_get_parent_map(self):
        files = self.get_versionedfiles()
        if self.key_length == 1:
            parent_details = [
                (('r0',), self.get_parents(())),
                (('r1',), self.get_parents((('r0',),))),
                (('r2',), self.get_parents(())),
                (('r3',), self.get_parents(())),
                (('m',), self.get_parents((('r0',),('r1',),('r2',),('r3',)))),
                ]
        else:
            parent_details = [
                (('FileA', 'r0'), self.get_parents(())),
                (('FileA', 'r1'), self.get_parents((('FileA', 'r0'),))),
                (('FileA', 'r2'), self.get_parents(())),
                (('FileA', 'r3'), self.get_parents(())),
                (('FileA', 'm'), self.get_parents((('FileA', 'r0'),
                    ('FileA', 'r1'), ('FileA', 'r2'), ('FileA', 'r3')))),
                ]
        for key, parents in parent_details:
            files.add_lines(key, parents, [])
            # immediately after adding it should be queryable.
            self.assertEqual({key:parents}, files.get_parent_map([key]))
        # We can ask for an empty set
        self.assertEqual({}, files.get_parent_map([]))
        # We can ask for many keys
        all_parents = dict(parent_details)
        self.assertEqual(all_parents, files.get_parent_map(all_parents.keys()))
        # Absent keys are just not included in the result.
        keys = all_parents.keys()
        if self.key_length == 1:
            keys.insert(1, ('missing',))
        else:
            keys.insert(1, ('missing', 'missing'))
        # Absent keys are just ignored
        self.assertEqual(all_parents, files.get_parent_map(keys))
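
    # For reference, get_parent_map returns a plain dict keyed by the queried
    # keys; graphless stores report None instead of a parents tuple, and
    # absent keys are silently omitted rather than raising.  A sketch (names
    # hypothetical):
    #
    #   files.get_parent_map([present_key, missing_key])
    #   # => {present_key: (parent_key, ...)}   or {present_key: None}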

    def test_get_sha1s(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('base',), ('origin',), ('left',), ('merged',), ('right',)]
        else:
            # ask for shas from different prefixes.
            keys = [
                ('FileA', 'base'), ('FileB', 'origin'), ('FileA', 'left'),
                ('FileA', 'merged'), ('FileB', 'right'),
                ]
        self.assertEqual({
            keys[0]: '51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
            keys[1]: '00e364d235126be43292ab09cb4686cf703ddc17',
            keys[2]: 'a8478686da38e370e32e42e8a0c220e33ee9132f',
            keys[3]: 'ed8bce375198ea62444dc71952b22cfc2b09226d',
            keys[4]: '9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
            },
            files.get_sha1s(keys))

    def test_insert_record_stream_empty(self):
        """Inserting an empty record stream should work."""
        files = self.get_versionedfiles()
        files.insert_record_stream([])

    def assertIdenticalVersionedFile(self, expected, actual):
        """Assert that left and right have the same contents."""
        self.assertEqual(set(actual.keys()), set(expected.keys()))
        actual_parents = actual.get_parent_map(actual.keys())
        if self.graph:
            self.assertEqual(actual_parents,
                expected.get_parent_map(expected.keys()))
        else:
            for key, parents in actual_parents.items():
                self.assertEqual(None, parents)
        for key in actual.keys():
            actual_text = actual.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            expected_text = expected.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            self.assertEqual(actual_text, expected_text)

    def test_insert_record_stream_fulltexts(self):
        """Any file should accept a stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)
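
    # The paired tests that follow feed streams from each native source
    # format (weave fulltexts, annotated knits, plain knits) into the
    # scenario under test, once with and once without a trailing newline on
    # the last line -- last-line eol handling is where formats differ, which
    # is presumably why each test has a _noeol twin.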

    def test_insert_record_stream_fulltexts_noeol(self):
        """Any file should accept a stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_annotated_knits(self):
        """Any file should accept a stream from annotated knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_annotated_knits_noeol(self):
        """Any file should accept a stream from annotated knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_plain_knits(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_plain_knits_noeol(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_existing_keys(self):
        """Inserting keys already in a file should not error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        # insert some keys into files.
        self.get_diamond_files(files, left_only=True)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_missing_keys(self):
        """Inserting a stream with absent keys should raise an error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        stream = source.get_record_stream([('missing',) * self.key_length],
            'topological', False)
        self.assertRaises(errors.RevisionNotPresent,
            files.insert_record_stream, stream)

    def test_insert_record_stream_out_of_order(self):
        """An out of order stream can either error or work."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        if self.key_length == 1:
            origin_keys = [('origin',)]
            end_keys = [('merged',), ('left',)]
            start_keys = [('right',), ('base',)]
        else:
            origin_keys = [('FileA', 'origin'), ('FileB', 'origin')]
            end_keys = [('FileA', 'merged',), ('FileA', 'left',),
                ('FileB', 'merged',), ('FileB', 'left',)]
            start_keys = [('FileA', 'right',), ('FileA', 'base',),
                ('FileB', 'right',), ('FileB', 'base',)]
        origin_entries = source.get_record_stream(origin_keys,
            'unordered', False)
        end_entries = source.get_record_stream(end_keys, 'topological', False)
        start_entries = source.get_record_stream(start_keys,
            'topological', False)
        entries = chain(origin_entries, end_entries, start_entries)
        try:
            files.insert_record_stream(entries)
        except RevisionNotPresent:
            # Must not have corrupted the file.
            files.check()
        else:
            self.assertIdenticalVersionedFile(source, files)
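
    # Both outcomes are acceptable by contract: an implementation may buffer
    # and reorder the stream (in which case the result must equal the
    # source), or it may raise RevisionNotPresent -- but a rejected stream
    # must not corrupt the store, hence the check() call.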

    def get_knit_delta_source(self):
        """Get a source that can produce a stream with knit delta records,
        regardless of this test's scenario.
        """
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        get_diamond_files(source, self.key_length, trailing_eol=True,
            nograph=False, left_only=False)
        return source

    def test_insert_record_stream_delta_missing_basis_no_corruption(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis.  In the meantime a record missing its basis is
        inserted only where the scenario supports partial insertion; other
        scenarios reject the stream without corrupting the store.
        """
        source = self.get_knit_delta_source()
        keys = [self.get_simple_key('origin'), self.get_simple_key('merged')]
        entries = source.get_record_stream(keys, 'unordered', False)
        files = self.get_versionedfiles()
        if self.support_partial_insertion:
            self.assertEqual([],
                list(files.get_missing_compression_parent_keys()))
            files.insert_record_stream(entries)
            missing_bases = files.get_missing_compression_parent_keys()
            self.assertEqual(set([self.get_simple_key('left')]),
                set(missing_bases))
            self.assertEqual(set(keys), set(files.get_parent_map(keys)))
        else:
            self.assertRaises(
                errors.RevisionNotPresent, files.insert_record_stream, entries)
            files.check()

    def test_insert_record_stream_delta_missing_basis_can_be_added_later(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis.  That basis can be added in a second
        insert_record_stream call that does not need to repeat records present
        in the previous stream.  The record(s) that required that basis are
        fully inserted once their basis is no longer missing.
        """
        if not self.support_partial_insertion:
            raise TestNotApplicable(
                'versioned file scenario does not support partial insertion')
        source = self.get_knit_delta_source()
        entries = source.get_record_stream([self.get_simple_key('origin'),
            self.get_simple_key('merged')], 'unordered', False)
        files = self.get_versionedfiles()
        files.insert_record_stream(entries)
        missing_bases = files.get_missing_compression_parent_keys()
        self.assertEqual(set([self.get_simple_key('left')]),
            set(missing_bases))
        # 'merged' is inserted (although a commit of a write group involving
        # this versionedfiles would fail).
        merged_key = self.get_simple_key('merged')
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())
        # Add the full delta closure of the missing records
        missing_entries = source.get_record_stream(
            missing_bases, 'unordered', True)
        files.insert_record_stream(missing_entries)
        # Now 'merged' is fully inserted (and a commit would succeed).
        self.assertEqual([], list(files.get_missing_compression_parent_keys()))
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())
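
    # This is the two-phase insertion the partial-insertion scenarios
    # support: a first stream may leave compression parents dangling
    # (reported by get_missing_compression_parent_keys), and a later stream
    # supplying the closure of those bases completes the earlier records in
    # place.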

    def test_iter_lines_added_or_present_in_keys(self):
        # test that we get at least an equal set of the lines added by
        # versions in the store.
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.DummyProgress):

            def __init__(self):
                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        files = self.get_versionedfiles()
        # add a base to get included
        files.add_lines(self.get_simple_key('base'), (), ['base\n'])
        # add an ancestor to be included on one side
        files.add_lines(self.get_simple_key('lancestor'), (), ['lancestor\n'])
        # add an ancestor to be included on the other side
        files.add_lines(self.get_simple_key('rancestor'),
            self.get_parents([self.get_simple_key('base')]), ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        files.add_lines(self.get_simple_key('child'),
            self.get_parents([self.get_simple_key('rancestor')]),
            ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        files.add_lines(self.get_simple_key('otherchild'),
            self.get_parents([self.get_simple_key('lancestor'),
                self.get_simple_key('base')]),
            ['base\n', 'lancestor\n', 'otherchild\n'])

        def iter_with_keys(keys, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in files.iter_lines_added_or_present_in_keys(keys,
                pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines

        lines = iter_with_keys(
            [self.get_simple_key('child'), self.get_simple_key('otherchild')],
            [('Walking content', 0, 2),
             ('Walking content', 1, 2),
             ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
        # we don't care if we got more than that.

        # test all lines
        lines = iter_with_keys(files.keys(),
            [('Walking content', 0, 5),
             ('Walking content', 1, 5),
             ('Walking content', 2, 5),
             ('Walking content', 3, 5),
             ('Walking content', 4, 5),
             ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', self.get_simple_key('base'))] > 0)
        self.assertTrue(
            lines[('lancestor\n', self.get_simple_key('lancestor'))] > 0)
        self.assertTrue(
            lines[('rancestor\n', self.get_simple_key('rancestor'))] > 0)
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
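
    # iter_lines_added_or_present_in_keys yields (line, key) pairs and only
    # guarantees a superset: every line added in the requested keys shows up
    # at least once, but an implementation may emit a line more than once,
    # which is why the assertions above count occurrences with > 0 instead of
    # asserting exact totals.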

    def test_make_mpdiffs(self):
        from bzrlib import multiparent
        files = self.get_versionedfiles('source')
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 26 insertions
        files.add_lines(self.get_simple_key('base'), [], ['line\n'])
        files.add_lines(self.get_simple_key('noeol'),
            self.get_parents([self.get_simple_key('base')]), ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        files.add_lines(self.get_simple_key('noeolsecond'),
            self.get_parents([self.get_simple_key('noeol')]),
                ['line\n', 'line'])
        # differing last line with parent, both no-eol
        files.add_lines(self.get_simple_key('noeolnotshared'),
            self.get_parents([self.get_simple_key('noeolsecond')]),
                ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        files.add_lines(self.get_simple_key('eol'),
            self.get_parents([self.get_simple_key('noeol')]), ['phone\n'])
        # add eol following a noneol parent, no change content
        files.add_lines(self.get_simple_key('eolline'),
            self.get_parents([self.get_simple_key('noeol')]), ['line\n'])
        # noeol with no parents:
        files.add_lines(self.get_simple_key('noeolbase'), [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol with the
        # later-inserted parent the leftmost.
        files.add_lines(self.get_simple_key('eolbeforefirstparent'),
            self.get_parents([self.get_simple_key('noeolbase'),
                self.get_simple_key('noeol')]),
            ['line'])
        # two identical no-eol texts
        files.add_lines(self.get_simple_key('noeoldup'),
            self.get_parents([self.get_simple_key('noeol')]), ['line'])
        next_parent = self.get_simple_key('base')
        text_name = 'chain1-'
        text = ['line\n']
        sha1s = {0 :'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1 :'45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2 :'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3 :'26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4 :'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5 :'d63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6 :'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7 :'95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8 :'779e9a0b28f9f832528d4b21e17e168c67697272',
                 9 :'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10:'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11:'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12:'31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13:'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14:'2c4b1736566b8ca6051e668de68650686a3922f2',
                 15:'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16:'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17:'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18:'5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19:'1ebed371807ba5935958ad0884595126e8c4e823',
                 20:'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21:'01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22:'d8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23:'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24:'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25:'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        next_parent = self.get_simple_key('base')
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        target = self.get_versionedfiles('target')
        for key in multiparent.topo_iter_keys(files, files.keys()):
            mpdiff = files.make_mpdiffs([key])[0]
            parents = files.get_parent_map([key])[key] or []
            target.add_mpdiffs(
                [(key, parents, files.get_sha1s([key])[key], mpdiff)])
            self.assertEqualDiff(
                files.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'),
                target.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'))
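
    # make_mpdiffs/add_mpdiffs round-trips every text through multiparent
    # diffs in topological order.  The 26-deep chains exist to push knit
    # implementations past their maximum delta-chain threshold, and the eol
    # permutations cover the edge cases around texts whose last line has no
    # trailing newline.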

    def test_keys(self):
        # While use is discouraged, versions() is still needed by aspects of
        # bzr.
        files = self.get_versionedfiles()
        self.assertEqual(set(), set(files.keys()))
        if self.key_length == 1:
            key = ('foo',)
        else:
            key = ('foo', 'bar',)
        files.add_lines(key, (), [])
        self.assertEqual(set([key]), set(files.keys()))


class VirtualVersionedFilesTests(TestCase):
    """Basic tests for the VirtualVersionedFiles implementations."""

    def _get_parent_map(self, keys):
        ret = {}
        for k in keys:
            if k in self._parent_map:
                ret[k] = self._parent_map[k]
        return ret

    def setUp(self):
        TestCase.setUp(self)
        self._parent_map = {}
        self._lines = {}
        self.texts = VirtualVersionedFiles(self._get_parent_map,
                                           self._lines.get)

    def test_add_lines(self):
        self.assertRaises(NotImplementedError,
                self.texts.add_lines, "foo", [], [])

    def test_add_mpdiffs(self):
        self.assertRaises(NotImplementedError,
                self.texts.add_mpdiffs, [])

    def test_check(self):
        self.assertTrue(self.texts.check())

    def test_insert_record_stream(self):
        self.assertRaises(NotImplementedError,
                self.texts.insert_record_stream, [])

    def test_get_sha1s_nonexistent(self):
        self.assertEquals({}, self.texts.get_sha1s([("NONEXISTENT",)]))

    def test_get_sha1s(self):
        self._lines["key"] = ["dataline1", "dataline2"]
        self.assertEquals({("key",): osutils.sha_strings(self._lines["key"])},
                          self.texts.get_sha1s([("key",)]))

    def test_get_parent_map(self):
        self._parent_map = {"G": ("A", "B")}
        self.assertEquals({("G",): (("A",),("B",))},
                          self.texts.get_parent_map([("G",), ("L",)]))

    def test_get_record_stream(self):
        self._lines["A"] = ["FOO", "BAR"]
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("chunked", record.storage_kind)
        self.assertEquals("FOOBAR", record.get_bytes_as("fulltext"))
        self.assertEquals(["FOO", "BAR"], record.get_bytes_as("chunked"))

    def test_get_record_stream_absent(self):
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("absent", record.storage_kind)

    def test_iter_lines_added_or_present_in_keys(self):
        self._lines["A"] = ["FOO", "BAR"]
        self._lines["B"] = ["HEY"]
        self._lines["C"] = ["Alberta"]
        it = self.texts.iter_lines_added_or_present_in_keys([("A",), ("B",)])
        self.assertEquals(sorted([("FOO", "A"), ("BAR", "A"), ("HEY", "B")]),
                          sorted(list(it)))


class TestOrderingVersionedFilesDecorator(TestCaseWithMemoryTransport):

    def get_ordering_vf(self, key_priority):
        builder = self.make_branch_builder('test')
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'TREE_ROOT', 'directory', None))])
        builder.build_snapshot('B', ['A'], [])
        builder.build_snapshot('C', ['B'], [])
        builder.build_snapshot('D', ['C'], [])
        builder.finish_series()
        b = builder.get_branch()
        b.lock_read()
        self.addCleanup(b.unlock)
        vf = b.repository.inventories
        return versionedfile.OrderingVersionedFilesDecorator(vf, key_priority)

    def test_get_empty(self):
        vf = self.get_ordering_vf({})
        self.assertEqual([], vf.calls)

    def test_get_record_stream_topological(self):
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'topological', False)]
        # We should have gotten the keys in topological order
        self.assertEqual([('A',), ('B',), ('C',), ('D',)], keys)
        # And recorded that the request was made
        self.assertEqual([('get_record_stream', request_keys, 'topological',
                           False)], vf.calls)

    def test_get_record_stream_ordered(self):
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'unordered', False)]
        # They should be returned based on their priority
        self.assertEqual([('D',), ('B',), ('A',), ('C',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)

    def test_get_record_stream_implicit_order(self):
        vf = self.get_ordering_vf({('B',): 2, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'unordered', False)]
        # A and C are not in the map, so they get sorted to the front. A comes
        # before C alphabetically, so it comes back first
        self.assertEqual([('A',), ('C',), ('D',), ('B',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)
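
    # OrderingVersionedFilesDecorator wraps a real VersionedFiles, records
    # each call in .calls, and re-sorts 'unordered' streams by the supplied
    # key_priority map (lower numbers first, with unknown keys defaulting
    # ahead and ties broken by key), giving other tests a way to force a
    # deterministic stream order.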