# Copyright (C) 2005, 2009 Canonical Ltd
# Johan Rydberg <jrydberg@gnu.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA


# TODO: might be nice to create a versionedfile with some type of corruption
# considered typical and check that it can be detected/corrected.

from itertools import chain, izip
from StringIO import StringIO
from bzrlib import (
    errors,
    groupcompress,
    knit as _mod_knit,
    osutils,
    progress,
    )
from bzrlib.errors import (
    RevisionNotPresent,
    RevisionAlreadyPresent,
    )
from bzrlib.knit import (
    cleanup_pack_knit,
    make_file_factory,
    make_pack_factory,
    )
from bzrlib.tests import (
    TestCaseWithMemoryTransport,
    TestSkipped,
    condition_isinstance,
    split_suite_by_condition,
    multiply_tests,
    )
from bzrlib.tests.http_utils import TestCaseWithWebserver
from bzrlib.trace import mutter
from bzrlib.transport import get_transport
from bzrlib.transport.memory import MemoryTransport
from bzrlib.tsort import topo_sort
from bzrlib.tuned_gzip import GzipFile
import bzrlib.versionedfile as versionedfile
from bzrlib.versionedfile import (
    ConstantMapper,
    HashEscapedPrefixMapper,
    PrefixMapper,
    VirtualVersionedFiles,
    make_versioned_files_factory,
    )
from bzrlib.weave import WeaveFile
from bzrlib.weavefile import read_weave, write_weave


def load_tests(standard_tests, module, loader):
    """Parameterize VersionedFiles tests for different implementations."""
    to_adapt, result = split_suite_by_condition(
        standard_tests, condition_isinstance(TestVersionedFiles))
    # We want to be sure of behaviour for:
    # weaves prefix layout (weave texts)
    # individually named weaves (weave inventories)
    # annotated knits - prefix|hash|hash-escape layout, we test the third only
    #        as it is the most complex mapper.
    # individually named knits
    # individual no-graph knits in packs (signatures)
    # individual graph knits in packs (inventories)
    # individual graph nocompression knits in packs (revisions)
    # plain text knits in packs (texts)
    len_one_scenarios = [
        ('weave-named', {
            'cleanup':None,
            'factory':make_versioned_files_factory(WeaveFile,
                ConstantMapper('inventory')),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-knit', {
            'cleanup':None,
            'factory':make_file_factory(False, ConstantMapper('revisions')),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-nograph-nodelta-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(False, False, 1),
            'graph':False,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('named-graph-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, True, 1),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': True,
            }),
        ('named-graph-nodelta-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, False, 1),
            'graph':True,
            'key_length':1,
            'support_partial_insertion': False,
            }),
        ('groupcompress-nograph', {
            'cleanup':groupcompress.cleanup_pack_group,
            'factory':groupcompress.make_pack_factory(False, False, 1),
            'graph':False,
            'key_length':1,
            'support_partial_insertion':False,
            }),
        ]
    len_two_scenarios = [
        ('weave-prefix', {
            'cleanup':None,
            'factory':make_versioned_files_factory(WeaveFile,
                PrefixMapper()),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': False,
            }),
        ('annotated-knit-escape', {
            'cleanup':None,
            'factory':make_file_factory(True, HashEscapedPrefixMapper()),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': False,
            }),
        ('plain-knit-pack', {
            'cleanup':cleanup_pack_knit,
            'factory':make_pack_factory(True, True, 2),
            'graph':True,
            'key_length':2,
            'support_partial_insertion': True,
            }),
        ('groupcompress', {
            'cleanup':groupcompress.cleanup_pack_group,
            'factory':groupcompress.make_pack_factory(True, False, 1),
            'graph':True,
            'key_length':1,
            'support_partial_insertion':False,
            }),
        ]
    scenarios = len_one_scenarios + len_two_scenarios
    return multiply_tests(to_adapt, scenarios, result)
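

# For orientation: multiply_tests (from bzrlib.tests) clones every test in
# to_adapt once per scenario and applies the scenario's attributes to each
# clone, so one TestVersionedFiles method runs against every factory above.
# A rough, simplified sketch of that mechanism - illustrative only, the real
# helper also renumbers test ids and assembles suites:
def _demo_apply_scenario(test, scenario_name, attrs):
    """Return a shallow copy of test with attrs set on it (sketch only)."""
    import copy
    clone = copy.copy(test)
    for name, value in attrs.items():
        # e.g. clone.factory, clone.key_length, clone.graph, clone.cleanup
        setattr(clone, name, value)
    return clone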


def get_diamond_vf(f, trailing_eol=True, left_only=False):
    """Get a diamond graph to exercise deltas and merges.

    :param trailing_eol: If True end the last line with \n.
    """
    parents = {
        'origin': (),
        'base': (('origin',),),
        'left': (('base',),),
        'right': (('base',),),
        'merged': (('left',), ('right',)),
        }
    # insert a diamond graph to exercise deltas and merges.
    if trailing_eol:
        last_char = '\n'
    else:
        last_char = ''
    f.add_lines('origin', [], ['origin' + last_char])
    f.add_lines('base', ['origin'], ['base' + last_char])
    f.add_lines('left', ['base'], ['base\n', 'left' + last_char])
    if not left_only:
        f.add_lines('right', ['base'],
            ['base\n', 'right' + last_char])
        f.add_lines('merged', ['left', 'right'],
            ['base\n', 'left\n', 'right\n', 'merged' + last_char])
    return f, parents
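

# The ancestry built by get_diamond_vf, for reference:
#
#    origin
#      |
#     base
#     /  \
#   left right
#     \  /
#    merged
#
# so after get_diamond_vf(f) one would expect, as a sketch (not asserted
# here):
#
#   f.get_parent_map(['merged'])  # => {'merged': ('left', 'right')}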


def get_diamond_files(files, key_length, trailing_eol=True, left_only=False,
    nograph=False, nokeys=False):
    """Get a diamond graph to exercise deltas and merges.

    This creates a 5-node graph in files. If files supports 2-length keys two
    graphs are made to exercise the support for multiple ids.

    :param trailing_eol: If True end the last line with \n.
    :param key_length: The length of keys in files. Currently supports length 1
        and 2 keys.
    :param left_only: If True do not add the right and merged nodes.
    :param nograph: If True, do not provide parents to the add_lines calls;
        this is useful for tests that need inserted data but have graphless
        stores.
    :param nokeys: If True, pass None as the key for all insertions.
        Currently implies nograph.
    :return: The results of the add_lines calls.
    """
    if nokeys:
        nograph = True
    if key_length == 1:
        prefixes = [()]
    else:
        prefixes = [('FileA',), ('FileB',)]
    # insert a diamond graph to exercise deltas and merges.
    if trailing_eol:
        last_char = '\n'
    else:
        last_char = ''
    result = []
    def get_parents(suffix_list):
        if nograph:
            return ()
        else:
            result = [prefix + suffix for suffix in suffix_list]
            return result
    def get_key(suffix):
        if nokeys:
            return (None,)
        else:
            return (suffix,)
    # we loop over each key because that spreads the inserts across prefixes,
    # which is how commit operates.
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('origin'), (),
            ['origin' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('base'),
            get_parents([('origin',)]), ['base' + last_char]))
    for prefix in prefixes:
        result.append(files.add_lines(prefix + get_key('left'),
            get_parents([('base',)]),
            ['base\n', 'left' + last_char]))
    if not left_only:
        for prefix in prefixes:
            result.append(files.add_lines(prefix + get_key('right'),
                get_parents([('base',)]),
                ['base\n', 'right' + last_char]))
        for prefix in prefixes:
            result.append(files.add_lines(prefix + get_key('merged'),
                get_parents([('left',), ('right',)]),
                ['base\n', 'left\n', 'right\n', 'merged' + last_char]))
    return result
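

# With key_length=2 the same five texts are inserted once per prefix, so the
# diamond exists twice, e.g. under ('FileA', 'base') and ('FileB', 'base');
# with nograph=True every add_lines call records empty parents, modelling
# stores that keep no ancestry.  A sketch of the resulting keys (not a test):
#
#   get_diamond_files(files, 2)
#   # files now contains ('FileA', 'origin') ... ('FileB', 'merged')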


class VersionedFileTestMixIn(object):
    """A mixin test class for testing VersionedFiles.

    This is not an adaptor-style test at this point because
    there's no dynamic substitution of versioned file implementations;
    they are strictly controlled by their owning repositories.
    """

    def get_transaction(self):
        if not hasattr(self, '_transaction'):
            self._transaction = None
        return self._transaction

    def test_add(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
        def verify_file(f):
            versions = f.versions()
            self.assertTrue('r0' in versions)
            self.assertTrue('r1' in versions)
            self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
            self.assertEquals(f.get_text('r0'), 'a\nb\n')
            self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
            self.assertEqual(2, len(f))
            self.assertEqual(2, f.num_versions())

            self.assertRaises(RevisionNotPresent,
                f.add_lines, 'r2', ['foo'], [])
            self.assertRaises(RevisionAlreadyPresent,
                f.add_lines, 'r1', [], [])
        verify_file(f)
        # this checks that reopen with create=True does not break anything.
        f = self.reopen_file(create=True)
        verify_file(f)

    def test_adds_with_parent_texts(self):
        f = self.get_file()
        parent_texts = {}
        _, _, parent_texts['r0'] = f.add_lines('r0', [], ['a\n', 'b\n'])
        try:
            _, _, parent_texts['r1'] = f.add_lines_with_ghosts('r1',
                ['r0', 'ghost'], ['b\n', 'c\n'], parent_texts=parent_texts)
        except NotImplementedError:
            # if the format doesn't support ghosts, just add normally.
            _, _, parent_texts['r1'] = f.add_lines('r1',
                ['r0'], ['b\n', 'c\n'], parent_texts=parent_texts)
        f.add_lines('r2', ['r1'], ['c\n', 'd\n'], parent_texts=parent_texts)
        self.assertNotEqual(None, parent_texts['r0'])
        self.assertNotEqual(None, parent_texts['r1'])
        def verify_file(f):
            versions = f.versions()
            self.assertTrue('r0' in versions)
            self.assertTrue('r1' in versions)
            self.assertTrue('r2' in versions)
            self.assertEquals(f.get_lines('r0'), ['a\n', 'b\n'])
            self.assertEquals(f.get_lines('r1'), ['b\n', 'c\n'])
            self.assertEquals(f.get_lines('r2'), ['c\n', 'd\n'])
            self.assertEqual(3, f.num_versions())
            origins = f.annotate('r1')
            self.assertEquals(origins[0][0], 'r0')
            self.assertEquals(origins[1][0], 'r1')
            origins = f.annotate('r2')
            self.assertEquals(origins[0][0], 'r1')
            self.assertEquals(origins[1][0], 'r2')
        verify_file(f)
        f = self.reopen_file()
        verify_file(f)

    def test_add_unicode_content(self):
        # unicode content is not permitted in versioned files.
        # versioned files version sequences of bytes only.
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterUnicode,
            vf.add_lines, 'a', [], ['a\n', u'b\n', 'c\n'])
        self.assertRaises(
            (errors.BzrBadParameterUnicode, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n', u'b\n', 'c\n'])

    def test_add_follows_left_matching_blocks(self):
        """If we change left_matching_blocks, the delta changes.

        Note: There are multiple correct deltas in this case, because
        we start with 1 "a" and we get 3.
        """
        vf = self.get_file()
        if isinstance(vf, WeaveFile):
            raise TestSkipped("WeaveFile ignores left_matching_blocks")
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['a\n', 'a\n', 'a\n'],
                     left_matching_blocks=[(0, 0, 1), (1, 3, 0)])
        self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('2'))
        vf.add_lines('3', ['1'], ['a\n', 'a\n', 'a\n'],
                     left_matching_blocks=[(0, 2, 1), (1, 3, 0)])
        self.assertEqual(['a\n', 'a\n', 'a\n'], vf.get_lines('3'))
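
    # For reference: each left_matching_blocks triple is (parent_pos, new_pos,
    # length), SequenceMatcher-style - "length lines at parent_pos in the
    # parent equal the lines at new_pos in the new text" - terminated by a
    # zero-length sentinel block.  A small sketch (not bzrlib API) of reading
    # the new regions out of such blocks:
    #
    #   def new_regions(new_lines, blocks):
    #       changed, pos = [], 0
    #       for parent_pos, new_pos, length in blocks:
    #           if new_pos > pos:
    #               changed.append(new_lines[pos:new_pos])
    #           pos = new_pos + length
    #       return changed
    #
    # With [(0, 2, 1), (1, 3, 0)] above, only the first two 'a\n' lines are
    # treated as new content; the third is copied from the parent.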

    def test_inline_newline_throws(self):
        # newline characters are not permitted within lines being added
        vf = self.get_file()
        self.assertRaises(errors.BzrBadParameterContainsNewline,
            vf.add_lines, 'a', [], ['a\n\n'])
        self.assertRaises(
            (errors.BzrBadParameterContainsNewline, NotImplementedError),
            vf.add_lines_with_ghosts, 'a', [], ['a\n\n'])
        # but inline CRs are allowed
        vf.add_lines('a', [], ['a\r\n'])
        try:
            vf.add_lines_with_ghosts('b', [], ['a\r\n'])
        except NotImplementedError:
            pass

    def test_add_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId,
            vf.add_lines, 'a:', [], ['a\n', 'b\n', 'c\n'])

    def test_add_lines_nostoresha(self):
        """When nostore_sha is supplied, re-adding old content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                version + "2")

    def test_add_lines_with_ghosts_nostoresha(self):
        """When nostore_sha is supplied, re-adding old content raises."""
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(version, [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        # is the test applicable to this vf implementation?
        try:
            vf.add_lines_with_ghosts('d', [], [])
        except NotImplementedError:
            raise TestSkipped("add_lines_with_ghosts is optional")
        for sha, (version, lines) in zip(
            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            self.assertRaises(errors.ExistingContent,
                vf.add_lines_with_ghosts, version + "2", [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            self.assertRaises(errors.RevisionNotPresent, vf.get_lines,
                version + "2")

    def test_add_lines_return_value(self):
        # add_lines should return the sha1 and the text size.
        vf = self.get_file()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        # check results for the three cases:
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            # the first two elements are the same for all versioned files:
            # - the digest and the size of the text. For some versioned files
            #   additional data is returned in additional tuple elements.
            result = vf.add_lines(version, [], lines)
            self.assertEqual(3, len(result))
            self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
                result[0:2])
        # parents should not affect the result:
        lines = sample_text_nl[1]
        self.assertEqual((osutils.sha_strings(lines), sum(map(len, lines))),
            vf.add_lines('d', ['b', 'c'], lines)[0:2])

    def test_get_reserved(self):
        vf = self.get_file()
        self.assertRaises(errors.ReservedId, vf.get_texts, ['b:'])
        self.assertRaises(errors.ReservedId, vf.get_lines, 'b:')
        self.assertRaises(errors.ReservedId, vf.get_text, 'b:')

    def test_add_unchanged_last_line_noeol_snapshot(self):
        """Adding a text with an unchanged last line and no eol should work."""
        # Test adding this in a number of chain lengths; because the interface
        # for VersionedFile does not allow forcing a specific chain length, we
        # just use a small base to get the first snapshot, then a much longer
        # first line for the next add (which will make the third add snapshot)
        # and so on. 20 has been chosen as an arbitrary figure - knits use 200
        # as a capped delta length, but ideally we would have some way of
        # tuning the test to the store (e.g. keep going until a snapshot
        # happens).
        for length in range(20):
            version_lines = {}
            vf = self.get_file('case-%d' % length)
            prefix = 'step-%d'
            parents = []
            for step in range(length):
                version = prefix % step
                lines = (['prelude \n'] * step) + ['line']
                vf.add_lines(version, parents, lines)
                version_lines[version] = lines
                parents = [version]
            vf.add_lines('no-eol', parents, ['line'])
            vf.get_texts(version_lines.keys())
            self.assertEqualDiff('line', vf.get_text('no-eol'))

    def test_get_texts_eol_variation(self):
        # similar to the failure in <http://bugs.launchpad.net/234748>
        vf = self.get_file()
        sample_text_nl = ["line\n"]
        sample_text_no_nl = ["line"]
        versions = []
        version_lines = {}
        parents = []
        for i in range(4):
            version = 'v%d' % i
            if i % 2:
                lines = sample_text_nl
            else:
                lines = sample_text_no_nl
            # left_matching_blocks is an internal api; it operates on the
            # *internal* representation for a knit, which is with *all* lines
            # being normalised to end with \n - even the final line in a no_nl
            # file. Using it here ensures that a broken internal implementation
            # (which is what this test tests) will generate a correct line
            # delta (which is to say, an empty delta).
            vf.add_lines(version, parents, lines,
                left_matching_blocks=[(0, 0, 1)])
            parents = [version]
            versions.append(version)
            version_lines[version] = lines
        vf.check()
        vf.get_texts(versions)
        vf.get_texts(reversed(versions))

    def test_add_lines_with_matching_blocks_noeol_last_line(self):
        """Adding a text with an unchanged last line and no eol should work."""
        from bzrlib import multiparent
        # Hand verified sha1 of the text we're adding.
        sha1 = '6a1d115ec7b60afb664dc14890b5af5ce3c827a4'
        # Create a mpdiff which adds a new line before the trailing line, and
        # reuse the last line unaltered (which can cause annotation reuse).
        # Test adding this in two situations:
        # On top of a new insertion
        vf = self.get_file('fulltext')
        vf.add_lines('noeol', [], ['line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(0, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))
        # On top of a delta
        vf = self.get_file('delta')
        vf.add_lines('base', [], ['line'])
        vf.add_lines('noeol', ['base'], ['prelude\n', 'line'])
        vf.add_lines('noeol2', ['noeol'], ['newline\n', 'line'],
            left_matching_blocks=[(1, 1, 1)])
        self.assertEqualDiff('newline\nline', vf.get_text('noeol2'))

    def test_make_mpdiffs(self):
        from bzrlib import multiparent
        vf = self.get_file('foo')
        sha1s = self._setup_for_deltas(vf)
        new_vf = self.get_file('bar')
        for version in multiparent.topo_iter(vf):
            mpdiff = vf.make_mpdiffs([version])[0]
            new_vf.add_mpdiffs([(version, vf.get_parent_map([version])[version],
                                 vf.get_sha1s([version])[version], mpdiff)])
            self.assertEqualDiff(vf.get_text(version),
                                 new_vf.get_text(version))

    def test_make_mpdiffs_with_ghosts(self):
        vf = self.get_file('foo')
        try:
            vf.add_lines_with_ghosts('text', ['ghost'], ['line\n'])
        except NotImplementedError:
            # old Weave formats do not allow ghosts
            return
        self.assertRaises(errors.RevisionNotPresent, vf.make_mpdiffs, ['ghost'])

    def _setup_for_deltas(self, f):
        self.assertFalse(f.has_version('base'))
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 25 insertions
        f.add_lines('base', [], ['line\n'])
        f.add_lines('noeol', ['base'], ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        f.add_lines('noeolsecond', ['noeol'], ['line\n', 'line'])
        # differing last line with parent, both no-eol
        f.add_lines('noeolnotshared', ['noeolsecond'], ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        f.add_lines('eol', ['noeol'], ['phone\n'])
        # add eol following a noneol parent, no change content
        f.add_lines('eolline', ['noeol'], ['line\n'])
        # noeol with no parents:
        f.add_lines('noeolbase', [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol, with the
        # later-inserted parent the leftmost.
        f.add_lines('eolbeforefirstparent', ['noeolbase', 'noeol'], ['line'])
        # two identical no-eol texts
        f.add_lines('noeoldup', ['noeol'], ['line'])
        next_parent = 'base'
        text_name = 'chain1-'
        text = ['line\n']
        sha1s = {0 :'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1 :'45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2 :'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3 :'26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4 :'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5 :'d63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6 :'2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7 :'95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8 :'779e9a0b28f9f832528d4b21e17e168c67697272',
                 9 :'1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10:'131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11:'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12:'31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13:'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14:'2c4b1736566b8ca6051e668de68650686a3922f2',
                 15:'5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16:'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17:'8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18:'5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19:'1ebed371807ba5935958ad0884595126e8c4e823',
                 20:'2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21:'01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22:'d8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23:'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24:'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25:'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = text_name + '%s' % depth
            text = text + ['line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        next_parent = 'base'
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = text_name + '%s' % depth
            text = text + ['line\n']
            f.add_lines(new_version, [next_parent], text)
            next_parent = new_version
        return sha1s

    def test_ancestry(self):
        f = self.get_file()
        self.assertEqual([], f.get_ancestry([]))
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['b\n', 'c\n'])
        f.add_lines('r2', ['r0'], ['b\n', 'c\n'])
        f.add_lines('r3', ['r2'], ['b\n', 'c\n'])
        f.add_lines('rM', ['r1', 'r2'], ['b\n', 'c\n'])
        self.assertEqual([], f.get_ancestry([]))
        versions = f.get_ancestry(['rM'])
        # there are some possibilities:
        # (any topological order of r0, r1, r2 and rM is a valid answer,
        # and r3 must be absent because it is not an ancestor of rM)
        # so we check indexes
        r0 = versions.index('r0')
        r1 = versions.index('r1')
        r2 = versions.index('r2')
        self.assertFalse('r3' in versions)
        rM = versions.index('rM')
        self.assertTrue(r0 < r1)
        self.assertTrue(r0 < r2)
        self.assertTrue(r1 < rM)
        self.assertTrue(r2 < rM)

        self.assertRaises(RevisionNotPresent,
            f.get_ancestry, ['rM', 'rX'])

        self.assertEqual(set(f.get_ancestry('rM')),
            set(f.get_ancestry('rM', topo_sorted=False)))
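
    # The pairwise index checks above express that get_ancestry may return
    # any topological order.  Stated generically, as a sketch only (the
    # explicit asserts above give clearer failure messages):
    #
    #   def assert_topo_order(order, parents):
    #       pos = dict((v, i) for i, v in enumerate(order))
    #       for child, child_parents in parents.items():
    #           for parent in child_parents:
    #               assert pos[parent] < pos[child]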

    def test_mutate_after_finish(self):
        self._transaction = 'before'
        f = self.get_file()
        self._transaction = 'after'
        self.assertRaises(errors.OutSideTransaction, f.add_lines, '', [], [])
        self.assertRaises(errors.OutSideTransaction, f.add_lines_with_ghosts,
            '', [], [])

    def test_copy_to(self):
        f = self.get_file()
        f.add_lines('0', [], ['a\n'])
        t = MemoryTransport()
        f.copy_to('foo', t)
        for suffix in self.get_factory().get_suffixes():
            self.assertTrue(t.has('foo' + suffix))

    def test_get_suffixes(self):
        f = self.get_file()
        # the factory's suffixes should be available, and should be a list
        self.assertTrue(isinstance(self.get_factory().get_suffixes(), list))

    def test_get_parent_map(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        self.assertEqual(
            {'r0':()}, f.get_parent_map(['r0']))
        f.add_lines('r1', ['r0'], ['a\n', 'b\n'])
        self.assertEqual(
            {'r1':('r0',)}, f.get_parent_map(['r1']))
        self.assertEqual(
            {'r0':(),
             'r1':('r0',)},
            f.get_parent_map(['r0', 'r1']))
        f.add_lines('r2', [], ['a\n', 'b\n'])
        f.add_lines('r3', [], ['a\n', 'b\n'])
        f.add_lines('m', ['r0', 'r1', 'r2', 'r3'], ['a\n', 'b\n'])
        self.assertEqual(
            {'m':('r0', 'r1', 'r2', 'r3')}, f.get_parent_map(['m']))
        self.assertEqual({}, f.get_parent_map('y'))
        self.assertEqual(
            {'r0':(),
             'r1':('r0',)},
            f.get_parent_map(['r0', 'y', 'r1']))

    def test_annotate(self):
        f = self.get_file()
        f.add_lines('r0', [], ['a\n', 'b\n'])
        f.add_lines('r1', ['r0'], ['c\n', 'b\n'])
        origins = f.annotate('r1')
        self.assertEquals(origins[0][0], 'r1')
        self.assertEquals(origins[1][0], 'r0')

        self.assertRaises(RevisionNotPresent,
            f.annotate, 'foo')

    def test_detection(self):
        # Test weaves detect corruption.
        #
        # Weaves contain a checksum of their texts.
        # When a text is extracted, this checksum should be
        # verified.

        w = self.get_file_corrupted_text()

        self.assertEqual('hello\n', w.get_text('v1'))
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.check)

        w = self.get_file_corrupted_checksum()

        self.assertEqual('hello\n', w.get_text('v1'))
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_text, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.get_lines, 'v2')
        self.assertRaises(errors.WeaveInvalidChecksum, w.check)

    def get_file_corrupted_text(self):
        """Return a versioned file with corrupt text but valid metadata."""
        raise NotImplementedError(self.get_file_corrupted_text)

    def reopen_file(self, name='foo'):
        """Open the versioned file from disk again."""
        raise NotImplementedError(self.reopen_file)

    def test_iter_lines_added_or_present_in_versions(self):
        # test that we get at least an equal set of the lines added by
        # versions in the weave
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.DummyProgress):

            def __init__(self):
                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        vf = self.get_file()
        # add a base to get included
        vf.add_lines('base', [], ['base\n'])
        # add an ancestor to be included on one side
        vf.add_lines('lancestor', [], ['lancestor\n'])
        # add an ancestor to be included on the other side
        vf.add_lines('rancestor', ['base'], ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        vf.add_lines('child', ['rancestor'], ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        vf.add_lines('otherchild',
                     ['lancestor', 'base'],
                     ['base\n', 'lancestor\n', 'otherchild\n'])
        def iter_with_versions(versions, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in vf.iter_lines_added_or_present_in_versions(versions,
                    pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines
        lines = iter_with_versions(['child', 'otherchild'],
                                   [('Walking content', 0, 2),
                                    ('Walking content', 1, 2),
                                    ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)
        # we don't care if we got more than that.

        # test all lines
        lines = iter_with_versions(None, [('Walking content', 0, 5),
                                          ('Walking content', 1, 5),
                                          ('Walking content', 2, 5),
                                          ('Walking content', 3, 5),
                                          ('Walking content', 4, 5),
                                          ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', 'base')] > 0)
        self.assertTrue(lines[('lancestor\n', 'lancestor')] > 0)
        self.assertTrue(lines[('rancestor\n', 'rancestor')] > 0)
        self.assertTrue(lines[('child\n', 'child')] > 0)
        self.assertTrue(lines[('otherchild\n', 'otherchild')] > 0)

    def test_add_lines_with_ghosts(self):
        # some versioned file formats allow lines to be added with parent
        # information that references revisions not present in the file.
        # Formats that do not support this need to raise NotImplementedError
        # on the add_lines_with_ghosts api.
        vf = self.get_file()
        # add a revision with ghost parents
        # The preferred form is utf8, but we should translate when needed
        parent_id_unicode = u'b\xbfse'
        parent_id_utf8 = parent_id_unicode.encode('utf8')
        try:
            vf.add_lines_with_ghosts('notbxbfse', [parent_id_utf8], [])
        except NotImplementedError:
            # check the other ghost apis are also not implemented
            self.assertRaises(NotImplementedError, vf.get_ancestry_with_ghosts, ['foo'])
            self.assertRaises(NotImplementedError, vf.get_parents_with_ghosts, 'foo')
            return
        vf = self.reopen_file()
        # test key graph related apis: get_ancestry, _graph, get_parents,
        # has_version
        # - these are ghost unaware and must not reflect ghosts
        self.assertEqual(['notbxbfse'], vf.get_ancestry('notbxbfse'))
        self.assertFalse(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))
        # if we add something that is a ghost of another, it should correct the
        # results of the prior apis
        vf.add_lines(parent_id_utf8, [], [])
        self.assertEqual([parent_id_utf8, 'notbxbfse'], vf.get_ancestry(['notbxbfse']))
        self.assertEqual({'notbxbfse':(parent_id_utf8,)},
            vf.get_parent_map(['notbxbfse']))
        self.assertTrue(vf.has_version(parent_id_utf8))
        # we have _with_ghost apis to give us ghost information.
        self.assertEqual([parent_id_utf8, 'notbxbfse'],
            vf.get_ancestry_with_ghosts(['notbxbfse']))
        self.assertEqual([parent_id_utf8], vf.get_parents_with_ghosts('notbxbfse'))

    def test_add_lines_with_ghosts_after_normal_revs(self):
        # some versioned file formats allow lines to be added with parent
        # information that references revisions not present in the file.
        # Formats that do not support this need to raise NotImplementedError
        # on the add_lines_with_ghosts api.
        vf = self.get_file()
        # probe for ghost support
        try:
            vf.add_lines_with_ghosts('base', [], ['line\n', 'line_b\n'])
        except NotImplementedError:
            return
        vf.add_lines_with_ghosts('references_ghost',
                                 ['base', 'a_ghost'],
                                 ['line\n', 'line_b\n', 'line_c\n'])
        origins = vf.annotate('references_ghost')
        self.assertEquals(('base', 'line\n'), origins[0])
        self.assertEquals(('base', 'line_b\n'), origins[1])
        self.assertEquals(('references_ghost', 'line_c\n'), origins[2])

    def test_readonly_mode(self):
        transport = get_transport(self.get_url('.'))
        factory = self.get_factory()
        vf = factory('id', transport, 0777, create=True, access_mode='w')
        vf = factory('id', transport, access_mode='r')
        self.assertRaises(errors.ReadOnlyError, vf.add_lines, 'base', [], [])
        self.assertRaises(errors.ReadOnlyError,
                          vf.add_lines_with_ghosts,
                          'base',
                          [],
                          [])

    def test_get_sha1s(self):
        # check the sha1 data is available
        vf = self.get_file()
        vf.add_lines('a', [], ['a\n'])
        # the same file, different metadata
        vf.add_lines('b', ['a'], ['a\n'])
        # a file differing only in last newline.
        vf.add_lines('c', [], ['a'])
        self.assertEqual({
            'a': '3f786850e387550fdab836ed7e6dc881de23001b',
            'c': '86f7e437faa5a7fce15d1ddcb9eaeaea377667b8',
            'b': '3f786850e387550fdab836ed7e6dc881de23001b',
            },
            vf.get_sha1s(['a', 'c', 'b']))


class TestWeave(TestCaseWithMemoryTransport, VersionedFileTestMixIn):

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)

    def get_file_corrupted_text(self):
        w = WeaveFile('foo', get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)
        w.add_lines('v1', [], ['hello\n'])
        w.add_lines('v2', ['v1'], ['hello\n', 'there\n'])

        # We are going to invasively corrupt the text
        # Make sure the internals of weave are the same
        self.assertEqual([('{', 0)
                        , 'hello\n'
                        , ('}', None)
                        , ('{', 1)
                        , 'there\n'
                        , ('}', None)
                        ], w._weave)

        self.assertEqual(['f572d396fae9206628714fb2ce00f72e94f2258f'
                        , '90f265c6e75f1c8f9ab76dcf85528352c5f215ef'
                        ], w._sha1s)
        w.check()

        # Corrupted
        w._weave[4] = 'There\n'
        return w

    def get_file_corrupted_checksum(self):
        w = self.get_file_corrupted_text()
        # Corrected
        w._weave[4] = 'there\n'
        self.assertEqual('hello\nthere\n', w.get_text('v2'))

        # Invalid checksum, first digit changed
        w._sha1s[1] = 'f0f265c6e75f1c8f9ab76dcf85528352c5f215ef'
        return w

    def reopen_file(self, name='foo', create=False):
        return WeaveFile(name, get_transport(self.get_url('.')), create=create,
            get_scope=self.get_transaction)

    def test_no_implicit_create(self):
        self.assertRaises(errors.NoSuchFile,
                          WeaveFile,
                          'foo',
                          get_transport(self.get_url('.')),
                          get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile


class TestPlanMergeVersionedFile(TestCaseWithMemoryTransport):

    def setUp(self):
        TestCaseWithMemoryTransport.setUp(self)
        mapper = PrefixMapper()
        factory = make_file_factory(True, mapper)
        self.vf1 = factory(self.get_transport('root-1'))
        self.vf2 = factory(self.get_transport('root-2'))
        self.plan_merge_vf = versionedfile._PlanMergeVersionedFile('root')
        self.plan_merge_vf.fallback_versionedfiles.extend([self.vf1, self.vf2])

    def test_add_lines(self):
        self.plan_merge_vf.add_lines(('root', 'a:'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
            ('root', 'a'), [], [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
            ('root', 'a:'), None, [])
        self.assertRaises(ValueError, self.plan_merge_vf.add_lines,
            ('root', 'a:'), [], None)

    def setup_abcde(self):
        self.vf1.add_lines(('root', 'A'), [], ['a'])
        self.vf1.add_lines(('root', 'B'), [('root', 'A')], ['b'])
        self.vf2.add_lines(('root', 'C'), [], ['c'])
        self.vf2.add_lines(('root', 'D'), [('root', 'C')], ['d'])
        self.plan_merge_vf.add_lines(('root', 'E:'),
            [('root', 'B'), ('root', 'D')], ['e'])

    def test_get_parents(self):
        self.setup_abcde()
        self.assertEqual({('root', 'B'):(('root', 'A'),)},
            self.plan_merge_vf.get_parent_map([('root', 'B')]))
        self.assertEqual({('root', 'D'):(('root', 'C'),)},
            self.plan_merge_vf.get_parent_map([('root', 'D')]))
        self.assertEqual({('root', 'E:'):(('root', 'B'), ('root', 'D'))},
            self.plan_merge_vf.get_parent_map([('root', 'E:')]))
        self.assertEqual({},
            self.plan_merge_vf.get_parent_map([('root', 'F')]))
        self.assertEqual({
                ('root', 'B'):(('root', 'A'),),
                ('root', 'D'):(('root', 'C'),),
                ('root', 'E:'):(('root', 'B'), ('root', 'D')),
                },
            self.plan_merge_vf.get_parent_map(
                [('root', 'B'), ('root', 'D'), ('root', 'E:'), ('root', 'F')]))

    def test_get_record_stream(self):
        self.setup_abcde()
        def get_record(suffix):
            return self.plan_merge_vf.get_record_stream(
                [('root', suffix)], 'unordered', True).next()
        self.assertEqual('a', get_record('A').get_bytes_as('fulltext'))
        self.assertEqual('c', get_record('C').get_bytes_as('fulltext'))
        self.assertEqual('e', get_record('E:').get_bytes_as('fulltext'))
        self.assertEqual('absent', get_record('F').storage_kind)


class TestReadonlyHttpMixin(object):

    def get_transaction(self):
        return 1

    def test_readonly_http_works(self):
        # we should be able to read from http with a versioned file.
        vf = self.get_file()
        # try an empty file access
        readonly_vf = self.get_factory()('foo', get_transport(
            self.get_readonly_url('.')))
        self.assertEqual([], readonly_vf.versions())
        # now with feeling.
        vf.add_lines('1', [], ['a\n'])
        vf.add_lines('2', ['1'], ['b\n', 'a\n'])
        readonly_vf = self.get_factory()('foo', get_transport(
            self.get_readonly_url('.')))
        self.assertEqual(['1', '2'], vf.versions())
        for version in readonly_vf.versions():
            readonly_vf.get_lines(version)


class TestWeaveHTTP(TestCaseWithWebserver, TestReadonlyHttpMixin):

    def get_file(self):
        return WeaveFile('foo', get_transport(self.get_url('.')), create=True,
            get_scope=self.get_transaction)

    def get_factory(self):
        return WeaveFile


class MergeCasesMixin(object):

    def doMerge(self, base, a, b, mp):
        from cStringIO import StringIO
        from textwrap import dedent

        def addcrlf(x):
            return x + '\n'

        w = self.get_file()
        w.add_lines('text0', [], map(addcrlf, base))
        w.add_lines('text1', ['text0'], map(addcrlf, a))
        w.add_lines('text2', ['text0'], map(addcrlf, b))

        self.log_contents(w)

        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))

        self.log('merge:')
        mt = StringIO()
        mt.writelines(w.weave_merge(p))
        mt.seek(0)
        self.log(mt.getvalue())

        mp = map(addcrlf, mp)
        self.assertEqual(mt.readlines(), mp)
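
    # plan_merge yields (state, line) pairs - 'unchanged', 'new-a', 'new-b',
    # 'killed-a', 'killed-b' and friends - and weave_merge folds that plan
    # back into text, emitting conflict markers where the two sides inserted
    # different lines.  A heavily simplified fold, for orientation only (the
    # real weave_merge also handles killed/ghost states and trailing regions):
    #
    #   def fold(plan):
    #       a_new, b_new = [], []
    #       for state, line in plan:
    #           if state == 'new-a':
    #               a_new.append(line)
    #           elif state == 'new-b':
    #               b_new.append(line)
    #           elif state == 'unchanged':
    #               if a_new == b_new:
    #                   for l in a_new:   # both sides agree
    #                       yield l
    #               elif a_new or b_new:  # disagreement: conflict region
    #                   yield '<<<<<<< \n'
    #                   for l in a_new:
    #                       yield l
    #                   yield '=======\n'
    #                   for l in b_new:
    #                       yield l
    #                   yield '>>>>>>> \n'
    #               a_new, b_new = [], []
    #               yield line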

    def testOneInsert(self):
        self.doMerge([],
                     ['aa'],
                     [],
                     ['aa'])

    def testSeparateInserts(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    def testSameInsert(self):
        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'],
                     ['aaa', 'xxx', 'bbb', 'yyy', 'ccc'])

    overlappedInsertExpected = ['aaa', 'xxx', 'yyy', 'bbb']

    def testOverlappedInsert(self):
        self.doMerge(['aaa', 'bbb'],
                     ['aaa', 'xxx', 'yyy', 'bbb'],
                     ['aaa', 'xxx', 'bbb'], self.overlappedInsertExpected)

        # really it ought to reduce this to
        # ['aaa', 'xxx', 'yyy', 'bbb']

    def testClashReplace(self):
        self.doMerge(['aaa'],
                     ['xxx'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert1(self):
        self.doMerge(['aaa'],
                     ['xxx', 'aaa'],
                     ['yyy', 'zzz'],
                     ['<<<<<<< ', 'xxx', 'aaa', '=======', 'yyy', 'zzz',
                      '>>>>>>> '])

    def testNonClashInsert2(self):
        self.doMerge(['aaa'],
                     ['aaa'],
                     ['yyy', 'zzz'],
                     ['yyy', 'zzz'])

    def testDeleteAndModify(self):
        """Clashing delete and modification.

        If one side modifies a region and the other deletes it then
        there should be a conflict with one side blank.
        """

        #######################################
        # skipped, not working yet
        return

        self.doMerge(['aaa', 'bbb', 'ccc'],
                     ['aaa', 'ddd', 'ccc'],
                     ['aaa', 'ccc'],
                     ['<<<<<<<< ', 'aaa', '=======', '>>>>>>> ', 'ccc'])

    def _test_merge_from_strings(self, base, a, b, expected):
        w = self.get_file()
        w.add_lines('text0', [], base.splitlines(True))
        w.add_lines('text1', ['text0'], a.splitlines(True))
        w.add_lines('text2', ['text0'], b.splitlines(True))
        self.log('merge plan:')
        p = list(w.plan_merge('text1', 'text2'))
        for state, line in p:
            if line:
                self.log('%12s | %s' % (state, line[:-1]))
        self.log('merge result:')
        result_text = ''.join(w.weave_merge(p))
        self.log(result_text)
        self.assertEqualDiff(result_text, expected)

    def test_weave_merge_conflicts(self):
        # does weave merge properly handle plans that end with unchanged?
        result = ''.join(self.get_file().weave_merge([('new-a', 'hello\n')]))
        self.assertEqual(result, 'hello\n')

    def test_deletion_extended(self):
        """One side deletes, the other deletes more."""
        base = """\
            line 1
            line 2
            line 3
            """
        a = """\
            line 1
            line 2
            """
        b = """\
            line 1
            """
        result = """\
            line 1
<<<<<<< 
            line 2
=======
>>>>>>> 
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_deletion_overlap(self):
        """Delete overlapping regions with no other conflict.

        Arguably it'd be better to treat these as agreement, rather than
        conflict, but for now conflict is safer.
        """
        base = """\
            start context
            int a() {}
            int b() {}
            int c() {}
            end context
            """
        a = """\
            start context
            int a() {}
            end context
            """
        b = """\
            start context
            int c() {}
            end context
            """
        result = """\
            start context
<<<<<<< 
            int a() {}
=======
            int c() {}
>>>>>>> 
            end context
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_agreement_deletion(self):
        """Agree to delete some lines, without conflicts."""
        base = """\
            start context
            base line 1
            base line 2
            end context
            """
        a = """\
            start context
            base line 1
            end context
            """
        b = """\
            start context
            base line 1
            end context
            """
        result = """\
            start context
            base line 1
            end context
            """
        self._test_merge_from_strings(base, a, b, result)

    def test_sync_on_deletion(self):
        """Specific case of merge where we can synchronize incorrectly.

        A previous version of the weave merge concluded that the two versions
        agreed on deleting line 2, and this could be a synchronization point.
        Line 1 was then considered in isolation, and thought to be deleted on
        both sides.

        It's better to consider the whole thing as a disagreement region.
        """
        base = """\
            start context
            base line 1
            base line 2
            end context
            """
        a = """\
            start context
            base line 1
            a's replacement line 2
            end context
            """
        b = """\
            start context
            b replaces
            both lines
            end context
            """
        result = """\
            start context
<<<<<<< 
            base line 1
            a's replacement line 2
=======
            b replaces
            both lines
>>>>>>> 
            end context
            """
        self._test_merge_from_strings(base, a, b, result)


class TestWeaveMerge(TestCaseWithMemoryTransport, MergeCasesMixin):

    def get_file(self, name='foo'):
        return WeaveFile(name, get_transport(self.get_url('.')), create=True)

    def log_contents(self, w):
        self.log('weave is:')
        tmpf = StringIO()
        write_weave(w, tmpf)
        self.log(tmpf.getvalue())

    overlappedInsertExpected = ['aaa', '<<<<<<< ', 'xxx', 'yyy', '=======',
                                'xxx', '>>>>>>> ', 'bbb']


class TestContentFactoryAdaption(TestCaseWithMemoryTransport):

    def test_select_adaptor(self):
        """Test expected adapters exist."""
        # One scenario for each lookup combination we expect to use.
        # Each is source_kind, requested_kind, adapter class
        scenarios = [
            ('knit-delta-gz', 'fulltext', _mod_knit.DeltaPlainToFullText),
            ('knit-ft-gz', 'fulltext', _mod_knit.FTPlainToFullText),
            ('knit-annotated-delta-gz', 'knit-delta-gz',
                _mod_knit.DeltaAnnotatedToUnannotated),
            ('knit-annotated-delta-gz', 'fulltext',
                _mod_knit.DeltaAnnotatedToFullText),
            ('knit-annotated-ft-gz', 'knit-ft-gz',
                _mod_knit.FTAnnotatedToUnannotated),
            ('knit-annotated-ft-gz', 'fulltext',
                _mod_knit.FTAnnotatedToFullText),
            ]
        for source, requested, klass in scenarios:
            adapter_factory = versionedfile.adapter_registry.get(
                (source, requested))
            adapter = adapter_factory(None)
            self.assertIsInstance(adapter, klass)

    def get_knit(self, annotated=True):
        mapper = ConstantMapper('knit')
        transport = self.get_transport()
        return make_file_factory(annotated, mapper)(transport)

    def helpGetBytes(self, f, ft_adapter, delta_adapter):
        """Grab the interested adapted texts for tests."""
        # origin is a fulltext
        entries = f.get_record_stream([('origin',)], 'unordered', False)
        base = entries.next()
        ft_data = ft_adapter.get_bytes(base)
        # merged is both a delta and multiple parents.
        entries = f.get_record_stream([('merged',)], 'unordered', False)
        merged = entries.next()
        delta_data = delta_adapter.get_bytes(merged)
        return ft_data, delta_data

    def test_deannotation_noeol(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 b284f94827db1fa2970d9e2014f080413b547a7e\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 4 32c2e79763b3f90e8ccde37f9710b6629c25a796\n'
            '1,2,3\nleft\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())

    def test_deannotation(self):
        """Test converting annotated knits to unannotated knits."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToUnannotated(None),
            _mod_knit.DeltaAnnotatedToUnannotated(None))
        self.assertEqual(
            'version origin 1 00e364d235126be43292ab09cb4686cf703ddc17\n'
            'origin\n'
            'end origin\n',
            GzipFile(mode='rb', fileobj=StringIO(ft_data)).read())
        self.assertEqual(
            'version merged 3 ed8bce375198ea62444dc71952b22cfc2b09226d\n'
            '2,2,2\nright\nmerged\nend merged\n',
            GzipFile(mode='rb', fileobj=StringIO(delta_data)).read())

    def test_annotated_to_fulltext_no_eol(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_annotated_to_fulltext(self):
        """Test adapting annotated knits to full texts (for -> weaves)."""
        # we need a full text, and a delta
        f = self.get_knit()
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTAnnotatedToFullText(None),
            _mod_knit.DeltaAnnotatedToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_unannotated_to_fulltext(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin\n', ft_data)
        self.assertEqual('base\nleft\nright\nmerged\n', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)

    def test_unannotated_to_fulltext_no_eol(self):
        """Test adapting unannotated knits to full texts.

        This is used for -> weaves, and for -> annotated knits.
        """
        # we need a full text, and a delta
        f = self.get_knit(annotated=False)
        get_diamond_files(f, 1, trailing_eol=False)
        # Reconstructing a full text requires a backing versioned file, and it
        # must have the base lines requested from it.
        logged_vf = versionedfile.RecordingVersionedFilesDecorator(f)
        ft_data, delta_data = self.helpGetBytes(f,
            _mod_knit.FTPlainToFullText(None),
            _mod_knit.DeltaPlainToFullText(logged_vf))
        self.assertEqual('origin', ft_data)
        self.assertEqual('base\nleft\nright\nmerged', delta_data)
        self.assertEqual([('get_record_stream', [('left',)], 'unordered',
            True)], logged_vf.calls)


class TestKeyMapper(TestCaseWithMemoryTransport):
    """Tests for various key mapping logic."""

    def test_identity_mapper(self):
        mapper = versionedfile.ConstantMapper("inventory")
        self.assertEqual("inventory", mapper.map(('foo@ar',)))
        self.assertEqual("inventory", mapper.map(('quux',)))

    def test_prefix_mapper(self):
        # format 5: plain
        mapper = versionedfile.PrefixMapper()
        self.assertEqual("file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("file-id"))
        self.assertEqual(('new-id',), mapper.unmap("new-id"))

    def test_hash_prefix_mapper(self):
        # format 6: hash + plain
        mapper = versionedfile.HashPrefixMapper()
        self.assertEqual("9b/file-id", mapper.map(("file-id", "revision-id")))
        self.assertEqual("45/new-id", mapper.map(("new-id", "revision-id")))
        self.assertEqual(('file-id',), mapper.unmap("9b/file-id"))
        self.assertEqual(('new-id',), mapper.unmap("45/new-id"))

    def test_hash_escaped_mapper(self):
        # knit1: hash + escaped
        mapper = versionedfile.HashEscapedPrefixMapper()
        self.assertEqual("88/%2520", mapper.map((" ", "revision-id")))
        self.assertEqual("ed/fil%2545-%2549d", mapper.map(("filE-Id",
            "revision-id")))
        self.assertEqual("88/ne%2557-%2549d", mapper.map(("neW-Id",
            "revision-id")))
        self.assertEqual(('filE-Id',), mapper.unmap("ed/fil%2545-%2549d"))
        self.assertEqual(('neW-Id',), mapper.unmap("88/ne%2557-%2549d"))
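

# All mappers share one tiny contract: map() turns a key tuple into a
# relative transport path, and unmap() inverts it for the prefix part of the
# key.  A minimal conforming mapper, as a sketch (not part of bzrlib):
class _DemoSuffixMapper(object):
    """Map ('file-id', 'revision-id') keys to 'file-id.knit' style paths."""

    def map(self, key):
        # Only the prefix (first element) of the key selects the file.
        return key[0] + '.knit'

    def unmap(self, partial_id):
        # Invert map(): strip the suffix and rebuild the prefix tuple.
        if partial_id.endswith('.knit'):
            partial_id = partial_id[:-len('.knit')]
        return (partial_id,)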


class TestVersionedFiles(TestCaseWithMemoryTransport):
    """Tests for the multiple-file variant of VersionedFile."""

    def get_versionedfiles(self, relpath='files'):
        transport = self.get_transport(relpath)
        if relpath != '.':
            transport.mkdir('.')
        files = self.factory(transport)
        if self.cleanup is not None:
            self.addCleanup(lambda: self.cleanup(files))
        return files

    def test_annotate(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            prefix = ()
        else:
            prefix = ('FileA',)
        # introduced full text
        origins = files.annotate(prefix + ('origin',))
        self.assertEqual([
            (prefix + ('origin',), 'origin\n')],
            origins)
        # a delta
        origins = files.annotate(prefix + ('base',))
        self.assertEqual([
            (prefix + ('base',), 'base\n')],
            origins)
        # a merge
        origins = files.annotate(prefix + ('merged',))
        if self.graph:
            self.assertEqual([
                (prefix + ('base',), 'base\n'),
                (prefix + ('left',), 'left\n'),
                (prefix + ('right',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        else:
            # Without a graph everything is new.
            self.assertEqual([
                (prefix + ('merged',), 'base\n'),
                (prefix + ('merged',), 'left\n'),
                (prefix + ('merged',), 'right\n'),
                (prefix + ('merged',), 'merged\n')
                ],
                origins)
        self.assertRaises(RevisionNotPresent,
            files.annotate, prefix + ('missing-key',))

    def test_construct(self):
        """Each parameterised test can be constructed on a transport."""
        files = self.get_versionedfiles()

    def get_diamond_files(self, files, trailing_eol=True, left_only=False,
        nokeys=False):
        return get_diamond_files(files, self.key_length,
            trailing_eol=trailing_eol, nograph=not self.graph,
            left_only=left_only, nokeys=nokeys)

    def test_add_lines_nostoresha(self):
        """When nostore_sha is supplied, re-adding old content raises."""
        vf = self.get_versionedfiles()
        empty_text = ('a', [])
        sample_text_nl = ('b', ["foo\n", "bar\n"])
        sample_text_no_nl = ('c', ["foo\n", "bar"])
        shas = []
        for version, lines in (empty_text, sample_text_nl, sample_text_no_nl):
            sha, _, _ = vf.add_lines(self.get_simple_key(version), [], lines)
            shas.append(sha)
        # we now have a copy of all the lines in the vf.
        for sha, (version, lines) in zip(
            shas, (empty_text, sample_text_nl, sample_text_no_nl)):
            new_key = self.get_simple_key(version + "2")
            self.assertRaises(errors.ExistingContent,
                vf.add_lines, new_key, [], lines,
                nostore_sha=sha)
            # and no new version should have been added.
            record = vf.get_record_stream([new_key], 'unordered', True).next()
            self.assertEqual('absent', record.storage_kind)

    def test_add_lines_return(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
        elif self.key_length == 2:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)

    def test_add_lines_no_key_generates_chk_key(self):
        files = self.get_versionedfiles()
        # save code by using the stock data insertion helper.
        adds = self.get_diamond_files(files, nokeys=True)
        results = []
        # We can only validate the first 2 elements returned from add_lines.
        for add in adds:
            self.assertEqual(3, len(add))
            results.append(add[:2])
        if self.key_length == 1:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
            # Check the added items got CHK keys.
            self.assertEqual(set([
                ('sha1:00e364d235126be43292ab09cb4686cf703ddc17',),
                ('sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',),
                ('sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',),
                ('sha1:a8478686da38e370e32e42e8a0c220e33ee9132f',),
                ('sha1:ed8bce375198ea62444dc71952b22cfc2b09226d',),
                ]),
                files.keys())
        elif self.key_length == 2:
            self.assertEqual([
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('00e364d235126be43292ab09cb4686cf703ddc17', 7),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44', 5),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('a8478686da38e370e32e42e8a0c220e33ee9132f', 10),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('9ef09dfa9d86780bdec9219a22560c6ece8e0ef1', 11),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23),
                ('ed8bce375198ea62444dc71952b22cfc2b09226d', 23)],
                results)
            # Check the added items got CHK keys.
            self.assertEqual(set([
                ('FileA', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                ('FileA', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                ('FileA', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                ('FileA', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                ('FileA', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
                ('FileB', 'sha1:00e364d235126be43292ab09cb4686cf703ddc17'),
                ('FileB', 'sha1:51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44'),
                ('FileB', 'sha1:9ef09dfa9d86780bdec9219a22560c6ece8e0ef1'),
                ('FileB', 'sha1:a8478686da38e370e32e42e8a0c220e33ee9132f'),
                ('FileB', 'sha1:ed8bce375198ea62444dc71952b22cfc2b09226d'),
                ]),
                files.keys())

    def test_empty_lines(self):
        """Empty files can be stored."""
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], [])
        self.assertEqual('',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), [])
        self.assertEqual('',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))

    def test_newline_only(self):
        f = self.get_versionedfiles()
        key_a = self.get_simple_key('a')
        f.add_lines(key_a, [], ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_a], 'unordered', True
                ).next().get_bytes_as('fulltext'))
        key_b = self.get_simple_key('b')
        f.add_lines(key_b, self.get_parents([key_a]), ['\n'])
        self.assertEqual('\n',
            f.get_record_stream([key_b], 'unordered', True
                ).next().get_bytes_as('fulltext'))

    def test_get_record_stream_empty(self):
        """An empty stream can be requested without error."""
        f = self.get_versionedfiles()
        entries = f.get_record_stream([], 'unordered', False)
        self.assertEqual([], list(entries))

    def assertValidStorageKind(self, storage_kind):
        """Assert that storage_kind is a valid storage_kind."""
        self.assertSubset([storage_kind],
            ['mpdiff', 'knit-annotated-ft', 'knit-annotated-delta',
             'knit-ft', 'knit-delta', 'chunked', 'fulltext',
             'knit-annotated-ft-gz', 'knit-annotated-delta-gz', 'knit-ft-gz',
             'knit-delta-gz',
             'knit-delta-closure', 'knit-delta-closure-ref',
             'groupcompress-block', 'groupcompress-block-ref'])

    def capture_stream(self, f, entries, on_seen, parents):
        """Capture a stream for testing."""
        for factory in entries:
            on_seen(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            if factory.sha1 is not None:
                self.assertEqual(f.get_sha1s([factory.key])[factory.key],
                    factory.sha1)
            self.assertEqual(parents[factory.key], factory.parents)
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)

    def test_get_record_stream_interface(self):
        """each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, entries, seen.add, parent_map)
        self.assertEqual(set(keys), seen)

    def get_simple_key(self, suffix):
        """Return a key for the object under test."""
        if self.key_length == 1:
            return (suffix,)
        else:
            return ('FileA',) + (suffix,)

    def get_keys_and_sort_order(self):
        """Get diamond test keys list, and their sort ordering."""
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
            sort_order = {('merged',):2, ('left',):1, ('right',):1, ('base',):0}
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
            sort_order = {
                ('FileA', 'merged'):2, ('FileA', 'left'):1, ('FileA', 'right'):1,
                ('FileA', 'base'):0,
                ('FileB', 'merged'):2, ('FileB', 'left'):1, ('FileB', 'right'):1,
                ('FileB', 'base'):0,
                }
        return keys, sort_order

    def get_keys_and_groupcompress_sort_order(self):
        """Get diamond test keys list, and their groupcompress sort ordering."""
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
            sort_order = {('merged',):0, ('left',):1, ('right',):1, ('base',):2}
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
            sort_order = {
                ('FileA', 'merged'):0, ('FileA', 'left'):1, ('FileA', 'right'):1,
                ('FileA', 'base'):2,
                ('FileB', 'merged'):3, ('FileB', 'left'):4, ('FileB', 'right'):4,
                ('FileB', 'base'):5,
                }
        return keys, sort_order

    def test_get_record_stream_interface_ordered(self):
        """each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', False)
        seen = []
        self.capture_stream(files, entries, seen.append, parent_map)
        self.assertStreamOrder(sort_order, seen, keys)

    def test_get_record_stream_interface_ordered_with_delta_closure(self):
        """each item must be accessible as a fulltext."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'topological', True)
        seen = []
        for factory in entries:
            seen.append(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            self.assertSubset([factory.sha1],
                [None, files.get_sha1s([factory.key])[factory.key]])
            self.assertEqual(parent_map[factory.key], factory.parents)
            ft_bytes = factory.get_bytes_as('fulltext')
            self.assertIsInstance(ft_bytes, str)
            chunked_bytes = factory.get_bytes_as('chunked')
            self.assertEqualDiff(ft_bytes, ''.join(chunked_bytes))

        self.assertStreamOrder(sort_order, seen, keys)
    def test_get_record_stream_interface_groupcompress(self):
        """Each item in a stream has to provide a regular interface."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, sort_order = self.get_keys_and_groupcompress_sort_order()
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'groupcompress', False)
        seen = []
        self.capture_stream(files, entries, seen.append, parent_map)
        self.assertStreamOrder(sort_order, seen, keys)

    def assertStreamOrder(self, sort_order, seen, keys):
        self.assertEqual(len(set(seen)), len(keys))
        if self.key_length == 1:
            lows = {():0}
        else:
            lows = {('FileA',):0, ('FileB',):0}
        if not self.graph:
            self.assertEqual(set(keys), set(seen))
        else:
            for key in seen:
                sort_pos = sort_order[key]
                self.assertTrue(sort_pos >= lows[key[:-1]],
                    "Out of order in sorted stream: %r, %r" % (key, seen))
                lows[key[:-1]] = sort_pos
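
    # lows tracks, per key prefix, the highest sort position seen so far:
    # a stream is accepted as ordered as long as positions never decrease
    # within any single prefix.
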
    def test_get_record_stream_unknown_storage_kind_raises(self):
        """Asking for a storage kind that the stream cannot supply raises."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'base'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        # We track the contents because we should be able to try, fail a
        # particular kind and then ask for one that works and continue.
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            self.assertValidStorageKind(factory.storage_kind)
            if factory.sha1 is not None:
                self.assertEqual(files.get_sha1s([factory.key])[factory.key],
                    factory.sha1)
            self.assertEqual(parent_map[factory.key], factory.parents)
            # currently no stream emits mpdiff
            self.assertRaises(errors.UnavailableRepresentation,
                factory.get_bytes_as, 'mpdiff')
            self.assertIsInstance(factory.get_bytes_as(factory.storage_kind),
                str)
        self.assertEqual(set(keys), seen)

    def test_get_record_stream_missing_records_are_absent(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('merged',), ('left',), ('right',), ('absent',), ('base',)]
        else:
            keys = [
                ('FileA', 'merged'), ('FileA', 'left'), ('FileA', 'right'),
                ('FileA', 'absent'), ('FileA', 'base'),
                ('FileB', 'merged'), ('FileB', 'left'), ('FileB', 'right'),
                ('FileB', 'absent'), ('FileB', 'base'),
                ('absent', 'absent'),
                ]
        parent_map = files.get_parent_map(keys)
        entries = files.get_record_stream(keys, 'unordered', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)
        entries = files.get_record_stream(keys, 'topological', False)
        self.assertAbsentRecord(files, keys, parent_map, entries)

    def assertRecordHasContent(self, record, bytes):
        """Assert that record's content is exactly the given bytes."""
        self.assertEqual(bytes, record.get_bytes_as('fulltext'))
        self.assertEqual(bytes, ''.join(record.get_bytes_as('chunked')))
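
    # 'fulltext' returns the content as a single str, while 'chunked'
    # returns an iterable of str whose concatenation is the same content;
    # both are in-memory (non-wire) representations.
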
    def test_get_record_stream_native_formats_are_wire_ready_one_ft(self):
        files = self.get_versionedfiles()
        key = self.get_simple_key('foo')
        files.add_lines(key, (), ['my text\n', 'content'])
        stream = files.get_record_stream([key], 'unordered', False)
        record = stream.next()
        if record.storage_kind in ('chunked', 'fulltext'):
            # chunked and fulltext representations are for direct use, not
            # wire serialisation: check they can be used directly. To send
            # such records over the wire, translation will be needed.
            self.assertRecordHasContent(record, "my text\ncontent")
        else:
            bytes = [record.get_bytes_as(record.storage_kind)]
            network_stream = versionedfile.NetworkRecordStream(bytes).read()
            source_record = record
            records = []
            for record in network_stream:
                records.append(record)
                self.assertEqual(source_record.storage_kind,
                    record.storage_kind)
                self.assertEqual(source_record.parents, record.parents)
                self.assertEqual(
                    source_record.get_bytes_as(source_record.storage_kind),
                    record.get_bytes_as(record.storage_kind))
            self.assertEqual(1, len(records))

    def assertStreamMetaEqual(self, records, expected, stream):
        """Assert that streams expected and stream have the same records.

        :param records: A list to collect the seen records.
        :return: A generator of the records in stream.
        """
        # We make assertions during copying to catch things early for
        # easier debugging.
        for record, ref_record in izip(stream, expected):
            records.append(record)
            self.assertEqual(ref_record.key, record.key)
            self.assertEqual(ref_record.storage_kind, record.storage_kind)
            self.assertEqual(ref_record.parents, record.parents)
            yield record
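
    # Note: as a generator, the assertions above only run while the
    # returned stream is being consumed, e.g. as insert_record_stream
    # drains it in the wire-format tests below.
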
    def stream_to_bytes_or_skip_counter(self, skipped_records, full_texts,
        stream):
        """Convert a stream to a bytes iterator.

        :param skipped_records: A list with one element to increment when a
            record is skipped.
        :param full_texts: A dict from key->fulltext representation, for
            checking chunked or fulltext stored records.
        :param stream: A record_stream.
        :return: An iterator over the bytes of each record.
        """
        for record in stream:
            if record.storage_kind in ('chunked', 'fulltext'):
                skipped_records[0] += 1
                # check the content is correct for direct use.
                self.assertRecordHasContent(record, full_texts[record.key])
            else:
                yield record.get_bytes_as(record.storage_kind)
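
    # Records already held in a native (wire-ready) storage kind are
    # serialised to their bytes here; chunked/fulltext records cannot go
    # over the wire as-is, so they are verified locally and counted as
    # skipped instead.
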
    def test_get_record_stream_native_formats_are_wire_ready_ft_delta(self):
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key, key_delta], 'unordered', False)
        ref = files.get_record_stream([key, key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key: "my text\ncontent",
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object so
        # we can check the content was carried across correctly without doing
        # delta inspection.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire, thank you!
        self.assertEqual(2, len(records) + skipped_records[0])
        if len(records):
            # If any content was copied, all of it must have been.
            self.assertIdenticalVersionedFile(files, target_files)

    def test_get_record_stream_native_formats_are_wire_ready_delta(self):
        # copy a delta over the wire
        files = self.get_versionedfiles()
        target_files = self.get_versionedfiles('target')
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        # Copy the basis text across so we can reconstruct the delta during
        # insertion into target.
        target_files.insert_record_stream(files.get_record_stream([key],
            'unordered', False))
        local = files.get_record_stream([key_delta], 'unordered', False)
        ref = files.get_record_stream([key_delta], 'unordered', False)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object so
        # we can check the content was carried across correctly without doing
        # delta inspection during check_stream.
        target_files.insert_record_stream(
            self.assertStreamMetaEqual(records, ref, network_stream))
        # No duplicates on the wire, thank you!
        self.assertEqual(1, len(records) + skipped_records[0])
        if len(records):
            # If any content was copied, all of it must have been.
            self.assertIdenticalVersionedFile(files, target_files)

    def test_get_record_stream_wire_ready_delta_closure_included(self):
        # copy a delta over the wire with the ability to get its full text.
        files = self.get_versionedfiles()
        key = self.get_simple_key('ft')
        key_delta = self.get_simple_key('delta')
        files.add_lines(key, (), ['my text\n', 'content'])
        if self.graph:
            delta_parents = (key,)
        else:
            delta_parents = ()
        files.add_lines(key_delta, delta_parents, ['different\n', 'content\n'])
        local = files.get_record_stream([key_delta], 'unordered', True)
        ref = files.get_record_stream([key_delta], 'unordered', True)
        skipped_records = [0]
        full_texts = {
            key_delta: "different\ncontent\n",
            }
        byte_stream = self.stream_to_bytes_or_skip_counter(
            skipped_records, full_texts, local)
        network_stream = versionedfile.NetworkRecordStream(byte_stream).read()
        records = []
        # insert the stream from the network into a versioned files object so
        # we can check the content was carried across correctly without doing
        # delta inspection during check_stream.
        for record in self.assertStreamMetaEqual(records, ref, network_stream):
            # we have to be able to get the full text out:
            self.assertRecordHasContent(record, full_texts[record.key])
        # No duplicates on the wire, thank you!
        self.assertEqual(1, len(records) + skipped_records[0])

    def assertAbsentRecord(self, files, keys, parents, entries):
        """Helper for test_get_record_stream_missing_records_are_absent."""
        seen = set()
        for factory in entries:
            seen.add(factory.key)
            if factory.key[-1] == 'absent':
                self.assertEqual('absent', factory.storage_kind)
                self.assertEqual(None, factory.sha1)
                self.assertEqual(None, factory.parents)
            else:
                self.assertValidStorageKind(factory.storage_kind)
                if factory.sha1 is not None:
                    sha1 = files.get_sha1s([factory.key])[factory.key]
                    self.assertEqual(sha1, factory.sha1)
                self.assertEqual(parents[factory.key], factory.parents)
                self.assertIsInstance(
                    factory.get_bytes_as(factory.storage_kind), str)
        self.assertEqual(set(keys), seen)

    def test_filter_absent_records(self):
        """Requested missing records can be filtered out trivially."""
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        keys, _ = self.get_keys_and_sort_order()
        parent_map = files.get_parent_map(keys)
        # Add an absent record in the middle of the present keys. (We don't
        # ask for just absent keys, to ensure that content before and after
        # the absent keys is still delivered.)
        present_keys = list(keys)
        if self.key_length == 1:
            keys.insert(2, ('extra',))
        else:
            keys.insert(2, ('extra', 'extra'))
        entries = files.get_record_stream(keys, 'unordered', False)
        seen = set()
        self.capture_stream(files, versionedfile.filter_absent(entries),
            seen.add, parent_map)
        self.assertEqual(set(present_keys), seen)

    def get_mapper(self):
        """Get a mapper suitable for the key length of the test interface."""
        if self.key_length == 1:
            return ConstantMapper('source')
        else:
            return HashEscapedPrefixMapper()

    def get_parents(self, parents):
        """Get parents, taking self.graph into consideration."""
        if self.graph:
            return parents
        else:
            return None

    def test_get_parent_map(self):
        files = self.get_versionedfiles()
        if self.key_length == 1:
            parent_details = [
                (('r0',), self.get_parents(())),
                (('r1',), self.get_parents((('r0',),))),
                (('r2',), self.get_parents(())),
                (('r3',), self.get_parents(())),
                (('m',), self.get_parents((('r0',),('r1',),('r2',),('r3',)))),
                ]
        else:
            parent_details = [
                (('FileA', 'r0'), self.get_parents(())),
                (('FileA', 'r1'), self.get_parents((('FileA', 'r0'),))),
                (('FileA', 'r2'), self.get_parents(())),
                (('FileA', 'r3'), self.get_parents(())),
                (('FileA', 'm'), self.get_parents((('FileA', 'r0'),
                    ('FileA', 'r1'), ('FileA', 'r2'), ('FileA', 'r3')))),
                ]
        for key, parents in parent_details:
            files.add_lines(key, parents, [])
            # immediately after adding it should be queryable.
            self.assertEqual({key:parents}, files.get_parent_map([key]))
        # We can ask for an empty set
        self.assertEqual({}, files.get_parent_map([]))
        # We can ask for many keys
        all_parents = dict(parent_details)
        self.assertEqual(all_parents, files.get_parent_map(all_parents.keys()))
        # Absent keys are just not included in the result.
        keys = all_parents.keys()
        if self.key_length == 1:
            keys.insert(1, ('missing',))
        else:
            keys.insert(1, ('missing', 'missing'))
        # Absent keys are just ignored
        self.assertEqual(all_parents, files.get_parent_map(keys))

    def test_get_sha1s(self):
        files = self.get_versionedfiles()
        self.get_diamond_files(files)
        if self.key_length == 1:
            keys = [('base',), ('origin',), ('left',), ('merged',), ('right',)]
        else:
            # ask for shas from different prefixes.
            keys = [
                ('FileA', 'base'), ('FileB', 'origin'), ('FileA', 'left'),
                ('FileA', 'merged'), ('FileB', 'right'),
                ]
        self.assertEqual({
            keys[0]: '51c64a6f4fc375daf0d24aafbabe4d91b6f4bb44',
            keys[1]: '00e364d235126be43292ab09cb4686cf703ddc17',
            keys[2]: 'a8478686da38e370e32e42e8a0c220e33ee9132f',
            keys[3]: 'ed8bce375198ea62444dc71952b22cfc2b09226d',
            keys[4]: '9ef09dfa9d86780bdec9219a22560c6ece8e0ef1',
            },
            files.get_sha1s(keys))
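
    # The sha1s here are those of the fixed diamond fulltexts, so the same
    # constants hold for every storage implementation under test.
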
    def test_insert_record_stream_empty(self):
        """Inserting an empty record stream should work."""
        files = self.get_versionedfiles()
        files.insert_record_stream([])

    def assertIdenticalVersionedFile(self, expected, actual):
        """Assert that left and right have the same contents."""
        self.assertEqual(set(actual.keys()), set(expected.keys()))
        actual_parents = actual.get_parent_map(actual.keys())
        if self.graph:
            self.assertEqual(actual_parents,
                expected.get_parent_map(expected.keys()))
        else:
            for key, parents in actual_parents.items():
                self.assertEqual(None, parents)
        for key in actual.keys():
            actual_text = actual.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            expected_text = expected.get_record_stream(
                [key], 'unordered', True).next().get_bytes_as('fulltext')
            self.assertEqual(actual_text, expected_text)

    def test_insert_record_stream_fulltexts(self):
        """Any file should accept a stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_fulltexts_noeol(self):
        """Any file should accept a stream of fulltexts."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        # weaves always output fulltexts.
        source = make_versioned_files_factory(WeaveFile, mapper)(
            source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_annotated_knits(self):
        """Any file should accept a stream from annotated knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_annotated_knits_noeol(self):
        """Any file should accept a stream from annotated knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(True, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_plain_knits(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_plain_knits_noeol(self):
        """Any file should accept a stream from plain knits."""
        files = self.get_versionedfiles()
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        self.get_diamond_files(source, trailing_eol=False)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_existing_keys(self):
        """Inserting keys already in a file should not error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        # insert some keys into files first.
        self.get_diamond_files(files, left_only=True)
        stream = source.get_record_stream(source.keys(), 'topological',
            False)
        files.insert_record_stream(stream)
        self.assertIdenticalVersionedFile(source, files)

    def test_insert_record_stream_missing_keys(self):
        """Inserting a stream with absent keys should raise an error."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        stream = source.get_record_stream([('missing',) * self.key_length],
            'topological', False)
        self.assertRaises(errors.RevisionNotPresent,
            files.insert_record_stream, stream)

    def test_insert_record_stream_out_of_order(self):
        """An out of order stream can either error or work."""
        files = self.get_versionedfiles()
        source = self.get_versionedfiles('source')
        self.get_diamond_files(source)
        if self.key_length == 1:
            origin_keys = [('origin',)]
            end_keys = [('merged',), ('left',)]
            start_keys = [('right',), ('base',)]
        else:
            origin_keys = [('FileA', 'origin'), ('FileB', 'origin')]
            end_keys = [('FileA', 'merged',), ('FileA', 'left',),
                ('FileB', 'merged',), ('FileB', 'left',)]
            start_keys = [('FileA', 'right',), ('FileA', 'base',),
                ('FileB', 'right',), ('FileB', 'base',)]
        origin_entries = source.get_record_stream(origin_keys, 'unordered', False)
        end_entries = source.get_record_stream(end_keys, 'topological', False)
        start_entries = source.get_record_stream(start_keys, 'topological', False)
        entries = chain(origin_entries, end_entries, start_entries)
        try:
            files.insert_record_stream(entries)
        except RevisionNotPresent:
            # Must not have corrupted the file.
            files.check()
        else:
            self.assertIdenticalVersionedFile(source, files)

    def get_knit_delta_source(self):
        """Get a source that can produce a stream with knit delta records,
        regardless of this test's scenario.
        """
        mapper = self.get_mapper()
        source_transport = self.get_transport('source')
        source_transport.mkdir('.')
        source = make_file_factory(False, mapper)(source_transport)
        get_diamond_files(source, self.key_length, trailing_eol=True,
            nograph=False, left_only=False)
        return source
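
    # A plain knit stores most texts as line deltas against a parent, so a
    # stream read back from this source should contain knit delta records
    # whose compression parents can then be deliberately withheld.
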
    def test_insert_record_stream_delta_missing_basis_no_corruption(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis.  In the meantime a record missing its basis is
        not added.
        """
        source = self.get_knit_delta_source()
        keys = [self.get_simple_key('origin'), self.get_simple_key('merged')]
        entries = source.get_record_stream(keys, 'unordered', False)
        files = self.get_versionedfiles()
        if self.support_partial_insertion:
            self.assertEqual([],
                list(files.get_missing_compression_parent_keys()))
            files.insert_record_stream(entries)
            missing_bases = files.get_missing_compression_parent_keys()
            self.assertEqual(set([self.get_simple_key('left')]),
                set(missing_bases))
            self.assertEqual(set(keys), set(files.get_parent_map(keys)))
        else:
            self.assertRaises(
                errors.RevisionNotPresent, files.insert_record_stream, entries)
            files.check()

    def test_insert_record_stream_delta_missing_basis_can_be_added_later(self):
        """Insertion where a needed basis is not included notifies the caller
        of the missing basis.  That basis can be added in a second
        insert_record_stream call that does not need to repeat records present
        in the previous stream.  The record(s) that required that basis are
        fully inserted once their basis is no longer missing.
        """
        if not self.support_partial_insertion:
            raise TestNotApplicable(
                'versioned file scenario does not support partial insertion')
        source = self.get_knit_delta_source()
        entries = source.get_record_stream([self.get_simple_key('origin'),
            self.get_simple_key('merged')], 'unordered', False)
        files = self.get_versionedfiles()
        files.insert_record_stream(entries)
        missing_bases = files.get_missing_compression_parent_keys()
        self.assertEqual(set([self.get_simple_key('left')]),
            set(missing_bases))
        # 'merged' is inserted (although a commit of a write group involving
        # this versionedfiles would fail).
        merged_key = self.get_simple_key('merged')
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())
        # Add the full delta closure of the missing records
        missing_entries = source.get_record_stream(
            missing_bases, 'unordered', True)
        files.insert_record_stream(missing_entries)
        # Now 'merged' is fully inserted (and a commit would succeed).
        self.assertEqual([], list(files.get_missing_compression_parent_keys()))
        self.assertEqual(
            [merged_key], files.get_parent_map([merged_key]).keys())
        files.check()

    def test_iter_lines_added_or_present_in_keys(self):
        # test that we get at least an equal set of the lines added by
        # versions in the store.
        # the ordering here is to make a tree so that dumb searches have
        # more changes to muck up.

        class InstrumentedProgress(progress.DummyProgress):

            def __init__(self):
                progress.DummyProgress.__init__(self)
                self.updates = []

            def update(self, msg=None, current=None, total=None):
                self.updates.append((msg, current, total))

        files = self.get_versionedfiles()
        # add a base to get included
        files.add_lines(self.get_simple_key('base'), (), ['base\n'])
        # add an ancestor to be included on one side
        files.add_lines(self.get_simple_key('lancestor'), (), ['lancestor\n'])
        # add an ancestor to be included on the other side
        files.add_lines(self.get_simple_key('rancestor'),
            self.get_parents([self.get_simple_key('base')]), ['rancestor\n'])
        # add a child of rancestor with no eofile-nl
        files.add_lines(self.get_simple_key('child'),
            self.get_parents([self.get_simple_key('rancestor')]),
            ['base\n', 'child\n'])
        # add a child of lancestor and base to join the two roots
        files.add_lines(self.get_simple_key('otherchild'),
            self.get_parents([self.get_simple_key('lancestor'),
                self.get_simple_key('base')]),
            ['base\n', 'lancestor\n', 'otherchild\n'])
        def iter_with_keys(keys, expected):
            # now we need to see what lines are returned, and how often.
            lines = {}
            progress = InstrumentedProgress()
            # iterate over the lines
            for line in files.iter_lines_added_or_present_in_keys(keys,
                pb=progress):
                lines.setdefault(line, 0)
                lines[line] += 1
            if [] != progress.updates:
                self.assertEqual(expected, progress.updates)
            return lines
        lines = iter_with_keys(
            [self.get_simple_key('child'), self.get_simple_key('otherchild')],
            [('Walking content', 0, 2),
             ('Walking content', 1, 2),
             ('Walking content', 2, 2)])
        # we must see child and otherchild
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
        # we don't care if we got more than that.

        lines = iter_with_keys(files.keys(),
            [('Walking content', 0, 5),
             ('Walking content', 1, 5),
             ('Walking content', 2, 5),
             ('Walking content', 3, 5),
             ('Walking content', 4, 5),
             ('Walking content', 5, 5)])
        # all lines must be seen at least once
        self.assertTrue(lines[('base\n', self.get_simple_key('base'))] > 0)
        self.assertTrue(
            lines[('lancestor\n', self.get_simple_key('lancestor'))] > 0)
        self.assertTrue(
            lines[('rancestor\n', self.get_simple_key('rancestor'))] > 0)
        self.assertTrue(lines[('child\n', self.get_simple_key('child'))] > 0)
        self.assertTrue(
            lines[('otherchild\n', self.get_simple_key('otherchild'))] > 0)
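
    # iter_lines_added_or_present_in_keys yields (text, key) tuples and is
    # allowed to over-report: every line added by the requested keys must
    # appear at least once, but extra occurrences are acceptable.
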
    def test_make_mpdiffs(self):
        from bzrlib import multiparent
        files = self.get_versionedfiles('source')
        # add texts that should trip the knit maximum delta chain threshold
        # as well as doing parallel chains of data in knits.
        # this is done by two chains of 25 insertions
        files.add_lines(self.get_simple_key('base'), [], ['line\n'])
        files.add_lines(self.get_simple_key('noeol'),
            self.get_parents([self.get_simple_key('base')]), ['line'])
        # detailed eol tests:
        # shared last line with parent no-eol
        files.add_lines(self.get_simple_key('noeolsecond'),
            self.get_parents([self.get_simple_key('noeol')]),
            ['line\n', 'line'])
        # differing last line with parent, both no-eol
        files.add_lines(self.get_simple_key('noeolnotshared'),
            self.get_parents([self.get_simple_key('noeolsecond')]),
            ['line\n', 'phone'])
        # add eol following a noneol parent, change content
        files.add_lines(self.get_simple_key('eol'),
            self.get_parents([self.get_simple_key('noeol')]), ['phone\n'])
        # add eol following a noneol parent, no change content
        files.add_lines(self.get_simple_key('eolline'),
            self.get_parents([self.get_simple_key('noeol')]), ['line\n'])
        # noeol with no parents:
        files.add_lines(self.get_simple_key('noeolbase'), [], ['line'])
        # noeol preceding its leftmost parent in the output:
        # this is done by making it a merge of two parents with no common
        # ancestry: noeolbase and noeol, with the
        # later-inserted parent the leftmost.
        files.add_lines(self.get_simple_key('eolbeforefirstparent'),
            self.get_parents([self.get_simple_key('noeolbase'),
                self.get_simple_key('noeol')]),
            ['line'])
        # two identical no-eol texts
        files.add_lines(self.get_simple_key('noeoldup'),
            self.get_parents([self.get_simple_key('noeol')]), ['line'])
        next_parent = self.get_simple_key('base')
        text_name = 'chain1-'
        text = ['line\n']
        sha1s = {0: 'da6d3141cb4a5e6f464bf6e0518042ddc7bfd079',
                 1: '45e21ea146a81ea44a821737acdb4f9791c8abe7',
                 2: 'e1f11570edf3e2a070052366c582837a4fe4e9fa',
                 3: '26b4b8626da827088c514b8f9bbe4ebf181edda1',
                 4: 'e28a5510be25ba84d31121cff00956f9970ae6f6',
                 5: 'd63ec0ce22e11dcf65a931b69255d3ac747a318d',
                 6: '2c2888d288cb5e1d98009d822fedfe6019c6a4ea',
                 7: '95c14da9cafbf828e3e74a6f016d87926ba234ab',
                 8: '779e9a0b28f9f832528d4b21e17e168c67697272',
                 9: '1f8ff4e5c6ff78ac106fcfe6b1e8cb8740ff9a8f',
                 10: '131a2ae712cf51ed62f143e3fbac3d4206c25a05',
                 11: 'c5a9d6f520d2515e1ec401a8f8a67e6c3c89f199',
                 12: '31a2286267f24d8bedaa43355f8ad7129509ea85',
                 13: 'dc2a7fe80e8ec5cae920973973a8ee28b2da5e0a',
                 14: '2c4b1736566b8ca6051e668de68650686a3922f2',
                 15: '5912e4ecd9b0c07be4d013e7e2bdcf9323276cde',
                 16: 'b0d2e18d3559a00580f6b49804c23fea500feab3',
                 17: '8e1d43ad72f7562d7cb8f57ee584e20eb1a69fc7',
                 18: '5cf64a3459ae28efa60239e44b20312d25b253f3',
                 19: '1ebed371807ba5935958ad0884595126e8c4e823',
                 20: '2aa62a8b06fb3b3b892a3292a068ade69d5ee0d3',
                 21: '01edc447978004f6e4e962b417a4ae1955b6fe5d',
                 22: 'd8d8dc49c4bf0bab401e0298bb5ad827768618bb',
                 23: 'c21f62b1c482862983a8ffb2b0c64b3451876e3f',
                 24: 'c0593fe795e00dff6b3c0fe857a074364d5f04fc',
                 25: 'dd1a1cf2ba9cc225c3aff729953e6364bf1d1855',
                 }
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        next_parent = self.get_simple_key('base')
        text_name = 'chain2-'
        text = ['line\n']
        for depth in range(26):
            new_version = self.get_simple_key(text_name + '%s' % depth)
            text = text + ['line\n']
            files.add_lines(new_version, self.get_parents([next_parent]), text)
            next_parent = new_version
        target = self.get_versionedfiles('target')
        for key in multiparent.topo_iter_keys(files, files.keys()):
            mpdiff = files.make_mpdiffs([key])[0]
            parents = files.get_parent_map([key])[key] or []
            target.add_mpdiffs(
                [(key, parents, files.get_sha1s([key])[key], mpdiff)])
            self.assertEqualDiff(
                files.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'),
                target.get_record_stream([key], 'unordered',
                    True).next().get_bytes_as('fulltext'))
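
    # make_mpdiffs produces multi-parent diffs that, together with the
    # recorded sha1, let add_mpdiffs rebuild byte-identical fulltexts in
    # the target; that round-trip is what assertEqualDiff verifies above.
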
    def test_keys(self):
        # While use is discouraged, versions() is still needed by aspects of
        # bzr.
        files = self.get_versionedfiles()
        self.assertEqual(set(), set(files.keys()))
        if self.key_length == 1:
            key = ('foo',)
        else:
            key = ('foo', 'bar',)
        files.add_lines(key, (), [])
        self.assertEqual(set([key]), set(files.keys()))


class VirtualVersionedFilesTests(TestCase):
    """Basic tests for the VirtualVersionedFiles implementations."""

    def _get_parent_map(self, keys):
        ret = {}
        for k in keys:
            if k in self._parent_map:
                ret[k] = self._parent_map[k]
        return ret

    def setUp(self):
        TestCase.setUp(self)
        self._lines = {}
        self._parent_map = {}
        self.texts = VirtualVersionedFiles(self._get_parent_map,
                                           self._lines.get)
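
    # VirtualVersionedFiles adapts two callables (a parent-map function
    # and a key->lines function) into the read-only parts of the
    # VersionedFiles API; the tests below check that the write methods
    # raise NotImplementedError.
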
    def test_add_lines(self):
        self.assertRaises(NotImplementedError,
                          self.texts.add_lines, "foo", [], [])

    def test_add_mpdiffs(self):
        self.assertRaises(NotImplementedError,
                          self.texts.add_mpdiffs, [])

    def test_check(self):
        self.assertTrue(self.texts.check())

    def test_insert_record_stream(self):
        self.assertRaises(NotImplementedError, self.texts.insert_record_stream,
                          [])

    def test_get_sha1s_nonexistent(self):
        self.assertEquals({}, self.texts.get_sha1s([("NONEXISTENT",)]))

    def test_get_sha1s(self):
        self._lines["key"] = ["dataline1", "dataline2"]
        self.assertEquals({("key",): osutils.sha_strings(self._lines["key"])},
                          self.texts.get_sha1s([("key",)]))

    def test_get_parent_map(self):
        self._parent_map = {"G": ("A", "B")}
        self.assertEquals({("G",): (("A",), ("B",))},
                          self.texts.get_parent_map([("G",), ("L",)]))
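
    # The backing map uses plain revision ids; VirtualVersionedFiles
    # wraps both keys and parents into 1-tuples, and silently omits
    # requested keys (like "L" here) that have no entry.
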
    def test_get_record_stream(self):
        self._lines["A"] = ["FOO", "BAR"]
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("chunked", record.storage_kind)
        self.assertEquals("FOOBAR", record.get_bytes_as("fulltext"))
        self.assertEquals(["FOO", "BAR"], record.get_bytes_as("chunked"))

    def test_get_record_stream_absent(self):
        it = self.texts.get_record_stream([("A",)], "unordered", True)
        record = it.next()
        self.assertEquals("absent", record.storage_kind)

    def test_iter_lines_added_or_present_in_keys(self):
        self._lines["A"] = ["FOO", "BAR"]
        self._lines["B"] = ["HEY"]
        self._lines["C"] = ["Alberta"]
        it = self.texts.iter_lines_added_or_present_in_keys([("A",), ("B",)])
        self.assertEquals(sorted([("FOO", "A"), ("BAR", "A"), ("HEY", "B")]),
                          sorted(list(it)))


class TestOrderingVersionedFilesDecorator(TestCaseWithMemoryTransport):

    def get_ordering_vf(self, key_priority):
        builder = self.make_branch_builder('test')
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'TREE_ROOT', 'directory', None))])
        builder.build_snapshot('B', ['A'], [])
        builder.build_snapshot('C', ['B'], [])
        builder.build_snapshot('D', ['C'], [])
        builder.finish_series()
        b = builder.get_branch()
        b.lock_read()
        self.addCleanup(b.unlock)
        vf = b.repository.inventories
        return versionedfile.OrderingVersionedFilesDecorator(vf, key_priority)
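
    # OrderingVersionedFilesDecorator wraps a VersionedFiles object,
    # records each get_record_stream request in .calls, and, for
    # 'unordered' requests, hands records back sorted by the supplied
    # key_priority (lowest value first; keys missing from the map sort
    # ahead of prioritised ones, ordered by key).
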
    def test_get_empty(self):
        vf = self.get_ordering_vf({})
        self.assertEqual([], vf.calls)

    def test_get_record_stream_topological(self):
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'topological', False)]
        # We should have gotten the keys in topological order
        self.assertEqual([('A',), ('B',), ('C',), ('D',)], keys)
        # And recorded that the request was made
        self.assertEqual([('get_record_stream', request_keys, 'topological',
                           False)], vf.calls)

    def test_get_record_stream_ordered(self):
        vf = self.get_ordering_vf({('A',): 3, ('B',): 2, ('C',): 4, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'unordered', False)]
        # They should be returned based on their priority
        self.assertEqual([('D',), ('B',), ('A',), ('C',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)

    def test_get_record_stream_implicit_order(self):
        vf = self.get_ordering_vf({('B',): 2, ('D',): 1})
        request_keys = [('B',), ('C',), ('D',), ('A',)]
        keys = [r.key for r in vf.get_record_stream(request_keys,
                                                    'unordered', False)]
        # A and C are not in the map, so they get sorted to the front. A comes
        # before C alphabetically, so it comes back first
        self.assertEqual([('A',), ('C',), ('D',), ('B',)], keys)
        # And the request recorded
        self.assertEqual([('get_record_stream', request_keys, 'unordered',
                           False)], vf.calls)