# Copyright (C) 2005, 2006, 2007 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

"""Tests for Knit data structure"""

from cStringIO import StringIO

from bzrlib.errors import (
    RevisionAlreadyPresent,
from bzrlib.index import *
from bzrlib.knit import (
from bzrlib.osutils import split_lines
from bzrlib.symbol_versioning import one_four
from bzrlib.tests import (
    TestCaseWithMemoryTransport,
    TestCaseWithTransport,
from bzrlib.transport import get_transport
from bzrlib.transport.memory import MemoryTransport
from bzrlib.tuned_gzip import GzipFile
from bzrlib.versionedfile import (
    RecordingVersionedFilesDecorator,


class _CompiledKnitFeature(Feature):

        import bzrlib._knit_load_data_c

    def feature_name(self):
        return 'bzrlib._knit_load_data_c'

CompiledKnitFeature = _CompiledKnitFeature()


class KnitContentTestsMixin(object):

    def test_constructor(self):
        content = self._make_content([])

        content = self._make_content([])
        self.assertEqual(content.text(), [])

        content = self._make_content([("origin1", "text1"), ("origin2", "text2")])
        self.assertEqual(content.text(), ["text1", "text2"])

        content = self._make_content([("origin1", "text1"), ("origin2", "text2")])
        copy = content.copy()
        self.assertIsInstance(copy, content.__class__)
        self.assertEqual(copy.annotate(), content.annotate())

    def assertDerivedBlocksEqual(self, source, target, noeol=False):
        """Assert that the derived matching blocks match real output"""
        source_lines = source.splitlines(True)
        target_lines = target.splitlines(True)
            if noeol and not line.endswith('\n'):
        source_content = self._make_content([(None, nl(l)) for l in source_lines])
        target_content = self._make_content([(None, nl(l)) for l in target_lines])
        line_delta = source_content.line_delta(target_content)
        delta_blocks = list(KnitContent.get_line_delta_blocks(line_delta,
            source_lines, target_lines))
        matcher = KnitSequenceMatcher(None, source_lines, target_lines)
        matcher_blocks = list(list(matcher.get_matching_blocks()))
        self.assertEqual(matcher_blocks, delta_blocks)

    def test_get_line_delta_blocks(self):
        self.assertDerivedBlocksEqual('a\nb\nc\n', 'q\nc\n')
        self.assertDerivedBlocksEqual(TEXT_1, TEXT_1)
        self.assertDerivedBlocksEqual(TEXT_1, TEXT_1A)
        self.assertDerivedBlocksEqual(TEXT_1, TEXT_1B)
        self.assertDerivedBlocksEqual(TEXT_1B, TEXT_1A)
        self.assertDerivedBlocksEqual(TEXT_1A, TEXT_1B)
        self.assertDerivedBlocksEqual(TEXT_1A, '')
        self.assertDerivedBlocksEqual('', TEXT_1A)
        self.assertDerivedBlocksEqual('', '')
        self.assertDerivedBlocksEqual('a\nb\nc', 'a\nb\nc\nd')

    def test_get_line_delta_blocks_noeol(self):
        """Handle historical knit deltas safely

        Some existing knit deltas don't consider the last line to differ
        when the only difference is whether it has a final newline.

        New knit deltas appear to always consider the last line to differ
        """
        self.assertDerivedBlocksEqual('a\nb\nc', 'a\nb\nc\nd\n', noeol=True)
        self.assertDerivedBlocksEqual('a\nb\nc\nd\n', 'a\nb\nc', noeol=True)
        self.assertDerivedBlocksEqual('a\nb\nc\n', 'a\nb\nc', noeol=True)
        self.assertDerivedBlocksEqual('a\nb\nc', 'a\nb\nc\n', noeol=True)


Banana cup cake recipe

Banana cup cake recipe
- bananas (do not use plantains!!!)

Banana cup cake recipe


class TestPlainKnitContent(TestCase, KnitContentTestsMixin):

    def _make_content(self, lines):
        annotated_content = AnnotatedKnitContent(lines)
        return PlainKnitContent(annotated_content.text(), 'bogus')

    def test_annotate(self):
        content = self._make_content([])
        self.assertEqual(content.annotate(), [])

        content = self._make_content([("origin1", "text1"), ("origin2", "text2")])
        self.assertEqual(content.annotate(),
            [("bogus", "text1"), ("bogus", "text2")])

    def test_line_delta(self):
        content1 = self._make_content([("", "a"), ("", "b")])
        content2 = self._make_content([("", "a"), ("", "a"), ("", "c")])
        self.assertEqual(content1.line_delta(content2),
            [(1, 2, 2, ["a", "c"])])

    def test_line_delta_iter(self):
        content1 = self._make_content([("", "a"), ("", "b")])
        content2 = self._make_content([("", "a"), ("", "a"), ("", "c")])
        it = content1.line_delta_iter(content2)
        self.assertEqual(it.next(), (1, 2, 2, ["a", "c"]))
        self.assertRaises(StopIteration, it.next)


class TestAnnotatedKnitContent(TestCase, KnitContentTestsMixin):

    def _make_content(self, lines):
        return AnnotatedKnitContent(lines)

    def test_annotate(self):
        content = self._make_content([])
        self.assertEqual(content.annotate(), [])

        content = self._make_content([("origin1", "text1"), ("origin2", "text2")])
        self.assertEqual(content.annotate(),
            [("origin1", "text1"), ("origin2", "text2")])

    def test_line_delta(self):
        content1 = self._make_content([("", "a"), ("", "b")])
        content2 = self._make_content([("", "a"), ("", "a"), ("", "c")])
        self.assertEqual(content1.line_delta(content2),
            [(1, 2, 2, [("", "a"), ("", "c")])])

    def test_line_delta_iter(self):
        content1 = self._make_content([("", "a"), ("", "b")])
        content2 = self._make_content([("", "a"), ("", "a"), ("", "c")])
        it = content1.line_delta_iter(content2)
        self.assertEqual(it.next(), (1, 2, 2, [("", "a"), ("", "c")]))
        self.assertRaises(StopIteration, it.next)
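

# MockTransport, below, is the test double used throughout this module: a test
# hands it the lines of a fake file (for example the lines of a .kndx index)
# and later inspects transport.calls, where every otherwise-unknown method
# invocation is recorded as a (name, args, kwargs) tuple, to see exactly what
# the code under test asked the transport to do.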
class MockTransport(object):

    def __init__(self, file_lines=None):
        self.file_lines = file_lines
        self.calls = []
        # We have no base directory for the MockTransport
        self.base = ''

    def get(self, filename):
        if self.file_lines is None:
            raise NoSuchFile(filename)
        return StringIO("\n".join(self.file_lines))

    def readv(self, relpath, offsets):
        fp = self.get(relpath)
        for offset, size in offsets:
            fp.seek(offset)
            yield offset, fp.read(size)

    def __getattr__(self, name):
        def queue_call(*args, **kwargs):
            self.calls.append((name, args, kwargs))
        return queue_call


class KnitRecordAccessTestsMixin(object):
    """Tests for getting and putting knit records."""

    def test_add_raw_records(self):
        """Add_raw_records adds records retrievable later."""
        access = self.get_access()
        memos = access.add_raw_records([('key', 10)], '1234567890')
        self.assertEqual(['1234567890'], list(access.get_raw_records(memos)))

    def test_add_several_raw_records(self):
        """Add several raw records and read some of them back."""
        access = self.get_access()
        memos = access.add_raw_records([('key', 10), ('key2', 2), ('key3', 5)],
            '12345678901234567')
        self.assertEqual(['1234567890', '12', '34567'],
            list(access.get_raw_records(memos)))
        self.assertEqual(['1234567890'],
            list(access.get_raw_records(memos[0:1])))
        self.assertEqual(['12'],
            list(access.get_raw_records(memos[1:2])))
        self.assertEqual(['34567'],
            list(access.get_raw_records(memos[2:3])))
        self.assertEqual(['1234567890', '34567'],
            list(access.get_raw_records(memos[0:1] + memos[2:3])))


class TestKnitKnitAccess(TestCaseWithMemoryTransport, KnitRecordAccessTestsMixin):
    """Tests for the .kndx implementation."""

    def get_access(self):
        """Get a .knit style access instance."""
        mapper = ConstantMapper("foo")
        access = _KnitKeyAccess(self.get_transport(), mapper)
        return access


class TestPackKnitAccess(TestCaseWithMemoryTransport, KnitRecordAccessTestsMixin):
    """Tests for the pack based access."""

    def get_access(self):
        return self._get_access()[0]

    def _get_access(self, packname='packfile', index='FOO'):
        transport = self.get_transport()
        def write_data(bytes):
            transport.append_bytes(packname, bytes)
        writer = pack.ContainerWriter(write_data)
        access = _DirectPackAccess({})
        access.set_writer(writer, index, (transport, packname))
        return access, writer

    def test_read_from_several_packs(self):
        access, writer = self._get_access()
        memos = []
        memos.extend(access.add_raw_records([('key', 10)], '1234567890'))
        access, writer = self._get_access('pack2', 'FOOBAR')
        memos.extend(access.add_raw_records([('key', 5)], '12345'))
        access, writer = self._get_access('pack3', 'BAZ')
        memos.extend(access.add_raw_records([('key', 5)], 'alpha'))
        transport = self.get_transport()
        access = _DirectPackAccess({"FOO":(transport, 'packfile'),
            "FOOBAR":(transport, 'pack2'),
            "BAZ":(transport, 'pack3')})
        self.assertEqual(['1234567890', '12345', 'alpha'],
            list(access.get_raw_records(memos)))
        self.assertEqual(['1234567890'],
            list(access.get_raw_records(memos[0:1])))
        self.assertEqual(['12345'],
            list(access.get_raw_records(memos[1:2])))
        self.assertEqual(['alpha'],
            list(access.get_raw_records(memos[2:3])))
        self.assertEqual(['1234567890', 'alpha'],
            list(access.get_raw_records(memos[0:1] + memos[2:3])))

    def test_set_writer(self):
        """The writer should be settable post construction."""
        access = _DirectPackAccess({})
        transport = self.get_transport()
        packname = 'packfile'
        def write_data(bytes):
            transport.append_bytes(packname, bytes)
        writer = pack.ContainerWriter(write_data)
        access.set_writer(writer, index, (transport, packname))
        memos = access.add_raw_records([('key', 10)], '1234567890')
        self.assertEqual(['1234567890'], list(access.get_raw_records(memos)))


class LowLevelKnitDataTests(TestCase):

    def create_gz_content(self, text):
        sio = StringIO()
        gz_file = gzip.GzipFile(mode='wb', fileobj=sio)
        gz_file.write(text)
        gz_file.close()
        return sio.getvalue()

    def test_valid_knit_data(self):
        sha1sum = sha.new('foo\nbar\n').hexdigest()
        gz_txt = self.create_gz_content('version rev-id-1 2 %s\n'
        transport = MockTransport([gz_txt])
        access = _KnitKeyAccess(transport, ConstantMapper('filename'))
        knit = KnitVersionedFiles(None, access)
        records = [(('rev-id-1',), (('rev-id-1',), 0, len(gz_txt)))]

        contents = list(knit._read_records_iter(records))
        self.assertEqual([(('rev-id-1',), ['foo\n', 'bar\n'],
            '4e48e2c9a3d2ca8a708cb0cc545700544efb5021')], contents)

        raw_contents = list(knit._read_records_iter_raw(records))
        self.assertEqual([(('rev-id-1',), gz_txt, sha1sum)], raw_contents)

    def test_not_enough_lines(self):
        sha1sum = sha.new('foo\n').hexdigest()
        # The record says 2 lines, but the data has only 1.
        gz_txt = self.create_gz_content('version rev-id-1 2 %s\n'
        transport = MockTransport([gz_txt])
        access = _KnitKeyAccess(transport, ConstantMapper('filename'))
        knit = KnitVersionedFiles(None, access)
        records = [(('rev-id-1',), (('rev-id-1',), 0, len(gz_txt)))]
        self.assertRaises(errors.KnitCorrupt, list,
            knit._read_records_iter(records))

        # read_records_iter_raw won't detect that sort of mismatch/corruption
        raw_contents = list(knit._read_records_iter_raw(records))
        self.assertEqual([(('rev-id-1',), gz_txt, sha1sum)], raw_contents)

    def test_too_many_lines(self):
        sha1sum = sha.new('foo\nbar\n').hexdigest()
        # The record says 1 line, but the data has 2.
        gz_txt = self.create_gz_content('version rev-id-1 1 %s\n'
        transport = MockTransport([gz_txt])
        access = _KnitKeyAccess(transport, ConstantMapper('filename'))
        knit = KnitVersionedFiles(None, access)
        records = [(('rev-id-1',), (('rev-id-1',), 0, len(gz_txt)))]
        self.assertRaises(errors.KnitCorrupt, list,
            knit._read_records_iter(records))

        # read_records_iter_raw won't detect that sort of mismatch/corruption
        raw_contents = list(knit._read_records_iter_raw(records))
        self.assertEqual([(('rev-id-1',), gz_txt, sha1sum)], raw_contents)

    def test_mismatched_version_id(self):
        sha1sum = sha.new('foo\nbar\n').hexdigest()
        gz_txt = self.create_gz_content('version rev-id-1 2 %s\n'
        transport = MockTransport([gz_txt])
        access = _KnitKeyAccess(transport, ConstantMapper('filename'))
        knit = KnitVersionedFiles(None, access)
        # We are asking for rev-id-2, but the data is rev-id-1
        records = [(('rev-id-2',), (('rev-id-2',), 0, len(gz_txt)))]
        self.assertRaises(errors.KnitCorrupt, list,
            knit._read_records_iter(records))

        # read_records_iter_raw detects mismatches in the header
        self.assertRaises(errors.KnitCorrupt, list,
            knit._read_records_iter_raw(records))

    def test_uncompressed_data(self):
        sha1sum = sha.new('foo\nbar\n').hexdigest()
        txt = ('version rev-id-1 2 %s\n'
        transport = MockTransport([txt])
        access = _KnitKeyAccess(transport, ConstantMapper('filename'))
        knit = KnitVersionedFiles(None, access)
        records = [(('rev-id-1',), (('rev-id-1',), 0, len(txt)))]

        # We don't have valid gzip data ==> corrupt
        self.assertRaises(errors.KnitCorrupt, list,
            knit._read_records_iter(records))

        # read_records_iter_raw will notice the bad data
        self.assertRaises(errors.KnitCorrupt, list,
            knit._read_records_iter_raw(records))

    def test_corrupted_data(self):
        sha1sum = sha.new('foo\nbar\n').hexdigest()
        gz_txt = self.create_gz_content('version rev-id-1 2 %s\n'
        # Change 2 bytes in the middle to \xff
        gz_txt = gz_txt[:10] + '\xff\xff' + gz_txt[12:]
        transport = MockTransport([gz_txt])
        access = _KnitKeyAccess(transport, ConstantMapper('filename'))
        knit = KnitVersionedFiles(None, access)
        records = [(('rev-id-1',), (('rev-id-1',), 0, len(gz_txt)))]
        self.assertRaises(errors.KnitCorrupt, list,
            knit._read_records_iter(records))
        # read_records_iter_raw will barf on bad gz data
        self.assertRaises(errors.KnitCorrupt, list,
            knit._read_records_iter_raw(records))


class LowLevelKnitIndexTests(TestCase):

    def get_knit_index(self, transport, name, mode):
        mapper = ConstantMapper(name)
        orig = knit._load_data
        def reset():
            knit._load_data = orig
        self.addCleanup(reset)
        from bzrlib._knit_load_data_py import _load_data_py
        knit._load_data = _load_data_py
        allow_writes = lambda: 'w' in mode
        return _KndxIndex(transport, mapper, lambda:None, allow_writes, lambda:True)

    def test_create_file(self):
        transport = MockTransport()
        index = self.get_knit_index(transport, "filename", "w")
        call = transport.calls.pop(0)
        # call[1][1] is a StringIO - we can't test it by simple equality.
        self.assertEqual('put_file_non_atomic', call[0])
        self.assertEqual('filename.kndx', call[1][0])
        # With no history, _KndxIndex writes a new index:
        self.assertEqual(_KndxIndex.HEADER,
            call[1][1].getvalue())
        self.assertEqual({'create_parent_dir': True}, call[2])

    def test_read_utf8_version_id(self):
        unicode_revision_id = u"version-\N{CYRILLIC CAPITAL LETTER A}"
        utf8_revision_id = unicode_revision_id.encode('utf-8')
        transport = MockTransport([
            '%s option 0 1 :' % (utf8_revision_id,)
        index = self.get_knit_index(transport, "filename", "r")
        # _KndxIndex is a private class, and deals in utf8 revision_ids, not
        # Unicode revision_ids.
        self.assertEqual({(utf8_revision_id,):()},
            index.get_parent_map(index.keys()))
        self.assertFalse((unicode_revision_id,) in index.keys())

    def test_read_utf8_parents(self):
        unicode_revision_id = u"version-\N{CYRILLIC CAPITAL LETTER A}"
        utf8_revision_id = unicode_revision_id.encode('utf-8')
        transport = MockTransport([
            "version option 0 1 .%s :" % (utf8_revision_id,)
        index = self.get_knit_index(transport, "filename", "r")
        self.assertEqual({("version",):((utf8_revision_id,),)},
            index.get_parent_map(index.keys()))

    def test_read_ignore_corrupted_lines(self):
        transport = MockTransport([
            "corrupted options 0 1 .b .c ",
            "version options 0 1 :"
        index = self.get_knit_index(transport, "filename", "r")
        self.assertEqual(1, len(index.keys()))
        self.assertEqual(set([("version",)]), index.keys())

    def test_read_corrupted_header(self):
        transport = MockTransport(['not a bzr knit index header\n'])
        index = self.get_knit_index(transport, "filename", "r")
        self.assertRaises(KnitHeaderError, index.keys)

    def test_read_duplicate_entries(self):
        transport = MockTransport([
            "parent options 0 1 :",
            "version options1 0 1 0 :",
            "version options2 1 2 .other :",
            "version options3 3 4 0 .other :"
        index = self.get_knit_index(transport, "filename", "r")
        self.assertEqual(2, len(index.keys()))
        # Check that the index used is the first one written. (Specific
        # to KnitIndex style indices.)
        self.assertEqual("1", index._dictionary_compress([("version",)]))
        self.assertEqual((("version",), 3, 4), index.get_position(("version",)))
        self.assertEqual(["options3"], index.get_options(("version",)))
        self.assertEqual({("version",):(("parent",), ("other",))},
            index.get_parent_map([("version",)]))

    def test_read_compressed_parents(self):
        transport = MockTransport([
            "c option 0 1 1 0 :",
        index = self.get_knit_index(transport, "filename", "r")
        self.assertEqual({("b",):(("a",),), ("c",):(("b",), ("a",))},
            index.get_parent_map([("b",), ("c",)]))

    def test_write_utf8_version_id(self):
        unicode_revision_id = u"version-\N{CYRILLIC CAPITAL LETTER A}"
        utf8_revision_id = unicode_revision_id.encode('utf-8')
        transport = MockTransport([
        index = self.get_knit_index(transport, "filename", "r")
            ((utf8_revision_id,), ["option"], ((utf8_revision_id,), 0, 1), [])])
        call = transport.calls.pop(0)
        # call[1][1] is a StringIO - we can't test it by simple equality.
        self.assertEqual('put_file_non_atomic', call[0])
        self.assertEqual('filename.kndx', call[1][0])
        # With no history, _KndxIndex writes a new index:
        self.assertEqual(_KndxIndex.HEADER +
            "\n%s option 0 1 :" % (utf8_revision_id,),
            call[1][1].getvalue())
        self.assertEqual({'create_parent_dir': True}, call[2])

    def test_write_utf8_parents(self):
        unicode_revision_id = u"version-\N{CYRILLIC CAPITAL LETTER A}"
        utf8_revision_id = unicode_revision_id.encode('utf-8')
        transport = MockTransport([
        index = self.get_knit_index(transport, "filename", "r")
            (("version",), ["option"], (("version",), 0, 1), [(utf8_revision_id,)])])
        call = transport.calls.pop(0)
        # call[1][1] is a StringIO - we can't test it by simple equality.
        self.assertEqual('put_file_non_atomic', call[0])
        self.assertEqual('filename.kndx', call[1][0])
        # With no history, _KndxIndex writes a new index:
        self.assertEqual(_KndxIndex.HEADER +
            "\nversion option 0 1 .%s :" % (utf8_revision_id,),
            call[1][1].getvalue())
        self.assertEqual({'create_parent_dir': True}, call[2])

        transport = MockTransport([
        index = self.get_knit_index(transport, "filename", "r")

        self.assertEqual(set(), index.keys())

        index.add_records([(("a",), ["option"], (("a",), 0, 1), [])])
        self.assertEqual(set([("a",)]), index.keys())

        index.add_records([(("a",), ["option"], (("a",), 0, 1), [])])
        self.assertEqual(set([("a",)]), index.keys())

        index.add_records([(("b",), ["option"], (("b",), 0, 1), [])])
        self.assertEqual(set([("a",), ("b",)]), index.keys())

    def add_a_b(self, index, random_id=None):
        if random_id is not None:
            kwargs["random_id"] = random_id
            (("a",), ["option"], (("a",), 0, 1), [("b",)]),
            (("a",), ["opt"], (("a",), 1, 2), [("c",)]),
            (("b",), ["option"], (("b",), 2, 3), [("a",)])

    def assertIndexIsAB(self, index):
            index.get_parent_map(index.keys()))
        self.assertEqual((("a",), 1, 2), index.get_position(("a",)))
        self.assertEqual((("b",), 2, 3), index.get_position(("b",)))
        self.assertEqual(["opt"], index.get_options(("a",)))

    def test_add_versions(self):
        transport = MockTransport([
        index = self.get_knit_index(transport, "filename", "r")
        call = transport.calls.pop(0)
        # call[1][1] is a StringIO - we can't test it by simple equality.
        self.assertEqual('put_file_non_atomic', call[0])
        self.assertEqual('filename.kndx', call[1][0])
        # With no history, _KndxIndex writes a new index:
            "\na option 0 1 .b :"
            "\nb option 2 3 0 :",
            call[1][1].getvalue())
        self.assertEqual({'create_parent_dir': True}, call[2])
        self.assertIndexIsAB(index)

    def test_add_versions_random_id_is_accepted(self):
        transport = MockTransport([
        index = self.get_knit_index(transport, "filename", "r")
        self.add_a_b(index, random_id=True)

    def test_delay_create_and_add_versions(self):
        transport = MockTransport()

        index = self.get_knit_index(transport, "filename", "w")
        self.assertEqual([], transport.calls)
        #[ {"dir_mode": 0777, "create_parent_dir": True, "mode": "wb"},
        # Two calls: one during which we load the existing index (and, when
        # it's missing, create it), then a second where we write the contents
        # out.
        self.assertEqual(2, len(transport.calls))
        call = transport.calls.pop(0)
        self.assertEqual('put_file_non_atomic', call[0])
        self.assertEqual('filename.kndx', call[1][0])
        # With no history, _KndxIndex writes a new index:
        self.assertEqual(_KndxIndex.HEADER, call[1][1].getvalue())
        self.assertEqual({'create_parent_dir': True}, call[2])
        call = transport.calls.pop(0)
        # call[1][1] is a StringIO - we can't test it by simple equality.
        self.assertEqual('put_file_non_atomic', call[0])
        self.assertEqual('filename.kndx', call[1][0])
        # With no history, _KndxIndex writes a new index:
            "\na option 0 1 .b :"
            "\nb option 2 3 0 :",
            call[1][1].getvalue())
        self.assertEqual({'create_parent_dir': True}, call[2])

    def test_get_position(self):
        transport = MockTransport([
        index = self.get_knit_index(transport, "filename", "r")

        self.assertEqual((("a",), 0, 1), index.get_position(("a",)))
        self.assertEqual((("b",), 1, 2), index.get_position(("b",)))

    def test_get_method(self):
        transport = MockTransport([
            "a fulltext,unknown 0 1 :",
            "b unknown,line-delta 1 2 :",
        index = self.get_knit_index(transport, "filename", "r")

        self.assertEqual("fulltext", index.get_method("a"))
        self.assertEqual("line-delta", index.get_method("b"))
        self.assertRaises(errors.KnitIndexUnknownMethod, index.get_method, "c")

    def test_get_options(self):
        transport = MockTransport([
        index = self.get_knit_index(transport, "filename", "r")

        self.assertEqual(["opt1"], index.get_options("a"))
        self.assertEqual(["opt2", "opt3"], index.get_options("b"))

    def test_get_parent_map(self):
        transport = MockTransport([
            "b option 1 2 0 .c :",
            "c option 1 2 1 0 .e :"
        index = self.get_knit_index(transport, "filename", "r")
            ("b",):(("a",), ("c",)),
            ("c",):(("b",), ("a",), ("e",)),
            }, index.get_parent_map(index.keys()))

    def test_impossible_parent(self):
        """Test we get KnitCorrupt if the parent couldn't possibly exist."""
        transport = MockTransport([
            "b option 0 1 4 :" # We don't have a 4th record
        index = self.get_knit_index(transport, 'filename', 'r')
        self.assertRaises(errors.KnitCorrupt, index.keys)
        if (str(e) == ('exceptions must be strings, classes, or instances,'
                ' not exceptions.IndexError')
                and sys.version_info[0:2] >= (2,5)):
            self.knownFailure('Pyrex <0.9.5 fails with TypeError when'
                ' raising new style exceptions with python'

    def test_corrupted_parent(self):
        transport = MockTransport([
            "c option 0 1 1v :", # Can't have a parent of '1v'
        index = self.get_knit_index(transport, 'filename', 'r')
        self.assertRaises(errors.KnitCorrupt, index.keys)
        if (str(e) == ('exceptions must be strings, classes, or instances,'
                ' not exceptions.ValueError')
                and sys.version_info[0:2] >= (2,5)):
            self.knownFailure('Pyrex <0.9.5 fails with TypeError when'
                ' raising new style exceptions with python'

    def test_corrupted_parent_in_list(self):
        transport = MockTransport([
            "c option 0 1 1 v :", # Can't have a parent of 'v'
        index = self.get_knit_index(transport, 'filename', 'r')
        self.assertRaises(errors.KnitCorrupt, index.keys)
        if (str(e) == ('exceptions must be strings, classes, or instances,'
                ' not exceptions.ValueError')
                and sys.version_info[0:2] >= (2,5)):
            self.knownFailure('Pyrex <0.9.5 fails with TypeError when'
                ' raising new style exceptions with python'

    def test_invalid_position(self):
        transport = MockTransport([
        index = self.get_knit_index(transport, 'filename', 'r')
        self.assertRaises(errors.KnitCorrupt, index.keys)
        if (str(e) == ('exceptions must be strings, classes, or instances,'
                ' not exceptions.ValueError')
                and sys.version_info[0:2] >= (2,5)):
            self.knownFailure('Pyrex <0.9.5 fails with TypeError when'
                ' raising new style exceptions with python'

    def test_invalid_size(self):
        transport = MockTransport([
        index = self.get_knit_index(transport, 'filename', 'r')
        self.assertRaises(errors.KnitCorrupt, index.keys)
        if (str(e) == ('exceptions must be strings, classes, or instances,'
                ' not exceptions.ValueError')
                and sys.version_info[0:2] >= (2,5)):
            self.knownFailure('Pyrex <0.9.5 fails with TypeError when'
                ' raising new style exceptions with python'

    def test_short_line(self):
        transport = MockTransport([
            "b option 10 10 0", # This line isn't terminated, ignored
        index = self.get_knit_index(transport, "filename", "r")
        self.assertEqual(set([('a',)]), index.keys())

    def test_skip_incomplete_record(self):
        # A line with bogus data should just be skipped
        transport = MockTransport([
            "b option 10 10 0", # This line isn't terminated, ignored
            "c option 20 10 0 :", # Properly terminated, and starts with '\n'
        index = self.get_knit_index(transport, "filename", "r")
        self.assertEqual(set([('a',), ('c',)]), index.keys())

    def test_trailing_characters(self):
        # A line with bogus data should just be skipped
        transport = MockTransport([
            "b option 10 10 0 :a", # This line has extra trailing characters
            "c option 20 10 0 :", # Properly terminated, and starts with '\n'
        index = self.get_knit_index(transport, "filename", "r")
        self.assertEqual(set([('a',), ('c',)]), index.keys())
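

# The .kndx lines fed to MockTransport in the tests above follow the knit
# index record layout: "name option,option pos size parent-refs... :".  The
# sketch below is illustrative only (it is not the bzrlib parser, which lives
# in bzrlib._knit_load_data_py / _knit_load_data_c); it just shows how such a
# line breaks down, assuming `history` is the list of names seen so far.
def _example_parse_kndx_line(line, history):
    """Illustrative sketch: split one well-formed .kndx record line."""
    if not line.endswith(' :'):
        # Unterminated or trailing-garbage lines are skipped by the real loader.
        return None
    fields = line[:-2].split(' ')
    name, options = fields[0], fields[1].split(',')
    pos, size = int(fields[2]), int(fields[3])
    parents = []
    for ref in fields[4:]:
        if ref.startswith('.'):
            parents.append(ref[1:])            # literal (uncompressed) parent name
        else:
            parents.append(history[int(ref)])  # back-reference to an earlier record
    return name, options, pos, size, parents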


class LowLevelKnitIndexTests_c(LowLevelKnitIndexTests):

    _test_needs_features = [CompiledKnitFeature]

    def get_knit_index(self, transport, name, mode):
        mapper = ConstantMapper(name)
        orig = knit._load_data
        def reset():
            knit._load_data = orig
        self.addCleanup(reset)
        from bzrlib._knit_load_data_c import _load_data_c
        knit._load_data = _load_data_c
        allow_writes = lambda: mode == 'w'
        return _KndxIndex(transport, mapper, lambda:None, allow_writes, lambda:True)


class KnitTests(TestCaseWithTransport):
    """Class containing knit test helper routines."""

    def make_test_knit(self, annotate=False, name='test'):
        mapper = ConstantMapper(name)
        return make_file_factory(annotate, mapper)(self.get_transport())
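
# A note on make_test_knit: it builds a knit-format VersionedFiles via
# make_file_factory on this test case's transport.  ConstantMapper(name) maps
# every key to the same prefix, so the whole store lives behind one index file
# ('test.kndx' by default), which is what the tests below inspect on disk.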


class TestKnitIndex(KnitTests):

    def test_add_versions_dictionary_compresses(self):
        """Adding versions to the index should update the lookup dict"""
        knit = self.make_test_knit()
        idx = knit._index
        idx.add_records([(('a-1',), ['fulltext'], (('a-1',), 0, 0), [])])
        self.check_file_contents('test.kndx',
            '# bzr knit index 8\n'
            (('a-2',), ['fulltext'], (('a-2',), 0, 0), [('a-1',)]),
            (('a-3',), ['fulltext'], (('a-3',), 0, 0), [('a-2',)]),
        self.check_file_contents('test.kndx',
            '# bzr knit index 8\n'
            'a-1 fulltext 0 0 :\n'
            'a-2 fulltext 0 0 0 :\n'
            'a-3 fulltext 0 0 1 :'
        self.assertEqual(set([('a-3',), ('a-1',), ('a-2',)]), idx.keys())
            ('a-1',): ((('a-1',), 0, 0), None, (), ('fulltext', False)),
            ('a-2',): ((('a-2',), 0, 0), None, (('a-1',),), ('fulltext', False)),
            ('a-3',): ((('a-3',), 0, 0), None, (('a-2',),), ('fulltext', False)),
            }, idx.get_build_details(idx.keys()))
        self.assertEqual({('a-1',):(),
            ('a-2',):(('a-1',),),
            ('a-3',):(('a-2',),),},
            idx.get_parent_map(idx.keys()))

    def test_add_versions_fails_clean(self):
        """If add_versions fails in the middle, it restores a pristine state.

        Any modifications that are made to the index are reset if all versions
        """
        # This cheats a little bit by passing in a generator which will
        # raise an exception before the processing finishes
        # Other possibilities would be to have a version with the wrong number
        # of entries, or to make the backing transport unable to write any
        knit = self.make_test_knit()
        idx = knit._index
        idx.add_records([(('a-1',), ['fulltext'], (('a-1',), 0, 0), [])])

        class StopEarly(Exception):
            pass

        def generate_failure():
            """Add some entries and then raise an exception"""
            yield (('a-2',), ['fulltext'], (None, 0, 0), ('a-1',))
            yield (('a-3',), ['fulltext'], (None, 0, 0), ('a-2',))

        # Assert the pre-condition
        self.assertEqual(set([('a-1',)]), set(idx.keys()))
            {('a-1',): ((('a-1',), 0, 0), None, (), ('fulltext', False))},
            idx.get_build_details([('a-1',)]))
        self.assertEqual({('a-1',):()}, idx.get_parent_map(idx.keys()))

        self.assertRaises(StopEarly, idx.add_records, generate_failure())
        # And it shouldn't be modified

    def test_knit_index_ignores_empty_files(self):
        # There was a race condition in older bzr, where a ^C at the right time
        # could leave an empty .kndx file, which bzr would later claim was a
        # corrupted file since the header was not present. In reality, the file
        # just wasn't created, so it should be ignored.
        t = get_transport('.')
        t.put_bytes('test.kndx', '')

        knit = self.make_test_knit()

    def test_knit_index_checks_header(self):
        t = get_transport('.')
        t.put_bytes('test.kndx', '# not really a knit header\n\n')
        k = self.make_test_knit()
        self.assertRaises(KnitHeaderError, k.keys)


class TestGraphIndexKnit(KnitTests):
    """Tests for knits using a GraphIndex rather than a KnitIndex."""

    def make_g_index(self, name, ref_lists=0, nodes=[]):
        builder = GraphIndexBuilder(ref_lists)
        for node, references, value in nodes:
            builder.add_node(node, references, value)
        stream = builder.finish()
        trans = self.get_transport()
        size = trans.put_file(name, stream)
        return GraphIndex(trans, name, size)

    def two_graph_index(self, deltas=False, catch_adds=False):
        """Build a two-graph index.

        :param deltas: If true, use underlying indices with two node-ref
            lists and 'parent' set to be delta-compressed against 'tail'.
        """
        # build a complex graph across several indices.
        if deltas:
            # delta compression in the index
            index1 = self.make_g_index('1', 2, [
                (('tip', ), 'N0 100', ([('parent', )], [], )),
                (('tail', ), '', ([], []))])
            index2 = self.make_g_index('2', 2, [
                (('parent', ), ' 100 78', ([('tail', ), ('ghost', )], [('tail', )])),
                (('separate', ), '', ([], []))])
        else:
            # just blob location and graph in the index.
            index1 = self.make_g_index('1', 1, [
                (('tip', ), 'N0 100', ([('parent', )], )),
                (('tail', ), '', ([], ))])
            index2 = self.make_g_index('2', 1, [
                (('parent', ), ' 100 78', ([('tail', ), ('ghost', )], )),
                (('separate', ), '', ([], ))])
        combined_index = CombinedGraphIndex([index1, index2])
        if catch_adds:
            self.combined_index = combined_index
            self.caught_entries = []
            add_callback = self.catch_add
        else:
            add_callback = None
        return _KnitGraphIndex(combined_index, lambda:True, deltas=deltas,
            add_callback=add_callback)
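
    # In the index nodes built above, the value string encodes the record's
    # storage coordinates as "<flags><offset> <length>": an 'N' flag marks a
    # no-eol text, so 'N0 100' is a no-eol fulltext at offset 0, length 100,
    # and ' 100 78' sits at offset 100 with length 78.  The first reference
    # list carries the parents; with deltas=True the second list names the
    # compression parent used for 'line-delta' records, as the get_method and
    # get_options tests below check.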

    def test_keys(self):
        index = self.two_graph_index()
        self.assertEqual(set([('tail',), ('tip',), ('parent',), ('separate',)]),

    def test_get_position(self):
        index = self.two_graph_index()
        self.assertEqual((index._graph_index._indices[0], 0, 100), index.get_position(('tip',)))
        self.assertEqual((index._graph_index._indices[1], 100, 78), index.get_position(('parent',)))

    def test_get_method_deltas(self):
        index = self.two_graph_index(deltas=True)
        self.assertEqual('fulltext', index.get_method(('tip',)))
        self.assertEqual('line-delta', index.get_method(('parent',)))

    def test_get_method_no_deltas(self):
        # check that the parent-history lookup is ignored with deltas=False.
        index = self.two_graph_index(deltas=False)
        self.assertEqual('fulltext', index.get_method(('tip',)))
        self.assertEqual('fulltext', index.get_method(('parent',)))

    def test_get_options_deltas(self):
        index = self.two_graph_index(deltas=True)
        self.assertEqual(['fulltext', 'no-eol'], index.get_options(('tip',)))
        self.assertEqual(['line-delta'], index.get_options(('parent',)))

    def test_get_options_no_deltas(self):
        # check that the parent-history lookup is ignored with deltas=False.
        index = self.two_graph_index(deltas=False)
        self.assertEqual(['fulltext', 'no-eol'], index.get_options(('tip',)))
        self.assertEqual(['fulltext'], index.get_options(('parent',)))

    def test_get_parent_map(self):
        index = self.two_graph_index()
        self.assertEqual({('parent',):(('tail',), ('ghost',))},
            index.get_parent_map([('parent',), ('ghost',)]))

    def catch_add(self, entries):
        self.caught_entries.append(entries)

    def test_add_no_callback_errors(self):
        index = self.two_graph_index()
        self.assertRaises(errors.ReadOnlyError, index.add_records,
            [(('new',), 'fulltext,no-eol', (None, 50, 60), ['separate'])])

    def test_add_version_smoke(self):
        index = self.two_graph_index(catch_adds=True)
        index.add_records([(('new',), 'fulltext,no-eol', (None, 50, 60),
        self.assertEqual([[(('new', ), 'N50 60', ((('separate',),),))]],
            self.caught_entries)

    def test_add_version_delta_not_delta_index(self):
        index = self.two_graph_index(catch_adds=True)
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('new',), 'no-eol,line-delta', (None, 0, 100), [('parent',)])])
        self.assertEqual([], self.caught_entries)

    def test_add_version_same_dup(self):
        index = self.two_graph_index(catch_adds=True)
        # options can be spelt two different ways
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 100), [('parent',)])])
        index.add_records([(('tip',), 'no-eol,fulltext', (None, 0, 100), [('parent',)])])
        # position/length are ignored (because each pack could have fulltext or
        # delta, and be at a different position).
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 50, 100),
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 1000),
        # but neither should have added data:
        self.assertEqual([[], [], [], []], self.caught_entries)

    def test_add_version_different_dup(self):
        index = self.two_graph_index(deltas=True, catch_adds=True)
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'no-eol,line-delta', (None, 0, 100), [('parent',)])])
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'line-delta,no-eol', (None, 0, 100), [('parent',)])])
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'fulltext', (None, 0, 100), [('parent',)])])
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'fulltext,no-eol', (None, 0, 100), [])])
        self.assertEqual([], self.caught_entries)

    def test_add_versions_nodeltas(self):
        index = self.two_graph_index(catch_adds=True)
            (('new',), 'fulltext,no-eol', (None, 50, 60), [('separate',)]),
            (('new2',), 'fulltext', (None, 0, 6), [('new',)]),
        self.assertEqual([(('new', ), 'N50 60', ((('separate',),),)),
            (('new2', ), ' 0 6', ((('new',),),))],
            sorted(self.caught_entries[0]))
        self.assertEqual(1, len(self.caught_entries))

    def test_add_versions_deltas(self):
        index = self.two_graph_index(deltas=True, catch_adds=True)
            (('new',), 'fulltext,no-eol', (None, 50, 60), [('separate',)]),
            (('new2',), 'line-delta', (None, 0, 6), [('new',)]),
        self.assertEqual([(('new', ), 'N50 60', ((('separate',),), ())),
            (('new2', ), ' 0 6', ((('new',),), (('new',),), ))],
            sorted(self.caught_entries[0]))
        self.assertEqual(1, len(self.caught_entries))

    def test_add_versions_delta_not_delta_index(self):
        index = self.two_graph_index(catch_adds=True)
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('new',), 'no-eol,line-delta', (None, 0, 100), [('parent',)])])
        self.assertEqual([], self.caught_entries)

    def test_add_versions_random_id_accepted(self):
        index = self.two_graph_index(catch_adds=True)
        index.add_records([], random_id=True)

    def test_add_versions_same_dup(self):
        index = self.two_graph_index(catch_adds=True)
        # options can be spelt two different ways
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 100),
        index.add_records([(('tip',), 'no-eol,fulltext', (None, 0, 100),
        # position/length are ignored (because each pack could have fulltext or
        # delta, and be at a different position).
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 50, 100),
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 1000),
        # but neither should have added data.
        self.assertEqual([[], [], [], []], self.caught_entries)

    def test_add_versions_different_dup(self):
        index = self.two_graph_index(deltas=True, catch_adds=True)
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'no-eol,line-delta', (None, 0, 100), [('parent',)])])
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'line-delta,no-eol', (None, 0, 100), [('parent',)])])
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'fulltext', (None, 0, 100), [('parent',)])])
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'fulltext,no-eol', (None, 0, 100), [])])
        # change options in the second record
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'fulltext,no-eol', (None, 0, 100), [('parent',)]),
            (('tip',), 'no-eol,line-delta', (None, 0, 100), [('parent',)])])
        self.assertEqual([], self.caught_entries)


class TestNoParentsGraphIndexKnit(KnitTests):
    """Tests for knits using _KnitGraphIndex with no parents."""

    def make_g_index(self, name, ref_lists=0, nodes=[]):
        builder = GraphIndexBuilder(ref_lists)
        for node, references in nodes:
            builder.add_node(node, references)
        stream = builder.finish()
        trans = self.get_transport()
        size = trans.put_file(name, stream)
        return GraphIndex(trans, name, size)

    def test_parents_deltas_incompatible(self):
        index = CombinedGraphIndex([])
        self.assertRaises(errors.KnitError, _KnitGraphIndex, lambda:True,
            index, deltas=True, parents=False)

    def two_graph_index(self, catch_adds=False):
        """Build a two-graph index.

        :param deltas: If true, use underlying indices with two node-ref
            lists and 'parent' set to be delta-compressed against 'tail'.
        """
        # put several versions in the index.
        index1 = self.make_g_index('1', 0, [
            (('tip', ), 'N0 100'),
        index2 = self.make_g_index('2', 0, [
            (('parent', ), ' 100 78'),
            (('separate', ), '')])
        combined_index = CombinedGraphIndex([index1, index2])
        if catch_adds:
            self.combined_index = combined_index
            self.caught_entries = []
            add_callback = self.catch_add
        else:
            add_callback = None
        return _KnitGraphIndex(combined_index, lambda:True, parents=False,
            add_callback=add_callback)

    def test_keys(self):
        index = self.two_graph_index()
        self.assertEqual(set([('tail',), ('tip',), ('parent',), ('separate',)]),

    def test_get_position(self):
        index = self.two_graph_index()
        self.assertEqual((index._graph_index._indices[0], 0, 100),
            index.get_position(('tip',)))
        self.assertEqual((index._graph_index._indices[1], 100, 78),
            index.get_position(('parent',)))

    def test_get_method(self):
        index = self.two_graph_index()
        self.assertEqual('fulltext', index.get_method(('tip',)))
        self.assertEqual(['fulltext'], index.get_options(('parent',)))

    def test_get_options(self):
        index = self.two_graph_index()
        self.assertEqual(['fulltext', 'no-eol'], index.get_options(('tip',)))
        self.assertEqual(['fulltext'], index.get_options(('parent',)))

    def test_get_parent_map(self):
        index = self.two_graph_index()
        self.assertEqual({('parent',):None},
            index.get_parent_map([('parent',), ('ghost',)]))

    def catch_add(self, entries):
        self.caught_entries.append(entries)

    def test_add_no_callback_errors(self):
        index = self.two_graph_index()
        self.assertRaises(errors.ReadOnlyError, index.add_records,
            [(('new',), 'fulltext,no-eol', (None, 50, 60), [('separate',)])])

    def test_add_version_smoke(self):
        index = self.two_graph_index(catch_adds=True)
        index.add_records([(('new',), 'fulltext,no-eol', (None, 50, 60), [])])
        self.assertEqual([[(('new', ), 'N50 60')]],
            self.caught_entries)

    def test_add_version_delta_not_delta_index(self):
        index = self.two_graph_index(catch_adds=True)
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('new',), 'no-eol,line-delta', (None, 0, 100), [])])
        self.assertEqual([], self.caught_entries)

    def test_add_version_same_dup(self):
        index = self.two_graph_index(catch_adds=True)
        # options can be spelt two different ways
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 100), [])])
        index.add_records([(('tip',), 'no-eol,fulltext', (None, 0, 100), [])])
        # position/length are ignored (because each pack could have fulltext or
        # delta, and be at a different position).
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 50, 100), [])])
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 1000), [])])
        # but neither should have added data.
        self.assertEqual([[], [], [], []], self.caught_entries)

    def test_add_version_different_dup(self):
        index = self.two_graph_index(catch_adds=True)
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'no-eol,line-delta', (None, 0, 100), [])])
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'line-delta,no-eol', (None, 0, 100), [])])
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'fulltext', (None, 0, 100), [])])
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'fulltext,no-eol', (None, 0, 100), [('parent',)])])
        self.assertEqual([], self.caught_entries)

    def test_add_versions(self):
        index = self.two_graph_index(catch_adds=True)
            (('new',), 'fulltext,no-eol', (None, 50, 60), []),
            (('new2',), 'fulltext', (None, 0, 6), []),
        self.assertEqual([(('new', ), 'N50 60'), (('new2', ), ' 0 6')],
            sorted(self.caught_entries[0]))
        self.assertEqual(1, len(self.caught_entries))

    def test_add_versions_delta_not_delta_index(self):
        index = self.two_graph_index(catch_adds=True)
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('new',), 'no-eol,line-delta', (None, 0, 100), [('parent',)])])
        self.assertEqual([], self.caught_entries)

    def test_add_versions_parents_not_parents_index(self):
        index = self.two_graph_index(catch_adds=True)
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('new',), 'no-eol,fulltext', (None, 0, 100), [('parent',)])])
        self.assertEqual([], self.caught_entries)

    def test_add_versions_random_id_accepted(self):
        index = self.two_graph_index(catch_adds=True)
        index.add_records([], random_id=True)

    def test_add_versions_same_dup(self):
        index = self.two_graph_index(catch_adds=True)
        # options can be spelt two different ways
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 100), [])])
        index.add_records([(('tip',), 'no-eol,fulltext', (None, 0, 100), [])])
        # position/length are ignored (because each pack could have fulltext or
        # delta, and be at a different position).
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 50, 100), [])])
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 1000), [])])
        # but neither should have added data.
        self.assertEqual([[], [], [], []], self.caught_entries)

    def test_add_versions_different_dup(self):
        index = self.two_graph_index(catch_adds=True)
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'no-eol,line-delta', (None, 0, 100), [])])
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'line-delta,no-eol', (None, 0, 100), [])])
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'fulltext', (None, 0, 100), [])])
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'fulltext,no-eol', (None, 0, 100), [('parent',)])])
        # change options in the second record
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'fulltext,no-eol', (None, 0, 100), []),
            (('tip',), 'no-eol,line-delta', (None, 0, 100), [])])
        self.assertEqual([], self.caught_entries)


class TestStacking(KnitTests):

    def get_basis_and_test_knit(self):
        basis = self.make_test_knit(name='basis')
        basis = RecordingVersionedFilesDecorator(basis)
        test = self.make_test_knit(name='test')
        test.add_fallback_versioned_files(basis)
        return basis, test
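
    # In these tests 'test' is the stacked knit and 'basis' is its fallback;
    # basis is wrapped in RecordingVersionedFilesDecorator so that basis.calls
    # records exactly which requests fall through the stack, which is what the
    # assertions below examine.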

    def test_add_fallback_versioned_files(self):
        basis = self.make_test_knit(name='basis')
        test = self.make_test_knit(name='test')
        # It must not error; other tests test that the fallback is referred to
        # when accessing data.
        test.add_fallback_versioned_files(basis)

    def test_add_lines(self):
        # lines added to the test are not added to the basis
        basis, test = self.get_basis_and_test_knit()
        key_basis = ('bar',)
        key_cross_border = ('quux',)
        key_delta = ('zaphod',)
        test.add_lines(key, (), ['foo\n'])
        self.assertEqual({}, basis.get_parent_map([key]))
        # lines added to the test that reference across the stack do a
        basis.add_lines(key_basis, (), ['foo\n'])
        test.add_lines(key_cross_border, (key_basis,), ['foo\n'])
        self.assertEqual('fulltext', test._index.get_method(key_cross_border))
        self.assertEqual([("get_parent_map", set([key_basis]))], basis.calls)
        # Subsequent adds do delta.
        test.add_lines(key_delta, (key_cross_border,), ['foo\n'])
        self.assertEqual('line-delta', test._index.get_method(key_delta))
        self.assertEqual([], basis.calls)

    def test_annotate(self):
        # annotations from the test knit are answered without asking the basis
        basis, test = self.get_basis_and_test_knit()
        key_basis = ('bar',)
        key_missing = ('missing',)
        test.add_lines(key, (), ['foo\n'])
        details = test.annotate(key)
        self.assertEqual([(key, 'foo\n')], details)
        self.assertEqual([], basis.calls)
        # But texts that are not in the test knit are looked for in the basis
        basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
        details = test.annotate(key_basis)
        self.assertEqual([(key_basis, 'foo\n'), (key_basis, 'bar\n')], details)
        # Not optimised to date:
        # self.assertEqual([("annotate", key_basis)], basis.calls)
        self.assertEqual([('get_parent_map', set([key_basis])),
            ('get_parent_map', set([key_basis])),
            ('get_parent_map', set([key_basis])),
            ('get_record_stream', [key_basis], 'unordered', True)],

    def test_check(self):
        # At the moment checking a stacked knit does implicitly check the
        basis, test = self.get_basis_and_test_knit()

    def test_get_parent_map(self):
        # parents in the test knit are answered without asking the basis
        basis, test = self.get_basis_and_test_knit()
        key_basis = ('bar',)
        key_missing = ('missing',)
        test.add_lines(key, (), [])
        parent_map = test.get_parent_map([key])
        self.assertEqual({key: ()}, parent_map)
        self.assertEqual([], basis.calls)
        # But parents that are not in the test knit are looked for in the basis
        basis.add_lines(key_basis, (), [])
        parent_map = test.get_parent_map([key, key_basis, key_missing])
        self.assertEqual({key: (),
            key_basis: ()}, parent_map)
        self.assertEqual([("get_parent_map", set([key_basis, key_missing]))],

    def test_get_record_stream_unordered_fulltexts(self):
        # records from the test knit are answered without asking the basis:
        basis, test = self.get_basis_and_test_knit()
        key_basis = ('bar',)
        key_missing = ('missing',)
        test.add_lines(key, (), ['foo\n'])
        records = list(test.get_record_stream([key], 'unordered', True))
        self.assertEqual(1, len(records))
        self.assertEqual([], basis.calls)
        # Missing (from test knit) objects are retrieved from the basis:
        basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
        records = list(test.get_record_stream([key_basis, key_missing],
        self.assertEqual(2, len(records))
        calls = list(basis.calls)
        for record in records:
            self.assertSubset([record.key], (key_basis, key_missing))
            if record.key == key_missing:
                self.assertIsInstance(record, AbsentContentFactory)
                reference = list(basis.get_record_stream([key_basis],
                    'unordered', True))[0]
                self.assertEqual(reference.key, record.key)
                self.assertEqual(reference.sha1, record.sha1)
                self.assertEqual(reference.storage_kind, record.storage_kind)
                self.assertEqual(reference.get_bytes_as(reference.storage_kind),
                    record.get_bytes_as(record.storage_kind))
                self.assertEqual(reference.get_bytes_as('fulltext'),
                    record.get_bytes_as('fulltext'))
        # It's not strictly minimal, but it seems reasonable for now for it to
        # ask which fallbacks have which parents.
            ("get_parent_map", set([key_basis, key_missing])),
            ("get_record_stream", [key_basis], 'unordered', True)],

    def test_get_record_stream_ordered_fulltexts(self):
        # ordering is preserved down into the fallback store.
        basis, test = self.get_basis_and_test_knit()
        key_basis = ('bar',)
        key_basis_2 = ('quux',)
        key_missing = ('missing',)
        test.add_lines(key, (key_basis,), ['foo\n'])
        # Missing (from test knit) objects are retrieved from the basis:
        basis.add_lines(key_basis, (key_basis_2,), ['foo\n', 'bar\n'])
        basis.add_lines(key_basis_2, (), ['quux\n'])
        # ask for them in non-topological order
        records = list(test.get_record_stream(
            [key, key_basis, key_missing, key_basis_2], 'topological', True))
        self.assertEqual(4, len(records))
        for record in records:
            self.assertSubset([record.key],
                (key_basis, key_missing, key_basis_2, key))
            if record.key == key_missing:
                self.assertIsInstance(record, AbsentContentFactory)
                results.append((record.key, record.sha1, record.storage_kind,
                    record.get_bytes_as('fulltext')))
        calls = list(basis.calls)
        order = [record[0] for record in results]
        self.assertEqual([key_basis_2, key_basis, key], order)
        for result in results:
            if result[0] == key:
            record = source.get_record_stream([result[0]], 'unordered',
            self.assertEqual(record.key, result[0])
            self.assertEqual(record.sha1, result[1])
            self.assertEqual(record.storage_kind, result[2])
            self.assertEqual(record.get_bytes_as('fulltext'), result[3])
        # It's not strictly minimal, but it seems reasonable for now for it to
        # ask which fallbacks have which parents.
            ("get_parent_map", set([key_basis, key_basis_2, key_missing])),
            # unordered is asked for by the underlying worker as it still
            # buffers everything while answering - which is a problem!
            ("get_record_stream", [key_basis_2, key_basis], 'unordered', True)],

    def test_get_record_stream_unordered_deltas(self):
        # records from the test knit are answered without asking the basis:
        basis, test = self.get_basis_and_test_knit()
        key_basis = ('bar',)
        key_missing = ('missing',)
        test.add_lines(key, (), ['foo\n'])
        records = list(test.get_record_stream([key], 'unordered', False))
        self.assertEqual(1, len(records))
        self.assertEqual([], basis.calls)
        # Missing (from test knit) objects are retrieved from the basis:
        basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
        records = list(test.get_record_stream([key_basis, key_missing],
            'unordered', False))
        self.assertEqual(2, len(records))
        calls = list(basis.calls)
        for record in records:
            self.assertSubset([record.key], (key_basis, key_missing))
            if record.key == key_missing:
                self.assertIsInstance(record, AbsentContentFactory)
                reference = list(basis.get_record_stream([key_basis],
                    'unordered', False))[0]
                self.assertEqual(reference.key, record.key)
                self.assertEqual(reference.sha1, record.sha1)
                self.assertEqual(reference.storage_kind, record.storage_kind)
                self.assertEqual(reference.get_bytes_as(reference.storage_kind),
                    record.get_bytes_as(record.storage_kind))
        # It's not strictly minimal, but it seems reasonable for now for it to
        # ask which fallbacks have which parents.
            ("get_parent_map", set([key_basis, key_missing])),
            ("get_record_stream", [key_basis], 'unordered', False)],

    def test_get_record_stream_ordered_deltas(self):
        # ordering is preserved down into the fallback store.
        basis, test = self.get_basis_and_test_knit()
        key_basis = ('bar',)
        key_basis_2 = ('quux',)
        key_missing = ('missing',)
        test.add_lines(key, (key_basis,), ['foo\n'])
        # Missing (from test knit) objects are retrieved from the basis:
        basis.add_lines(key_basis, (key_basis_2,), ['foo\n', 'bar\n'])
        basis.add_lines(key_basis_2, (), ['quux\n'])
        # ask for them in non-topological order
        records = list(test.get_record_stream(
            [key, key_basis, key_missing, key_basis_2], 'topological', False))
        self.assertEqual(4, len(records))
        for record in records:
            self.assertSubset([record.key],
                (key_basis, key_missing, key_basis_2, key))
            if record.key == key_missing:
                self.assertIsInstance(record, AbsentContentFactory)
                results.append((record.key, record.sha1, record.storage_kind,
                    record.get_bytes_as(record.storage_kind)))
        calls = list(basis.calls)
        order = [record[0] for record in results]
        self.assertEqual([key_basis_2, key_basis, key], order)
        for result in results:
            if result[0] == key:
            record = source.get_record_stream([result[0]], 'unordered',
            self.assertEqual(record.key, result[0])
            self.assertEqual(record.sha1, result[1])
            self.assertEqual(record.storage_kind, result[2])
            self.assertEqual(record.get_bytes_as(record.storage_kind), result[3])
        # It's not strictly minimal, but it seems reasonable for now for it to
        # ask which fallbacks have which parents.
            ("get_parent_map", set([key_basis, key_basis_2, key_missing])),
            ("get_record_stream", [key_basis_2, key_basis], 'topological', False)],

    def test_get_sha1s(self):
        # sha1's in the test knit are answered without asking the basis
        basis, test = self.get_basis_and_test_knit()
        key_basis = ('bar',)
        key_missing = ('missing',)
        test.add_lines(key, (), ['foo\n'])
        key_sha1sum = sha.new('foo\n').hexdigest()
        sha1s = test.get_sha1s([key])
        self.assertEqual({key: key_sha1sum}, sha1s)
        self.assertEqual([], basis.calls)
        # But texts that are not in the test knit are looked for in the basis
        # directly (rather than via text reconstruction) so that remote servers
        # etc don't have to answer with full content.
        basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
        basis_sha1sum = sha.new('foo\nbar\n').hexdigest()
        sha1s = test.get_sha1s([key, key_missing, key_basis])
        self.assertEqual({key: key_sha1sum,
            key_basis: basis_sha1sum}, sha1s)
        self.assertEqual([("get_sha1s", set([key_basis, key_missing]))],

    def test_insert_record_stream(self):
        # records are inserted as normal; insert_record_stream builds on
        # add_lines, so a smoke test should be all that's needed:
        key_basis = ('bar',)
        key_delta = ('zaphod',)
        basis, test = self.get_basis_and_test_knit()
        source = self.make_test_knit(name='source')
        basis.add_lines(key_basis, (), ['foo\n'])
        source.add_lines(key_basis, (), ['foo\n'])
        source.add_lines(key_delta, (key_basis,), ['bar\n'])
        stream = source.get_record_stream([key_delta], 'unordered', False)
        test.insert_record_stream(stream)
        self.assertEqual([("get_parent_map", set([key_basis]))],
        self.assertEqual({key_delta:(key_basis,)},
            test.get_parent_map([key_delta]))
        self.assertEqual('bar\n', test.get_record_stream([key_delta],
            'unordered', True).next().get_bytes_as('fulltext'))

    def test_iter_lines_added_or_present_in_keys(self):
        # Lines from the basis are returned, and lines for a given key are only
        # all sources are asked for keys:
        basis, test = self.get_basis_and_test_knit()
        basis.add_lines(key1, (), ["foo"])
        lines = list(test.iter_lines_added_or_present_in_keys([key1]))
        self.assertEqual([("foo\n", key1)], lines)
        self.assertEqual([("iter_lines_added_or_present_in_keys", set([key1]))],
        # keys in both are not duplicated:
        test.add_lines(key2, (), ["bar\n"])
        basis.add_lines(key2, (), ["bar\n"])
        lines = list(test.iter_lines_added_or_present_in_keys([key2]))
        self.assertEqual([("bar\n", key2)], lines)
        self.assertEqual([], basis.calls)

    def test_keys(self):
        # all sources are asked for keys:
        basis, test = self.get_basis_and_test_knit()
        self.assertEqual(set(), set(keys))
        self.assertEqual([("keys",)], basis.calls)
        # keys from a basis are returned:
        basis.add_lines(key1, (), [])
        self.assertEqual(set([key1]), set(keys))
        self.assertEqual([("keys",)], basis.calls)
        # keys in both are not duplicated:
        test.add_lines(key2, (), [])
        basis.add_lines(key2, (), [])
        self.assertEqual(2, len(keys))
        self.assertEqual(set([key1, key2]), set(keys))
        self.assertEqual([("keys",)], basis.calls)

    def test_add_mpdiffs(self):
        # records are inserted as normal; add_mpdiff builds on
        # add_lines, so a smoke test should be all that's needed:
        key_basis = ('bar',)
        key_delta = ('zaphod',)
        basis, test = self.get_basis_and_test_knit()
        source = self.make_test_knit(name='source')
        basis.add_lines(key_basis, (), ['foo\n'])
        source.add_lines(key_basis, (), ['foo\n'])
        source.add_lines(key_delta, (key_basis,), ['bar\n'])
        diffs = source.make_mpdiffs([key_delta])
        test.add_mpdiffs([(key_delta, (key_basis,),
            source.get_sha1s([key_delta])[key_delta], diffs[0])])
        self.assertEqual([("get_parent_map", set([key_basis])),
            ('get_record_stream', [key_basis], 'unordered', True),
            ('get_parent_map', set([key_basis]))],
        self.assertEqual({key_delta:(key_basis,)},
            test.get_parent_map([key_delta]))
        self.assertEqual('bar\n', test.get_record_stream([key_delta],
            'unordered', True).next().get_bytes_as('fulltext'))

    def test_make_mpdiffs(self):
        # Generating an mpdiff across a stacking boundary should detect parent
        key_right = ('zaphod',)
        basis, test = self.get_basis_and_test_knit()
        basis.add_lines(key_left, (), ['bar\n'])
        basis.add_lines(key_right, (), ['zaphod\n'])
        test.add_lines(key, (key_left, key_right),
            ['bar\n', 'foo\n', 'zaphod\n'])
        diffs = test.make_mpdiffs([key])
            multiparent.MultiParent([multiparent.ParentText(0, 0, 0, 1),
                multiparent.NewText(['foo\n']),
                multiparent.ParentText(1, 0, 2, 1)])],
        self.assertEqual(4, len(basis.calls))
            ("get_parent_map", set([key_left, key_right])),
            ("get_parent_map", set([key_left, key_right])),
            ("get_parent_map", set([key_left, key_right])),
        last_call = basis.calls[3]
        self.assertEqual('get_record_stream', last_call[0])
        self.assertEqual(set([key_left, key_right]), set(last_call[1]))
        self.assertEqual('unordered', last_call[2])
        self.assertEqual(True, last_call[3])