# Copyright (C) 2006-2011 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""Tests for Knit data structure"""

import gzip
import sys

from cStringIO import StringIO

from bzrlib import (
    errors,
    knit,
    osutils,
    pack,
    tests,
    transport,
    )
from bzrlib.errors import (
    KnitHeaderError,
    NoSuchFile,
    )
from bzrlib.index import *
from bzrlib.knit import (
    AnnotatedKnitContent,
    KnitContent,
    KnitVersionedFiles,
    PlainKnitContent,
    _VFContentMapGenerator,
    _DirectPackAccess,
    _KndxIndex,
    _KnitGraphIndex,
    _KnitKeyAccess,
    make_file_factory,
    )
from bzrlib.patiencediff import PatienceSequenceMatcher
from bzrlib.repofmt import pack_repo
from bzrlib.tests import (
    TestCase,
    TestCaseWithMemoryTransport,
    TestCaseWithTransport,
    TestNotApplicable,
    )
from bzrlib.versionedfile import (
    ConstantMapper,
    network_bytes_to_kind_and_offset,
    RecordingVersionedFilesDecorator,
    )


compiled_knit_feature = tests.ModuleAvailableFeature(
    'bzrlib._knit_load_data_pyx')


class KnitContentTestsMixin(object):

    def test_constructor(self):
        content = self._make_content([])

    def test_text(self):
        content = self._make_content([])
        self.assertEqual(content.text(), [])

        content = self._make_content([("origin1", "text1"), ("origin2", "text2")])
        self.assertEqual(content.text(), ["text1", "text2"])

    def test_copy(self):
        content = self._make_content([("origin1", "text1"), ("origin2", "text2")])
        copy = content.copy()
        self.assertIsInstance(copy, content.__class__)
        self.assertEqual(copy.annotate(), content.annotate())

    def assertDerivedBlocksEqual(self, source, target, noeol=False):
        """Assert that the derived matching blocks match real output"""
        source_lines = source.splitlines(True)
        target_lines = target.splitlines(True)
        def nl(line):
            if noeol and not line.endswith('\n'):
                return line + '\n'
            else:
                return line
        source_content = self._make_content([(None, nl(l)) for l in source_lines])
        target_content = self._make_content([(None, nl(l)) for l in target_lines])
        line_delta = source_content.line_delta(target_content)
        delta_blocks = list(KnitContent.get_line_delta_blocks(line_delta,
            source_lines, target_lines))
        matcher = PatienceSequenceMatcher(None, source_lines, target_lines)
        matcher_blocks = list(matcher.get_matching_blocks())
        self.assertEqual(matcher_blocks, delta_blocks)
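
    # The cases below hand whole texts to assertDerivedBlocksEqual, which
    # checks the invariant that the matching blocks derived from a knit line
    # delta are identical to what PatienceSequenceMatcher computes directly
    # on the same two line lists.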

    def test_get_line_delta_blocks(self):
        self.assertDerivedBlocksEqual('a\nb\nc\n', 'q\nc\n')
        self.assertDerivedBlocksEqual(TEXT_1, TEXT_1)
        self.assertDerivedBlocksEqual(TEXT_1, TEXT_1A)
        self.assertDerivedBlocksEqual(TEXT_1, TEXT_1B)
        self.assertDerivedBlocksEqual(TEXT_1B, TEXT_1A)
        self.assertDerivedBlocksEqual(TEXT_1A, TEXT_1B)
        self.assertDerivedBlocksEqual(TEXT_1A, '')
        self.assertDerivedBlocksEqual('', TEXT_1A)
        self.assertDerivedBlocksEqual('', '')
        self.assertDerivedBlocksEqual('a\nb\nc', 'a\nb\nc\nd')

    def test_get_line_delta_blocks_noeol(self):
        """Handle historical knit deltas safely

        Some existing knit deltas don't consider the last line to differ
        when the only difference is whether it has a final newline.

        New knit deltas appear to always consider the last line to differ
        in this case.
        """
        self.assertDerivedBlocksEqual('a\nb\nc', 'a\nb\nc\nd\n', noeol=True)
        self.assertDerivedBlocksEqual('a\nb\nc\nd\n', 'a\nb\nc', noeol=True)
        self.assertDerivedBlocksEqual('a\nb\nc\n', 'a\nb\nc', noeol=True)
        self.assertDerivedBlocksEqual('a\nb\nc', 'a\nb\nc\n', noeol=True)


TEXT_1 = """\
Banana cup cake recipe
"""

TEXT_1A = """\
Banana cup cake recipe
- bananas (do not use plantains!!!)
"""

TEXT_1B = """\
Banana cup cake recipe
"""


class TestPlainKnitContent(TestCase, KnitContentTestsMixin):

    def _make_content(self, lines):
        annotated_content = AnnotatedKnitContent(lines)
        return PlainKnitContent(annotated_content.text(), 'bogus')

    def test_annotate(self):
        content = self._make_content([])
        self.assertEqual(content.annotate(), [])

        content = self._make_content([("origin1", "text1"), ("origin2", "text2")])
        self.assertEqual(content.annotate(),
            [("bogus", "text1"), ("bogus", "text2")])

    def test_line_delta(self):
        content1 = self._make_content([("", "a"), ("", "b")])
        content2 = self._make_content([("", "a"), ("", "a"), ("", "c")])
        self.assertEqual(content1.line_delta(content2),
            [(1, 2, 2, ["a", "c"])])

    def test_line_delta_iter(self):
        content1 = self._make_content([("", "a"), ("", "b")])
        content2 = self._make_content([("", "a"), ("", "a"), ("", "c")])
        it = content1.line_delta_iter(content2)
        self.assertEqual(it.next(), (1, 2, 2, ["a", "c"]))
        self.assertRaises(StopIteration, it.next)


class TestAnnotatedKnitContent(TestCase, KnitContentTestsMixin):

    def _make_content(self, lines):
        return AnnotatedKnitContent(lines)

    def test_annotate(self):
        content = self._make_content([])
        self.assertEqual(content.annotate(), [])

        content = self._make_content([("origin1", "text1"), ("origin2", "text2")])
        self.assertEqual(content.annotate(),
            [("origin1", "text1"), ("origin2", "text2")])

    def test_line_delta(self):
        content1 = self._make_content([("", "a"), ("", "b")])
        content2 = self._make_content([("", "a"), ("", "a"), ("", "c")])
        self.assertEqual(content1.line_delta(content2),
            [(1, 2, 2, [("", "a"), ("", "c")])])

    def test_line_delta_iter(self):
        content1 = self._make_content([("", "a"), ("", "b")])
        content2 = self._make_content([("", "a"), ("", "a"), ("", "c")])
        it = content1.line_delta_iter(content2)
        self.assertEqual(it.next(), (1, 2, 2, [("", "a"), ("", "c")]))
        self.assertRaises(StopIteration, it.next)
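

# Both content classes must produce the same line deltas; they differ only in
# annotation: PlainKnitContent returns the supplied stand-in origin for every
# line, while AnnotatedKnitContent preserves the (origin, text) pairs given.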


class MockTransport(object):

    def __init__(self, file_lines=None):
        self.file_lines = file_lines
        self.calls = []
        # We have no base directory for the MockTransport
        self.base = ''

    def get(self, filename):
        if self.file_lines is None:
            raise NoSuchFile(filename)
        else:
            return StringIO("\n".join(self.file_lines))

    def readv(self, relpath, offsets):
        fp = self.get(relpath)
        for offset, size in offsets:
            fp.seek(offset)
            yield offset, fp.read(size)

    def __getattr__(self, name):
        # Record any other method call so tests can assert on self.calls.
        def queue_call(*args, **kwargs):
            self.calls.append((name, args, kwargs))
        return queue_call


class MockReadvFailingTransport(MockTransport):
    """Fail in the middle of a readv() result.

    This Transport will successfully yield the first two requested hunks, but
    raise NoSuchFile for the rest.
    """

    def readv(self, relpath, offsets):
        count = 0
        for result in MockTransport.readv(self, relpath, offsets):
            count += 1
            # we use 2 because the first offset is the pack header, the second
            # is the first actual content request
            if count > 2:
                raise errors.NoSuchFile(relpath)
            yield result


class KnitRecordAccessTestsMixin(object):
    """Tests for getting and putting knit records."""

    def test_add_raw_records(self):
        """add_raw_records adds records retrievable later."""
        access = self.get_access()
        memos = access.add_raw_records([('key', 10)], '1234567890')
        self.assertEqual(['1234567890'], list(access.get_raw_records(memos)))

    def test_add_several_raw_records(self):
        """Add multiple raw records and read some back."""
        access = self.get_access()
        memos = access.add_raw_records([('key', 10), ('key2', 2), ('key3', 5)],
            '12345678901234567')
        self.assertEqual(['1234567890', '12', '34567'],
            list(access.get_raw_records(memos)))
        self.assertEqual(['1234567890'],
            list(access.get_raw_records(memos[0:1])))
        self.assertEqual(['12'],
            list(access.get_raw_records(memos[1:2])))
        self.assertEqual(['34567'],
            list(access.get_raw_records(memos[2:3])))
        self.assertEqual(['1234567890', '34567'],
            list(access.get_raw_records(memos[0:1] + memos[2:3])))
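

# The memos returned by add_raw_records are opaque handles; the mixin above
# relies only on get_raw_records() yielding the matching byte strings, and on
# memo lists being sliceable and recombinable in any order.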


class TestKnitKnitAccess(TestCaseWithMemoryTransport, KnitRecordAccessTestsMixin):
    """Tests for the .kndx implementation."""

    def get_access(self):
        """Get a .knit style access instance."""
        mapper = ConstantMapper("foo")
        access = _KnitKeyAccess(self.get_transport(), mapper)
        return access


class _TestException(Exception):
    """Just an exception for local tests to use."""


class TestPackKnitAccess(TestCaseWithMemoryTransport, KnitRecordAccessTestsMixin):
    """Tests for the pack based access."""

    def get_access(self):
        return self._get_access()[0]

    def _get_access(self, packname='packfile', index='FOO'):
        transport = self.get_transport()
        def write_data(bytes):
            transport.append_bytes(packname, bytes)
        writer = pack.ContainerWriter(write_data)
        writer.begin()
        access = _DirectPackAccess({})
        access.set_writer(writer, index, (transport, packname))
        return access, writer

    def make_pack_file(self):
        """Create a pack file with 2 records."""
        access, writer = self._get_access(packname='packname', index='foo')
        memos = []
        memos.extend(access.add_raw_records([('key1', 10)], '1234567890'))
        memos.extend(access.add_raw_records([('key2', 5)], '12345'))
        writer.end()
        return memos

    def test_pack_collection_pack_retries(self):
        """An explicit pack of a pack collection succeeds even when a
        concurrent pack happens.
        """
        builder = self.make_branch_builder('.')
        builder.start_series()
        builder.build_snapshot('rev-1', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('file', 'file-id', 'file', 'content\nrev 1\n')),
            ])
        builder.build_snapshot('rev-2', ['rev-1'], [
            ('modify', ('file-id', 'content\nrev 2\n')),
            ])
        builder.build_snapshot('rev-3', ['rev-2'], [
            ('modify', ('file-id', 'content\nrev 3\n')),
            ])
        self.addCleanup(builder.finish_series)
        b = builder.get_branch()
        self.addCleanup(b.lock_write().unlock)
        repo = b.repository
        collection = repo._pack_collection
        # Concurrently repack the repo.
        reopened_repo = repo.bzrdir.open_repository()
        reopened_repo.pack()
        # The explicit pack must still succeed.
        collection.pack()

    def make_vf_for_retrying(self):
        """Create 3 packs and a reload function.

        Originally, 2 pack files will have the data, but one will be missing.
        And then the third will be used in place of the first two if reload()
        is called.

        :return: (versioned_file, reload_counter)
            versioned_file  a KnitVersionedFiles using the packs for access
        """
        builder = self.make_branch_builder('.', format="1.9")
        builder.start_series()
        builder.build_snapshot('rev-1', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('file', 'file-id', 'file', 'content\nrev 1\n')),
            ])
        builder.build_snapshot('rev-2', ['rev-1'], [
            ('modify', ('file-id', 'content\nrev 2\n')),
            ])
        builder.build_snapshot('rev-3', ['rev-2'], [
            ('modify', ('file-id', 'content\nrev 3\n')),
            ])
        builder.finish_series()
        b = builder.get_branch()
        b.lock_write()
        self.addCleanup(b.unlock)
        # Pack these three revisions into another pack file, but don't remove
        # the originals
        repo = b.repository
        collection = repo._pack_collection
        collection.ensure_loaded()
        orig_packs = collection.packs
        packer = pack_repo.Packer(collection, orig_packs, '.testpack')
        new_pack = packer.pack()
        # forget about the new pack
        collection.reset()
        repo.refresh_data()
        vf = repo.revisions
        # Set up a reload() function that switches to using the new pack file
        new_index = new_pack.revision_index
        access_tuple = new_pack.access_tuple()
        reload_counter = [0, 0, 0]
        def reload():
            reload_counter[0] += 1
            if reload_counter[1] > 0:
                # We already reloaded, nothing more to do
                reload_counter[2] += 1
                return False
            reload_counter[1] += 1
            vf._index._graph_index._indices[:] = [new_index]
            vf._access._indices.clear()
            vf._access._indices[new_index] = access_tuple
            return True
        # Delete one of the pack files so the data will need to be reloaded. We
        # will delete the file with 'rev-2' in it
        trans, name = orig_packs[1].access_tuple()
        trans.delete(name)
        # We don't have the index trigger reloading because we want to test
        # that we reload when the .pack disappears
        vf._access._reload_func = reload
        return vf, reload_counter
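
    # reload_counter, as built above, is [total reload() calls, calls that
    # switched the access map to the new pack, calls that were no-ops because
    # the switch had already happened]; the retry tests assert against it.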

    def make_reload_func(self, return_val=True):
        reload_called = [0]
        def reload():
            reload_called[0] += 1
            return return_val
        return reload_called, reload

    def make_retry_exception(self):
        # We raise a real exception so that sys.exc_info() is properly
        # populated
        try:
            raise _TestException('foobar')
        except _TestException, e:
            retry_exc = errors.RetryWithNewPacks(None, reload_occurred=False,
                                                 exc_info=sys.exc_info())
        return retry_exc
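
    # RetryWithNewPacks keeps the original sys.exc_info() so reload_or_raise()
    # can re-raise the underlying error when no reload function is available,
    # or when reloading reports that nothing changed.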

    def test_read_from_several_packs(self):
        access, writer = self._get_access()
        memos = []
        memos.extend(access.add_raw_records([('key', 10)], '1234567890'))
        writer.end()
        access, writer = self._get_access('pack2', 'FOOBAR')
        memos.extend(access.add_raw_records([('key', 5)], '12345'))
        writer.end()
        access, writer = self._get_access('pack3', 'BAZ')
        memos.extend(access.add_raw_records([('key', 5)], 'alpha'))
        writer.end()
        transport = self.get_transport()
        access = _DirectPackAccess({"FOO":(transport, 'packfile'),
            "FOOBAR":(transport, 'pack2'),
            "BAZ":(transport, 'pack3')})
        self.assertEqual(['1234567890', '12345', 'alpha'],
            list(access.get_raw_records(memos)))
        self.assertEqual(['1234567890'],
            list(access.get_raw_records(memos[0:1])))
        self.assertEqual(['12345'],
            list(access.get_raw_records(memos[1:2])))
        self.assertEqual(['alpha'],
            list(access.get_raw_records(memos[2:3])))
        self.assertEqual(['1234567890', 'alpha'],
            list(access.get_raw_records(memos[0:1] + memos[2:3])))

    def test_set_writer(self):
        """The writer should be settable post construction."""
        access = _DirectPackAccess({})
        transport = self.get_transport()
        packname = 'packfile'
        index = 'foo'
        def write_data(bytes):
            transport.append_bytes(packname, bytes)
        writer = pack.ContainerWriter(write_data)
        writer.begin()
        access.set_writer(writer, index, (transport, packname))
        memos = access.add_raw_records([('key', 10)], '1234567890')
        writer.end()
        self.assertEqual(['1234567890'], list(access.get_raw_records(memos)))

    def test_missing_index_raises_retry(self):
        memos = self.make_pack_file()
        transport = self.get_transport()
        reload_called, reload_func = self.make_reload_func()
        # Note that the index key has changed from 'foo' to 'bar'
        access = _DirectPackAccess({'bar':(transport, 'packname')},
                                   reload_func=reload_func)
        e = self.assertListRaises(errors.RetryWithNewPacks,
                                  access.get_raw_records, memos)
        # Because a key was passed in which does not match our index list, we
        # assume that the listing was already reloaded
        self.assertTrue(e.reload_occurred)
        self.assertIsInstance(e.exc_info, tuple)
        self.assertIs(e.exc_info[0], KeyError)
        self.assertIsInstance(e.exc_info[1], KeyError)

    def test_missing_index_raises_key_error_with_no_reload(self):
        memos = self.make_pack_file()
        transport = self.get_transport()
        # Note that the index key has changed from 'foo' to 'bar'
        access = _DirectPackAccess({'bar':(transport, 'packname')})
        e = self.assertListRaises(KeyError, access.get_raw_records, memos)

    def test_missing_file_raises_retry(self):
        memos = self.make_pack_file()
        transport = self.get_transport()
        reload_called, reload_func = self.make_reload_func()
        # Note that the 'filename' has been changed to 'different-packname'
        access = _DirectPackAccess({'foo':(transport, 'different-packname')},
                                   reload_func=reload_func)
        e = self.assertListRaises(errors.RetryWithNewPacks,
                                  access.get_raw_records, memos)
        # The file has gone missing, so we assume we need to reload
        self.assertFalse(e.reload_occurred)
        self.assertIsInstance(e.exc_info, tuple)
        self.assertIs(e.exc_info[0], errors.NoSuchFile)
        self.assertIsInstance(e.exc_info[1], errors.NoSuchFile)
        self.assertEqual('different-packname', e.exc_info[1].path)

    def test_missing_file_raises_no_such_file_with_no_reload(self):
        memos = self.make_pack_file()
        transport = self.get_transport()
        # Note that the 'filename' has been changed to 'different-packname'
        access = _DirectPackAccess({'foo':(transport, 'different-packname')})
        e = self.assertListRaises(errors.NoSuchFile,
                                  access.get_raw_records, memos)

    def test_failing_readv_raises_retry(self):
        memos = self.make_pack_file()
        transport = self.get_transport()
        failing_transport = MockReadvFailingTransport(
                                [transport.get_bytes('packname')])
        reload_called, reload_func = self.make_reload_func()
        access = _DirectPackAccess({'foo':(failing_transport, 'packname')},
                                   reload_func=reload_func)
        # Asking for a single record will not trigger the Mock failure
        self.assertEqual(['1234567890'],
            list(access.get_raw_records(memos[:1])))
        self.assertEqual(['12345'],
            list(access.get_raw_records(memos[1:2])))
        # A multiple offset readv() will fail mid-way through
        e = self.assertListRaises(errors.RetryWithNewPacks,
                                  access.get_raw_records, memos)
        # The file has gone missing, so we assume we need to reload
        self.assertFalse(e.reload_occurred)
        self.assertIsInstance(e.exc_info, tuple)
        self.assertIs(e.exc_info[0], errors.NoSuchFile)
        self.assertIsInstance(e.exc_info[1], errors.NoSuchFile)
        self.assertEqual('packname', e.exc_info[1].path)

    def test_failing_readv_raises_no_such_file_with_no_reload(self):
        memos = self.make_pack_file()
        transport = self.get_transport()
        failing_transport = MockReadvFailingTransport(
                                [transport.get_bytes('packname')])
        reload_called, reload_func = self.make_reload_func()
        access = _DirectPackAccess({'foo':(failing_transport, 'packname')})
        # Asking for a single record will not trigger the Mock failure
        self.assertEqual(['1234567890'],
            list(access.get_raw_records(memos[:1])))
        self.assertEqual(['12345'],
            list(access.get_raw_records(memos[1:2])))
        # A multiple offset readv() will fail mid-way through
        e = self.assertListRaises(errors.NoSuchFile,
                                  access.get_raw_records, memos)

    def test_reload_or_raise_no_reload(self):
        access = _DirectPackAccess({}, reload_func=None)
        retry_exc = self.make_retry_exception()
        # Without a reload_func, we will just re-raise the original exception
        self.assertRaises(_TestException, access.reload_or_raise, retry_exc)

    def test_reload_or_raise_reload_changed(self):
        reload_called, reload_func = self.make_reload_func(return_val=True)
        access = _DirectPackAccess({}, reload_func=reload_func)
        retry_exc = self.make_retry_exception()
        access.reload_or_raise(retry_exc)
        self.assertEqual([1], reload_called)
        retry_exc.reload_occurred = True
        access.reload_or_raise(retry_exc)
        self.assertEqual([2], reload_called)

    def test_reload_or_raise_reload_no_change(self):
        reload_called, reload_func = self.make_reload_func(return_val=False)
        access = _DirectPackAccess({}, reload_func=reload_func)
        retry_exc = self.make_retry_exception()
        # If reload_occurred is False, then we consider it an error to have
        # reload_func() return False (no changes).
        self.assertRaises(_TestException, access.reload_or_raise, retry_exc)
        self.assertEqual([1], reload_called)
        retry_exc.reload_occurred = True
        # If reload_occurred is True, then we assume nothing changed because
        # it had changed earlier, but didn't change again
        access.reload_or_raise(retry_exc)
        self.assertEqual([2], reload_called)

    def test_annotate_retries(self):
        vf, reload_counter = self.make_vf_for_retrying()
        # It is a little bit bogus to annotate the Revision VF, but it works,
        # as we have ancestry stored there
        key = ('rev-3',)
        reload_lines = vf.annotate(key)
        self.assertEqual([1, 1, 0], reload_counter)
        plain_lines = vf.annotate(key)
        self.assertEqual([1, 1, 0], reload_counter) # No extra reloading
        if reload_lines != plain_lines:
            self.fail('Annotation was not identical with reloading.')
        # Now delete the packs-in-use, which should trigger another reload, but
        # this time we just raise an exception because we can't recover
        for trans, name in vf._access._indices.itervalues():
            trans.delete(name)
        self.assertRaises(errors.NoSuchFile, vf.annotate, key)
        self.assertEqual([2, 1, 1], reload_counter)

    def test__get_record_map_retries(self):
        vf, reload_counter = self.make_vf_for_retrying()
        keys = [('rev-1',), ('rev-2',), ('rev-3',)]
        records = vf._get_record_map(keys)
        self.assertEqual(keys, sorted(records.keys()))
        self.assertEqual([1, 1, 0], reload_counter)
        # Now delete the packs-in-use, which should trigger another reload, but
        # this time we just raise an exception because we can't recover
        for trans, name in vf._access._indices.itervalues():
            trans.delete(name)
        self.assertRaises(errors.NoSuchFile, vf._get_record_map, keys)
        self.assertEqual([2, 1, 1], reload_counter)

    def test_get_record_stream_retries(self):
        vf, reload_counter = self.make_vf_for_retrying()
        keys = [('rev-1',), ('rev-2',), ('rev-3',)]
        record_stream = vf.get_record_stream(keys, 'topological', False)
        record = record_stream.next()
        self.assertEqual(('rev-1',), record.key)
        self.assertEqual([0, 0, 0], reload_counter)
        record = record_stream.next()
        self.assertEqual(('rev-2',), record.key)
        self.assertEqual([1, 1, 0], reload_counter)
        record = record_stream.next()
        self.assertEqual(('rev-3',), record.key)
        self.assertEqual([1, 1, 0], reload_counter)
        # Now delete all pack files, and see that we raise the right error
        for trans, name in vf._access._indices.itervalues():
            trans.delete(name)
        self.assertListRaises(errors.NoSuchFile,
            vf.get_record_stream, keys, 'topological', False)

    def test_iter_lines_added_or_present_in_keys_retries(self):
        vf, reload_counter = self.make_vf_for_retrying()
        keys = [('rev-1',), ('rev-2',), ('rev-3',)]
        # Unfortunately, iter_lines_added_or_present_in_keys iterates the
        # result in random order (determined by the iteration order from a
        # set()), so we don't have any solid way to trigger whether data is
        # read before or after. However we tried to delete the middle node to
        # exercise the code well.
        # What we care about is that all lines are always yielded, but not
        # duplicated
        reload_lines = sorted(vf.iter_lines_added_or_present_in_keys(keys))
        self.assertEqual([1, 1, 0], reload_counter)
        # Now do it again, to make sure the result is equivalent
        plain_lines = sorted(vf.iter_lines_added_or_present_in_keys(keys))
        self.assertEqual([1, 1, 0], reload_counter) # No extra reloading
        self.assertEqual(plain_lines, reload_lines)
        self.assertEqual(21, len(plain_lines))
        # Now delete all pack files, and see that we raise the right error
        for trans, name in vf._access._indices.itervalues():
            trans.delete(name)
        self.assertListRaises(errors.NoSuchFile,
            vf.iter_lines_added_or_present_in_keys, keys)
        self.assertEqual([2, 1, 1], reload_counter)

    def test_get_record_stream_yields_disk_sorted_order(self):
        # if we get 'unordered' pick a semi-optimal order for reading. The
        # order should be grouped by pack file, and then by position in file
        repo = self.make_repository('test', format='pack-0.92')
        repo.lock_write()
        self.addCleanup(repo.unlock)
        repo.start_write_group()
        vf = repo.texts
        vf.add_lines(('f-id', 'rev-5'), [('f-id', 'rev-4')], ['lines\n'])
        vf.add_lines(('f-id', 'rev-1'), [], ['lines\n'])
        vf.add_lines(('f-id', 'rev-2'), [('f-id', 'rev-1')], ['lines\n'])
        repo.commit_write_group()
        # We inserted them as rev-5, rev-1, rev-2, we should get them back in
        # the same order
        stream = vf.get_record_stream([('f-id', 'rev-1'), ('f-id', 'rev-5'),
                                       ('f-id', 'rev-2')], 'unordered', False)
        keys = [r.key for r in stream]
        self.assertEqual([('f-id', 'rev-5'), ('f-id', 'rev-1'),
                          ('f-id', 'rev-2')], keys)
        repo.start_write_group()
        vf.add_lines(('f-id', 'rev-4'), [('f-id', 'rev-3')], ['lines\n'])
        vf.add_lines(('f-id', 'rev-3'), [('f-id', 'rev-2')], ['lines\n'])
        vf.add_lines(('f-id', 'rev-6'), [('f-id', 'rev-5')], ['lines\n'])
        repo.commit_write_group()
        # Request in random order, to make sure the output order isn't based on
        # the request order
        request_keys = set(('f-id', 'rev-%d' % i) for i in range(1, 7))
        stream = vf.get_record_stream(request_keys, 'unordered', False)
        keys = [r.key for r in stream]
        # We want to get the keys back in disk order, but it doesn't matter
        # which pack we read from first. So this can come back in 2 orders
        alt1 = [('f-id', 'rev-%d' % i) for i in [4, 3, 6, 5, 1, 2]]
        alt2 = [('f-id', 'rev-%d' % i) for i in [5, 1, 2, 4, 3, 6]]
        if keys != alt1 and keys != alt2:
            self.fail('Returned key order did not match either expected order.'
                      ' expected %s or %s, not %s'
                      % (alt1, alt2, keys))


class LowLevelKnitDataTests(TestCase):

    def create_gz_content(self, text):
        sio = StringIO()
        gz_file = gzip.GzipFile(mode='wb', fileobj=sio)
        gz_file.write(text)
        gz_file.close()
        return sio.getvalue()
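
    # The records built below follow the raw knit data layout these tests
    # exercise: each record is a gzipped hunk whose first line is
    # 'version <rev-id> <line-count> <sha1>', followed by the text lines and
    # an 'end <rev-id>' trailer. Records are simply concatenated, so a record
    # is addressed purely by (offset, length) within the file.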

    def make_multiple_records(self):
        """Create the content for multiple records."""
        sha1sum = osutils.sha('foo\nbar\n').hexdigest()
        total_txt = []
        gz_txt = self.create_gz_content('version rev-id-1 2 %s\n'
                                        'foo\n'
                                        'bar\n'
                                        'end rev-id-1\n'
                                        % (sha1sum,))
        record_1 = (0, len(gz_txt), sha1sum)
        total_txt.append(gz_txt)
        sha1sum = osutils.sha('baz\n').hexdigest()
        gz_txt = self.create_gz_content('version rev-id-2 1 %s\n'
                                        'baz\n'
                                        'end rev-id-2\n'
                                        % (sha1sum,))
        record_2 = (record_1[1], len(gz_txt), sha1sum)
        total_txt.append(gz_txt)
        return total_txt, record_1, record_2

    def test_valid_knit_data(self):
        sha1sum = osutils.sha('foo\nbar\n').hexdigest()
        gz_txt = self.create_gz_content('version rev-id-1 2 %s\n'
                                        'foo\n'
                                        'bar\n'
                                        'end rev-id-1\n'
                                        % (sha1sum,))
        transport = MockTransport([gz_txt])
        access = _KnitKeyAccess(transport, ConstantMapper('filename'))
        knit = KnitVersionedFiles(None, access)
        records = [(('rev-id-1',), (('rev-id-1',), 0, len(gz_txt)))]

        contents = list(knit._read_records_iter(records))
        self.assertEqual([(('rev-id-1',), ['foo\n', 'bar\n'],
            '4e48e2c9a3d2ca8a708cb0cc545700544efb5021')], contents)

        raw_contents = list(knit._read_records_iter_raw(records))
        self.assertEqual([(('rev-id-1',), gz_txt, sha1sum)], raw_contents)

    def test_multiple_records_valid(self):
        total_txt, record_1, record_2 = self.make_multiple_records()
        transport = MockTransport([''.join(total_txt)])
        access = _KnitKeyAccess(transport, ConstantMapper('filename'))
        knit = KnitVersionedFiles(None, access)
        records = [(('rev-id-1',), (('rev-id-1',), record_1[0], record_1[1])),
                   (('rev-id-2',), (('rev-id-2',), record_2[0], record_2[1]))]

        contents = list(knit._read_records_iter(records))
        self.assertEqual([(('rev-id-1',), ['foo\n', 'bar\n'], record_1[2]),
                          (('rev-id-2',), ['baz\n'], record_2[2])],
                         contents)

        raw_contents = list(knit._read_records_iter_raw(records))
        self.assertEqual([(('rev-id-1',), total_txt[0], record_1[2]),
                          (('rev-id-2',), total_txt[1], record_2[2])],
                         raw_contents)

    def test_not_enough_lines(self):
        sha1sum = osutils.sha('foo\n').hexdigest()
        # record says 2 lines, data says 1
        gz_txt = self.create_gz_content('version rev-id-1 2 %s\n'
                                        'foo\n'
                                        'end rev-id-1\n'
                                        % (sha1sum,))
        transport = MockTransport([gz_txt])
        access = _KnitKeyAccess(transport, ConstantMapper('filename'))
        knit = KnitVersionedFiles(None, access)
        records = [(('rev-id-1',), (('rev-id-1',), 0, len(gz_txt)))]
        self.assertRaises(errors.KnitCorrupt, list,
            knit._read_records_iter(records))

        # read_records_iter_raw won't detect that sort of mismatch/corruption
        raw_contents = list(knit._read_records_iter_raw(records))
        self.assertEqual([(('rev-id-1',), gz_txt, sha1sum)], raw_contents)

    def test_too_many_lines(self):
        sha1sum = osutils.sha('foo\nbar\n').hexdigest()
        # record says 1 line, data says 2
        gz_txt = self.create_gz_content('version rev-id-1 1 %s\n'
                                        'foo\n'
                                        'bar\n'
                                        'end rev-id-1\n'
                                        % (sha1sum,))
        transport = MockTransport([gz_txt])
        access = _KnitKeyAccess(transport, ConstantMapper('filename'))
        knit = KnitVersionedFiles(None, access)
        records = [(('rev-id-1',), (('rev-id-1',), 0, len(gz_txt)))]
        self.assertRaises(errors.KnitCorrupt, list,
            knit._read_records_iter(records))

        # read_records_iter_raw won't detect that sort of mismatch/corruption
        raw_contents = list(knit._read_records_iter_raw(records))
        self.assertEqual([(('rev-id-1',), gz_txt, sha1sum)], raw_contents)

    def test_mismatched_version_id(self):
        sha1sum = osutils.sha('foo\nbar\n').hexdigest()
        gz_txt = self.create_gz_content('version rev-id-1 2 %s\n'
                                        'foo\n'
                                        'bar\n'
                                        'end rev-id-1\n'
                                        % (sha1sum,))
        transport = MockTransport([gz_txt])
        access = _KnitKeyAccess(transport, ConstantMapper('filename'))
        knit = KnitVersionedFiles(None, access)
        # We are asking for rev-id-2, but the data is rev-id-1
        records = [(('rev-id-2',), (('rev-id-2',), 0, len(gz_txt)))]
        self.assertRaises(errors.KnitCorrupt, list,
            knit._read_records_iter(records))

        # read_records_iter_raw detects mismatches in the header
        self.assertRaises(errors.KnitCorrupt, list,
            knit._read_records_iter_raw(records))

    def test_uncompressed_data(self):
        sha1sum = osutils.sha('foo\nbar\n').hexdigest()
        txt = ('version rev-id-1 2 %s\n'
               'foo\n'
               'bar\n'
               'end rev-id-1\n'
               % (sha1sum,))
        transport = MockTransport([txt])
        access = _KnitKeyAccess(transport, ConstantMapper('filename'))
        knit = KnitVersionedFiles(None, access)
        records = [(('rev-id-1',), (('rev-id-1',), 0, len(txt)))]

        # We don't have valid gzip data ==> corrupt
        self.assertRaises(errors.KnitCorrupt, list,
            knit._read_records_iter(records))

        # read_records_iter_raw will notice the bad data
        self.assertRaises(errors.KnitCorrupt, list,
            knit._read_records_iter_raw(records))

    def test_corrupted_data(self):
        sha1sum = osutils.sha('foo\nbar\n').hexdigest()
        gz_txt = self.create_gz_content('version rev-id-1 2 %s\n'
                                        'foo\n'
                                        'bar\n'
                                        'end rev-id-1\n'
                                        % (sha1sum,))
        # Change 2 bytes in the middle to \xff
        gz_txt = gz_txt[:10] + '\xff\xff' + gz_txt[12:]
        transport = MockTransport([gz_txt])
        access = _KnitKeyAccess(transport, ConstantMapper('filename'))
        knit = KnitVersionedFiles(None, access)
        records = [(('rev-id-1',), (('rev-id-1',), 0, len(gz_txt)))]
        self.assertRaises(errors.KnitCorrupt, list,
            knit._read_records_iter(records))
        # read_records_iter_raw will barf on bad gz data
        self.assertRaises(errors.KnitCorrupt, list,
            knit._read_records_iter_raw(records))


class LowLevelKnitIndexTests(TestCase):

    def get_knit_index(self, transport, name, mode):
        mapper = ConstantMapper(name)
        from bzrlib._knit_load_data_py import _load_data_py
        self.overrideAttr(knit, '_load_data', _load_data_py)
        allow_writes = lambda: 'w' in mode
        return _KndxIndex(transport, mapper, lambda:None, allow_writes, lambda:True)
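
    # The .kndx format these tests feed in by hand: the file opens with
    # _KndxIndex.HEADER, then one line per record of the form
    # '<version-id> <options> <pos> <size> <parents> :', where each parent is
    # either a numeric reference to an earlier line or a literal
    # '.<version-id>', and the trailing ':' marks the line as complete.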

    def test_create_file(self):
        transport = MockTransport()
        index = self.get_knit_index(transport, "filename", "w")
        index.keys()
        call = transport.calls.pop(0)
        # call[1][1] is a StringIO - we can't test it by simple equality.
        self.assertEqual('put_file_non_atomic', call[0])
        self.assertEqual('filename.kndx', call[1][0])
        # With no history, _KndxIndex writes a new index:
        self.assertEqual(_KndxIndex.HEADER,
            call[1][1].getvalue())
        self.assertEqual({'create_parent_dir': True}, call[2])

    def test_read_utf8_version_id(self):
        unicode_revision_id = u"version-\N{CYRILLIC CAPITAL LETTER A}"
        utf8_revision_id = unicode_revision_id.encode('utf-8')
        transport = MockTransport([
            _KndxIndex.HEADER,
            '%s option 0 1 :' % (utf8_revision_id,)
            ])
        index = self.get_knit_index(transport, "filename", "r")
        # _KndxIndex is a private class, and deals in utf8 revision_ids, not
        # Unicode revision_ids.
        self.assertEqual({(utf8_revision_id,):()},
            index.get_parent_map(index.keys()))
        self.assertFalse((unicode_revision_id,) in index.keys())

    def test_read_utf8_parents(self):
        unicode_revision_id = u"version-\N{CYRILLIC CAPITAL LETTER A}"
        utf8_revision_id = unicode_revision_id.encode('utf-8')
        transport = MockTransport([
            _KndxIndex.HEADER,
            "version option 0 1 .%s :" % (utf8_revision_id,)
            ])
        index = self.get_knit_index(transport, "filename", "r")
        self.assertEqual({("version",):((utf8_revision_id,),)},
            index.get_parent_map(index.keys()))

    def test_read_ignore_corrupted_lines(self):
        transport = MockTransport([
            _KndxIndex.HEADER,
            "corrupted",
            "corrupted options 0 1 .b .c ",
            "version options 0 1 :"
            ])
        index = self.get_knit_index(transport, "filename", "r")
        self.assertEqual(1, len(index.keys()))
        self.assertEqual(set([("version",)]), index.keys())

    def test_read_corrupted_header(self):
        transport = MockTransport(['not a bzr knit index header\n'])
        index = self.get_knit_index(transport, "filename", "r")
        self.assertRaises(KnitHeaderError, index.keys)

    def test_read_duplicate_entries(self):
        transport = MockTransport([
            _KndxIndex.HEADER,
            "parent options 0 1 :",
            "version options1 0 1 0 :",
            "version options2 1 2 .other :",
            "version options3 3 4 0 .other :"
            ])
        index = self.get_knit_index(transport, "filename", "r")
        self.assertEqual(2, len(index.keys()))
        # check that the index used is the last one written. (Specific
        # to KnitIndex style indices.)
        self.assertEqual("1", index._dictionary_compress([("version",)]))
        self.assertEqual((("version",), 3, 4), index.get_position(("version",)))
        self.assertEqual(["options3"], index.get_options(("version",)))
        self.assertEqual({("version",):(("parent",), ("other",))},
            index.get_parent_map([("version",)]))

    def test_read_compressed_parents(self):
        transport = MockTransport([
            _KndxIndex.HEADER,
            "a option 0 1 :",
            "b option 0 1 0 :",
            "c option 0 1 1 0 :",
            ])
        index = self.get_knit_index(transport, "filename", "r")
        self.assertEqual({("b",):(("a",),), ("c",):(("b",), ("a",))},
            index.get_parent_map([("b",), ("c",)]))

    def test_write_utf8_version_id(self):
        unicode_revision_id = u"version-\N{CYRILLIC CAPITAL LETTER A}"
        utf8_revision_id = unicode_revision_id.encode('utf-8')
        transport = MockTransport([
            _KndxIndex.HEADER
            ])
        index = self.get_knit_index(transport, "filename", "r")
        index.add_records([
            ((utf8_revision_id,), ["option"], ((utf8_revision_id,), 0, 1), [])])
        call = transport.calls.pop(0)
        # call[1][1] is a StringIO - we can't test it by simple equality.
        self.assertEqual('put_file_non_atomic', call[0])
        self.assertEqual('filename.kndx', call[1][0])
        # With no history, _KndxIndex writes a new index:
        self.assertEqual(_KndxIndex.HEADER +
            "\n%s option 0 1 :" % (utf8_revision_id,),
            call[1][1].getvalue())
        self.assertEqual({'create_parent_dir': True}, call[2])

    def test_write_utf8_parents(self):
        unicode_revision_id = u"version-\N{CYRILLIC CAPITAL LETTER A}"
        utf8_revision_id = unicode_revision_id.encode('utf-8')
        transport = MockTransport([
            _KndxIndex.HEADER
            ])
        index = self.get_knit_index(transport, "filename", "r")
        index.add_records([
            (("version",), ["option"], (("version",), 0, 1), [(utf8_revision_id,)])])
        call = transport.calls.pop(0)
        # call[1][1] is a StringIO - we can't test it by simple equality.
        self.assertEqual('put_file_non_atomic', call[0])
        self.assertEqual('filename.kndx', call[1][0])
        # With no history, _KndxIndex writes a new index:
        self.assertEqual(_KndxIndex.HEADER +
            "\nversion option 0 1 .%s :" % (utf8_revision_id,),
            call[1][1].getvalue())
        self.assertEqual({'create_parent_dir': True}, call[2])

    def test_keys(self):
        transport = MockTransport([
            _KndxIndex.HEADER
            ])
        index = self.get_knit_index(transport, "filename", "r")

        self.assertEqual(set(), index.keys())

        index.add_records([(("a",), ["option"], (("a",), 0, 1), [])])
        self.assertEqual(set([("a",)]), index.keys())

        index.add_records([(("a",), ["option"], (("a",), 0, 1), [])])
        self.assertEqual(set([("a",)]), index.keys())

        index.add_records([(("b",), ["option"], (("b",), 0, 1), [])])
        self.assertEqual(set([("a",), ("b",)]), index.keys())

    def add_a_b(self, index, random_id=None):
        kwargs = {}
        if random_id is not None:
            kwargs["random_id"] = random_id
        index.add_records([
            (("a",), ["option"], (("a",), 0, 1), [("b",)]),
            (("a",), ["opt"], (("a",), 1, 2), [("c",)]),
            (("b",), ["option"], (("b",), 2, 3), [("a",)])
            ], **kwargs)

    def assertIndexIsAB(self, index):
        self.assertEqual({
            ("a",): (("c",),),
            ("b",): (("a",),),
            },
            index.get_parent_map(index.keys()))
        self.assertEqual((("a",), 1, 2), index.get_position(("a",)))
        self.assertEqual((("b",), 2, 3), index.get_position(("b",)))
        self.assertEqual(["opt"], index.get_options(("a",)))

    def test_add_versions(self):
        transport = MockTransport([
            _KndxIndex.HEADER
            ])
        index = self.get_knit_index(transport, "filename", "r")

        self.add_a_b(index)
        call = transport.calls.pop(0)
        # call[1][1] is a StringIO - we can't test it by simple equality.
        self.assertEqual('put_file_non_atomic', call[0])
        self.assertEqual('filename.kndx', call[1][0])
        # With no history, _KndxIndex writes a new index:
        self.assertEqual(
            _KndxIndex.HEADER +
            "\na option 0 1 .b :"
            "\na opt 1 2 .c :"
            "\nb option 2 3 0 :",
            call[1][1].getvalue())
        self.assertEqual({'create_parent_dir': True}, call[2])
        self.assertIndexIsAB(index)

    def test_add_versions_random_id_is_accepted(self):
        transport = MockTransport([
            _KndxIndex.HEADER
            ])
        index = self.get_knit_index(transport, "filename", "r")
        self.add_a_b(index, random_id=True)

    def test_delay_create_and_add_versions(self):
        transport = MockTransport()

        index = self.get_knit_index(transport, "filename", "w")
        self.assertEqual([], transport.calls)
        self.add_a_b(index)
        #[ {"dir_mode": 0777, "create_parent_dir": True, "mode": "wb"},
        # Two calls: one during which we load the existing index (and when it's
        # missing create it), then a second where we write the contents out.
        self.assertEqual(2, len(transport.calls))
        call = transport.calls.pop(0)
        self.assertEqual('put_file_non_atomic', call[0])
        self.assertEqual('filename.kndx', call[1][0])
        # With no history, _KndxIndex writes a new index:
        self.assertEqual(_KndxIndex.HEADER, call[1][1].getvalue())
        self.assertEqual({'create_parent_dir': True}, call[2])
        call = transport.calls.pop(0)
        # call[1][1] is a StringIO - we can't test it by simple equality.
        self.assertEqual('put_file_non_atomic', call[0])
        self.assertEqual('filename.kndx', call[1][0])
        # With no history, _KndxIndex writes a new index:
        self.assertEqual(
            _KndxIndex.HEADER +
            "\na option 0 1 .b :"
            "\na opt 1 2 .c :"
            "\nb option 2 3 0 :",
            call[1][1].getvalue())
        self.assertEqual({'create_parent_dir': True}, call[2])

    def assertTotalBuildSize(self, size, keys, positions):
        self.assertEqual(size,
                         knit._get_total_build_size(None, keys, positions))
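
    # _get_total_build_size sums the bytes needed to reconstruct each key:
    # a fulltext counts just itself, a line-delta also pulls in its whole
    # compression chain, and records shared between keys are counted once.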

    def test__get_total_build_size(self):
        positions = {
            ('a',): (('fulltext', False), (('a',), 0, 100), None),
            ('b',): (('line-delta', False), (('b',), 100, 21), ('a',)),
            ('c',): (('line-delta', False), (('c',), 121, 35), ('b',)),
            ('d',): (('line-delta', False), (('d',), 156, 12), ('b',)),
            }
        self.assertTotalBuildSize(100, [('a',)], positions)
        self.assertTotalBuildSize(121, [('b',)], positions)
        # c needs both a & b
        self.assertTotalBuildSize(156, [('c',)], positions)
        # we shouldn't count 'b' twice
        self.assertTotalBuildSize(156, [('b',), ('c',)], positions)
        self.assertTotalBuildSize(133, [('d',)], positions)
        self.assertTotalBuildSize(168, [('c',), ('d',)], positions)

    def test_get_position(self):
        transport = MockTransport([
            _KndxIndex.HEADER,
            "a option 0 1 :",
            "b option 1 2 :"
            ])
        index = self.get_knit_index(transport, "filename", "r")

        self.assertEqual((("a",), 0, 1), index.get_position(("a",)))
        self.assertEqual((("b",), 1, 2), index.get_position(("b",)))

    def test_get_method(self):
        transport = MockTransport([
            _KndxIndex.HEADER,
            "a fulltext,unknown 0 1 :",
            "b unknown,line-delta 1 2 :",
            "c unknown 1 2 :"
            ])
        index = self.get_knit_index(transport, "filename", "r")

        self.assertEqual("fulltext", index.get_method("a"))
        self.assertEqual("line-delta", index.get_method("b"))
        self.assertRaises(errors.KnitIndexUnknownMethod, index.get_method, "c")

    def test_get_options(self):
        transport = MockTransport([
            _KndxIndex.HEADER,
            "a opt1 0 1 :",
            "b opt2,opt3 1 2 :"
            ])
        index = self.get_knit_index(transport, "filename", "r")

        self.assertEqual(["opt1"], index.get_options("a"))
        self.assertEqual(["opt2", "opt3"], index.get_options("b"))

    def test_get_parent_map(self):
        transport = MockTransport([
            _KndxIndex.HEADER,
            "a option 0 1 :",
            "b option 1 2 0 .c :",
            "c option 1 2 1 0 .e :"
            ])
        index = self.get_knit_index(transport, "filename", "r")

        self.assertEqual({
            ("a",):(),
            ("b",):(("a",), ("c",)),
            ("c",):(("b",), ("a",), ("e",)),
            }, index.get_parent_map(index.keys()))

    def test_impossible_parent(self):
        """Test we get KnitCorrupt if the parent couldn't possibly exist."""
        transport = MockTransport([
            _KndxIndex.HEADER,
            "a option 0 1 :",
            "b option 0 1 4 :"  # We don't have a 4th record
            ])
        index = self.get_knit_index(transport, 'filename', 'r')
        try:
            self.assertRaises(errors.KnitCorrupt, index.keys)
        except TypeError, e:
            if (str(e) == ('exceptions must be strings, classes, or instances,'
                           ' not exceptions.IndexError')
                and sys.version_info[0:2] >= (2,5)):
                self.knownFailure('Pyrex <0.9.5 fails with TypeError when'
                                  ' raising new style exceptions with python'
                                  ' >=2.5')
            else:
                raise

    def test_corrupted_parent(self):
        transport = MockTransport([
            _KndxIndex.HEADER,
            "a option 0 1 :",
            "b option 0 1 :",
            "c option 0 1 1v :", # Can't have a parent of '1v'
            ])
        index = self.get_knit_index(transport, 'filename', 'r')
        try:
            self.assertRaises(errors.KnitCorrupt, index.keys)
        except TypeError, e:
            if (str(e) == ('exceptions must be strings, classes, or instances,'
                           ' not exceptions.ValueError')
                and sys.version_info[0:2] >= (2,5)):
                self.knownFailure('Pyrex <0.9.5 fails with TypeError when'
                                  ' raising new style exceptions with python'
                                  ' >=2.5')
            else:
                raise

    def test_corrupted_parent_in_list(self):
        transport = MockTransport([
            _KndxIndex.HEADER,
            "a option 0 1 :",
            "b option 0 1 :",
            "c option 0 1 1 v :", # Can't have a parent of 'v'
            ])
        index = self.get_knit_index(transport, 'filename', 'r')
        try:
            self.assertRaises(errors.KnitCorrupt, index.keys)
        except TypeError, e:
            if (str(e) == ('exceptions must be strings, classes, or instances,'
                           ' not exceptions.ValueError')
                and sys.version_info[0:2] >= (2,5)):
                self.knownFailure('Pyrex <0.9.5 fails with TypeError when'
                                  ' raising new style exceptions with python'
                                  ' >=2.5')
            else:
                raise

    def test_invalid_position(self):
        transport = MockTransport([
            _KndxIndex.HEADER,
            "a option 1v 1 :",
            ])
        index = self.get_knit_index(transport, 'filename', 'r')
        try:
            self.assertRaises(errors.KnitCorrupt, index.keys)
        except TypeError, e:
            if (str(e) == ('exceptions must be strings, classes, or instances,'
                           ' not exceptions.ValueError')
                and sys.version_info[0:2] >= (2,5)):
                self.knownFailure('Pyrex <0.9.5 fails with TypeError when'
                                  ' raising new style exceptions with python'
                                  ' >=2.5')
            else:
                raise

    def test_invalid_size(self):
        transport = MockTransport([
            _KndxIndex.HEADER,
            "a option 1 1v :",
            ])
        index = self.get_knit_index(transport, 'filename', 'r')
        try:
            self.assertRaises(errors.KnitCorrupt, index.keys)
        except TypeError, e:
            if (str(e) == ('exceptions must be strings, classes, or instances,'
                           ' not exceptions.ValueError')
                and sys.version_info[0:2] >= (2,5)):
                self.knownFailure('Pyrex <0.9.5 fails with TypeError when'
                                  ' raising new style exceptions with python'
                                  ' >=2.5')
            else:
                raise

    def test_scan_unvalidated_index_not_implemented(self):
        transport = MockTransport()
        index = self.get_knit_index(transport, 'filename', 'r')
        self.assertRaises(
            NotImplementedError, index.scan_unvalidated_index,
            'dummy graph_index')
        self.assertRaises(
            NotImplementedError, index.get_missing_compression_parents)

    def test_short_line(self):
        transport = MockTransport([
            _KndxIndex.HEADER,
            "a option 0 10 :",
            "b option 10 10 0", # This line isn't terminated, ignored
            ])
        index = self.get_knit_index(transport, "filename", "r")
        self.assertEqual(set([('a',)]), index.keys())

    def test_skip_incomplete_record(self):
        # A line with bogus data should just be skipped
        transport = MockTransport([
            _KndxIndex.HEADER,
            "a option 0 10 :",
            "b option 10 10 0", # This line isn't terminated, ignored
            "c option 20 10 0 :", # Properly terminated, and starts with '\n'
            ])
        index = self.get_knit_index(transport, "filename", "r")
        self.assertEqual(set([('a',), ('c',)]), index.keys())

    def test_trailing_characters(self):
        # A line with bogus data should just be skipped
        transport = MockTransport([
            _KndxIndex.HEADER,
            "a option 0 10 :",
            "b option 10 10 0 :a", # This line has extra trailing characters
            "c option 20 10 0 :", # Properly terminated, and starts with '\n'
            ])
        index = self.get_knit_index(transport, "filename", "r")
        self.assertEqual(set([('a',), ('c',)]), index.keys())


class LowLevelKnitIndexTests_c(LowLevelKnitIndexTests):

    _test_needs_features = [compiled_knit_feature]

    def get_knit_index(self, transport, name, mode):
        mapper = ConstantMapper(name)
        from bzrlib._knit_load_data_pyx import _load_data_c
        self.overrideAttr(knit, '_load_data', _load_data_c)
        allow_writes = lambda: mode == 'w'
        return _KndxIndex(transport, mapper, lambda:None,
                          allow_writes, lambda:True)


class Test_KnitAnnotator(TestCaseWithMemoryTransport):

    def make_annotator(self):
        factory = knit.make_pack_factory(True, True, 1)
        vf = factory(self.get_transport())
        return knit._KnitAnnotator(vf)
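
    # _KnitAnnotator keeps a content object cached only while the record
    # still has unexpanded compression children; the tests below call
    # _expand_record directly to verify that bookkeeping.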

    def test__expand_fulltext(self):
        ann = self.make_annotator()
        rev_key = ('rev-id',)
        ann._num_compression_children[rev_key] = 1
        res = ann._expand_record(rev_key, (('parent-id',),), None,
                                 ['line1\n', 'line2\n'], ('fulltext', True))
        # The content object and text lines should be cached appropriately
        self.assertEqual(['line1\n', 'line2'], res)
        content_obj = ann._content_objects[rev_key]
        self.assertEqual(['line1\n', 'line2\n'], content_obj._lines)
        self.assertEqual(res, content_obj.text())
        self.assertEqual(res, ann._text_cache[rev_key])

    def test__expand_delta_comp_parent_not_available(self):
        # Parent isn't available yet, so we return nothing, but queue up this
        # node for later processing
        ann = self.make_annotator()
        rev_key = ('rev-id',)
        parent_key = ('parent-id',)
        record = ['0,1,1\n', 'new-line\n']
        details = ('line-delta', False)
        res = ann._expand_record(rev_key, (parent_key,), parent_key,
                                 record, details)
        self.assertEqual(None, res)
        self.assertTrue(parent_key in ann._pending_deltas)
        pending = ann._pending_deltas[parent_key]
        self.assertEqual(1, len(pending))
        self.assertEqual((rev_key, (parent_key,), record, details), pending[0])

    def test__expand_record_tracks_num_children(self):
        ann = self.make_annotator()
        rev_key = ('rev-id',)
        rev2_key = ('rev2-id',)
        parent_key = ('parent-id',)
        record = ['0,1,1\n', 'new-line\n']
        details = ('line-delta', False)
        ann._num_compression_children[parent_key] = 2
        ann._expand_record(parent_key, (), None, ['line1\n', 'line2\n'],
                           ('fulltext', False))
        res = ann._expand_record(rev_key, (parent_key,), parent_key,
                                 record, details)
        self.assertEqual({parent_key: 1}, ann._num_compression_children)
        # Expanding the second child should remove the content object, and the
        # num_compression_children entry
        res = ann._expand_record(rev2_key, (parent_key,), parent_key,
                                 record, details)
        self.assertFalse(parent_key in ann._content_objects)
        self.assertEqual({}, ann._num_compression_children)
        # We should not cache the content_objects for rev2 and rev, because
        # they do not have compression children of their own.
        self.assertEqual({}, ann._content_objects)

    def test__expand_delta_records_blocks(self):
        ann = self.make_annotator()
        rev_key = ('rev-id',)
        parent_key = ('parent-id',)
        record = ['0,1,1\n', 'new-line\n']
        details = ('line-delta', True)
        ann._num_compression_children[parent_key] = 2
        ann._expand_record(parent_key, (), None,
                           ['line1\n', 'line2\n', 'line3\n'],
                           ('fulltext', False))
        ann._expand_record(rev_key, (parent_key,), parent_key, record, details)
        self.assertEqual({(rev_key, parent_key): [(1, 1, 1), (3, 3, 0)]},
                         ann._matching_blocks)
        rev2_key = ('rev2-id',)
        record = ['0,1,1\n', 'new-line\n']
        details = ('line-delta', False)
        ann._expand_record(rev2_key, (parent_key,), parent_key, record, details)
        self.assertEqual([(1, 1, 2), (3, 3, 0)],
                         ann._matching_blocks[(rev2_key, parent_key)])

    def test__get_parent_ann_uses_matching_blocks(self):
        ann = self.make_annotator()
        rev_key = ('rev-id',)
        parent_key = ('parent-id',)
        parent_ann = [(parent_key,)]*3
        block_key = (rev_key, parent_key)
        ann._annotations_cache[parent_key] = parent_ann
        ann._matching_blocks[block_key] = [(0, 1, 1), (3, 3, 0)]
        # We should not try to access any parent_lines content, because we know
        # we already have the matching blocks
        par_ann, blocks = ann._get_parent_annotations_and_matches(rev_key,
            ['1\n', '2\n', '3\n'], parent_key)
        self.assertEqual(parent_ann, par_ann)
        self.assertEqual([(0, 1, 1), (3, 3, 0)], blocks)
        self.assertEqual({}, ann._matching_blocks)

    def test__process_pending(self):
        ann = self.make_annotator()
        rev_key = ('rev-id',)
        p1_key = ('p1-id',)
        p2_key = ('p2-id',)
        record = ['0,1,1\n', 'new-line\n']
        details = ('line-delta', False)
        p1_record = ['line1\n', 'line2\n']
        ann._num_compression_children[p1_key] = 1
        res = ann._expand_record(rev_key, (p1_key,p2_key), p1_key,
                                 record, details)
        self.assertEqual(None, res)
        # self.assertTrue(p1_key in ann._pending_deltas)
        self.assertEqual({}, ann._pending_annotation)
        # Now insert p1, and we should be able to expand the delta
        res = ann._expand_record(p1_key, (), None, p1_record,
                                 ('fulltext', False))
        self.assertEqual(p1_record, res)
        ann._annotations_cache[p1_key] = [(p1_key,)]*2
        res = ann._process_pending(p1_key)
        self.assertEqual([], res)
        self.assertFalse(p1_key in ann._pending_deltas)
        self.assertTrue(p2_key in ann._pending_annotation)
        self.assertEqual({p2_key: [(rev_key, (p1_key, p2_key))]},
                         ann._pending_annotation)
        # Now fill in parent 2, and pending annotation should be satisfied
        res = ann._expand_record(p2_key, (), None, [], ('fulltext', False))
        ann._annotations_cache[p2_key] = []
        res = ann._process_pending(p2_key)
        self.assertEqual([rev_key], res)
        self.assertEqual({}, ann._pending_annotation)
        self.assertEqual({}, ann._pending_deltas)

    def test_record_delta_removes_basis(self):
        ann = self.make_annotator()
        ann._expand_record(('parent-id',), (), None,
                           ['line1\n', 'line2\n'], ('fulltext', False))
        ann._num_compression_children['parent-id'] = 2

    def test_annotate_special_text(self):
        ann = self.make_annotator()
        vf = ann._vf
        rev1_key = ('rev-1',)
        rev2_key = ('rev-2',)
        rev3_key = ('rev-3',)
        spec_key = ('special:',)
        vf.add_lines(rev1_key, [], ['initial content\n'])
        vf.add_lines(rev2_key, [rev1_key], ['initial content\n',
                                            'common content\n',
                                            'content in 2\n'])
        vf.add_lines(rev3_key, [rev1_key], ['initial content\n',
                                            'common content\n',
                                            'content in 3\n'])
        spec_text = ('initial content\n'
                     'common content\n'
                     'content in 2\n'
                     'content in 3\n')
        ann.add_special_text(spec_key, [rev2_key, rev3_key], spec_text)
        anns, lines = ann.annotate(spec_key)
        self.assertEqual([(rev1_key,),
                          (rev2_key, rev3_key),
                          (rev2_key,),
                          (rev3_key,),
                         ], anns)
        self.assertEqualDiff(spec_text, ''.join(lines))


class KnitTests(TestCaseWithTransport):
    """Class containing knit test helper routines."""

    def make_test_knit(self, annotate=False, name='test'):
        mapper = ConstantMapper(name)
        return make_file_factory(annotate, mapper)(self.get_transport())


class TestBadShaError(KnitTests):
    """Tests for handling of sha errors."""

    def test_sha_exception_has_text(self):
        # having the failed text included in the error allows for recovery.
        source = self.make_test_knit()
        target = self.make_test_knit(name="target")
        if not source._max_delta_chain:
            raise TestNotApplicable(
                "cannot get delta-caused sha failures without deltas.")
        basis = ('basis',)
        broken = ('broken',)
        source.add_lines(basis, (), ['foo\n'])
        source.add_lines(broken, (basis,), ['foo\n', 'bar\n'])
        # Seed target with a bad basis text
        target.add_lines(basis, (), ['gam\n'])
        target.insert_record_stream(
            source.get_record_stream([broken], 'unordered', False))
        err = self.assertRaises(errors.KnitCorrupt,
            target.get_record_stream([broken], 'unordered', True
            ).next().get_bytes_as, 'chunked')
        self.assertEqual(['gam\n', 'bar\n'], err.content)
        # Test for formatting with live data
        self.assertStartsWith(str(err), "Knit ")


class TestKnitIndex(KnitTests):

    def test_add_versions_dictionary_compresses(self):
        """Adding versions to the index should update the lookup dict"""
        knit = self.make_test_knit()
        idx = knit._index
        idx.add_records([(('a-1',), ['fulltext'], (('a-1',), 0, 0), [])])
        self.check_file_contents('test.kndx',
            '# bzr knit index 8\n'
            '\n'
            'a-1 fulltext 0 0 :'
            )
        idx.add_records([
            (('a-2',), ['fulltext'], (('a-2',), 0, 0), [('a-1',)]),
            (('a-3',), ['fulltext'], (('a-3',), 0, 0), [('a-2',)]),
            ])
        self.check_file_contents('test.kndx',
            '# bzr knit index 8\n'
            '\n'
            'a-1 fulltext 0 0 :\n'
            'a-2 fulltext 0 0 0 :\n'
            'a-3 fulltext 0 0 1 :'
            )
        self.assertEqual(set([('a-3',), ('a-1',), ('a-2',)]), idx.keys())
        self.assertEqual({
            ('a-1',): ((('a-1',), 0, 0), None, (), ('fulltext', False)),
            ('a-2',): ((('a-2',), 0, 0), None, (('a-1',),), ('fulltext', False)),
            ('a-3',): ((('a-3',), 0, 0), None, (('a-2',),), ('fulltext', False)),
            }, idx.get_build_details(idx.keys()))
        self.assertEqual({('a-1',):(),
            ('a-2',):(('a-1',),),
            ('a-3',):(('a-2',),),},
            idx.get_parent_map(idx.keys()))

    def test_add_versions_fails_clean(self):
        """If add_versions fails in the middle, it restores a pristine state.

        Any modifications that are made to the index are reset if all versions
        cannot be added.
        """
        # This cheats a little bit by passing in a generator which will
        # raise an exception before the processing finishes
        # Other possibilities would be to have a version with the wrong number
        # of entries, or to make the backing transport unable to write any
        # files.

        knit = self.make_test_knit()
        idx = knit._index
        idx.add_records([(('a-1',), ['fulltext'], (('a-1',), 0, 0), [])])

        class StopEarly(Exception):
            pass

        def generate_failure():
            """Add some entries and then raise an exception"""
            yield (('a-2',), ['fulltext'], (None, 0, 0), ('a-1',))
            yield (('a-3',), ['fulltext'], (None, 0, 0), ('a-2',))
            raise StopEarly()

        # Assert the pre-condition
        def assertA1Only():
            self.assertEqual(set([('a-1',)]), set(idx.keys()))
            self.assertEqual(
                {('a-1',): ((('a-1',), 0, 0), None, (), ('fulltext', False))},
                idx.get_build_details([('a-1',)]))
            self.assertEqual({('a-1',):()}, idx.get_parent_map(idx.keys()))

        assertA1Only()
        self.assertRaises(StopEarly, idx.add_records, generate_failure())
        # And it shouldn't be modified
        assertA1Only()

    def test_knit_index_ignores_empty_files(self):
        # There was a race condition in older bzr, where a ^C at the right time
        # could leave an empty .kndx file, which bzr would later claim was a
        # corrupted file since the header was not present. In reality, the file
        # just wasn't created, so it should be ignored.
        t = transport.get_transport('.')
        t.put_bytes('test.kndx', '')

        knit = self.make_test_knit()

    def test_knit_index_checks_header(self):
        t = transport.get_transport('.')
        t.put_bytes('test.kndx', '# not really a knit header\n\n')
        k = self.make_test_knit()
        self.assertRaises(KnitHeaderError, k.keys)


class TestGraphIndexKnit(KnitTests):
    """Tests for knits using a GraphIndex rather than a KnitIndex."""

    def make_g_index(self, name, ref_lists=0, nodes=[]):
        builder = GraphIndexBuilder(ref_lists)
        for node, references, value in nodes:
            builder.add_node(node, references, value)
        stream = builder.finish()
        trans = self.get_transport()
        size = trans.put_file(name, stream)
        return GraphIndex(trans, name, size)

    def two_graph_index(self, deltas=False, catch_adds=False):
        """Build a two-graph index.

        :param deltas: If true, use underlying indices with two node-ref
            lists and 'parent' set to a delta-compressed against tail.
        """
        # build a complex graph across several indices.
        if deltas:
            # delta compression in the index
            index1 = self.make_g_index('1', 2, [
                (('tip', ), 'N0 100', ([('parent', )], [], )),
                (('tail', ), '', ([], []))])
            index2 = self.make_g_index('2', 2, [
                (('parent', ), ' 100 78', ([('tail', ), ('ghost', )], [('tail', )])),
                (('separate', ), '', ([], []))])
        else:
            # just blob location and graph in the index.
            index1 = self.make_g_index('1', 1, [
                (('tip', ), 'N0 100', ([('parent', )], )),
                (('tail', ), '', ([], ))])
            index2 = self.make_g_index('2', 1, [
                (('parent', ), ' 100 78', ([('tail', ), ('ghost', )], )),
                (('separate', ), '', ([], ))])
        combined_index = CombinedGraphIndex([index1, index2])
        if catch_adds:
            self.combined_index = combined_index
            self.caught_entries = []
            add_callback = self.catch_add
        else:
            add_callback = None
        return _KnitGraphIndex(combined_index, lambda:True, deltas=deltas,
            add_callback=add_callback)
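
    # In the GraphIndex encoding used above, a node's value packs the knit
    # record flags and location: a leading 'N' means no-eol, followed by
    # '<offset> <length>' within the pack; node reference lists carry the
    # parent graph and, with deltas=True, the compression basis.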

    def test_keys(self):
        index = self.two_graph_index()
        self.assertEqual(set([('tail',), ('tip',), ('parent',), ('separate',)]),
            set(index.keys()))

    def test_get_position(self):
        index = self.two_graph_index()
        self.assertEqual((index._graph_index._indices[0], 0, 100),
            index.get_position(('tip',)))
        self.assertEqual((index._graph_index._indices[1], 100, 78),
            index.get_position(('parent',)))

    def test_get_method_deltas(self):
        index = self.two_graph_index(deltas=True)
        self.assertEqual('fulltext', index.get_method(('tip',)))
        self.assertEqual('line-delta', index.get_method(('parent',)))

    def test_get_method_no_deltas(self):
        # check that the parent-history lookup is ignored with deltas=False.
        index = self.two_graph_index(deltas=False)
        self.assertEqual('fulltext', index.get_method(('tip',)))
        self.assertEqual('fulltext', index.get_method(('parent',)))
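        # With deltas disabled every record is stored as a fulltext, so even
        # 'parent' (delta-compressed in the deltas=True layout) reports
        # 'fulltext' here.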

    def test_get_options_deltas(self):
        index = self.two_graph_index(deltas=True)
        self.assertEqual(['fulltext', 'no-eol'], index.get_options(('tip',)))
        self.assertEqual(['line-delta'], index.get_options(('parent',)))

    def test_get_options_no_deltas(self):
        # check that the parent-history lookup is ignored with deltas=False.
        index = self.two_graph_index(deltas=False)
        self.assertEqual(['fulltext', 'no-eol'], index.get_options(('tip',)))
        self.assertEqual(['fulltext'], index.get_options(('parent',)))

    def test_get_parent_map(self):
        index = self.two_graph_index()
        self.assertEqual({('parent',): (('tail',), ('ghost',))},
            index.get_parent_map([('parent',), ('ghost',)]))

    def catch_add(self, entries):
        self.caught_entries.append(entries)

    def test_add_no_callback_errors(self):
        index = self.two_graph_index()
        self.assertRaises(errors.ReadOnlyError, index.add_records,
            [(('new',), 'fulltext,no-eol', (None, 50, 60), ['separate'])])

    def test_add_version_smoke(self):
        index = self.two_graph_index(catch_adds=True)
        index.add_records([(('new',), 'fulltext,no-eol', (None, 50, 60),
            [('separate',)])])
        self.assertEqual([[(('new', ), 'N50 60', ((('separate',),),))]],
            self.caught_entries)

    def test_add_version_delta_not_delta_index(self):
        index = self.two_graph_index(catch_adds=True)
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('new',), 'no-eol,line-delta', (None, 0, 100), [('parent',)])])
        self.assertEqual([], self.caught_entries)

    def test_add_version_same_dup(self):
        index = self.two_graph_index(catch_adds=True)
        # options can be spelt two different ways
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 100), [('parent',)])])
        index.add_records([(('tip',), 'no-eol,fulltext', (None, 0, 100), [('parent',)])])
        # position/length are ignored (because each pack could have fulltext or
        # delta, and be at a different position).
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 50, 100),
            [('parent',)])])
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 1000),
            [('parent',)])])
        # but none of these should have added data:
        self.assertEqual([[], [], [], []], self.caught_entries)

    def test_add_version_different_dup(self):
        index = self.two_graph_index(deltas=True, catch_adds=True)
        # change options
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'line-delta', (None, 0, 100), [('parent',)])])
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'fulltext', (None, 0, 100), [('parent',)])])
        # parents
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'fulltext,no-eol', (None, 0, 100), [])])
        self.assertEqual([], self.caught_entries)
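        # (Re-adding a key with identical details is a no-op, as exercised by
        # test_add_version_same_dup above; re-adding it with different options
        # or parents is reported as corruption.)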

    def test_add_versions_nodeltas(self):
        index = self.two_graph_index(catch_adds=True)
        index.add_records([
                (('new',), 'fulltext,no-eol', (None, 50, 60), [('separate',)]),
                (('new2',), 'fulltext', (None, 0, 6), [('new',)]),
                ])
        self.assertEqual([(('new', ), 'N50 60', ((('separate',),),)),
            (('new2', ), ' 0 6', ((('new',),),))],
            sorted(self.caught_entries[0]))
        self.assertEqual(1, len(self.caught_entries))

    def test_add_versions_deltas(self):
        index = self.two_graph_index(deltas=True, catch_adds=True)
        index.add_records([
                (('new',), 'fulltext,no-eol', (None, 50, 60), [('separate',)]),
                (('new2',), 'line-delta', (None, 0, 6), [('new',)]),
                ])
        self.assertEqual([(('new', ), 'N50 60', ((('separate',),), ())),
            (('new2', ), ' 0 6', ((('new',),), (('new',),), ))],
            sorted(self.caught_entries[0]))
        self.assertEqual(1, len(self.caught_entries))

    def test_add_versions_delta_not_delta_index(self):
        index = self.two_graph_index(catch_adds=True)
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('new',), 'no-eol,line-delta', (None, 0, 100), [('parent',)])])
        self.assertEqual([], self.caught_entries)

    def test_add_versions_random_id_accepted(self):
        index = self.two_graph_index(catch_adds=True)
        index.add_records([], random_id=True)

    def test_add_versions_same_dup(self):
        index = self.two_graph_index(catch_adds=True)
        # options can be spelt two different ways
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 100),
            [('parent',)])])
        index.add_records([(('tip',), 'no-eol,fulltext', (None, 0, 100),
            [('parent',)])])
        # position/length are ignored (because each pack could have fulltext or
        # delta, and be at a different position).
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 50, 100),
            [('parent',)])])
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 1000),
            [('parent',)])])
        # but none of these should have added data.
        self.assertEqual([[], [], [], []], self.caught_entries)

    def test_add_versions_different_dup(self):
        index = self.two_graph_index(deltas=True, catch_adds=True)
        # change options
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'line-delta', (None, 0, 100), [('parent',)])])
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'fulltext', (None, 0, 100), [('parent',)])])
        # parents
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'fulltext,no-eol', (None, 0, 100), [])])
        # change options in the second record
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'fulltext,no-eol', (None, 0, 100), [('parent',)]),
             (('tip',), 'line-delta', (None, 0, 100), [('parent',)])])
        self.assertEqual([], self.caught_entries)

    def make_g_index_missing_compression_parent(self):
        graph_index = self.make_g_index('missing_comp', 2,
            [(('tip', ), ' 100 78',
              ([('missing-parent', ), ('ghost', )], [('missing-parent', )]))])
        return graph_index

    def make_g_index_missing_parent(self):
        graph_index = self.make_g_index('missing_parent', 2,
            [(('parent', ), ' 100 78', ([], [])),
             (('tip', ), ' 100 78',
              ([('parent', ), ('missing-parent', )], [('parent', )])),
            ])
        return graph_index

    def make_g_index_no_external_refs(self):
        graph_index = self.make_g_index('no_external_refs', 2,
            [(('rev', ), ' 100 78',
              ([('parent', ), ('ghost', )], []))])
        return graph_index

    def test_add_good_unvalidated_index(self):
        unvalidated = self.make_g_index_no_external_refs()
        combined = CombinedGraphIndex([unvalidated])
        index = _KnitGraphIndex(combined, lambda: True, deltas=True)
        index.scan_unvalidated_index(unvalidated)
        self.assertEqual(frozenset(), index.get_missing_compression_parents())

    def test_add_missing_compression_parent_unvalidated_index(self):
        unvalidated = self.make_g_index_missing_compression_parent()
        combined = CombinedGraphIndex([unvalidated])
        index = _KnitGraphIndex(combined, lambda: True, deltas=True)
        index.scan_unvalidated_index(unvalidated)
        # This also checks that it's only the compression parent that is
        # examined, otherwise 'ghost' would also be reported as a missing
        # parent.
        self.assertEqual(
            frozenset([('missing-parent',)]),
            index.get_missing_compression_parents())

    def test_add_missing_noncompression_parent_unvalidated_index(self):
        unvalidated = self.make_g_index_missing_parent()
        combined = CombinedGraphIndex([unvalidated])
        index = _KnitGraphIndex(combined, lambda: True, deltas=True,
            track_external_parent_refs=True)
        index.scan_unvalidated_index(unvalidated)
        self.assertEqual(
            frozenset([('missing-parent',)]), index.get_missing_parents())
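        # Note the distinction exercised by the last two tests:
        # get_missing_compression_parents() reports absent delta bases, while
        # get_missing_parents() (only tracked when
        # track_external_parent_refs=True) reports absent graph parents.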

    def test_track_external_parent_refs(self):
        g_index = self.make_g_index('empty', 2, [])
        combined = CombinedGraphIndex([g_index])
        index = _KnitGraphIndex(combined, lambda: True, deltas=True,
            add_callback=self.catch_add, track_external_parent_refs=True)
        self.caught_entries = []
        index.add_records([
            (('new-key',), 'fulltext,no-eol', (None, 50, 60),
             [('parent-1',), ('parent-2',)])])
        self.assertEqual(
            frozenset([('parent-1',), ('parent-2',)]),
            index.get_missing_parents())

    def test_add_unvalidated_index_with_present_external_references(self):
        index = self.two_graph_index(deltas=True)
        # Ugly hack to get at one of the underlying GraphIndex objects that
        # two_graph_index built.
        unvalidated = index._graph_index._indices[1]
        # 'parent' is an external ref of _indices[1] (unvalidated), but is
        # present in _indices[0].
        index.scan_unvalidated_index(unvalidated)
        self.assertEqual(frozenset(), index.get_missing_compression_parents())

    def make_new_missing_parent_g_index(self, name):
        missing_parent = name + '-missing-parent'
        graph_index = self.make_g_index(name, 2,
            [((name + 'tip', ), ' 100 78',
              ([(missing_parent, ), ('ghost', )], [(missing_parent, )]))])
        return graph_index

    def test_add_multiple_unvalidated_indices_with_missing_parents(self):
        g_index_1 = self.make_new_missing_parent_g_index('one')
        g_index_2 = self.make_new_missing_parent_g_index('two')
        combined = CombinedGraphIndex([g_index_1, g_index_2])
        index = _KnitGraphIndex(combined, lambda: True, deltas=True)
        index.scan_unvalidated_index(g_index_1)
        index.scan_unvalidated_index(g_index_2)
        self.assertEqual(
            frozenset([('one-missing-parent',), ('two-missing-parent',)]),
            index.get_missing_compression_parents())

    def test_add_multiple_unvalidated_indices_with_mutual_dependencies(self):
        graph_index_a = self.make_g_index('one', 2,
            [(('parent-one', ), ' 100 78', ([('non-compression-parent',)], [])),
             (('child-of-two', ), ' 100 78',
              ([('parent-two',)], [('parent-two',)]))])
        graph_index_b = self.make_g_index('two', 2,
            [(('parent-two', ), ' 100 78', ([('non-compression-parent',)], [])),
             (('child-of-one', ), ' 100 78',
              ([('parent-one',)], [('parent-one',)]))])
        combined = CombinedGraphIndex([graph_index_a, graph_index_b])
        index = _KnitGraphIndex(combined, lambda: True, deltas=True)
        index.scan_unvalidated_index(graph_index_a)
        index.scan_unvalidated_index(graph_index_b)
        self.assertEqual(
            frozenset([]), index.get_missing_compression_parents())
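        # Each index supplies the compression parent the other needs, so once
        # both have been scanned nothing is reported missing.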


class TestNoParentsGraphIndexKnit(KnitTests):
    """Tests for knits using _KnitGraphIndex with no parents."""

    def make_g_index(self, name, ref_lists=0, nodes=[]):
        builder = GraphIndexBuilder(ref_lists)
        for node, value in nodes:
            builder.add_node(node, value)
        stream = builder.finish()
        trans = self.get_transport()
        size = trans.put_file(name, stream)
        return GraphIndex(trans, name, size)

    def test_add_good_unvalidated_index(self):
        unvalidated = self.make_g_index('unvalidated')
        combined = CombinedGraphIndex([unvalidated])
        index = _KnitGraphIndex(combined, lambda: True, parents=False)
        index.scan_unvalidated_index(unvalidated)
        self.assertEqual(frozenset(),
            index.get_missing_compression_parents())

    def test_parents_deltas_incompatible(self):
        index = CombinedGraphIndex([])
        self.assertRaises(errors.KnitError, _KnitGraphIndex, lambda:True,
            index, deltas=True, parents=False)

    def two_graph_index(self, catch_adds=False):
        """Build a two-graph index.

        :param catch_adds: If true, record entries routed to the add_callback
            in self.caught_entries instead of writing them anywhere.
        """
        # put several versions in the index.
        index1 = self.make_g_index('1', 0, [
            (('tip', ), 'N0 100'),
            (('tail', ), '')])
        index2 = self.make_g_index('2', 0, [
            (('parent', ), ' 100 78'),
            (('separate', ), '')])
        combined_index = CombinedGraphIndex([index1, index2])
        if catch_adds:
            self.combined_index = combined_index
            self.caught_entries = []
            add_callback = self.catch_add
        else:
            add_callback = None
        return _KnitGraphIndex(combined_index, lambda:True, parents=False,
            add_callback=add_callback)

    def test_keys(self):
        index = self.two_graph_index()
        self.assertEqual(set([('tail',), ('tip',), ('parent',), ('separate',)]),
            set(index.keys()))

    def test_get_position(self):
        index = self.two_graph_index()
        self.assertEqual((index._graph_index._indices[0], 0, 100),
            index.get_position(('tip',)))
        self.assertEqual((index._graph_index._indices[1], 100, 78),
            index.get_position(('parent',)))

    def test_get_method(self):
        index = self.two_graph_index()
        self.assertEqual('fulltext', index.get_method(('tip',)))
        self.assertEqual('fulltext', index.get_method(('parent',)))

    def test_get_options(self):
        index = self.two_graph_index()
        self.assertEqual(['fulltext', 'no-eol'], index.get_options(('tip',)))
        self.assertEqual(['fulltext'], index.get_options(('parent',)))

    def test_get_parent_map(self):
        index = self.two_graph_index()
        self.assertEqual({('parent',): None},
            index.get_parent_map([('parent',), ('ghost',)]))

    def catch_add(self, entries):
        self.caught_entries.append(entries)

    def test_add_no_callback_errors(self):
        index = self.two_graph_index()
        self.assertRaises(errors.ReadOnlyError, index.add_records,
            [(('new',), 'fulltext,no-eol', (None, 50, 60), [('separate',)])])

    def test_add_version_smoke(self):
        index = self.two_graph_index(catch_adds=True)
        index.add_records([(('new',), 'fulltext,no-eol', (None, 50, 60), [])])
        self.assertEqual([[(('new', ), 'N50 60')]],
            self.caught_entries)

    def test_add_version_delta_not_delta_index(self):
        index = self.two_graph_index(catch_adds=True)
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('new',), 'no-eol,line-delta', (None, 0, 100), [])])
        self.assertEqual([], self.caught_entries)

    def test_add_version_same_dup(self):
        index = self.two_graph_index(catch_adds=True)
        # options can be spelt two different ways
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 100), [])])
        index.add_records([(('tip',), 'no-eol,fulltext', (None, 0, 100), [])])
        # position/length are ignored (because each pack could have fulltext or
        # delta, and be at a different position).
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 50, 100), [])])
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 1000), [])])
        # but none of these should have added data.
        self.assertEqual([[], [], [], []], self.caught_entries)

    def test_add_version_different_dup(self):
        index = self.two_graph_index(catch_adds=True)
        # change options
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'no-eol,line-delta', (None, 0, 100), [])])
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'line-delta,no-eol', (None, 0, 100), [])])
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'fulltext', (None, 0, 100), [])])
        # parents
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'fulltext,no-eol', (None, 0, 100), [('parent',)])])
        self.assertEqual([], self.caught_entries)

    def test_add_versions(self):
        index = self.two_graph_index(catch_adds=True)
        index.add_records([
                (('new',), 'fulltext,no-eol', (None, 50, 60), []),
                (('new2',), 'fulltext', (None, 0, 6), []),
                ])
        self.assertEqual([(('new', ), 'N50 60'), (('new2', ), ' 0 6')],
            sorted(self.caught_entries[0]))
        self.assertEqual(1, len(self.caught_entries))

    def test_add_versions_delta_not_delta_index(self):
        index = self.two_graph_index(catch_adds=True)
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('new',), 'no-eol,line-delta', (None, 0, 100), [('parent',)])])
        self.assertEqual([], self.caught_entries)

    def test_add_versions_parents_not_parents_index(self):
        index = self.two_graph_index(catch_adds=True)
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('new',), 'no-eol,fulltext', (None, 0, 100), [('parent',)])])
        self.assertEqual([], self.caught_entries)

    def test_add_versions_random_id_accepted(self):
        index = self.two_graph_index(catch_adds=True)
        index.add_records([], random_id=True)

    def test_add_versions_same_dup(self):
        index = self.two_graph_index(catch_adds=True)
        # options can be spelt two different ways
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 100), [])])
        index.add_records([(('tip',), 'no-eol,fulltext', (None, 0, 100), [])])
        # position/length are ignored (because each pack could have fulltext or
        # delta, and be at a different position).
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 50, 100), [])])
        index.add_records([(('tip',), 'fulltext,no-eol', (None, 0, 1000), [])])
        # but none of these should have added data.
        self.assertEqual([[], [], [], []], self.caught_entries)

    def test_add_versions_different_dup(self):
        index = self.two_graph_index(catch_adds=True)
        # change options
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'no-eol,line-delta', (None, 0, 100), [])])
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'line-delta,no-eol', (None, 0, 100), [])])
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'fulltext', (None, 0, 100), [])])
        # parents
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'fulltext,no-eol', (None, 0, 100), [('parent',)])])
        # change options in the second record
        self.assertRaises(errors.KnitCorrupt, index.add_records,
            [(('tip',), 'fulltext,no-eol', (None, 0, 100), []),
             (('tip',), 'no-eol,line-delta', (None, 0, 100), [])])
        self.assertEqual([], self.caught_entries)


class TestKnitVersionedFiles(KnitTests):

    def assertGroupKeysForIo(self, exp_groups, keys, non_local_keys,
                             positions, _min_buffer_size=None):
        kvf = self.make_test_knit()
        if _min_buffer_size is None:
            _min_buffer_size = knit._STREAM_MIN_BUFFER_SIZE
        self.assertEqual(exp_groups, kvf._group_keys_for_io(keys,
                                        non_local_keys, positions,
                                        _min_buffer_size=_min_buffer_size))

    def assertSplitByPrefix(self, expected_map, expected_prefix_order,
                            keys):
        split, prefix_order = KnitVersionedFiles._split_by_prefix(keys)
        self.assertEqual(expected_map, split)
        self.assertEqual(expected_prefix_order, prefix_order)

    def test__group_keys_for_io(self):
        ft_detail = ('fulltext', False)
        ld_detail = ('line-delta', False)
        f_a = ('f', 'a')
        f_b = ('f', 'b')
        f_c = ('f', 'c')
        g_a = ('g', 'a')
        g_b = ('g', 'b')
        g_c = ('g', 'c')
        positions = {
            f_a: (ft_detail, (f_a, 0, 100), None),
            f_b: (ld_detail, (f_b, 100, 21), f_a),
            f_c: (ld_detail, (f_c, 180, 15), f_b),
            g_a: (ft_detail, (g_a, 121, 35), None),
            g_b: (ld_detail, (g_b, 156, 12), g_a),
            g_c: (ld_detail, (g_c, 195, 13), g_a),
            }
        self.assertGroupKeysForIo([([f_a], set())],
                                  [f_a], [], positions)
        self.assertGroupKeysForIo([([f_a], set([f_a]))],
                                  [f_a], [f_a], positions)
        self.assertGroupKeysForIo([([f_a, f_b], set([]))],
                                  [f_a, f_b], [], positions)
        self.assertGroupKeysForIo([([f_a, f_b], set([f_b]))],
                                  [f_a, f_b], [f_b], positions)
        self.assertGroupKeysForIo([([f_a, f_b, g_a, g_b], set())],
                                  [f_a, g_a, f_b, g_b], [], positions)
        self.assertGroupKeysForIo([([f_a, f_b, g_a, g_b], set())],
                                  [f_a, g_a, f_b, g_b], [], positions,
                                  _min_buffer_size=150)
        self.assertGroupKeysForIo([([f_a, f_b], set()), ([g_a, g_b], set())],
                                  [f_a, g_a, f_b, g_b], [], positions,
                                  _min_buffer_size=100)
        self.assertGroupKeysForIo([([f_c], set()), ([g_b], set())],
                                  [f_c, g_b], [], positions,
                                  _min_buffer_size=125)
        self.assertGroupKeysForIo([([g_b, f_c], set())],
                                  [g_b, f_c], [], positions,
                                  _min_buffer_size=125)
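        # The grouping heuristic, as exercised above: with
        # _min_buffer_size=100 the interleaved 'f' and 'g' records are split
        # into two I/O groups, while at 150 they are buffered together; and
        # requesting [g_b, f_c] in ascending-offset order lets both fit in a
        # single group even at a buffer size where [f_c, g_b] splits.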

    def test__split_by_prefix(self):
        self.assertSplitByPrefix({'f': [('f', 'a'), ('f', 'b')],
                                  'g': [('g', 'b'), ('g', 'a')],
                                 }, ['f', 'g'],
                                 [('f', 'a'), ('g', 'b'),
                                  ('g', 'a'), ('f', 'b')])

        self.assertSplitByPrefix({'f': [('f', 'a'), ('f', 'b')],
                                  'g': [('g', 'b'), ('g', 'a')],
                                 }, ['f', 'g'],
                                 [('f', 'a'), ('f', 'b'),
                                  ('g', 'b'), ('g', 'a')])

        self.assertSplitByPrefix({'f': [('f', 'a'), ('f', 'b')],
                                  'g': [('g', 'b'), ('g', 'a')],
                                 }, ['f', 'g'],
                                 [('f', 'a'), ('f', 'b'),
                                  ('g', 'b'), ('g', 'a')])

        self.assertSplitByPrefix({'f': [('f', 'a'), ('f', 'b')],
                                  'g': [('g', 'b'), ('g', 'a')],
                                  '': [('a',), ('b',)],
                                 }, ['f', 'g', ''],
                                 [('f', 'a'), ('g', 'b'),
                                  ('a',), ('b',),
                                  ('g', 'a'), ('f', 'b')])
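        # _split_by_prefix buckets keys by their first element (with '' for
        # length-one keys, as the last case shows), and prefix_order records
        # each prefix once, in order of first appearance.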


class TestStacking(KnitTests):

    def get_basis_and_test_knit(self):
        basis = self.make_test_knit(name='basis')
        basis = RecordingVersionedFilesDecorator(basis)
        test = self.make_test_knit(name='test')
        test.add_fallback_versioned_files(basis)
        return basis, test
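
    # The basis knit is wrapped in RecordingVersionedFilesDecorator so each
    # test below can assert exactly which calls fell through to the fallback
    # (and can reset the log with basis.calls = [] between stages).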

    def test_add_fallback_versioned_files(self):
        basis = self.make_test_knit(name='basis')
        test = self.make_test_knit(name='test')
        # It must not error; other tests check that the fallback is referred
        # to when accessing data.
        test.add_fallback_versioned_files(basis)

    def test_add_lines(self):
        # lines added to the test are not added to the basis
        basis, test = self.get_basis_and_test_knit()
        key = ('foo',)
        key_basis = ('bar',)
        key_cross_border = ('quux',)
        key_delta = ('zaphod',)
        test.add_lines(key, (), ['foo\n'])
        self.assertEqual({}, basis.get_parent_map([key]))
        # lines added to the test that reference across the stack do a
        # fulltext.
        basis.add_lines(key_basis, (), ['foo\n'])
        basis.calls = []
        test.add_lines(key_cross_border, (key_basis,), ['foo\n'])
        self.assertEqual('fulltext', test._index.get_method(key_cross_border))
        # we don't even need to look at the basis to see that this should be
        # stored as a fulltext
        self.assertEqual([], basis.calls)
        # Subsequent adds do delta.
        basis.calls = []
        test.add_lines(key_delta, (key_cross_border,), ['foo\n'])
        self.assertEqual('line-delta', test._index.get_method(key_delta))
        self.assertEqual([], basis.calls)

    def test_annotate(self):
        # annotations from the test knit are answered without asking the basis
        basis, test = self.get_basis_and_test_knit()
        key = ('foo',)
        key_basis = ('bar',)
        key_missing = ('missing',)
        test.add_lines(key, (), ['foo\n'])
        details = test.annotate(key)
        self.assertEqual([(key, 'foo\n')], details)
        self.assertEqual([], basis.calls)
        # But texts that are not in the test knit are looked for in the basis
        # directly.
        basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
        basis.calls = []
        details = test.annotate(key_basis)
        self.assertEqual([(key_basis, 'foo\n'), (key_basis, 'bar\n')], details)
        # Not optimised to date:
        # self.assertEqual([("annotate", key_basis)], basis.calls)
        self.assertEqual([('get_parent_map', set([key_basis])),
            ('get_parent_map', set([key_basis])),
            ('get_record_stream', [key_basis], 'topological', True)],
            basis.calls)

    def test_check(self):
        # At the moment checking a stacked knit does implicitly check the
        # fallback vfs.
        basis, test = self.get_basis_and_test_knit()
        test.check()

    def test_get_parent_map(self):
        # parents in the test knit are answered without asking the basis
        basis, test = self.get_basis_and_test_knit()
        key = ('foo',)
        key_basis = ('bar',)
        key_missing = ('missing',)
        test.add_lines(key, (), [])
        parent_map = test.get_parent_map([key])
        self.assertEqual({key: ()}, parent_map)
        self.assertEqual([], basis.calls)
        # But parents that are not in the test knit are looked for in the basis
        basis.add_lines(key_basis, (), [])
        basis.calls = []
        parent_map = test.get_parent_map([key, key_basis, key_missing])
        self.assertEqual({key: (),
            key_basis: ()}, parent_map)
        self.assertEqual([("get_parent_map", set([key_basis, key_missing]))],
            basis.calls)

    def test_get_record_stream_unordered_fulltexts(self):
        # records from the test knit are answered without asking the basis:
        basis, test = self.get_basis_and_test_knit()
        key = ('foo',)
        key_basis = ('bar',)
        key_missing = ('missing',)
        test.add_lines(key, (), ['foo\n'])
        records = list(test.get_record_stream([key], 'unordered', True))
        self.assertEqual(1, len(records))
        self.assertEqual([], basis.calls)
        # Missing (from test knit) objects are retrieved from the basis:
        basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
        basis.calls = []
        records = list(test.get_record_stream([key_basis, key_missing],
            'unordered', True))
        self.assertEqual(2, len(records))
        calls = list(basis.calls)
        for record in records:
            self.assertSubset([record.key], (key_basis, key_missing))
            if record.key == key_missing:
                self.assertIsInstance(record, AbsentContentFactory)
            else:
                reference = list(basis.get_record_stream([key_basis],
                    'unordered', True))[0]
                self.assertEqual(reference.key, record.key)
                self.assertEqual(reference.sha1, record.sha1)
                self.assertEqual(reference.storage_kind, record.storage_kind)
                self.assertEqual(reference.get_bytes_as(reference.storage_kind),
                    record.get_bytes_as(record.storage_kind))
                self.assertEqual(reference.get_bytes_as('fulltext'),
                    record.get_bytes_as('fulltext'))
        # It's not strictly minimal, but it seems reasonable for now for it to
        # ask which fallbacks have which parents.
        self.assertEqual([
            ("get_parent_map", set([key_basis, key_missing])),
            ("get_record_stream", [key_basis], 'unordered', True)],
            calls)

    def test_get_record_stream_ordered_fulltexts(self):
        # ordering is preserved down into the fallback store.
        basis, test = self.get_basis_and_test_knit()
        key = ('foo',)
        key_basis = ('bar',)
        key_basis_2 = ('quux',)
        key_missing = ('missing',)
        test.add_lines(key, (key_basis,), ['foo\n'])
        # Missing (from test knit) objects are retrieved from the basis:
        basis.add_lines(key_basis, (key_basis_2,), ['foo\n', 'bar\n'])
        basis.add_lines(key_basis_2, (), ['quux\n'])
        basis.calls = []
        # ask for the keys in non-topological order
        records = list(test.get_record_stream(
            [key, key_basis, key_missing, key_basis_2], 'topological', True))
        self.assertEqual(4, len(records))
        results = []
        for record in records:
            self.assertSubset([record.key],
                (key_basis, key_missing, key_basis_2, key))
            if record.key == key_missing:
                self.assertIsInstance(record, AbsentContentFactory)
            else:
                results.append((record.key, record.sha1, record.storage_kind,
                    record.get_bytes_as('fulltext')))
        calls = list(basis.calls)
        order = [record[0] for record in results]
        self.assertEqual([key_basis_2, key_basis, key], order)
        for result in results:
            if result[0] == key:
                source = test
            else:
                source = basis
            record = source.get_record_stream([result[0]], 'unordered',
                True).next()
            self.assertEqual(record.key, result[0])
            self.assertEqual(record.sha1, result[1])
            # We used to check that the storage kind matched, but actually it
            # depends on whether it was sourced from the basis, or in a single
            # group, because asking for full texts returns proxy objects to a
            # _ContentMapGenerator object; so checking the kind is unneeded.
            self.assertEqual(record.get_bytes_as('fulltext'), result[3])
        # It's not strictly minimal, but it seems reasonable for now for it to
        # ask which fallbacks have which parents.
        self.assertEqual([
            ("get_parent_map", set([key_basis, key_basis_2, key_missing])),
            # topological is requested from the fallback, because that is what
            # was requested at the top level.
            ("get_record_stream", [key_basis_2, key_basis], 'topological', True)],
            calls)

    def test_get_record_stream_unordered_deltas(self):
        # records from the test knit are answered without asking the basis:
        basis, test = self.get_basis_and_test_knit()
        key = ('foo',)
        key_basis = ('bar',)
        key_missing = ('missing',)
        test.add_lines(key, (), ['foo\n'])
        records = list(test.get_record_stream([key], 'unordered', False))
        self.assertEqual(1, len(records))
        self.assertEqual([], basis.calls)
        # Missing (from test knit) objects are retrieved from the basis:
        basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
        basis.calls = []
        records = list(test.get_record_stream([key_basis, key_missing],
            'unordered', False))
        self.assertEqual(2, len(records))
        calls = list(basis.calls)
        for record in records:
            self.assertSubset([record.key], (key_basis, key_missing))
            if record.key == key_missing:
                self.assertIsInstance(record, AbsentContentFactory)
            else:
                reference = list(basis.get_record_stream([key_basis],
                    'unordered', False))[0]
                self.assertEqual(reference.key, record.key)
                self.assertEqual(reference.sha1, record.sha1)
                self.assertEqual(reference.storage_kind, record.storage_kind)
                self.assertEqual(reference.get_bytes_as(reference.storage_kind),
                    record.get_bytes_as(record.storage_kind))
        # It's not strictly minimal, but it seems reasonable for now for it to
        # ask which fallbacks have which parents.
        self.assertEqual([
            ("get_parent_map", set([key_basis, key_missing])),
            ("get_record_stream", [key_basis], 'unordered', False)],
            calls)

    def test_get_record_stream_ordered_deltas(self):
        # ordering is preserved down into the fallback store.
        basis, test = self.get_basis_and_test_knit()
        key = ('foo',)
        key_basis = ('bar',)
        key_basis_2 = ('quux',)
        key_missing = ('missing',)
        test.add_lines(key, (key_basis,), ['foo\n'])
        # Missing (from test knit) objects are retrieved from the basis:
        basis.add_lines(key_basis, (key_basis_2,), ['foo\n', 'bar\n'])
        basis.add_lines(key_basis_2, (), ['quux\n'])
        basis.calls = []
        # ask for the keys in non-topological order
        records = list(test.get_record_stream(
            [key, key_basis, key_missing, key_basis_2], 'topological', False))
        self.assertEqual(4, len(records))
        results = []
        for record in records:
            self.assertSubset([record.key],
                (key_basis, key_missing, key_basis_2, key))
            if record.key == key_missing:
                self.assertIsInstance(record, AbsentContentFactory)
            else:
                results.append((record.key, record.sha1, record.storage_kind,
                    record.get_bytes_as(record.storage_kind)))
        calls = list(basis.calls)
        order = [record[0] for record in results]
        self.assertEqual([key_basis_2, key_basis, key], order)
        for result in results:
            if result[0] == key:
                source = test
            else:
                source = basis
            record = source.get_record_stream([result[0]], 'unordered',
                False).next()
            self.assertEqual(record.key, result[0])
            self.assertEqual(record.sha1, result[1])
            self.assertEqual(record.storage_kind, result[2])
            self.assertEqual(record.get_bytes_as(record.storage_kind), result[3])
        # It's not strictly minimal, but it seems reasonable for now for it to
        # ask which fallbacks have which parents.
        self.assertEqual([
            ("get_parent_map", set([key_basis, key_basis_2, key_missing])),
            ("get_record_stream", [key_basis_2, key_basis], 'topological', False)],
            calls)

    def test_get_sha1s(self):
        # sha1s in the test knit are answered without asking the basis
        basis, test = self.get_basis_and_test_knit()
        key = ('foo',)
        key_basis = ('bar',)
        key_missing = ('missing',)
        test.add_lines(key, (), ['foo\n'])
        key_sha1sum = osutils.sha('foo\n').hexdigest()
        sha1s = test.get_sha1s([key])
        self.assertEqual({key: key_sha1sum}, sha1s)
        self.assertEqual([], basis.calls)
        # But texts that are not in the test knit are looked for in the basis
        # directly (rather than via text reconstruction) so that remote servers
        # etc don't have to answer with full content.
        basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
        basis_sha1sum = osutils.sha('foo\nbar\n').hexdigest()
        basis.calls = []
        sha1s = test.get_sha1s([key, key_missing, key_basis])
        self.assertEqual({key: key_sha1sum,
            key_basis: basis_sha1sum}, sha1s)
        self.assertEqual([("get_sha1s", set([key_basis, key_missing]))],
            basis.calls)

    def test_insert_record_stream(self):
        # records are inserted as normal; insert_record_stream builds on
        # add_lines, so a smoke test should be all that's needed:
        key = ('foo',)
        key_basis = ('bar',)
        key_delta = ('zaphod',)
        basis, test = self.get_basis_and_test_knit()
        source = self.make_test_knit(name='source')
        basis.add_lines(key_basis, (), ['foo\n'])
        basis.calls = []
        source.add_lines(key_basis, (), ['foo\n'])
        source.add_lines(key_delta, (key_basis,), ['bar\n'])
        stream = source.get_record_stream([key_delta], 'unordered', False)
        test.insert_record_stream(stream)
        # XXX: this does somewhat too many calls in working out whether it
        # has to recreate the full text.
        self.assertEqual([("get_parent_map", set([key_basis])),
            ('get_parent_map', set([key_basis])),
            ('get_record_stream', [key_basis], 'unordered', True)],
            basis.calls)
        self.assertEqual({key_delta: (key_basis,)},
            test.get_parent_map([key_delta]))
        self.assertEqual('bar\n', test.get_record_stream([key_delta],
            'unordered', True).next().get_bytes_as('fulltext'))

    def test_iter_lines_added_or_present_in_keys(self):
        # Lines from the basis are returned, and lines for a given key are
        # only returned once.
        key1 = ('foo1',)
        key2 = ('foo2',)
        # all sources are asked for keys:
        basis, test = self.get_basis_and_test_knit()
        basis.add_lines(key1, (), ["foo"])
        basis.calls = []
        lines = list(test.iter_lines_added_or_present_in_keys([key1]))
        self.assertEqual([("foo\n", key1)], lines)
        self.assertEqual([("iter_lines_added_or_present_in_keys", set([key1]))],
            basis.calls)
        # keys in both are not duplicated:
        test.add_lines(key2, (), ["bar\n"])
        basis.add_lines(key2, (), ["bar\n"])
        basis.calls = []
        lines = list(test.iter_lines_added_or_present_in_keys([key2]))
        self.assertEqual([("bar\n", key2)], lines)
        self.assertEqual([], basis.calls)

    def test_keys(self):
        key1 = ('foo1',)
        key2 = ('foo2',)
        # all sources are asked for keys:
        basis, test = self.get_basis_and_test_knit()
        keys = test.keys()
        self.assertEqual(set(), set(keys))
        self.assertEqual([("keys",)], basis.calls)
        # keys from a basis are returned:
        basis.add_lines(key1, (), [])
        basis.calls = []
        keys = test.keys()
        self.assertEqual(set([key1]), set(keys))
        self.assertEqual([("keys",)], basis.calls)
        # keys in both are not duplicated:
        test.add_lines(key2, (), [])
        basis.add_lines(key2, (), [])
        basis.calls = []
        keys = test.keys()
        self.assertEqual(2, len(keys))
        self.assertEqual(set([key1, key2]), set(keys))
        self.assertEqual([("keys",)], basis.calls)

    def test_add_mpdiffs(self):
        # records are inserted as normal; add_mpdiff builds on
        # add_lines, so a smoke test should be all that's needed:
        key = ('foo',)
        key_basis = ('bar',)
        key_delta = ('zaphod',)
        basis, test = self.get_basis_and_test_knit()
        source = self.make_test_knit(name='source')
        basis.add_lines(key_basis, (), ['foo\n'])
        basis.calls = []
        source.add_lines(key_basis, (), ['foo\n'])
        source.add_lines(key_delta, (key_basis,), ['bar\n'])
        diffs = source.make_mpdiffs([key_delta])
        test.add_mpdiffs([(key_delta, (key_basis,),
            source.get_sha1s([key_delta])[key_delta], diffs[0])])
        self.assertEqual([("get_parent_map", set([key_basis])),
            ('get_record_stream', [key_basis], 'unordered', True),],
            basis.calls)
        self.assertEqual({key_delta: (key_basis,)},
            test.get_parent_map([key_delta]))
        self.assertEqual('bar\n', test.get_record_stream([key_delta],
            'unordered', True).next().get_bytes_as('fulltext'))

    def test_make_mpdiffs(self):
        # Generating an mpdiff across a stacking boundary should detect parent
        # texts that are in the basis.
        key = ('foo',)
        key_left = ('bar',)
        key_right = ('zaphod',)
        basis, test = self.get_basis_and_test_knit()
        basis.add_lines(key_left, (), ['bar\n'])
        basis.add_lines(key_right, (), ['zaphod\n'])
        basis.calls = []
        test.add_lines(key, (key_left, key_right),
            ['bar\n', 'foo\n', 'zaphod\n'])
        diffs = test.make_mpdiffs([key])
        self.assertEqual([
            multiparent.MultiParent([multiparent.ParentText(0, 0, 0, 1),
                multiparent.NewText(['foo\n']),
                multiparent.ParentText(1, 0, 2, 1)])],
            diffs)
        self.assertEqual(3, len(basis.calls))
        self.assertEqual([
            ("get_parent_map", set([key_left, key_right])),
            ("get_parent_map", set([key_left, key_right])),
            ],
            basis.calls[:-1])
        last_call = basis.calls[-1]
        self.assertEqual('get_record_stream', last_call[0])
        self.assertEqual(set([key_left, key_right]), set(last_call[1]))
        self.assertEqual('topological', last_call[2])
        self.assertEqual(True, last_call[3])


class TestNetworkBehaviour(KnitTests):
    """Tests for getting data out of/into knits over the network."""

    def test_include_delta_closure_generates_a_knit_delta_closure(self):
        vf = self.make_test_knit(name='test')
        # put in three texts, giving ft, delta, delta
        vf.add_lines(('base',), (), ['base\n', 'content\n'])
        vf.add_lines(('d1',), (('base',),), ['d1\n'])
        vf.add_lines(('d2',), (('d1',),), ['d2\n'])
        # But heuristics could interfere, so check what happened:
        self.assertEqual(['knit-ft-gz', 'knit-delta-gz', 'knit-delta-gz'],
            [record.storage_kind for record in
             vf.get_record_stream([('base',), ('d1',), ('d2',)],
                'topological', False)])
        # generate a stream of just the deltas with include_delta_closure=True,
        # serialise to the network, and check that we get a delta closure on
        # the wire.
        stream = vf.get_record_stream([('d1',), ('d2',)], 'topological', True)
        netb = [record.get_bytes_as(record.storage_kind) for record in stream]
        # The first bytes should be a memo from _ContentMapGenerator, and the
        # second bytes should be empty (because it's an API proxy, not
        # something for wire serialisation).
        self.assertEqual('', netb[1])
        bytes = netb[0]
        kind, line_end = network_bytes_to_kind_and_offset(bytes)
        self.assertEqual('knit-delta-closure', kind)
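        # network_bytes_to_kind_and_offset splits the wire bytes into the
        # storage-kind label and the offset at which that header line ends
        # (where the payload starts); only the kind is asserted here.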


class TestContentMapGenerator(KnitTests):
    """Tests for ContentMapGenerator"""

    def test_get_record_stream_gives_records(self):
        vf = self.make_test_knit(name='test')
        # put in three texts, giving ft, delta, delta
        vf.add_lines(('base',), (), ['base\n', 'content\n'])
        vf.add_lines(('d1',), (('base',),), ['d1\n'])
        vf.add_lines(('d2',), (('d1',),), ['d2\n'])
        keys = [('d1',), ('d2',)]
        generator = _VFContentMapGenerator(vf, keys,
            global_map=vf.get_parent_map(keys))
        for record in generator.get_record_stream():
            if record.key == ('d1',):
                self.assertEqual('d1\n', record.get_bytes_as('fulltext'))
            else:
                self.assertEqual('d2\n', record.get_bytes_as('fulltext'))

    def test_get_record_stream_kinds_are_raw(self):
        vf = self.make_test_knit(name='test')
        # put in three texts, giving ft, delta, delta
        vf.add_lines(('base',), (), ['base\n', 'content\n'])
        vf.add_lines(('d1',), (('base',),), ['d1\n'])
        vf.add_lines(('d2',), (('d1',),), ['d2\n'])
        keys = [('base',), ('d1',), ('d2',)]
        generator = _VFContentMapGenerator(vf, keys,
            global_map=vf.get_parent_map(keys))
        kinds = {('base',): 'knit-delta-closure',
            ('d1',): 'knit-delta-closure-ref',
            ('d2',): 'knit-delta-closure-ref',
            }
        for record in generator.get_record_stream():
            self.assertEqual(kinds[record.key], record.storage_kind)
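        # As the kinds map shows, the first record carries the whole
        # delta-closure payload and the remaining records are '-ref' entries
        # that point back into it -- consistent with the network test above,
        # where only netb[0] contained any bytes.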