~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/tests/test_knit.py

  • Committer: Canonical.com Patch Queue Manager
  • Date: 2009-04-09 20:23:07 UTC
  • mfrom: (4265.1.4 bbc-merge)
  • Revision ID: pqm@pqm.ubuntu.com-20090409202307-n0depb16qepoe21o
(jam) Change _fetch_uses_deltas = False for CHK repos until we can write a better fix.
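A note on the flag named in this commit message: _fetch_uses_deltas is a class attribute that repository formats use to tell the fetch code whether records may be streamed as stored deltas or must be expanded to fulltexts first. A minimal sketch of that convention, with hypothetical names (CHKSampleFormat and choose_bytes are illustrative, not taken from this revision):

    # Sketch only: illustrates the _fetch_uses_deltas convention.
    class CHKSampleFormat(object):
        # Deltas are unsafe to stream until their compression parents are
        # guaranteed present in the target, so ship fulltexts instead.
        _fetch_uses_deltas = False

    def choose_bytes(fmt, record):
        # Pick the wire representation for one record (illustrative).
        if fmt._fetch_uses_deltas:
            return record.get_bytes_as(record.storage_kind)
        return record.get_bytes_as('fulltext')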

@@ -1 +1 @@
-# Copyright (C) 2005, 2006, 2007 Canonical Ltd
+# Copyright (C) 2006-2011 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -17 +17 @@
 """Tests for Knit data structure"""
 
 from cStringIO import StringIO
-import difflib
 import gzip
 import sys
 
 from bzrlib import (
     errors,
-    generate_ids,
     knit,
     multiparent,
     osutils,
     pack,
+    tests,
+    transport,
     )
 from bzrlib.errors import (
-    RevisionAlreadyPresent,
     KnitHeaderError,
-    RevisionNotPresent,
     NoSuchFile,
     )
 from bzrlib.index import *
 from bzrlib.knit import (
     AnnotatedKnitContent,
     KnitContent,
-    KnitSequenceMatcher,
     KnitVersionedFiles,
     PlainKnitContent,
     _VFContentMapGenerator,
-    _DirectPackAccess,
     _KndxIndex,
     _KnitGraphIndex,
     _KnitKeyAccess,
     make_file_factory,
     )
-from bzrlib.repofmt import pack_repo
+from bzrlib.patiencediff import PatienceSequenceMatcher
+from bzrlib.repofmt import (
+    knitpack_repo,
+    pack_repo,
+    )
 from bzrlib.tests import (
-    Feature,
-    KnownFailure,
     TestCase,
     TestCaseWithMemoryTransport,
     TestCaseWithTransport,
     TestNotApplicable,
     )
-from bzrlib.transport import get_transport
-from bzrlib.transport.memory import MemoryTransport
-from bzrlib.tuned_gzip import GzipFile
 from bzrlib.versionedfile import (
     AbsentContentFactory,
     ConstantMapper,
     network_bytes_to_kind_and_offset,
     RecordingVersionedFilesDecorator,
     )
-
-
-class _CompiledKnitFeature(Feature):
-
-    def _probe(self):
-        try:
-            import bzrlib._knit_load_data_c
-        except ImportError:
-            return False
-        return True
-
-    def feature_name(self):
-        return 'bzrlib._knit_load_data_c'
-
-CompiledKnitFeature = _CompiledKnitFeature()
+from bzrlib.tests import (
+    features,
+    )
+
+
+compiled_knit_feature = features.ModuleAvailableFeature(
+    'bzrlib._knit_load_data_pyx')
 
 
 class KnitContentTestsMixin(object):
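On the feature change above: features.ModuleAvailableFeature replaces the hand-rolled _CompiledKnitFeature class, probing once for a named module and letting the test runner skip dependent tests when it is absent. A short usage sketch, assuming the bzrlib.tests.features API shown in the hunk:

    # One declaration replaces a ten-line Feature subclass per module.
    compiled_knit_feature = features.ModuleAvailableFeature(
        'bzrlib._knit_load_data_pyx')

    class SomeCompiledTests(TestCase):
        # Every test in this class is reported as skipped when the
        # compiled extension is not importable.
        _test_needs_features = [compiled_knit_feature]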
@@ -116 +103 @@
         line_delta = source_content.line_delta(target_content)
         delta_blocks = list(KnitContent.get_line_delta_blocks(line_delta,
             source_lines, target_lines))
-        matcher = KnitSequenceMatcher(None, source_lines, target_lines)
-        matcher_blocks = list(list(matcher.get_matching_blocks()))
+        matcher = PatienceSequenceMatcher(None, source_lines, target_lines)
+        matcher_blocks = list(matcher.get_matching_blocks())
         self.assertEqual(matcher_blocks, delta_blocks)
 
     def test_get_line_delta_blocks(self):
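The hunk above swaps KnitSequenceMatcher for PatienceSequenceMatcher; both follow the difflib.SequenceMatcher interface, so get_matching_blocks() yields (index_a, index_b, length) triples terminated by a zero-length sentinel. A quick illustration:

    from bzrlib.patiencediff import PatienceSequenceMatcher

    a = ['a\n', 'b\n', 'c\n']
    b = ['a\n', 'x\n', 'c\n']
    blocks = PatienceSequenceMatcher(None, a, b).get_matching_blocks()
    # [(0, 0, 1), (2, 2, 1), (3, 3, 0)] -- the line delta covers
    # everything between matching blocks, which is what the mixin above
    # checks get_line_delta_blocks() against.
    print blocks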
@@ -343 +330 @@
             transport.append_bytes(packname, bytes)
         writer = pack.ContainerWriter(write_data)
         writer.begin()
-        access = _DirectPackAccess({})
+        access = pack_repo._DirectPackAccess({})
         access.set_writer(writer, index, (transport, packname))
         return access, writer
 
@@ -356 +343 @@
         writer.end()
         return memos
 
+    def test_pack_collection_pack_retries(self):
+        """An explicit pack of a pack collection succeeds even when a
+        concurrent pack happens.
+        """
+        builder = self.make_branch_builder('.')
+        builder.start_series()
+        builder.build_snapshot('rev-1', None, [
+            ('add', ('', 'root-id', 'directory', None)),
+            ('add', ('file', 'file-id', 'file', 'content\nrev 1\n')),
+            ])
+        builder.build_snapshot('rev-2', ['rev-1'], [
+            ('modify', ('file-id', 'content\nrev 2\n')),
+            ])
+        builder.build_snapshot('rev-3', ['rev-2'], [
+            ('modify', ('file-id', 'content\nrev 3\n')),
+            ])
+        self.addCleanup(builder.finish_series)
+        b = builder.get_branch()
+        self.addCleanup(b.lock_write().unlock)
+        repo = b.repository
+        collection = repo._pack_collection
+        # Concurrently repack the repo.
+        reopened_repo = repo.bzrdir.open_repository()
+        reopened_repo.pack()
+        # Pack the new pack.
+        collection.pack()
+
     def make_vf_for_retrying(self):
         """Create 3 packs and a reload function.
 
@@ -366 +380 @@
         :return: (versioned_file, reload_counter)
             versioned_file  a KnitVersionedFiles using the packs for access
         """
-        tree = self.make_branch_and_memory_tree('tree')
-        tree.lock_write()
-        self.addCleanup(tree.branch.repository.unlock)
-        tree.add([''], ['root-id'])
-        tree.commit('one', rev_id='rev-1')
-        tree.commit('two', rev_id='rev-2')
-        tree.commit('three', rev_id='rev-3')
+        builder = self.make_branch_builder('.', format="1.9")
+        builder.start_series()
+        builder.build_snapshot('rev-1', None, [
+            ('add', ('', 'root-id', 'directory', None)),
+            ('add', ('file', 'file-id', 'file', 'content\nrev 1\n')),
+            ])
+        builder.build_snapshot('rev-2', ['rev-1'], [
+            ('modify', ('file-id', 'content\nrev 2\n')),
+            ])
+        builder.build_snapshot('rev-3', ['rev-2'], [
+            ('modify', ('file-id', 'content\nrev 3\n')),
+            ])
+        builder.finish_series()
+        b = builder.get_branch()
+        b.lock_write()
+        self.addCleanup(b.unlock)
         # Pack these three revisions into another pack file, but don't remove
         # the originals
-        repo = tree.branch.repository
+        repo = b.repository
         collection = repo._pack_collection
         collection.ensure_loaded()
         orig_packs = collection.packs
-        packer = pack_repo.Packer(collection, orig_packs, '.testpack')
+        packer = knitpack_repo.KnitPacker(collection, orig_packs, '.testpack')
         new_pack = packer.pack()
         # forget about the new pack
         collection.reset()
         repo.refresh_data()
-        vf = tree.branch.repository.revisions
-        del tree
+        vf = repo.revisions
         # Set up a reload() function that switches to using the new pack file
         new_index = new_pack.revision_index
         access_tuple = new_pack.access_tuple()
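make_vf_for_retrying now drives history creation through BranchBuilder (above) instead of a memory tree: snapshots are declared with explicit parent revisions and content actions, and no working tree is needed. A standalone sketch of the same pattern, assuming bzrlib.branchbuilder and a writable transport:

    from bzrlib.branchbuilder import BranchBuilder

    builder = BranchBuilder(transport)  # any writable transport
    builder.start_series()              # batch several snapshots together
    builder.build_snapshot('rev-1', None, [
        ('add', ('', 'root-id', 'directory', None)),
        ('add', ('file', 'file-id', 'file', 'content\n'))])
    builder.build_snapshot('rev-2', ['rev-1'], [
        ('modify', ('file-id', 'new content\n'))])
    builder.finish_series()
    branch = builder.get_branch()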
@@ -425 +447 @@
         except _TestException, e:
             retry_exc = errors.RetryWithNewPacks(None, reload_occurred=False,
                                                  exc_info=sys.exc_info())
+        # GZ 2010-08-10: Cycle with exc_info affects 3 tests
         return retry_exc
 
     def test_read_from_several_packs(self):
@@ -439 +462 @@
         memos.extend(access.add_raw_records([('key', 5)], 'alpha'))
         writer.end()
         transport = self.get_transport()
-        access = _DirectPackAccess({"FOO":(transport, 'packfile'),
+        access = pack_repo._DirectPackAccess({"FOO":(transport, 'packfile'),
             "FOOBAR":(transport, 'pack2'),
             "BAZ":(transport, 'pack3')})
         self.assertEqual(['1234567890', '12345', 'alpha'],
@@ -455 +478 @@
 
     def test_set_writer(self):
         """The writer should be settable post construction."""
-        access = _DirectPackAccess({})
+        access = pack_repo._DirectPackAccess({})
         transport = self.get_transport()
         packname = 'packfile'
         index = 'foo'
@@ -473 +496 @@
         transport = self.get_transport()
         reload_called, reload_func = self.make_reload_func()
         # Note that the index key has changed from 'foo' to 'bar'
-        access = _DirectPackAccess({'bar':(transport, 'packname')},
+        access = pack_repo._DirectPackAccess({'bar':(transport, 'packname')},
                                    reload_func=reload_func)
         e = self.assertListRaises(errors.RetryWithNewPacks,
                                   access.get_raw_records, memos)
@@ -488 +511 @@
         memos = self.make_pack_file()
         transport = self.get_transport()
         # Note that the index key has changed from 'foo' to 'bar'
-        access = _DirectPackAccess({'bar':(transport, 'packname')})
+        access = pack_repo._DirectPackAccess({'bar':(transport, 'packname')})
         e = self.assertListRaises(KeyError, access.get_raw_records, memos)
 
     def test_missing_file_raises_retry(self):
@@ -496 +519 @@
         transport = self.get_transport()
         reload_called, reload_func = self.make_reload_func()
         # Note that the 'filename' has been changed to 'different-packname'
-        access = _DirectPackAccess({'foo':(transport, 'different-packname')},
-                                   reload_func=reload_func)
+        access = pack_repo._DirectPackAccess(
+            {'foo':(transport, 'different-packname')},
+            reload_func=reload_func)
         e = self.assertListRaises(errors.RetryWithNewPacks,
                                   access.get_raw_records, memos)
         # The file has gone missing, so we assume we need to reload
@@ -511 +535 @@
         memos = self.make_pack_file()
         transport = self.get_transport()
         # Note that the 'filename' has been changed to 'different-packname'
-        access = _DirectPackAccess({'foo':(transport, 'different-packname')})
+        access = pack_repo._DirectPackAccess(
+            {'foo': (transport, 'different-packname')})
         e = self.assertListRaises(errors.NoSuchFile,
                                   access.get_raw_records, memos)
 
@@ -521 +546 @@
         failing_transport = MockReadvFailingTransport(
                                 [transport.get_bytes('packname')])
         reload_called, reload_func = self.make_reload_func()
-        access = _DirectPackAccess({'foo':(failing_transport, 'packname')},
-                                   reload_func=reload_func)
+        access = pack_repo._DirectPackAccess(
+            {'foo': (failing_transport, 'packname')},
+            reload_func=reload_func)
         # Asking for a single record will not trigger the Mock failure
         self.assertEqual(['1234567890'],
             list(access.get_raw_records(memos[:1])))
@@ -544 +570 @@
         failing_transport = MockReadvFailingTransport(
                                 [transport.get_bytes('packname')])
         reload_called, reload_func = self.make_reload_func()
-        access = _DirectPackAccess({'foo':(failing_transport, 'packname')})
+        access = pack_repo._DirectPackAccess(
+            {'foo':(failing_transport, 'packname')})
         # Asking for a single record will not trigger the Mock failure
         self.assertEqual(['1234567890'],
             list(access.get_raw_records(memos[:1])))
@@ -555 +582 @@
                                   access.get_raw_records, memos)
 
     def test_reload_or_raise_no_reload(self):
-        access = _DirectPackAccess({}, reload_func=None)
+        access = pack_repo._DirectPackAccess({}, reload_func=None)
         retry_exc = self.make_retry_exception()
         # Without a reload_func, we will just re-raise the original exception
         self.assertRaises(_TestException, access.reload_or_raise, retry_exc)
 
     def test_reload_or_raise_reload_changed(self):
         reload_called, reload_func = self.make_reload_func(return_val=True)
-        access = _DirectPackAccess({}, reload_func=reload_func)
+        access = pack_repo._DirectPackAccess({}, reload_func=reload_func)
         retry_exc = self.make_retry_exception()
         access.reload_or_raise(retry_exc)
         self.assertEqual([1], reload_called)
@@ -572 +599 @@
 
     def test_reload_or_raise_reload_no_change(self):
         reload_called, reload_func = self.make_reload_func(return_val=False)
-        access = _DirectPackAccess({}, reload_func=reload_func)
+        access = pack_repo._DirectPackAccess({}, reload_func=reload_func)
         retry_exc = self.make_retry_exception()
         # If reload_occurred is False, then we consider it an error to have
         # reload_func() return False (no changes).
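These tests pin down the reload_or_raise contract: with no reload_func the original exception propagates; with one that reports a change the caller may retry; with one that reports no change the RetryWithNewPacks error is treated as fatal. A hedged sketch of the read loop this protocol supports (read_with_retry is illustrative, not bzrlib code):

    def read_with_retry(access, memos):
        # Sketch of a caller of the retry protocol exercised above.
        while True:
            try:
                return list(access.get_raw_records(memos))
            except errors.RetryWithNewPacks, retry_exc:
                # Re-raises the underlying error unless reloading the pack
                # names actually changed something; otherwise loop and retry.
                access.reload_or_raise(retry_exc)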
@@ -709 +736 @@
 
     def make_multiple_records(self):
         """Create the content for multiple records."""
-        sha1sum = osutils.sha('foo\nbar\n').hexdigest()
+        sha1sum = osutils.sha_string('foo\nbar\n')
         total_txt = []
         gz_txt = self.create_gz_content('version rev-id-1 2 %s\n'
                                         'foo\n'
@@ -718 +745 @@
                                         % (sha1sum,))
         record_1 = (0, len(gz_txt), sha1sum)
         total_txt.append(gz_txt)
-        sha1sum = osutils.sha('baz\n').hexdigest()
+        sha1sum = osutils.sha_string('baz\n')
         gz_txt = self.create_gz_content('version rev-id-2 1 %s\n'
                                         'baz\n'
                                         'end rev-id-2\n'
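osutils.sha_string, which replaces the osutils.sha(...).hexdigest() spelling throughout these tests, returns the hex SHA-1 of a byte string. As far as these tests care it behaves like this hashlib equivalent (sha_string_equivalent is an illustrative name):

    import hashlib

    def sha_string_equivalent(a_string):
        # The same hex digest the knit record headers embed, e.g. in
        # 'version rev-id-1 2 <sha1>\n' above.
        return hashlib.sha1(a_string).hexdigest()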
@@ -728 +755 @@
         return total_txt, record_1, record_2
 
     def test_valid_knit_data(self):
-        sha1sum = osutils.sha('foo\nbar\n').hexdigest()
+        sha1sum = osutils.sha_string('foo\nbar\n')
         gz_txt = self.create_gz_content('version rev-id-1 2 %s\n'
                                         'foo\n'
                                         'bar\n'
@@ -765 +792 @@
                          raw_contents)
 
     def test_not_enough_lines(self):
-        sha1sum = osutils.sha('foo\n').hexdigest()
+        sha1sum = osutils.sha_string('foo\n')
         # record says 2 lines data says 1
         gz_txt = self.create_gz_content('version rev-id-1 2 %s\n'
                                         'foo\n'
@@ -783 +810 @@
         self.assertEqual([(('rev-id-1',),  gz_txt, sha1sum)], raw_contents)
 
     def test_too_many_lines(self):
-        sha1sum = osutils.sha('foo\nbar\n').hexdigest()
+        sha1sum = osutils.sha_string('foo\nbar\n')
         # record says 1 lines data says 2
         gz_txt = self.create_gz_content('version rev-id-1 1 %s\n'
                                         'foo\n'
@@ -802 +829 @@
         self.assertEqual([(('rev-id-1',), gz_txt, sha1sum)], raw_contents)
 
     def test_mismatched_version_id(self):
-        sha1sum = osutils.sha('foo\nbar\n').hexdigest()
+        sha1sum = osutils.sha_string('foo\nbar\n')
         gz_txt = self.create_gz_content('version rev-id-1 2 %s\n'
                                         'foo\n'
                                         'bar\n'
@@ -821 +848 @@
             knit._read_records_iter_raw(records))
 
     def test_uncompressed_data(self):
-        sha1sum = osutils.sha('foo\nbar\n').hexdigest()
+        sha1sum = osutils.sha_string('foo\nbar\n')
         txt = ('version rev-id-1 2 %s\n'
                'foo\n'
                'bar\n'
@@ -841 +868 @@
             knit._read_records_iter_raw(records))
 
     def test_corrupted_data(self):
-        sha1sum = osutils.sha('foo\nbar\n').hexdigest()
+        sha1sum = osutils.sha_string('foo\nbar\n')
         gz_txt = self.create_gz_content('version rev-id-1 2 %s\n'
                                         'foo\n'
                                         'bar\n'
@@ -864 +891 @@
 
     def get_knit_index(self, transport, name, mode):
         mapper = ConstantMapper(name)
-        orig = knit._load_data
-        def reset():
-            knit._load_data = orig
-        self.addCleanup(reset)
         from bzrlib._knit_load_data_py import _load_data_py
-        knit._load_data = _load_data_py
+        self.overrideAttr(knit, '_load_data', _load_data_py)
         allow_writes = lambda: 'w' in mode
         return _KndxIndex(transport, mapper, lambda:None, allow_writes, lambda:True)
 
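self.overrideAttr, used above in place of the manual save/restore dance, is the TestCase helper for temporarily replacing an attribute for the duration of a test. Roughly (a sketch of the behaviour relied on here, not bzrlib's exact code):

    def overrideAttr(self, obj, attr_name, new):
        # Record the original value and restore it when the test ends.
        orig = getattr(obj, attr_name)
        def restore():
            setattr(obj, attr_name, orig)
        self.addCleanup(restore)
        setattr(obj, attr_name, new)
        return orig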
@@ -1173 +1196 @@
             self.assertRaises(errors.KnitCorrupt, index.keys)
         except TypeError, e:
             if (str(e) == ('exceptions must be strings, classes, or instances,'
-                           ' not exceptions.IndexError')
-                and sys.version_info[0:2] >= (2,5)):
+                           ' not exceptions.IndexError')):
                 self.knownFailure('Pyrex <0.9.5 fails with TypeError when'
                                   ' raising new style exceptions with python'
                                   ' >=2.5')
@@ -1193 +1215 @@
             self.assertRaises(errors.KnitCorrupt, index.keys)
         except TypeError, e:
             if (str(e) == ('exceptions must be strings, classes, or instances,'
-                           ' not exceptions.ValueError')
-                and sys.version_info[0:2] >= (2,5)):
+                           ' not exceptions.ValueError')):
                 self.knownFailure('Pyrex <0.9.5 fails with TypeError when'
                                   ' raising new style exceptions with python'
                                   ' >=2.5')
@@ -1213 +1234 @@
             self.assertRaises(errors.KnitCorrupt, index.keys)
         except TypeError, e:
             if (str(e) == ('exceptions must be strings, classes, or instances,'
-                           ' not exceptions.ValueError')
-                and sys.version_info[0:2] >= (2,5)):
+                           ' not exceptions.ValueError')):
                 self.knownFailure('Pyrex <0.9.5 fails with TypeError when'
                                   ' raising new style exceptions with python'
                                   ' >=2.5')
@@ -1231 +1251 @@
             self.assertRaises(errors.KnitCorrupt, index.keys)
         except TypeError, e:
             if (str(e) == ('exceptions must be strings, classes, or instances,'
-                           ' not exceptions.ValueError')
-                and sys.version_info[0:2] >= (2,5)):
+                           ' not exceptions.ValueError')):
                 self.knownFailure('Pyrex <0.9.5 fails with TypeError when'
                                   ' raising new style exceptions with python'
                                   ' >=2.5')
@@ -1249 +1268 @@
             self.assertRaises(errors.KnitCorrupt, index.keys)
         except TypeError, e:
             if (str(e) == ('exceptions must be strings, classes, or instances,'
-                           ' not exceptions.ValueError')
-                and sys.version_info[0:2] >= (2,5)):
+                           ' not exceptions.ValueError')):
                 self.knownFailure('Pyrex <0.9.5 fails with TypeError when'
                                   ' raising new style exceptions with python'
                                   ' >=2.5')
@@ -1300 +1318 @@
 
 class LowLevelKnitIndexTests_c(LowLevelKnitIndexTests):
 
-    _test_needs_features = [CompiledKnitFeature]
+    _test_needs_features = [compiled_knit_feature]
 
     def get_knit_index(self, transport, name, mode):
         mapper = ConstantMapper(name)
-        orig = knit._load_data
-        def reset():
-            knit._load_data = orig
-        self.addCleanup(reset)
-        from bzrlib._knit_load_data_c import _load_data_c
-        knit._load_data = _load_data_c
+        from bzrlib._knit_load_data_pyx import _load_data_c
+        self.overrideAttr(knit, '_load_data', _load_data_c)
         allow_writes = lambda: mode == 'w'
-        return _KndxIndex(transport, mapper, lambda:None, allow_writes, lambda:True)
+        return _KndxIndex(transport, mapper, lambda:None,
+                          allow_writes, lambda:True)
+
+
+class Test_KnitAnnotator(TestCaseWithMemoryTransport):
+
+    def make_annotator(self):
+        factory = knit.make_pack_factory(True, True, 1)
+        vf = factory(self.get_transport())
+        return knit._KnitAnnotator(vf)
+
+    def test__expand_fulltext(self):
+        ann = self.make_annotator()
+        rev_key = ('rev-id',)
+        ann._num_compression_children[rev_key] = 1
+        res = ann._expand_record(rev_key, (('parent-id',),), None,
+                           ['line1\n', 'line2\n'], ('fulltext', True))
+        # The content object and text lines should be cached appropriately
+        self.assertEqual(['line1\n', 'line2'], res)
+        content_obj = ann._content_objects[rev_key]
+        self.assertEqual(['line1\n', 'line2\n'], content_obj._lines)
+        self.assertEqual(res, content_obj.text())
+        self.assertEqual(res, ann._text_cache[rev_key])
+
+    def test__expand_delta_comp_parent_not_available(self):
+        # Parent isn't available yet, so we return nothing, but queue up this
+        # node for later processing
+        ann = self.make_annotator()
+        rev_key = ('rev-id',)
+        parent_key = ('parent-id',)
+        record = ['0,1,1\n', 'new-line\n']
+        details = ('line-delta', False)
+        res = ann._expand_record(rev_key, (parent_key,), parent_key,
+                                 record, details)
+        self.assertEqual(None, res)
+        self.assertTrue(parent_key in ann._pending_deltas)
+        pending = ann._pending_deltas[parent_key]
+        self.assertEqual(1, len(pending))
+        self.assertEqual((rev_key, (parent_key,), record, details), pending[0])
+
+    def test__expand_record_tracks_num_children(self):
+        ann = self.make_annotator()
+        rev_key = ('rev-id',)
+        rev2_key = ('rev2-id',)
+        parent_key = ('parent-id',)
+        record = ['0,1,1\n', 'new-line\n']
+        details = ('line-delta', False)
+        ann._num_compression_children[parent_key] = 2
+        ann._expand_record(parent_key, (), None, ['line1\n', 'line2\n'],
+                           ('fulltext', False))
+        res = ann._expand_record(rev_key, (parent_key,), parent_key,
+                                 record, details)
+        self.assertEqual({parent_key: 1}, ann._num_compression_children)
+        # Expanding the second child should remove the content object, and the
+        # num_compression_children entry
+        res = ann._expand_record(rev2_key, (parent_key,), parent_key,
+                                 record, details)
+        self.assertFalse(parent_key in ann._content_objects)
+        self.assertEqual({}, ann._num_compression_children)
+        # We should not cache the content_objects for rev2 and rev, because
+        # they do not have compression children of their own.
+        self.assertEqual({}, ann._content_objects)
+
+    def test__expand_delta_records_blocks(self):
+        ann = self.make_annotator()
+        rev_key = ('rev-id',)
+        parent_key = ('parent-id',)
+        record = ['0,1,1\n', 'new-line\n']
+        details = ('line-delta', True)
+        ann._num_compression_children[parent_key] = 2
+        ann._expand_record(parent_key, (), None,
+                           ['line1\n', 'line2\n', 'line3\n'],
+                           ('fulltext', False))
+        ann._expand_record(rev_key, (parent_key,), parent_key, record, details)
+        self.assertEqual({(rev_key, parent_key): [(1, 1, 1), (3, 3, 0)]},
+                         ann._matching_blocks)
+        rev2_key = ('rev2-id',)
+        record = ['0,1,1\n', 'new-line\n']
+        details = ('line-delta', False)
+        ann._expand_record(rev2_key, (parent_key,), parent_key, record, details)
+        self.assertEqual([(1, 1, 2), (3, 3, 0)],
+                         ann._matching_blocks[(rev2_key, parent_key)])
+
+    def test__get_parent_ann_uses_matching_blocks(self):
+        ann = self.make_annotator()
+        rev_key = ('rev-id',)
+        parent_key = ('parent-id',)
+        parent_ann = [(parent_key,)]*3
+        block_key = (rev_key, parent_key)
+        ann._annotations_cache[parent_key] = parent_ann
+        ann._matching_blocks[block_key] = [(0, 1, 1), (3, 3, 0)]
+        # We should not try to access any parent_lines content, because we know
+        # we already have the matching blocks
+        par_ann, blocks = ann._get_parent_annotations_and_matches(rev_key,
+                                        ['1\n', '2\n', '3\n'], parent_key)
+        self.assertEqual(parent_ann, par_ann)
+        self.assertEqual([(0, 1, 1), (3, 3, 0)], blocks)
+        self.assertEqual({}, ann._matching_blocks)
+
+    def test__process_pending(self):
+        ann = self.make_annotator()
+        rev_key = ('rev-id',)
+        p1_key = ('p1-id',)
+        p2_key = ('p2-id',)
+        record = ['0,1,1\n', 'new-line\n']
+        details = ('line-delta', False)
+        p1_record = ['line1\n', 'line2\n']
+        ann._num_compression_children[p1_key] = 1
+        res = ann._expand_record(rev_key, (p1_key,p2_key), p1_key,
+                                 record, details)
+        self.assertEqual(None, res)
+        # self.assertTrue(p1_key in ann._pending_deltas)
+        self.assertEqual({}, ann._pending_annotation)
+        # Now insert p1, and we should be able to expand the delta
+        res = ann._expand_record(p1_key, (), None, p1_record,
+                                 ('fulltext', False))
+        self.assertEqual(p1_record, res)
+        ann._annotations_cache[p1_key] = [(p1_key,)]*2
+        res = ann._process_pending(p1_key)
+        self.assertEqual([], res)
+        self.assertFalse(p1_key in ann._pending_deltas)
+        self.assertTrue(p2_key in ann._pending_annotation)
+        self.assertEqual({p2_key: [(rev_key, (p1_key, p2_key))]},
+                         ann._pending_annotation)
+        # Now fill in parent 2, and pending annotation should be satisfied
+        res = ann._expand_record(p2_key, (), None, [], ('fulltext', False))
+        ann._annotations_cache[p2_key] = []
+        res = ann._process_pending(p2_key)
+        self.assertEqual([rev_key], res)
+        self.assertEqual({}, ann._pending_annotation)
+        self.assertEqual({}, ann._pending_deltas)
+
+    def test_record_delta_removes_basis(self):
+        ann = self.make_annotator()
+        ann._expand_record(('parent-id',), (), None,
+                           ['line1\n', 'line2\n'], ('fulltext', False))
+        ann._num_compression_children['parent-id'] = 2
+
+    def test_annotate_special_text(self):
+        ann = self.make_annotator()
+        vf = ann._vf
+        rev1_key = ('rev-1',)
+        rev2_key = ('rev-2',)
+        rev3_key = ('rev-3',)
+        spec_key = ('special:',)
+        vf.add_lines(rev1_key, [], ['initial content\n'])
+        vf.add_lines(rev2_key, [rev1_key], ['initial content\n',
+                                            'common content\n',
+                                            'content in 2\n'])
+        vf.add_lines(rev3_key, [rev1_key], ['initial content\n',
+                                            'common content\n',
+                                            'content in 3\n'])
+        spec_text = ('initial content\n'
+                     'common content\n'
+                     'content in 2\n'
+                     'content in 3\n')
+        ann.add_special_text(spec_key, [rev2_key, rev3_key], spec_text)
+        anns, lines = ann.annotate(spec_key)
+        self.assertEqual([(rev1_key,),
+                          (rev2_key, rev3_key),
+                          (rev2_key,),
+                          (rev3_key,),
+                         ], anns)
+        self.assertEqualDiff(spec_text, ''.join(lines))
 
 
 class KnitTests(TestCaseWithTransport):
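The _KnitAnnotator tests added above feed records such as ['0,1,1\n', 'new-line\n']: a knit line-delta is a sequence of hunks, each a 'start,end,count' header followed by count replacement lines for old lines [start, end). A hedged sketch of applying one such delta (illustrative only, not bzrlib's implementation):

    def apply_line_delta(parent_lines, delta_lines):
        out = []
        pos = 0  # position reached in parent_lines
        i = 0
        while i < len(delta_lines):
            start, end, count = map(int, delta_lines[i].split(','))
            out.extend(parent_lines[pos:start])           # unchanged prefix
            out.extend(delta_lines[i + 1:i + 1 + count])  # replacement lines
            pos = end
            i += 1 + count
        out.extend(parent_lines[pos:])                    # unchanged suffix
        return out

    # ['0,1,1\n', 'new-line\n'] applied to ['line1\n', 'line2\n']
    # replaces line 0 and yields ['new-line\n', 'line2\n'].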
@@ -1426 +1603 @@
         # could leave an empty .kndx file, which bzr would later claim was a
         # corrupted file since the header was not present. In reality, the file
         # just wasn't created, so it should be ignored.
-        t = get_transport('.')
+        t = transport.get_transport('.')
         t.put_bytes('test.kndx', '')
 
         knit = self.make_test_knit()
 
     def test_knit_index_checks_header(self):
-        t = get_transport('.')
+        t = transport.get_transport('.')
         t.put_bytes('test.kndx', '# not really a knit header\n\n')
         k = self.make_test_knit()
         self.assertRaises(KnitHeaderError, k.keys)
@@ -1637 +1814 @@
               ([('missing-parent', ), ('ghost', )], [('missing-parent', )]))])
         return graph_index
 
+    def make_g_index_missing_parent(self):
+        graph_index = self.make_g_index('missing_parent', 2,
+            [(('parent', ), ' 100 78', ([], [])),
+             (('tip', ), ' 100 78',
+              ([('parent', ), ('missing-parent', )], [('parent', )])),
+              ])
+        return graph_index
+
     def make_g_index_no_external_refs(self):
         graph_index = self.make_g_index('no_external_refs', 2,
             [(('rev', ), ' 100 78',
@@ -1650 +1835 @@
         index.scan_unvalidated_index(unvalidated)
         self.assertEqual(frozenset(), index.get_missing_compression_parents())
 
-    def test_add_incomplete_unvalidated_index(self):
+    def test_add_missing_compression_parent_unvalidated_index(self):
         unvalidated = self.make_g_index_missing_compression_parent()
         combined = CombinedGraphIndex([unvalidated])
         index = _KnitGraphIndex(combined, lambda: True, deltas=True)
@@ -1662 +1847 @@
             frozenset([('missing-parent',)]),
             index.get_missing_compression_parents())
 
+    def test_add_missing_noncompression_parent_unvalidated_index(self):
+        unvalidated = self.make_g_index_missing_parent()
+        combined = CombinedGraphIndex([unvalidated])
+        index = _KnitGraphIndex(combined, lambda: True, deltas=True,
+            track_external_parent_refs=True)
+        index.scan_unvalidated_index(unvalidated)
+        self.assertEqual(
+            frozenset([('missing-parent',)]), index.get_missing_parents())
+
+    def test_track_external_parent_refs(self):
+        g_index = self.make_g_index('empty', 2, [])
+        combined = CombinedGraphIndex([g_index])
+        index = _KnitGraphIndex(combined, lambda: True, deltas=True,
+            add_callback=self.catch_add, track_external_parent_refs=True)
+        self.caught_entries = []
+        index.add_records([
+            (('new-key',), 'fulltext,no-eol', (None, 50, 60),
+             [('parent-1',), ('parent-2',)])])
+        self.assertEqual(
+            frozenset([('parent-1',), ('parent-2',)]),
+            index.get_missing_parents())
+
     def test_add_unvalidated_index_with_present_external_references(self):
         index = self.two_graph_index(deltas=True)
         # Ugly hack to get at one of the underlying GraphIndex objects that
@@ -2030 +2237 @@
         # self.assertEqual([("annotate", key_basis)], basis.calls)
         self.assertEqual([('get_parent_map', set([key_basis])),
             ('get_parent_map', set([key_basis])),
-            ('get_parent_map', set([key_basis])),
-            ('get_record_stream', [key_basis], 'unordered', True)],
+            ('get_record_stream', [key_basis], 'topological', True)],
             basis.calls)
 
     def test_check(self):
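The ordering assertions updated here concern VersionedFiles.get_record_stream(keys, ordering, include_delta_closure): for stacked knits, the ordering requested at the top level is now passed through to the fallback ('topological' rather than 'unordered'). A consumer-side sketch, assuming the VersionedFiles API used throughout this file:

    stream = vf.get_record_stream([key_a, key_b], 'topological', True)
    for record in stream:
        if record.storage_kind == 'absent':
            continue  # key missing from this store and its fallbacks
        # 'topological' yields parents before children, which matters when
        # texts have to be reconstructed from deltas during fetch.
        text = record.get_bytes_as('fulltext')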
@@ -2143 +2349 @@
         # ask which fallbacks have which parents.
         self.assertEqual([
             ("get_parent_map", set([key_basis, key_basis_2, key_missing])),
-            # unordered is asked for by the underlying worker as it still
-            # buffers everything while answering - which is a problem!
-            ("get_record_stream", [key_basis_2, key_basis], 'unordered', True)],
+            # topological is requested from the fallback, because that is what
+            # was requested at the top level.
+            ("get_record_stream", [key_basis_2, key_basis], 'topological', True)],
             calls)
 
     def test_get_record_stream_unordered_deltas(self):
@@ -2237 +2443 @@
         key_basis = ('bar',)
         key_missing = ('missing',)
         test.add_lines(key, (), ['foo\n'])
-        key_sha1sum = osutils.sha('foo\n').hexdigest()
+        key_sha1sum = osutils.sha_string('foo\n')
         sha1s = test.get_sha1s([key])
         self.assertEqual({key: key_sha1sum}, sha1s)
         self.assertEqual([], basis.calls)
@@ -2245 +2451 @@
         # directly (rather than via text reconstruction) so that remote servers
         # etc don't have to answer with full content.
         basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
-        basis_sha1sum = osutils.sha('foo\nbar\n').hexdigest()
+        basis_sha1sum = osutils.sha_string('foo\nbar\n')
         basis.calls = []
         sha1s = test.get_sha1s([key, key_missing, key_basis])
         self.assertEqual({key: key_sha1sum,
@@ -2372 +2578 @@
         last_call = basis.calls[-1]
         self.assertEqual('get_record_stream', last_call[0])
         self.assertEqual(set([key_left, key_right]), set(last_call[1]))
-        self.assertEqual('unordered', last_call[2])
+        self.assertEqual('topological', last_call[2])
         self.assertEqual(True, last_call[3])