~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/tests/test_knit.py

  • Committer: Johan Walles
  • Date: 2009-05-07 05:08:46 UTC
  • mfrom: (4342 +trunk)
  • mto: This revision was merged to the branch mainline in revision 4343.
  • Revision ID: johan.walles@gmail.com-20090507050846-nkwvcyauf1eh653q
Merge from upstream.

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
 
# Copyright (C) 2006-2011 Canonical Ltd
 
1
# Copyright (C) 2005, 2006, 2007 Canonical Ltd
2
2
#
3
3
# This program is free software; you can redistribute it and/or modify
4
4
# it under the terms of the GNU General Public License as published by
17
17
"""Tests for Knit data structure"""
18
18
 
19
19
from cStringIO import StringIO
 
20
import difflib
20
21
import gzip
21
22
import sys
22
23
 
23
24
from bzrlib import (
24
25
    errors,
 
26
    generate_ids,
25
27
    knit,
26
28
    multiparent,
27
29
    osutils,
28
30
    pack,
29
 
    tests,
30
 
    transport,
31
31
    )
32
32
from bzrlib.errors import (
 
33
    RevisionAlreadyPresent,
33
34
    KnitHeaderError,
 
35
    RevisionNotPresent,
34
36
    NoSuchFile,
35
37
    )
36
38
from bzrlib.index import *
37
39
from bzrlib.knit import (
38
40
    AnnotatedKnitContent,
39
41
    KnitContent,
 
42
    KnitSequenceMatcher,
40
43
    KnitVersionedFiles,
41
44
    PlainKnitContent,
42
45
    _VFContentMapGenerator,
 
46
    _DirectPackAccess,
43
47
    _KndxIndex,
44
48
    _KnitGraphIndex,
45
49
    _KnitKeyAccess,
46
50
    make_file_factory,
47
51
    )
48
 
from bzrlib.patiencediff import PatienceSequenceMatcher
49
 
from bzrlib.repofmt import (
50
 
    knitpack_repo,
51
 
    pack_repo,
52
 
    )
 
52
from bzrlib.repofmt import pack_repo
53
53
from bzrlib.tests import (
 
54
    Feature,
 
55
    KnownFailure,
54
56
    TestCase,
55
57
    TestCaseWithMemoryTransport,
56
58
    TestCaseWithTransport,
57
59
    TestNotApplicable,
58
60
    )
 
61
from bzrlib.transport import get_transport
 
62
from bzrlib.transport.memory import MemoryTransport
 
63
from bzrlib.tuned_gzip import GzipFile
59
64
from bzrlib.versionedfile import (
60
65
    AbsentContentFactory,
61
66
    ConstantMapper,
64
69
    )
65
70
 
66
71
 
67
 
compiled_knit_feature = tests.ModuleAvailableFeature(
68
 
                            'bzrlib._knit_load_data_pyx')
 
72
class _CompiledKnitFeature(Feature):
 
73
 
 
74
    def _probe(self):
 
75
        try:
 
76
            import bzrlib._knit_load_data_c
 
77
        except ImportError:
 
78
            return False
 
79
        return True
 
80
 
 
81
    def feature_name(self):
 
82
        return 'bzrlib._knit_load_data_c'
 
83
 
 
84
CompiledKnitFeature = _CompiledKnitFeature()
69
85
 
70
86
 
71
87
class KnitContentTestsMixin(object):
100
116
        line_delta = source_content.line_delta(target_content)
101
117
        delta_blocks = list(KnitContent.get_line_delta_blocks(line_delta,
102
118
            source_lines, target_lines))
103
 
        matcher = PatienceSequenceMatcher(None, source_lines, target_lines)
104
 
        matcher_blocks = list(matcher.get_matching_blocks())
 
119
        matcher = KnitSequenceMatcher(None, source_lines, target_lines)
 
120
        matcher_blocks = list(list(matcher.get_matching_blocks()))
105
121
        self.assertEqual(matcher_blocks, delta_blocks)
106
122
 
107
123
    def test_get_line_delta_blocks(self):
327
343
            transport.append_bytes(packname, bytes)
328
344
        writer = pack.ContainerWriter(write_data)
329
345
        writer.begin()
330
 
        access = pack_repo._DirectPackAccess({})
 
346
        access = _DirectPackAccess({})
331
347
        access.set_writer(writer, index, (transport, packname))
332
348
        return access, writer
333
349
 
340
356
        writer.end()
341
357
        return memos
342
358
 
343
 
    def test_pack_collection_pack_retries(self):
344
 
        """An explicit pack of a pack collection succeeds even when a
345
 
        concurrent pack happens.
346
 
        """
347
 
        builder = self.make_branch_builder('.')
348
 
        builder.start_series()
349
 
        builder.build_snapshot('rev-1', None, [
350
 
            ('add', ('', 'root-id', 'directory', None)),
351
 
            ('add', ('file', 'file-id', 'file', 'content\nrev 1\n')),
352
 
            ])
353
 
        builder.build_snapshot('rev-2', ['rev-1'], [
354
 
            ('modify', ('file-id', 'content\nrev 2\n')),
355
 
            ])
356
 
        builder.build_snapshot('rev-3', ['rev-2'], [
357
 
            ('modify', ('file-id', 'content\nrev 3\n')),
358
 
            ])
359
 
        self.addCleanup(builder.finish_series)
360
 
        b = builder.get_branch()
361
 
        self.addCleanup(b.lock_write().unlock)
362
 
        repo = b.repository
363
 
        collection = repo._pack_collection
364
 
        # Concurrently repack the repo.
365
 
        reopened_repo = repo.bzrdir.open_repository()
366
 
        reopened_repo.pack()
367
 
        # Pack the new pack.
368
 
        collection.pack()
369
 
 
370
359
    def make_vf_for_retrying(self):
371
360
        """Create 3 packs and a reload function.
372
361
 
377
366
        :return: (versioned_file, reload_counter)
378
367
            versioned_file  a KnitVersionedFiles using the packs for access
379
368
        """
380
 
        builder = self.make_branch_builder('.', format="1.9")
381
 
        builder.start_series()
382
 
        builder.build_snapshot('rev-1', None, [
383
 
            ('add', ('', 'root-id', 'directory', None)),
384
 
            ('add', ('file', 'file-id', 'file', 'content\nrev 1\n')),
385
 
            ])
386
 
        builder.build_snapshot('rev-2', ['rev-1'], [
387
 
            ('modify', ('file-id', 'content\nrev 2\n')),
388
 
            ])
389
 
        builder.build_snapshot('rev-3', ['rev-2'], [
390
 
            ('modify', ('file-id', 'content\nrev 3\n')),
391
 
            ])
392
 
        builder.finish_series()
393
 
        b = builder.get_branch()
394
 
        b.lock_write()
395
 
        self.addCleanup(b.unlock)
 
369
        tree = self.make_branch_and_memory_tree('tree')
 
370
        tree.lock_write()
 
371
        self.addCleanup(tree.branch.repository.unlock)
 
372
        tree.add([''], ['root-id'])
 
373
        tree.commit('one', rev_id='rev-1')
 
374
        tree.commit('two', rev_id='rev-2')
 
375
        tree.commit('three', rev_id='rev-3')
396
376
        # Pack these three revisions into another pack file, but don't remove
397
377
        # the originals
398
 
        repo = b.repository
 
378
        repo = tree.branch.repository
399
379
        collection = repo._pack_collection
400
380
        collection.ensure_loaded()
401
381
        orig_packs = collection.packs
402
 
        packer = knitpack_repo.KnitPacker(collection, orig_packs, '.testpack')
 
382
        packer = pack_repo.Packer(collection, orig_packs, '.testpack')
403
383
        new_pack = packer.pack()
404
384
        # forget about the new pack
405
385
        collection.reset()
406
386
        repo.refresh_data()
407
 
        vf = repo.revisions
 
387
        vf = tree.branch.repository.revisions
 
388
        del tree
408
389
        # Set up a reload() function that switches to using the new pack file
409
390
        new_index = new_pack.revision_index
410
391
        access_tuple = new_pack.access_tuple()
444
425
        except _TestException, e:
445
426
            retry_exc = errors.RetryWithNewPacks(None, reload_occurred=False,
446
427
                                                 exc_info=sys.exc_info())
447
 
        # GZ 2010-08-10: Cycle with exc_info affects 3 tests
448
428
        return retry_exc
449
429
 
450
430
    def test_read_from_several_packs(self):
459
439
        memos.extend(access.add_raw_records([('key', 5)], 'alpha'))
460
440
        writer.end()
461
441
        transport = self.get_transport()
462
 
        access = pack_repo._DirectPackAccess({"FOO":(transport, 'packfile'),
 
442
        access = _DirectPackAccess({"FOO":(transport, 'packfile'),
463
443
            "FOOBAR":(transport, 'pack2'),
464
444
            "BAZ":(transport, 'pack3')})
465
445
        self.assertEqual(['1234567890', '12345', 'alpha'],
475
455
 
476
456
    def test_set_writer(self):
477
457
        """The writer should be settable post construction."""
478
 
        access = pack_repo._DirectPackAccess({})
 
458
        access = _DirectPackAccess({})
479
459
        transport = self.get_transport()
480
460
        packname = 'packfile'
481
461
        index = 'foo'
493
473
        transport = self.get_transport()
494
474
        reload_called, reload_func = self.make_reload_func()
495
475
        # Note that the index key has changed from 'foo' to 'bar'
496
 
        access = pack_repo._DirectPackAccess({'bar':(transport, 'packname')},
 
476
        access = _DirectPackAccess({'bar':(transport, 'packname')},
497
477
                                   reload_func=reload_func)
498
478
        e = self.assertListRaises(errors.RetryWithNewPacks,
499
479
                                  access.get_raw_records, memos)
508
488
        memos = self.make_pack_file()
509
489
        transport = self.get_transport()
510
490
        # Note that the index key has changed from 'foo' to 'bar'
511
 
        access = pack_repo._DirectPackAccess({'bar':(transport, 'packname')})
 
491
        access = _DirectPackAccess({'bar':(transport, 'packname')})
512
492
        e = self.assertListRaises(KeyError, access.get_raw_records, memos)
513
493
 
514
494
    def test_missing_file_raises_retry(self):
516
496
        transport = self.get_transport()
517
497
        reload_called, reload_func = self.make_reload_func()
518
498
        # Note that the 'filename' has been changed to 'different-packname'
519
 
        access = pack_repo._DirectPackAccess(
520
 
            {'foo':(transport, 'different-packname')},
521
 
            reload_func=reload_func)
 
499
        access = _DirectPackAccess({'foo':(transport, 'different-packname')},
 
500
                                   reload_func=reload_func)
522
501
        e = self.assertListRaises(errors.RetryWithNewPacks,
523
502
                                  access.get_raw_records, memos)
524
503
        # The file has gone missing, so we assume we need to reload
532
511
        memos = self.make_pack_file()
533
512
        transport = self.get_transport()
534
513
        # Note that the 'filename' has been changed to 'different-packname'
535
 
        access = pack_repo._DirectPackAccess(
536
 
            {'foo': (transport, 'different-packname')})
 
514
        access = _DirectPackAccess({'foo':(transport, 'different-packname')})
537
515
        e = self.assertListRaises(errors.NoSuchFile,
538
516
                                  access.get_raw_records, memos)
539
517
 
543
521
        failing_transport = MockReadvFailingTransport(
544
522
                                [transport.get_bytes('packname')])
545
523
        reload_called, reload_func = self.make_reload_func()
546
 
        access = pack_repo._DirectPackAccess(
547
 
            {'foo': (failing_transport, 'packname')},
548
 
            reload_func=reload_func)
 
524
        access = _DirectPackAccess({'foo':(failing_transport, 'packname')},
 
525
                                   reload_func=reload_func)
549
526
        # Asking for a single record will not trigger the Mock failure
550
527
        self.assertEqual(['1234567890'],
551
528
            list(access.get_raw_records(memos[:1])))
567
544
        failing_transport = MockReadvFailingTransport(
568
545
                                [transport.get_bytes('packname')])
569
546
        reload_called, reload_func = self.make_reload_func()
570
 
        access = pack_repo._DirectPackAccess(
571
 
            {'foo':(failing_transport, 'packname')})
 
547
        access = _DirectPackAccess({'foo':(failing_transport, 'packname')})
572
548
        # Asking for a single record will not trigger the Mock failure
573
549
        self.assertEqual(['1234567890'],
574
550
            list(access.get_raw_records(memos[:1])))
579
555
                                  access.get_raw_records, memos)
580
556
 
581
557
    def test_reload_or_raise_no_reload(self):
582
 
        access = pack_repo._DirectPackAccess({}, reload_func=None)
 
558
        access = _DirectPackAccess({}, reload_func=None)
583
559
        retry_exc = self.make_retry_exception()
584
560
        # Without a reload_func, we will just re-raise the original exception
585
561
        self.assertRaises(_TestException, access.reload_or_raise, retry_exc)
586
562
 
587
563
    def test_reload_or_raise_reload_changed(self):
588
564
        reload_called, reload_func = self.make_reload_func(return_val=True)
589
 
        access = pack_repo._DirectPackAccess({}, reload_func=reload_func)
 
565
        access = _DirectPackAccess({}, reload_func=reload_func)
590
566
        retry_exc = self.make_retry_exception()
591
567
        access.reload_or_raise(retry_exc)
592
568
        self.assertEqual([1], reload_called)
596
572
 
597
573
    def test_reload_or_raise_reload_no_change(self):
598
574
        reload_called, reload_func = self.make_reload_func(return_val=False)
599
 
        access = pack_repo._DirectPackAccess({}, reload_func=reload_func)
 
575
        access = _DirectPackAccess({}, reload_func=reload_func)
600
576
        retry_exc = self.make_retry_exception()
601
577
        # If reload_occurred is False, then we consider it an error to have
602
578
        # reload_func() return False (no changes).
733
709
 
734
710
    def make_multiple_records(self):
735
711
        """Create the content for multiple records."""
736
 
        sha1sum = osutils.sha_string('foo\nbar\n')
 
712
        sha1sum = osutils.sha('foo\nbar\n').hexdigest()
737
713
        total_txt = []
738
714
        gz_txt = self.create_gz_content('version rev-id-1 2 %s\n'
739
715
                                        'foo\n'
742
718
                                        % (sha1sum,))
743
719
        record_1 = (0, len(gz_txt), sha1sum)
744
720
        total_txt.append(gz_txt)
745
 
        sha1sum = osutils.sha_string('baz\n')
 
721
        sha1sum = osutils.sha('baz\n').hexdigest()
746
722
        gz_txt = self.create_gz_content('version rev-id-2 1 %s\n'
747
723
                                        'baz\n'
748
724
                                        'end rev-id-2\n'
752
728
        return total_txt, record_1, record_2
753
729
 
754
730
    def test_valid_knit_data(self):
755
 
        sha1sum = osutils.sha_string('foo\nbar\n')
 
731
        sha1sum = osutils.sha('foo\nbar\n').hexdigest()
756
732
        gz_txt = self.create_gz_content('version rev-id-1 2 %s\n'
757
733
                                        'foo\n'
758
734
                                        'bar\n'
789
765
                         raw_contents)
790
766
 
791
767
    def test_not_enough_lines(self):
792
 
        sha1sum = osutils.sha_string('foo\n')
 
768
        sha1sum = osutils.sha('foo\n').hexdigest()
793
769
        # record says 2 lines data says 1
794
770
        gz_txt = self.create_gz_content('version rev-id-1 2 %s\n'
795
771
                                        'foo\n'
807
783
        self.assertEqual([(('rev-id-1',),  gz_txt, sha1sum)], raw_contents)
808
784
 
809
785
    def test_too_many_lines(self):
810
 
        sha1sum = osutils.sha_string('foo\nbar\n')
 
786
        sha1sum = osutils.sha('foo\nbar\n').hexdigest()
811
787
        # record says 1 lines data says 2
812
788
        gz_txt = self.create_gz_content('version rev-id-1 1 %s\n'
813
789
                                        'foo\n'
826
802
        self.assertEqual([(('rev-id-1',), gz_txt, sha1sum)], raw_contents)
827
803
 
828
804
    def test_mismatched_version_id(self):
829
 
        sha1sum = osutils.sha_string('foo\nbar\n')
 
805
        sha1sum = osutils.sha('foo\nbar\n').hexdigest()
830
806
        gz_txt = self.create_gz_content('version rev-id-1 2 %s\n'
831
807
                                        'foo\n'
832
808
                                        'bar\n'
845
821
            knit._read_records_iter_raw(records))
846
822
 
847
823
    def test_uncompressed_data(self):
848
 
        sha1sum = osutils.sha_string('foo\nbar\n')
 
824
        sha1sum = osutils.sha('foo\nbar\n').hexdigest()
849
825
        txt = ('version rev-id-1 2 %s\n'
850
826
               'foo\n'
851
827
               'bar\n'
865
841
            knit._read_records_iter_raw(records))
866
842
 
867
843
    def test_corrupted_data(self):
868
 
        sha1sum = osutils.sha_string('foo\nbar\n')
 
844
        sha1sum = osutils.sha('foo\nbar\n').hexdigest()
869
845
        gz_txt = self.create_gz_content('version rev-id-1 2 %s\n'
870
846
                                        'foo\n'
871
847
                                        'bar\n'
888
864
 
889
865
    def get_knit_index(self, transport, name, mode):
890
866
        mapper = ConstantMapper(name)
 
867
        orig = knit._load_data
 
868
        def reset():
 
869
            knit._load_data = orig
 
870
        self.addCleanup(reset)
891
871
        from bzrlib._knit_load_data_py import _load_data_py
892
 
        self.overrideAttr(knit, '_load_data', _load_data_py)
 
872
        knit._load_data = _load_data_py
893
873
        allow_writes = lambda: 'w' in mode
894
874
        return _KndxIndex(transport, mapper, lambda:None, allow_writes, lambda:True)
895
875
 
1193
1173
            self.assertRaises(errors.KnitCorrupt, index.keys)
1194
1174
        except TypeError, e:
1195
1175
            if (str(e) == ('exceptions must be strings, classes, or instances,'
1196
 
                           ' not exceptions.IndexError')):
 
1176
                           ' not exceptions.IndexError')
 
1177
                and sys.version_info[0:2] >= (2,5)):
1197
1178
                self.knownFailure('Pyrex <0.9.5 fails with TypeError when'
1198
1179
                                  ' raising new style exceptions with python'
1199
1180
                                  ' >=2.5')
1212
1193
            self.assertRaises(errors.KnitCorrupt, index.keys)
1213
1194
        except TypeError, e:
1214
1195
            if (str(e) == ('exceptions must be strings, classes, or instances,'
1215
 
                           ' not exceptions.ValueError')):
 
1196
                           ' not exceptions.ValueError')
 
1197
                and sys.version_info[0:2] >= (2,5)):
1216
1198
                self.knownFailure('Pyrex <0.9.5 fails with TypeError when'
1217
1199
                                  ' raising new style exceptions with python'
1218
1200
                                  ' >=2.5')
1231
1213
            self.assertRaises(errors.KnitCorrupt, index.keys)
1232
1214
        except TypeError, e:
1233
1215
            if (str(e) == ('exceptions must be strings, classes, or instances,'
1234
 
                           ' not exceptions.ValueError')):
 
1216
                           ' not exceptions.ValueError')
 
1217
                and sys.version_info[0:2] >= (2,5)):
1235
1218
                self.knownFailure('Pyrex <0.9.5 fails with TypeError when'
1236
1219
                                  ' raising new style exceptions with python'
1237
1220
                                  ' >=2.5')
1248
1231
            self.assertRaises(errors.KnitCorrupt, index.keys)
1249
1232
        except TypeError, e:
1250
1233
            if (str(e) == ('exceptions must be strings, classes, or instances,'
1251
 
                           ' not exceptions.ValueError')):
 
1234
                           ' not exceptions.ValueError')
 
1235
                and sys.version_info[0:2] >= (2,5)):
1252
1236
                self.knownFailure('Pyrex <0.9.5 fails with TypeError when'
1253
1237
                                  ' raising new style exceptions with python'
1254
1238
                                  ' >=2.5')
1265
1249
            self.assertRaises(errors.KnitCorrupt, index.keys)
1266
1250
        except TypeError, e:
1267
1251
            if (str(e) == ('exceptions must be strings, classes, or instances,'
1268
 
                           ' not exceptions.ValueError')):
 
1252
                           ' not exceptions.ValueError')
 
1253
                and sys.version_info[0:2] >= (2,5)):
1269
1254
                self.knownFailure('Pyrex <0.9.5 fails with TypeError when'
1270
1255
                                  ' raising new style exceptions with python'
1271
1256
                                  ' >=2.5')
1315
1300
 
1316
1301
class LowLevelKnitIndexTests_c(LowLevelKnitIndexTests):
1317
1302
 
1318
 
    _test_needs_features = [compiled_knit_feature]
 
1303
    _test_needs_features = [CompiledKnitFeature]
1319
1304
 
1320
1305
    def get_knit_index(self, transport, name, mode):
1321
1306
        mapper = ConstantMapper(name)
1322
 
        from bzrlib._knit_load_data_pyx import _load_data_c
1323
 
        self.overrideAttr(knit, '_load_data', _load_data_c)
 
1307
        orig = knit._load_data
 
1308
        def reset():
 
1309
            knit._load_data = orig
 
1310
        self.addCleanup(reset)
 
1311
        from bzrlib._knit_load_data_c import _load_data_c
 
1312
        knit._load_data = _load_data_c
1324
1313
        allow_writes = lambda: mode == 'w'
1325
 
        return _KndxIndex(transport, mapper, lambda:None,
1326
 
                          allow_writes, lambda:True)
1327
 
 
1328
 
 
1329
 
class Test_KnitAnnotator(TestCaseWithMemoryTransport):
1330
 
 
1331
 
    def make_annotator(self):
1332
 
        factory = knit.make_pack_factory(True, True, 1)
1333
 
        vf = factory(self.get_transport())
1334
 
        return knit._KnitAnnotator(vf)
1335
 
 
1336
 
    def test__expand_fulltext(self):
1337
 
        ann = self.make_annotator()
1338
 
        rev_key = ('rev-id',)
1339
 
        ann._num_compression_children[rev_key] = 1
1340
 
        res = ann._expand_record(rev_key, (('parent-id',),), None,
1341
 
                           ['line1\n', 'line2\n'], ('fulltext', True))
1342
 
        # The content object and text lines should be cached appropriately
1343
 
        self.assertEqual(['line1\n', 'line2'], res)
1344
 
        content_obj = ann._content_objects[rev_key]
1345
 
        self.assertEqual(['line1\n', 'line2\n'], content_obj._lines)
1346
 
        self.assertEqual(res, content_obj.text())
1347
 
        self.assertEqual(res, ann._text_cache[rev_key])
1348
 
 
1349
 
    def test__expand_delta_comp_parent_not_available(self):
1350
 
        # Parent isn't available yet, so we return nothing, but queue up this
1351
 
        # node for later processing
1352
 
        ann = self.make_annotator()
1353
 
        rev_key = ('rev-id',)
1354
 
        parent_key = ('parent-id',)
1355
 
        record = ['0,1,1\n', 'new-line\n']
1356
 
        details = ('line-delta', False)
1357
 
        res = ann._expand_record(rev_key, (parent_key,), parent_key,
1358
 
                                 record, details)
1359
 
        self.assertEqual(None, res)
1360
 
        self.assertTrue(parent_key in ann._pending_deltas)
1361
 
        pending = ann._pending_deltas[parent_key]
1362
 
        self.assertEqual(1, len(pending))
1363
 
        self.assertEqual((rev_key, (parent_key,), record, details), pending[0])
1364
 
 
1365
 
    def test__expand_record_tracks_num_children(self):
1366
 
        ann = self.make_annotator()
1367
 
        rev_key = ('rev-id',)
1368
 
        rev2_key = ('rev2-id',)
1369
 
        parent_key = ('parent-id',)
1370
 
        record = ['0,1,1\n', 'new-line\n']
1371
 
        details = ('line-delta', False)
1372
 
        ann._num_compression_children[parent_key] = 2
1373
 
        ann._expand_record(parent_key, (), None, ['line1\n', 'line2\n'],
1374
 
                           ('fulltext', False))
1375
 
        res = ann._expand_record(rev_key, (parent_key,), parent_key,
1376
 
                                 record, details)
1377
 
        self.assertEqual({parent_key: 1}, ann._num_compression_children)
1378
 
        # Expanding the second child should remove the content object, and the
1379
 
        # num_compression_children entry
1380
 
        res = ann._expand_record(rev2_key, (parent_key,), parent_key,
1381
 
                                 record, details)
1382
 
        self.assertFalse(parent_key in ann._content_objects)
1383
 
        self.assertEqual({}, ann._num_compression_children)
1384
 
        # We should not cache the content_objects for rev2 and rev, because
1385
 
        # they do not have compression children of their own.
1386
 
        self.assertEqual({}, ann._content_objects)
1387
 
 
1388
 
    def test__expand_delta_records_blocks(self):
1389
 
        ann = self.make_annotator()
1390
 
        rev_key = ('rev-id',)
1391
 
        parent_key = ('parent-id',)
1392
 
        record = ['0,1,1\n', 'new-line\n']
1393
 
        details = ('line-delta', True)
1394
 
        ann._num_compression_children[parent_key] = 2
1395
 
        ann._expand_record(parent_key, (), None,
1396
 
                           ['line1\n', 'line2\n', 'line3\n'],
1397
 
                           ('fulltext', False))
1398
 
        ann._expand_record(rev_key, (parent_key,), parent_key, record, details)
1399
 
        self.assertEqual({(rev_key, parent_key): [(1, 1, 1), (3, 3, 0)]},
1400
 
                         ann._matching_blocks)
1401
 
        rev2_key = ('rev2-id',)
1402
 
        record = ['0,1,1\n', 'new-line\n']
1403
 
        details = ('line-delta', False)
1404
 
        ann._expand_record(rev2_key, (parent_key,), parent_key, record, details)
1405
 
        self.assertEqual([(1, 1, 2), (3, 3, 0)],
1406
 
                         ann._matching_blocks[(rev2_key, parent_key)])
1407
 
 
1408
 
    def test__get_parent_ann_uses_matching_blocks(self):
1409
 
        ann = self.make_annotator()
1410
 
        rev_key = ('rev-id',)
1411
 
        parent_key = ('parent-id',)
1412
 
        parent_ann = [(parent_key,)]*3
1413
 
        block_key = (rev_key, parent_key)
1414
 
        ann._annotations_cache[parent_key] = parent_ann
1415
 
        ann._matching_blocks[block_key] = [(0, 1, 1), (3, 3, 0)]
1416
 
        # We should not try to access any parent_lines content, because we know
1417
 
        # we already have the matching blocks
1418
 
        par_ann, blocks = ann._get_parent_annotations_and_matches(rev_key,
1419
 
                                        ['1\n', '2\n', '3\n'], parent_key)
1420
 
        self.assertEqual(parent_ann, par_ann)
1421
 
        self.assertEqual([(0, 1, 1), (3, 3, 0)], blocks)
1422
 
        self.assertEqual({}, ann._matching_blocks)
1423
 
 
1424
 
    def test__process_pending(self):
1425
 
        ann = self.make_annotator()
1426
 
        rev_key = ('rev-id',)
1427
 
        p1_key = ('p1-id',)
1428
 
        p2_key = ('p2-id',)
1429
 
        record = ['0,1,1\n', 'new-line\n']
1430
 
        details = ('line-delta', False)
1431
 
        p1_record = ['line1\n', 'line2\n']
1432
 
        ann._num_compression_children[p1_key] = 1
1433
 
        res = ann._expand_record(rev_key, (p1_key,p2_key), p1_key,
1434
 
                                 record, details)
1435
 
        self.assertEqual(None, res)
1436
 
        # self.assertTrue(p1_key in ann._pending_deltas)
1437
 
        self.assertEqual({}, ann._pending_annotation)
1438
 
        # Now insert p1, and we should be able to expand the delta
1439
 
        res = ann._expand_record(p1_key, (), None, p1_record,
1440
 
                                 ('fulltext', False))
1441
 
        self.assertEqual(p1_record, res)
1442
 
        ann._annotations_cache[p1_key] = [(p1_key,)]*2
1443
 
        res = ann._process_pending(p1_key)
1444
 
        self.assertEqual([], res)
1445
 
        self.assertFalse(p1_key in ann._pending_deltas)
1446
 
        self.assertTrue(p2_key in ann._pending_annotation)
1447
 
        self.assertEqual({p2_key: [(rev_key, (p1_key, p2_key))]},
1448
 
                         ann._pending_annotation)
1449
 
        # Now fill in parent 2, and pending annotation should be satisfied
1450
 
        res = ann._expand_record(p2_key, (), None, [], ('fulltext', False))
1451
 
        ann._annotations_cache[p2_key] = []
1452
 
        res = ann._process_pending(p2_key)
1453
 
        self.assertEqual([rev_key], res)
1454
 
        self.assertEqual({}, ann._pending_annotation)
1455
 
        self.assertEqual({}, ann._pending_deltas)
1456
 
 
1457
 
    def test_record_delta_removes_basis(self):
1458
 
        ann = self.make_annotator()
1459
 
        ann._expand_record(('parent-id',), (), None,
1460
 
                           ['line1\n', 'line2\n'], ('fulltext', False))
1461
 
        ann._num_compression_children['parent-id'] = 2
1462
 
 
1463
 
    def test_annotate_special_text(self):
1464
 
        ann = self.make_annotator()
1465
 
        vf = ann._vf
1466
 
        rev1_key = ('rev-1',)
1467
 
        rev2_key = ('rev-2',)
1468
 
        rev3_key = ('rev-3',)
1469
 
        spec_key = ('special:',)
1470
 
        vf.add_lines(rev1_key, [], ['initial content\n'])
1471
 
        vf.add_lines(rev2_key, [rev1_key], ['initial content\n',
1472
 
                                            'common content\n',
1473
 
                                            'content in 2\n'])
1474
 
        vf.add_lines(rev3_key, [rev1_key], ['initial content\n',
1475
 
                                            'common content\n',
1476
 
                                            'content in 3\n'])
1477
 
        spec_text = ('initial content\n'
1478
 
                     'common content\n'
1479
 
                     'content in 2\n'
1480
 
                     'content in 3\n')
1481
 
        ann.add_special_text(spec_key, [rev2_key, rev3_key], spec_text)
1482
 
        anns, lines = ann.annotate(spec_key)
1483
 
        self.assertEqual([(rev1_key,),
1484
 
                          (rev2_key, rev3_key),
1485
 
                          (rev2_key,),
1486
 
                          (rev3_key,),
1487
 
                         ], anns)
1488
 
        self.assertEqualDiff(spec_text, ''.join(lines))
 
1314
        return _KndxIndex(transport, mapper, lambda:None, allow_writes, lambda:True)
1489
1315
 
1490
1316
 
1491
1317
class KnitTests(TestCaseWithTransport):
1600
1426
        # could leave an empty .kndx file, which bzr would later claim was a
1601
1427
        # corrupted file since the header was not present. In reality, the file
1602
1428
        # just wasn't created, so it should be ignored.
1603
 
        t = transport.get_transport('.')
 
1429
        t = get_transport('.')
1604
1430
        t.put_bytes('test.kndx', '')
1605
1431
 
1606
1432
        knit = self.make_test_knit()
1607
1433
 
1608
1434
    def test_knit_index_checks_header(self):
1609
 
        t = transport.get_transport('.')
 
1435
        t = get_transport('.')
1610
1436
        t.put_bytes('test.kndx', '# not really a knit header\n\n')
1611
1437
        k = self.make_test_knit()
1612
1438
        self.assertRaises(KnitHeaderError, k.keys)
2234
2060
        # self.assertEqual([("annotate", key_basis)], basis.calls)
2235
2061
        self.assertEqual([('get_parent_map', set([key_basis])),
2236
2062
            ('get_parent_map', set([key_basis])),
2237
 
            ('get_record_stream', [key_basis], 'topological', True)],
 
2063
            ('get_parent_map', set([key_basis])),
 
2064
            ('get_record_stream', [key_basis], 'unordered', True)],
2238
2065
            basis.calls)
2239
2066
 
2240
2067
    def test_check(self):
2346
2173
        # ask which fallbacks have which parents.
2347
2174
        self.assertEqual([
2348
2175
            ("get_parent_map", set([key_basis, key_basis_2, key_missing])),
2349
 
            # topological is requested from the fallback, because that is what
2350
 
            # was requested at the top level.
2351
 
            ("get_record_stream", [key_basis_2, key_basis], 'topological', True)],
 
2176
            # unordered is asked for by the underlying worker as it still
 
2177
            # buffers everything while answering - which is a problem!
 
2178
            ("get_record_stream", [key_basis_2, key_basis], 'unordered', True)],
2352
2179
            calls)
2353
2180
 
2354
2181
    def test_get_record_stream_unordered_deltas(self):
2440
2267
        key_basis = ('bar',)
2441
2268
        key_missing = ('missing',)
2442
2269
        test.add_lines(key, (), ['foo\n'])
2443
 
        key_sha1sum = osutils.sha_string('foo\n')
 
2270
        key_sha1sum = osutils.sha('foo\n').hexdigest()
2444
2271
        sha1s = test.get_sha1s([key])
2445
2272
        self.assertEqual({key: key_sha1sum}, sha1s)
2446
2273
        self.assertEqual([], basis.calls)
2448
2275
        # directly (rather than via text reconstruction) so that remote servers
2449
2276
        # etc don't have to answer with full content.
2450
2277
        basis.add_lines(key_basis, (), ['foo\n', 'bar\n'])
2451
 
        basis_sha1sum = osutils.sha_string('foo\nbar\n')
 
2278
        basis_sha1sum = osutils.sha('foo\nbar\n').hexdigest()
2452
2279
        basis.calls = []
2453
2280
        sha1s = test.get_sha1s([key, key_missing, key_basis])
2454
2281
        self.assertEqual({key: key_sha1sum,
2575
2402
        last_call = basis.calls[-1]
2576
2403
        self.assertEqual('get_record_stream', last_call[0])
2577
2404
        self.assertEqual(set([key_left, key_right]), set(last_call[1]))
2578
 
        self.assertEqual('topological', last_call[2])
 
2405
        self.assertEqual('unordered', last_call[2])
2579
2406
        self.assertEqual(True, last_call[3])
2580
2407
 
2581
2408