~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/repofmt/groupcompress_repo.py

  • Committer: Colin Watson
  • Date: 2015-07-02 11:30:47 UTC
  • mto: This revision was merged to the branch mainline in revision 6605.
  • Revision ID: cjwatson@canonical.com-20150702113047-359s4zsi07wvfwso
Use assertLength.
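
(For reference: assertLength is the bzrlib test helper the commit message refers to. It replaces the assertEqual(n, len(x)) pattern and, on failure, reports the collection itself rather than just a length mismatch. A minimal sketch of the idiom; the test class and data below are illustrative, not part of this diff:

    from bzrlib.tests import TestCase

    class TestExample(TestCase):  # hypothetical test case, for illustration

        def test_pack_count(self):
            packs = ['pack-1', 'pack-2']  # stand-in for a real collection
            # Old idiom: a failure says only "2 != 1".
            # self.assertEqual(2, len(packs))
            # New idiom: the failure message includes the full collection.
            self.assertLength(2, packs)
)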

--- bzrlib/repofmt/groupcompress_repo.py (old)
+++ bzrlib/repofmt/groupcompress_repo.py (new)
@@ -1,4 +1,4 @@
-# Copyright (C) 2008, 2009, 2010 Canonical Ltd
+# Copyright (C) 2008-2011 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -16,42 +16,48 @@
 
 """Repository formats using CHK inventories and groupcompress compression."""
 
+from __future__ import absolute_import
+
 import time
 
 from bzrlib import (
-    bzrdir,
+    controldir,
     chk_map,
     chk_serializer,
     debug,
     errors,
     index as _mod_index,
     inventory,
-    knit,
     osutils,
     pack,
     revision as _mod_revision,
     trace,
     ui,
+    versionedfile,
     )
 from bzrlib.btree_index import (
     BTreeGraphIndex,
     BTreeBuilder,
     )
+from bzrlib.decorators import needs_write_lock
 from bzrlib.groupcompress import (
     _GCGraphIndex,
     GroupCompressVersionedFiles,
     )
 from bzrlib.repofmt.pack_repo import (
+    _DirectPackAccess,
     Pack,
     NewPack,
-    KnitPackRepository,
-    KnitPackStreamSource,
+    PackRepository,
     PackRootCommitBuilder,
     RepositoryPackCollection,
     RepositoryFormatPack,
     ResumedPack,
     Packer,
     )
+from bzrlib.vf_repository import (
+    StreamSource,
+    )
 from bzrlib.static_tuple import StaticTuple
 
 
@@ -262,7 +268,6 @@
         remaining_keys = set(keys)
         counter = [0]
         if self._gather_text_refs:
-            bytes_to_info = inventory.CHKInventory._bytes_to_utf8name_key
             self._text_refs = set()
         def _get_referenced_stream(root_keys, parse_leaf_nodes=False):
             cur_keys = root_keys
@@ -289,8 +294,7 @@
                     # Store is None, because we know we have a LeafNode, and we
                     # just want its entries
                     for file_id, bytes in node.iteritems(None):
-                        name_utf8, file_id, revision_id = bytes_to_info(bytes)
-                        self._text_refs.add((file_id, revision_id))
+                        self._text_refs.add(chk_map._bytes_to_text_key(bytes))
                 def next_stream():
                     stream = source_vf.get_record_stream(cur_keys,
                                                          'as-requested', True)
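
A note on the hunk above: it replaces the two-step parse (inventory.CHKInventory._bytes_to_utf8name_key, then building the key by hand) with a single chk_map._bytes_to_text_key call that returns the interned (file_id, revision_id) key directly. A sketch of what the old call sites were doing, purely for orientation; this is not bzrlib's implementation:

    from bzrlib import inventory
    from bzrlib.static_tuple import StaticTuple

    def _text_key_old_style(bytes):
        # Unpack (name_utf8, file_id, revision_id) from a serialized CHK
        # inventory entry; the name is unused at these call sites.
        _, file_id, revision_id = \
            inventory.CHKInventory._bytes_to_utf8name_key(bytes)
        # Intern the components so repeated keys share storage.
        return StaticTuple(intern(file_id), intern(revision_id)).intern()

chk_map._bytes_to_text_key fuses this into one helper call, which matters because it runs once per inventory entry during pack and fetch operations.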
@@ -352,8 +356,8 @@
         """Build a VersionedFiles instance on top of this group of packs."""
         index_name = index_name + '_index'
         index_to_pack = {}
-        access = knit._DirectPackAccess(index_to_pack,
-                                        reload_func=self._reload_func)
+        access = _DirectPackAccess(index_to_pack,
+                                   reload_func=self._reload_func)
         if for_write:
             # Use new_pack
             if self.new_pack is None:
@@ -421,14 +425,26 @@
         inventory_keys = source_vf.keys()
         missing_inventories = set(self.revision_keys).difference(inventory_keys)
         if missing_inventories:
-            missing_inventories = sorted(missing_inventories)
-            raise ValueError('We are missing inventories for revisions: %s'
-                % (missing_inventories,))
+            # Go back to the original repo, to see if these are really missing
+            # https://bugs.launchpad.net/bzr/+bug/437003
+            # If we are packing a subset of the repo, it is fine to just have
+            # the data in another Pack file, which is not included in this pack
+            # operation.
+            inv_index = self._pack_collection.repo.inventories._index
+            pmap = inv_index.get_parent_map(missing_inventories)
+            really_missing = missing_inventories.difference(pmap)
+            if really_missing:
+                missing_inventories = sorted(really_missing)
+                raise ValueError('We are missing inventories for revisions: %s'
+                    % (missing_inventories,))
         self._copy_stream(source_vf, target_vf, inventory_keys,
                           'inventories', self._get_filtered_inv_stream, 2)
 
+    def _get_chk_vfs_for_copy(self):
+        return self._build_vfs('chk', False, False)
+
     def _copy_chk_texts(self):
-        source_vf, target_vf = self._build_vfs('chk', False, False)
+        source_vf, target_vf = self._get_chk_vfs_for_copy()
         # TODO: This is technically spurious... if it is a performance issue,
         #       remove it
         total_keys = source_vf.keys()
@@ -580,10 +596,117 @@
         return new_pack.data_inserted() and self._data_changed
 
 
+class GCCHKCanonicalizingPacker(GCCHKPacker):
+    """A packer that ensures inventories have canonical-form CHK maps.
+
+    Ideally this would be part of reconcile, but it's very slow and rarely
+    needed.  (It repairs repositories affected by
+    https://bugs.launchpad.net/bzr/+bug/522637).
+    """
+
+    def __init__(self, *args, **kwargs):
+        super(GCCHKCanonicalizingPacker, self).__init__(*args, **kwargs)
+        self._data_changed = False
+
+    def _exhaust_stream(self, source_vf, keys, message, vf_to_stream, pb_offset):
+        """Create and exhaust a stream, but don't insert it.
+
+        This is useful to get the side-effects of generating a stream.
+        """
+        self.pb.update('scanning %s' % (message,), pb_offset)
+        child_pb = ui.ui_factory.nested_progress_bar()
+        try:
+            list(vf_to_stream(source_vf, keys, message, child_pb))
+        finally:
+            child_pb.finished()
+
+    def _copy_inventory_texts(self):
+        source_vf, target_vf = self._build_vfs('inventory', True, True)
+        source_chk_vf, target_chk_vf = self._get_chk_vfs_for_copy()
+        inventory_keys = source_vf.keys()
+        # First, copy the existing CHKs on the assumption that most of them
+        # will be correct.  This will save us from having to reinsert (and
+        # recompress) these records later at the cost of perhaps preserving a
+        # few unused CHKs.
+        # (Iterate but don't insert _get_filtered_inv_stream to populate the
+        # variables needed by GCCHKPacker._copy_chk_texts.)
+        self._exhaust_stream(source_vf, inventory_keys, 'inventories',
+                self._get_filtered_inv_stream, 2)
+        GCCHKPacker._copy_chk_texts(self)
+        # Now copy and fix the inventories, and any regenerated CHKs.
+        def chk_canonicalizing_inv_stream(source_vf, keys, message, pb=None):
+            return self._get_filtered_canonicalizing_inv_stream(
+                source_vf, keys, message, pb, source_chk_vf, target_chk_vf)
+        self._copy_stream(source_vf, target_vf, inventory_keys,
+                          'inventories', chk_canonicalizing_inv_stream, 4)
+
+    def _copy_chk_texts(self):
+        # No-op; in this class this happens during _copy_inventory_texts.
+        pass
+
+    def _get_filtered_canonicalizing_inv_stream(self, source_vf, keys, message,
+            pb=None, source_chk_vf=None, target_chk_vf=None):
+        """Filter the texts of inventories, regenerating CHKs to make sure they
+        are canonical.
+        """
+        total_keys = len(keys)
+        target_chk_vf = versionedfile.NoDupeAddLinesDecorator(target_chk_vf)
+        def _filtered_inv_stream():
+            stream = source_vf.get_record_stream(keys, 'groupcompress', True)
+            search_key_name = None
+            for idx, record in enumerate(stream):
+                # Inventories should always be with revisions; assume success.
+                bytes = record.get_bytes_as('fulltext')
+                chk_inv = inventory.CHKInventory.deserialise(
+                    source_chk_vf, bytes, record.key)
+                if pb is not None:
+                    pb.update('inv', idx, total_keys)
+                chk_inv.id_to_entry._ensure_root()
+                if search_key_name is None:
+                    # Find the name corresponding to the search_key_func
+                    search_key_reg = chk_map.search_key_registry
+                    for search_key_name, func in search_key_reg.iteritems():
+                        if func == chk_inv.id_to_entry._search_key_func:
+                            break
+                canonical_inv = inventory.CHKInventory.from_inventory(
+                    target_chk_vf, chk_inv,
+                    maximum_size=chk_inv.id_to_entry._root_node._maximum_size,
+                    search_key_name=search_key_name)
+                if chk_inv.id_to_entry.key() != canonical_inv.id_to_entry.key():
+                    trace.mutter(
+                        'Non-canonical CHK map for id_to_entry of inv: %s '
+                        '(root is %s, should be %s)' % (chk_inv.revision_id,
+                        chk_inv.id_to_entry.key()[0],
+                        canonical_inv.id_to_entry.key()[0]))
+                    self._data_changed = True
+                p_id_map = chk_inv.parent_id_basename_to_file_id
+                p_id_map._ensure_root()
+                canon_p_id_map = canonical_inv.parent_id_basename_to_file_id
+                if p_id_map.key() != canon_p_id_map.key():
+                    trace.mutter(
+                        'Non-canonical CHK map for parent_id_to_basename of '
+                        'inv: %s (root is %s, should be %s)'
+                        % (chk_inv.revision_id, p_id_map.key()[0],
+                           canon_p_id_map.key()[0]))
+                    self._data_changed = True
+                yield versionedfile.ChunkedContentFactory(record.key,
+                        record.parents, record.sha1,
+                        canonical_inv.to_lines())
+            # We have finished processing all of the inventory records, we
+            # don't need these sets anymore
+        return _filtered_inv_stream()
+
+    def _use_pack(self, new_pack):
+        """Override _use_pack to check for reconcile having changed content."""
+        return new_pack.data_inserted() and self._data_changed
+
+
 class GCRepositoryPackCollection(RepositoryPackCollection):
 
     pack_factory = GCPack
     resumed_pack_factory = ResumedGCPack
+    normal_packer_class = GCCHKPacker
+    optimising_packer_class = GCCHKPacker
 
     def _check_new_inventories(self):
         """Detect missing inventories or chk root entries for the new revisions
@@ -635,7 +758,10 @@
             expected_chk_roots)
         missing_chk_roots = expected_chk_roots.difference(present_chk_roots)
         if missing_chk_roots:
-            problems.append("missing referenced chk root keys: %s"
+            problems.append(
+                "missing referenced chk root keys: %s."
+                "Run 'bzr reconcile --canonicalize-chks' on the affected "
+                "repository."
                 % (sorted(missing_chk_roots),))
             # Don't bother checking any further.
             return problems
@@ -647,10 +773,10 @@
         chk_diff = chk_map.iter_interesting_nodes(
             chk_bytes_no_fallbacks, root_key_info.interesting_root_keys,
             root_key_info.uninteresting_root_keys)
-        bytes_to_info = inventory.CHKInventory._bytes_to_utf8name_key
         text_keys = set()
         try:
-            for record in _filter_text_keys(chk_diff, text_keys, bytes_to_info):
+            for record in _filter_text_keys(chk_diff, text_keys,
+                                            chk_map._bytes_to_text_key):
                 pass
         except errors.NoSuchRevision, e:
             # XXX: It would be nice if we could give a more precise error here.
@@ -671,56 +797,15 @@
                 % (sorted(missing_text_keys),))
         return problems
 
-    def _execute_pack_operations(self, pack_operations,
-                                 _packer_class=GCCHKPacker,
-                                 reload_func=None):
-        """Execute a series of pack operations.
-
-        :param pack_operations: A list of [revision_count, packs_to_combine].
-        :param _packer_class: The class of packer to use (default: Packer).
-        :return: None.
-        """
-        # XXX: Copied across from RepositoryPackCollection simply because we
-        #      want to override the _packer_class ... :(
-        for revision_count, packs in pack_operations:
-            # we may have no-ops from the setup logic
-            if len(packs) == 0:
-                continue
-            packer = GCCHKPacker(self, packs, '.autopack',
-                                 reload_func=reload_func)
-            try:
-                result = packer.pack()
-            except errors.RetryWithNewPacks:
-                # An exception is propagating out of this context, make sure
-                # this packer has cleaned up. Packer() doesn't set its new_pack
-                # state into the RepositoryPackCollection object, so we only
-                # have access to it directly here.
-                if packer.new_pack is not None:
-                    packer.new_pack.abort()
-                raise
-            if result is None:
-                return
-            for pack in packs:
-                self._remove_pack_from_memory(pack)
-        # record the newly available packs and stop advertising the old
-        # packs
-        to_be_obsoleted = []
-        for _, packs in pack_operations:
-            to_be_obsoleted.extend(packs)
-        result = self._save_pack_names(clear_obsolete_packs=True,
-                                       obsolete_packs=to_be_obsoleted)
-        return result
-
-
-class CHKInventoryRepository(KnitPackRepository):
-    """subclass of KnitPackRepository that uses CHK based inventories."""
+
+class CHKInventoryRepository(PackRepository):
+    """subclass of PackRepository that uses CHK based inventories."""
 
     def __init__(self, _format, a_bzrdir, control_files, _commit_builder_class,
         _serializer):
         """Overridden to change pack collection class."""
-        KnitPackRepository.__init__(self, _format, a_bzrdir, control_files,
-            _commit_builder_class, _serializer)
-        # and now replace everything it did :)
+        super(CHKInventoryRepository, self).__init__(_format, a_bzrdir,
+            control_files, _commit_builder_class, _serializer)
         index_transport = self._transport.clone('indices')
         self._pack_collection = GCRepositoryPackCollection(self,
             self._transport, index_transport,
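
Note that the hunk above deletes the copied _execute_pack_operations override entirely: the base RepositoryPackCollection in bzrlib.repofmt.pack_repo now consults packer-class attributes (see the normal_packer_class / optimising_packer_class lines added to GCRepositoryPackCollection earlier in this diff), so the subclass selects its packer declaratively. Roughly, and assuming a heavily simplified base class, the pattern is:

    class RepositoryPackCollection(object):
        # The base class instantiates whichever packer the subclass names
        # (sketch only; the real method also handles retries and cleanup).
        normal_packer_class = None

        def _execute_pack_operations(self, pack_operations, reload_func=None):
            for revision_count, packs in pack_operations:
                if not packs:
                    continue  # no-ops from the setup logic
                packer = self.normal_packer_class(self, packs, '.autopack',
                                                  reload_func=reload_func)
                packer.pack()

    class GCRepositoryPackCollection(RepositoryPackCollection):
        # The groupcompress subclass just swaps in its packer.
        normal_packer_class = GCCHKPacker
        optimising_packer_class = GCCHKPacker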
@@ -864,13 +949,15 @@
         if basis_inv is None:
             if basis_revision_id == _mod_revision.NULL_REVISION:
                 new_inv = self._create_inv_from_null(delta, new_revision_id)
+                if new_inv.root_id is None:
+                    raise errors.RootMissing()
                 inv_lines = new_inv.to_lines()
                 return self._inventory_add_lines(new_revision_id, parents,
                     inv_lines, check_content=False), new_inv
             else:
                 basis_tree = self.revision_tree(basis_revision_id)
                 basis_tree.lock_read()
-                basis_inv = basis_tree.inventory
+                basis_inv = basis_tree.root_inventory
         try:
             result = basis_inv.create_by_apply_delta(delta, new_revision_id,
                 propagate_caches=propagate_caches)
@@ -896,20 +983,24 @@
             if record.storage_kind != 'absent':
                 texts[record.key] = record.get_bytes_as('fulltext')
             else:
-                raise errors.NoSuchRevision(self, record.key)
+                texts[record.key] = None
         for key in keys:
-            yield inventory.CHKInventory.deserialise(self.chk_bytes, texts[key], key)
+            bytes = texts[key]
+            if bytes is None:
+                yield (None, key[-1])
+            else:
+                yield (inventory.CHKInventory.deserialise(
+                    self.chk_bytes, bytes, key), key[-1])
 
-    def _iter_inventory_xmls(self, revision_ids, ordering):
+    def _get_inventory_xml(self, revision_id):
+        """Get serialized inventory as a string."""
         # Without a native 'xml' inventory, this method doesn't make sense.
         # However older working trees, and older bundles want it - so we supply
         # it allowing _get_inventory_xml to work. Bundles currently use the
         # serializer directly; this also isn't ideal, but there isn't an xml
-        # iteration interface offered at all for repositories. We could make
-        # _iter_inventory_xmls be part of the contract, even if kept private.
-        inv_to_str = self._serializer.write_inventory_to_string
-        for inv in self.iter_inventories(revision_ids, ordering=ordering):
-            yield inv_to_str(inv), inv.revision_id
+        # iteration interface offered at all for repositories.
+        return self._serializer.write_inventory_to_string(
+            self.get_inventory(revision_id))
 
     def _find_present_inventory_keys(self, revision_keys):
         parent_map = self.inventories.get_parent_map(revision_keys)
@@ -999,10 +1090,24 @@
         finally:
             pb.finished()
 
+    @needs_write_lock
+    def reconcile_canonicalize_chks(self):
+        """Reconcile this repository to make sure all CHKs are in canonical
+        form.
+        """
+        from bzrlib.reconcile import PackReconciler
+        reconciler = PackReconciler(self, thorough=True, canonicalize_chks=True)
+        reconciler.reconcile()
+        return reconciler
+
     def _reconcile_pack(self, collection, packs, extension, revs, pb):
         packer = GCCHKReconcilePacker(collection, packs, extension)
         return packer.pack(pb)
 
+    def _canonicalize_chks_pack(self, collection, packs, extension, revs, pb):
+        packer = GCCHKCanonicalizingPacker(collection, packs, extension, revs)
+        return packer.pack(pb)
+
     def _get_source(self, to_format):
         """Return a source for streaming from this repository."""
         if self._format._serializer == to_format._serializer:
@@ -1013,8 +1118,39 @@
             return GroupCHKStreamSource(self, to_format)
         return super(CHKInventoryRepository, self)._get_source(to_format)
 
-
-class GroupCHKStreamSource(KnitPackStreamSource):
+    def _find_inconsistent_revision_parents(self, revisions_iterator=None):
+        """Find revisions with different parent lists in the revision object
+        and in the index graph.
+
+        :param revisions_iterator: None, or an iterator of (revid,
+            Revision-or-None). This iterator controls the revisions checked.
+        :returns: an iterator yielding tuples of (revison-id, parents-in-index,
+            parents-in-revision).
+        """
+        if not self.is_locked():
+            raise AssertionError()
+        vf = self.revisions
+        if revisions_iterator is None:
+            revisions_iterator = self._iter_revisions(None)
+        for revid, revision in revisions_iterator:
+            if revision is None:
+                pass
+            parent_map = vf.get_parent_map([(revid,)])
+            parents_according_to_index = tuple(parent[-1] for parent in
+                parent_map[(revid,)])
+            parents_according_to_revision = tuple(revision.parent_ids)
+            if parents_according_to_index != parents_according_to_revision:
+                yield (revid, parents_according_to_index,
+                    parents_according_to_revision)
+
+    def _check_for_inconsistent_revision_parents(self):
+        inconsistencies = list(self._find_inconsistent_revision_parents())
+        if inconsistencies:
+            raise errors.BzrCheckError(
+                "Revision index has inconsistent parents.")
+
+
+class GroupCHKStreamSource(StreamSource):
     """Used when both the source and target repo are GroupCHK repos."""
 
     def __init__(self, from_repository, to_format):
@@ -1087,13 +1223,12 @@
                 uninteresting_root_keys.add(inv.id_to_entry.key())
                 uninteresting_pid_root_keys.add(
                     inv.parent_id_basename_to_file_id.key())
-        bytes_to_info = inventory.CHKInventory._bytes_to_utf8name_key
         chk_bytes = self.from_repository.chk_bytes
         def _filter_id_to_entry():
             interesting_nodes = chk_map.iter_interesting_nodes(chk_bytes,
                         self._chk_id_roots, uninteresting_root_keys)
             for record in _filter_text_keys(interesting_nodes, self._text_keys,
-                    bytes_to_info):
+                    chk_map._bytes_to_text_key):
                 if record is not None:
                     yield record
             # Consumed
@@ -1108,26 +1243,55 @@
             self._chk_p_id_roots = None
         yield 'chk_bytes', _get_parent_id_basename_to_file_id_pages()
 
+    def _get_text_stream(self):
+        # Note: We know we don't have to handle adding root keys, because both
+        # the source and target are the identical network name.
+        text_stream = self.from_repository.texts.get_record_stream(
+                        self._text_keys, self._text_fetch_order, False)
+        return ('texts', text_stream)
+
     def get_stream(self, search):
+        def wrap_and_count(pb, rc, stream):
+            """Yield records from stream while showing progress."""
+            count = 0
+            for record in stream:
+                if count == rc.STEP:
+                    rc.increment(count)
+                    pb.update('Estimate', rc.current, rc.max)
+                    count = 0
+                count += 1
+                yield record
+
         revision_ids = search.get_keys()
+        pb = ui.ui_factory.nested_progress_bar()
+        rc = self._record_counter
+        self._record_counter.setup(len(revision_ids))
         for stream_info in self._fetch_revision_texts(revision_ids):
-            yield stream_info
+            yield (stream_info[0],
+                wrap_and_count(pb, rc, stream_info[1]))
         self._revision_keys = [(rev_id,) for rev_id in revision_ids]
-        self.from_repository.revisions.clear_cache()
-        self.from_repository.signatures.clear_cache()
-        yield self._get_inventory_stream(self._revision_keys)
-        self.from_repository.inventories.clear_cache()
         # TODO: The keys to exclude might be part of the search recipe
         # For now, exclude all parents that are at the edge of ancestry, for
         # which we have inventories
         from_repo = self.from_repository
         parent_keys = from_repo._find_parent_keys_of_revisions(
                         self._revision_keys)
+        self.from_repository.revisions.clear_cache()
+        self.from_repository.signatures.clear_cache()
+        # Clear the repo's get_parent_map cache too.
+        self.from_repository._unstacked_provider.disable_cache()
+        self.from_repository._unstacked_provider.enable_cache()
+        s = self._get_inventory_stream(self._revision_keys)
+        yield (s[0], wrap_and_count(pb, rc, s[1]))
+        self.from_repository.inventories.clear_cache()
         for stream_info in self._get_filtered_chk_streams(parent_keys):
-            yield stream_info
+            yield (stream_info[0], wrap_and_count(pb, rc, stream_info[1]))
         self.from_repository.chk_bytes.clear_cache()
-        yield self._get_text_stream()
+        s = self._get_text_stream()
+        yield (s[0], wrap_and_count(pb, rc, s[1]))
         self.from_repository.texts.clear_cache()
+        pb.update('Done', rc.max, rc.max)
+        pb.finished()
 
     def get_stream_for_missing_keys(self, missing_keys):
         # missing keys can only occur when we are byte copying and not
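
The reworked get_stream above threads every substream through wrap_and_count, a generator that forwards records unchanged while periodically feeding a record counter and progress bar. The shape of that pattern in isolation, with plain callables standing in for bzrlib's progress objects (illustrative only):

    def with_progress(stream, step, report):
        # Yield items from stream unchanged, calling report(count) after
        # every `step` items -- the wrap_and_count shape, simplified.
        count = 0
        for item in stream:
            count += 1
            if count % step == 0:
                report(count)
            yield item

Because the wrapper is a generator, progress only advances as the consumer actually pulls records, so the bar tracks real transfer work rather than stream construction.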
@@ -1187,35 +1351,25 @@
     return result
 
 
-def _filter_text_keys(interesting_nodes_iterable, text_keys, bytes_to_info):
+def _filter_text_keys(interesting_nodes_iterable, text_keys, bytes_to_text_key):
     """Iterate the result of iter_interesting_nodes, yielding the records
     and adding to text_keys.
     """
+    text_keys_update = text_keys.update
     for record, items in interesting_nodes_iterable:
-        for name, bytes in items:
-            # Note: we don't care about name_utf8, because groupcompress repos
-            # are always rich-root, so there are no synthesised root records to
-            # ignore.
-            _, file_id, revision_id = bytes_to_info(bytes)
-            file_id = intern(file_id)
-            revision_id = intern(revision_id)
-            text_keys.add(StaticTuple(file_id, revision_id).intern())
+        text_keys_update([bytes_to_text_key(b) for n,b in items])
         yield record
 
 
-
-
-class RepositoryFormatCHK1(RepositoryFormatPack):
-    """A hashed CHK+group compress pack repository."""
+class RepositoryFormat2a(RepositoryFormatPack):
+    """A CHK repository that uses the bencode revision serializer."""
 
     repository_class = CHKInventoryRepository
     supports_external_lookups = True
     supports_chks = True
-    # For right now, setting this to True gives us InterModel1And2 rather
-    # than InterDifferingSerializer
     _commit_builder_class = PackRootCommitBuilder
     rich_root_data = True
-    _serializer = chk_serializer.chk_serializer_255_bigpage
+    _serializer = chk_serializer.chk_bencode_serializer
    _commit_inv_deltas = True
     # What index classes to use
     index_builder_class = BTreeBuilder
@@ -1232,63 +1386,44 @@
     pack_compresses = True
 
     def _get_matching_bzrdir(self):
-        return bzrdir.format_registry.make_bzrdir('development6-rich-root')
-
-    def _ignore_setting_bzrdir(self, format):
-        pass
-
-    _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
-
-    def get_format_string(self):
-        """See RepositoryFormat.get_format_string()."""
-        return ('Bazaar development format - group compression and chk inventory'
-                ' (needs bzr.dev from 1.14)\n')
-
-    def get_format_description(self):
-        """See RepositoryFormat.get_format_description()."""
-        return ("Development repository format - rich roots, group compression"
-            " and chk inventories")
-
-
-class RepositoryFormatCHK2(RepositoryFormatCHK1):
-    """A CHK repository that uses the bencode revision serializer."""
-
-    _serializer = chk_serializer.chk_bencode_serializer
-
-    def _get_matching_bzrdir(self):
-        return bzrdir.format_registry.make_bzrdir('development7-rich-root')
-
-    def _ignore_setting_bzrdir(self, format):
-        pass
-
-    _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
-
-    def get_format_string(self):
-        """See RepositoryFormat.get_format_string()."""
-        return ('Bazaar development format - chk repository with bencode '
-                'revision serialization (needs bzr.dev from 1.16)\n')
-
-
-class RepositoryFormat2a(RepositoryFormatCHK2):
-    """A CHK repository that uses the bencode revision serializer.
-
-    This is the same as RepositoryFormatCHK2 but with a public name.
-    """
-
-    _serializer = chk_serializer.chk_bencode_serializer
-
-    def _get_matching_bzrdir(self):
-        return bzrdir.format_registry.make_bzrdir('2a')
-
-    def _ignore_setting_bzrdir(self, format):
-        pass
-
-    _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
-
-    def get_format_string(self):
+        return controldir.format_registry.make_bzrdir('2a')
+
+    def _ignore_setting_bzrdir(self, format):
+        pass
+
+    _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
+
+    @classmethod
+    def get_format_string(cls):
         return ('Bazaar repository format 2a (needs bzr 1.16 or later)\n')
 
     def get_format_description(self):
         """See RepositoryFormat.get_format_description()."""
         return ("Repository format 2a - rich roots, group compression"
             " and chk inventories")
+
+
+class RepositoryFormat2aSubtree(RepositoryFormat2a):
+    """A 2a repository format that supports nested trees.
+
+    """
+
+    def _get_matching_bzrdir(self):
+        return controldir.format_registry.make_bzrdir('development-subtree')
+
+    def _ignore_setting_bzrdir(self, format):
+        pass
+
+    _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
+
+    @classmethod
+    def get_format_string(cls):
+        return ('Bazaar development format 8\n')
+
+    def get_format_description(self):
+        """See RepositoryFormat.get_format_description()."""
+        return ("Development repository format 8 - nested trees, "
+                "group compression and chk inventories")
+
+    experimental = True
+    supports_tree_reference = True
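
Finally, note that get_format_string becomes a @classmethod on both formats: callers can now ask a format class for its on-disk marker string without instantiating it. These strings serve as registry keys; the registration for 2a in bzrlib/repository.py looks roughly like this (shown for orientation only; this diff does not change it):

    from bzrlib.repository import format_registry

    format_registry.register_lazy(
        'Bazaar repository format 2a (needs bzr 1.16 or later)\n',
        'bzrlib.repofmt.groupcompress_repo',
        'RepositoryFormat2a',
        )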