~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/repofmt/groupcompress_repo.py

  • Committer: John Arbash Meinel
  • Date: 2010-02-04 16:06:36 UTC
  • mfrom: (5007 +trunk)
  • mto: This revision was merged to the branch mainline in revision 5023.
  • Revision ID: john@arbash-meinel.com-20100204160636-xqeuwz8bwt8bbts4
Merge bzr.dev 5007, resolve conflict, update NEWS

=== modified file 'bzrlib/repofmt/groupcompress_repo.py'
@@ -1,4 +1,4 @@
-# Copyright (C) 2008-2011 Canonical Ltd
+# Copyright (C) 2008, 2009, 2010 Canonical Ltd
 #
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -26,36 +26,33 @@
     errors,
     index as _mod_index,
     inventory,
+    knit,
     osutils,
     pack,
+    remote,
     revision as _mod_revision,
     trace,
     ui,
-    versionedfile,
     )
 from bzrlib.btree_index import (
     BTreeGraphIndex,
     BTreeBuilder,
     )
-from bzrlib.decorators import needs_write_lock
 from bzrlib.groupcompress import (
     _GCGraphIndex,
     GroupCompressVersionedFiles,
     )
 from bzrlib.repofmt.pack_repo import (
-    _DirectPackAccess,
     Pack,
     NewPack,
-    PackRepository,
+    KnitPackRepository,
+    KnitPackStreamSource,
     PackRootCommitBuilder,
     RepositoryPackCollection,
     RepositoryFormatPack,
     ResumedPack,
     Packer,
     )
-from bzrlib.vf_repository import (
-    StreamSource,
-    )
 from bzrlib.static_tuple import StaticTuple
 
 
@@ -266,6 +263,7 @@
         remaining_keys = set(keys)
         counter = [0]
         if self._gather_text_refs:
+            bytes_to_info = inventory.CHKInventory._bytes_to_utf8name_key
             self._text_refs = set()
         def _get_referenced_stream(root_keys, parse_leaf_nodes=False):
             cur_keys = root_keys
@@ -292,7 +290,8 @@
                     # Store is None, because we know we have a LeafNode, and we
                     # just want its entries
                     for file_id, bytes in node.iteritems(None):
-                        self._text_refs.add(chk_map._bytes_to_text_key(bytes))
+                        name_utf8, file_id, revision_id = bytes_to_info(bytes)
+                        self._text_refs.add((file_id, revision_id))
                 def next_stream():
                     stream = source_vf.get_record_stream(cur_keys,
                                                          'as-requested', True)
@@ -354,8 +353,8 @@
         """Build a VersionedFiles instance on top of this group of packs."""
         index_name = index_name + '_index'
         index_to_pack = {}
-        access = _DirectPackAccess(index_to_pack,
-                                   reload_func=self._reload_func)
+        access = knit._DirectPackAccess(index_to_pack,
+                                        reload_func=self._reload_func)
         if for_write:
             # Use new_pack
             if self.new_pack is None:
@@ -423,26 +422,14 @@
         inventory_keys = source_vf.keys()
         missing_inventories = set(self.revision_keys).difference(inventory_keys)
         if missing_inventories:
-            # Go back to the original repo, to see if these are really missing
-            # https://bugs.launchpad.net/bzr/+bug/437003
-            # If we are packing a subset of the repo, it is fine to just have
-            # the data in another Pack file, which is not included in this pack
-            # operation.
-            inv_index = self._pack_collection.repo.inventories._index
-            pmap = inv_index.get_parent_map(missing_inventories)
-            really_missing = missing_inventories.difference(pmap)
-            if really_missing:
-                missing_inventories = sorted(really_missing)
-                raise ValueError('We are missing inventories for revisions: %s'
-                    % (missing_inventories,))
+            missing_inventories = sorted(missing_inventories)
+            raise ValueError('We are missing inventories for revisions: %s'
+                % (missing_inventories,))
         self._copy_stream(source_vf, target_vf, inventory_keys,
                           'inventories', self._get_filtered_inv_stream, 2)
 
-    def _get_chk_vfs_for_copy(self):
-        return self._build_vfs('chk', False, False)
-
     def _copy_chk_texts(self):
-        source_vf, target_vf = self._get_chk_vfs_for_copy()
+        source_vf, target_vf = self._build_vfs('chk', False, False)
         # TODO: This is technically spurious... if it is a performance issue,
         #       remove it
         total_keys = source_vf.keys()
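
Note on the hunk above: the removed ('-') side of _copy_inventory_texts double-checks apparently missing inventories against the whole repository's inventory index before raising, so that packing a subset of a repository does not fail just because the data lives in a pack outside the operation (bug 437003). A minimal sketch of that "confirm truly missing" pattern, with a stand-in index in place of bzrlib's:

    def really_missing(candidate_keys, get_parent_map):
        """Return only the keys the whole repository cannot resolve.

        get_parent_map is assumed to behave like bzrlib's index objects:
        it returns a dict with entries only for the keys it knows about.
        """
        present = get_parent_map(candidate_keys)
        return set(candidate_keys) - set(present)

    # Stand-in index: knows rev-1 but not rev-2.
    index = {('rev-1',): (('rev-0',),)}
    lookup = lambda keys: dict((k, index[k]) for k in keys if k in index)
    assert really_missing({('rev-1',), ('rev-2',)}, lookup) == set([('rev-2',)])
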
@@ -594,117 +581,10 @@
         return new_pack.data_inserted() and self._data_changed
 
 
-class GCCHKCanonicalizingPacker(GCCHKPacker):
-    """A packer that ensures inventories have canonical-form CHK maps.
-
-    Ideally this would be part of reconcile, but it's very slow and rarely
-    needed.  (It repairs repositories affected by
-    https://bugs.launchpad.net/bzr/+bug/522637).
-    """
-
-    def __init__(self, *args, **kwargs):
-        super(GCCHKCanonicalizingPacker, self).__init__(*args, **kwargs)
-        self._data_changed = False
-
-    def _exhaust_stream(self, source_vf, keys, message, vf_to_stream, pb_offset):
-        """Create and exhaust a stream, but don't insert it.
-
-        This is useful to get the side-effects of generating a stream.
-        """
-        self.pb.update('scanning %s' % (message,), pb_offset)
-        child_pb = ui.ui_factory.nested_progress_bar()
-        try:
-            list(vf_to_stream(source_vf, keys, message, child_pb))
-        finally:
-            child_pb.finished()
-
-    def _copy_inventory_texts(self):
-        source_vf, target_vf = self._build_vfs('inventory', True, True)
-        source_chk_vf, target_chk_vf = self._get_chk_vfs_for_copy()
-        inventory_keys = source_vf.keys()
-        # First, copy the existing CHKs on the assumption that most of them
-        # will be correct.  This will save us from having to reinsert (and
-        # recompress) these records later at the cost of perhaps preserving a
-        # few unused CHKs.
-        # (Iterate but don't insert _get_filtered_inv_stream to populate the
-        # variables needed by GCCHKPacker._copy_chk_texts.)
-        self._exhaust_stream(source_vf, inventory_keys, 'inventories',
-                self._get_filtered_inv_stream, 2)
-        GCCHKPacker._copy_chk_texts(self)
-        # Now copy and fix the inventories, and any regenerated CHKs.
-        def chk_canonicalizing_inv_stream(source_vf, keys, message, pb=None):
-            return self._get_filtered_canonicalizing_inv_stream(
-                source_vf, keys, message, pb, source_chk_vf, target_chk_vf)
-        self._copy_stream(source_vf, target_vf, inventory_keys,
-                          'inventories', chk_canonicalizing_inv_stream, 4)
-
-    def _copy_chk_texts(self):
-        # No-op; in this class this happens during _copy_inventory_texts.
-        pass
-
-    def _get_filtered_canonicalizing_inv_stream(self, source_vf, keys, message,
-            pb=None, source_chk_vf=None, target_chk_vf=None):
-        """Filter the texts of inventories, regenerating CHKs to make sure they
-        are canonical.
-        """
-        total_keys = len(keys)
-        target_chk_vf = versionedfile.NoDupeAddLinesDecorator(target_chk_vf)
-        def _filtered_inv_stream():
-            stream = source_vf.get_record_stream(keys, 'groupcompress', True)
-            search_key_name = None
-            for idx, record in enumerate(stream):
-                # Inventories should always be with revisions; assume success.
-                bytes = record.get_bytes_as('fulltext')
-                chk_inv = inventory.CHKInventory.deserialise(
-                    source_chk_vf, bytes, record.key)
-                if pb is not None:
-                    pb.update('inv', idx, total_keys)
-                chk_inv.id_to_entry._ensure_root()
-                if search_key_name is None:
-                    # Find the name corresponding to the search_key_func
-                    search_key_reg = chk_map.search_key_registry
-                    for search_key_name, func in search_key_reg.iteritems():
-                        if func == chk_inv.id_to_entry._search_key_func:
-                            break
-                canonical_inv = inventory.CHKInventory.from_inventory(
-                    target_chk_vf, chk_inv,
-                    maximum_size=chk_inv.id_to_entry._root_node._maximum_size,
-                    search_key_name=search_key_name)
-                if chk_inv.id_to_entry.key() != canonical_inv.id_to_entry.key():
-                    trace.mutter(
-                        'Non-canonical CHK map for id_to_entry of inv: %s '
-                        '(root is %s, should be %s)' % (chk_inv.revision_id,
-                        chk_inv.id_to_entry.key()[0],
-                        canonical_inv.id_to_entry.key()[0]))
-                    self._data_changed = True
-                p_id_map = chk_inv.parent_id_basename_to_file_id
-                p_id_map._ensure_root()
-                canon_p_id_map = canonical_inv.parent_id_basename_to_file_id
-                if p_id_map.key() != canon_p_id_map.key():
-                    trace.mutter(
-                        'Non-canonical CHK map for parent_id_to_basename of '
-                        'inv: %s (root is %s, should be %s)'
-                        % (chk_inv.revision_id, p_id_map.key()[0],
-                           canon_p_id_map.key()[0]))
-                    self._data_changed = True
-                yield versionedfile.ChunkedContentFactory(record.key,
-                        record.parents, record.sha1,
-                        canonical_inv.to_lines())
-            # We have finished processing all of the inventory records, we
-            # don't need these sets anymore
-        return _filtered_inv_stream()
-
-    def _use_pack(self, new_pack):
-        """Override _use_pack to check for reconcile having changed content."""
-        return new_pack.data_inserted() and self._data_changed
-
-
 class GCRepositoryPackCollection(RepositoryPackCollection):
 
     pack_factory = GCPack
     resumed_pack_factory = ResumedGCPack
-    normal_packer_class = GCCHKPacker
-    optimising_packer_class = GCCHKPacker
 
     def _check_new_inventories(self):
         """Detect missing inventories or chk root entries for the new revisions
@@ -768,10 +648,10 @@
         chk_diff = chk_map.iter_interesting_nodes(
             chk_bytes_no_fallbacks, root_key_info.interesting_root_keys,
             root_key_info.uninteresting_root_keys)
+        bytes_to_info = inventory.CHKInventory._bytes_to_utf8name_key
         text_keys = set()
         try:
-            for record in _filter_text_keys(chk_diff, text_keys,
-                                            chk_map._bytes_to_text_key):
+            for record in _filter_text_keys(chk_diff, text_keys, bytes_to_info):
                 pass
         except errors.NoSuchRevision, e:
             # XXX: It would be nice if we could give a more precise error here.
@@ -792,15 +672,56 @@
                 % (sorted(missing_text_keys),))
         return problems
 
-
-class CHKInventoryRepository(PackRepository):
-    """subclass of PackRepository that uses CHK based inventories."""
+    def _execute_pack_operations(self, pack_operations,
+                                 _packer_class=GCCHKPacker,
+                                 reload_func=None):
+        """Execute a series of pack operations.
+
+        :param pack_operations: A list of [revision_count, packs_to_combine].
+        :param _packer_class: The class of packer to use (default: Packer).
+        :return: None.
+        """
+        # XXX: Copied across from RepositoryPackCollection simply because we
+        #      want to override the _packer_class ... :(
+        for revision_count, packs in pack_operations:
+            # we may have no-ops from the setup logic
+            if len(packs) == 0:
+                continue
+            packer = GCCHKPacker(self, packs, '.autopack',
+                                 reload_func=reload_func)
+            try:
+                result = packer.pack()
+            except errors.RetryWithNewPacks:
+                # An exception is propagating out of this context, make sure
+                # this packer has cleaned up. Packer() doesn't set its new_pack
+                # state into the RepositoryPackCollection object, so we only
+                # have access to it directly here.
+                if packer.new_pack is not None:
+                    packer.new_pack.abort()
+                raise
+            if result is None:
+                return
+            for pack in packs:
+                self._remove_pack_from_memory(pack)
+        # record the newly available packs and stop advertising the old
+        # packs
+        to_be_obsoleted = []
+        for _, packs in pack_operations:
+            to_be_obsoleted.extend(packs)
+        result = self._save_pack_names(clear_obsolete_packs=True,
+                                       obsolete_packs=to_be_obsoleted)
+        return result
+
+
+class CHKInventoryRepository(KnitPackRepository):
+    """subclass of KnitPackRepository that uses CHK based inventories."""
 
     def __init__(self, _format, a_bzrdir, control_files, _commit_builder_class,
         _serializer):
         """Overridden to change pack collection class."""
-        super(CHKInventoryRepository, self).__init__(_format, a_bzrdir,
-            control_files, _commit_builder_class, _serializer)
+        KnitPackRepository.__init__(self, _format, a_bzrdir, control_files,
+            _commit_builder_class, _serializer)
+        # and now replace everything it did :)
         index_transport = self._transport.clone('indices')
         self._pack_collection = GCRepositoryPackCollection(self,
             self._transport, index_transport,
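
_execute_pack_operations on the added ('+') side spells out the cleanup contract for RetryWithNewPacks: the packer's partially written new pack is not registered with the pack collection, so the only place it can be aborted is where the exception passes through. The shape of that pattern, stripped of bzrlib specifics:

    class RetryWithNewPacks(Exception):
        pass

    def execute_with_cleanup(packer):
        try:
            return packer.pack()
        except RetryWithNewPacks:
            # The exception is propagating out of this frame; nothing else
            # holds a reference to packer.new_pack, so abort it here.
            if packer.new_pack is not None:
                packer.new_pack.abort()
            raise

    class _Pack(object):
        aborted = False
        def abort(self):
            self.aborted = True

    class _FailingPacker(object):
        def __init__(self):
            self.new_pack = _Pack()
        def pack(self):
            raise RetryWithNewPacks()

    packer = _FailingPacker()
    try:
        execute_with_cleanup(packer)
    except RetryWithNewPacks:
        pass
    assert packer.new_pack.aborted
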
@@ -944,8 +865,6 @@
         if basis_inv is None:
             if basis_revision_id == _mod_revision.NULL_REVISION:
                 new_inv = self._create_inv_from_null(delta, new_revision_id)
-                if new_inv.root_id is None:
-                    raise errors.RootMissing()
                 inv_lines = new_inv.to_lines()
                 return self._inventory_add_lines(new_revision_id, parents,
                     inv_lines, check_content=False), new_inv
@@ -1081,24 +1000,10 @@
         finally:
             pb.finished()
 
-    @needs_write_lock
-    def reconcile_canonicalize_chks(self):
-        """Reconcile this repository to make sure all CHKs are in canonical
-        form.
-        """
-        from bzrlib.reconcile import PackReconciler
-        reconciler = PackReconciler(self, thorough=True, canonicalize_chks=True)
-        reconciler.reconcile()
-        return reconciler
-
     def _reconcile_pack(self, collection, packs, extension, revs, pb):
         packer = GCCHKReconcilePacker(collection, packs, extension)
         return packer.pack(pb)
 
-    def _canonicalize_chks_pack(self, collection, packs, extension, revs, pb):
-        packer = GCCHKCanonicalizingPacker(collection, packs, extension, revs)
-        return packer.pack(pb)
-
     def _get_source(self, to_format):
         """Return a source for streaming from this repository."""
         if self._format._serializer == to_format._serializer:
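
The removed ('-') side exposes CHK canonicalization through reconcile_canonicalize_chks(), which drives a PackReconciler with canonicalize_chks=True and, via _canonicalize_chks_pack, ends up in GCCHKCanonicalizingPacker. A hedged usage sketch, assuming a bzrlib that has that side's API and a 2a-format branch at the hypothetical path 'repo':

    # Sketch only: requires a bzrlib version with
    # reconcile_canonicalize_chks() (the '-' side of this diff).
    # The method is decorated with needs_write_lock, so it takes the
    # write lock itself.
    from bzrlib import branch

    repo = branch.Branch.open('repo').repository
    reconciler = repo.reconcile_canonicalize_chks()
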
@@ -1109,39 +1014,8 @@
             return GroupCHKStreamSource(self, to_format)
         return super(CHKInventoryRepository, self)._get_source(to_format)
 
-    def _find_inconsistent_revision_parents(self, revisions_iterator=None):
-        """Find revisions with different parent lists in the revision object
-        and in the index graph.
-
-        :param revisions_iterator: None, or an iterator of (revid,
-            Revision-or-None). This iterator controls the revisions checked.
-        :returns: an iterator yielding tuples of (revison-id, parents-in-index,
-            parents-in-revision).
-        """
-        if not self.is_locked():
-            raise AssertionError()
-        vf = self.revisions
-        if revisions_iterator is None:
-            revisions_iterator = self._iter_revisions(None)
-        for revid, revision in revisions_iterator:
-            if revision is None:
-                pass
-            parent_map = vf.get_parent_map([(revid,)])
-            parents_according_to_index = tuple(parent[-1] for parent in
-                parent_map[(revid,)])
-            parents_according_to_revision = tuple(revision.parent_ids)
-            if parents_according_to_index != parents_according_to_revision:
-                yield (revid, parents_according_to_index,
-                    parents_according_to_revision)
-
-    def _check_for_inconsistent_revision_parents(self):
-        inconsistencies = list(self._find_inconsistent_revision_parents())
-        if inconsistencies:
-            raise errors.BzrCheckError(
-                "Revision index has inconsistent parents.")
-
-
-class GroupCHKStreamSource(StreamSource):
+
+class GroupCHKStreamSource(KnitPackStreamSource):
     """Used when both the source and target repo are GroupCHK repos."""
 
     def __init__(self, from_repository, to_format):
@@ -1214,12 +1088,13 @@
                 uninteresting_root_keys.add(inv.id_to_entry.key())
                 uninteresting_pid_root_keys.add(
                     inv.parent_id_basename_to_file_id.key())
+        bytes_to_info = inventory.CHKInventory._bytes_to_utf8name_key
         chk_bytes = self.from_repository.chk_bytes
         def _filter_id_to_entry():
             interesting_nodes = chk_map.iter_interesting_nodes(chk_bytes,
                         self._chk_id_roots, uninteresting_root_keys)
             for record in _filter_text_keys(interesting_nodes, self._text_keys,
-                    chk_map._bytes_to_text_key):
+                    bytes_to_info):
                 if record is not None:
                     yield record
             # Consumed
@@ -1234,55 +1109,26 @@
             self._chk_p_id_roots = None
         yield 'chk_bytes', _get_parent_id_basename_to_file_id_pages()
 
-    def _get_text_stream(self):
-        # Note: We know we don't have to handle adding root keys, because both
-        # the source and target are the identical network name.
-        text_stream = self.from_repository.texts.get_record_stream(
-                        self._text_keys, self._text_fetch_order, False)
-        return ('texts', text_stream)
-
     def get_stream(self, search):
-        def wrap_and_count(pb, rc, stream):
-            """Yield records from stream while showing progress."""
-            count = 0
-            for record in stream:
-                if count == rc.STEP:
-                    rc.increment(count)
-                    pb.update('Estimate', rc.current, rc.max)
-                    count = 0
-                count += 1
-                yield record
-
         revision_ids = search.get_keys()
-        pb = ui.ui_factory.nested_progress_bar()
-        rc = self._record_counter
-        self._record_counter.setup(len(revision_ids))
         for stream_info in self._fetch_revision_texts(revision_ids):
-            yield (stream_info[0],
-                wrap_and_count(pb, rc, stream_info[1]))
+            yield stream_info
         self._revision_keys = [(rev_id,) for rev_id in revision_ids]
+        self.from_repository.revisions.clear_cache()
+        self.from_repository.signatures.clear_cache()
+        yield self._get_inventory_stream(self._revision_keys)
+        self.from_repository.inventories.clear_cache()
         # TODO: The keys to exclude might be part of the search recipe
         # For now, exclude all parents that are at the edge of ancestry, for
         # which we have inventories
         from_repo = self.from_repository
         parent_keys = from_repo._find_parent_keys_of_revisions(
                         self._revision_keys)
-        self.from_repository.revisions.clear_cache()
-        self.from_repository.signatures.clear_cache()
-        # Clear the repo's get_parent_map cache too.
-        self.from_repository._unstacked_provider.disable_cache()
-        self.from_repository._unstacked_provider.enable_cache()
-        s = self._get_inventory_stream(self._revision_keys)
-        yield (s[0], wrap_and_count(pb, rc, s[1]))
-        self.from_repository.inventories.clear_cache()
         for stream_info in self._get_filtered_chk_streams(parent_keys):
-            yield (stream_info[0], wrap_and_count(pb, rc, stream_info[1]))
+            yield stream_info
         self.from_repository.chk_bytes.clear_cache()
-        s = self._get_text_stream()
-        yield (s[0], wrap_and_count(pb, rc, s[1]))
+        yield self._get_text_stream()
         self.from_repository.texts.clear_cache()
-        pb.update('Done', rc.max, rc.max)
-        pb.finished()
 
     def get_stream_for_missing_keys(self, missing_keys):
         # missing keys can only occur when we are byte copying and not
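
In get_stream, the removed ('-') side threads every substream through wrap_and_count, a generator that forwards records untouched while updating a shared progress bar from a record-counter estimate. The wrapper pattern on its own, with invented stand-ins for bzrlib's progress bar and RecordCounter:

    class RecordCounter(object):
        # Assumption for illustration: refresh the estimate every STEP records.
        STEP = 71

        def __init__(self):
            self.current = 0
            self.max = 0

        def setup(self, key_count):
            self.max = key_count * 10  # rough records-per-revision guess

        def increment(self, count):
            self.current += count
            self.max = max(self.max, self.current)

    def wrap_and_count(update, rc, stream):
        """Yield records from stream unchanged, reporting progress."""
        count = 0
        for record in stream:
            if count == rc.STEP:
                rc.increment(count)
                update('Estimate', rc.current, rc.max)
                count = 0
            count += 1
            yield record

    rc = RecordCounter()
    rc.setup(20)
    report = lambda msg, cur, total: None  # stand-in for pb.update
    for record in wrap_and_count(report, rc, iter(range(200))):
        pass
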
@@ -1342,25 +1188,35 @@
     return result
 
 
-def _filter_text_keys(interesting_nodes_iterable, text_keys, bytes_to_text_key):
+def _filter_text_keys(interesting_nodes_iterable, text_keys, bytes_to_info):
     """Iterate the result of iter_interesting_nodes, yielding the records
     and adding to text_keys.
     """
-    text_keys_update = text_keys.update
     for record, items in interesting_nodes_iterable:
-        text_keys_update([bytes_to_text_key(b) for n,b in items])
+        for name, bytes in items:
+            # Note: we don't care about name_utf8, because groupcompress repos
+            # are always rich-root, so there are no synthesised root records to
+            # ignore.
+            _, file_id, revision_id = bytes_to_info(bytes)
+            file_id = intern(file_id)
+            revision_id = intern(revision_id)
+            text_keys.add(StaticTuple(file_id, revision_id).intern())
         yield record
 
 
-class RepositoryFormat2a(RepositoryFormatPack):
-    """A CHK repository that uses the bencode revision serializer."""
+
+
+class RepositoryFormatCHK1(RepositoryFormatPack):
+    """A hashed CHK+group compress pack repository."""
 
     repository_class = CHKInventoryRepository
     supports_external_lookups = True
     supports_chks = True
+    # For right now, setting this to True gives us InterModel1And2 rather
+    # than InterDifferingSerializer
    _commit_builder_class = PackRootCommitBuilder
     rich_root_data = True
-    _serializer = chk_serializer.chk_bencode_serializer
+    _serializer = chk_serializer.chk_serializer_255_bigpage
     _commit_inv_deltas = True
     # What index classes to use
     index_builder_class = BTreeBuilder
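
The _filter_text_keys hunk above is the heart of the callback change: the removed ('-') side hands the helper a bytes_to_text_key callable that maps a CHK leaf value straight to an interned (file_id, revision_id) key, while the added ('+') side passes bytes_to_info and unpacks (name_utf8, file_id, revision_id) itself. Both styles over a made-up leaf encoding (the NUL-separated format is illustrative only):

    # Hypothetical leaf value: 'name\x00file_id\x00revision_id'.
    def bytes_to_info(value):
        return tuple(value.split('\x00'))

    def bytes_to_text_key(value):
        _, file_id, revision_id = value.split('\x00')
        return (file_id, revision_id)

    def filter_text_keys(interesting_nodes, text_keys, to_key):
        """Yield each record, collecting text keys as a side effect."""
        for record, items in interesting_nodes:
            text_keys.update(to_key(value) for _name, value in items)
            yield record

    keys = set()
    nodes = [('record-1', [(('a.txt',), 'a.txt\x00file-a\x00rev-1')])]
    for record in filter_text_keys(nodes, keys, bytes_to_text_key):
        pass
    assert keys == set([('file-a', 'rev-1')])
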
@@ -1377,6 +1233,52 @@
     pack_compresses = True
 
     def _get_matching_bzrdir(self):
+        return bzrdir.format_registry.make_bzrdir('development6-rich-root')
+
+    def _ignore_setting_bzrdir(self, format):
+        pass
+
+    _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
+
+    def get_format_string(self):
+        """See RepositoryFormat.get_format_string()."""
+        return ('Bazaar development format - group compression and chk inventory'
+                ' (needs bzr.dev from 1.14)\n')
+
+    def get_format_description(self):
+        """See RepositoryFormat.get_format_description()."""
+        return ("Development repository format - rich roots, group compression"
+            " and chk inventories")
+
+
+class RepositoryFormatCHK2(RepositoryFormatCHK1):
+    """A CHK repository that uses the bencode revision serializer."""
+
+    _serializer = chk_serializer.chk_bencode_serializer
+
+    def _get_matching_bzrdir(self):
+        return bzrdir.format_registry.make_bzrdir('development7-rich-root')
+
+    def _ignore_setting_bzrdir(self, format):
+        pass
+
+    _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
+
+    def get_format_string(self):
+        """See RepositoryFormat.get_format_string()."""
+        return ('Bazaar development format - chk repository with bencode '
+                'revision serialization (needs bzr.dev from 1.16)\n')
+
+
+class RepositoryFormat2a(RepositoryFormatCHK2):
+    """A CHK repository that uses the bencode revision serializer.
+
+    This is the same as RepositoryFormatCHK2 but with a public name.
+    """
+
+    _serializer = chk_serializer.chk_bencode_serializer
+
+    def _get_matching_bzrdir(self):
         return bzrdir.format_registry.make_bzrdir('2a')
 
     def _ignore_setting_bzrdir(self, format):
@@ -1391,28 +1293,3 @@
         """See RepositoryFormat.get_format_description()."""
         return ("Repository format 2a - rich roots, group compression"
             " and chk inventories")
-
-
-class RepositoryFormat2aSubtree(RepositoryFormat2a):
-    """A 2a repository format that supports nested trees.
-
-    """
-
-    def _get_matching_bzrdir(self):
-        return bzrdir.format_registry.make_bzrdir('development-subtree')
-
-    def _ignore_setting_bzrdir(self, format):
-        pass
-
-    _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
-
-    def get_format_string(self):
-        return ('Bazaar development format 8\n')
-
-    def get_format_description(self):
-        """See RepositoryFormat.get_format_description()."""
-        return ("Development repository format 8 - nested trees, "
-                "group compression and chk inventories")
-
-    experimental = True
-    supports_tree_reference = True