class GCRepositoryPackCollection(RepositoryPackCollection):

    pack_factory = GCPack
    resumed_pack_factory = ResumedGCPack
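    # Note: the base RepositoryPackCollection consults these two factories
    # when creating new or resumed packs, so GC repositories get GC-specific
    # pack objects.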

    def _check_new_inventories(self):
        """Detect missing inventories or chk root entries for the new revisions
        in this write group.

        :returns: list of strs, summarising any problems found. If the list is
            empty no problems were found.
        """
        # Ensure that all revisions added in this write group have:
        #   - corresponding inventories,
        #   - chk root entries for those inventories,
        #   - and any present parent inventories have their chk root
        #     entries too.
        # And all this should be independent of any fallback repository.
        problems = []
        key_deps = self.repo.revisions._index._key_dependencies
        new_revisions_keys = key_deps.get_new_keys()
        no_fallback_inv_index = self.repo.inventories._index
        no_fallback_chk_bytes_index = self.repo.chk_bytes._index
        no_fallback_texts_index = self.repo.texts._index
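        # Note: grabbing the _index attributes directly bypasses any fallback
        # repositories, so the checks below verify that this repository is
        # complete by itself.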
        inv_parent_map = no_fallback_inv_index.get_parent_map(
            new_revisions_keys)
        # Are any inventories corresponding to the new revisions missing?
        corresponding_invs = set(inv_parent_map)
        missing_corresponding = set(new_revisions_keys)
        missing_corresponding.difference_update(corresponding_invs)
        if missing_corresponding:
            problems.append("inventories missing for revisions %s" %
                (sorted(missing_corresponding),))
            return problems
        # Are any chk root entries missing for any inventories?  This includes
        # any present parent inventories, which may be used when calculating
        # deltas for streaming.
        all_inv_keys = set(corresponding_invs)
        for parent_inv_keys in inv_parent_map.itervalues():
            all_inv_keys.update(parent_inv_keys)
        # Filter out ghost parents.
        all_inv_keys.intersection_update(
            no_fallback_inv_index.get_parent_map(all_inv_keys))
        parent_invs_only_keys = all_inv_keys.symmetric_difference(
            corresponding_invs)
        inv_ids = [key[-1] for key in all_inv_keys]
        parent_invs_only_ids = [key[-1] for key in parent_invs_only_keys]
        root_key_info = _build_interesting_key_sets(
            self.repo, inv_ids, parent_invs_only_ids)
        expected_chk_roots = root_key_info.all_keys()
        present_chk_roots = no_fallback_chk_bytes_index.get_parent_map(
            expected_chk_roots)
        missing_chk_roots = expected_chk_roots.difference(present_chk_roots)
        if missing_chk_roots:
            problems.append("missing referenced chk root keys: %s"
                % (sorted(missing_chk_roots),))
            # Don't bother checking any further.
            return problems
        # Find all interesting chk_bytes records, and make sure they are
        # present, as well as the text keys they reference.
        chk_bytes_no_fallbacks = self.repo.chk_bytes.without_fallbacks()
        chk_bytes_no_fallbacks._search_key_func = \
            self.repo.chk_bytes._search_key_func
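        # Note: without_fallbacks() builds a fresh VersionedFiles object, so
        # the private chk search-key function has to be carried over by hand.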
        chk_diff = chk_map.iter_interesting_nodes(
            chk_bytes_no_fallbacks, root_key_info.interesting_root_keys,
            root_key_info.uninteresting_root_keys)
        bytes_to_info = inventory.CHKInventory._bytes_to_utf8name_key
        text_keys = set()
        try:
            # Iterating the filtered stream forces every referenced chk page
            # to be read; _filter_text_keys fills in text_keys as a side
            # effect.
            for record in _filter_text_keys(chk_diff, text_keys, bytes_to_info):
                pass
        except errors.NoSuchRevision, e:
            # XXX: It would be nice if we could give a more precise error here.
            problems.append("missing chk node(s) for id_to_entry maps")
        chk_diff = chk_map.iter_interesting_nodes(
            chk_bytes_no_fallbacks, root_key_info.interesting_pid_root_keys,
            root_key_info.uninteresting_pid_root_keys)
        try:
            for interesting_rec, interesting_map in chk_diff:
                pass
        except errors.NoSuchRevision, e:
            problems.append(
                "missing chk node(s) for parent_id_basename_to_file_id maps")
        present_text_keys = no_fallback_texts_index.get_parent_map(text_keys)
        missing_text_keys = text_keys.difference(present_text_keys)
        if missing_text_keys:
            problems.append("missing text keys: %r"
                % (sorted(missing_text_keys),))
        return problems

    def _already_packed(self):
        """Is the collection already packed?"""
        # Always repack GC repositories for now
        return False

    def _execute_pack_operations(self, pack_operations,
                                 _packer_class=GCCHKPacker,
                                 reload_func=None):
        """Execute a series of pack operations."""


class CHKInventoryRepository(KnitPackRepository):
    """subclass of KnitPackRepository that uses CHK based inventories."""

    def __init__(self, _format, a_bzrdir, control_files, _commit_builder_class,
        _serializer):
        """Overridden to change pack collection class."""
        KnitPackRepository.__init__(self, _format, a_bzrdir, control_files,
            _commit_builder_class, _serializer)
        # and now replace everything it did :)
        index_transport = self._pack_collection._index_transport
        self._pack_collection = GCRepositoryPackCollection(self,
            index_transport,
            self._pack_collection._upload_transport,
            self._pack_collection._pack_transport,
            'pack-names',
            _format.index_builder_class,
            _format.index_class,
            use_chk_index=self._format.supports_chks,
            )
        self.inventories = GroupCompressVersionedFiles(
            _GCGraphIndex(self._pack_collection.inventory_index.combined_index,
                add_callback=self._pack_collection.inventory_index.add_callback,
                parents=True, is_locked=self.is_locked,
                inconsistency_fatal=False),
            access=self._pack_collection.inventory_index.data_access)
        self.revisions = GroupCompressVersionedFiles(
            _GCGraphIndex(self._pack_collection.revision_index.combined_index,
                add_callback=self._pack_collection.revision_index.add_callback,
                parents=True, is_locked=self.is_locked,
                track_external_parent_refs=True, track_new_keys=True),
            access=self._pack_collection.revision_index.data_access,
            delta=False)
        self.signatures = GroupCompressVersionedFiles(
            _GCGraphIndex(self._pack_collection.signature_index.combined_index,
                add_callback=self._pack_collection.signature_index.add_callback,
                parents=False, is_locked=self.is_locked,
                inconsistency_fatal=False),
            access=self._pack_collection.signature_index.data_access,
            delta=False)
        self.texts = GroupCompressVersionedFiles(
            _GCGraphIndex(self._pack_collection.text_index.combined_index,
                add_callback=self._pack_collection.text_index.add_callback,
                parents=True, is_locked=self.is_locked,
                inconsistency_fatal=False),
            access=self._pack_collection.text_index.data_access)
        # No parents, individual CHK pages don't have specific ancestry
        self.chk_bytes = GroupCompressVersionedFiles(
            _GCGraphIndex(self._pack_collection.chk_index.combined_index,
                add_callback=self._pack_collection.chk_index.add_callback,
                parents=False, is_locked=self.is_locked,
                inconsistency_fatal=False),
            access=self._pack_collection.chk_index.data_access)
        search_key_name = self._format._serializer.search_key_name
        search_key_func = chk_map.search_key_registry.get(search_key_name)
        self.chk_bytes._search_key_func = search_key_func
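        # Note: the serializer names the chk search-key function (e.g.
        # 'hash-255-way') that the maps were built with; attaching the same
        # function here keeps chk page keys computed the same way.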
        # True when the repository object is 'write locked' (as opposed to the
        # physical lock only taken out around changes to the pack-names list.)
        # Another way to represent this would be a decorator around the control
        # files object that presents logical locks as physical ones - if this
        # gets ugly consider that alternative design. RBC 20071011
        self._write_lock_count = 0
        self._transaction = None

    def _add_inventory_checked(self, revision_id, inv, parents):
        """Add inv to the repository after checking the inputs.

        This function can be overridden to allow different inventory styles.

        :seealso: add_inventory, for the contract.
        """
        # make inventory
        serializer = self._format._serializer
        result = inventory.CHKInventory.from_inventory(self.chk_bytes, inv,
            maximum_size=serializer.maximum_size,
            search_key_name=serializer.search_key_name)
        inv_lines = result.to_lines()
        return self._inventory_add_lines(revision_id, parents,
            inv_lines, check_content=False)

    def _create_inv_from_null(self, delta, revision_id):
        """This will mutate new_inv directly.

        This is a simplified form of create_by_apply_delta which knows that all
        the old values must be None, so everything is a create.
        """
        serializer = self._format._serializer
        new_inv = inventory.CHKInventory(serializer.search_key_name)
        new_inv.revision_id = revision_id
        entry_to_bytes = new_inv._entry_to_bytes
        id_to_entry_dict = {}
        parent_id_basename_dict = {}
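        # These two dicts become the inventory's id_to_entry and
        # parent_id_basename_to_file_id chk maps via _populate_from_dicts()
        # below.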
        for old_path, new_path, file_id, entry in delta:
            if old_path is not None:
                raise ValueError('Invalid delta, somebody tried to delete %r'
                    ' from the NULL_REVISION'
                    % ((old_path, file_id),))
            if new_path is None:
                raise ValueError('Invalid delta, delta from NULL_REVISION has'
                    ' no new_path %r' % (file_id,))
            if new_path == '':
                new_inv.root_id = file_id
                parent_id_basename_key = StaticTuple('', '').intern()
            else:
                utf8_entry_name = entry.name.encode('utf-8')
                parent_id_basename_key = StaticTuple(entry.parent_id,
                    utf8_entry_name).intern()
            new_value = entry_to_bytes(entry)
            # new_inv._path_to_fileid_cache[new_path] = file_id
            key = StaticTuple(file_id).intern()
            id_to_entry_dict[key] = new_value
            parent_id_basename_dict[parent_id_basename_key] = file_id
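            # Note: StaticTuple and intern() (above) keep one shared copy of
            # each key, which should keep memory down for large inventories.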

        new_inv._populate_from_dicts(self.chk_bytes, id_to_entry_dict,
            parent_id_basename_dict, maximum_size=serializer.maximum_size)
        return new_inv

    def add_inventory_by_delta(self, basis_revision_id, delta, new_revision_id,
                               parents, basis_inv=None, propagate_caches=False):
        """Add a new inventory expressed as a delta against another revision.

        :returns: (validator, new_inv)
            The validator(which is a sha1 digest, though what is sha'd is
            repository format specific) of the serialized inventory, and the
            resulting inventory.
        """
        if not self.is_in_write_group():
            raise AssertionError("%r not in write group" % (self,))
        _mod_revision.check_not_reserved_id(new_revision_id)
        basis_tree = None
        if basis_inv is None:
            if basis_revision_id == _mod_revision.NULL_REVISION:
                new_inv = self._create_inv_from_null(delta, new_revision_id)
                inv_lines = new_inv.to_lines()
                return self._inventory_add_lines(new_revision_id, parents,
                    inv_lines, check_content=False), new_inv
            else:
                basis_tree = self.revision_tree(basis_revision_id)
                basis_tree.lock_read()
                basis_inv = basis_tree.inventory
        try:
            result = basis_inv.create_by_apply_delta(delta, new_revision_id,
                propagate_caches=propagate_caches)
            inv_lines = result.to_lines()
            return self._inventory_add_lines(new_revision_id, parents,
                inv_lines, check_content=False), result
        finally:
            if basis_tree is not None:
                basis_tree.unlock()

    def _deserialise_inventory(self, revision_id, bytes):
        return inventory.CHKInventory.deserialise(self.chk_bytes, bytes,
            (revision_id,))

    def _iter_inventories(self, revision_ids, ordering):
        """Iterate over many inventory objects."""
        if ordering is None:
            ordering = 'unordered'
        keys = [(revision_id,) for revision_id in revision_ids]
        stream = self.inventories.get_record_stream(keys, ordering, True)
        texts = {}
        for record in stream:
            if record.storage_kind != 'absent':
                texts[record.key] = record.get_bytes_as('fulltext')
            else:
                raise errors.NoSuchRevision(self, record.key)
        for key in keys:
            yield inventory.CHKInventory.deserialise(self.chk_bytes, texts[key], key)

    def _iter_inventory_xmls(self, revision_ids, ordering):
        # Without a native 'xml' inventory, this method doesn't make sense.
        # However older working trees, and older bundles want it - so we supply
        # it allowing _get_inventory_xml to work. Bundles currently use the
        # serializer directly; this also isn't ideal, but there isn't an xml
        # iteration interface offered at all for repositories. We could make
        # _iter_inventory_xmls be part of the contract, even if kept private.
        inv_to_str = self._serializer.write_inventory_to_string
        for inv in self.iter_inventories(revision_ids, ordering=ordering):
            yield inv_to_str(inv), inv.revision_id

    def _find_present_inventory_keys(self, revision_keys):
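        # Note: get_parent_map() only returns entries for keys that are
        # actually present, so ghosts and missing inventories simply drop out.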
        parent_map = self.inventories.get_parent_map(revision_keys)
        present_inventory_keys = set(k for k in parent_map)
        return present_inventory_keys

    def fileids_altered_by_revision_ids(self, revision_ids, _inv_weave=None):
        """Find the file ids and versions affected by revisions.

        :return: a dictionary mapping altered file-ids to an iterable of
            revision_ids. Each altered file-id has the exact revision_ids that
            altered it listed explicitly.
        """
        rich_root = self.supports_rich_root()
        bytes_to_info = inventory.CHKInventory._bytes_to_utf8name_key
        file_id_revisions = {}
        pb = ui.ui_factory.nested_progress_bar()
        try:
            revision_keys = [(r,) for r in revision_ids]
            parent_keys = self._find_parent_keys_of_revisions(revision_keys)
            # TODO: instead of using _find_present_inventory_keys, change the
            #       code paths to allow missing inventories to be tolerated.
            #       However, we only want to tolerate missing parent
            #       inventories, not missing inventories for revision_ids
            present_parent_inv_keys = self._find_present_inventory_keys(
                parent_keys)
            present_parent_inv_ids = set(
                [k[-1] for k in present_parent_inv_keys])
            inventories_to_read = set(revision_ids)
            inventories_to_read.update(present_parent_inv_ids)
            root_key_info = _build_interesting_key_sets(
                self, inventories_to_read, present_parent_inv_ids)
            interesting_root_keys = root_key_info.interesting_root_keys
            uninteresting_root_keys = root_key_info.uninteresting_root_keys
            chk_bytes = self.chk_bytes
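            # Walk only the chk pages reachable from the interesting roots
            # and not from the uninteresting (parent-only) roots.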
            for record, items in chk_map.iter_interesting_nodes(chk_bytes,
                        interesting_root_keys, uninteresting_root_keys,
                        pb=pb):
                for name, bytes in items:
                    (name_utf8, file_id, revision_id) = bytes_to_info(bytes)
                    # TODO: consider interning file_id, revision_id here, or
                    #       pushing that intern() into bytes_to_info()
                    # TODO: rich_root should always be True here, for all
                    #       repositories that support chk_bytes
                    if not rich_root and name_utf8 == '':
                        continue
                    try:
                        file_id_revisions[file_id].add(revision_id)
                    except KeyError:
                        file_id_revisions[file_id] = set([revision_id])
        finally:
            pb.finished()
        return file_id_revisions

    def find_text_key_references(self):
        """Find the text key references within the repository.

        :return: A dictionary mapping text keys ((fileid, revision_id) tuples)
            to whether they were referred to by the inventory of the
            revision_id that they contain.
        """

    def _get_source(self, to_format):
        """Return a source for streaming from this repository."""
        if self._format._serializer == to_format._serializer:
            # We must be exactly the same format, otherwise stuff like the chk
            # page layout might be different.
            # Actually, this test is just slightly looser than exact so that
            # CHK2 <-> 2a transfers will work.
            return GroupCHKStreamSource(self, to_format)
        return super(CHKInventoryRepository, self)._get_source(to_format)


class GroupCHKStreamSource(KnitPackStreamSource):
    """Used when both the source and target repo are GroupCHK repos."""

    def __init__(self, from_repository, to_format):
        """Create a StreamSource streaming from from_repository."""
        super(GroupCHKStreamSource, self).__init__(from_repository, to_format)
        self._revision_keys = None
        self._text_keys = None
        self._text_fetch_order = 'groupcompress'
        self._chk_id_roots = None
        self._chk_p_id_roots = None

            p_id_roots_set.clear()
        return ('inventories', _filtered_inv_stream())

    def _get_filtered_chk_streams(self, excluded_revision_keys):
        self._text_keys = set()
        excluded_revision_keys.discard(_mod_revision.NULL_REVISION)
        if not excluded_revision_keys:
            uninteresting_root_keys = set()
            uninteresting_pid_root_keys = set()
        else:
            # filter out any excluded revisions whose inventories are not
            # actually present
            # TODO: Update Repository.iter_inventories() to add
            #       ignore_missing=True
            present_keys = self.from_repository._find_present_inventory_keys(
                excluded_revision_keys)
            present_ids = [k[-1] for k in present_keys]
            uninteresting_root_keys = set()
            uninteresting_pid_root_keys = set()
            for inv in self.from_repository.iter_inventories(present_ids):
                uninteresting_root_keys.add(inv.id_to_entry.key())
                uninteresting_pid_root_keys.add(
                    inv.parent_id_basename_to_file_id.key())
        bytes_to_info = inventory.CHKInventory._bytes_to_utf8name_key
        chk_bytes = self.from_repository.chk_bytes
        def _filter_id_to_entry():
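            # Stream the id_to_entry pages reachable only from the interesting
            # roots, collecting text keys into self._text_keys as the records
            # stream past.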
            interesting_nodes = chk_map.iter_interesting_nodes(chk_bytes,
                        self._chk_id_roots, uninteresting_root_keys)
            for record in _filter_text_keys(interesting_nodes, self._text_keys,
                    bytes_to_info):
                if record is not None:
                    yield record
            # Consumed
            self._chk_id_roots = None
        yield 'chk_bytes', _filter_id_to_entry()
        def _get_parent_id_basename_to_file_id_pages():
            for record, items in chk_map.iter_interesting_nodes(chk_bytes,
                        self._chk_p_id_roots, uninteresting_pid_root_keys):
                if record is not None:
                    yield record
            # Consumed
            self._chk_p_id_roots = None
        yield 'chk_bytes', _get_parent_id_basename_to_file_id_pages()

    def get_stream(self, search):
        revision_ids = search.get_keys()
        for stream_info in self._fetch_revision_texts(revision_ids):
            yield stream_info
        self._revision_keys = [(rev_id,) for rev_id in revision_ids]
        self.from_repository.revisions.clear_cache()
        self.from_repository.signatures.clear_cache()
        yield self._get_inventory_stream(self._revision_keys)
        self.from_repository.inventories.clear_cache()
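        # Note: clear_cache() drops whatever each versioned-files object
        # cached while its stream was produced, which presumably keeps the
        # memory footprint of a large fetch bounded.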
        # TODO: The keys to exclude might be part of the search recipe
        # For now, exclude all parents that are at the edge of ancestry, for
        # which we have inventories
        from_repo = self.from_repository
        parent_keys = from_repo._find_parent_keys_of_revisions(
            self._revision_keys)
        for stream_info in self._get_filtered_chk_streams(parent_keys):
            yield stream_info
        self.from_repository.chk_bytes.clear_cache()
        yield self._get_text_stream()
        self.from_repository.texts.clear_cache()

    def get_stream_for_missing_keys(self, missing_keys):
        # missing keys can only occur when we are byte copying and not
        # translating (because translation means we don't send
        # unreconstructable deltas ever).
        missing_inventory_keys = set()
        for key in missing_keys:
            if key[0] != 'inventories':
                raise AssertionError('The only missing keys we should'
                    ' be filling in are inventory keys, not %s'
                    % (key[0],))
            missing_inventory_keys.add(key[1:])
        if self._chk_id_roots or self._chk_p_id_roots:
            raise AssertionError('Cannot call get_stream_for_missing_keys'
                ' until all of get_stream() has been consumed.')
        # Yield the inventory stream, so we can find the chk stream
        # Some of the missing_keys will be missing because they are ghosts.
        # As such, we can ignore them. The Sink is required to verify there are
        # no unavailable texts when the ghost inventories are not filled in.
        yield self._get_inventory_stream(missing_inventory_keys,
                                         allow_absent=True)
        # We use the empty set for excluded_revision_keys, to make it clear
        # that we want to transmit all referenced chk pages.
        for stream_info in self._get_filtered_chk_streams(set()):
            yield stream_info


class _InterestingKeyInfo(object):
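    """Holds the id_to_entry and parent_id_basename_to_file_id root keys for
    a set of inventories: 'interesting' for inventories being transferred,
    'uninteresting' for those wanted only as parents.
    """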

    def __init__(self):
        self.interesting_root_keys = set()
        self.interesting_pid_root_keys = set()
        self.uninteresting_root_keys = set()
        self.uninteresting_pid_root_keys = set()

    def all_interesting(self):
        return self.interesting_root_keys.union(self.interesting_pid_root_keys)

    def all_uninteresting(self):
        return self.uninteresting_root_keys.union(
            self.uninteresting_pid_root_keys)

    def all_keys(self):
        return self.all_interesting().union(self.all_uninteresting())


def _build_interesting_key_sets(repo, inventory_ids, parent_only_inv_ids):
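    """Sort each inventory's chk root keys into the interesting sets or, for
    inventories wanted only as parents, the uninteresting sets.
    """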
    result = _InterestingKeyInfo()
    for inv in repo.iter_inventories(inventory_ids, 'unordered'):
        root_key = inv.id_to_entry.key()
        pid_root_key = inv.parent_id_basename_to_file_id.key()
        if inv.revision_id in parent_only_inv_ids:
            result.uninteresting_root_keys.add(root_key)
            result.uninteresting_pid_root_keys.add(pid_root_key)
        else:
            result.interesting_root_keys.add(root_key)
            result.interesting_pid_root_keys.add(pid_root_key)
    return result


def _filter_text_keys(interesting_nodes_iterable, text_keys, bytes_to_info):
    """Iterate the result of iter_interesting_nodes, yielding the records
    and adding to text_keys.
    """
    for record, items in interesting_nodes_iterable:
        for name, bytes in items:
            # Note: we don't care about name_utf8, because groupcompress repos
            # are always rich-root, so there are no synthesised root records to
            # ignore.
            _, file_id, revision_id = bytes_to_info(bytes)
            # intern() the strings so repeated ids share a single copy.
            file_id = intern(file_id)
            revision_id = intern(revision_id)
            text_keys.add(StaticTuple(file_id, revision_id).intern())
        yield record


class RepositoryFormatCHK1(RepositoryFormatPack):
    """A hashed CHK+group compress pack repository."""

    repository_class = CHKInventoryRepository
    supports_external_lookups = True
    supports_chks = True
    # For right now, setting this to True gives us InterModel1And2 rather
    # than InterDifferingSerializer
    rich_root_data = True

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""
        return ("Development repository format - rich roots, group compression"
            " and chk inventories")


class RepositoryFormatCHK2(RepositoryFormatCHK1):
    """A CHK repository that uses the bencode revision serializer."""

    _serializer = chk_serializer.chk_bencode_serializer

    def _get_matching_bzrdir(self):
        return bzrdir.format_registry.make_bzrdir('development7-rich-root')

    def _ignore_setting_bzrdir(self, format):
        pass

    _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)

    def get_format_string(self):
        """See RepositoryFormat.get_format_string()."""
        return ('Bazaar development format - chk repository with bencode '
                'revision serialization (needs bzr.dev from 1.16)\n')


class RepositoryFormat2a(RepositoryFormatCHK2):
    """A CHK repository that uses the bencode revision serializer.

    This is the same as RepositoryFormatCHK2 but with a public name.
    """

    _serializer = chk_serializer.chk_bencode_serializer

    def _get_matching_bzrdir(self):
        return bzrdir.format_registry.make_bzrdir('2a')

    def _ignore_setting_bzrdir(self, format):
        pass

    _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
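
    # Note: the format string below is stored in .bzr/repository/format and
    # identifies the repository format on disk, so it must stay byte-for-byte
    # stable once released.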
    def get_format_string(self):
        """See RepositoryFormat.get_format_string()."""
        return ('Bazaar repository format 2a (needs bzr 1.16 or later)\n')

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""
        return ("Repository format 2a - rich roots, group compression"
            " and chk inventories")