        """Detect missing inventories or chk root entries for the new revisions
        in this write group.

        :returns: list of strs, summarising any problems found.  If the list is
            empty no problems were found.
        """
        # Ensure that all revisions added in this write group have:
        #   - corresponding inventories,
        #   - chk root entries for those inventories,
        #   - and any present parent inventories have their chk root
        #     entries too.
        # And all this should be independent of any fallback repository.
        problems = []
        key_deps = self.repo.revisions._index._key_dependencies
        new_revisions_keys = key_deps.get_new_keys()
        no_fallback_inv_index = self.repo.inventories._index
        no_fallback_chk_bytes_index = self.repo.chk_bytes._index
        no_fallback_texts_index = self.repo.texts._index
        inv_parent_map = no_fallback_inv_index.get_parent_map(
            new_revisions_keys)
        # Are any inventories corresponding to the new revisions missing?
        corresponding_invs = set(inv_parent_map)
        missing_corresponding = set(new_revisions_keys)
        missing_corresponding.difference_update(corresponding_invs)
        if missing_corresponding:
            problems.append("inventories missing for revisions %s" %
                (sorted(missing_corresponding),))
            return problems
        # Are any chk root entries missing for any inventories?  This includes
        # any present parent inventories, which may be used when calculating
        # deltas for streaming.
        all_inv_keys = set(corresponding_invs)
        for parent_inv_keys in inv_parent_map.itervalues():
            all_inv_keys.update(parent_inv_keys)
        # Filter out ghost parents.
        all_inv_keys.intersection_update(
            no_fallback_inv_index.get_parent_map(all_inv_keys))
        parent_invs_only_keys = all_inv_keys.symmetric_difference(
            corresponding_invs)
        inv_ids = [key[-1] for key in all_inv_keys]
        parent_invs_only_ids = [key[-1] for key in parent_invs_only_keys]
        root_key_info = _build_interesting_key_sets(
            self.repo, inv_ids, parent_invs_only_ids)
        expected_chk_roots = root_key_info.all_keys()
        present_chk_roots = no_fallback_chk_bytes_index.get_parent_map(
            expected_chk_roots)
        missing_chk_roots = expected_chk_roots.difference(present_chk_roots)
        if missing_chk_roots:
            problems.append("missing referenced chk root keys: %s"
                % (sorted(missing_chk_roots),))
            # Don't bother checking any further.
            return problems
        # Find all interesting chk_bytes records, and make sure they are
        # present, as well as the text keys they reference.
        chk_bytes_no_fallbacks = self.repo.chk_bytes.without_fallbacks()
        chk_bytes_no_fallbacks._search_key_func = \
            self.repo.chk_bytes._search_key_func
        chk_diff = chk_map.iter_interesting_nodes(
            chk_bytes_no_fallbacks, root_key_info.interesting_root_keys,
            root_key_info.uninteresting_root_keys)
        bytes_to_info = inventory.CHKInventory._bytes_to_utf8name_key
        text_keys = set()
        try:
            for record in _filter_text_keys(chk_diff, text_keys, bytes_to_info):
                pass
        except errors.NoSuchRevision, e:
            # XXX: It would be nice if we could give a more precise error here.
            problems.append("missing chk node(s) for id_to_entry maps")
        chk_diff = chk_map.iter_interesting_nodes(
            chk_bytes_no_fallbacks, root_key_info.interesting_pid_root_keys,
            root_key_info.uninteresting_pid_root_keys)
        try:
            for interesting_rec, interesting_map in chk_diff:
                pass
        except errors.NoSuchRevision, e:
            problems.append(
                "missing chk node(s) for parent_id_basename_to_file_id maps")
        present_text_keys = no_fallback_texts_index.get_parent_map(text_keys)
        missing_text_keys = text_keys.difference(present_text_keys)
        if missing_text_keys:
            problems.append("missing text keys: %r"
                % (sorted(missing_text_keys),))
        return problems
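
    # Illustrative sketch (not part of bzrlib): one way a write group commit
    # could act on the problems list returned above. The method name
    # _example_abort_on_problems is hypothetical.
    def _example_abort_on_problems(self, problems):
        if problems:
            # One summary per line keeps the resulting error readable.
            raise errors.BzrCheckError(
                "Cannot add revision(s) to repository: "
                + "\n".join(problems))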

    def _execute_pack_operations(self, pack_operations,
                                 _packer_class=GCCHKPacker,
                                 reload_func=None):
        # ...
            for pack in packs:
                self._remove_pack_from_memory(pack)
        # record the newly available packs and stop advertising the old
        # packs
        to_be_obsoleted = []
        for _, packs in pack_operations:
            to_be_obsoleted.extend(packs)
        result = self._save_pack_names(clear_obsolete_packs=True,
                                       obsolete_packs=to_be_obsoleted)
        return result
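
    # Hypothetical usage sketch (not from bzrlib): pack_operations is a list
    # of (revision_count, packs) pairs, so a caller combining two small packs
    # and repacking a third might pass something like:
    #
    #   operations = [(10, [pack_a, pack_b]), (100, [pack_c])]
    #   collection._execute_pack_operations(operations,
    #       reload_func=collection.reload_pack_names)
    #
    # pack_a, pack_b, pack_c and collection stand in for real Pack objects
    # and their owning pack collection.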

    # ...
                                 ' no new_path %r' % (file_id,))
            if new_path == '':
                new_inv.root_id = file_id
                parent_id_basename_key = StaticTuple('', '').intern()
            else:
                utf8_entry_name = entry.name.encode('utf-8')
                parent_id_basename_key = StaticTuple(entry.parent_id,
                                                     utf8_entry_name).intern()
            new_value = entry_to_bytes(entry)
            # Populate Caches?
            # new_inv._path_to_fileid_cache[new_path] = file_id
            key = StaticTuple(file_id).intern()
            id_to_entry_dict[key] = new_value
            parent_id_basename_dict[parent_id_basename_key] = file_id

        new_inv._populate_from_dicts(self.chk_bytes, id_to_entry_dict,
            parent_id_basename_dict, maximum_size=serializer.maximum_size)
        return new_inv
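
    # A minimal sketch of the interning pattern used above, assuming
    # bzrlib.static_tuple.StaticTuple: it behaves like a plain tuple, and
    # .intern() returns a canonical shared instance, so the thousands of
    # identical keys built by a large delta share memory:
    #
    #   key_a = StaticTuple('file-id').intern()
    #   key_b = StaticTuple('file-id').intern()
    #   assert key_a is key_b  # both refer to the same interned object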

    def _iter_inventory_xmls(self, revision_ids, ordering):
        # Without a native 'xml' inventory, this method doesn't make sense.
        # However older working trees, and older bundles want it - so we supply
        # it allowing _get_inventory_xml to work. Bundles currently use the
        # serializer directly; this also isn't ideal, but there isn't an xml
        # iteration interface offered at all for repositories. We could make
        # _iter_inventory_xmls be part of the contract, even if kept private.

    # ...
            present_parent_inv_ids = set(
                [k[-1] for k in present_parent_inv_keys])
            inventories_to_read = set(revision_ids)
            inventories_to_read.update(present_parent_inv_ids)
            root_key_info = _build_interesting_key_sets(
                self, inventories_to_read, present_parent_inv_ids)
            interesting_root_keys = root_key_info.interesting_root_keys
            uninteresting_root_keys = root_key_info.uninteresting_root_keys
            chk_bytes = self.chk_bytes
            for record, items in chk_map.iter_interesting_nodes(chk_bytes,
                        interesting_root_keys, uninteresting_root_keys,
                        pb=pb):
                for name, bytes in items:
                    (name_utf8, file_id, revision_id) = bytes_to_info(bytes)
                    # TODO: consider interning file_id, revision_id here, or
                    #       pushing that intern() into bytes_to_info()
                    # TODO: rich_root should always be True here, for all
                    #       repositories that support chk_bytes
                    if not rich_root and name_utf8 == '':
                        continue

    # ...
        bytes_to_info = inventory.CHKInventory._bytes_to_utf8name_key
        chk_bytes = self.from_repository.chk_bytes
        def _filter_id_to_entry():
            interesting_nodes = chk_map.iter_interesting_nodes(chk_bytes,
                        self._chk_id_roots, uninteresting_root_keys)
            for record in _filter_text_keys(interesting_nodes, self._text_keys,
                    bytes_to_info):
                if record is not None:
                    yield record
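        # Sketch (based on how bzrlib consumes this closure): the generator is
        # handed out lazily as one element of the record stream,
        #
        #   yield 'chk_bytes', _filter_id_to_entry()
        #
        # so self._text_keys only fills in as the sink pulls chk records.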

    # ...
        for stream_info in self._fetch_revision_texts(revision_ids):
            yield stream_info
        self._revision_keys = [(rev_id,) for rev_id in revision_ids]
        self.from_repository.revisions.clear_cache()
        self.from_repository.signatures.clear_cache()
        yield self._get_inventory_stream(self._revision_keys)
        self.from_repository.inventories.clear_cache()
        # TODO: The keys to exclude might be part of the search recipe
        # For now, exclude all parents that are at the edge of ancestry, for
        # which we have inventories

    # ...
                missing_inventory_keys.add(key[1:])
        if self._chk_id_roots or self._chk_p_id_roots:
            raise AssertionError('Cannot call get_stream_for_missing_keys'
                ' until all of get_stream() has been consumed.')
        # Yield the inventory stream, so we can find the chk stream
        # Some of the missing_keys will be missing because they are ghosts.
        # As such, we can ignore them. The Sink is required to verify there are
        # no unavailable texts when the ghost inventories are not filled in.
        # ...
            yield stream_info


class _InterestingKeyInfo(object):
    def __init__(self):
        self.interesting_root_keys = set()
        self.interesting_pid_root_keys = set()
        self.uninteresting_root_keys = set()
        self.uninteresting_pid_root_keys = set()

    def all_interesting(self):
        return self.interesting_root_keys.union(self.interesting_pid_root_keys)

    def all_uninteresting(self):
        return self.uninteresting_root_keys.union(
            self.uninteresting_pid_root_keys)

    def all_keys(self):
        return self.all_interesting().union(self.all_uninteresting())


def _build_interesting_key_sets(repo, inventory_ids, parent_only_inv_ids):
    result = _InterestingKeyInfo()
    for inv in repo.iter_inventories(inventory_ids, 'unordered'):
        root_key = inv.id_to_entry.key()
        pid_root_key = inv.parent_id_basename_to_file_id.key()
        if inv.revision_id in parent_only_inv_ids:
            result.uninteresting_root_keys.add(root_key)
            result.uninteresting_pid_root_keys.add(pid_root_key)
        else:
            result.interesting_root_keys.add(root_key)
            result.interesting_pid_root_keys.add(pid_root_key)
    return result
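

# Illustrative helper (not part of bzrlib): one way to compute every chk root
# key a consistency check would expect, mirroring how
# _build_interesting_key_sets and all_keys() are combined above. inv_ids and
# parent_only_inv_ids have the same meaning as the parameters above.
def _example_expected_chk_roots(repo, inv_ids, parent_only_inv_ids):
    root_key_info = _build_interesting_key_sets(
        repo, inv_ids, parent_only_inv_ids)
    # all_keys() unions the interesting and uninteresting sides; a write
    # group check compares this set against the chk_bytes index.
    return root_key_info.all_keys()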


def _filter_text_keys(interesting_nodes_iterable, text_keys, bytes_to_info):
    """Iterate the result of iter_interesting_nodes, yielding the records
    and adding to text_keys.
    """
    for record, items in interesting_nodes_iterable:
        for name, bytes in items:
            # Note: we don't care about name_utf8, because groupcompress repos
            # are always rich-root, so there are no synthesised root records to
            # ignore.
            _, file_id, revision_id = bytes_to_info(bytes)
            file_id = intern(file_id)
            revision_id = intern(revision_id)
            text_keys.add(StaticTuple(file_id, revision_id).intern())
        yield record
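

# Usage sketch (hypothetical, mirroring the write-group check above): drain
# _filter_text_keys purely for its side effect of accumulating text keys.
def _example_collect_text_keys(chk_store, root_key_info, bytes_to_info):
    text_keys = set()
    chk_diff = chk_map.iter_interesting_nodes(
        chk_store, root_key_info.interesting_root_keys,
        root_key_info.uninteresting_root_keys)
    for record in _filter_text_keys(chk_diff, text_keys, bytes_to_info):
        pass  # records are discarded here; only text_keys matters
    return text_keys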


class RepositoryFormatCHK1(RepositoryFormatPack):
    """A hashed CHK+group compress pack repository."""