_newline_null_re = re.compile('[\n\0]')


def _has_key_from_parent_map(self, key):
    """Check if this index has one key.

    If it's possible to check for multiple keys at once through
    calling get_parent_map that should be faster.
    """
    return (key in self.get_parent_map([key]))


def _missing_keys_from_parent_map(self, keys):
    return set(keys) - set(self.get_parent_map(keys))


class GraphIndexBuilder(object):
    """A builder that can build a GraphIndex.

    The resulting graph has the structure:

    _SIGNATURE OPTIONS NODES NEWLINE
    _SIGNATURE := 'Bazaar Graph Index 1' NEWLINE
    OPTIONS := 'node_ref_lists=' DIGITS NEWLINE

        :param key_elements: The number of bytestrings in each key.
        """
        self.reference_lists = reference_lists
        # A dict of {key: (absent, ref_lists, value)}
        # Keys that are referenced but not actually present in this index
        self._absent_keys = set()
        self._nodes_by_key = None
        self._key_length = key_elements
        self._optimize_for_size = False
        self._combine_backing_indices = True
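    # Illustrative usage sketch (not from this file; the call sequence is an
    # assumption based on the builder API shown in this excerpt):
    #   builder = GraphIndexBuilder(reference_lists=1, key_elements=1)
    #   builder.add_node(('key1',), 'value1', ([],))
    #   builder.add_node(('key2',), 'value2', ([('key1',)],))
    #   stream = builder.finish()  # a file-like object with the serialised index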

    def _check_key(self, key):
        """Raise BadIndexKey if key is not a valid key for this index."""
        if type(key) not in (tuple, StaticTuple):
            raise errors.BadIndexKey(key)
        if self._key_length != len(key):
            raise errors.BadIndexKey(key)
        for element in key:
            if not element or _whitespace_re.search(element) is not None:
                raise errors.BadIndexKey(element)

    def _external_references(self):
        """Return references that are not present in this index.

        # TODO: JAM 2008-11-21 This makes an assumption about how the reference
        #       lists are used. It is currently correct for pack-0.92 through
        #       1.9, which use the node references (3rd column) second
        #       reference list as the compression parent. Perhaps this should
        #       be moved into something higher up the stack, since it
        #       makes assumptions about how the index is used.
        if self.reference_lists > 1:
            for node in self.iter_all_entries():
                refs.update(node[3][1])
        # If reference_lists == 0 there can be no external references, and
        # if reference_lists == 1, then there isn't a place to store the

    def _get_nodes_by_key(self):
        if self._nodes_by_key is None:
            nodes_by_key = {}

            # There may be duplicates, but I don't think it is worth worrying
            self._nodes[reference] = ('a', (), '')
        self._absent_keys.update(absent_references)
        self._absent_keys.discard(key)
        self._nodes[key] = ('', node_refs, value)
        if self._nodes_by_key is not None and self._key_length > 1:
            self._update_nodes_by_key(key, value, node_refs)

    def clear_cache(self):
        """See GraphIndex.clear_cache()

        This is a no-op, but we need the api to conform to a generic 'Index'

    def finish(self):
        lines = [_SIGNATURE]
        lines.append(_OPTION_NODE_REFS + str(self.reference_lists) + '\n')
        lines.append(_OPTION_KEY_ELEMENTS + str(self._key_length) + '\n')
        key_count = len(self._nodes) - len(self._absent_keys)
        lines.append(_OPTION_LEN + str(key_count) + '\n')
        prefix_length = sum(len(x) for x in lines)
        # references are byte offsets. To avoid having to do nasty
        # polynomial work to resolve offsets (references to later in the
                (len(result.getvalue()), expected_bytes))

    def set_optimize(self, for_size=None, combine_backing_indices=None):
        """Change how the builder tries to optimize the result.

        :param for_size: Tell the builder to try and make the index as small as
            possible.
        :param combine_backing_indices: If the builder spills to disk to save
            memory, should the on-disk indices be combined. Set to True if you
            are going to be probing the index, but to False if you are not. (If
            you are not querying, then the time spent combining is wasted.)
        """
        # GraphIndexBuilder itself doesn't pay attention to the flag yet, but
        if for_size is not None:
            self._optimize_for_size = for_size
        if combine_backing_indices is not None:
            self._combine_backing_indices = combine_backing_indices
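    # Illustrative call (not from this file): a caller that writes the index
    # once and never queries it afterwards could request
    #   builder.set_optimize(for_size=True, combine_backing_indices=False)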

    def find_ancestry(self, keys, ref_list_num):
        """See CombinedGraphIndex.find_ancestry()"""
            for _, key, value, ref_lists in self.iter_entries(pending):
                parent_keys = ref_lists[ref_list_num]
                parent_map[key] = parent_keys
                next_pending.update([p for p in parent_keys if p not in
                                     parent_map])
            missing_keys.update(pending.difference(parent_map))
            pending = next_pending
        return parent_map, missing_keys


class GraphIndex(object):
    """An index for data with embedded graphs.

    The index maps keys to a list of key reference lists, and a value.
    Each node has the same number of key reference lists. Each key reference
    list can be empty or an arbitrary length. The value is an opaque NULL
    terminated string without any newlines. The storage of the index is
    hidden in the interface: keys and key references are always tuples of
    bytestrings, never the internal representation (e.g. dictionary offsets).
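
    A rough usage sketch (illustrative only; the constructor arguments shown
    here are an assumption based on the transport/name/size attributes used
    further down, not something stated in this excerpt):

        index = GraphIndex(transport, 'example.cix', size)
        for node in index.iter_all_entries():
            ...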

                    node_value = value
            self._nodes[key] = node_value
            if self._key_length > 1:
                # TODO: We may want to do this lazily, but if we are calling
                #       _buffer_all, we are likely to be doing
                #       iter_entries_prefix
                key_dict = self._nodes_by_key
                if self.node_ref_lists:
                    key_value = key, node_value[0], node_value[1]
                else:
                    key_value = key, node_value
                # For a key of (foo, bar, baz) create
                # _nodes_by_key[foo][bar][baz] = key_value
                for subkey in key[:-1]:
                    key_dict = key_dict.setdefault(subkey, {})
                key_dict[key[-1]] = key_value
        # cache the keys for quick set intersections
        self._keys = set(self._nodes)
        if trailers != 1:
            # there must be one line - the empty trailer line.
            raise errors.BadIndexData(self)

    def clear_cache(self):
        """Clear out any cached/memoized values.

        This can be called at any time, but generally it is used when we have
        extracted some information, but don't expect to be requesting any more

    def external_references(self, ref_list_num):
        """Return references that are not present in this index.
        if ref_list_num + 1 > self.node_ref_lists:
            raise ValueError('No ref list %d, index has %d ref lists'
                % (ref_list_num, self.node_ref_lists))
        refs = set()
        nodes = self._nodes
        for key, (value, ref_lists) in nodes.iteritems():
            ref_list = ref_lists[ref_list_num]
            refs.update([ref for ref in ref_list if ref not in nodes])

    def _get_nodes_by_key(self):
        if self._nodes_by_key is None:
            nodes_by_key = {}
            if self.node_ref_lists:
                for key, (value, references) in self._nodes.iteritems():
                    key_dict = nodes_by_key
                    for subkey in key[:-1]:
                        key_dict = key_dict.setdefault(subkey, {})
                    key_dict[key[-1]] = key, value, references
            else:
                for key, value in self._nodes.iteritems():
                    key_dict = nodes_by_key
                    for subkey in key[:-1]:
                        key_dict = key_dict.setdefault(subkey, {})
                    key_dict[key[-1]] = key, value
            self._nodes_by_key = nodes_by_key
        return self._nodes_by_key
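    # Illustrative note: for a 3-element key ('foo', 'bar', 'baz') the nested
    # mapping built above is reachable as
    #   nodes_by_key['foo']['bar']['baz'] == (key, value, references)
    # (or (key, value) when the index has no reference lists).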

    def iter_all_entries(self):
        """Iterate over all keys within the index.

    def _iter_entries_from_total_buffer(self, keys):
        """Iterate over keys when the entire index is parsed."""
        # Note: See the note in BTreeBuilder.iter_entries for why we don't use
        #       .intersection() here
        nodes = self._nodes
        keys = [key for key in keys if key in nodes]
        if self.node_ref_lists:
            for key in keys:
                value, node_refs = nodes[key]
                yield self, key, value, node_refs
        else:
            for key in keys:
                yield self, key, nodes[key]

    def iter_entries(self, keys):
        """Iterate over keys within the index.

                # the last thing looked up was a terminal element
                yield (self, ) + key_dict

    def _find_ancestors(self, keys, ref_list_num, parent_map, missing_keys):
        """See BTreeIndex._find_ancestors."""
        # The api can be implemented as a trivial overlay on top of
        # iter_entries, it is not an efficient implementation, but it at least
        for index, key, value, refs in self.iter_entries(keys):
            parent_keys = refs[ref_list_num]
            parent_map[key] = parent_keys
            search_keys.update(parent_keys)
        # Figure out what, if anything, was missing
        missing_keys.update(set(keys).difference(found_keys))
        search_keys = search_keys.difference(parent_map)

    def key_count(self):
        """Return an estimate of the number of keys in this index.

        For GraphIndex the estimate is exact.
        """
        if self._key_count is None:
            self._buffer_all()

        base_offset = self._base_offset
        if base_offset != 0:
            # Rewrite the ranges for the offset
            readv_ranges = [(start+base_offset, size)
                            for start, size in readv_ranges]
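            # For example (illustrative numbers): with base_offset == 1024 a
            # requested range (0, 4096) is actually read as (1024, 4096); the
            # loop below subtracts base_offset again so the parsing code sees
            # index-local offsets.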
        readv_data = self._transport.readv(self._name, readv_ranges, True,
            self._size + self._base_offset)
        for offset, data in readv_data:
            offset -= base_offset
            self._bytes_read += len(data)
            if offset < 0:
                # transport.readv() expanded to extra data which isn't part of
                data = data[-offset:]
                offset = 0
            if offset == 0 and len(data) == self._size:
                # We read the whole range, most likely because the
                # Transport upcast our readv ranges into one long request

    in the index list.

    def __init__(self, indices, reload_func=None):
        """Create a CombinedGraphIndex backed by indices.

        :param indices: An ordered list of indices to query for data.
        :param reload_func: A function to call if we find we are missing an
            index. Should have the form reload_func() => True/False to indicate
            if reloading actually changed anything.
        """
        self._indices = indices
        self._reload_func = reload_func
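    # Illustrative usage sketch (names assumed, not from this excerpt):
    #   combined = CombinedGraphIndex([newest_index, older_index],
    #       reload_func=packs.reload_pack_names)
    #   parent_map = combined.get_parent_map(keys)
    # Because indices are queried in order, earlier indices take precedence
    # when the same key appears in more than one of them.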

    def __repr__(self):
        return "%s(%s)" % (
                self.__class__.__name__,
                ', '.join(map(repr, self._indices)))

    @symbol_versioning.deprecated_method(symbol_versioning.one_one)
    def get_parents(self, revision_ids):
        """See graph._StackedParentsProvider.get_parents.

        This implementation thunks the graph.Graph.get_parents api across to
        :param revision_ids: An iterable of graph keys for this graph.
        :return: A list of parent details for each key in revision_ids.
            Each parent details will be one of:
            * None when the key was missing
            * (NULL_REVISION,) when the key has no parents.
            * (parent_key, parent_key...) otherwise.
        """
        parent_map = self.get_parent_map(revision_ids)
        return [parent_map.get(r, None) for r in revision_ids]

    def clear_cache(self):
        """See GraphIndex.clear_cache()"""
        for index in self._indices:
            index.clear_cache()

    def get_parent_map(self, keys):
        """See graph.StackedParentsProvider.get_parent_map"""
        search_keys = set(keys)
        if NULL_REVISION in search_keys:
            search_keys.discard(NULL_REVISION)

        seen_keys = set()
        while True:
            try:
                for index in self._indices:
                    for node in index.iter_entries_prefix(keys):
                        if node[1] in seen_keys:
                            continue
                        seen_keys.add(node[1])
                        yield node
                return
            except errors.NoSuchFile:
                self._reload_or_raise()

    def find_ancestry(self, keys, ref_list_num):
        """Find the complete ancestry for the given set of keys.

        Note that this is a whole-ancestry request, so it should be used
        :param keys: An iterable of keys to look for
        :param ref_list_num: The reference list which references the parents
        :return: (parent_map, missing_keys)
        """
        missing_keys = set()
        parent_map = {}
        keys_to_lookup = set(keys)
        while keys_to_lookup:
            # keys that *all* indexes claim are missing, stop searching them
            all_index_missing = None
            # print 'gen\tidx\tsub\tn_keys\tn_pmap\tn_miss'
            # print '%4d\t\t\t%4d\t%5d\t%5d' % (generation, len(keys_to_lookup),
            #                                   len(missing_keys))
            for index_idx, index in enumerate(self._indices):
                # TODO: we should probably be doing something with
                #       'missing_keys' since we've already determined that
                #       those revisions have not been found anywhere
                index_missing_keys = set()
                # Find all of the ancestry we can from this index
                # keep looking until the search_keys set is empty, which means
                # things we didn't find should be in index_missing_keys
                search_keys = keys_to_lookup
                # print '  \t%2d\t\t%4d\t%5d\t%5d' % (
                #     index_idx, len(search_keys),
                #     len(parent_map), len(index_missing_keys))
                while search_keys:
                    # TODO: ref_list_num should really be a parameter, since
                    #       CombinedGraphIndex does not know what the ref lists
                    search_keys = index._find_ancestors(search_keys,
                        ref_list_num, parent_map, index_missing_keys)
                    # print '  \t  \t%2d\t%4d\t%5d\t%5d' % (
                    #     sub_generation, len(search_keys),
                    #     len(parent_map), len(index_missing_keys))
                # Now set whatever was missing to be searched in the next index
                keys_to_lookup = index_missing_keys
                if all_index_missing is None:
                    all_index_missing = set(index_missing_keys)
                else:
                    all_index_missing.intersection_update(index_missing_keys)
                if not keys_to_lookup:
                    break
            if all_index_missing is None:
                # There were no indexes, so all search keys are 'missing'
                missing_keys.update(keys_to_lookup)
                keys_to_lookup = None
            else:
                missing_keys.update(all_index_missing)
                keys_to_lookup.difference_update(all_index_missing)
        return parent_map, missing_keys
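    # Worked example (illustrative): given two child indices where the first
    # records A -> B and the second records B -> C, find_ancestry([A], 0) asks
    # the first index, learns A's parents, then forwards whatever is still
    # unresolved to the second index; only keys that every index reports
    # missing end up in missing_keys.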

    def key_count(self):
        """Return an estimate of the number of keys in this index.

        For CombinedGraphIndex this is approximated by the sum of the keys of
        the child indices. As child indices may have duplicate keys this can
        have a maximum error of the number of child indices * largest number of
        keys in any index.
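
        For example (an illustration, not text from the original docstring): if
        one child index holds keys {k1, k2} and another holds {k2, k3},
        key_count() reports 2 + 2 = 4 even though only three distinct keys
        exist.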
        """
        try:
            return sum((index.key_count() for index in self._indices), 0)
        except errors.NoSuchFile:
            self._reload_or_raise()

    missing_keys = _missing_keys_from_parent_map

    def _reload_or_raise(self):
        """We just got a NoSuchFile exception.

        Try to reload the indices, if it fails, just raise the current
        exception.
        """
        if self._reload_func is None:
            raise
        exc_type, exc_value, exc_traceback = sys.exc_info()
        trace.mutter('Trying to reload after getting exception: %s',
                     exc_value)
        if not self._reload_func():
            # We tried to reload, but nothing changed, so we fail anyway
            trace.mutter('_reload_func indicated nothing has changed.'
                         ' Raising original exception.')
            raise exc_type, exc_value, exc_traceback

    def validate(self):
        """Validate that everything in the index can be accessed."""
        while True:
            try:
                for index in self._indices:
                    index.validate()
                return
            except errors.NoSuchFile:
                self._reload_or_raise()


class InMemoryGraphIndex(GraphIndexBuilder):