class CachingParentsProvider(object):
    """A parents provider which will cache the revision => parents as a dict.

    This is useful for providers which have an expensive look up.

    Either a ParentsProvider or a get_parent_map-like callback may be
    supplied. If it provides extra un-asked-for parents, they will be cached,
    but filtered out of get_parent_map.

    The cache is enabled by default, but may be disabled and re-enabled.
    """

    def __init__(self, parent_provider=None, get_parent_map=None):
        """Constructor.

        :param parent_provider: The ParentProvider to use. It or
            get_parent_map must be supplied.
        :param get_parent_map: The get_parent_map callback to use. It or
            parent_provider must be supplied.
        """
        self._real_provider = parent_provider
        if get_parent_map is None:
            self._get_parent_map = self._real_provider.get_parent_map
        else:
            self._get_parent_map = get_parent_map
        self._cache = None
        self.enable_cache(True)

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self._real_provider)

    def enable_cache(self, cache_misses=True):
        """Enable cache."""
        if self._cache is not None:
            raise AssertionError('Cache enabled when already enabled.')
        # Theoretically we could use an LRUCache here
        self._cache = {}
        self._cache_misses = cache_misses
        self.missing_keys = set()

    def disable_cache(self):
        """Disable and clear the cache."""
        self._cache = None
        self._cache_misses = None
        self.missing_keys = set()

    def get_cached_map(self):
        """Return any cached get_parent_map values."""
        if self._cache is None:
            return None
        return dict(self._cache)

    def get_parent_map(self, keys):
        """See StackedParentsProvider.get_parent_map."""
        cache = self._cache
        if cache is None:
            cache = self._get_parent_map(keys)
        else:
            needed_revisions = set(key for key in keys if key not in cache)
            # Do not ask for negatively cached keys
            needed_revisions.difference_update(self.missing_keys)
            if needed_revisions:
                parent_map = self._get_parent_map(needed_revisions)
                cache.update(parent_map)
                if self._cache_misses:
                    for key in needed_revisions:
                        if key not in parent_map:
                            self.note_missing_key(key)
        result = {}
        for key in keys:
            value = cache.get(key)
            if value is not None:
                result[key] = value
        return result

    def note_missing_key(self, key):
        """Note that key is a missing key."""
        if self._cache_misses:
            self.missing_keys.add(key)
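
# A usage sketch for CachingParentsProvider (illustrative, not part of the
# original module; the revision ids are hypothetical). DictParentsProvider,
# defined earlier in this module, stands in here for an expensive provider:
#
#   inner = DictParentsProvider({'rev2': ('rev1',), 'rev1': ()})
#   caching = CachingParentsProvider(inner)
#   caching.get_parent_map(['rev2', 'ghost'])  # => {'rev2': ('rev1',)}
#   caching.missing_keys                       # => set(['ghost'])
#   caching.get_parent_map(['rev2', 'ghost'])  # answered from the cache;
#                                              # 'ghost' is negatively cached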


class Graph(object):
    """Provide incremental access to revision graphs.

    This is the generic implementation; it is intended to be subclassed to
    specialize it for other repository types.
    """

    def find_difference(self, left_revision, right_revision):
        """Determine the graph difference between two revisions"""
        border, common, searchers = self._find_border_ancestors(
            [left_revision, right_revision])
        self._search_for_extra_common(common, searchers)
        left = searchers[0].seen
        right = searchers[1].seen
        return (left.difference(right), right.difference(left))

    def find_distance_to_null(self, target_revision_id, known_revision_ids):
        """Find the left-hand distance to the NULL_REVISION.

        (This can also be considered the revno of a branch at
        target_revision_id.)

        :param target_revision_id: A revision_id which we would like to know
            the revno for.
        :param known_revision_ids: [(revision_id, revno)] A list of known
            revno, revision_id tuples. We'll use this to seed the search.
        """
        # Map from revision_ids to a known value for their revno
        known_revnos = dict(known_revision_ids)
        cur_tip = target_revision_id
        num_steps = 0
        NULL_REVISION = revision.NULL_REVISION
        known_revnos[NULL_REVISION] = 0

        searching_known_tips = list(known_revnos.keys())

        unknown_searched = {}

        while cur_tip not in known_revnos:
            unknown_searched[cur_tip] = num_steps
            num_steps += 1
            to_search = set([cur_tip])
            to_search.update(searching_known_tips)
            parent_map = self.get_parent_map(to_search)
            parents = parent_map.get(cur_tip, None)
            if not parents: # An empty list or None is a ghost
                raise errors.GhostRevisionsHaveNoRevno(target_revision_id,
                                                       cur_tip)
            cur_tip = parents[0]
            next_known_tips = []
            for revision_id in searching_known_tips:
                parents = parent_map.get(revision_id, None)
                if not parents:
                    continue
                next = parents[0]
                next_revno = known_revnos[revision_id] - 1
                if next in unknown_searched:
                    # We have enough information to return a value right now
                    return next_revno + unknown_searched[next]
                if next in known_revnos:
                    continue
                known_revnos[next] = next_revno
                next_known_tips.append(next)
            searching_known_tips = next_known_tips
        # We reached a known revision, so just add in how many steps it took to
        # get there.
        return known_revnos[cur_tip] + num_steps
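
    # An illustrative sketch (hypothetical revision ids, not from the original
    # source): in the linear history rev1 -> rev2 -> rev3, where rev1's parent
    # is NULL_REVISION, the left-hand distance of rev3 is its revno:
    #
    #   g = Graph(DictParentsProvider({
    #       'rev3': ('rev2',), 'rev2': ('rev1',),
    #       'rev1': (revision.NULL_REVISION,)}))
    #   g.find_distance_to_null('rev3', [])             # => 3
    #   g.find_distance_to_null('rev3', [('rev2', 2)])  # seeded: 2 + 1 => 3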

    def find_lefthand_distances(self, keys):
        """Find the distance to null for all the keys in keys.

        :param keys: keys to lookup.
        :return: A dict key->distance for all of keys.
        """
        # Optimisable by concurrent searching, but a random spread should get
        # some sort of hit rate.
        known_revnos = []
        ghosts = []
        for key in keys:
            try:
                known_revnos.append(
                    (key, self.find_distance_to_null(key, known_revnos)))
            except errors.GhostRevisionsHaveNoRevno:
                ghosts.append(key)
        for key in ghosts:
            known_revnos.append((key, -1))
        return dict(known_revnos)
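
    # Continuing the sketch above: ghosts come back as -1 rather than raising,
    # because the GhostRevisionsHaveNoRevno error is caught per key.
    #
    #   g.find_lefthand_distances(['rev3', 'rev1', 'a-ghost'])
    #   # => {'rev3': 3, 'rev1': 1, 'a-ghost': -1}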

    def find_unique_ancestors(self, unique_revision, common_revisions):
        """Find the unique ancestors for a revision versus others.

        This returns the ancestry of unique_revision, excluding all revisions
        in the ancestry of common_revisions.

        :param unique_revision: The revision_id whose ancestry we are
            interested in.
        :param common_revisions: Revision_ids of ancestries to exclude.
        :return: A set of revisions in the ancestry of unique_revision
        """
        unique_searcher, common_searcher = self._find_initial_unique_nodes(
            [unique_revision], common_revisions)

        unique_nodes = unique_searcher.seen.difference(common_searcher.seen)
        if not unique_nodes:
            return unique_nodes

        (all_unique_searcher,
         unique_tip_searchers) = self._make_unique_searchers(unique_nodes,
            unique_searcher, common_searcher)

        return self._refine_unique_nodes(unique_searcher, all_unique_searcher,
                                         unique_tip_searchers, common_searcher)

    def _find_initial_unique_nodes(self, unique_revisions, common_revisions):
        """Steps 1-3 of find_unique_ancestors.

        :return: (unique_searcher, common_searcher)
        """
        unique_searcher = self._make_breadth_first_searcher(unique_revisions)
        common_searcher = self._make_breadth_first_searcher(common_revisions)

        while unique_searcher._next_query:
            next_unique_nodes = set(unique_searcher.step())
            next_common_nodes = set(common_searcher.step())

            # Check if either searcher encounters new nodes seen by the other
            # side.
            unique_are_common_nodes = next_unique_nodes.intersection(
                common_searcher.seen)
            unique_are_common_nodes.update(
                next_common_nodes.intersection(unique_searcher.seen))
            if unique_are_common_nodes:
                ancestors = unique_searcher.find_seen_ancestors(
                                unique_are_common_nodes)
                # TODO: This is a bit overboard, we only really care about
                #       the ancestors of the tips because the rest we
                #       already know. This is *correct* but causes us to
                #       search too much ancestry.
                ancestors.update(common_searcher.find_seen_ancestors(ancestors))
                unique_searcher.stop_searching_any(ancestors)
                common_searcher.start_searching(ancestors)

        return unique_searcher, common_searcher

    def _make_unique_searchers(self, unique_nodes, unique_searcher,
                               common_searcher):
        """Create a searcher for all the unique search tips (step 4).

        As a side effect, the common_searcher will stop searching any nodes
        that are ancestors of the unique searcher tips.

        :return: (all_unique_searcher, unique_tip_searchers)
        """
        unique_tips = self._remove_simple_descendants(unique_nodes,
                        self.get_parent_map(unique_nodes))

        if len(unique_tips) == 1:
            unique_tip_searchers = []
            ancestor_all_unique = unique_searcher.find_seen_ancestors(unique_tips)
        else:
            unique_tip_searchers = []
            for tip in unique_tips:
                revs_to_search = unique_searcher.find_seen_ancestors([tip])
                revs_to_search.update(
                    common_searcher.find_seen_ancestors(revs_to_search))
                searcher = self._make_breadth_first_searcher(revs_to_search)
                # We don't care about the starting nodes.
                searcher._label = tip
                searcher.step()
                unique_tip_searchers.append(searcher)

            ancestor_all_unique = None
            for searcher in unique_tip_searchers:
                if ancestor_all_unique is None:
                    ancestor_all_unique = set(searcher.seen)
                else:
                    ancestor_all_unique = ancestor_all_unique.intersection(
                        searcher.seen)
        # Collapse all the common nodes into a single searcher
        all_unique_searcher = self._make_breadth_first_searcher(
            ancestor_all_unique)
        if ancestor_all_unique:
            # We've seen these nodes in all the searchers, so we'll just go to
            # the next
            all_unique_searcher.step()

            # Stop any search tips that are already known as ancestors of the
            # unique nodes
            stopped_common = common_searcher.stop_searching_any(
                common_searcher.find_seen_ancestors(ancestor_all_unique))

            total_stopped = 0
            for searcher in unique_tip_searchers:
                total_stopped += len(searcher.stop_searching_any(
                    searcher.find_seen_ancestors(ancestor_all_unique)))
        if 'graph' in debug.debug_flags:
            trace.mutter('For %d unique nodes, created %d + 1 unique searchers'
                         ' (%d stopped search tips, %d common ancestors'
                         ' (%d stopped common)',
                         len(unique_nodes), len(unique_tip_searchers),
                         total_stopped, len(ancestor_all_unique),
                         len(stopped_common))
        return all_unique_searcher, unique_tip_searchers

    def _step_unique_and_common_searchers(self, common_searcher,
                                          unique_tip_searchers,
                                          unique_searcher):
        """Step all the searchers"""
        newly_seen_common = set(common_searcher.step())
        newly_seen_unique = set()
        for searcher in unique_tip_searchers:
            next = set(searcher.step())
            next.update(unique_searcher.find_seen_ancestors(next))
            next.update(common_searcher.find_seen_ancestors(next))
            for alt_searcher in unique_tip_searchers:
                if alt_searcher is searcher:
                    continue
                next.update(alt_searcher.find_seen_ancestors(next))
            searcher.start_searching(next)
            newly_seen_unique.update(next)
        return newly_seen_common, newly_seen_unique

    def _find_nodes_common_to_all_unique(self, unique_tip_searchers,
                                         all_unique_searcher,
                                         newly_seen_unique, step_all_unique):
        """Find nodes that are common to all unique_tip_searchers.

        If it is time, step the all_unique_searcher, and add its nodes to the
        result.
        """
        common_to_all_unique_nodes = newly_seen_unique.copy()
        for searcher in unique_tip_searchers:
            common_to_all_unique_nodes.intersection_update(searcher.seen)
        common_to_all_unique_nodes.intersection_update(
                                    all_unique_searcher.seen)
        # Step all-unique less frequently than the other searchers.
        # In the common case, we don't need to spider out far here, so
        # avoid doing extra work.
        if step_all_unique:
            tstart = time.clock()
            nodes = all_unique_searcher.step()
            common_to_all_unique_nodes.update(nodes)
            if 'graph' in debug.debug_flags:
                tdelta = time.clock() - tstart
                trace.mutter('all_unique_searcher step() took %.3fs'
                             ' for %d nodes (%d total), iteration: %s',
                             tdelta, len(nodes), len(all_unique_searcher.seen),
                             all_unique_searcher._iterations)
        return common_to_all_unique_nodes
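
    # The intersection above, reduced to plain sets (hypothetical values): a
    # node is common-to-all only if every tip searcher and the all-unique
    # searcher have already seen it.
    #
    #   newly_seen = set(['n1', 'n2'])
    #   tip_seen_sets = [set(['n1', 'n2', 'n3']), set(['n1'])]
    #   common = newly_seen.copy()
    #   for seen in tip_seen_sets:
    #       common.intersection_update(seen)
    #   common  # => set(['n1'])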

    def _collapse_unique_searchers(self, unique_tip_searchers,
                                   common_to_all_unique_nodes):
        """Combine searchers that are searching the same tips.

        When two searchers are searching the same tips, we can stop one of the
        searchers. We also know that the maximal set of common ancestors is the
        intersection of the two original searchers.

        :return: A list of searchers that are searching unique nodes.
        """
        # Filter out searchers that don't actually search different
        # nodes. We already have the ancestry intersection for them
        unique_search_tips = {}
        for searcher in unique_tip_searchers:
            stopped = searcher.stop_searching_any(common_to_all_unique_nodes)
            will_search_set = frozenset(searcher._next_query)
            if not will_search_set:
                if 'graph' in debug.debug_flags:
                    trace.mutter('Unique searcher %s was stopped.'
                                 ' (%s iterations) %d nodes stopped',
                                 searcher._label,
                                 searcher._iterations,
                                 len(stopped))
            elif will_search_set not in unique_search_tips:
                # This searcher is searching a unique set of nodes, let it
                unique_search_tips[will_search_set] = [searcher]
            else:
                unique_search_tips[will_search_set].append(searcher)
        # TODO: it might be possible to collapse searchers faster when they
        #       only have *some* search tips in common.
        next_unique_searchers = []
        for searchers in unique_search_tips.itervalues():
            if len(searchers) == 1:
                # Searching unique tips, go for it
                next_unique_searchers.append(searchers[0])
            else:
                # These searchers have started searching the same tips, we
                # don't need them to cover the same ground. The
                # intersection of their ancestry won't change, so create a
                # new searcher, combining their histories.
                next_searcher = searchers[0]
                for searcher in searchers[1:]:
                    next_searcher.seen.intersection_update(searcher.seen)
                if 'graph' in debug.debug_flags:
                    trace.mutter('Combining %d searchers into a single'
                                 ' searcher searching %d nodes with'
                                 ' %d ancestry',
                                 len(searchers),
                                 len(next_searcher._next_query),
                                 len(next_searcher.seen))
                next_unique_searchers.append(next_searcher)
        return next_unique_searchers
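
    # The de-duplication trick above, in isolation (hypothetical values):
    # searchers about to search identical tip sets hash to the same frozenset
    # key, so they land in one bucket and can be merged.
    #
    #   buckets = {}
    #   buckets.setdefault(frozenset(['rev-x', 'rev-y']), []).append('s1')
    #   buckets.setdefault(frozenset(['rev-y', 'rev-x']), []).append('s2')
    #   buckets.values()  # => [['s1', 's2']]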

    def _refine_unique_nodes(self, unique_searcher, all_unique_searcher,
                             unique_tip_searchers, common_searcher):
        """Steps 5-8 of find_unique_ancestors.

        This function returns when common_searcher has stopped searching for
        more nodes.
        """
        # We step the ancestor_all_unique searcher only every
        # STEP_UNIQUE_SEARCHER_EVERY steps.
        step_all_unique_counter = 0
        # While we still have common nodes to search
        while common_searcher._next_query:
            (newly_seen_common,
             newly_seen_unique) = self._step_unique_and_common_searchers(
                common_searcher, unique_tip_searchers, unique_searcher)
            # These nodes are common ancestors of all unique nodes
            common_to_all_unique_nodes = self._find_nodes_common_to_all_unique(
                unique_tip_searchers, all_unique_searcher, newly_seen_unique,
                step_all_unique_counter==0)
            step_all_unique_counter = ((step_all_unique_counter + 1)
                                       % STEP_UNIQUE_SEARCHER_EVERY)

            if newly_seen_common:
                # If a 'common' node is an ancestor of all unique searchers, we
                # can stop searching it.
                common_searcher.stop_searching_any(
                    all_unique_searcher.seen.intersection(newly_seen_common))
            if common_to_all_unique_nodes:
                common_to_all_unique_nodes.update(
                    common_searcher.find_seen_ancestors(
                        common_to_all_unique_nodes))
                # The all_unique searcher can start searching the common nodes
                # but everyone else can stop.
                # This is the sort of thing where we would like to not have it
                # start_searching all of the nodes, but only mark all of them
                # as seen, and have it search only the actual tips. Otherwise
                # it is another get_parent_map() traversal for it to figure out
                # what we already should know.
                all_unique_searcher.start_searching(common_to_all_unique_nodes)
                common_searcher.stop_searching_any(common_to_all_unique_nodes)

            next_unique_searchers = self._collapse_unique_searchers(
                unique_tip_searchers, common_to_all_unique_nodes)
            if len(unique_tip_searchers) != len(next_unique_searchers):
                if 'graph' in debug.debug_flags:
                    trace.mutter('Collapsed %d unique searchers => %d'
                                 ' at %s iterations',
                                 len(unique_tip_searchers),
                                 len(next_unique_searchers),
                                 all_unique_searcher._iterations)
            unique_tip_searchers = next_unique_searchers
        return unique_searcher.seen.difference(common_searcher.seen)

    def get_parent_map(self, revisions):
        """Get a map of key:parent_list for revisions.
        """


class SearchResult(object):

    def get_keys(self):
        """Return the keys found in this search.

        :return: A set of keys.
        """
        return self._keys

    def is_empty(self):
        """Return false if the search lists 1 or more revisions."""
        return self._recipe[3] == 0

    def refine(self, seen, referenced):
        """Create a new search by refining this search.

        :param seen: Revisions that have been satisfied.
        :param referenced: Revision references observed while satisfying some
            of this search.
        """
        start = self._recipe[1]
        exclude = self._recipe[2]
        count = self._recipe[3]
        keys = self.get_keys()
        # New heads = referenced + old heads - seen things - exclude
        pending_refs = set(referenced)
        pending_refs.update(start)
        pending_refs.difference_update(seen)
        pending_refs.difference_update(exclude)
        # New exclude = old exclude + satisfied heads
        seen_heads = start.intersection(seen)
        exclude.update(seen_heads)
        # keys gets seen removed
        keys = keys - seen
        # length is reduced by len(seen)
        count -= len(seen)
        return SearchResult(pending_refs, exclude, count, keys)
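
    # A worked example of the arithmetic above (hypothetical ids): a result
    # rooted at rev-a covering three keys, where rev-a has been satisfied and
    # was seen to reference rev-b:
    #
    #   result = SearchResult(set(['rev-a']), set(), 3,
    #                         set(['rev-a', 'rev-b', 'rev-c']))
    #   refined = result.refine(set(['rev-a']), set(['rev-b']))
    #   refined.get_recipe()  # => ('search', set(['rev-b']), set(['rev-a']), 2)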


class PendingAncestryResult(object):
    """A search result that will reconstruct the ancestry for some graph heads.

    Unlike SearchResult, this doesn't hold the complete search result in
    memory, it just holds a description of how to generate it.
    """

    def __init__(self, heads, repo):
        """Constructor.

        :param heads: an iterable of graph heads.
        :param repo: a repository to use to generate the ancestry for the given
            heads.
        """
        self.heads = frozenset(heads)
        self.repo = repo

    def get_recipe(self):
        """Return a recipe that can be used to replay this search.

        The recipe allows reconstruction of the same results at a later date.

        :seealso SearchResult.get_recipe:

        :return: A tuple ('proxy-search', start_keys_set, set(), -1)
            To recreate this result, create a PendingAncestryResult with the
            start_keys_set.
        """
        return ('proxy-search', self.heads, set(), -1)

    def get_keys(self):
        """See SearchResult.get_keys.

        Returns all the keys for the ancestry of the heads, excluding
        NULL_REVISION.
        """
        return self._get_keys(self.repo.get_graph())

    def _get_keys(self, graph):
        NULL_REVISION = revision.NULL_REVISION
        keys = [key for (key, parents) in graph.iter_ancestry(self.heads)
                if key != NULL_REVISION and parents is not None]
        return keys

    def is_empty(self):
        """Return false if the search lists 1 or more revisions."""
        if revision.NULL_REVISION in self.heads:
            return len(self.heads) == 1
        else:
            return len(self.heads) == 0

    def refine(self, seen, referenced):
        """Create a new search by refining this search.

        :param seen: Revisions that have been satisfied.
        :param referenced: Revision references observed while satisfying some
            of this search.
        """
        referenced = self.heads.union(referenced)
        return PendingAncestryResult(referenced - seen, self.repo)
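
# A small sketch of PendingAncestryResult.refine (hypothetical ids; 'repo'
# stands for any repository object): the heads are rebased onto whatever is
# still referenced but not yet satisfied.
#
#   pending = PendingAncestryResult(['rev-b'], repo)
#   refined = pending.refine(set(['rev-b']), set(['rev-a']))
#   refined.heads  # => frozenset(['rev-a'])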


def collapse_linear_regions(parent_map):
    """Collapse regions of the graph that are 'linear'.

    For example::

      A:[B], B:[C]

    can be collapsed by removing B and getting::

      A:[C]

    :param parent_map: A dictionary mapping children to their parents
    :return: Another dictionary with 'linear' chains collapsed
    """
    # Note: this isn't a strictly minimal collapse. For example:
    #   A
    #  / \
    # B   C
    #  \ /
    #   D
    #   |
    #   E
    # Will not have 'D' removed, even though 'E' could fit. Also:
    #   A
    #   |    A
    #   B => |
    #   |    C
    #   C
    # A and C are both kept because they are edges of the graph. We *could* get
    # rid of A if we wanted.
    #   A
    #  / \
    # B   C
    # |   |
    # D   E
    #  \ /
    #   F
    # Will not have any nodes removed, even though you do have an
    # 'uninteresting' linear D->B and E->C
    children = {}
    for child, parents in parent_map.iteritems():
        children.setdefault(child, [])
        for p in parents:
            children.setdefault(p, []).append(child)

    orig_children = dict(children)
    removed = set()
    result = dict(parent_map)
    for node in parent_map:
        parents = result[node]
        if len(parents) == 1:
            parent_children = children[parents[0]]
            if len(parent_children) != 1:
                # This is not the only child
                continue
            node_children = children[node]
            if len(node_children) != 1:
                continue
            child_parents = result.get(node_children[0], None)
            if len(child_parents) != 1:
                # This is not its only parent
                continue
            # The child of this node only points at it, and the parent only has
            # this as a child. remove this node, and join the others together
            result[node_children[0]] = parents
            children[parents[0]] = node_children
            del result[node]
            del children[node]
            removed.add(node)

    return result
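
# An illustrative call (hypothetical ids): B sits inside a linear run, so it
# is collapsed away; A and C survive because they are edges of the graph.
#
#   collapse_linear_regions({'A': ('B',), 'B': ('C',), 'C': ()})
#   # => {'A': ('C',), 'C': ()}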


class GraphThunkIdsToKeys(object):
    """Forwards calls about 'ids' to be about keys internally."""

    def __init__(self, graph):
        self._graph = graph

    def heads(self, ids):
        """See Graph.heads()"""
        as_keys = [(i,) for i in ids]
        head_keys = self._graph.heads(as_keys)
        return set([h[0] for h in head_keys])
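
# A sketch of the thunk in use (assuming a key-based graph whose heads()
# takes 1-tuple keys, e.g. a KnownGraph over (revision_id,) keys):
#
#   thunk = GraphThunkIdsToKeys(key_graph)
#   thunk.heads(['rev-1', 'rev-2'])  # => plain ids, e.g. set(['rev-2'])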


_counters = [0,0,0,0,0,0,0]
try:
    from bzrlib._known_graph_pyx import KnownGraph
except ImportError, e:
    osutils.failed_to_load_extension(e)
    from bzrlib._known_graph_py import KnownGraph