# Copyright (C) 2007 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

from bzrlib import (
    errors,
    tsort,
    )
from bzrlib.deprecated_graph import (node_distances, select_farthest)
from bzrlib.revision import NULL_REVISION

# DIAGRAM of terminology
#       A
#       /\
#      B  C
#      |  |\
#      D  E F
#      |\/| |
#      |/\|/
#      G  H
#
# In this diagram, relative to G and H:
# A, B, C, D, E are common ancestors.
# C, D and E are border ancestors, because each has a non-common descendant.
# D and E are least common ancestors because none of their descendants are
# common ancestors.
# C is not a least common ancestor because its descendant, E, is a common
# ancestor.
#
# The find_unique_lca algorithm will pick A in two steps:
# 1. find_lca('G', 'H') => ['D', 'E']
# 2. Since len(['D', 'E']) > 1, find_lca('D', 'E') => ['A']
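#
# An illustrative sketch (assuming a parents provider built over the ancestry
# drawn above -- any object whose get_parents call behaves like
# _StackedParentsProvider.get_parents will do):
#
#   ancestry = {'A': [NULL_REVISION], 'B': ['A'], 'C': ['A'], 'D': ['B'],
#               'E': ['C'], 'F': ['C'], 'G': ['D', 'E'], 'H': ['D', 'E', 'F']}
#   graph = Graph(parents_provider)      # parents_provider wraps ancestry
#   graph.find_lca('G', 'H')             # => set(['D', 'E'])
#   graph.find_unique_lca('G', 'H')      # => 'A'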


class _StackedParentsProvider(object):

    def __init__(self, parent_providers):
        self._parent_providers = parent_providers

    def __repr__(self):
        return "_StackedParentsProvider(%r)" % self._parent_providers

    def get_parents(self, revision_ids):
        """Find revision ids of the parents of a list of revisions

        A list is returned of the same length as the input.  Each entry
        is a list of parent ids for the corresponding input revision.

        [NULL_REVISION] is used as the parent of the first user-committed
        revision.  Its parent list is empty.

        If the revision is not present (i.e. a ghost), None is used in place
        of the list of parents.
        """
        found = {}
        for parents_provider in self._parent_providers:
            pending_revisions = [r for r in revision_ids if r not in found]
            parent_list = parents_provider.get_parents(pending_revisions)
            new_found = dict((k, v) for k, v in zip(pending_revisions,
                             parent_list) if v is not None)
            found.update(new_found)
            if len(found) == len(revision_ids):
                break
        return [found.get(r, None) for r in revision_ids]
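
# A minimal stacking sketch (assuming two hypothetical providers; each knows
# only part of the history, and a provider signals an unknown revision by
# returning None in that position):
#
#   stacked = _StackedParentsProvider([local_provider, fallback_provider])
#   stacked.get_parents(['rev-a', 'ghost-rev'])
#   # 'rev-a' is answered by the first provider that knows it; if no provider
#   # knows 'ghost-rev', None is returned in its place.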


class Graph(object):
    """Provide incremental access to revision graphs.

    This is the generic implementation; it is intended to be subclassed to
    specialize it for other repository types.
    """

    def __init__(self, parents_provider):
        """Construct a Graph that uses several graphs as its input

        This should not normally be invoked directly, because there may be
        specialized implementations for particular repository types.  See
        Repository.get_graph()

        :param parents_provider: an object providing a get_parents call
            conforming to the behavior of
            _StackedParentsProvider.get_parents
        """
        self.get_parents = parents_provider.get_parents
        self._parents_provider = parents_provider

    def __repr__(self):
        return 'Graph(%r)' % self._parents_provider

    def find_lca(self, *revisions):
        """Determine the lowest common ancestors of the provided revisions

        A lowest common ancestor is a common ancestor none of whose
        descendants are common ancestors.  In graphs, unlike trees, there may
        be multiple lowest common ancestors.

        This algorithm has two phases.  Phase 1 identifies border ancestors,
        and phase 2 filters border ancestors to determine lowest common
        ancestors.

        In phase 1, border ancestors are identified, using a breadth-first
        search starting at the bottom of the graph.  Searches are stopped
        whenever a node or one of its descendants is determined to be common.

        In phase 2, the border ancestors are filtered to find the least
        common ancestors.  This is done by searching the ancestries of each
        border ancestor.

        Phase 2 is performed on the principle that a border ancestor that is
        not an ancestor of any other border ancestor is a least common
        ancestor.

        Searches are stopped when they find a node that is determined to be a
        common ancestor of all border ancestors, because this shows that it
        cannot be a descendant of any border ancestor.

        The scaling of this operation should be proportional to
        1. The number of uncommon ancestors
        2. The number of border ancestors
        3. The length of the shortest path between a border ancestor and an
           ancestor of all border ancestors.
        """
        border_common, common, sides = self._find_border_ancestors(revisions)
        # We may have common ancestors that can be reached from each other.
        # - ask for the heads of them to filter it down to only ones that
        # cannot be reached from each other - phase 2.
        return self.heads(border_common)

    def find_difference(self, left_revision, right_revision):
        """Determine the graph difference between two revisions"""
        border, common, (left, right) = self._find_border_ancestors(
            [left_revision, right_revision])
        return (left.difference(right).difference(common),
                right.difference(left).difference(common))
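
    # For the DIAGRAM above, a sketch of the expected difference (revisions
    # reachable only from the left side vs. only from the right side):
    #   graph.find_difference('G', 'H')  # => (set(['G']), set(['F', 'H']))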

    def _make_breadth_first_searcher(self, revisions):
        return _BreadthFirstSearcher(revisions, self)

    def _find_border_ancestors(self, revisions):
        """Find common ancestors with at least one uncommon descendant.

        Border ancestors are identified using a breadth-first
        search starting at the bottom of the graph.  Searches are stopped
        whenever a node or one of its descendants is determined to be common.

        This will scale with the number of uncommon ancestors.

        As well as the border ancestors, a set of seen common ancestors and a
        list of sets of seen ancestors for each input revision is returned.
        This allows calculation of graph difference from the results of this
        operation.
        """
        if None in revisions:
            raise errors.InvalidRevisionId(None, self)
        common_searcher = self._make_breadth_first_searcher([])
        common_ancestors = set()
        searchers = [self._make_breadth_first_searcher([r])
                     for r in revisions]
        active_searchers = searchers[:]
        border_ancestors = set()

        def update_common(searcher, revision):
            w_seen_ancestors = searcher.find_seen_ancestors(
                revision)
            stopped = searcher.stop_searching_any(w_seen_ancestors)
            common_ancestors.update(w_seen_ancestors)
            common_searcher.start_searching(stopped)

        while True:
            if len(active_searchers) == 0:
                return border_ancestors, common_ancestors, [s.seen for s in
                                                            searchers]
            try:
                new_common = common_searcher.next()
                common_ancestors.update(new_common)
            except StopIteration:
                pass
            else:
                for searcher in active_searchers:
                    for revision in new_common.intersection(searcher.seen):
                        update_common(searcher, revision)

            newly_seen = set()
            new_active_searchers = []
            for searcher in active_searchers:
                try:
                    newly_seen.update(searcher.next())
                except StopIteration:
                    pass
                else:
                    new_active_searchers.append(searcher)
            active_searchers = new_active_searchers
            for revision in newly_seen:
                if revision in common_ancestors:
                    for searcher in searchers:
                        update_common(searcher, revision)
                    continue
                for searcher in searchers:
                    if revision not in searcher.seen:
                        break
                else:
                    border_ancestors.add(revision)
                    for searcher in searchers:
                        update_common(searcher, revision)
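
    # A sketch of the return shape for the DIAGRAM above (how much of the
    # common region each set contains depends on how far each search ran
    # before being stopped):
    #   border, common, (g_seen, h_seen) = \
    #       graph._find_border_ancestors(['G', 'H'])
    #   # border  -- common ancestors found on the frontier, e.g. 'D' and 'E'
    #   # common  -- every common ancestor encountered, e.g. 'A' through 'E'
    #   # g_seen, h_seen -- the revisions visited from 'G' and from 'H'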

    def heads(self, keys):
        """Return the heads from amongst keys.

        This is done by searching the ancestries of each key.  Any key that is
        reachable from another key is not returned; all the others are.

        This operation scales with the relative depth between any two keys. If
        any two keys are completely disconnected all ancestry of both sides
        will be retrieved.

        :param keys: An iterable of keys.
        :return: A set of the heads. Note that as a set there is no ordering
            information. Callers will need to filter their input to create
            order if they need it.
        """
        candidate_heads = set(keys)
        if len(candidate_heads) < 2:
            return candidate_heads
        searchers = dict((c, self._make_breadth_first_searcher([c]))
                         for c in candidate_heads)
        active_searchers = dict(searchers)
        # skip over the actual candidate for each searcher
        for searcher in active_searchers.itervalues():
            searcher.next()
        # The common walker finds nodes that are common to two or more of the
        # input keys, so that we don't access all history when a currently
        # uncommon search point actually meets up with something behind a
        # common search point. Common search points do not keep searches
        # active; they just allow us to make searches inactive without
        # accessing all history.
        common_walker = self._make_breadth_first_searcher([])
        while len(active_searchers) > 0:
            ancestors = set()
            # advance searches
            try:
                common_walker.next()
            except StopIteration:
                pass
            for candidate in active_searchers.keys():
                try:
                    searcher = active_searchers[candidate]
                except KeyError:
                    # rare case: we deleted candidate in a previous iteration
                    # through this for loop, because it was determined to be
                    # a descendant of another candidate.
                    continue
                try:
                    ancestors.update(searcher.next())
                except StopIteration:
                    del active_searchers[candidate]
                    continue
            # process found nodes
            new_common = set()
            for ancestor in ancestors:
                if ancestor in candidate_heads:
                    candidate_heads.remove(ancestor)
                    del searchers[ancestor]
                    if ancestor in active_searchers:
                        del active_searchers[ancestor]
                # it may meet up with a known common node
                already_common = ancestor in common_walker.seen
                all_seen = False
                if not already_common:
                    # or it may have been just reached by all the searchers:
                    for searcher in searchers.itervalues():
                        if ancestor not in searcher.seen:
                            break
                    else:
                        all_seen = True
                if already_common:
                    # some searcher has encountered our known common nodes:
                    # just stop it
                    ancestor_set = set([ancestor])
                    for searcher in searchers.itervalues():
                        searcher.stop_searching_any(ancestor_set)
                elif all_seen:
                    # The final active searcher has just reached this node,
                    # making it be known as a descendant of all candidates, so
                    # we can stop searching it, and any seen ancestors
                    new_common.add(ancestor)
                    for searcher in searchers.itervalues():
                        seen_ancestors = \
                            searcher.find_seen_ancestors(ancestor)
                        searcher.stop_searching_any(seen_ancestors)
            common_walker.start_searching(new_common)
        return candidate_heads
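
    # Illustrative sketches against the DIAGRAM at the top of this module:
    #   graph.heads(['D', 'E'])  # => set(['D', 'E'])  neither reaches the other
    #   graph.heads(['C', 'E'])  # => set(['E'])       C is an ancestor of E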

    def find_unique_lca(self, left_revision, right_revision):
        """Find a unique LCA.

        Find lowest common ancestors.  If there is no unique common
        ancestor, find the lowest common ancestors of those ancestors.

        Iteration stops when a unique lowest common ancestor is found.
        The graph origin is necessarily a unique lowest common ancestor.

        Note that None is not an acceptable substitute for NULL_REVISION
        in the input for this method.
        """
        revisions = [left_revision, right_revision]
        while True:
            lca = self.find_lca(*revisions)
            if len(lca) == 1:
                return lca.pop()
            if len(lca) == 0:
                raise errors.NoCommonAncestor(left_revision, right_revision)
            revisions = lca

    def iter_topo_order(self, revisions):
        """Iterate through the input revisions in topological order.

        This sorting only ensures that parents come before their children.
        An ancestor may sort after a descendant if the relationship is not
        visible in the supplied list of revisions.
        """
        sorter = tsort.TopoSorter(zip(revisions, self.get_parents(revisions)))
        return sorter.iter_topo_order()
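
    # A sketch against the DIAGRAM above: with ['G', 'D', 'B'] as input the
    # relationships B -> D -> G are all visible, so the only valid ordering is
    #   list(graph.iter_topo_order(['G', 'D', 'B']))  # => ['B', 'D', 'G']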

    def is_ancestor(self, candidate_ancestor, candidate_descendant):
        """Determine whether a revision is an ancestor of another.

        We answer this using heads() as heads() has the logic to perform the
        smallest number of parent lookups to determine the ancestral
        relationship between N revisions.
        """
        return set([candidate_descendant]) == self.heads(
            [candidate_ancestor, candidate_descendant])
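
    # Illustrative sketch against the DIAGRAM above:
    #   graph.is_ancestor('A', 'G')  # => True,  heads(['A', 'G']) == set(['G'])
    #   graph.is_ancestor('F', 'G')  # => False, F and G are unrelated heads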


class HeadsCache(object):
    """A cache of results for graph heads calls."""

    def __init__(self, graph):
        self.graph = graph
        self._heads = {}

    def heads(self, keys):
        """Return the heads of keys.

        This matches the API of Graph.heads(), specifically the return value
        is a set which can be mutated, and ordering of the input is not
        preserved in the output.

        :see also: Graph.heads.
        :param keys: The keys to calculate heads for.
        :return: A set containing the heads, which may be mutated without
            affecting future lookups.
        """
        keys = frozenset(keys)
        try:
            return set(self._heads[keys])
        except KeyError:
            heads = self.graph.heads(keys)
            self._heads[keys] = heads
            return set(heads)
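
# A minimal usage sketch (assuming `graph` is any object with a compatible
# heads() call):
#
#   cache = HeadsCache(graph)
#   cache.heads(['G', 'H'])   # computed via graph.heads() and cached
#   cache.heads(['H', 'G'])   # same frozenset key, answered from the cache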


class _BreadthFirstSearcher(object):
    """Parallel breadth-first search of the ancestry of revisions.

    This class implements the iterator protocol, but additionally
    1. provides a set of seen ancestors, and
    2. allows some ancestries to be unsearched, via stop_searching_any
    """

    def __init__(self, revisions, parents_provider):
        self._start = set(revisions)
        self._search_revisions = None
        self.seen = set(revisions)
        self._parents_provider = parents_provider

    def __repr__(self):
        return ('_BreadthFirstSearcher(self._search_revisions=%r,'
                ' self.seen=%r)' % (self._search_revisions, self.seen))

    def next(self):
        """Return the next ancestors of this revision.

        Ancestors are returned in the order they are seen in a breadth-first
        traversal.  No ancestor will be returned more than once.
        """
        if self._search_revisions is None:
            self._search_revisions = self._start
        else:
            new_search_revisions = set()
            for parents in self._parents_provider.get_parents(
                    self._search_revisions):
                if parents is None:
                    # ghost revision: there are no parents to walk
                    continue
                new_search_revisions.update(p for p in parents if
                                            p not in self.seen)
            self._search_revisions = new_search_revisions
        if len(self._search_revisions) == 0:
            raise StopIteration()
        self.seen.update(self._search_revisions)
        return self._search_revisions

    def __iter__(self):
        return self

    def find_seen_ancestors(self, revision):
        """Find ancestors of this revision that have already been seen."""
        searcher = _BreadthFirstSearcher([revision], self._parents_provider)
        seen_ancestors = set()
        for ancestors in searcher:
            for ancestor in ancestors:
                if ancestor not in self.seen:
                    searcher.stop_searching_any([ancestor])
                else:
                    seen_ancestors.add(ancestor)
        return seen_ancestors

    def stop_searching_any(self, revisions):
        """Remove any of the specified revisions from the search list.

        None of the specified revisions are required to be present in the
        search list; for any that are not, the call is a no-op.

        :return: The set of revisions that were actually stopped.
        """
        stopped = self._search_revisions.intersection(revisions)
        self._search_revisions = self._search_revisions.difference(revisions)
        return stopped

    def start_searching(self, revisions):
        if self._search_revisions is None:
            self._start = set(revisions)
        else:
            self._search_revisions.update(revisions.difference(self.seen))
        self.seen.update(revisions)
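
# A minimal sketch of driving the searcher directly (assuming the same
# hypothetical parents provider as the DIAGRAM sketches above):
#
#   searcher = _BreadthFirstSearcher(['H'], parents_provider)
#   searcher.next()                     # => set(['H'])
#   searcher.next()                     # => set(['D', 'E', 'F'])
#   searcher.stop_searching_any(['F'])  # stop expanding below F
#   searcher.next()                     # => set(['B', 'C'])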