# Copyright (C) 2007 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

from bzrlib import (
    errors,
    tsort,
    )
from bzrlib.deprecated_graph import (node_distances, select_farthest)
from bzrlib.revision import NULL_REVISION

# DIAGRAM of terminology
#       A
#       /\
#      B  C
#      |  |\
#      D  E F
#      |\/| |
#      |/\|/
#      G  H
#
# In this diagram, relative to G and H:
# A, B, C, D, E are common ancestors.
# C, D and E are border ancestors, because each has a non-common descendant.
# D and E are least common ancestors because none of their descendants are
# common ancestors.
# C is not a least common ancestor because its descendant, E, is a common
# ancestor.
#
# The find_unique_lca algorithm will pick A in two steps:
# 1. find_lca('G', 'H') => ['D', 'E']
# 2. Since len(['D', 'E']) > 1, find_lca('D', 'E') => ['A']
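
# As an illustrative sketch (not part of the original module), the diagram
# above can be written as a parents dict suitable for the DictParentsProvider
# defined below; the single-letter revision ids are hypothetical:
#
#   ancestry = {'A': [NULL_REVISION], 'B': ['A'], 'C': ['A'],
#               'D': ['B'], 'E': ['C'], 'F': ['C'],
#               'G': ['D', 'E'], 'H': ['D', 'E', 'F']}
#   graph = Graph(DictParentsProvider(ancestry))
#   graph.find_unique_lca('G', 'H')   # => 'A', via the two steps above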


class DictParentsProvider(object):

    def __init__(self, ancestry):
        self.ancestry = ancestry

    def __repr__(self):
        return 'DictParentsProvider(%r)' % self.ancestry

    def get_parents(self, revisions):
        return [self.ancestry.get(r, None) for r in revisions]
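
# Illustrative sketch (not part of the original module): a DictParentsProvider
# maps each revision id to its parents list and answers None for revisions it
# does not know about.  The ancestry dict and the 'ghost' id are hypothetical.
#
#   provider = DictParentsProvider({'A': [NULL_REVISION], 'B': ['A']})
#   provider.get_parents(['B', 'A', 'ghost'])
#   # => [['A'], [NULL_REVISION], None]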


class _StackedParentsProvider(object):

    def __init__(self, parent_providers):
        self._parent_providers = parent_providers

    def __repr__(self):
        return "_StackedParentsProvider(%r)" % self._parent_providers

    def get_parents(self, revision_ids):
        """Find revision ids of the parents of a list of revisions

        A list is returned of the same length as the input.  Each entry
        is a list of parent ids for the corresponding input revision.

        [NULL_REVISION] is used as the parent of the first user-committed
        revision.  Its parent list is empty.

        If the revision is not present (i.e. a ghost), None is used in place
        of the list of parents.
        """
        found = {}
        for parents_provider in self._parent_providers:
            pending_revisions = [r for r in revision_ids if r not in found]
            parent_list = parents_provider.get_parents(pending_revisions)
            new_found = dict((k, v) for k, v in zip(pending_revisions,
                             parent_list) if v is not None)
            found.update(new_found)
            if len(found) == len(revision_ids):
                break
        return [found.get(r, None) for r in revision_ids]
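
# Illustrative sketch (not part of the original module): stacking two
# providers.  Later providers are only asked about revisions the earlier ones
# did not answer.  The ancestry dicts below are hypothetical.
#
#   stacked = _StackedParentsProvider([
#       DictParentsProvider({'B': ['A']}),
#       DictParentsProvider({'A': [NULL_REVISION]}),
#       ])
#   stacked.get_parents(['B', 'A', 'ghost'])
#   # => [['A'], [NULL_REVISION], None]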


class Graph(object):
    """Provide incremental access to revision graphs.

    This is the generic implementation; it is intended to be subclassed to
    specialize it for other repository types.
    """

    def __init__(self, parents_provider):
        """Construct a Graph that uses several graphs as its input

        This should not normally be invoked directly, because there may be
        specialized implementations for particular repository types.  See
        Repository.get_graph()

        :param parents_provider: An object providing a get_parents call
            conforming to the behavior of StackedParentsProvider.get_parents
        """
        self.get_parents = parents_provider.get_parents
        self._parents_provider = parents_provider

    def __repr__(self):
        return 'Graph(%r)' % self._parents_provider

    def find_lca(self, *revisions):
        """Determine the lowest common ancestors of the provided revisions

        A lowest common ancestor is a common ancestor none of whose
        descendants are common ancestors.  In graphs, unlike trees, there may
        be multiple lowest common ancestors.

        This algorithm has two phases.  Phase 1 identifies border ancestors,
        and phase 2 filters border ancestors to determine lowest common
        ancestors.

        In phase 1, border ancestors are identified, using a breadth-first
        search starting at the bottom of the graph.  Searches are stopped
        whenever a node or one of its descendants is determined to be common.

        In phase 2, the border ancestors are filtered to find the least
        common ancestors.  This is done by searching the ancestries of each
        border ancestor.

        Phase 2 is performed on the principle that a border ancestor that is
        not an ancestor of any other border ancestor is a least common
        ancestor.

        Searches are stopped when they find a node that is determined to be a
        common ancestor of all border ancestors, because this shows that it
        cannot be a descendant of any border ancestor.

        The scaling of this operation should be proportional to:
        1. The number of uncommon ancestors
        2. The number of border ancestors
        3. The length of the shortest path between a border ancestor and an
           ancestor of all border ancestors.
        """
        border_common, common, sides = self._find_border_ancestors(revisions)
        # We may have common ancestors that can be reached from each other.
        # - ask for the heads of them to filter it down to only ones that
        #   cannot be reached from each other - phase 2.
        return self.heads(border_common)
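
    # Illustrative sketch (not part of the original module), using the
    # hypothetical ancestry and graph sketched under the terminology diagram
    # at the top of this file:
    #
    #   graph.find_lca('G', 'H')   # => set(['D', 'E'])
    #   graph.find_lca('D', 'E')   # => set(['A'])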

    def find_difference(self, left_revision, right_revision):
        """Determine the graph difference between two revisions"""
        border, common, (left, right) = self._find_border_ancestors(
            [left_revision, right_revision])
        return (left.difference(right).difference(common),
                right.difference(left).difference(common))
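
    # Illustrative sketch (not part of the original module): with the
    # hypothetical diagram ancestry used in the find_lca sketch above,
    #
    #   graph.find_difference('G', 'H')
    #   # => (set(['G']), set(['F', 'H']))
    #
    # i.e. revisions reachable only from the left side, then those reachable
    # only from the right side.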

    def _make_breadth_first_searcher(self, revisions):
        return _BreadthFirstSearcher(revisions, self)

    def _find_border_ancestors(self, revisions):
        """Find common ancestors with at least one uncommon descendant.

        Border ancestors are identified using a breadth-first
        search starting at the bottom of the graph.  Searches are stopped
        whenever a node or one of its descendants is determined to be common.

        This will scale with the number of uncommon ancestors.

        As well as the border ancestors, a set of seen common ancestors and a
        list of sets of seen ancestors for each input revision is returned.
        This allows calculation of graph difference from the results of this
        operation.
        """
        if None in revisions:
            raise errors.InvalidRevisionId(None, self)
        common_searcher = self._make_breadth_first_searcher([])
        common_ancestors = set()
        searchers = [self._make_breadth_first_searcher([r])
                     for r in revisions]
        active_searchers = searchers[:]
        border_ancestors = set()

        def update_common(searcher, revision):
            w_seen_ancestors = searcher.find_seen_ancestors(revision)
            stopped = searcher.stop_searching_any(w_seen_ancestors)
            common_ancestors.update(w_seen_ancestors)
            common_searcher.start_searching(stopped)

        while True:
            if len(active_searchers) == 0:
                return border_ancestors, common_ancestors, [s.seen for s in
                                                            searchers]
            try:
                new_common = common_searcher.next()
                common_ancestors.update(new_common)
            except StopIteration:
                pass
            else:
                for searcher in active_searchers:
                    for revision in new_common.intersection(searcher.seen):
                        update_common(searcher, revision)

            newly_seen = set()
            new_active_searchers = []
            for searcher in active_searchers:
                try:
                    newly_seen.update(searcher.next())
                except StopIteration:
                    pass
                else:
                    new_active_searchers.append(searcher)
            active_searchers = new_active_searchers
            for revision in newly_seen:
                if revision in common_ancestors:
                    for searcher in searchers:
                        update_common(searcher, revision)
                    continue
                for searcher in searchers:
                    if revision not in searcher.seen:
                        break
                else:
                    border_ancestors.add(revision)
                    for searcher in searchers:
                        update_common(searcher, revision)

    def heads(self, keys):
        """Return the heads from amongst keys.

        This is done by searching the ancestries of each key.  Any key that is
        reachable from another key is not returned; all the others are.

        This operation scales with the relative depth between any two keys. If
        any two keys are completely disconnected all ancestry of both sides
        will be retrieved.

        :param keys: An iterable of keys.
        :return: A set of the heads. Note that as a set there is no ordering
            information. Callers will need to filter their input to create
            order if they need it.
        """
        candidate_heads = set(keys)
        if len(candidate_heads) < 2:
            return candidate_heads
        searchers = dict((c, self._make_breadth_first_searcher([c]))
                          for c in candidate_heads)
        active_searchers = dict(searchers)
        # skip over the actual candidate for each searcher
        for searcher in active_searchers.itervalues():
            searcher.next()
        # The common walker finds nodes that are common to two or more of the
        # input keys, so that we don't access all history when a currently
        # uncommon search point actually meets up with something behind a
        # common search point. Common search points do not keep searches
        # active; they just allow us to make searches inactive without
        # accessing all history.
        common_walker = self._make_breadth_first_searcher([])
        while len(active_searchers) > 0:
            ancestors = set()
            # advance searches
            try:
                common_walker.next()
            except StopIteration:
                # No common points being searched at this time.
                pass
            for candidate in active_searchers.keys():
                try:
                    searcher = active_searchers[candidate]
                except KeyError:
                    # rare case: we deleted candidate in a previous iteration
                    # through this for loop, because it was determined to be
                    # a descendant of another candidate.
                    continue
                try:
                    ancestors.update(searcher.next())
                except StopIteration:
                    del active_searchers[candidate]
                    continue
            # process found nodes
            new_common = set()
            for ancestor in ancestors:
                if ancestor in candidate_heads:
                    candidate_heads.remove(ancestor)
                    del searchers[ancestor]
                    if ancestor in active_searchers:
                        del active_searchers[ancestor]
                # it may meet up with a known common node
                if ancestor in common_walker.seen:
                    # some searcher has encountered our known common nodes:
                    # just stop it
                    ancestor_set = set([ancestor])
                    for searcher in searchers.itervalues():
                        searcher.stop_searching_any(ancestor_set)
                else:
                    # or it may have been just reached by all the searchers:
                    for searcher in searchers.itervalues():
                        if ancestor not in searcher.seen:
                            break
                    else:
                        # The final active searcher has just reached this node,
                        # making it be known as a descendant of all candidates,
                        # so we can stop searching it, and any seen ancestors
                        new_common.add(ancestor)
                        for searcher in searchers.itervalues():
                            seen_ancestors = \
                                searcher.find_seen_ancestors(ancestor)
                            searcher.stop_searching_any(seen_ancestors)
            common_walker.start_searching(new_common)
        return candidate_heads
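
    # Illustrative sketch (not part of the original module): with the
    # hypothetical diagram ancestry used in the find_lca sketch above,
    #
    #   graph.heads(['G', 'H'])   # => set(['G', 'H'])  (neither descends
    #                             #    from the other)
    #   graph.heads(['C', 'E'])   # => set(['E'])  (C is an ancestor of E)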

    def find_unique_lca(self, left_revision, right_revision):
        """Find a unique LCA.

        Find lowest common ancestors.  If there is no unique common
        ancestor, find the lowest common ancestors of those ancestors.

        Iteration stops when a unique lowest common ancestor is found.
        The graph origin is necessarily a unique lowest common ancestor.

        Note that None is not an acceptable substitute for NULL_REVISION
        in the input for this method.
        """
        revisions = [left_revision, right_revision]
        while True:
            lca = self.find_lca(*revisions)
            if len(lca) == 1:
                return lca.pop()
            if len(lca) == 0:
                raise errors.NoCommonAncestor(left_revision, right_revision)
            revisions = lca
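
    # Illustrative sketch (not part of the original module): with the
    # hypothetical diagram ancestry used in the find_lca sketch above, the
    # first round yields two candidates, so a second round is run on them:
    #
    #   graph.find_unique_lca('G', 'H')
    #   # round 1: find_lca('G', 'H') => set(['D', 'E'])
    #   # round 2: find_lca('D', 'E') => set(['A']);  returns 'A'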

    def iter_topo_order(self, revisions):
        """Iterate through the input revisions in topological order.

        This sorting only ensures that parents come before their children.
        An ancestor may sort after a descendant if the relationship is not
        visible in the supplied list of revisions.
        """
        sorter = tsort.TopoSorter(zip(revisions, self.get_parents(revisions)))
        return sorter.iter_topo_order()
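
    # Illustrative sketch (not part of the original module): with the
    # hypothetical diagram ancestry used in the find_lca sketch above, a
    # possible ordering is
    #
    #   list(graph.iter_topo_order(['G', 'D', 'B']))   # e.g. ['B', 'D', 'G']
    #
    # Only parent-before-child ordering is guaranteed, and only for parents
    # present in the supplied list.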

    def is_ancestor(self, candidate_ancestor, candidate_descendant):
        """Determine whether a revision is an ancestor of another.

        We answer this using heads() as heads() has the logic to perform the
        smallest number of parent lookups to determine the ancestral
        relationship between N revisions.
        """
        return set([candidate_descendant]) == self.heads(
            [candidate_ancestor, candidate_descendant])
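
    # Illustrative sketch (not part of the original module): with the
    # hypothetical diagram ancestry used in the find_lca sketch above,
    #
    #   graph.is_ancestor('B', 'G')   # => True  (G descends from B via D)
    #   graph.is_ancestor('F', 'G')   # => False (F is only in H's ancestry)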


class HeadsCache(object):
    """A cache of results for graph heads calls."""

    def __init__(self, graph):
        self.graph = graph
        self._heads = {}

    def heads(self, keys):
        """Return the heads of keys.

        This matches the API of Graph.heads(), specifically the return value
        is a set which can be mutated, and ordering of the input is not
        preserved in the output.

        :see also: Graph.heads.
        :param keys: The keys to calculate heads for.
        :return: A set containing the heads, which may be mutated without
            affecting future lookups.
        """
        keys = frozenset(keys)
        try:
            return set(self._heads[keys])
        except KeyError:
            heads = self.graph.heads(keys)
            self._heads[keys] = heads
            return set(heads)
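
# Illustrative sketch (not part of the original module): HeadsCache memoises
# heads() per frozenset of keys, and hands back a fresh set each time so
# callers may mutate the result without corrupting the cache.
#
#   cache = HeadsCache(graph)
#   cache.heads(['G', 'H'])   # computed via graph.heads()
#   cache.heads(['H', 'G'])   # same frozenset key, answered from the cache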


class _BreadthFirstSearcher(object):
    """Parallel breadth-first search through the ancestry of revisions.

    This class implements the iterator protocol, but additionally
    1. provides a set of seen ancestors, and
    2. allows some ancestries to be unsearched, via stop_searching_any
    """

    def __init__(self, revisions, parents_provider):
        self._start = set(revisions)
        self._search_revisions = None
        self.seen = set(revisions)
        self._parents_provider = parents_provider

    def __repr__(self):
        return ('_BreadthFirstSearcher(self._search_revisions=%r,'
                ' self.seen=%r)' % (self._search_revisions, self.seen))

    def next(self):
        """Return the next ancestors of this revision.

        Ancestors are returned in the order they are seen in a breadth-first
        traversal.  No ancestor will be returned more than once.
        """
        if self._search_revisions is None:
            self._search_revisions = self._start
        else:
            new_search_revisions = set()
            for parents in self._parents_provider.get_parents(
                    self._search_revisions):
                if parents is None:
                    continue
                new_search_revisions.update(p for p in parents if
                                            p not in self.seen)
            self._search_revisions = new_search_revisions
        if len(self._search_revisions) == 0:
            raise StopIteration()
        self.seen.update(self._search_revisions)
        return self._search_revisions

    def __iter__(self):
        return self

    def find_seen_ancestors(self, revision):
        """Find ancestors of this revision that have already been seen."""
        searcher = _BreadthFirstSearcher([revision], self._parents_provider)
        seen_ancestors = set()
        for ancestors in searcher:
            for ancestor in ancestors:
                if ancestor not in self.seen:
                    searcher.stop_searching_any([ancestor])
                else:
                    seen_ancestors.add(ancestor)
        return seen_ancestors

    def stop_searching_any(self, revisions):
        """
        Remove any of the specified revisions from the search list.

        None of the specified revisions are required to be present in the
        search list; any that are not present are simply ignored.
        """
        stopped = self._search_revisions.intersection(revisions)
        self._search_revisions = self._search_revisions.difference(revisions)
        return stopped

    def start_searching(self, revisions):
        if self._search_revisions is None:
            self._start = set(revisions)
        else:
            self._search_revisions.update(revisions.difference(self.seen))
        self.seen.update(revisions)
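
# Illustrative sketch (not part of the original module): driving a searcher
# directly, using the hypothetical diagram ancestry from the sketch near the
# top of this file.  Each call to next() returns one breadth-first "layer",
# and stop_searching_any() prunes part of the frontier.
#
#   searcher = _BreadthFirstSearcher(['H'], DictParentsProvider(ancestry))
#   searcher.next()                      # => set(['H'])
#   searcher.next()                      # => set(['D', 'E', 'F'])
#   searcher.stop_searching_any(['F'])   # => set(['F'])  (F is dropped)
#   searcher.next()                      # => set(['B', 'C'])
#   searcher.seen                        # => set(['H', 'D', 'E', 'F', 'B', 'C'])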