@@ -159 +159 @@
     def __repr__(self):
         return 'Graph(%r)' % self._parents_provider

+    def _find_any_seen_ancestors(self, revisions, searchers):
+        """Find any revisions that are seen by any of the searchers."""
+        # This will actually return more nodes than just aggregating
+        # find_seen_ancestors() across the searchers, because it can bridge
+        # 1-node gaps inline.
+        # seen contains what nodes have been returned, not what nodes
+        # have been queried. We don't want to probe for nodes that haven't
+        # been searched yet.
+        all_seen = set()
+        not_searched_yet = set()
+        for searcher in searchers:
+            all_seen.update(searcher.seen)
+            not_searched = set(searcher._next_query)
+            # Have these nodes been searched in any other searcher?
+            for other_searcher in searchers:
+                if other_searcher is searcher:
+                    continue
+                # This other searcher claims to have seen these nodes
+                maybe_searched = not_searched.intersection(other_searcher.seen)
+                searched = maybe_searched.difference(
+                    other_searcher._next_query)
+                not_searched.difference_update(searched)
+            # Now we know the real ones that haven't been searched
+            not_searched_yet.update(not_searched)
+        pending = set(revisions).intersection(all_seen)
+        seen_ancestors = set(pending)
+        pending.difference_update(not_searched_yet)
+        get_parent_map = self._parents_provider.get_parent_map
+        while pending:
+            parent_map = get_parent_map(pending)
+            all_parents = []
+            # We don't care if it is a ghost, since it can't be seen if it is
+            # a ghost.
+            for parent_ids in parent_map.itervalues():
+                all_parents.extend(parent_ids)
+            next_pending = all_seen.intersection(all_parents).difference(
+                seen_ancestors)
+            seen_ancestors.update(next_pending)
+            next_pending.difference_update(not_searched_yet)
+            pending = next_pending
+        return seen_ancestors
+
     def find_lca(self, *revisions):
         """Determine the lowest common ancestors of the provided revisions
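
To make the behaviour of the new helper concrete, here is a hedged sketch that exercises it directly. It assumes the patched module is importable and that Graph() takes a parents provider, as the __repr__ above suggests. The helper only reads searcher.seen, searcher._next_query and the provider's get_parent_map(), so the FakeSearcher and StubParentsProvider stand-ins below are hypothetical illustrations, not bzrlib classes.

from bzrlib.graph import Graph  # assumes the patched module is on sys.path


class FakeSearcher(object):
    """Stand-in carrying only the attributes the helper reads."""

    def __init__(self, seen, next_query=()):
        self.seen = set(seen)
        self._next_query = set(next_query)


class StubParentsProvider(object):
    """Stand-in parents provider; ghosts are simply absent from the map."""

    def __init__(self, ancestry):
        self.ancestry = ancestry

    def get_parent_map(self, keys):
        return dict((k, self.ancestry[k]) for k in keys if k in self.ancestry)


# rev-a's parent is rev-b, whose parent is rev-c (a root).
graph = Graph(StubParentsProvider(
    {'rev-a': ('rev-b',), 'rev-b': ('rev-c',), 'rev-c': ()}))

# rev-b sits in a 1-node gap: only the second searcher has seen it.
unique = FakeSearcher(['rev-a', 'rev-c'])
common = FakeSearcher(['rev-b'])
found = graph._find_any_seen_ancestors(['rev-a'], [unique, common])
assert found == set(['rev-a', 'rev-b', 'rev-c'])

# A node still on a searcher's unexpanded frontier (_next_query) is reported
# but not walked through: rev-b is returned, rev-c is not.
frontier = FakeSearcher(['rev-a', 'rev-b', 'rev-c'], next_query=['rev-b'])
found = graph._find_any_seen_ancestors(['rev-a'], [frontier])
assert found == set(['rev-a', 'rev-b'])
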
@@ -253 +299 @@
             unique_are_common_nodes.update(
                 next_common_nodes.intersection(unique_searcher.seen))
             if unique_are_common_nodes:
+                ancestors = self._find_any_seen_ancestors(unique_are_common_nodes,
+                    [unique_searcher, common_searcher])
-                ancestors = unique_searcher.find_seen_ancestors(
-                    unique_are_common_nodes)
-                ancestors.update(common_searcher.find_seen_ancestors(ancestors))
                 unique_searcher.stop_searching_any(ancestors)
                 common_searcher.start_searching(ancestors)
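
The removed lines chained two walks, each restricted to a single searcher's seen set; the new call walks against the union of both. A small standalone toy (hypothetical revision names, and it ignores the _next_query bookkeeping) shows why the combined walk can return strictly more:

def seen_ancestors(revisions, seen, parent_map):
    """Toy walk: ancestors of `revisions`, stepping only through `seen`."""
    pending = set(revisions).intersection(seen)
    found = set(pending)
    while pending:
        parents = set()
        for rev in pending:
            parents.update(parent_map.get(rev, ()))
        pending = parents.intersection(seen).difference(found)
        found.update(pending)
    return found


# A's parent is B, B's parent is C; only the common searcher has seen B, so
# B is a 1-node gap between A and C for the unique searcher.
parent_map = {'A': ('B',), 'B': ('C',), 'C': ()}
seen_unique = set(['A', 'C'])
seen_common = set(['B'])

# Old behaviour: each walk is restricted to a single seen set, so neither
# can cross the gap at B.
old = seen_ancestors(['A'], seen_unique, parent_map)
old.update(seen_ancestors(old, seen_common, parent_map))
assert old == set(['A'])

# New behaviour: one walk against the union of the seen sets bridges the gap.
new = seen_ancestors(['A'], seen_unique | seen_common, parent_map)
assert new == set(['A', 'B', 'C'])
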
@@ -305 +350 @@
                 ancestor_all_unique.intersection(newly_seen_common))
             if unique_are_common_nodes:
                 # We have new common-to-all-unique-searchers nodes
+                # Which also means we can check in the common_searcher
-                for searcher in unique_searchers:
-                    unique_are_common_nodes.update(
-                        searcher.find_seen_ancestors(unique_are_common_nodes))
-                # Since these are common, we can grab another set of ancestors
                 unique_are_common_nodes.update(
+                    self._find_any_seen_ancestors(unique_are_common_nodes,
+                        [common_searcher] + unique_searchers))
-                    common_searcher.find_seen_ancestors(unique_are_common_nodes))
                 # We can tell all of the unique searchers to start at these
                 # nodes, and tell all of the common searchers to *stop*
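
Here the helper is handed the common searcher together with every unique searcher, so a chain of ancestors whose links are spread across different searchers' seen sets is still connected. Reusing the toy seen_ancestors() walk from the sketch above (again with hypothetical names):

# X's parent is P1, P1's parent is P2; each node has been seen by a
# different searcher.
parent_map = {'X': ('P1',), 'P1': ('P2',), 'P2': ()}
seen_unique_1 = set(['X'])
seen_unique_2 = set(['P2'])
seen_common = set(['P1'])

# Walks restricted to any single seen set stop immediately at X.
separate = (seen_ancestors(['X'], seen_unique_1, parent_map) |
            seen_ancestors(['X'], seen_unique_2, parent_map) |
            seen_ancestors(['X'], seen_common, parent_map))
assert separate == set(['X'])

# One walk against the union of all three seen sets, which is what
# _find_any_seen_ancestors() does with [common_searcher] + unique_searchers,
# reaches the whole chain.
combined = seen_ancestors(
    ['X'], seen_unique_1 | seen_unique_2 | seen_common, parent_map)
assert combined == set(['X', 'P1', 'P2'])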