59
60
def __repr__(self):
60
61
return 'DictParentsProvider(%r)' % self.ancestry
63
    # Note: DictParentsProvider does not implement get_cached_parent_map.
    #       Arguably, the data is clearly cached in memory. However, this class
    #       is mostly used for testing, and it keeps the tests clean to not
    #       change it.
62
68
def get_parent_map(self, keys):
63
"""See _StackedParentsProvider.get_parent_map"""
69
"""See StackedParentsProvider.get_parent_map"""
64
70
ancestry = self.ancestry
65
return dict((k, ancestry[k]) for k in keys if k in ancestry)
68
class _StackedParentsProvider(object):
71
return dict([(k, ancestry[k]) for k in keys if k in ancestry])
74
class StackedParentsProvider(object):
75
"""A parents provider which stacks (or unions) multiple providers.
77
The providers are queries in the order of the provided parent_providers.
70
80
def __init__(self, parent_providers):
71
81
self._parent_providers = parent_providers
73
83
def __repr__(self):
74
return "_StackedParentsProvider(%r)" % self._parent_providers
84
return "%s(%r)" % (self.__class__.__name__, self._parent_providers)
76
86
def get_parent_map(self, keys):
77
87
"""Get a mapping of keys => parents
251
306
right = searchers[1].seen
252
307
return (left.difference(right), right.difference(left))
309
def find_descendants(self, old_key, new_key):
310
"""Find descendants of old_key that are ancestors of new_key."""
311
child_map = self.get_child_map(self._find_descendant_ancestors(
313
graph = Graph(DictParentsProvider(child_map))
314
searcher = graph._make_breadth_first_searcher([old_key])
318
def _find_descendant_ancestors(self, old_key, new_key):
319
"""Find ancestors of new_key that may be descendants of old_key."""
320
stop = self._make_breadth_first_searcher([old_key])
321
descendants = self._make_breadth_first_searcher([new_key])
322
for revisions in descendants:
323
old_stop = stop.seen.intersection(revisions)
324
descendants.stop_searching_any(old_stop)
325
seen_stop = descendants.find_seen_ancestors(stop.step())
326
descendants.stop_searching_any(seen_stop)
327
return descendants.seen.difference(stop.seen)
329
def get_child_map(self, keys):
330
"""Get a mapping from parents to children of the specified keys.
332
This is simply the inversion of get_parent_map. Only supplied keys
333
will be discovered as children.
334
:return: a dict of key:child_list for keys.
336
parent_map = self._parents_provider.get_parent_map(keys)
338
for child, parents in sorted(parent_map.items()):
339
for parent in parents:
340
parent_child.setdefault(parent, []).append(child)
254
343
def find_distance_to_null(self, target_revision_id, known_revision_ids):
255
344
"""Find the left-hand distance to the NULL_REVISION.
1434
1594
return revs, ghosts
1437
class SearchResult(object):
    """The result of a breadth first search.

    A SearchResult provides the ability to reconstruct the search or access a
    set of the keys the search found.
    """

    def __init__(self, start_keys, exclude_keys, key_count, keys):
        """Create a SearchResult.

        :param start_keys: The keys the search started at.
        :param exclude_keys: The keys the search excludes.
        :param key_count: The total number of keys (from start to but not
            including exclude).
        :param keys: The keys the search found. Note that in future we may get
            a SearchResult from a smart server, in which case the keys list is
            not necessarily immediately available.
        """
        self._recipe = ('search', start_keys, exclude_keys, key_count)
        self._keys = frozenset(keys)

    def get_recipe(self):
        """Return a recipe that can be used to replay this search.

        The recipe allows reconstruction of the same results at a later date
        without knowing all the found keys. The essential elements are a list
        of keys to start and to stop at. In order to give reproducible
        results when ghosts are encountered by a search they are automatically
        added to the exclude list (or else ghost filling may alter the
        results).

        :return: A tuple ('search', start_keys_set, exclude_keys_set,
            revision_count). To recreate the results of this search, create a
            breadth first searcher on the same graph starting at start_keys.
            Then call next() (or next_with_ghosts()) repeatedly, and on every
            result, call stop_searching_any on any keys from the exclude_keys
            set. The revision_count value acts as a trivial cross-check - the
            found revisions of the new search should have as many elements as
            revision_count. If it does not, then additional revisions have been
            ghosted since the search was executed the first time and the second
            time.
        """
        return self._recipe

    def get_keys(self):
        """Return the keys found in this search.

        :return: A set of keys.
        """
        return self._keys

    def is_empty(self):
        """Return false if the search lists 1 or more revisions."""
        return self._recipe[3] == 0

    def refine(self, seen, referenced):
        """Create a new search by refining this search.

        :param seen: Revisions that have been satisfied.
        :param referenced: Revision references observed while satisfying some
            of this search.
        """
        start = self._recipe[1]
        exclude = self._recipe[2]
        count = self._recipe[3]
        keys = self.get_keys()
        # New heads = referenced + old heads - seen things - exclude
        pending_refs = set(referenced)
        pending_refs.update(start)
        pending_refs.difference_update(seen)
        pending_refs.difference_update(exclude)
        # New exclude = old exclude + satisfied heads
        seen_heads = start.intersection(seen)
        exclude.update(seen_heads)
        # keys gets seen removed
        keys = keys - seen
        # length is reduced by len(seen)
        count -= len(seen)
        return SearchResult(pending_refs, exclude, count, keys)
1518
class PendingAncestryResult(object):
    """A search result that will reconstruct the ancestry for some graph heads.

    Unlike SearchResult, this doesn't hold the complete search result in
    memory, it just holds a description of how to generate it.
    """

    def __init__(self, heads, repo):
        """Constructor.

        :param heads: an iterable of graph heads.
        :param repo: a repository to use to generate the ancestry for the given
            heads.
        """
        self.heads = frozenset(heads)
        self.repo = repo

    def get_recipe(self):
        """Return a recipe that can be used to replay this search.

        The recipe allows reconstruction of the same results at a later date.

        :seealso SearchResult.get_recipe:

        :return: A tuple ('proxy-search', start_keys_set, set(), -1)
            To recreate this result, create a PendingAncestryResult with the
            start_keys_set.
        """
        return ('proxy-search', self.heads, set(), -1)

    def get_keys(self):
        """See SearchResult.get_keys.

        Returns all the keys for the ancestry of the heads, excluding
        NULL_REVISION.
        """
        return self._get_keys(self.repo.get_graph())

    def _get_keys(self, graph):
        # Walk the full ancestry of the heads, dropping the null revision.
        NULL_REVISION = revision.NULL_REVISION
        keys = [key for (key, parents) in graph.iter_ancestry(self.heads)
                if key != NULL_REVISION]
        return keys

    def is_empty(self):
        """Return false if the search lists 1 or more revisions."""
        if revision.NULL_REVISION in self.heads:
            return len(self.heads) == 1
        else:
            return len(self.heads) == 0

    def refine(self, seen, referenced):
        """Create a new search by refining this search.

        :param seen: Revisions that have been satisfied.
        :param referenced: Revision references observed while satisfying some
            of this search.
        """
        referenced = self.heads.union(referenced)
        return PendingAncestryResult(referenced - seen, self.repo)
1597
def invert_parent_map(parent_map):
    """Given a map from child => parents, create a map of parent=>children"""
    child_map = {}
    for child, parents in parent_map.iteritems():
        for p in parents:
            # Any given parent is likely to have only a small handful
            # of children, many will have only one. So we avoid mem overhead of
            # a list, in exchange for extra copying of tuples
            if p not in child_map:
                child_map[p] = (child,)
            else:
                child_map[p] = child_map[p] + (child,)
    return child_map
1580
1612
def collapse_linear_regions(parent_map):
1648
1680
removed.add(node)
1685
class GraphThunkIdsToKeys(object):
    """Forwards calls about 'ids' to be about keys internally."""

    def __init__(self, graph):
        # graph operates on 1-tuple keys; this wrapper converts plain ids.
        self._graph = graph

    def topo_sort(self):
        # Unwrap each 1-tuple key back to a plain id.
        return [r for (r,) in self._graph.topo_sort()]

    def heads(self, ids):
        """See Graph.heads()"""
        as_keys = [(i,) for i in ids]
        head_keys = self._graph.heads(as_keys)
        return set([h[0] for h in head_keys])

    def merge_sort(self, tip_revision):
        # Note: mutates the returned nodes in place, unwrapping their keys.
        nodes = self._graph.merge_sort((tip_revision,))
        for node in nodes:
            node.key = node.key[0]
        return nodes

    def add_node(self, revision, parents):
        self._graph.add_node((revision,), [(p,) for p in parents])
1710
_counters = [0,0,0,0,0,0,0]
1712
from bzrlib._known_graph_pyx import KnownGraph
1713
except ImportError, e:
1714
osutils.failed_to_load_extension(e)
1715
from bzrlib._known_graph_py import KnownGraph