        return revs, ghosts


class AbstractSearchResult(object):
    """The result of a search, describing a set of keys.

    Search results are typically used as the 'fetch_spec' parameter when
    fetching revisions.

    :seealso: AbstractSearch
    """

    def get_recipe(self):
        """Return a recipe that can be used to replay this search.

        The recipe allows reconstruction of the same results at a later date.

        :return: A tuple of (search_kind_str, *details). The details vary by
            kind of search result.
        """
        raise NotImplementedError(self.get_recipe)

    def get_network_struct(self):
        """Return a tuple that can be transmitted via the HPSS protocol."""
        raise NotImplementedError(self.get_network_struct)

    def get_keys(self):
        """Return the keys found in this search.

        :return: A set of keys.
        """
        raise NotImplementedError(self.get_keys)

    def is_empty(self):
        """Return false if the search lists 1 or more revisions."""
        raise NotImplementedError(self.is_empty)

    def refine(self, seen, referenced):
        """Create a new search by refining this search.

        :param seen: Revisions that have been satisfied.
        :param referenced: Revision references observed while satisfying some
            of this search.
        :return: A search result.
        """
        raise NotImplementedError(self.refine)
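

# Illustrative sketch, not part of the bzrlib API: how a consumer might
# inspect any AbstractSearchResult before using it (typically as the
# 'fetch_spec' argument when fetching, per the class docstring).  The helper
# name is hypothetical.
def _example_inspect_search_result(search_result):
    if search_result.is_empty():
        # Nothing was found; there is nothing to fetch.
        return None
    # A compact, replayable description of the search.
    recipe = search_result.get_recipe()
    # The full set of revision keys the search found.
    keys = search_result.get_keys()
    return recipe, keys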


class AbstractSearch(object):
    """A search that can be executed, producing a search result.

    :seealso: AbstractSearchResult
    """

    def execute(self):
        """Construct a network-ready search result from this search description.

        This may take some time to search repositories, etc.

        :return: A search result (an object that implements
            AbstractSearchResult's API).
        """
        raise NotImplementedError(self.execute)
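

# Illustrative sketch, not part of the bzrlib API: build a search description,
# execute it against concrete repositories, then hand the result to fetch as
# its fetch_spec.  The function name is hypothetical and the fetch call is an
# assumption based on the docstrings in this module; EverythingNotInOther is
# defined further down in this file.
def _example_execute_search(to_repo, from_repo):
    search = EverythingNotInOther(to_repo, from_repo)
    # execute() may talk to the repositories (and the network) to build the
    # concrete search result.
    result = search.execute()
    if not result.is_empty():
        to_repo.fetch(from_repo, fetch_spec=result)
    return result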


class SearchResult(AbstractSearchResult):
    """The result of a breadth first search.

    A SearchResult provides the ability to reconstruct the search or access a
    set of the keys the search found.
    """

    def __init__(self, start_keys, exclude_keys, key_count, keys):
        """Create a SearchResult.

        :param start_keys: The keys the search started at.
        :param exclude_keys: The keys the search excludes.
        :param key_count: The total number of keys (from start to but not
            including exclude keys).
        :param keys: The keys the search found. Note that in future we may get
            a SearchResult from a smart server, in which case the keys list is
            not necessarily immediately available.
        """
        self._recipe = ('search', start_keys, exclude_keys, key_count)
        self._keys = frozenset(keys)

    def __repr__(self):
        kind, start_keys, exclude_keys, key_count = self._recipe
        if len(start_keys) > 5:
            start_keys_repr = repr(list(start_keys)[:5])[:-1] + ', ...]'
        else:
            start_keys_repr = repr(start_keys)
        if len(exclude_keys) > 5:
            exclude_keys_repr = repr(list(exclude_keys)[:5])[:-1] + ', ...]'
        else:
            exclude_keys_repr = repr(exclude_keys)
        return '<%s %s:(%s, %s, %d)>' % (self.__class__.__name__,
            kind, start_keys_repr, exclude_keys_repr, key_count)

    def get_recipe(self):
        """Return a recipe that can be used to replay this search.

        The recipe allows reconstruction of the same results at a later date
        without knowing all the found keys. The essential elements are a list
        of keys to start and to stop at. In order to give reproducible
        results when ghosts are encountered by a search they are automatically
        added to the exclude list (or else ghost filling may alter the
        results).

        :return: A tuple ('search', start_keys_set, exclude_keys_set,
            revision_count). To recreate the results of this search, create a
            breadth first searcher on the same graph starting at start_keys.
            Then call next() (or next_with_ghosts()) repeatedly, and on every
            result, call stop_searching_any on any keys from the exclude_keys
            set. The revision_count value acts as a trivial cross-check - the
            found revisions of the new search should have as many elements as
            revision_count. If it does not, then additional revisions have been
            ghosted since the search was executed the first time and the second
            time.
        """
        return self._recipe
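
    # Illustrative sketch (comment only, not part of the API) of the replay
    # procedure described in get_recipe's docstring, assuming a Graph object
    # with the _make_breadth_first_searcher interface used elsewhere in this
    # module:
    #
    #   kind, start_keys, exclude_keys, key_count = result.get_recipe()
    #   searcher = graph._make_breadth_first_searcher(start_keys)
    #   while True:
    #       try:
    #           next_revs = searcher.next()
    #       except StopIteration:
    #           break
    #       searcher.stop_searching_any(exclude_keys.intersection(next_revs))
    #   found = searcher.get_result().get_keys()
    #   # len(found) should equal key_count unless revisions have been
    #   # ghosted since the recipe was created.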

    def get_network_struct(self):
        start_keys = ' '.join(self._recipe[1])
        stop_keys = ' '.join(self._recipe[2])
        count = str(self._recipe[3])
        return (self._recipe[0], '\n'.join((start_keys, stop_keys, count)))
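
    # For example (hypothetical revision ids), a recipe of
    # ('search', set(['rev-b']), set(['rev-a']), 2) is serialised by
    # get_network_struct above as:
    #
    #   ('search', 'rev-b\nrev-a\n2')
    #
    # i.e. space-separated start keys, space-separated stop keys and the
    # count, joined by newlines.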

    def get_keys(self):
        """Return the keys found in this search.

        :return: A set of keys.
        """
        return self._keys

    def is_empty(self):
        """Return false if the search lists 1 or more revisions."""
        return self._recipe[3] == 0

    def refine(self, seen, referenced):
        """Create a new search by refining this search.

        :param seen: Revisions that have been satisfied.
        :param referenced: Revision references observed while satisfying some
            of this search.
        """
        start = self._recipe[1]
        exclude = self._recipe[2]
        count = self._recipe[3]
        keys = self.get_keys()
        # New heads = referenced + old heads - seen things - exclude
        pending_refs = set(referenced)
        pending_refs.update(start)
        pending_refs.difference_update(seen)
        pending_refs.difference_update(exclude)
        # New exclude = old exclude + satisfied heads
        seen_heads = start.intersection(seen)
        exclude.update(seen_heads)
        # keys gets seen removed
        keys = keys - seen
        # length is reduced by len(seen)
        count -= len(seen)
        return SearchResult(pending_refs, exclude, count, keys)
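

# Illustrative sketch, not part of the bzrlib API: a small worked example of
# SearchResult.refine.  The revision ids and the helper name are hypothetical.
def _example_refine_search_result():
    # A search that started at 'rev-3', stopped at 'rev-0', and found 3 keys.
    result = SearchResult(set(['rev-3']), set(['rev-0']), 3,
                          ['rev-1', 'rev-2', 'rev-3'])
    # 'rev-3' has now been satisfied, and it was seen to reference 'rev-2'.
    refined = result.refine(seen=set(['rev-3']), referenced=set(['rev-2']))
    # refined is equivalent to
    # SearchResult(set(['rev-2']), set(['rev-0', 'rev-3']), 2,
    #              ['rev-1', 'rev-2'])
    return refined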


class PendingAncestryResult(AbstractSearchResult):
    """A search result that will reconstruct the ancestry for some graph heads.

    Unlike SearchResult, this doesn't hold the complete search result in
    memory, it just holds a description of how to generate it.
    """

    def __init__(self, heads, repo):
        """Constructor.

        :param heads: an iterable of graph heads.
        :param repo: a repository to use to generate the ancestry for the given
            heads.
        """
        self.heads = frozenset(heads)
        self.repo = repo

    def __repr__(self):
        if len(self.heads) > 5:
            heads_repr = repr(list(self.heads)[:5])[:-1]
            heads_repr += ', <%d more>...]' % (len(self.heads) - 5,)
        else:
            heads_repr = repr(self.heads)
        return '<%s heads:%s repo:%r>' % (
            self.__class__.__name__, heads_repr, self.repo)

    def get_recipe(self):
        """Return a recipe that can be used to replay this search.

        The recipe allows reconstruction of the same results at a later date.

        :seealso SearchResult.get_recipe:

        :return: A tuple ('proxy-search', start_keys_set, set(), -1)
            To recreate this result, create a PendingAncestryResult with the
            start_keys_set.
        """
        return ('proxy-search', self.heads, set(), -1)

    def get_network_struct(self):
        parts = ['ancestry-of']
        parts.extend(self.heads)
        return parts

    def get_keys(self):
        """See SearchResult.get_keys.

        Returns all the keys for the ancestry of the heads, excluding
        NULL_REVISION.
        """
        return self._get_keys(self.repo.get_graph())

    def _get_keys(self, graph):
        NULL_REVISION = revision.NULL_REVISION
        keys = [key for (key, parents) in graph.iter_ancestry(self.heads)
                if key != NULL_REVISION and parents is not None]
        return keys

    def is_empty(self):
        """Return false if the search lists 1 or more revisions."""
        if revision.NULL_REVISION in self.heads:
            return len(self.heads) == 1
        else:
            return len(self.heads) == 0

    def refine(self, seen, referenced):
        """Create a new search by refining this search.

        :param seen: Revisions that have been satisfied.
        :param referenced: Revision references observed while satisfying some
            of this search.
        """
        referenced = self.heads.union(referenced)
        return PendingAncestryResult(referenced - seen, self.repo)
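

# Illustrative sketch, not part of the bzrlib API: PendingAncestryResult only
# records the heads and the repository; the ancestry walk happens lazily when
# get_keys() is called.  The helper name and revision id are hypothetical.
def _example_pending_ancestry(repo, head_revision_id):
    result = PendingAncestryResult([head_revision_id], repo)
    # Cheap: no graph walk, just a description of the search.
    recipe = result.get_recipe()   # ('proxy-search', frozenset([...]), set(), -1)
    # Potentially expensive: walks repo.get_graph().iter_ancestry(...) now.
    keys = result.get_keys()
    return recipe, keys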


class EmptySearchResult(AbstractSearchResult):
    """An empty search result."""

    def is_empty(self):
        return True


class EverythingResult(AbstractSearchResult):
    """A search result that simply requests everything in the repository."""

    def __init__(self, repo):
        self._repo = repo

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, self._repo)

    def get_recipe(self):
        raise NotImplementedError(self.get_recipe)

    def get_network_struct(self):
        return ('everything',)

    def get_keys(self):
        if 'evil' in debug.debug_flags:
            from bzrlib import remote
            if isinstance(self._repo, remote.RemoteRepository):
                # warn developers (not users) not to do this
                trace.mutter_callsite(
                    2, "EverythingResult(RemoteRepository).get_keys() is slow.")
        return self._repo.all_revision_ids()

    def is_empty(self):
        # It's ok for this to wrongly return False: the worst that can happen
        # is that RemoteStreamSource will initiate a get_stream on an empty
        # repository. And almost all repositories are non-empty.
        return False

    def refine(self, seen, referenced):
        heads = set(self._repo.all_revision_ids())
        heads.difference_update(seen)
        heads.update(referenced)
        return PendingAncestryResult(heads, self._repo)


class EverythingNotInOther(AbstractSearch):
    """Find all revisions that are in one repo but not the other."""

    def __init__(self, to_repo, from_repo, find_ghosts=False):
        self.to_repo = to_repo
        self.from_repo = from_repo
        self.find_ghosts = find_ghosts

    def execute(self):
        return self.to_repo.search_missing_revision_ids(
            self.from_repo, find_ghosts=self.find_ghosts)


class NotInOtherForRevs(AbstractSearch):
    """Find all revisions missing in one repo for some specific heads."""

    def __init__(self, to_repo, from_repo, required_ids, if_present_ids=None,
            find_ghosts=False):
        """Constructor.

        :param required_ids: revision IDs of heads that must be found, or else
            the search will fail with NoSuchRevision. All revisions in their
            ancestry not already in the other repository will be included in
            the search result.
        :param if_present_ids: revision IDs of heads that may be absent in the
            source repository. If present, then their ancestry not already
            found in other will be included in the search result.
        """
        self.to_repo = to_repo
        self.from_repo = from_repo
        self.find_ghosts = find_ghosts
        self.required_ids = required_ids
        self.if_present_ids = if_present_ids

    def __repr__(self):
        if len(self.required_ids) > 5:
            reqd_revs_repr = repr(list(self.required_ids)[:5])[:-1] + ', ...]'
        else:
            reqd_revs_repr = repr(self.required_ids)
        if self.if_present_ids and len(self.if_present_ids) > 5:
            ifp_revs_repr = repr(list(self.if_present_ids)[:5])[:-1] + ', ...]'
        else:
            ifp_revs_repr = repr(self.if_present_ids)
        return "<%s from:%r to:%r find_ghosts:%r req'd:%r if-present:%r>" % (
            self.__class__.__name__, self.from_repo, self.to_repo,
            self.find_ghosts, reqd_revs_repr, ifp_revs_repr)

    def execute(self):
        return self.to_repo.search_missing_revision_ids(
            self.from_repo, revision_ids=self.required_ids,
            if_present_ids=self.if_present_ids, find_ghosts=self.find_ghosts)


def invert_parent_map(parent_map):
    """Given a map from child => parents, create a map of parent=>children"""
    child_map = {}
    for child, parents in parent_map.iteritems():
        for p in parents:
            # Any given parent is likely to have only a small handful
            # of children, many will have only one. So we avoid mem overhead of
            # a list, in exchange for extra copying of tuples
            if p not in child_map:
                child_map[p] = (child,)
            else:
                child_map[p] = child_map[p] + (child,)
    return child_map
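

# Illustrative sketch, not part of the bzrlib API: a tiny worked example of
# invert_parent_map.  The revision ids and the helper name are hypothetical.
def _example_invert_parent_map():
    # 'rev-1' is a parent of both 'rev-2' and 'rev-3'; 'rev-2' is also a
    # parent of 'rev-3'.
    parent_map = {'rev-2': ('rev-1',), 'rev-3': ('rev-1', 'rev-2')}
    child_map = invert_parent_map(parent_map)
    # child_map maps 'rev-1' -> ('rev-2', 'rev-3') and 'rev-2' -> ('rev-3',)
    # (the order inside each tuple follows dict iteration order).
    return child_map

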
def collapse_linear_regions(parent_map):