101
103
class CachingParentsProvider(object):
    """A parents provider which will cache the revision => parents as a dict.

    This is useful for providers which have an expensive look up.

    Either a ParentsProvider or a get_parent_map-like callback may be
    supplied.  If it provides extra un-asked-for parents, they will be cached,
    but filtered out of get_parent_map.

    The cache is enabled by default, but may be disabled and re-enabled.
    """
    def __init__(self, parent_provider=None, get_parent_map=None):
        """Constructor.

        :param parent_provider: The ParentProvider to use.  It or
            get_parent_map must be supplied.
        :param get_parent_map: The get_parent_map callback to use.  It or
            parent_provider must be supplied.
        """
        self._real_provider = parent_provider
        # Theoretically we could use an LRUCache here
        if get_parent_map is None:
            self._get_parent_map = self._real_provider.get_parent_map
        else:
            self._get_parent_map = get_parent_map
        # _cache is None while caching is disabled; enable_cache() checks
        # this, so it must be initialised before the call below.
        self._cache = None
        self.enable_cache(True)

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self._real_provider)

    def enable_cache(self, cache_misses=True):
        """Enable the cache.

        :param cache_misses: if True, keys with no parents are remembered in
            self.missing_keys and never asked for again.
        :raises AssertionError: if the cache is already enabled.
        """
        if self._cache is not None:
            raise AssertionError('Cache enabled when already enabled.')
        self._cache = {}
        self._cache_misses = cache_misses
        self.missing_keys = set()

    def disable_cache(self):
        """Disable and clear the cache."""
        self._cache = None
        self._cache_misses = None
        self.missing_keys = set()

    def get_cached_map(self):
        """Return any cached get_parent_map values, or None if disabled."""
        if self._cache is None:
            return None
        return dict(self._cache)

    def get_parent_map(self, keys):
        """See StackedParentsProvider.get_parent_map."""
        cache = self._cache
        if cache is None:
            # Caching disabled: pass straight through to the real source.
            cache = self._get_parent_map(keys)
        else:
            needed_revisions = set(key for key in keys if key not in cache)
            # Do not ask for negatively cached keys
            needed_revisions.difference_update(self.missing_keys)
            if needed_revisions:
                parent_map = self._get_parent_map(needed_revisions)
                cache.update(parent_map)
                if self._cache_misses:
                    for key in needed_revisions:
                        if key not in parent_map:
                            self.note_missing_key(key)
        # Only return the keys that were asked for, even if the underlying
        # provider handed back (and we cached) extra entries.
        result = {}
        for key in keys:
            value = cache.get(key)
            if value is not None:
                result[key] = value
        return result

    def note_missing_key(self, key):
        """Note that key is a missing key."""
        if self._cache_misses:
            self.missing_keys.add(key)
182
class CallableToParentsProviderAdapter(object):
    """A parents provider that adapts any callable to the parents provider API.

    i.e. it accepts calls to self.get_parent_map and relays them to the
    callable it was constructed with.
    """

    def __init__(self, a_callable):
        """:param a_callable: a get_parent_map-like callable."""
        self.callable = a_callable

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.callable)

    def get_parent_map(self, keys):
        """Relay to the adapted callable."""
        return self.callable(keys)
140
199
class Graph(object):
212
272
right = searchers[1].seen
213
273
return (left.difference(right), right.difference(left))
275
def find_descendants(self, old_key, new_key):
276
"""Find descendants of old_key that are ancestors of new_key."""
277
child_map = self.get_child_map(self._find_descendant_ancestors(
279
graph = Graph(DictParentsProvider(child_map))
280
searcher = graph._make_breadth_first_searcher([old_key])
284
def _find_descendant_ancestors(self, old_key, new_key):
285
"""Find ancestors of new_key that may be descendants of old_key."""
286
stop = self._make_breadth_first_searcher([old_key])
287
descendants = self._make_breadth_first_searcher([new_key])
288
for revisions in descendants:
289
old_stop = stop.seen.intersection(revisions)
290
descendants.stop_searching_any(old_stop)
291
seen_stop = descendants.find_seen_ancestors(stop.step())
292
descendants.stop_searching_any(seen_stop)
293
return descendants.seen.difference(stop.seen)
295
def get_child_map(self, keys):
296
"""Get a mapping from parents to children of the specified keys.
298
This is simply the inversion of get_parent_map. Only supplied keys
299
will be discovered as children.
300
:return: a dict of key:child_list for keys.
302
parent_map = self._parents_provider.get_parent_map(keys)
304
for child, parents in sorted(parent_map.items()):
305
for parent in parents:
306
parent_child.setdefault(parent, []).append(child)
215
309
def find_distance_to_null(self, target_revision_id, known_revision_ids):
216
310
"""Find the left-hand distance to the NULL_REVISION.
1395
1550
return revs, ghosts
1398
class AbstractSearchResult(object):
    """The result of a search, describing a set of keys.

    Search results are typically used as the 'fetch_spec' parameter when
    fetching revisions.

    :seealso: AbstractSearch
    """

    def get_recipe(self):
        """Return a recipe that can be used to replay this search.

        The recipe allows reconstruction of the same results at a later date.

        :return: A tuple of `(search_kind_str, *details)`.  The details vary by
            kind of search result.
        """
        raise NotImplementedError(self.get_recipe)

    def get_network_struct(self):
        """Return a tuple that can be transmitted via the HPSS protocol."""
        raise NotImplementedError(self.get_network_struct)

    def get_keys(self):
        """Return the keys found in this search.

        :return: A set of keys.
        """
        raise NotImplementedError(self.get_keys)

    def is_empty(self):
        """Return false if the search lists 1 or more revisions."""
        raise NotImplementedError(self.is_empty)

    def refine(self, seen, referenced):
        """Create a new search by refining this search.

        :param seen: Revisions that have been satisfied.
        :param referenced: Revision references observed while satisfying some
            of those revisions.
        :return: A search result.
        """
        raise NotImplementedError(self.refine)
1598
class AbstractSearch(object):
    """A search that can be executed, producing a search result.

    :seealso: AbstractSearchResult
    """

    def execute(self):
        """Construct a network-ready search result from this search description.

        This may take some time to search repositories, etc.

        :return: A search result (an object that implements
            AbstractSearchResult's API).
        """
        raise NotImplementedError(self.execute)
1615
class SearchResult(AbstractSearchResult):
    """The result of a breadth first search.

    A SearchResult provides the ability to reconstruct the search or access a
    set of the keys the search found.
    """

    def __init__(self, start_keys, exclude_keys, key_count, keys):
        """Create a SearchResult.

        :param start_keys: The keys the search started at.
        :param exclude_keys: The keys the search excludes.
        :param key_count: The total number of keys (from start to but not
            including exclude).
        :param keys: The keys the search found.  Note that in future we may
            get a SearchResult from a smart server, in which case the keys
            list is not necessarily immediately available.
        """
        self._recipe = ('search', start_keys, exclude_keys, key_count)
        self._keys = frozenset(keys)

    def __repr__(self):
        kind, start_keys, exclude_keys, key_count = self._recipe
        # Truncate long key lists so the repr stays readable.
        if len(start_keys) > 5:
            start_keys_repr = repr(list(start_keys)[:5])[:-1] + ', ...]'
        else:
            start_keys_repr = repr(start_keys)
        if len(exclude_keys) > 5:
            exclude_keys_repr = repr(list(exclude_keys)[:5])[:-1] + ', ...]'
        else:
            exclude_keys_repr = repr(exclude_keys)
        return '<%s %s:(%s, %s, %d)>' % (self.__class__.__name__,
            kind, start_keys_repr, exclude_keys_repr, key_count)

    def get_recipe(self):
        """Return a recipe that can be used to replay this search.

        The recipe allows reconstruction of the same results at a later date
        without knowing all the found keys.  The essential elements are a list
        of keys to start and to stop at.  In order to give reproducible
        results when ghosts are encountered by a search they are automatically
        added to the exclude list (or else ghost filling may alter the
        results).

        :return: A tuple ('search', start_keys_set, exclude_keys_set,
            revision_count).  To recreate the results of this search, create a
            breadth first searcher on the same graph starting at start_keys.
            Then call next() (or next_with_ghosts()) repeatedly, and on every
            result, call stop_searching_any on any keys from the exclude_keys
            set.  The revision_count value acts as a trivial cross-check - the
            found revisions of the new search should have as many elements as
            revision_count.  If it does not, then additional revisions have
            been ghosted since the search was executed the first time and the
            second time.
        """
        return self._recipe

    def get_network_struct(self):
        """Return a tuple that can be transmitted via the HPSS protocol."""
        start_keys = ' '.join(self._recipe[1])
        stop_keys = ' '.join(self._recipe[2])
        count = str(self._recipe[3])
        return (self._recipe[0], '\n'.join((start_keys, stop_keys, count)))

    def get_keys(self):
        """Return the keys found in this search.

        :return: A set of keys.
        """
        return self._keys

    def is_empty(self):
        """Return false if the search lists 1 or more revisions."""
        return self._recipe[3] == 0

    def refine(self, seen, referenced):
        """Create a new search by refining this search.

        :param seen: Revisions that have been satisfied.
        :param referenced: Revision references observed while satisfying some
            of those revisions.
        """
        start = self._recipe[1]
        exclude = self._recipe[2]
        count = self._recipe[3]
        keys = self.get_keys()
        # New heads = referenced + old heads - seen things - exclude
        pending_refs = set(referenced)
        pending_refs.update(start)
        pending_refs.difference_update(seen)
        pending_refs.difference_update(exclude)
        # New exclude = old exclude + satisfied heads
        seen_heads = start.intersection(seen)
        exclude.update(seen_heads)
        # keys gets seen removed
        keys = keys - seen
        # length is reduced by len(seen)
        count -= len(seen)
        return SearchResult(pending_refs, exclude, count, keys)
1715
class PendingAncestryResult(AbstractSearchResult):
    """A search result that will reconstruct the ancestry for some graph heads.

    Unlike SearchResult, this doesn't hold the complete search result in
    memory, it just holds a description of how to generate it.
    """

    def __init__(self, heads, repo):
        """Constructor.

        :param heads: an iterable of graph heads.
        :param repo: a repository to use to generate the ancestry for the given
            heads.
        """
        self.heads = frozenset(heads)
        self.repo = repo

    def __repr__(self):
        # Truncate long head lists so the repr stays readable.
        if len(self.heads) > 5:
            heads_repr = repr(list(self.heads)[:5])[:-1]
            heads_repr += ', <%d more>...]' % (len(self.heads) - 5,)
        else:
            heads_repr = repr(self.heads)
        return '<%s heads:%s repo:%r>' % (
            self.__class__.__name__, heads_repr, self.repo)

    def get_recipe(self):
        """Return a recipe that can be used to replay this search.

        The recipe allows reconstruction of the same results at a later date.

        :seealso SearchResult.get_recipe:

        :return: A tuple ('proxy-search', start_keys_set, set(), -1)
            To recreate this result, create a PendingAncestryResult with the
            start_keys_set.
        """
        return ('proxy-search', self.heads, set(), -1)

    def get_network_struct(self):
        """Return a tuple that can be transmitted via the HPSS protocol."""
        parts = ['ancestry-of']
        parts.extend(self.heads)
        return parts

    def get_keys(self):
        """See SearchResult.get_keys.

        Returns all the keys for the ancestry of the heads, excluding
        NULL_REVISION.
        """
        return self._get_keys(self.repo.get_graph())

    def _get_keys(self, graph):
        # NULL_REVISION is excluded; parents of None marks a ghost, which is
        # excluded as well.
        NULL_REVISION = revision.NULL_REVISION
        keys = [key for (key, parents) in graph.iter_ancestry(self.heads)
                if key != NULL_REVISION and parents is not None]
        return keys

    def is_empty(self):
        """Return false if the search lists 1 or more revisions."""
        if revision.NULL_REVISION in self.heads:
            return len(self.heads) == 1
        else:
            return len(self.heads) == 0

    def refine(self, seen, referenced):
        """Create a new search by refining this search.

        :param seen: Revisions that have been satisfied.
        :param referenced: Revision references observed while satisfying some
            of those revisions.
        """
        referenced = self.heads.union(referenced)
        return PendingAncestryResult(referenced - seen, self.repo)
1791
class EmptySearchResult(AbstractSearchResult):
    """An empty search result."""

    # NOTE(review): only the class header and docstring survive in this
    # chunk; is_empty() below is the minimal concrete behaviour implied by
    # the name — confirm against the full file.
    def is_empty(self):
        """Return false if the search lists 1 or more revisions."""
        return True
1798
class EverythingResult(AbstractSearchResult):
    """A search result that simply requests everything in the repository."""

    def __init__(self, repo):
        """:param repo: the repository whose revisions are requested."""
        self._repo = repo

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, self._repo)

    def get_recipe(self):
        raise NotImplementedError(self.get_recipe)

    def get_network_struct(self):
        """Return a tuple that can be transmitted via the HPSS protocol."""
        return ('everything',)

    def get_keys(self):
        """Return every revision id in the repository."""
        if 'evil' in debug.debug_flags:
            from bzrlib import remote
            if isinstance(self._repo, remote.RemoteRepository):
                # warn developers (not users) not to do this
                trace.mutter_callsite(
                    2, "EverythingResult(RemoteRepository).get_keys() is slow.")
        return self._repo.all_revision_ids()

    def is_empty(self):
        # It's ok for this to wrongly return False: the worst that can happen
        # is that RemoteStreamSource will initiate a get_stream on an empty
        # repository.  And almost all repositories are non-empty.
        return False

    def refine(self, seen, referenced):
        """See AbstractSearchResult.refine."""
        heads = set(self._repo.all_revision_ids())
        heads.difference_update(seen)
        heads.update(referenced)
        return PendingAncestryResult(heads, self._repo)
1835
class EverythingNotInOther(AbstractSearch):
    """Find all revisions that are in one repo but not the other."""

    def __init__(self, to_repo, from_repo, find_ghosts=False):
        """:param to_repo: the repository that may be missing revisions.
        :param from_repo: the repository to find revisions in.
        :param find_ghosts: if True, search for ghosts as well.
        """
        self.to_repo = to_repo
        self.from_repo = from_repo
        self.find_ghosts = find_ghosts

    def execute(self):
        """See AbstractSearch.execute."""
        return self.to_repo.search_missing_revision_ids(
            self.from_repo, find_ghosts=self.find_ghosts)
1848
class NotInOtherForRevs(AbstractSearch):
    """Find all revisions missing in one repo for a some specific heads."""

    def __init__(self, to_repo, from_repo, required_ids, if_present_ids=None,
            find_ghosts=False, limit=None):
        """Constructor.

        :param required_ids: revision IDs of heads that must be found, or else
            the search will fail with NoSuchRevision.  All revisions in their
            ancestry not already in the other repository will be included in
            the search result.
        :param if_present_ids: revision IDs of heads that may be absent in the
            source repository.  If present, then their ancestry not already
            found in other will be included in the search result.
        :param limit: maximum number of revisions to fetch
        """
        self.to_repo = to_repo
        self.from_repo = from_repo
        self.find_ghosts = find_ghosts
        self.required_ids = required_ids
        self.if_present_ids = if_present_ids
        self.limit = limit

    def __repr__(self):
        # Truncate long key lists so the repr stays readable.
        if len(self.required_ids) > 5:
            reqd_revs_repr = repr(list(self.required_ids)[:5])[:-1] + ', ...]'
        else:
            reqd_revs_repr = repr(self.required_ids)
        if self.if_present_ids and len(self.if_present_ids) > 5:
            ifp_revs_repr = repr(list(self.if_present_ids)[:5])[:-1] + ', ...]'
        else:
            ifp_revs_repr = repr(self.if_present_ids)

        return ("<%s from:%r to:%r find_ghosts:%r req'd:%r if-present:%r"
                "limit:%r>") % (
                self.__class__.__name__, self.from_repo, self.to_repo,
                self.find_ghosts, reqd_revs_repr, ifp_revs_repr,
                self.limit)

    def execute(self):
        """See AbstractSearch.execute."""
        return self.to_repo.search_missing_revision_ids(
            self.from_repo, revision_ids=self.required_ids,
            if_present_ids=self.if_present_ids, find_ghosts=self.find_ghosts,
            limit=self.limit)
1450
1894
def collapse_linear_regions(parent_map):
1451
1895
"""Collapse regions of the graph that are 'linear'.