# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

from bzrlib import (
from bzrlib.symbol_versioning import deprecated_function, deprecated_in

STEP_UNIQUE_SEARCHER_EVERY = 5

        return 'DictParentsProvider(%r)' % self.ancestry

    def get_parent_map(self, keys):
        """See StackedParentsProvider.get_parent_map"""
        ancestry = self.ancestry
        return dict((k, ancestry[k]) for k in keys if k in ancestry)
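
# Illustrative sketch (added; not part of the original module): a
# DictParentsProvider answers get_parent_map() straight from a plain dict.
#
#   dp = DictParentsProvider({'rev2': ('rev1',), 'rev1': ()})
#   dp.get_parent_map(['rev2', 'a-ghost'])
#   # => {'rev2': ('rev1',)}   (keys absent from the dict are simply omitted)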


@deprecated_function(deprecated_in((1, 16, 0)))
def _StackedParentsProvider(*args, **kwargs):
    return StackedParentsProvider(*args, **kwargs)


class StackedParentsProvider(object):
    """A parents provider which stacks (or unions) multiple providers.

    The providers are queried in the order of the provided parent_providers.
    """
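
    # Illustrative sketch (added; not from the original module): providers are
    # consulted in order, so each key is answered by the first provider that
    # knows it.
    #
    #   stacked = StackedParentsProvider([DictParentsProvider({'a': ()}),
    #                                     DictParentsProvider({'b': ('a',)})])
    #   stacked.get_parent_map(['a', 'b'])
    #   # => {'a': (), 'b': ('a',)}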

    def __init__(self, parent_providers):
        self._parent_providers = parent_providers

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self._parent_providers)

    def get_parent_map(self, keys):
        """Get a mapping of keys => parents
            raise AssertionError('Cache enabled when already enabled.')
        self._cache = {}
        self._cache_misses = cache_misses
        self.missing_keys = set()

    def disable_cache(self):
        """Disable and clear the cache."""
        self._cache = None
        self._cache_misses = None
        self.missing_keys = set()

    def get_cached_map(self):
        """Return any cached get_parent_map values."""
        if self._cache is None:
            return None
        return dict(self._cache)

    def get_parent_map(self, keys):
        """See StackedParentsProvider.get_parent_map."""
        cache = self._cache
        if cache is None:
            cache = self._get_parent_map(keys)
        else:
            needed_revisions = set(key for key in keys if key not in cache)
            # Do not ask for negatively cached keys
            needed_revisions.difference_update(self.missing_keys)
            if needed_revisions:
                parent_map = self._get_parent_map(needed_revisions)
                cache.update(parent_map)
                if self._cache_misses:
                    for key in needed_revisions:
                        if key not in parent_map:
                            self.note_missing_key(key)
        result = {}
        for key in keys:
            value = cache.get(key)
            if value is not None:
                result[key] = value
        return result

    def note_missing_key(self, key):
        """Note that key is a missing key."""
        if self._cache_misses:
            self.missing_keys.add(key)
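
# Illustrative sketch (added; not from the original module), assuming the
# caching provider is constructed around the provider it should wrap:
#
#   caching = CachingParentsProvider(other_provider)
#   caching.enable_cache(cache_misses=True)
#   caching.get_parent_map(['rev2'])   # asks other_provider, fills the cache
#   caching.get_parent_map(['rev2'])   # answered from the cache
#   caching.disable_cache()            # drop the cached results again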


class Graph(object):
        return known_revnos[cur_tip] + num_steps

    def find_lefthand_distances(self, keys):
        """Find the distance to null for all the keys in keys.

        :param keys: keys to lookup.
        :return: A dict key->distance for all of keys.
        """
        # Optimisable by concurrent searching, but a random spread should get
        # some sort of hit rate.
        known_revnos = []
        ghosts = []
        for key in keys:
            try:
                known_revnos.append(
                    (key, self.find_distance_to_null(key, known_revnos)))
            except errors.GhostRevisionsHaveNoRevno:
                ghosts.append(key)
        for key in ghosts:
            known_revnos.append((key, -1))
        return dict(known_revnos)
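
    # Illustrative sketch (added; not from the original source), assuming a
    # graph whose left-hand history is rev1 <- rev2 <- rev3:
    #
    #   graph.find_lefthand_distances(['rev3', 'rev1', 'a-ghost'])
    #   # => {'rev3': 3, 'rev1': 1, 'a-ghost': -1}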

    def find_unique_ancestors(self, unique_revision, common_revisions):
        """Find the unique ancestors for a revision versus others.
                         all_unique_searcher._iterations)
            unique_tip_searchers = next_unique_searchers

    def get_parent_map(self, revisions):
        """Get a map of key:parent_list for revisions.
        The recipe allows reconstruction of the same results at a later date
        without knowing all the found keys. The essential elements are a list
        of keys to start and to stop at. In order to give reproducible
        results when ghosts are encountered by a search they are automatically
        added to the exclude list (or else ghost filling may alter the
        results).

        :return: A tuple ('search', start_keys_set, exclude_keys_set,
            revision_count). To recreate the results of this search, create a
            breadth first searcher on the same graph starting at start_keys.
            Then call next() (or next_with_ghosts()) repeatedly, and on every
            result, call stop_searching_any on any keys from the exclude_keys
            set. The revision_count value acts as a trivial cross-check - the
            found revisions of the new search should have as many elements as
            revision_count. If it does not, then additional revisions have been
            ghosted since the search was executed the first time and the second

        return self._keys

    def is_empty(self):
        """Return false if the search lists 1 or more revisions."""
        return self._recipe[3] == 0
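
    # Illustrative sketch (added; not from the original source) of replaying a
    # recipe along the lines described in get_recipe(); the helper used to
    # build the searcher is assumed:
    #
    #   kind, start_keys, exclude_keys, revision_count = result.get_recipe()
    #   searcher = graph._make_breadth_first_searcher(start_keys)
    #   while True:
    #       try:
    #           found = searcher.next()
    #       except StopIteration:
    #           break
    #       searcher.stop_searching_any(exclude_keys)
    #   # revision_count is the trivial cross-check on the recreated search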

    def refine(self, seen, referenced):
        """Create a new search by refining this search.

        :param seen: Revisions that have been satisfied.
        :param referenced: Revision references observed while satisfying some
            of this search.
        """
        start = self._recipe[1]
        exclude = self._recipe[2]
        count = self._recipe[3]
        keys = self.get_keys()
        # New heads = referenced + old heads - seen things - exclude
        pending_refs = set(referenced)
        pending_refs.update(start)
        pending_refs.difference_update(seen)
        pending_refs.difference_update(exclude)
        # New exclude = old exclude + satisfied heads
        seen_heads = start.intersection(seen)
        exclude.update(seen_heads)
        # keys gets seen removed
        keys = keys - seen
        # length is reduced by len(seen)
        count -= len(seen)
        return SearchResult(pending_refs, exclude, count, keys)
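
    # Worked example (added; assumed data): with start={A}, exclude=set(),
    # count=2 and keys={A, B}, refine(seen={A}, referenced={B}) produces
    # start'={B}, exclude'={A}, count'=1 and keys'={B}.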


class PendingAncestryResult(object):
    """A search result that will reconstruct the ancestry for some graph heads.

    Unlike SearchResult, this doesn't hold the complete search result in
    memory, it just holds a description of how to generate it.
    """

    def __init__(self, heads, repo):
        """Constructor.

        :param heads: an iterable of graph heads.
        :param repo: a repository to use to generate the ancestry for the given
            heads.
        """
        self.heads = frozenset(heads)
        self.repo = repo

    def get_recipe(self):
        """Return a recipe that can be used to replay this search.

        The recipe allows reconstruction of the same results at a later date.

        :seealso SearchResult.get_recipe:

        :return: A tuple ('proxy-search', start_keys_set, set(), -1)
            To recreate this result, create a PendingAncestryResult with the
            start_keys_set.
        """
        return ('proxy-search', self.heads, set(), -1)

    def get_keys(self):
        """See SearchResult.get_keys.

        Returns all the keys for the ancestry of the heads, excluding
        NULL_REVISION.
        """
        return self._get_keys(self.repo.get_graph())

    def _get_keys(self, graph):
        NULL_REVISION = revision.NULL_REVISION
        keys = [key for (key, parents) in graph.iter_ancestry(self.heads)
                if key != NULL_REVISION and parents is not None]
        return keys

    def is_empty(self):
        """Return false if the search lists 1 or more revisions."""
        if revision.NULL_REVISION in self.heads:
            return len(self.heads) == 1
        else:
            return len(self.heads) == 0

    def refine(self, seen, referenced):
        """Create a new search by refining this search.

        :param seen: Revisions that have been satisfied.
        :param referenced: Revision references observed while satisfying some
            of this search.
        """
        referenced = self.heads.union(referenced)
        return PendingAncestryResult(referenced - seen, self.repo)


def collapse_linear_regions(parent_map):
    """Collapse regions of the graph that are 'linear'.
            removed.add(node)
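
# Illustrative sketch (added; not from the original source): a node with exactly
# one parent and exactly one child can be removed and its neighbours rewired:
#
#   collapse_linear_regions({'A': ('B',), 'B': ('C',), 'C': ()})
#   # => {'A': ('C',), 'C': ()}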


class GraphThunkIdsToKeys(object):
    """Forwards calls about 'ids' to be about keys internally."""

    def __init__(self, graph):
        self._graph = graph

    def heads(self, ids):
        """See Graph.heads()"""
        as_keys = [(i,) for i in ids]
        head_keys = self._graph.heads(as_keys)
        return set([h[0] for h in head_keys])
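
    # Illustrative sketch (added; not from the original source): revision ids
    # are wrapped into 1-tuple keys before the underlying call, e.g.
    #
    #   GraphThunkIdsToKeys(key_graph).heads(['rev-a', 'rev-b'])
    #   # calls key_graph.heads([('rev-a',), ('rev-b',)]) and unwraps the result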


_counters = [0,0,0,0,0,0,0]
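# The compiled (pyx) KnownGraph implementation is preferred; if the extension
# fails to load, the pure-Python implementation is imported instead.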
try:
    from bzrlib._known_graph_pyx import KnownGraph
except ImportError, e:
    osutils.failed_to_load_extension(e)
    from bzrlib._known_graph_py import KnownGraph