# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

from __future__ import absolute_import

from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """
import itertools

from bzrlib import (
    controldir,
    errors,
    knit as _mod_knit,
    lockable_files,
    lockdir,
    osutils,
    revision as _mod_revision,
    trace,
    transactions,
    versionedfile,
    xml5,
    xml6,
    xml7,
    )
""")
from bzrlib.decorators import needs_read_lock, needs_write_lock
from bzrlib.repository import (
    InterRepository,
    IsInWriteGroupError,
    RepositoryFormatMetaDir,
    )
from bzrlib.vf_repository import (
    InterSameDataRepository,
    MetaDirVersionedFileRepository,
    MetaDirVersionedFileRepositoryFormat,
    VersionedFileCommitBuilder,
    VersionedFileRootCommitBuilder,
    )
from bzrlib import symbol_versioning


class _KnitParentsProvider(object):

    def __init__(self, knit):
        self._knit = knit

    def __repr__(self):
        return 'KnitParentsProvider(%r)' % self._knit

    def get_parent_map(self, keys):
        """See graph.StackedParentsProvider.get_parent_map"""
        parent_map = {}
        for revision_id in keys:
            if revision_id is None:
                raise ValueError('get_parent_map(None) is not valid')
            if revision_id == _mod_revision.NULL_REVISION:
                parent_map[revision_id] = ()
            else:
                try:
                    parents = tuple(
                        self._knit.get_parents_with_ghosts(revision_id))
                except errors.RevisionNotPresent:
                    continue
                else:
                    if len(parents) == 0:
                        parents = (_mod_revision.NULL_REVISION,)
                parent_map[revision_id] = parents
        return parent_map


class _KnitsParentsProvider(object):

    def __init__(self, knit, prefix=()):
        """Create a parent provider for string keys mapped to tuple keys."""
        self._knit = knit
        self._prefix = prefix

    def __repr__(self):
        return 'KnitsParentsProvider(%r)' % self._knit

    def get_parent_map(self, keys):
        """See graph.StackedParentsProvider.get_parent_map"""
        parent_map = self._knit.get_parent_map(
            [self._prefix + (key,) for key in keys])
        result = {}
        for key, parents in parent_map.items():
            revid = key[-1]
            if len(parents) == 0:
                parents = (_mod_revision.NULL_REVISION,)
            else:
                parents = tuple(parent[-1] for parent in parents)
            result[revid] = parents
        for revision_id in keys:
            if revision_id == _mod_revision.NULL_REVISION:
                result[revision_id] = ()
        return result
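
# Illustrative sketch (an assumption for clarity, not part of bzrlib's API):
# _KnitsParentsProvider adapts the tuple-keyed VersionedFiles interface back
# to the plain string revision ids that the graph code expects.  Assuming a
# knit-backed repository object `repo` with revisions 'rev-1' <- 'rev-2':
#
#     provider = _KnitsParentsProvider(repo.revisions)
#     provider.get_parent_map(['rev-2'])
#     # -> {'rev-2': ('rev-1',)}, derived from {('rev-2',): (('rev-1',),)}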


class KnitRepository(MetaDirVersionedFileRepository):
    """Knit format repository."""

    # These attributes are inherited from the Repository base class. Setting
    # them to None ensures that if the constructor is changed to not initialize
    # them, or a subclass fails to call the constructor, that an error will
    # occur rather than the system working but generating incorrect data.
    _commit_builder_class = None
    _serializer = None

    def __init__(self, _format, a_bzrdir, control_files, _commit_builder_class,
        _serializer):
        super(KnitRepository, self).__init__(_format, a_bzrdir, control_files)
        self._commit_builder_class = _commit_builder_class
        self._serializer = _serializer
        self._reconcile_fixes_text_parents = True

    @needs_read_lock
    def _all_revision_ids(self):
        """See Repository.all_revision_ids()."""
        return [key[0] for key in self.revisions.keys()]

    def _activate_new_inventory(self):
        """Put a replacement inventory.new into use as inventories."""
        # Copy the content across
        t = self._transport
        t.copy('inventory.new.kndx', 'inventory.kndx')
        try:
            t.copy('inventory.new.knit', 'inventory.knit')
        except errors.NoSuchFile:
            # empty inventories knit
            t.delete('inventory.knit')
        # delete the temp inventory
        t.delete('inventory.new.kndx')
        try:
            t.delete('inventory.new.knit')
        except errors.NoSuchFile:
            # empty inventories knit
            pass
        # Force index reload (sanity check)
        self.inventories._index._reset_cache()
        self.inventories.keys()

    def _backup_inventory(self):
        t = self._transport
        t.copy('inventory.kndx', 'inventory.backup.kndx')
        t.copy('inventory.knit', 'inventory.backup.knit')

    def _move_file_id(self, from_id, to_id):
        t = self._transport.clone('knits')
        from_rel_url = self.texts._index._mapper.map((from_id, None))
        to_rel_url = self.texts._index._mapper.map((to_id, None))
        # We expect both files to always exist in this case.
        for suffix in ('.knit', '.kndx'):
            t.rename(from_rel_url + suffix, to_rel_url + suffix)

    def _remove_file_id(self, file_id):
        t = self._transport.clone('knits')
        rel_url = self.texts._index._mapper.map((file_id, None))
        for suffix in ('.kndx', '.knit'):
            try:
                t.delete(rel_url + suffix)
            except errors.NoSuchFile:
                pass

    def _temp_inventories(self):
        result = self._format._get_inventories(self._transport, self,
            'inventory.new')
        # Reconciling when the output has no revisions would result in no
        # writes - but we want to ensure there is an inventory for
        # compatibility with older clients that don't lazy-load.
        result.get_parent_map([('A',)])
        return result

    @needs_read_lock
    def get_revision(self, revision_id):
        """Return the Revision object for a named revision."""
        revision_id = osutils.safe_revision_id(revision_id)
        return self.get_revision_reconcile(revision_id)

    def _refresh_data(self):
        if not self.is_locked():
            return
        if self.is_in_write_group():
            raise IsInWriteGroupError(self)
        # Create a new transaction to force all knits to see the scope change.
        # This is safe because we're outside a write group.
        self.control_files._finish_transaction()
        if self.is_write_locked():
            self.control_files._set_write_transaction()
        else:
            self.control_files._set_read_transaction()

    @needs_write_lock
    def reconcile(self, other=None, thorough=False):
        """Reconcile this repository."""
        from bzrlib.reconcile import KnitReconciler
        reconciler = KnitReconciler(self, thorough=thorough)
        reconciler.reconcile()
        return reconciler

    def _make_parents_provider(self):
        return _KnitsParentsProvider(self.revisions)
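
# Illustrative sketch (an assumption, not part of this module): the provider
# built above is what backs Repository.get_graph() for knit repositories, so
# ancestry walks resolve parents through the revisions knit, e.g.
#
#     graph = repo.get_graph()
#     graph.get_parent_map(['rev-2'])
#     # -> {'rev-2': ('rev-1',)}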


class RepositoryFormatKnit(MetaDirVersionedFileRepositoryFormat):
    """Bzr repository knit format (generalized).

    This repository format has:
     - knits for file texts and inventory
     - hash subdirectory based stores.
     - knits for revisions and signatures
     - TextStores for revisions and signatures.
     - a format marker of its own
     - an optional 'shared-storage' flag
     - an optional 'no-working-trees' flag
     - a LockDir lock
    """

    # Set this attribute in derived classes to control the repository class
    # created by open and initialize.
    repository_class = None
    # Set this attribute in derived classes to control the
    # _commit_builder_class that the repository objects will have passed to
    # their constructor.
    _commit_builder_class = None
    # Set this attribute in derived classes to control the _serializer that the
    # repository objects will have passed to their constructor.
    @property
    def _serializer(self):
        return xml5.serializer_v5
    # Knit based repositories handle ghosts reasonably well.
    supports_ghosts = True
    # External lookups are not supported in this format.
    supports_external_lookups = False
    supports_chks = False
    _fetch_order = 'topological'
    _fetch_uses_deltas = True
    supports_funky_characters = True
    # The revision.kndx could potentially claim a revision has a different
    # parent to the revision text.
    revision_graph_can_have_wrong_parents = True

    def _get_inventories(self, repo_transport, repo, name='inventory'):
        mapper = versionedfile.ConstantMapper(name)
        index = _mod_knit._KndxIndex(repo_transport, mapper,
            repo.get_transaction, repo.is_write_locked, repo.is_locked)
        access = _mod_knit._KnitKeyAccess(repo_transport, mapper)
        return _mod_knit.KnitVersionedFiles(index, access, annotated=False)

    def _get_revisions(self, repo_transport, repo):
        mapper = versionedfile.ConstantMapper('revisions')
        index = _mod_knit._KndxIndex(repo_transport, mapper,
            repo.get_transaction, repo.is_write_locked, repo.is_locked)
        access = _mod_knit._KnitKeyAccess(repo_transport, mapper)
        return _mod_knit.KnitVersionedFiles(index, access, max_delta_chain=0,
            annotated=False)

    def _get_signatures(self, repo_transport, repo):
        mapper = versionedfile.ConstantMapper('signatures')
        index = _mod_knit._KndxIndex(repo_transport, mapper,
            repo.get_transaction, repo.is_write_locked, repo.is_locked)
        access = _mod_knit._KnitKeyAccess(repo_transport, mapper)
        return _mod_knit.KnitVersionedFiles(index, access, max_delta_chain=0,
            annotated=False)

    def _get_texts(self, repo_transport, repo):
        mapper = versionedfile.HashEscapedPrefixMapper()
        base_transport = repo_transport.clone('knits')
        index = _mod_knit._KndxIndex(base_transport, mapper,
            repo.get_transaction, repo.is_write_locked, repo.is_locked)
        access = _mod_knit._KnitKeyAccess(base_transport, mapper)
        return _mod_knit.KnitVersionedFiles(index, access, max_delta_chain=200,
            annotated=True)
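
    # Illustrative sketch (an assumption, not part of this module): the
    # accessors above determine the on-disk names of each .kndx/.knit pair.
    # For example:
    #
    #     versionedfile.ConstantMapper('revisions').map(('rev-1',))
    #     # -> 'revisions'  (i.e. revisions.kndx / revisions.knit)
    #
    # so revisions, signatures and inventories each live in one shared knit,
    # while file texts are spread across per-file knits under 'knits/'.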

    def initialize(self, a_bzrdir, shared=False):
        """Create a knit format 1 repository.

        :param a_bzrdir: bzrdir to contain the new repository; must already
            be initialized.
        :param shared: If true the repository will be initialized as a shared
                       repository.
        """
        trace.mutter('creating repository in %s.', a_bzrdir.transport.base)
        dirs = ['revision-store', 'knits']
        files = []
        utf8_files = [('format', self.get_format_string())]

        self._upload_blank_content(a_bzrdir, dirs, files, utf8_files, shared)
        repo_transport = a_bzrdir.get_repository_transport(None)
        control_files = lockable_files.LockableFiles(repo_transport,
                                'lock', lockdir.LockDir)
        transaction = transactions.WriteTransaction()
        result = self.open(a_bzrdir=a_bzrdir, _found=True)
        result.lock_write()
        # the revision id here is irrelevant: it will not be stored, and cannot
        # already exist, we do this to create files on disk for older clients.
        result.inventories.get_parent_map([('A',)])
        result.revisions.get_parent_map([('A',)])
        result.signatures.get_parent_map([('A',)])
        result.unlock()
        self._run_post_repo_init_hooks(result, a_bzrdir, shared)
        return result

    def open(self, a_bzrdir, _found=False, _override_transport=None):
        """See RepositoryFormat.open().

        :param _override_transport: INTERNAL USE ONLY. Allows opening the
                                    repository at a slightly different url
                                    than normal. I.e. during 'upgrade'.
        """
        if not _found:
            format = RepositoryFormatMetaDir.find_format(a_bzrdir)
        if _override_transport is not None:
            repo_transport = _override_transport
        else:
            repo_transport = a_bzrdir.get_repository_transport(None)
        control_files = lockable_files.LockableFiles(repo_transport,
                                'lock', lockdir.LockDir)
        repo = self.repository_class(_format=self,
                              a_bzrdir=a_bzrdir,
                              control_files=control_files,
                              _commit_builder_class=self._commit_builder_class,
                              _serializer=self._serializer)
        repo.revisions = self._get_revisions(repo_transport, repo)
        repo.signatures = self._get_signatures(repo_transport, repo)
        repo.inventories = self._get_inventories(repo_transport, repo)
        repo.texts = self._get_texts(repo_transport, repo)
        repo.chk_bytes = None
        repo._transport = repo_transport
        return repo
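
# Illustrative usage sketch (an assumption, not part of this module): these
# formats are normally reached through the controldir machinery rather than
# instantiated directly, roughly:
#
#     from bzrlib import bzrdir
#     a_bzrdir = bzrdir.BzrDir.open('path/to/branch')
#     repo = a_bzrdir.open_repository()   # dispatches to the format's open()
#
# while initialize() is driven by 'bzr init'/'bzr init-repo' when a knit
# format is selected.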


class RepositoryFormatKnit1(RepositoryFormatKnit):
    """Bzr repository knit format 1.

    This format was introduced in bzr 0.8.
    """

    repository_class = KnitRepository
    _commit_builder_class = VersionedFileCommitBuilder

    @classmethod
    def get_format_string(cls):
        """See RepositoryFormat.get_format_string()."""
        return "Bazaar-NG Knit Repository Format 1"

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""
        return "Knit repository format 1"


class RepositoryFormatKnit3(RepositoryFormatKnit):
    """Bzr repository knit format 3.

    This repository format builds on the generalized knit format, adding:
     - support for recording full info about the tree root
     - support for recording tree-references
    """

    repository_class = KnitRepository
    _commit_builder_class = VersionedFileRootCommitBuilder
    rich_root_data = True
    supports_tree_reference = True
    @property
    def _serializer(self):
        return xml7.serializer_v7

    def _get_matching_bzrdir(self):
        return controldir.format_registry.make_bzrdir('dirstate-with-subtree')

    def _ignore_setting_bzrdir(self, format):
        pass

    _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)

    def check_conversion_target(self, target_format):
        if not target_format.rich_root_data:
            raise errors.BadConversionTarget(
                'Does not support rich root data.', target_format)
        if not getattr(target_format, 'supports_tree_reference', False):
            raise errors.BadConversionTarget(
                'Does not support nested trees', target_format)

    @classmethod
    def get_format_string(cls):
        """See RepositoryFormat.get_format_string()."""
        return "Bazaar Knit Repository Format 3 (bzr 0.15)\n"

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""
        return "Knit repository format 3"


class RepositoryFormatKnit4(RepositoryFormatKnit):
    """Bzr repository knit format 4.

    This repository format has everything in format 3, except for
    tree-references:
     - knits for file texts and inventory
     - hash subdirectory based stores.
     - knits for revisions and signatures
     - TextStores for revisions and signatures.
     - a format marker of its own
     - an optional 'shared-storage' flag
     - an optional 'no-working-trees' flag
     - support for recording full info about the tree root
    """

    repository_class = KnitRepository
    _commit_builder_class = VersionedFileRootCommitBuilder
    rich_root_data = True
    supports_tree_reference = False
    @property
    def _serializer(self):
        return xml6.serializer_v6

    def _get_matching_bzrdir(self):
        return controldir.format_registry.make_bzrdir('rich-root')

    def _ignore_setting_bzrdir(self, format):
        pass

    _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)

    @classmethod
    def get_format_string(cls):
        """See RepositoryFormat.get_format_string()."""
        return 'Bazaar Knit Repository Format 4 (bzr 1.0)\n'

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""
        return "Knit repository format 4"


class InterKnitRepo(InterSameDataRepository):
    """Optimised code paths between Knit based repositories."""

    @classmethod
    def _get_repo_format_to_test(self):
        return RepositoryFormatKnit1()

    @staticmethod
    def is_compatible(source, target):
        """Be compatible with known Knit formats.

        We don't test for the stores being of specific types because that
        could lead to confusing results, and there is no need to be
        overly general.
        """
        try:
            are_knits = (isinstance(source._format, RepositoryFormatKnit) and
                isinstance(target._format, RepositoryFormatKnit))
        except AttributeError:
            return False
        return are_knits and InterRepository._same_model(source, target)

    @needs_read_lock
    def search_missing_revision_ids(self,
            find_ghosts=True, revision_ids=None, if_present_ids=None,
            limit=None):
        """See InterRepository.search_missing_revision_ids()."""
        source_ids_set = self._present_source_revisions_for(
            revision_ids, if_present_ids)
        # source_ids is the worst possible case we may need to pull.
        # now we want to filter source_ids against what we actually
        # have in target, but don't try to check for existence where we know
        # we do not have a revision as that would be pointless.
        target_ids = set(self.target.all_revision_ids())
        possibly_present_revisions = target_ids.intersection(source_ids_set)
        actually_present_revisions = set(
            self.target._eliminate_revisions_not_present(possibly_present_revisions))
        required_revisions = source_ids_set.difference(actually_present_revisions)
        if revision_ids is not None:
            # we used get_ancestry to determine source_ids then we are assured all
            # revisions referenced are present as they are installed in topological order.
            # and the tip revision was validated by get_ancestry.
            result_set = required_revisions
        else:
            # if we just grabbed the possibly available ids, then
            # we only have an estimate of whats available and need to validate
            # that against the revision records.
            result_set = set(
                self.source._eliminate_revisions_not_present(required_revisions))
        if limit is not None:
            topo_ordered = self.source.get_graph().iter_topo_order(result_set)
            result_set = set(itertools.islice(topo_ordered, limit))
        return self.source.revision_ids_to_search_result(result_set)
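
# Illustrative sketch (an assumption, not part of this module): InterKnitRepo
# becomes a candidate optimiser once registered below, and is chosen when
# both repositories are knit-backed, roughly:
#
#     inter = InterRepository.get(source_repo, target_repo)
#     # isinstance(inter, InterKnitRepo) when is_compatible() returned True
#     search = inter.search_missing_revision_ids(revision_ids=['rev-id'])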


InterRepository.register_optimiser(InterKnitRepo)