# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """
import itertools

from bzrlib import (
    bzrdir,
    errors,
    graph,
    knit,
    knit as _mod_knit,
    lockable_files,
    lockdir,
    osutils,
    revision as _mod_revision,
    trace,
    transactions,
    versionedfile,
    xml5,
    xml6,
    xml7,
    )
""")
from bzrlib import symbol_versioning
from bzrlib.decorators import needs_read_lock, needs_write_lock
from bzrlib.repository import (
    InterRepository,
    IsInWriteGroupError,
    MetaDirRepositoryFormat,
    RepositoryFormat,
    RootCommitBuilder,
    )
from bzrlib.store.versioned import VersionedFileStore
from bzrlib.trace import mutter, note, warning
from bzrlib.vf_repository import (
    InterSameDataRepository,
    MetaDirVersionedFileRepository,
    MetaDirVersionedFileRepositoryFormat,
    VersionedFileCommitBuilder,
    VersionedFileRootCommitBuilder,
    )


class _KnitParentsProvider(object):

    def __init__(self, knit):
        self._knit = knit

    def __repr__(self):
        return 'KnitParentsProvider(%r)' % self._knit

    def get_parent_map(self, keys):
        """See graph.StackedParentsProvider.get_parent_map"""
        parent_map = {}
        for revision_id in keys:
            if revision_id is None:
                raise ValueError('get_parent_map(None) is not valid')
            if revision_id == _mod_revision.NULL_REVISION:
                parent_map[revision_id] = ()
            else:
                try:
                    parents = tuple(
                        self._knit.get_parents_with_ghosts(revision_id))
                except errors.RevisionNotPresent:
                    continue
                else:
                    if len(parents) == 0:
                        parents = (_mod_revision.NULL_REVISION,)
                parent_map[revision_id] = parents
        return parent_map
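    # Illustrative sketch (not part of the original module): the parent-map
    # contract used above maps each requested revision id to a tuple of parent
    # revision ids, and ghosts are simply left out of the result, e.g.
    #   provider = _KnitParentsProvider(revision_knit)
    #   provider.get_parent_map(['rev-2', 'ghost-rev'])
    #   # => {'rev-2': ('rev-1',)}   ('ghost-rev' is silently omitted)
    # where 'rev-2'/'rev-1'/'ghost-rev' are made-up revision ids.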


class _KnitsParentsProvider(object):

    def __init__(self, knit, prefix=()):
        """Create a parent provider for string keys mapped to tuple keys."""
        self._knit = knit
        self._prefix = prefix

    def __repr__(self):
        return 'KnitsParentsProvider(%r)' % self._knit

    def get_parent_map(self, keys):
        """See graph.StackedParentsProvider.get_parent_map"""
        parent_map = self._knit.get_parent_map(
            [self._prefix + (key,) for key in keys])
        result = {}
        for key, parents in parent_map.items():
            revid = key[-1]
            if len(parents) == 0:
                parents = (_mod_revision.NULL_REVISION,)
            else:
                parents = tuple(parent[-1] for parent in parents)
            result[revid] = parents
        for revision_id in keys:
            if revision_id == _mod_revision.NULL_REVISION:
                result[revision_id] = ()
        return result
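    # Illustrative sketch (not part of the original module): with
    # prefix=('file-id',) the provider translates between flat revision ids
    # and the knit's tuple keys, so 'rev-1' is looked up as
    # ('file-id', 'rev-1') and the tuple-shaped parents are flattened back to
    # plain revision ids in the returned map ('file-id'/'rev-1' are made up).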


class KnitRepository(MetaDirVersionedFileRepository):
    """Knit format repository."""

    # These attributes are inherited from the Repository base class. Setting
    # them to None ensures that if the constructor is changed to not initialize
    # them, or a subclass fails to call the constructor, that an error will
    # occur rather than the system working but generating incorrect data.
    _commit_builder_class = None
    _serializer = xml5.serializer_v5

    def __init__(self, _format, a_bzrdir, control_files, _commit_builder_class,
                 _serializer):
        super(KnitRepository, self).__init__(_format, a_bzrdir, control_files)
        self._commit_builder_class = _commit_builder_class
        self._serializer = _serializer
        self._reconcile_fixes_text_parents = True

    def _warn_if_deprecated(self):
        # This class isn't deprecated
        pass

    def _inventory_add_lines(self, inv_vf, revid, parents, lines):
        inv_vf.add_lines_with_ghosts(revid, parents, lines)

    def _all_revision_ids(self):
        """See Repository.all_revision_ids()."""
        # Knits get the revision graph from the index of the revision knit, so
        # it's always possible even if they're on an unlistable transport.
        return [key[0] for key in self.revisions.keys()]

    def fileid_involved_between_revs(self, from_revid, to_revid):
        """Find file_id(s) which are involved in the changes between revisions.

        This determines the set of revisions which are involved, and then
        finds all file ids affected by those revisions.
        """
        from_revid = osutils.safe_revision_id(from_revid)
        to_revid = osutils.safe_revision_id(to_revid)
        vf = self._get_revision_vf()
        from_set = set(vf.get_ancestry(from_revid))
        to_set = set(vf.get_ancestry(to_revid))
        changed = to_set.difference(from_set)
        return self._fileid_involved_by_set(changed)
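    # Illustrative sketch (not part of the original module): the changed set
    # is an ancestry difference, e.g. with ancestries
    #   from_revid -> {A, B}   and   to_revid -> {A, B, C, D}
    # only C and D reach _fileid_involved_by_set(), so file ids are collected
    # just for the revisions that are new in to_revid.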

    def fileid_involved(self, last_revid=None):
        """Find all file_ids modified in the ancestry of last_revid.

        :param last_revid: If None, last_revision() will be used.
        """
        if last_revid is None:
            changed = set(self.all_revision_ids())
        else:
            changed = set(self.get_ancestry(last_revid))
        return self._fileid_involved_by_set(changed)

    def get_ancestry(self, revision_id):
        """Return a list of revision-ids integrated by a revision.

        This is topologically sorted.
        """
        if revision_id is None:
            return [None]
        revision_id = osutils.safe_revision_id(revision_id)
        vf = self._get_revision_vf()
        try:
            return [None] + vf.get_ancestry(revision_id)
        except errors.RevisionNotPresent:
            raise errors.NoSuchRevision(self, revision_id)

    def _activate_new_inventory(self):
        """Put a replacement inventory.new into use as inventories."""
        # Copy the content across
        t = self._transport
        t.copy('inventory.new.kndx', 'inventory.kndx')
        try:
            t.copy('inventory.new.knit', 'inventory.knit')
        except errors.NoSuchFile:
            # empty inventories knit
            t.delete('inventory.knit')
        # delete the temp inventory
        t.delete('inventory.new.kndx')
        try:
            t.delete('inventory.new.knit')
        except errors.NoSuchFile:
            # empty inventories knit
            pass
        # Force index reload (sanity check)
        self.inventories._index._reset_cache()
        self.inventories.keys()

    def _backup_inventory(self):
        t = self._transport
        t.copy('inventory.kndx', 'inventory.backup.kndx')
        t.copy('inventory.knit', 'inventory.backup.knit')

    def _move_file_id(self, from_id, to_id):
        t = self._transport.clone('knits')
        from_rel_url = self.texts._index._mapper.map((from_id, None))
        to_rel_url = self.texts._index._mapper.map((to_id, None))
        # We expect both files to always exist in this case.
        for suffix in ('.knit', '.kndx'):
            t.rename(from_rel_url + suffix, to_rel_url + suffix)
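    # Illustrative sketch (not part of the original module): the texts mapper
    # turns a (file_id, None) key into a hash-prefixed, escaped relative path,
    # so a file id might map to something like 'a1/foo-20080101-abc'; renaming
    # both the .knit and .kndx files keeps the data and its index together
    # under the new file id.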

    def _remove_file_id(self, file_id):
        t = self._transport.clone('knits')
        rel_url = self.texts._index._mapper.map((file_id, None))
        for suffix in ('.kndx', '.knit'):
            try:
                t.delete(rel_url + suffix)
            except errors.NoSuchFile:
                pass

    def _temp_inventories(self):
        result = self._format._get_inventories(self._transport, self,
            'inventory.new')
        # Reconciling when the output has no revisions would result in no
        # writes - but we want to ensure there is an inventory for
        # compatibility with older clients that don't lazy-load.
        result.get_parent_map([('A',)])
        return result

    def get_revision(self, revision_id):
        revision_id = osutils.safe_revision_id(revision_id)
        return self.get_revision_reconcile(revision_id)

    def get_revision_graph(self, revision_id=None):
        """Return a dictionary containing the revision graph.

        :param revision_id: The revision_id to get a graph from. If None, then
        the entire revision graph is returned. This is a deprecated mode of
        operation and will be removed in the future.
        :return: a dictionary of revision_id->revision_parents_list.
        """
        # special case NULL_REVISION
        if revision_id == _mod_revision.NULL_REVISION:
            return {}
        revision_id = osutils.safe_revision_id(revision_id)
        a_weave = self._get_revision_vf()
        if revision_id is None:
            return a_weave.get_graph()
        elif revision_id not in a_weave:
            raise errors.NoSuchRevision(self, revision_id)
        else:
            # add what can be reached from revision_id
            result = {}
            pending = set([revision_id])
            while len(pending) > 0:
                node = pending.pop()
                result[node] = a_weave.get_parents(node)
                for revision_id in result[node]:
                    if revision_id not in result:
                        pending.add(revision_id)
            return result
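    # Illustrative sketch (not part of the original module): the returned
    # mapping walks parents transitively from the requested revision, e.g.
    #   {'rev-3': ['rev-2'], 'rev-2': ['rev-1'], 'rev-1': []}
    # so callers can topologically sort it without touching the knit again
    # (the revision ids above are made up).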

    def get_revision_graph_with_ghosts(self, revision_ids=None):
        """Return a graph of the revisions with ghosts marked as applicable.

        :param revision_ids: an iterable of revisions to graph or None for all.
        :return: a Graph object with the graph reachable from revision_ids.
        """
        result = graph.Graph()
        vf = self._get_revision_vf()
        versions = set(vf.versions())
        if not revision_ids:
            pending = set(self.all_revision_ids())
            required = set([])
        else:
            pending = set(osutils.safe_revision_id(r) for r in revision_ids)
            # special case NULL_REVISION
            if _mod_revision.NULL_REVISION in pending:
                pending.remove(_mod_revision.NULL_REVISION)
            required = set(pending)
        done = set([])
        while len(pending):
            revision_id = pending.pop()
            if not revision_id in versions:
                if revision_id in required:
                    raise errors.NoSuchRevision(self, revision_id)
                # a ghost
                result.add_ghost(revision_id)
                # mark it as done so we don't try for it again.
                done.add(revision_id)
                continue
            parent_ids = vf.get_parents_with_ghosts(revision_id)
            for parent_id in parent_ids:
                # is this queued or done ?
                if (parent_id not in pending and
                    parent_id not in done):
                    # no, queue it.
                    pending.add(parent_id)
            result.add_node(revision_id, parent_ids)
            done.add(revision_id)
        return result

    def _get_revision_vf(self):
        """:return: a versioned file containing the revisions."""
        vf = self._revision_store.get_revision_file(self.get_transaction())
        return vf

    def _get_history_vf(self):
        """Get a versionedfile whose history graph reflects all revisions.

        For knit repositories, this is the revision knit.
        """
        return self._get_revision_vf()

    def _refresh_data(self):
        if not self.is_locked():
            return
        if self.is_in_write_group():
            raise IsInWriteGroupError(self)
        # Create a new transaction to force all knits to see the scope change.
        # This is safe because we're outside a write group.
        self.control_files._finish_transaction()
        if self.is_write_locked():
            self.control_files._set_write_transaction()
        else:
            self.control_files._set_read_transaction()

    @needs_write_lock
    def reconcile(self, other=None, thorough=False):
        from bzrlib.reconcile import KnitReconciler
        reconciler = KnitReconciler(self, thorough=thorough)
        reconciler.reconcile()
        return reconciler

    def revision_parents(self, revision_id):
        revision_id = osutils.safe_revision_id(revision_id)
        return self._get_revision_vf().get_parents(revision_id)


class KnitRepository2(KnitRepository):

    def __init__(self, _format, a_bzrdir, control_files, _revision_store,
                 control_store, text_store):
        KnitRepository.__init__(self, _format, a_bzrdir, control_files,
                                _revision_store, control_store, text_store)
        self._serializer = xml6.serializer_v6

    def deserialise_inventory(self, revision_id, xml):
        """Transform the xml into an inventory object.

        :param revision_id: The expected revision id of the inventory.
        :param xml: A serialised inventory.
        """
        result = self._serializer.read_inventory_from_string(xml)
        assert result.root.revision is not None
        return result

    def serialise_inventory(self, inv):
        """Transform the inventory object into XML text.

        :param inv: The inventory to serialise; its revision_id and root
            revision must already be set.
        """
        assert inv.revision_id is not None
        assert inv.root.revision is not None
        return KnitRepository.serialise_inventory(self, inv)

    def get_commit_builder(self, branch, parents, config, timestamp=None,
                           timezone=None, committer=None, revprops=None,
                           revision_id=None):
        """Obtain a CommitBuilder for this repository.

        :param branch: Branch to commit to.
        :param parents: Revision ids of the parents of the new revision.
        :param config: Configuration to use.
        :param timestamp: Optional timestamp recorded for commit.
        :param timezone: Optional timezone for timestamp.
        :param committer: Optional committer to set for commit.
        :param revprops: Optional dictionary of revision properties.
        :param revision_id: Optional revision id.
        """
        revision_id = osutils.safe_revision_id(revision_id)
        return RootCommitBuilder(self, parents, config, timestamp, timezone,
                                 committer, revprops, revision_id)
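    # Illustrative sketch (not part of the original module): callers drive the
    # builder roughly like
    #   builder = repo.get_commit_builder(branch, ['parent-rev'], config)
    #   ... record tree contents against builder ...
    #   builder.commit('message')
    # though the exact recording calls depend on the bzrlib version in use.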

    def _make_parents_provider(self):
        return _KnitsParentsProvider(self.revisions)


class RepositoryFormatKnit(MetaDirVersionedFileRepositoryFormat):
    """Bzr repository knit format (generalized).

    This repository format has:
     - knits for file texts and inventory
     - hash subdirectory based stores.
     - knits for revisions and signatures
     - TextStores for revisions and signatures.
     - a format marker of its own
     - an optional 'shared-storage' flag
     - an optional 'no-working-trees' flag
     - a LockDir lock
    """

    def _get_control_store(self, repo_transport, control_files):
        """Return the control store for this repository."""
        return VersionedFileStore(
            repo_transport,
            file_mode=control_files._file_mode,
            versionedfile_class=knit.KnitVersionedFile,
            versionedfile_kwargs={'factory':knit.KnitPlainFactory()},
            )

    def _get_revision_store(self, repo_transport, control_files):
        """See RepositoryFormat._get_revision_store()."""
        from bzrlib.store.revision.knit import KnitRevisionStore
        versioned_file_store = VersionedFileStore(
            repo_transport,
            file_mode=control_files._file_mode,
            versionedfile_class=knit.KnitVersionedFile,
            versionedfile_kwargs={'delta':False,
                                  'factory':knit.KnitPlainFactory(),
                                 },
            )
        return KnitRevisionStore(versioned_file_store)

    def _get_text_store(self, transport, control_files):
        """See RepositoryFormat._get_text_store()."""
        return self._get_versioned_file_store('knits',
            transport,
            control_files,
            versionedfile_class=knit.KnitVersionedFile,
            versionedfile_kwargs={
                'create_parent_dir':True,
                'dir_mode':control_files._dir_mode,
                },
            )
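    # Illustrative sketch (not part of the original module): each knit is a
    # pair of files, <name>.kndx (the index of versions and their parents)
    # plus <name>.knit (the compressed fulltexts/deltas); per-file texts live
    # under the 'knits/' subdirectory using an escaped, hashed prefix of the
    # file id, while the control knits sit at the top of the repository.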

    # Set this attribute in derived classes to control the repository class
    # created by open and initialize.
    repository_class = None
    # Set this attribute in derived classes to control the
    # _commit_builder_class that the repository objects will have passed to
    # their constructor.
    _commit_builder_class = None
    # Set this attribute in derived classes to control the _serializer that the
    # repository objects will have passed to their constructor.
    @property
    def _serializer(self):
        return xml5.serializer_v5
    # Knit based repositories handle ghosts reasonably well.
    supports_ghosts = True
    # External lookups are not supported in this format.
    supports_external_lookups = False
    supports_chks = False
    _fetch_order = 'topological'
    _fetch_uses_deltas = True
    supports_funky_characters = True
    # The revision.kndx could potentially claim a revision has a different
    # parent to the revision text.
    revision_graph_can_have_wrong_parents = True

    def _get_inventories(self, repo_transport, repo, name='inventory'):
        mapper = versionedfile.ConstantMapper(name)
        index = _mod_knit._KndxIndex(repo_transport, mapper,
            repo.get_transaction, repo.is_write_locked, repo.is_locked)
        access = _mod_knit._KnitKeyAccess(repo_transport, mapper)
        return _mod_knit.KnitVersionedFiles(index, access, annotated=False)

    def _get_revisions(self, repo_transport, repo):
        mapper = versionedfile.ConstantMapper('revisions')
        index = _mod_knit._KndxIndex(repo_transport, mapper,
            repo.get_transaction, repo.is_write_locked, repo.is_locked)
        access = _mod_knit._KnitKeyAccess(repo_transport, mapper)
        return _mod_knit.KnitVersionedFiles(index, access, max_delta_chain=0,
            annotated=False)

    def _get_signatures(self, repo_transport, repo):
        mapper = versionedfile.ConstantMapper('signatures')
        index = _mod_knit._KndxIndex(repo_transport, mapper,
            repo.get_transaction, repo.is_write_locked, repo.is_locked)
        access = _mod_knit._KnitKeyAccess(repo_transport, mapper)
        return _mod_knit.KnitVersionedFiles(index, access, max_delta_chain=0,
            annotated=False)

    def _get_texts(self, repo_transport, repo):
        mapper = versionedfile.HashEscapedPrefixMapper()
        base_transport = repo_transport.clone('knits')
        index = _mod_knit._KndxIndex(base_transport, mapper,
            repo.get_transaction, repo.is_write_locked, repo.is_locked)
        access = _mod_knit._KnitKeyAccess(base_transport, mapper)
        return _mod_knit.KnitVersionedFiles(index, access, max_delta_chain=200,
            annotated=True)
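    # Illustrative sketch (not part of the original module): each accessor
    # wires a mapper (key -> file name), a .kndx index and raw key access
    # together; the revisions/signatures knits map every key onto a single
    # constant file pair, while text keys such as ('file-id', 'rev-id') are
    # hash-prefixed under 'knits/' and may use delta chains up to 200 long.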

    def initialize(self, a_bzrdir, shared=False):
        """Create a knit format 1 repository.

        :param shared: If true the repository will be initialized as a shared
            repository.
        """
        trace.mutter('creating repository in %s.', a_bzrdir.transport.base)
        dirs = ['revision-store', 'knits']
        files = []
        utf8_files = [('format', self.get_format_string())]

        self._upload_blank_content(a_bzrdir, dirs, files, utf8_files, shared)
        repo_transport = a_bzrdir.get_repository_transport(None)
        control_files = lockable_files.LockableFiles(repo_transport,
                                'lock', lockdir.LockDir)
        control_store = self._get_control_store(repo_transport, control_files)
        transaction = transactions.WriteTransaction()
        # trigger a write of the inventory store.
        control_store.get_weave_or_empty('inventory', transaction)
        _revision_store = self._get_revision_store(repo_transport, control_files)
        # the revision id here is irrelevant: it will not be stored, and cannot
        # already exist; we do this to create files on disk for older clients.
        _revision_store.has_revision_id('A', transaction)
        _revision_store.get_signature_file(transaction)
        result = self.open(a_bzrdir=a_bzrdir, _found=True)
        result.inventories.get_parent_map([('A',)])
        result.revisions.get_parent_map([('A',)])
        result.signatures.get_parent_map([('A',)])
        self._run_post_repo_init_hooks(result, a_bzrdir, shared)
        return result
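    # Illustrative sketch (not part of the original module): callers normally
    # reach initialize() via bzrdir rather than directly, roughly
    #   a_bzrdir = bzrdir.BzrDir.create('path/to/repo')
    #   repo = RepositoryFormatKnit3().initialize(a_bzrdir, shared=False)
    # which writes the 'format' marker plus the empty knit indices seen above.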

    def open(self, a_bzrdir, _found=False, _override_transport=None):
        """See RepositoryFormat.open().

        :param _override_transport: INTERNAL USE ONLY. Allows opening the
                                    repository at a slightly different url
                                    than normal. I.e. during 'upgrade'.
        """
        if not _found:
            format = RepositoryFormat.find_format(a_bzrdir)
            assert format.__class__ == self.__class__
        if _override_transport is not None:
            repo_transport = _override_transport
        else:
            repo_transport = a_bzrdir.get_repository_transport(None)
        control_files = lockable_files.LockableFiles(repo_transport,
                                'lock', lockdir.LockDir)
        repo = self.repository_class(_format=self,
                              a_bzrdir=a_bzrdir,
                              control_files=control_files,
                              _commit_builder_class=self._commit_builder_class,
                              _serializer=self._serializer)
        repo.revisions = self._get_revisions(repo_transport, repo)
        repo.signatures = self._get_signatures(repo_transport, repo)
        repo.inventories = self._get_inventories(repo_transport, repo)
        repo.texts = self._get_texts(repo_transport, repo)
        repo.chk_bytes = None
        repo._transport = repo_transport
        return repo


class RepositoryFormatKnit1(RepositoryFormatKnit):
    """Bzr repository format knit 1.

    This repository format has:
     - knits for file texts and inventory
     - hash subdirectory based stores.
     - knits for revisions and signatures
     - TextStores for revisions and signatures.
     - a format marker of its own
     - an optional 'shared-storage' flag
     - an optional 'no-working-trees' flag
     - a LockDir lock
    """


class RepositoryFormatKnit2(RepositoryFormatKnit):
    """Bzr repository knit format 2.

    This repository format has everything in format 1, plus:
     - Support for recording full info about the tree root
    """

    rich_root_data = True

    def get_format_string(self):
        """See RepositoryFormat.get_format_string()."""
        return "Bazaar Knit Repository Format 2\n"

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""
        return "Knit repository format 2"

    def check_conversion_target(self, target_format):
        if not target_format.rich_root_data:
            raise errors.BadConversionTarget(
                'Does not support rich root data.', target_format)

    def open(self, a_bzrdir, _found=False, _override_transport=None):
        """See RepositoryFormat.open().

        :param _override_transport: INTERNAL USE ONLY. Allows opening the
                                    repository at a slightly different url
                                    than normal. I.e. during 'upgrade'.
        """
        format = RepositoryFormat.find_format(a_bzrdir)
        assert format.__class__ == self.__class__
        if _override_transport is not None:
            repo_transport = _override_transport
        else:
            repo_transport = a_bzrdir.get_repository_transport(None)
        control_files = lockable_files.LockableFiles(repo_transport, 'lock',
                                                     lockdir.LockDir)
        text_store = self._get_text_store(repo_transport, control_files)
        control_store = self._get_control_store(repo_transport, control_files)
        _revision_store = self._get_revision_store(repo_transport, control_files)
        return KnitRepository2(_format=self,
                               a_bzrdir=a_bzrdir,
                               control_files=control_files,
                               _revision_store=_revision_store,
                               control_store=control_store,
                               text_store=text_store)


class RepositoryFormatKnit3(RepositoryFormatKnit):
    """Bzr repository knit format 3.

    This repository format has:
     - knits for file texts and inventory
     - hash subdirectory based stores.
     - knits for revisions and signatures
     - TextStores for revisions and signatures.
     - a format marker of its own
     - an optional 'shared-storage' flag
     - an optional 'no-working-trees' flag
     - a LockDir lock
     - support for recording full info about the tree root
     - support for recording tree-references
    """

    repository_class = KnitRepository
    _commit_builder_class = VersionedFileRootCommitBuilder
    rich_root_data = True
    supports_tree_reference = True

    @property
    def _serializer(self):
        return xml7.serializer_v7

    def _get_matching_bzrdir(self):
        return bzrdir.format_registry.make_bzrdir('dirstate-with-subtree')

    def _ignore_setting_bzrdir(self, format):
        pass

    _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)

    def get_format_string(self):
        """See RepositoryFormat.get_format_string()."""
        return "Bazaar Knit Repository Format 3 (bzr 0.15)\n"

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""
        return "Knit repository format 3"


class RepositoryFormatKnit4(RepositoryFormatKnit):
    """Bzr repository knit format 4.

    This repository format has everything in format 3, except for
    tree-references, which are not supported.

    This repository format has:
     - knits for file texts and inventory
     - hash subdirectory based stores.
     - knits for revisions and signatures
     - TextStores for revisions and signatures.
     - a format marker of its own
     - an optional 'shared-storage' flag
     - an optional 'no-working-trees' flag
     - a LockDir lock
     - support for recording full info about the tree root
    """

    repository_class = KnitRepository
    _commit_builder_class = VersionedFileRootCommitBuilder
    rich_root_data = True
    supports_tree_reference = False

    @property
    def _serializer(self):
        return xml6.serializer_v6

    def _get_matching_bzrdir(self):
        return bzrdir.format_registry.make_bzrdir('rich-root')

    def _ignore_setting_bzrdir(self, format):
        pass

    _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)

    def get_format_string(self):
        """See RepositoryFormat.get_format_string()."""
        return 'Bazaar Knit Repository Format 4 (bzr 1.0)\n'

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""
        return "Knit repository format 4"


class InterKnitRepo(InterSameDataRepository):
    """Optimised code paths between Knit based repositories."""

    @classmethod
    def _get_repo_format_to_test(self):
        return RepositoryFormatKnit1()

    @staticmethod
    def is_compatible(source, target):
        """Be compatible with known Knit formats.

        We don't test for the stores being of specific types because that
        could lead to confusing results, and there is no need to be
        overly general.
        """
        try:
            are_knits = (isinstance(source._format, RepositoryFormatKnit) and
                isinstance(target._format, RepositoryFormatKnit))
        except AttributeError:
            return False
        return are_knits and InterRepository._same_model(source, target)

    @needs_read_lock
    def search_missing_revision_ids(self,
            revision_id=symbol_versioning.DEPRECATED_PARAMETER,
            find_ghosts=True, revision_ids=None, if_present_ids=None,
            limit=None):
        """See InterRepository.search_missing_revision_ids()."""
        if symbol_versioning.deprecated_passed(revision_id):
            symbol_versioning.warn(
                'search_missing_revision_ids(revision_id=...) was '
                'deprecated in 2.4. Use revision_ids=[...] instead.',
                DeprecationWarning, stacklevel=2)
            if revision_ids is not None:
                raise AssertionError(
                    'revision_ids is mutually exclusive with revision_id')
            if revision_id is not None:
                revision_ids = [revision_id]
        source_ids_set = self._present_source_revisions_for(
            revision_ids, if_present_ids)
        # source_ids is the worst possible case we may need to pull.
        # now we want to filter source_ids against what we actually
        # have in target, but don't try to check for existence where we know
        # we do not have a revision as that would be pointless.
        target_ids = set(self.target.all_revision_ids())
        possibly_present_revisions = target_ids.intersection(source_ids_set)
        actually_present_revisions = set(
            self.target._eliminate_revisions_not_present(possibly_present_revisions))
        required_revisions = source_ids_set.difference(actually_present_revisions)
        if revision_ids is not None:
            # we used get_ancestry to determine source_ids then we are assured all
            # revisions referenced are present as they are installed in topological order.
            # and the tip revision was validated by get_ancestry.
            result_set = required_revisions
        else:
            # if we just grabbed the possibly available ids, then
            # we only have an estimate of whats available and need to validate
            # that against the revision records.
            result_set = set(
                self.source._eliminate_revisions_not_present(required_revisions))
        if limit is not None:
            topo_ordered = self.source.get_graph().iter_topo_order(result_set)
            result_set = set(itertools.islice(topo_ordered, limit))
        return self.source.revision_ids_to_search_result(result_set)
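    # Illustrative sketch (not part of the original module): with a source
    # ancestry of {A, B, C, D} and a target that already has {A, B}, only the
    # intersection {A, B} is checked against the target's revision texts and
    # the search result covers the remainder {C, D}, optionally truncated to
    # 'limit' revisions in topological order.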


InterRepository.register_optimiser(InterKnitRepo)