# Copyright (C) 2007-2011 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""Deprecated weave-based repository formats.

Weave based formats scaled linearly with history size and could not represent
ghosts.
"""

from cStringIO import StringIO

from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """
import gzip
import os
import urllib

from bzrlib import (
    bzrdir,
    debug,
    errors,
    graph as _mod_graph,
    lockable_files,
    lockdir,
    osutils,
    symbol_versioning,
    trace,
    tuned_gzip,
    ui,
    urlutils,
    versionedfile,
    weave,
    weavefile,
    xml5,
    )
""")
from bzrlib.decorators import needs_read_lock, needs_write_lock
from bzrlib.errors import RevisionAlreadyPresent
from bzrlib.repository import (
    CommitBuilder,
    InterRepository,
    InterSameDataRepository,
    MetaDirVersionedFileRepository,
    MetaDirRepositoryFormat,
    Repository,
    RepositoryFormat,
    )
from bzrlib.store.text import TextStore
from bzrlib.versionedfile import (
    AbsentContentFactory,
    FulltextContentFactory,
    VersionedFiles,
    adapter_registry,
    )


class AllInOneRepository(Repository):
    """Legacy support - the repository behaviour for all-in-one branches."""

    @property
    def _serializer(self):
        return xml5.serializer_v5

    def _escape(self, file_or_path):
        if not isinstance(file_or_path, basestring):
            file_or_path = '/'.join(file_or_path)
        if file_or_path == '':
            return u''
        return urlutils.escape(osutils.safe_unicode(file_or_path))
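
    # Example (illustrative sketch): _escape joins a path tuple and
    # URL-escapes unsafe characters, so a hypothetical input of
    # ('inventory store', 'foo') would become 'inventory%20store/foo',
    # while a plain safe string such as 'weaves' passes through unchanged.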

    def __init__(self, _format, a_bzrdir):
        # we reuse one control files instance.
        dir_mode = a_bzrdir._get_dir_mode()
        file_mode = a_bzrdir._get_file_mode()

        def get_store(name, compressed=True, prefixed=False):
            # FIXME: This approach of assuming stores are all entirely compressed
            # or entirely uncompressed is tidy, but breaks upgrade from
            # some existing branches where there's a mixture; we probably
            # still want the option to look for both.
            relpath = self._escape(name)
            store = TextStore(a_bzrdir.transport.clone(relpath),
                              prefixed=prefixed, compressed=compressed,
                              dir_mode=dir_mode, file_mode=file_mode)
            return store

        # not broken out yet because the controlweaves|inventory_store
        # and texts bits are still different.
        if isinstance(_format, RepositoryFormat4):
            # cannot remove these - there is still no consistent api
            # which allows access to this old info.
            self.inventory_store = get_store('inventory-store')
            self._text_store = get_store('text-store')
        super(AllInOneRepository, self).__init__(_format, a_bzrdir, a_bzrdir._control_files)
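
    # Sketch of the on-disk layout this constructor works against
    # (all-in-one formats keep branch, repository and checkout data under
    # a single .bzr/ control directory):
    #
    #   .bzr/inventory.weave     -- the inventory weave (formats 5/6)
    #   .bzr/revision-store/     -- revision and signature texts
    #   .bzr/weaves/             -- per-file weaves
    #   .bzr/inventory-store/    -- format 4 only, opened above
    #   .bzr/text-store/         -- format 4 only, opened above
    #
    # The names are taken from the get_store()/initialize() calls in this
    # module; treat this as an illustration, not a format specification.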

    @needs_read_lock
    def _all_possible_ids(self):
        """Return all the possible revisions that we could find."""
        if 'evil' in debug.debug_flags:
            trace.mutter_callsite(
                3, "_all_possible_ids scales with size of history.")
        return [key[-1] for key in self.inventories.keys()]

    @needs_read_lock
    def _all_revision_ids(self):
        """Returns a list of all the revision ids in the repository.

        These are in as much topological order as the underlying store can
        present: for weaves ghosts may lead to a lack of correctness until
        the reweave updates the parents list.
        """
        return [key[-1] for key in self.revisions.keys()]
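
    # Illustration: versioned-file keys are 1-tuples, so key[-1] unwraps them
    # back into plain revision ids. For example, assuming
    #   self.revisions.keys() == set([('rev-1',), ('rev-2',)])
    # this returns ['rev-1', 'rev-2'] (in no particular order).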

    def _activate_new_inventory(self):
        """Put a replacement inventory.new into use as inventories."""
        # Copy the content across
        t = self.bzrdir._control_files._transport
        t.copy('inventory.new.weave', 'inventory.weave')
        # delete the temp inventory
        t.delete('inventory.new.weave')
        # Check we can parse the new weave properly as a sanity check
        self.inventories.keys()

    def _backup_inventory(self):
        t = self.bzrdir._control_files._transport
        t.copy('inventory.weave', 'inventory.backup.weave')

    def _temp_inventories(self):
        t = self.bzrdir._control_files._transport
        return self._format._get_inventories(t, self, 'inventory.new')
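
    # Sketch of how these helpers are meant to be combined (for example by a
    # reconcile-style rewrite of the inventory weave); assuming `repo` is a
    # write-locked instance of this class:
    #
    #   repo._backup_inventory()            # inventory.weave -> inventory.backup.weave
    #   new_inv = repo._temp_inventories()  # writes go to inventory.new.weave
    #   ... populate new_inv ...
    #   repo._activate_new_inventory()      # inventory.new.weave -> inventory.weave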

    def get_commit_builder(self, branch, parents, config, timestamp=None,
                           timezone=None, committer=None, revprops=None,
                           revision_id=None):
        self._check_ascii_revisionid(revision_id, self.get_commit_builder)
        result = CommitBuilder(self, parents, config, timestamp, timezone,
                               committer, revprops, revision_id)
        self.start_write_group()
        return result

    @needs_read_lock
    def get_revisions(self, revision_ids):
        revs = self._get_revisions(revision_ids)
        return revs

    def _inventory_add_lines(self, revision_id, parents, lines,
                             check_content=True):
        """Store lines in inv_vf and return the sha1 of the inventory."""
        present_parents = self.get_graph().get_parent_map(parents)
        final_parents = []
        for parent in parents:
            if parent in present_parents:
                final_parents.append((parent,))
        return self.inventories.add_lines((revision_id,), final_parents, lines,
                                          check_content=check_content)[0]
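
    # Illustration of the filtering above: weave-based formats cannot
    # represent ghosts, so any parent that is not present in the repository
    # is silently dropped from the parent list. Assuming 'rev-A' exists but
    # 'ghost-B' does not:
    #   parents = ['rev-A', 'ghost-B']  ->  final_parents = [('rev-A',)]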

    def is_shared(self):
        """AllInOne repositories cannot be shared."""
        return False

    @needs_write_lock
    def set_make_working_trees(self, new_value):
        """Set the policy flag for making working trees when creating branches.

        This only applies to branches that use this repository.

        The default is 'True'.
        :param new_value: True to restore the default, False to disable making
            working trees.
        """
        raise errors.RepositoryUpgradeRequired(self.user_url)

    def make_working_trees(self):
        """Returns the policy for making working trees on new branches."""
        return True

    def revision_graph_can_have_wrong_parents(self):
        # XXX: This is an old format that we don't support full checking on, so
        # just claim that checking for this inconsistency is not required.
        return False


class WeaveMetaDirRepository(MetaDirVersionedFileRepository):
    """A subclass of MetaDirRepository to set weave specific policy."""

    def __init__(self, _format, a_bzrdir, control_files):
        super(WeaveMetaDirRepository, self).__init__(_format, a_bzrdir, control_files)
        self._serializer = _format._serializer

    @needs_read_lock
    def _all_possible_ids(self):
        """Return all the possible revisions that we could find."""
        if 'evil' in debug.debug_flags:
            trace.mutter_callsite(
                3, "_all_possible_ids scales with size of history.")
        return [key[-1] for key in self.inventories.keys()]

    @needs_read_lock
    def _all_revision_ids(self):
        """Returns a list of all the revision ids in the repository.

        These are in as much topological order as the underlying store can
        present: for weaves ghosts may lead to a lack of correctness until
        the reweave updates the parents list.
        """
        return [key[-1] for key in self.revisions.keys()]

    def _activate_new_inventory(self):
        """Put a replacement inventory.new into use as inventories."""
        # Copy the content across
        t = self.bzrdir._control_files._transport
        t.copy('inventory.new.weave', 'inventory.weave')
        # delete the temp inventory
        t.delete('inventory.new.weave')
        # Check we can parse the new weave properly as a sanity check
        self.inventories.keys()

    def _backup_inventory(self):
        t = self.bzrdir._control_files._transport
        t.copy('inventory.weave', 'inventory.backup.weave')

    def _temp_inventories(self):
        t = self.bzrdir._control_files._transport
        return self._format._get_inventories(t, self, 'inventory.new')

    def get_commit_builder(self, branch, parents, config, timestamp=None,
                           timezone=None, committer=None, revprops=None,
                           revision_id=None):
        self._check_ascii_revisionid(revision_id, self.get_commit_builder)
        result = CommitBuilder(self, parents, config, timestamp, timezone,
                               committer, revprops, revision_id)
        self.start_write_group()
        return result

    @needs_read_lock
    def get_revision(self, revision_id):
        """Return the Revision object for a named revision"""
        r = self.get_revision_reconcile(revision_id)
        return r

    def _inventory_add_lines(self, revision_id, parents, lines,
                             check_content=True):
        """Store lines in inv_vf and return the sha1 of the inventory."""
        present_parents = self.get_graph().get_parent_map(parents)
        final_parents = []
        for parent in parents:
            if parent in present_parents:
                final_parents.append((parent,))
        return self.inventories.add_lines((revision_id,), final_parents, lines,
                                          check_content=check_content)[0]

    def revision_graph_can_have_wrong_parents(self):
        return False


class PreSplitOutRepositoryFormat(RepositoryFormat):
    """Base class for the pre split out repository formats."""

    rich_root_data = False
    supports_tree_reference = False
    supports_ghosts = False
    supports_external_lookups = False
    supports_chks = False
    _fetch_order = 'topological'
    _fetch_reconcile = True
    supports_leaving_lock = False

    def initialize(self, a_bzrdir, shared=False, _internal=False):
        """Create a weave repository."""
        if shared:
            raise errors.IncompatibleFormat(self, a_bzrdir._format)

        if not _internal:
            # always initialized when the bzrdir is.
            return self.open(a_bzrdir, _found=True)

        # Create an empty weave
        sio = StringIO()
        weavefile.write_weave_v5(weave.Weave(), sio)
        empty_weave = sio.getvalue()

        trace.mutter('creating repository in %s.', a_bzrdir.transport.base)

        # FIXME: RBC 20060125 don't peek under the covers
        # NB: no need to escape relative paths that are url safe.
        control_files = lockable_files.LockableFiles(a_bzrdir.transport,
            'branch-lock', lockable_files.TransportLock)
        control_files.create_lock()
        control_files.lock_write()
        transport = a_bzrdir.transport
        try:
            transport.mkdir_multi(['revision-store', 'weaves'],
                mode=a_bzrdir._get_dir_mode())
            transport.put_bytes_non_atomic('inventory.weave', empty_weave,
                mode=a_bzrdir._get_file_mode())
        finally:
            control_files.unlock()
        repository = self.open(a_bzrdir, _found=True)
        self._run_post_repo_init_hooks(repository, a_bzrdir, shared)
        return repository
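
    # After a successful initialize() the bzrdir transport contains, roughly:
    #   branch-lock        (lock file created via TransportLock)
    #   inventory.weave    (the empty weave serialized above)
    #   revision-store/    (empty directory)
    #   weaves/            (empty directory)
    # This only summarises the calls above; it is not an exhaustive listing
    # of what the enclosing all-in-one bzrdir itself writes.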

    def open(self, a_bzrdir, _found=False):
        """See RepositoryFormat.open()."""
        if not _found:
            # we are being called directly and must probe.
            raise NotImplementedError

        repo_transport = a_bzrdir.get_repository_transport(None)
        control_files = a_bzrdir._control_files
        result = AllInOneRepository(_format=self, a_bzrdir=a_bzrdir)
        result.revisions = self._get_revisions(repo_transport, result)
        result.signatures = self._get_signatures(repo_transport, result)
        result.inventories = self._get_inventories(repo_transport, result)
        result.texts = self._get_texts(repo_transport, result)
        result.chk_bytes = None
        return result


class RepositoryFormat4(PreSplitOutRepositoryFormat):
    """Bzr repository format 4.

    This repository format has:
     - flat stores
     - TextStores for texts, inventories,revisions.

    This format is deprecated: it indexes texts using a text id which is
    removed in format 5; initialization and write support for this format
    has been removed.
    """

    supports_funky_characters = False

    _matchingbzrdir = bzrdir.BzrDirFormat4()

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""
        return "Repository format 4"

    def initialize(self, url, shared=False, _internal=False):
        """Format 4 branches cannot be created."""
        raise errors.UninitializableFormat(self)

    def is_supported(self):
        """Format 4 is not supported.

        It is not supported because the model changed from 4 to 5 and the
        conversion logic is expensive - so doing it on the fly was not
        feasible.
        """
        return False

    def _get_inventories(self, repo_transport, repo, name='inventory'):
        # No inventories store written so far.
        return None

    def _get_revisions(self, repo_transport, repo):
        from bzrlib.xml4 import serializer_v4
        return RevisionTextStore(repo_transport.clone('revision-store'),
            serializer_v4, True, versionedfile.PrefixMapper(),
            repo.is_locked, repo.is_write_locked)

    def _get_signatures(self, repo_transport, repo):
        return SignatureTextStore(repo_transport.clone('revision-store'),
            False, versionedfile.PrefixMapper(),
            repo.is_locked, repo.is_write_locked)

    def _get_texts(self, repo_transport, repo):
        return None


class RepositoryFormat5(PreSplitOutRepositoryFormat):
    """Bzr control format 5.

    This repository format has:
     - weaves for file texts and inventory
     - flat stores
     - TextStores for revisions and signatures.
    """

    _versionedfile_class = weave.WeaveFile
    _matchingbzrdir = bzrdir.BzrDirFormat5()
    supports_funky_characters = False

    @property
    def _serializer(self):
        return xml5.serializer_v5

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""
        return "Weave repository format 5"

    def network_name(self):
        """The network name for this format is the control dirs disk label."""
        return self._matchingbzrdir.get_format_string()

    def _get_inventories(self, repo_transport, repo, name='inventory'):
        mapper = versionedfile.ConstantMapper(name)
        return versionedfile.ThunkedVersionedFiles(repo_transport,
            weave.WeaveFile, mapper, repo.is_locked)

    def _get_revisions(self, repo_transport, repo):
        return RevisionTextStore(repo_transport.clone('revision-store'),
            xml5.serializer_v5, False, versionedfile.PrefixMapper(),
            repo.is_locked, repo.is_write_locked)

    def _get_signatures(self, repo_transport, repo):
        return SignatureTextStore(repo_transport.clone('revision-store'),
            False, versionedfile.PrefixMapper(),
            repo.is_locked, repo.is_write_locked)

    def _get_texts(self, repo_transport, repo):
        mapper = versionedfile.PrefixMapper()
        base_transport = repo_transport.clone('weaves')
        return versionedfile.ThunkedVersionedFiles(base_transport,
            weave.WeaveFile, mapper, repo.is_locked)


class RepositoryFormat6(PreSplitOutRepositoryFormat):
    """Bzr control format 6.

    This repository format has:
     - weaves for file texts and inventory
     - hash subdirectory based stores.
     - TextStores for revisions and signatures.
    """

    _versionedfile_class = weave.WeaveFile
    _matchingbzrdir = bzrdir.BzrDirFormat6()
    supports_funky_characters = False

    @property
    def _serializer(self):
        return xml5.serializer_v5

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""
        return "Weave repository format 6"

    def network_name(self):
        """The network name for this format is the control dirs disk label."""
        return self._matchingbzrdir.get_format_string()

    def _get_inventories(self, repo_transport, repo, name='inventory'):
        mapper = versionedfile.ConstantMapper(name)
        return versionedfile.ThunkedVersionedFiles(repo_transport,
            weave.WeaveFile, mapper, repo.is_locked)

    def _get_revisions(self, repo_transport, repo):
        return RevisionTextStore(repo_transport.clone('revision-store'),
            xml5.serializer_v5, False, versionedfile.HashPrefixMapper(),
            repo.is_locked, repo.is_write_locked)

    def _get_signatures(self, repo_transport, repo):
        return SignatureTextStore(repo_transport.clone('revision-store'),
            False, versionedfile.HashPrefixMapper(),
            repo.is_locked, repo.is_write_locked)

    def _get_texts(self, repo_transport, repo):
        mapper = versionedfile.HashPrefixMapper()
        base_transport = repo_transport.clone('weaves')
        return versionedfile.ThunkedVersionedFiles(base_transport,
            weave.WeaveFile, mapper, repo.is_locked)
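
# Layout note (illustrative): format 5 uses PrefixMapper, which keeps the
# per-file weaves and revision texts in flat directories, while formats 6
# and 7 use HashPrefixMapper, which adds a hash-bucket subdirectory level
# (the "hash subdirectory based stores" mentioned in the docstrings above)
# so that large trees do not place every file into a single directory.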


class RepositoryFormat7(MetaDirRepositoryFormat):
    """Bzr repository 7.

    This repository format has:
     - weaves for file texts and inventory
     - hash subdirectory based stores.
     - TextStores for revisions and signatures.
     - a format marker of its own
     - an optional 'shared-storage' flag
     - an optional 'no-working-trees' flag
    """

    _versionedfile_class = weave.WeaveFile
    supports_ghosts = False
    supports_chks = False
    supports_funky_characters = False
    _fetch_order = 'topological'
    _fetch_reconcile = True

    @property
    def _serializer(self):
        return xml5.serializer_v5

    def get_format_string(self):
        """See RepositoryFormat.get_format_string()."""
        return "Bazaar-NG Repository format 7"

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""
        return "Weave repository format 7"

    def _get_inventories(self, repo_transport, repo, name='inventory'):
        mapper = versionedfile.ConstantMapper(name)
        return versionedfile.ThunkedVersionedFiles(repo_transport,
            weave.WeaveFile, mapper, repo.is_locked)

    def _get_revisions(self, repo_transport, repo):
        return RevisionTextStore(repo_transport.clone('revision-store'),
            xml5.serializer_v5, True, versionedfile.HashPrefixMapper(),
            repo.is_locked, repo.is_write_locked)

    def _get_signatures(self, repo_transport, repo):
        return SignatureTextStore(repo_transport.clone('revision-store'),
            True, versionedfile.HashPrefixMapper(),
            repo.is_locked, repo.is_write_locked)

    def _get_texts(self, repo_transport, repo):
        mapper = versionedfile.HashPrefixMapper()
        base_transport = repo_transport.clone('weaves')
        return versionedfile.ThunkedVersionedFiles(base_transport,
            weave.WeaveFile, mapper, repo.is_locked)

    def initialize(self, a_bzrdir, shared=False):
        """Create a weave repository.

        :param shared: If true the repository will be initialized as a shared
                       repository.
        """
        # Create an empty weave
        sio = StringIO()
        weavefile.write_weave_v5(weave.Weave(), sio)
        empty_weave = sio.getvalue()

        trace.mutter('creating repository in %s.', a_bzrdir.transport.base)
        dirs = ['revision-store', 'weaves']
        files = [('inventory.weave', StringIO(empty_weave)),
                 ]
        utf8_files = [('format', self.get_format_string())]

        self._upload_blank_content(a_bzrdir, dirs, files, utf8_files, shared)
        return self.open(a_bzrdir=a_bzrdir, _found=True)

    def open(self, a_bzrdir, _found=False, _override_transport=None):
        """See RepositoryFormat.open().

        :param _override_transport: INTERNAL USE ONLY. Allows opening the
                                    repository at a slightly different url
                                    than normal. I.e. during 'upgrade'.
        """
        if not _found:
            format = RepositoryFormat.find_format(a_bzrdir)
        if _override_transport is not None:
            repo_transport = _override_transport
        else:
            repo_transport = a_bzrdir.get_repository_transport(None)
        control_files = lockable_files.LockableFiles(repo_transport,
            'lock', lockdir.LockDir)
        result = WeaveMetaDirRepository(_format=self, a_bzrdir=a_bzrdir,
            control_files=control_files)
        result.revisions = self._get_revisions(repo_transport, result)
        result.signatures = self._get_signatures(repo_transport, result)
        result.inventories = self._get_inventories(repo_transport, result)
        result.texts = self._get_texts(repo_transport, result)
        result.chk_bytes = None
        result._transport = repo_transport
        return result


class TextVersionedFiles(VersionedFiles):
    """Just-a-bunch-of-files based VersionedFile stores."""

    def __init__(self, transport, compressed, mapper, is_locked, can_write):
        self._compressed = compressed
        self._transport = transport
        self._mapper = mapper
        if self._compressed:
            self._ext = '.gz'
        else:
            self._ext = ''
        self._is_locked = is_locked
        self._can_write = can_write

    def add_lines(self, key, parents, lines):
        """Add a revision to the store."""
        if not self._is_locked():
            raise errors.ObjectNotLocked(self)
        if not self._can_write():
            raise errors.ReadOnlyError(self)
        if '/' in key[-1]:
            raise ValueError('bad idea to put / in %r' % (key,))
        text = ''.join(lines)
        if self._compressed:
            text = tuned_gzip.bytes_to_gzip(text)
        path = self._map(key)
        self._transport.put_bytes_non_atomic(path, text, create_parent_dir=True)
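
    # Illustration of the mapping above, assuming a PrefixMapper and
    # compressed=True: add_lines(('rev-1',), None, ['<revision .../>\n'])
    # joins the lines, gzips the result, and writes the bytes to 'rev-1.gz'
    # on the store's transport; with compressed=False the file is 'rev-1'.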

    def insert_record_stream(self, stream):
        adapters = {}
        for record in stream:
            # Raise an error when a record is missing.
            if record.storage_kind == 'absent':
                raise errors.RevisionNotPresent([record.key[0]], self)
            # adapt to non-tuple interface
            if record.storage_kind == 'fulltext':
                self.add_lines(record.key, None,
                    osutils.split_lines(record.get_bytes_as('fulltext')))
            else:
                adapter_key = record.storage_kind, 'fulltext'
                try:
                    adapter = adapters[adapter_key]
                except KeyError:
                    adapter_factory = adapter_registry.get(adapter_key)
                    adapter = adapter_factory(self)
                    adapters[adapter_key] = adapter
                lines = osutils.split_lines(adapter.get_bytes(
                    record, record.get_bytes_as(record.storage_kind)))
                try:
                    self.add_lines(record.key, None, lines)
                except RevisionAlreadyPresent:
                    pass

    def _load_text(self, key):
        if not self._is_locked():
            raise errors.ObjectNotLocked(self)
        path = self._map(key)
        try:
            text = self._transport.get_bytes(path)
            compressed = self._compressed
        except errors.NoSuchFile:
            if self._compressed:
                # try without the .gz
                path = path[:-3]
                try:
                    text = self._transport.get_bytes(path)
                    compressed = False
                except errors.NoSuchFile:
                    return None
            else:
                return None
        if compressed:
            text = gzip.GzipFile(mode='rb', fileobj=StringIO(text)).read()
        return text

    def _map(self, key):
        return self._mapper.map(key) + self._ext


class RevisionTextStore(TextVersionedFiles):
    """Legacy thunk for format 4 repositories."""

    def __init__(self, transport, serializer, compressed, mapper, is_locked,
        can_write):
        """Create a RevisionTextStore at transport with serializer."""
        TextVersionedFiles.__init__(self, transport, compressed, mapper,
            is_locked, can_write)
        self._serializer = serializer

    def _load_text_parents(self, key):
        text = self._load_text(key)
        if text is None:
            return None, None
        parents = self._serializer.read_revision_from_string(text).parent_ids
        return text, tuple((parent,) for parent in parents)

    def get_parent_map(self, keys):
        result = {}
        for key in keys:
            parents = self._load_text_parents(key)[1]
            if parents is None:
                continue
            result[key] = parents
        return result

    def get_known_graph_ancestry(self, keys):
        """Get a KnownGraph instance with the ancestry of keys."""
        parent_map = self.get_parent_map(keys)
        kg = _mod_graph.KnownGraph(parent_map)
        return kg

    def get_record_stream(self, keys, sort_order, include_delta_closure):
        for key in keys:
            text, parents = self._load_text_parents(key)
            if text is None:
                yield AbsentContentFactory(key)
            else:
                yield FulltextContentFactory(key, parents, None, text)

    def keys(self):
        if not self._is_locked():
            raise errors.ObjectNotLocked(self)
        relpaths = set()
        for quoted_relpath in self._transport.iter_files_recursive():
            relpath = urllib.unquote(quoted_relpath)
            path, ext = os.path.splitext(relpath)
            if ext == '.gz':
                relpath = path
            if not relpath.endswith('.sig'):
                relpaths.add(relpath)
        paths = list(relpaths)
        return set([self._mapper.unmap(path) for path in paths])
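
    # Example (illustrative): with the flat PrefixMapper used by formats 4/5,
    # files named 'rev-1' or 'rev-1.gz' under revision-store/ both surface
    # here as the key ('rev-1',); anything ending in '.sig' is left for
    # SignatureTextStore below.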


class SignatureTextStore(TextVersionedFiles):
    """Legacy thunk for format 4-7 repositories."""

    def __init__(self, transport, compressed, mapper, is_locked, can_write):
        TextVersionedFiles.__init__(self, transport, compressed, mapper,
            is_locked, can_write)
        self._ext = '.sig' + self._ext

    def get_parent_map(self, keys):
        result = {}
        for key in keys:
            text = self._load_text(key)
            if text is None:
                continue
            result[key] = None
        return result

    def get_record_stream(self, keys, sort_order, include_delta_closure):
        for key in keys:
            text = self._load_text(key)
            if text is None:
                yield AbsentContentFactory(key)
            else:
                yield FulltextContentFactory(key, None, None, text)

    def keys(self):
        if not self._is_locked():
            raise errors.ObjectNotLocked(self)
        relpaths = set()
        for quoted_relpath in self._transport.iter_files_recursive():
            relpath = urllib.unquote(quoted_relpath)
            path, ext = os.path.splitext(relpath)
            if ext == '.gz':
                relpath = path
            if not relpath.endswith('.sig'):
                continue
            relpaths.add(relpath[:-4])
        paths = list(relpaths)
        return set([self._mapper.unmap(path) for path in paths])
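
    # Example (illustrative): signatures live next to the revision texts with
    # a '.sig' suffix, e.g. 'rev-1.sig' (or 'rev-1.sig.gz' when compressed),
    # and keys() strips the suffix so the key is again ('rev-1',).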


class InterWeaveRepo(InterSameDataRepository):
    """Optimised code paths between Weave based repositories.
    """

    @classmethod
    def _get_repo_format_to_test(self):
        return RepositoryFormat7()

    @staticmethod
    def is_compatible(source, target):
        """Be compatible with known Weave formats.

        We don't test for the stores being of specific types because that
        could lead to confusing results, and there is no need to be
        overly general.
        """
        try:
            return (isinstance(source._format, (RepositoryFormat5,
                                                RepositoryFormat6,
                                                RepositoryFormat7)) and
                    isinstance(target._format, (RepositoryFormat5,
                                                RepositoryFormat6,
                                                RepositoryFormat7)))
        except AttributeError:
            return False
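
    # Illustration: is_compatible() is what the InterRepository optimiser
    # lookup consults, e.g. (assuming repo5 and repo7 use formats 5 and 7,
    # and knit_repo is some non-weave repository)
    #   InterWeaveRepo.is_compatible(repo5, repo7)      -> True
    #   InterWeaveRepo.is_compatible(repo5, knit_repo)  -> False
    # Objects without a _format attribute are rejected via the
    # AttributeError branch above rather than raising.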

    @needs_write_lock
    def copy_content(self, revision_id=None):
        """See InterRepository.copy_content()."""
        # weave specific optimised path:
        try:
            self.target.set_make_working_trees(self.source.make_working_trees())
        except (errors.RepositoryUpgradeRequired, NotImplemented):
            pass
        # FIXME do not peek!
        if self.source._transport.listable():
            pb = ui.ui_factory.nested_progress_bar()
            try:
                self.target.texts.insert_record_stream(
                    self.source.texts.get_record_stream(
                        self.source.texts.keys(), 'topological', False))
                pb.update('Copying inventory', 0, 1)
                self.target.inventories.insert_record_stream(
                    self.source.inventories.get_record_stream(
                        self.source.inventories.keys(), 'topological', False))
                self.target.signatures.insert_record_stream(
                    self.source.signatures.get_record_stream(
                        self.source.signatures.keys(),
                        'unordered', True))
                self.target.revisions.insert_record_stream(
                    self.source.revisions.get_record_stream(
                        self.source.revisions.keys(),
                        'topological', True))
            finally:
                pb.finished()
        else:
            self.target.fetch(self.source, revision_id=revision_id)

    @needs_read_lock
    def search_missing_revision_ids(self,
            revision_id=symbol_versioning.DEPRECATED_PARAMETER,
            find_ghosts=True, revision_ids=None, if_present_ids=None):
        """See InterRepository.search_missing_revision_ids()."""
        # we want all revisions to satisfy revision_id in source.
        # but we don't want to stat every file here and there.
        # we want then, all revisions other needs to satisfy revision_id
        # checked, but not those that we have locally.
        # so the first thing is to get a subset of the revisions to
        # satisfy revision_id in source, and then eliminate those that
        # we do already have.
        # this is slow on high latency connection to self, but as this
        # disk format scales terribly for push anyway due to rewriting
        # inventory.weave, this is considered acceptable.
        if symbol_versioning.deprecated_passed(revision_id):
            symbol_versioning.warn(
                'search_missing_revision_ids(revision_id=...) was '
                'deprecated in 2.4. Use revision_ids=[...] instead.',
                DeprecationWarning, stacklevel=2)
            if revision_ids is not None:
                raise AssertionError(
                    'revision_ids is mutually exclusive with revision_id')
            if revision_id is not None:
                revision_ids = [revision_id]

        source_ids_set = self._present_source_revisions_for(
            revision_ids, if_present_ids)
        # source_ids is the worst possible case we may need to pull.
        # now we want to filter source_ids against what we actually
        # have in target, but don't try to check for existence where we know
        # we do not have a revision as that would be pointless.
        target_ids = set(self.target._all_possible_ids())
        possibly_present_revisions = target_ids.intersection(source_ids_set)
        actually_present_revisions = set(
            self.target._eliminate_revisions_not_present(possibly_present_revisions))
        required_revisions = source_ids_set.difference(actually_present_revisions)
        if revision_ids is not None:
            # we used get_ancestry to determine source_ids then we are assured all
            # revisions referenced are present as they are installed in topological order.
            # and the tip revision was validated by get_ancestry.
            result_set = required_revisions
        else:
            # if we just grabbed the possibly available ids, then
            # we only have an estimate of whats available and need to validate
            # that against the revision records.
            result_set = set(
                self.source._eliminate_revisions_not_present(required_revisions))
        return self.source.revision_ids_to_search_result(result_set)


InterRepository.register_optimiser(InterWeaveRepo)
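
# Usage sketch (illustrative): once registered above, the optimiser is picked
# up automatically, e.g.
#   inter = InterRepository.get(source_repo, target_repo)
#   inter.copy_content()
# yields an InterWeaveRepo when both repositories use weave formats 5-7, and
# falls back to the generic InterRepository behaviour otherwise.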