# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""Deprecated weave-based repository formats.

Weave based formats scaled linearly with history size and could not represent
ghosts.
"""

from __future__ import absolute_import

import gzip
import os
from cStringIO import StringIO

from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """
import itertools

from bzrlib import (
    xml5,
    graph as _mod_graph,
    ui,
    )
""")
from bzrlib import (
    debug,
    errors,
    lockable_files,
    lockdir,
    osutils,
    symbol_versioning,
    trace,
    tuned_gzip,
    urlutils,
    versionedfile,
    weave,
    weavefile,
    )
from bzrlib.decorators import needs_read_lock, needs_write_lock
from bzrlib.repository import (
    InterRepository,
    RepositoryFormatMetaDir,
    )
from bzrlib.store.text import TextStore
from bzrlib.versionedfile import (
    AbsentContentFactory,
    FulltextContentFactory,
    VersionedFiles,
    adapter_registry,
    )
from bzrlib.vf_repository import (
    InterSameDataRepository,
    VersionedFileCommitBuilder,
    VersionedFileRepository,
    VersionedFileRepositoryFormat,
    MetaDirVersionedFileRepository,
    MetaDirVersionedFileRepositoryFormat,
    )

from bzrlib.plugins.weave_fmt import bzrdir as weave_bzrdir


class AllInOneRepository(VersionedFileRepository):
    """Legacy support - the repository behaviour for all-in-one branches."""

    @property
    def _serializer(self):
        return xml5.serializer_v5

    def _escape(self, file_or_path):
        if not isinstance(file_or_path, basestring):
            file_or_path = '/'.join(file_or_path)
        if file_or_path == '':
            return u''
        return urlutils.escape(osutils.safe_unicode(file_or_path))
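
    # Illustrative note (added, not in the original source): _escape joins a
    # tuple path with '/' and URL-escapes the result, so a hypothetical call
    # such as self._escape(('revision', 'store')) yields 'revision/store' and
    # self._escape('a dir') yields 'a%20dir'.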

    def __init__(self, _format, a_bzrdir):
        # we reuse one control files instance.
        dir_mode = a_bzrdir._get_dir_mode()
        file_mode = a_bzrdir._get_file_mode()

        def get_store(name, compressed=True, prefixed=False):
            # FIXME: This approach of assuming stores are all entirely compressed
            # or entirely uncompressed is tidy, but breaks upgrade from
            # some existing branches where there's a mixture; we probably
            # still want the option to look for both.
            relpath = self._escape(name)
            store = TextStore(a_bzrdir.transport.clone(relpath),
                              prefixed=prefixed, compressed=compressed,
                              dir_mode=dir_mode,
                              file_mode=file_mode)
            return store

        # not broken out yet because the controlweaves|inventory_store
        # and texts bits are still different.
        if isinstance(_format, RepositoryFormat4):
            # cannot remove these - there is still no consistent api
            # which allows access to this old info.
            self.inventory_store = get_store('inventory-store')
            self._text_store = get_store('text-store')
        super(AllInOneRepository, self).__init__(_format, a_bzrdir, a_bzrdir._control_files)

    def _all_possible_ids(self):
        """Return all the possible revisions that we could find."""
        if 'evil' in debug.debug_flags:
            trace.mutter_callsite(
                3, "_all_possible_ids scales with size of history.")
        return [key[-1] for key in self.inventories.keys()]

    def _all_revision_ids(self):
        """Returns a list of all the revision ids in the repository.

        These are in as much topological order as the underlying store can
        present: for weaves ghosts may lead to a lack of correctness until
        the reweave updates the parents list.
        """
        return [key[-1] for key in self.revisions.keys()]

    def _activate_new_inventory(self):
        """Put a replacement inventory.new into use as inventories."""
        # Copy the content across
        t = self.bzrdir._control_files._transport
        t.copy('inventory.new.weave', 'inventory.weave')
        # delete the temp inventory
        t.delete('inventory.new.weave')
        # Check we can parse the new weave properly as a sanity check
        self.inventories.keys()

    def _backup_inventory(self):
        t = self.bzrdir._control_files._transport
        t.copy('inventory.weave', 'inventory.backup.weave')

    def _temp_inventories(self):
        t = self.bzrdir._control_files._transport
        return self._format._get_inventories(t, self, 'inventory.new')
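
    # Sketch (added commentary, not part of the original code): a reconcile-
    # style caller would typically use the three helpers above in sequence,
    # along the lines of:
    #
    #   repo._backup_inventory()            # keep inventory.backup.weave
    #   new_inv = repo._temp_inventories()  # rebuild into inventory.new.weave
    #   ...write repaired inventory texts into new_inv...
    #   repo._activate_new_inventory()      # copy the new weave into place
    #
    # The actual reconcile driver lives elsewhere; this outline only assumes
    # the behaviour described by the docstrings above.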

    def get_commit_builder(self, branch, parents, config, timestamp=None,
                           timezone=None, committer=None, revprops=None,
                           revision_id=None, lossy=False):
        self._check_ascii_revisionid(revision_id, self.get_commit_builder)
        result = VersionedFileCommitBuilder(self, parents, config, timestamp,
            timezone, committer, revprops, revision_id, lossy=lossy)
        self.start_write_group()
        return result

    def get_revisions(self, revision_ids):
        revs = self._get_revisions(revision_ids)
        return revs

    def _inventory_add_lines(self, revision_id, parents, lines,
        check_content=True):
        """Store lines in inv_vf and return the sha1 of the inventory."""
        present_parents = self.get_graph().get_parent_map(parents)
        final_parents = []
        for parent in parents:
            if parent in present_parents:
                final_parents.append((parent,))
        return self.inventories.add_lines((revision_id,), final_parents, lines,
            check_content=check_content)[0]

    def is_shared(self):
        """AllInOne repositories cannot be shared."""
        return False

    def set_make_working_trees(self, new_value):
        """Set the policy for making working trees on new branches.

        :param new_value: True to restore the default, False to disable making
            working trees.
        """
        raise errors.RepositoryUpgradeRequired(self.user_url)

    def make_working_trees(self):
        """Returns the policy for making working trees on new branches."""
        return True


class WeaveMetaDirRepository(MetaDirVersionedFileRepository):
    """A subclass of MetaDirRepository to set weave specific policy."""

    def __init__(self, _format, a_bzrdir, control_files):
        super(WeaveMetaDirRepository, self).__init__(_format, a_bzrdir, control_files)
        self._serializer = _format._serializer

    def _all_possible_ids(self):
        """Return all the possible revisions that we could find."""
        if 'evil' in debug.debug_flags:
            trace.mutter_callsite(
                3, "_all_possible_ids scales with size of history.")
        return [key[-1] for key in self.inventories.keys()]

    def _all_revision_ids(self):
        """Returns a list of all the revision ids in the repository.

        These are in as much topological order as the underlying store can
        present: for weaves ghosts may lead to a lack of correctness until
        the reweave updates the parents list.
        """
        return [key[-1] for key in self.revisions.keys()]

    def _activate_new_inventory(self):
        """Put a replacement inventory.new into use as inventories."""
        # Copy the content across
        t = self.bzrdir._control_files._transport
        t.copy('inventory.new.weave', 'inventory.weave')
        # delete the temp inventory
        t.delete('inventory.new.weave')
        # Check we can parse the new weave properly as a sanity check
        self.inventories.keys()

    def _backup_inventory(self):
        t = self.bzrdir._control_files._transport
        t.copy('inventory.weave', 'inventory.backup.weave')

    def _temp_inventories(self):
        t = self.bzrdir._control_files._transport
        return self._format._get_inventories(t, self, 'inventory.new')

    def get_commit_builder(self, branch, parents, config, timestamp=None,
                           timezone=None, committer=None, revprops=None,
                           revision_id=None, lossy=False):
        self._check_ascii_revisionid(revision_id, self.get_commit_builder)
        result = VersionedFileCommitBuilder(self, parents, config, timestamp,
            timezone, committer, revprops, revision_id, lossy=lossy)
        self.start_write_group()
        return result

    def get_revision(self, revision_id):
        """Return the Revision object for a named revision"""
        r = self.get_revision_reconcile(revision_id)
        return r

    def _inventory_add_lines(self, revision_id, parents, lines,
        check_content=True):
        """Store lines in inv_vf and return the sha1 of the inventory."""
        present_parents = self.get_graph().get_parent_map(parents)
        final_parents = []
        for parent in parents:
            if parent in present_parents:
                final_parents.append((parent,))
        return self.inventories.add_lines((revision_id,), final_parents, lines,
            check_content=check_content)[0]


class PreSplitOutRepositoryFormat(VersionedFileRepositoryFormat):
    """Base class for the pre split out repository formats."""

    rich_root_data = False
    supports_tree_reference = False
    supports_ghosts = False
    supports_external_lookups = False
    supports_chks = False
    supports_nesting_repositories = True
    _fetch_order = 'topological'
    _fetch_reconcile = True
    supports_leaving_lock = False
    # XXX: This is an old format that we don't support full checking on, so
    # just claim that checking for this inconsistency is not required.
    revision_graph_can_have_wrong_parents = False

    def initialize(self, a_bzrdir, shared=False, _internal=False):
        """Create a weave repository."""
        if shared:
            raise errors.IncompatibleFormat(self, a_bzrdir._format)

        if not _internal:
            # always initialized when the bzrdir is.
            return self.open(a_bzrdir, _found=True)

        # Create an empty weave
        sio = StringIO()
        weavefile.write_weave_v5(weave.Weave(), sio)
        empty_weave = sio.getvalue()

        trace.mutter('creating repository in %s.', a_bzrdir.transport.base)
        # FIXME: RBC 20060125 don't peek under the covers
        # NB: no need to escape relative paths that are url safe.
        control_files = lockable_files.LockableFiles(a_bzrdir.transport,
            'branch-lock', lockable_files.TransportLock)
        control_files.create_lock()
        control_files.lock_write()
        transport = a_bzrdir.transport
        try:
            transport.mkdir_multi(['revision-store', 'weaves'],
                mode=a_bzrdir._get_dir_mode())
            transport.put_bytes_non_atomic('inventory.weave', empty_weave,
                mode=a_bzrdir._get_file_mode())
        finally:
            control_files.unlock()
        repository = self.open(a_bzrdir, _found=True)
        self._run_post_repo_init_hooks(repository, a_bzrdir, shared)
        return repository

    def open(self, a_bzrdir, _found=False):
        """See RepositoryFormat.open()."""
        if not _found:
            # we are being called directly and must probe.
            raise NotImplementedError

        repo_transport = a_bzrdir.get_repository_transport(None)
        result = AllInOneRepository(_format=self, a_bzrdir=a_bzrdir)
        result.revisions = self._get_revisions(repo_transport, result)
        result.signatures = self._get_signatures(repo_transport, result)
        result.inventories = self._get_inventories(repo_transport, result)
        result.texts = self._get_texts(repo_transport, result)
        result.chk_bytes = None
        return result


class RepositoryFormat5(PreSplitOutRepositoryFormat):
    """Bzr repository format 5."""

    _versionedfile_class = weave.WeaveFile
    _matchingbzrdir = weave_bzrdir.BzrDirFormat5()
    supports_funky_characters = False

    @property
    def _serializer(self):
        return xml5.serializer_v5

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""
        return "Weave repository format 5"

    def network_name(self):
        """The network name for this format is the control dirs disk label."""
        return self._matchingbzrdir.get_format_string()
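
    # Example (assumption, added for clarity): because the network name is the
    # matching control directory's disk label, a call such as
    #   RepositoryFormat5().network_name()
    # returns the BzrDirFormat5 format string rather than a separate
    # repository-specific identifier.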

    def _get_inventories(self, repo_transport, repo, name='inventory'):
        mapper = versionedfile.ConstantMapper(name)
        return versionedfile.ThunkedVersionedFiles(repo_transport,
            weave.WeaveFile, mapper, repo.is_locked)

    def _get_revisions(self, repo_transport, repo):
        return RevisionTextStore(repo_transport.clone('revision-store'),
            xml5.serializer_v5, False, versionedfile.PrefixMapper(),
            repo.is_locked, repo.is_write_locked)

    def _get_signatures(self, repo_transport, repo):
        return SignatureTextStore(repo_transport.clone('revision-store'),
            False, versionedfile.PrefixMapper(),
            repo.is_locked, repo.is_write_locked)

    def _get_texts(self, repo_transport, repo):
        mapper = versionedfile.PrefixMapper()
        base_transport = repo_transport.clone('weaves')
        return versionedfile.ThunkedVersionedFiles(base_transport,
            weave.WeaveFile, mapper, repo.is_locked)


class RepositoryFormat6(PreSplitOutRepositoryFormat):
    """Bzr repository format 6."""

    _versionedfile_class = weave.WeaveFile
    _matchingbzrdir = weave_bzrdir.BzrDirFormat6()
    supports_funky_characters = False

    @property
    def _serializer(self):
        return xml5.serializer_v5

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""
        return "Weave repository format 6"

    def network_name(self):
        """The network name for this format is the control dirs disk label."""
        return self._matchingbzrdir.get_format_string()

    def _get_inventories(self, repo_transport, repo, name='inventory'):
        mapper = versionedfile.ConstantMapper(name)
        return versionedfile.ThunkedVersionedFiles(repo_transport,
            weave.WeaveFile, mapper, repo.is_locked)

    def _get_revisions(self, repo_transport, repo):
        return RevisionTextStore(repo_transport.clone('revision-store'),
            xml5.serializer_v5, False, versionedfile.HashPrefixMapper(),
            repo.is_locked, repo.is_write_locked)

    def _get_signatures(self, repo_transport, repo):
        return SignatureTextStore(repo_transport.clone('revision-store'),
            False, versionedfile.HashPrefixMapper(),
            repo.is_locked, repo.is_write_locked)

    def _get_texts(self, repo_transport, repo):
        mapper = versionedfile.HashPrefixMapper()
        base_transport = repo_transport.clone('weaves')
        return versionedfile.ThunkedVersionedFiles(base_transport,
            weave.WeaveFile, mapper, repo.is_locked)


class RepositoryFormat7(MetaDirVersionedFileRepositoryFormat):
    """Bzr repository 7.

    This repository format has:
     - weaves for file texts and inventory
     - hash subdirectory based stores.
     - TextStores for revisions and signatures.
     - a format marker of its own
     - an optional 'shared-storage' flag
     - an optional 'no-working-trees' flag
    """

    def initialize(self, a_bzrdir, shared=False):
        """Create a weave repository."""
        # Create an empty weave
        sio = StringIO()
        weavefile.write_weave_v5(weave.Weave(), sio)
        empty_weave = sio.getvalue()
        trace.mutter('creating repository in %s.', a_bzrdir.transport.base)
        dirs = ['revision-store', 'weaves']
        files = [('inventory.weave', StringIO(empty_weave)),
                 ]
        utf8_files = [('format', self.get_format_string())]

        self._upload_blank_content(a_bzrdir, dirs, files, utf8_files, shared)
        return self.open(a_bzrdir=a_bzrdir, _found=True)
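
    # Layout sketch (added commentary, not from the original source): after a
    # successful initialize() the repository transport is expected to contain
    # roughly:
    #
    #   format             - the format marker written from utf8_files
    #   inventory.weave    - the empty weave created above
    #   revision-store/    - revision and signature texts
    #   weaves/            - per-file weaves
    #
    # plus whatever optional flag files _upload_blank_content writes for a
    # shared repository.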

    def open(self, a_bzrdir, _found=False, _override_transport=None):
        """See RepositoryFormat.open().

        :param _override_transport: INTERNAL USE ONLY. Allows opening the
            repository at a slightly different url
            than normal. I.e. during 'upgrade'.
        """
        if not _found:
            # we are being called directly and must probe.
            format = RepositoryFormatMetaDir.find_format(a_bzrdir)
        if _override_transport is not None:
            repo_transport = _override_transport
        else:
            repo_transport = a_bzrdir.get_repository_transport(None)
        control_files = lockable_files.LockableFiles(repo_transport,
                                'lock', lockdir.LockDir)
        result = WeaveMetaDirRepository(_format=self, a_bzrdir=a_bzrdir,
            control_files=control_files)
        result.revisions = self._get_revisions(repo_transport, result)
        result.signatures = self._get_signatures(repo_transport, result)
        result.inventories = self._get_inventories(repo_transport, result)
        result.texts = self._get_texts(repo_transport, result)
        result.chk_bytes = None
        result._transport = repo_transport
        return result

    def is_deprecated(self):
        return True


class TextVersionedFiles(VersionedFiles):
    """Just-a-bunch-of-files based VersionedFile stores."""

    def __init__(self, transport, compressed, mapper, is_locked, can_write):
        self._compressed = compressed
        self._transport = transport
        self._mapper = mapper
        if self._compressed:
            self._ext = '.gz'
        else:
            self._ext = ''
        self._is_locked = is_locked
        self._can_write = can_write

    def add_lines(self, key, parents, lines):
        """Add a revision to the store."""
        if not self._is_locked():
            raise errors.ObjectNotLocked(self)
        if not self._can_write():
            raise errors.ReadOnlyError(self)
        if '/' in key[-1]:
            raise ValueError('bad idea to put / in %r' % (key,))
        text = ''.join(lines)
        if self._compressed:
            text = tuned_gzip.bytes_to_gzip(text)
        path = self._map(key)
        self._transport.put_bytes_non_atomic(path, text, create_parent_dir=True)
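
    # Usage sketch (assumption, for illustration only): a caller holding a
    # write lock adds a text with a tuple key, e.g.
    #   store.add_lines(('rev-id-1',), None, ['line one\n', 'line two\n'])
    # which writes the joined (and optionally gzipped) bytes to the path that
    # self._map(('rev-id-1',)) produces.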

    def insert_record_stream(self, stream):
        adapters = {}
        for record in stream:
            # Raise an error when a record is missing.
            if record.storage_kind == 'absent':
                raise errors.RevisionNotPresent([record.key[0]], self)
            # adapt to non-tuple interface
            if record.storage_kind == 'fulltext':
                self.add_lines(record.key, None,
                    osutils.split_lines(record.get_bytes_as('fulltext')))
            else:
                adapter_key = record.storage_kind, 'fulltext'
                try:
                    adapter = adapters[adapter_key]
                except KeyError:
                    adapter_factory = adapter_registry.get(adapter_key)
                    adapter = adapter_factory(self)
                    adapters[adapter_key] = adapter
                lines = osutils.split_lines(adapter.get_bytes(
                    record, record.get_bytes_as(record.storage_kind)))
                try:
                    self.add_lines(record.key, None, lines)
                except errors.RevisionAlreadyPresent:
                    pass

    def _load_text(self, key):
        if not self._is_locked():
            raise errors.ObjectNotLocked(self)
        path = self._map(key)
        try:
            text = self._transport.get_bytes(path)
            compressed = self._compressed
        except errors.NoSuchFile:
            if self._compressed:
                # try without the .gz
                path = path[:-3]
                try:
                    text = self._transport.get_bytes(path)
                    compressed = False
                except errors.NoSuchFile:
                    return None
            else:
                return None
        if compressed:
            text = gzip.GzipFile(mode='rb', fileobj=StringIO(text)).read()
        return text

    def _map(self, key):
        return self._mapper.map(key) + self._ext


class RevisionTextStore(TextVersionedFiles):
    """Legacy thunk for format 4 repositories."""

    def __init__(self, transport, serializer, compressed, mapper, is_locked,
        can_write):
        """Create a RevisionTextStore at transport with serializer."""
        TextVersionedFiles.__init__(self, transport, compressed, mapper,
            is_locked, can_write)
        self._serializer = serializer

    def _load_text_parents(self, key):
        text = self._load_text(key)
        if text is None:
            return None, None
        parents = self._serializer.read_revision_from_string(text).parent_ids
        return text, tuple((parent,) for parent in parents)

    def get_parent_map(self, keys):
        result = {}
        for key in keys:
            parents = self._load_text_parents(key)[1]
            if parents is None:
                continue
            result[key] = parents
        return result

    def get_known_graph_ancestry(self, keys):
        """Get a KnownGraph instance with the ancestry of keys."""
        parent_map = self.get_parent_map(keys)
        kg = _mod_graph.KnownGraph(parent_map)
        return kg

    def get_record_stream(self, keys, sort_order, include_delta_closure):
        for key in keys:
            text, parents = self._load_text_parents(key)
            if text is None:
                yield AbsentContentFactory(key)
            else:
                yield FulltextContentFactory(key, parents, None, text)

    def keys(self):
        if not self._is_locked():
            raise errors.ObjectNotLocked(self)
        relpaths = set()
        for quoted_relpath in self._transport.iter_files_recursive():
            relpath = urlutils.unquote(quoted_relpath)
            path, ext = os.path.splitext(relpath)
            if ext == '.gz':
                relpath = path
            if not relpath.endswith('.sig'):
                relpaths.add(relpath)
        paths = list(relpaths)
        return set([self._mapper.unmap(path) for path in paths])


class SignatureTextStore(TextVersionedFiles):
    """Legacy thunk for format 4-7 repositories."""

    def __init__(self, transport, compressed, mapper, is_locked, can_write):
        TextVersionedFiles.__init__(self, transport, compressed, mapper,
            is_locked, can_write)
        self._ext = '.sig' + self._ext

    def get_parent_map(self, keys):
        result = {}
        for key in keys:
            text = self._load_text(key)
            if text is None:
                continue
            result[key] = None
        return result

    def get_record_stream(self, keys, sort_order, include_delta_closure):
        for key in keys:
            text = self._load_text(key)
            if text is None:
                yield AbsentContentFactory(key)
            else:
                yield FulltextContentFactory(key, None, None, text)

    def keys(self):
        if not self._is_locked():
            raise errors.ObjectNotLocked(self)
        relpaths = set()
        for quoted_relpath in self._transport.iter_files_recursive():
            relpath = urlutils.unquote(quoted_relpath)
            path, ext = os.path.splitext(relpath)
            if ext == '.gz':
                relpath = path
            if not relpath.endswith('.sig'):
                continue
            relpaths.add(relpath[:-4])
        paths = list(relpaths)
        return set([self._mapper.unmap(path) for path in paths])


class InterWeaveRepo(InterSameDataRepository):
    """Optimised code paths between Weave based repositories."""

    @classmethod
    def _get_repo_format_to_test(self):
        return RepositoryFormat7()

    @staticmethod
    def is_compatible(source, target):
        """Be compatible with known Weave formats.

        We don't test for the stores being of specific types because that
        could lead to confusing results, and there is no need to be
        overly general.
        """
        try:
            return (isinstance(source._format, (RepositoryFormat5,
                                                RepositoryFormat6,
                                                RepositoryFormat7)) and
                    isinstance(target._format, (RepositoryFormat5,
                                                RepositoryFormat6,
                                                RepositoryFormat7)))
        except AttributeError:
            return False

    def copy_content(self, revision_id=None):
        """See InterRepository.copy_content()."""
        # weave specific optimised path:
        try:
            self.target.set_make_working_trees(self.source.make_working_trees())
        except (errors.RepositoryUpgradeRequired, NotImplemented):
            pass
        if self.source._transport.listable():
            pb = ui.ui_factory.nested_progress_bar()
            try:
                self.target.texts.insert_record_stream(
                    self.source.texts.get_record_stream(
                        self.source.texts.keys(), 'topological', False))
                pb.update('Copying inventory', 0, 1)
                self.target.inventories.insert_record_stream(
                    self.source.inventories.get_record_stream(
                        self.source.inventories.keys(), 'topological', False))
                self.target.signatures.insert_record_stream(
                    self.source.signatures.get_record_stream(
                        self.source.signatures.keys(),
                        'unordered', True))
                self.target.revisions.insert_record_stream(
                    self.source.revisions.get_record_stream(
                        self.source.revisions.keys(),
                        'topological', True))
            finally:
                pb.finished()
        else:
            self.target.fetch(self.source, revision_id=revision_id)
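
    # Note (added commentary, not from the original source): copy_content()
    # streams texts, inventories and signatures before revisions so that the
    # data a revision refers to is present before the revision itself; when
    # the source transport is not listable it falls back to a normal fetch().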

    def search_missing_revision_ids(self,
            revision_id=symbol_versioning.DEPRECATED_PARAMETER,
            find_ghosts=True, revision_ids=None, if_present_ids=None,
            limit=None):
        """See InterRepository.search_missing_revision_ids()."""
        # we want all revisions to satisfy revision_id in source.
        # but we don't want to stat every file here and there.
        # we want then, all revisions other needs to satisfy revision_id
        # checked, but not those that we have locally.
        # so the first thing is to get a subset of the revisions to
        # satisfy revision_id in source, and then eliminate those that
        # we do already have.
        # this is slow on high latency connection to self, but as this
        # disk format scales terribly for push anyway due to rewriting
        # inventory.weave, this is considered acceptable.
        if symbol_versioning.deprecated_passed(revision_id):
            symbol_versioning.warn(
                'search_missing_revision_ids(revision_id=...) was '
                'deprecated in 2.4. Use revision_ids=[...] instead.',
                DeprecationWarning, stacklevel=2)
            if revision_ids is not None:
                raise AssertionError(
                    'revision_ids is mutually exclusive with revision_id')
            if revision_id is not None:
                revision_ids = [revision_id]
        source_ids_set = self._present_source_revisions_for(
            revision_ids, if_present_ids)
        # source_ids is the worst possible case we may need to pull.
        # now we want to filter source_ids against what we actually
        # have in target, but don't try to check for existence where we know
        # we do not have a revision as that would be pointless.
        target_ids = set(self.target._all_possible_ids())
        possibly_present_revisions = target_ids.intersection(source_ids_set)
        actually_present_revisions = set(
            self.target._eliminate_revisions_not_present(possibly_present_revisions))
        required_revisions = source_ids_set.difference(actually_present_revisions)
        if revision_ids is not None:
            # we used get_ancestry to determine source_ids then we are assured all
            # revisions referenced are present as they are installed in topological order.
            # and the tip revision was validated by get_ancestry.
            result_set = required_revisions
        else:
            # if we just grabbed the possibly available ids, then
            # we only have an estimate of whats available and need to validate
            # that against the revision records.
            result_set = set(
                self.source._eliminate_revisions_not_present(required_revisions))
        if limit is not None:
            topo_ordered = self.get_graph().iter_topo_order(result_set)
            result_set = set(itertools.islice(topo_ordered, limit))
        return self.source.revision_ids_to_search_result(result_set)


InterRepository.register_optimiser(InterWeaveRepo)


def get_extra_interrepo_test_combinations():
    from bzrlib.repofmt import knitrepo
    return [(InterRepository, RepositoryFormat5(),
        knitrepo.RepositoryFormatKnit3())]
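
# Usage sketch (assumption, not part of the original module): callers do not
# normally use InterWeaveRepo directly. With the optimiser registered above,
# something like
#   inter = InterRepository.get(source_repo, target_repo)
#   inter.copy_content()
# selects InterWeaveRepo automatically whenever both repositories use one of
# the weave formats accepted by is_compatible().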