from __future__ import absolute_import

from cStringIO import StringIO

from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """
from bzrlib import (
    revision as _mod_revision,
    )
""")
from bzrlib.decorators import needs_read_lock, needs_write_lock
from bzrlib.repository import (
    InterRepository,
    RepositoryFormatMetaDir,
    )
from bzrlib.store.text import TextStore
from bzrlib.versionedfile import (
    AbsentContentFactory,
    FulltextContentFactory,
    )
from bzrlib.vf_repository import (
    InterSameDataRepository,
    VersionedFileCommitBuilder,
    VersionedFileRepository,
    VersionedFileRepositoryFormat,
    MetaDirVersionedFileRepository,
    MetaDirVersionedFileRepositoryFormat,
    )

from bzrlib.plugins.weave_fmt import bzrdir as weave_bzrdir


class AllInOneRepository(VersionedFileRepository):
    """Legacy support - the repository behaviour for all-in-one branches."""

    @property
    def _serializer(self):
        return xml5.serializer_v5

    def _escape(self, file_or_path):
        if not isinstance(file_or_path, basestring):
            file_or_path = '/'.join(file_or_path)
        if file_or_path == '':
            return u''
        return urlutils.escape(osutils.safe_unicode(file_or_path))

    def __init__(self, _format, a_bzrdir):
        # we reuse one control files instance.

    def get_commit_builder(self, branch, parents, config, timestamp=None,
                           timezone=None, committer=None, revprops=None,
                           revision_id=None, lossy=False):
        self._check_ascii_revisionid(revision_id, self.get_commit_builder)
        result = VersionedFileCommitBuilder(self, parents, config, timestamp,
            timezone, committer, revprops, revision_id, lossy=lossy)
        self.start_write_group()
        return result
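        # A hedged usage sketch (not taken from this module): callers are
        # expected to hold a write lock and to manage the write group that
        # get_commit_builder() opens, along the lines of:
        #   repo.lock_write()
        #   builder = repo.get_commit_builder(branch, parents, config)
        #   ... record the tree contents through the builder ...
        #   rev_id = builder.commit('commit message')
        #   repo.commit_write_group()
        #   repo.unlock()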

    def set_make_working_trees(self, new_value):
        """Set the policy flag for making working trees when creating branches.

        :param new_value: True to restore the default, False to disable making
            working trees.
        """
        raise errors.RepositoryUpgradeRequired(self.user_url)

    def make_working_trees(self):
        """Returns the policy for making working trees on new branches."""
        return True


class WeaveMetaDirRepository(MetaDirVersionedFileRepository):
    """A subclass of MetaDirRepository to set weave specific policy."""

    def __init__(self, _format, a_bzrdir, control_files):
        super(WeaveMetaDirRepository, self).__init__(_format, a_bzrdir, control_files)
        self._serializer = _format._serializer

    def _all_possible_ids(self):
        """Return all the possible revisions that we could find."""
        if 'evil' in debug.debug_flags:
            trace.mutter_callsite(
                3, "_all_possible_ids scales with size of history.")
        return [key[-1] for key in self.inventories.keys()]

    def _all_revision_ids(self):
        """Returns a list of all the revision ids in the repository.

        These are in as much topological order as the underlying store can
        present: for weaves ghosts may lead to a lack of correctness until
        the reweave updates the parents list.
        """

    def get_commit_builder(self, branch, parents, config, timestamp=None,
                           timezone=None, committer=None, revprops=None,
                           revision_id=None, lossy=False):
        self._check_ascii_revisionid(revision_id, self.get_commit_builder)
        result = VersionedFileCommitBuilder(self, parents, config, timestamp,
            timezone, committer, revprops, revision_id, lossy=lossy)
        self.start_write_group()
        return result

    def _inventory_add_lines(self, revision_id, parents, lines,
                             check_content=True):
        return self.inventories.add_lines((revision_id,), final_parents, lines,
            check_content=check_content)[0]
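        # add_lines() returns a tuple whose first element is the SHA-1 of the
        # stored text, so taking [0] hands the inventory sha1 back to callers.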


class PreSplitOutRepositoryFormat(VersionedFileRepositoryFormat):
    """Base class for the pre split out repository formats."""

    rich_root_data = False
    supports_tree_reference = False
    supports_ghosts = False
    supports_external_lookups = False
    supports_chks = False
    supports_nesting_repositories = True
    _fetch_order = 'topological'
    _fetch_reconcile = True
    supports_leaving_lock = False
    # XXX: This is an old format that we don't support full checking on, so
    # just claim that checking for this inconsistency is not required.
    revision_graph_can_have_wrong_parents = False

    def initialize(self, a_bzrdir, shared=False, _internal=False):
        """Create a weave repository."""


class RepositoryFormat5(PreSplitOutRepositoryFormat):

    _versionedfile_class = weave.WeaveFile
    _matchingbzrdir = weave_bzrdir.BzrDirFormat5()
    supports_funky_characters = False

    @property
    def _serializer(self):
        return xml5.serializer_v5

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""
        return "Weave repository format 5"

    def network_name(self):
        """The network name for this format is the control dirs disk label."""
        return self._matchingbzrdir.get_format_string()

    def _get_inventories(self, repo_transport, repo, name='inventory'):
        mapper = versionedfile.ConstantMapper(name)
        return versionedfile.ThunkedVersionedFiles(repo_transport,
            weave.WeaveFile, mapper, repo.is_locked)

    def _get_revisions(self, repo_transport, repo):
        return RevisionTextStore(repo_transport.clone('revision-store'),
            xml5.serializer_v5, False, versionedfile.PrefixMapper(),
            repo.is_locked, repo.is_write_locked)

    def _get_signatures(self, repo_transport, repo):


class RepositoryFormat6(PreSplitOutRepositoryFormat):

    _versionedfile_class = weave.WeaveFile
    _matchingbzrdir = weave_bzrdir.BzrDirFormat6()
    supports_funky_characters = False

    @property
    def _serializer(self):
        return xml5.serializer_v5

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""
        return "Weave repository format 6"

    def network_name(self):
        """The network name for this format is the control dirs disk label."""
        return self._matchingbzrdir.get_format_string()

    def _get_inventories(self, repo_transport, repo, name='inventory'):
        mapper = versionedfile.ConstantMapper(name)
        return versionedfile.ThunkedVersionedFiles(repo_transport,
            weave.WeaveFile, mapper, repo.is_locked)

    def _get_revisions(self, repo_transport, repo):
        return RevisionTextStore(repo_transport.clone('revision-store'),
            xml5.serializer_v5, False, versionedfile.HashPrefixMapper(),
            repo.is_locked, repo.is_write_locked)

    def _get_signatures(self, repo_transport, repo):


class RepositoryFormat7(MetaDirVersionedFileRepositoryFormat):

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""
        return "Weave repository format 7"

    def _get_inventories(self, repo_transport, repo, name='inventory'):
        mapper = versionedfile.ConstantMapper(name)
        return versionedfile.ThunkedVersionedFiles(repo_transport,
            weave.WeaveFile, mapper, repo.is_locked)

    def _get_revisions(self, repo_transport, repo):
        return RevisionTextStore(repo_transport.clone('revision-store'),
            xml5.serializer_v5, True, versionedfile.HashPrefixMapper(),
            repo.is_locked, repo.is_write_locked)

    def _get_signatures(self, repo_transport, repo):

    def initialize(self, a_bzrdir, shared=False):
        """Create a weave repository."""
        # Create an empty weave
        sio = StringIO()
        weavefile.write_weave_v5(weave.Weave(), sio)
        empty_weave = sio.getvalue()

        trace.mutter('creating repository in %s.', a_bzrdir.transport.base)
        dirs = ['revision-store', 'weaves']
        files = [('inventory.weave', StringIO(empty_weave)),
                 ]
        utf8_files = [('format', self.get_format_string())]

        self._upload_blank_content(a_bzrdir, dirs, files, utf8_files, shared)
        return self.open(a_bzrdir=a_bzrdir, _found=True)

    def open(self, a_bzrdir, _found=False, _override_transport=None):
        """See RepositoryFormat.open().

        :param _override_transport: INTERNAL USE ONLY. Allows opening the
            repository at a slightly different url
            than normal. I.e. during 'upgrade'.
        """
        if not _found:
            format = RepositoryFormatMetaDir.find_format(a_bzrdir)
        if _override_transport is not None:
            repo_transport = _override_transport
        paths = list(relpaths)
        return set([self._mapper.unmap(path) for path in paths])


class InterWeaveRepo(InterSameDataRepository):
    """Optimised code paths between Weave based repositories.
    """

    @classmethod
    def _get_repo_format_to_test(self):
        return RepositoryFormat7()

    @staticmethod
    def is_compatible(source, target):
        """Be compatible with known Weave formats.

        We don't test for the stores being of specific types because that
        could lead to confusing results, and there is no need to be
        overly general.
        """
        try:
            return (isinstance(source._format, (RepositoryFormat5,
                                                RepositoryFormat6,
                                                RepositoryFormat7)) and
                    isinstance(target._format, (RepositoryFormat5,
                                                RepositoryFormat6,
                                                RepositoryFormat7)))
        except AttributeError:
            return False

    @needs_write_lock
    def copy_content(self, revision_id=None):
        """See InterRepository.copy_content()."""
        # weave specific optimised path:
        try:
            self.target.set_make_working_trees(self.source.make_working_trees())
        except (errors.RepositoryUpgradeRequired, NotImplemented):
            pass
        if self.source._transport.listable():
            pb = ui.ui_factory.nested_progress_bar()
            try:
                self.target.texts.insert_record_stream(
                    self.source.texts.get_record_stream(
                        self.source.texts.keys(), 'topological', False))
                pb.update('Copying inventory', 0, 1)
                self.target.inventories.insert_record_stream(
                    self.source.inventories.get_record_stream(
                        self.source.inventories.keys(), 'topological', False))
                self.target.signatures.insert_record_stream(
                    self.source.signatures.get_record_stream(
                        self.source.signatures.keys(),
                        'unordered', True))
                self.target.revisions.insert_record_stream(
                    self.source.revisions.get_record_stream(
                        self.source.revisions.keys(),
                        'topological', True))
            finally:
                pb.finished()
        else:
            self.target.fetch(self.source, revision_id=revision_id)
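        # Design note (hedged): when the source transport can be listed, the
        # records are streamed store-by-store, which avoids computing a
        # revision graph up front; otherwise the code falls back to the
        # generic fetch() path, which works out the missing revisions itself.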

    def search_missing_revision_ids(self,
            revision_id=symbol_versioning.DEPRECATED_PARAMETER,
            find_ghosts=True, revision_ids=None, if_present_ids=None,
            limit=None):
        """See InterRepository.search_missing_revision_ids()."""
        # we want all revisions to satisfy revision_id in source.
        # but we don't want to stat every file here and there.
        # we want all the revisions the other side needs to satisfy
        # revision_id checked, but not those that we have locally.
        # so the first thing is to get a subset of the revisions to
        # satisfy revision_id in source, and then eliminate those that
        # we do already have.
        # this is slow on high latency connection to self, but as this
        # disk format scales terribly for push anyway due to rewriting
        # inventory.weave, this is considered acceptable.
        if symbol_versioning.deprecated_passed(revision_id):
            symbol_versioning.warn(
                'search_missing_revision_ids(revision_id=...) was '
                'deprecated in 2.4. Use revision_ids=[...] instead.',
                DeprecationWarning, stacklevel=2)
            if revision_ids is not None:
                raise AssertionError(
                    'revision_ids is mutually exclusive with revision_id')
            if revision_id is not None:
                revision_ids = [revision_id]
        source_ids_set = self._present_source_revisions_for(
            revision_ids, if_present_ids)
        # source_ids is the worst possible case we may need to pull.
        # now we want to filter source_ids against what we actually
        # have in target, but don't try to check for existence where we know
        # we do not have a revision as that would be pointless.
        target_ids = set(self.target._all_possible_ids())
        possibly_present_revisions = target_ids.intersection(source_ids_set)
        actually_present_revisions = set(
            self.target._eliminate_revisions_not_present(possibly_present_revisions))
        required_revisions = source_ids_set.difference(actually_present_revisions)
        if revision_ids is not None:
            # we used get_ancestry to determine source_ids then we are assured all
            # revisions referenced are present as they are installed in topological order.
            # and the tip revision was validated by get_ancestry.
            result_set = required_revisions
        else:
            # if we just grabbed the possibly available ids, then
            # we only have an estimate of what's available and need to validate
            # that against the revision records.
            result_set = set(
                self.source._eliminate_revisions_not_present(required_revisions))
        if limit is not None:
            topo_ordered = self.get_graph().iter_topo_order(result_set)
            result_set = set(itertools.islice(topo_ordered, limit))
        return self.source.revision_ids_to_search_result(result_set)
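        # Worked example (hedged, hypothetical ids): if the source holds
        # {'r1', 'r2', 'r3'} for the requested tips and the target can
        # possibly contain {'r1', 'r4'}, then possibly_present_revisions is
        # {'r1'}; if 'r1' really is present in the target, required_revisions
        # comes out as {'r2', 'r3'}, which (after the optional validation and
        # limit above) is what gets wrapped into the search result.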


InterRepository.register_optimiser(InterWeaveRepo)


def get_extra_interrepo_test_combinations():
    from bzrlib.repofmt import knitrepo
    return [(InterRepository, RepositoryFormat5(),
             knitrepo.RepositoryFormatKnit3())]
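# Usage sketch (hedged; illustrative only, not part of this module): once the
# optimiser is registered, InterRepository.get() hands back an InterWeaveRepo
# whenever both ends are weave-format repositories, e.g.:
#   inter = InterRepository.get(source_repo, target_repo)
#   missing = inter.search_missing_revision_ids(revision_ids=['some-rev-id'])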