import itertools

from cStringIO import StringIO

from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """
from bzrlib import (
    revision as _mod_revision,
    )
""")
from bzrlib import (
    debug,
    errors,
    osutils,
    symbol_versioning,
    trace,
    ui,
    urlutils,
    versionedfile,
    weave,
    weavefile,
    xml5,
    )
from bzrlib.decorators import needs_read_lock, needs_write_lock
from bzrlib.repository import (
    InterRepository,
    )
from bzrlib.store.text import TextStore
from bzrlib.tuned_gzip import GzipFile, bytes_to_gzip
from bzrlib.versionedfile import (
    AbsentContentFactory,
    FulltextContentFactory,
    )
from bzrlib.vf_repository import (
    InterSameDataRepository,
    VersionedFileCommitBuilder,
    VersionedFileRepository,
    VersionedFileRepositoryFormat,
    MetaDirVersionedFileRepository,
    MetaDirVersionedFileRepositoryFormat,
    )

from bzrlib.plugins.weave_fmt import bzrdir as weave_bzrdir
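
# Note: the lazy_import() call above defers the listed imports until their
# first use, so merely loading this plugin stays cheap.  Roughly, it behaves
# as if ``from bzrlib import revision as _mod_revision`` only ran on first
# attribute access (illustrative description of bzrlib.lazy_import, not
# load-bearing code).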


class AllInOneRepository(VersionedFileRepository):
    """Legacy support - the repository behaviour for all-in-one branches."""

    @property
    def _serializer(self):
        return xml5.serializer_v5

    def _escape(self, file_or_path):
        if not isinstance(file_or_path, basestring):
            file_or_path = '/'.join(file_or_path)
        if file_or_path == '':
            return u''
        return urlutils.escape(osutils.safe_unicode(file_or_path))
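
    # Illustration: _escape joins tuple paths with '/' and percent-encodes
    # unsafe characters, so (assuming urlutils.escape quotes like urllib)
    #   self._escape(('inventory store',))  should give  'inventory%20store'
    #   self._escape('weaves')              should give  'weaves'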

    def __init__(self, _format, a_bzrdir):
        # we reuse one control files instance.
        # not broken out yet because the controlweaves|inventory_store
        # and texts bits are still different.
        if isinstance(_format, RepositoryFormat4):
            # cannot remove these - there is still no consistent api
            # which allows access to this old info.
            self.inventory_store = get_store('inventory-store')
            self._text_store = get_store('text-store')
        super(AllInOneRepository, self).__init__(_format, a_bzrdir, a_bzrdir._control_files)

    def _all_possible_ids(self):
        """Return all the possible revisions that we could find."""
        if 'evil' in debug.debug_flags:
            trace.mutter_callsite(
                3, "_all_possible_ids scales with size of history.")
        return [key[-1] for key in self.inventories.keys()]
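
    # The inventories VersionedFiles is keyed by one-element tuples such as
    # ('some-revision-id',); key[-1] above unwraps that tuple back into the
    # plain revision id string.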

    def _all_revision_ids(self):
        """Returns a list of all the revision ids in the repository.

        These are in as much topological order as the underlying store can
        present: for weaves ghosts may lead to a lack of correctness until
        the reweave updates the parents list.
        """

    def get_commit_builder(self, branch, parents, config, timestamp=None,
                           timezone=None, committer=None, revprops=None,
                           revision_id=None, lossy=False):
        self._check_ascii_revisionid(revision_id, self.get_commit_builder)
        result = VersionedFileCommitBuilder(self, parents, config, timestamp,
            timezone, committer, revprops, revision_id, lossy=lossy)
        self.start_write_group()
        return result
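
    # Rough usage sketch (illustrative only; the branch/parents/config objects
    # are placeholders, not defined in this module):
    #
    #   repo.lock_write()
    #   builder = repo.get_commit_builder(branch, parents, config)
    #   ... record the tree contents against builder ...
    #   rev_id = builder.commit('message')
    #   repo.commit_write_group()
    #   repo.unlock()
    #
    # get_commit_builder() starts a write group itself, which is why callers
    # must already hold a write lock when they call it.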

        :param new_value: True to restore the default, False to disable making
            working trees.
        """
        raise errors.RepositoryUpgradeRequired(self.user_url)

    def make_working_trees(self):
        """Returns the policy for making working trees on new branches."""


class WeaveMetaDirRepository(MetaDirVersionedFileRepository):
    """A subclass of MetaDirRepository to set weave specific policy."""

    def __init__(self, _format, a_bzrdir, control_files):
        super(WeaveMetaDirRepository, self).__init__(_format, a_bzrdir, control_files)
        self._serializer = _format._serializer

    def _all_possible_ids(self):
        """Return all the possible revisions that we could find."""
        if 'evil' in debug.debug_flags:
            trace.mutter_callsite(
                3, "_all_possible_ids scales with size of history.")
        return [key[-1] for key in self.inventories.keys()]

    def _all_revision_ids(self):
        """Returns a list of all the revision ids in the repository.

        These are in as much topological order as the underlying store can
        present: for weaves ghosts may lead to a lack of correctness until
        the reweave updates the parents list.
        """

    def get_commit_builder(self, branch, parents, config, timestamp=None,
                           timezone=None, committer=None, revprops=None,
                           revision_id=None, lossy=False):
        self._check_ascii_revisionid(revision_id, self.get_commit_builder)
        result = VersionedFileCommitBuilder(self, parents, config, timestamp,
            timezone, committer, revprops, revision_id, lossy=lossy)
        self.start_write_group()
        return result

        return self.inventories.add_lines((revision_id,), final_parents, lines,
            check_content=check_content)[0]


class PreSplitOutRepositoryFormat(VersionedFileRepositoryFormat):
    """Base class for the pre split out repository formats."""

    rich_root_data = False
    supports_tree_reference = False
    supports_ghosts = False
    supports_external_lookups = False
    supports_chks = False
    supports_nesting_repositories = True
    _fetch_order = 'topological'
    _fetch_reconcile = True
    supports_leaving_lock = False
    # XXX: This is an old format that we don't support full checking on, so
    # just claim that checking for this inconsistency is not required.
    revision_graph_can_have_wrong_parents = False
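
    # These class-level flags advertise what the pre-split-out formats can do;
    # generic repository code checks them rather than the concrete format
    # class.  For instance, supports_ghosts is False because weaves cannot
    # represent ghost ancestors, and _fetch_reconcile = True signals that a
    # fetch into these formats needs a reconcile pass so the weave parent
    # lists are rebuilt (see the _all_revision_ids docstrings above).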

    def initialize(self, a_bzrdir, shared=False, _internal=False):
        """Create a weave repository."""


class RepositoryFormat5(PreSplitOutRepositoryFormat):

    _versionedfile_class = weave.WeaveFile
    _matchingbzrdir = weave_bzrdir.BzrDirFormat5()
    supports_funky_characters = False

    @property
    def _serializer(self):
        return xml5.serializer_v5

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""
        return "Weave repository format 5"

    def network_name(self):
        """The network name for this format is the control dirs disk label."""
        return self._matchingbzrdir.get_format_string()
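
    # There is no separate repository format marker on disk for these
    # all-in-one formats, so the network name reuses the containing BzrDir's
    # format string -- the label these old layouts keep in .bzr/branch-format.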

    def _get_inventories(self, repo_transport, repo, name='inventory'):
        mapper = versionedfile.ConstantMapper(name)
        return versionedfile.ThunkedVersionedFiles(repo_transport,
            weave.WeaveFile, mapper, repo.is_locked)

    def _get_revisions(self, repo_transport, repo):
        return RevisionTextStore(repo_transport.clone('revision-store'),
            xml5.serializer_v5, False, versionedfile.PrefixMapper(),
            repo.is_locked, repo.is_write_locked)

    def _get_signatures(self, repo_transport, repo):


class RepositoryFormat6(PreSplitOutRepositoryFormat):

    _versionedfile_class = weave.WeaveFile
    _matchingbzrdir = weave_bzrdir.BzrDirFormat6()
    supports_funky_characters = False

    @property
    def _serializer(self):
        return xml5.serializer_v5

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""
        return "Weave repository format 6"

    def network_name(self):
        """The network name for this format is the control dirs disk label."""
        return self._matchingbzrdir.get_format_string()

    def _get_inventories(self, repo_transport, repo, name='inventory'):
        mapper = versionedfile.ConstantMapper(name)
        return versionedfile.ThunkedVersionedFiles(repo_transport,
            weave.WeaveFile, mapper, repo.is_locked)

    def _get_revisions(self, repo_transport, repo):
        return RevisionTextStore(repo_transport.clone('revision-store'),
            xml5.serializer_v5, False, versionedfile.HashPrefixMapper(),
            repo.is_locked, repo.is_write_locked)

    def _get_signatures(self, repo_transport, repo):

        """See RepositoryFormat.get_format_description()."""
        return "Weave repository format 7"

    def _get_inventories(self, repo_transport, repo, name='inventory'):
        mapper = versionedfile.ConstantMapper(name)
        return versionedfile.ThunkedVersionedFiles(repo_transport,
            weave.WeaveFile, mapper, repo.is_locked)

    def _get_revisions(self, repo_transport, repo):
        return RevisionTextStore(repo_transport.clone('revision-store'),
            xml5.serializer_v5, True, versionedfile.HashPrefixMapper(),
            repo.is_locked, repo.is_write_locked)
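
    # Comparing the three weave formats as wired up above: format 5 lays
    # revision texts out with a flat PrefixMapper and no compression, format 6
    # shards them into hashed subdirectories via HashPrefixMapper, and format
    # 7 additionally passes True for the flag that, by all appearances in this
    # module, turns on gzip compression of the stored texts.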

    def _get_signatures(self, repo_transport, repo):

        weavefile.write_weave_v5(weave.Weave(), sio)
        empty_weave = sio.getvalue()

        trace.mutter('creating repository in %s.', a_bzrdir.transport.base)
        dirs = ['revision-store', 'weaves']
        files = [('inventory.weave', StringIO(empty_weave)),
                 ]
        utf8_files = [('format', self.get_format_string())]

        self._upload_blank_content(a_bzrdir, dirs, files, utf8_files, shared)
        return self.open(a_bzrdir=a_bzrdir, _found=True)

    def open(self, a_bzrdir, _found=False, _override_transport=None):
        """See RepositoryFormat.open().

        :param _override_transport: INTERNAL USE ONLY. Allows opening the
            repository at a slightly different url than normal. I.e. during
            'upgrade'.
        """

        paths = list(relpaths)
        return set([self._mapper.unmap(path) for path in paths])


class InterWeaveRepo(InterSameDataRepository):
    """Optimised code paths between Weave based repositories.
    """

    @classmethod
    def _get_repo_format_to_test(self):
        return RepositoryFormat7()

    @staticmethod
    def is_compatible(source, target):
        """Be compatible with known Weave formats.

        We don't test for the stores being of specific types because that
        could lead to confusing results, and there is no need to be
        overly general.
        """
        try:
            return (isinstance(source._format, (RepositoryFormat5,
                                                RepositoryFormat6,
                                                RepositoryFormat7)) and
                    isinstance(target._format, (RepositoryFormat5,
                                                RepositoryFormat6,
                                                RepositoryFormat7)))
        except AttributeError:
            return False
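
    # is_compatible() is consulted when an InterRepository optimiser is being
    # chosen.  A caller picks this class up with something like the following
    # (illustrative only; source and target are assumed to be two already
    # opened weave repositories):
    #
    #   inter = InterRepository.get(source, target)
    #   # -> an InterWeaveRepo instance when both formats are 5/6/7
    #   inter.fetch(revision_id=None)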

    def copy_content(self, revision_id=None):
        """See InterRepository.copy_content()."""
        # weave specific optimised path:
        try:
            self.target.set_make_working_trees(self.source.make_working_trees())
        except (errors.RepositoryUpgradeRequired, NotImplementedError):
            pass
        if self.source._transport.listable():
            pb = ui.ui_factory.nested_progress_bar()
            try:
                self.target.texts.insert_record_stream(
                    self.source.texts.get_record_stream(
                        self.source.texts.keys(), 'topological', False))
                pb.update('Copying inventory', 0, 1)
                self.target.inventories.insert_record_stream(
                    self.source.inventories.get_record_stream(
                        self.source.inventories.keys(), 'topological', False))
                self.target.signatures.insert_record_stream(
                    self.source.signatures.get_record_stream(
                        self.source.signatures.keys(),
                        'unordered', True))
                self.target.revisions.insert_record_stream(
                    self.source.revisions.get_record_stream(
                        self.source.revisions.keys(),
                        'topological', True))
            finally:
                pb.finished()
        else:
            self.target.fetch(self.source, revision_id=revision_id)
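
    # The record-stream copy above moves texts, inventories, signatures and
    # revisions wholesale: get_record_stream(keys, ordering, ...) yields
    # ContentFactory objects (see the AbsentContentFactory and
    # FulltextContentFactory imports above) that insert_record_stream()
    # writes straight into the target's stores, so this weave-to-weave path
    # avoids any per-revision conversion.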

    def search_missing_revision_ids(self,
            revision_id=symbol_versioning.DEPRECATED_PARAMETER,
            find_ghosts=True, revision_ids=None, if_present_ids=None,
            limit=None):
        """See InterRepository.search_missing_revision_ids()."""
        # we want all revisions to satisfy revision_id in source.
        # but we don't want to stat every file here and there.
        # we then want all revisions that the other side needs to satisfy
        # revision_id checked, but not those that we have locally.
        # so the first thing is to get a subset of the revisions to
        # satisfy revision_id in source, and then eliminate those that
        # we do already have.
        # this is slow on high latency connection to self, but as this
        # disk format scales terribly for push anyway due to rewriting
        # inventory.weave, this is considered acceptable.
        if symbol_versioning.deprecated_passed(revision_id):
            symbol_versioning.warn(
                'search_missing_revision_ids(revision_id=...) was '
                'deprecated in 2.4. Use revision_ids=[...] instead.',
                DeprecationWarning, stacklevel=2)
            if revision_ids is not None:
                raise AssertionError(
                    'revision_ids is mutually exclusive with revision_id')
            if revision_id is not None:
                revision_ids = [revision_id]
        source_ids_set = self._present_source_revisions_for(
            revision_ids, if_present_ids)
        # source_ids is the worst possible case we may need to pull.
        # now we want to filter source_ids against what we actually
        # have in target, but don't try to check for existence where we know
        # we do not have a revision as that would be pointless.
        target_ids = set(self.target._all_possible_ids())
        possibly_present_revisions = target_ids.intersection(source_ids_set)
        actually_present_revisions = set(
            self.target._eliminate_revisions_not_present(possibly_present_revisions))
        required_revisions = source_ids_set.difference(actually_present_revisions)
        if revision_ids is not None:
            # if we used get_ancestry to determine source_ids then we are
            # assured all revisions referenced are present as they are
            # installed in topological order, and the tip revision was
            # validated by get_ancestry.
            result_set = required_revisions
        else:
            # if we just grabbed the possibly available ids, then
            # we only have an estimate of what's available and need to
            # validate that against the revision records.
            result_set = set(
                self.source._eliminate_revisions_not_present(required_revisions))
        if limit is not None:
            topo_ordered = self.get_graph().iter_topo_order(result_set)
            result_set = set(itertools.islice(topo_ordered, limit))
        return self.source.revision_ids_to_search_result(result_set)
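
    # The SearchResult returned by revision_ids_to_search_result() is what the
    # generic fetch machinery consumes: it names exactly the revisions the
    # target is missing, so a subsequent fetch only has to copy that set.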


InterRepository.register_optimiser(InterWeaveRepo)


def get_extra_interrepo_test_combinations():
    from bzrlib.repofmt import knitrepo
    return [(InterRepository, RepositoryFormat5(),
             knitrepo.RepositoryFormatKnit3())]


_legacy_formats = [RepositoryFormat4(),
                   RepositoryFormat5(),
                   RepositoryFormat6()]