            self._validate_unicode_text(value,
                                        'revision property (%s)' % (key,))
    def _ensure_fallback_inventories(self):
        """Ensure that appropriate inventories are available.

        This only applies to repositories that are stacked, and is about
        ensuring the stacking invariants. Namely, that for any revision that is
        present, we either have all of the file content, or we have the parent
        inventory and the delta file content.
        """
        if not self.repository._fallback_repositories:
            return
        if not self.repository._format.supports_chks:
            raise errors.BzrError("Cannot commit directly to a stacked branch"
                " in pre-2a formats. See "
                "https://bugs.launchpad.net/bzr/+bug/375013 for details.")
        # This is a stacked repo, we need to make sure we have the parent
        # inventories for the parents.
        parent_keys = [(p,) for p in self.parents]
        parent_map = self.repository.inventories._index.get_parent_map(parent_keys)
        missing_parent_keys = set([pk for pk in parent_keys
                                   if pk not in parent_map])
        fallback_repos = list(reversed(self.repository._fallback_repositories))
        missing_keys = [('inventories', pk[0])
                        for pk in missing_parent_keys]
        while missing_keys and fallback_repos:
            fallback_repo = fallback_repos.pop()
            source = fallback_repo._get_source(self.repository._format)
            sink = self.repository._get_sink()
            stream = source.get_stream_for_missing_keys(missing_keys)
            missing_keys = sink.insert_stream_without_locking(stream,
                self.repository._format)
        if missing_keys:
            raise errors.BzrError('Unable to fill in parent inventories for a'
                ' stacked branch')
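    # Illustrative sketch (not from bzrlib): the loop above drains missing
    # inventory keys across the stacked-on repositories. The revision ids are
    # made up.
    #
    #     missing_keys = [('inventories', 'rev-a'), ('inventories', 'rev-b')]
    #     # after consulting the first fallback, perhaps only 'rev-b' remains:
    #     missing_keys = set([('inventories', 'rev-b')])
    #
    # Each fallback can only shrink the set; anything still missing once the
    # fallbacks are exhausted violates the stacking invariant and aborts the
    # commit.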
    def commit(self, message):
        """Make the actual commit.

        :return: The revision id of the recorded revision.
        """
    @needs_read_lock
    def search_missing_revision_ids(self, other,
            revision_id=symbol_versioning.DEPRECATED_PARAMETER,
            find_ghosts=True, revision_ids=None, if_present_ids=None):
        """Return the revision ids that other has that this does not.

        These are returned in topological order.

        revision_id: only return revision ids included by revision_id.
        """
        if symbol_versioning.deprecated_passed(revision_id):
            symbol_versioning.warn(
                'search_missing_revision_ids(revision_id=...) was '
                'deprecated in 2.4. Use revision_ids=[...] instead.',
                DeprecationWarning, stacklevel=3)
            if revision_ids is not None:
                raise AssertionError(
                    'revision_ids is mutually exclusive with revision_id')
            if revision_id is not None:
                revision_ids = [revision_id]
        return InterRepository.get(other, self).search_missing_revision_ids(
            find_ghosts=find_ghosts, revision_ids=revision_ids,
            if_present_ids=if_present_ids)
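    # Usage sketch (illustrative, not part of bzrlib): with the 2.4 API a
    # caller asks for the revisions 'other_repo' has that 'repo' lacks by
    # passing revision_ids/if_present_ids rather than the deprecated
    # revision_id keyword. 'repo', 'other_repo' and the revision ids are
    # assumed to come from elsewhere.
    #
    #     result = repo.search_missing_revision_ids(
    #         other_repo,
    #         revision_ids=['rev-required'],      # NoSuchRevision if absent
    #         if_present_ids=['rev-optional'],    # silently dropped if absent
    #         find_ghosts=False)
    #     missing_revs = result.get_keys()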
    @staticmethod
    def open(base):
class RepositoryFormatRegistry(registry.FormatRegistry):
    """Repository format registry."""

    def __init__(self, other_registry=None):
        super(RepositoryFormatRegistry, self).__init__(other_registry)
        self._extra_formats = []

    def register(self, format):
        """Register a new repository format."""
        super(RepositoryFormatRegistry, self).register(
            format.get_format_string(), format)

    def remove(self, format):
        """Remove a registered repository format."""
        super(RepositoryFormatRegistry, self).remove(
            format.get_format_string())

    def register_extra(self, format):
        """Register a repository format that can not be used in a metadir.

        This is mainly useful to allow custom repository formats, such as older
        Bazaar formats and foreign formats, to be tested.
        """
        self._extra_formats.append(registry._ObjectGetter(format))

    def remove_extra(self, format):
        """Remove an extra repository format.
        """
        self._extra_formats.remove(registry._ObjectGetter(format))

    def register_extra_lazy(self, module_name, member_name):
        """Register a repository format lazily.
        """
        self._extra_formats.append(
            registry._LazyObjectGetter(module_name, member_name))

    def get_default(self):
        """Return the current default format."""
        from bzrlib import bzrdir
        return bzrdir.format_registry.make_bzrdir('default').repository_format

    def _get_extra(self):
        result = []
        for getter in self._extra_formats:
            f = getter.get_obj()
            if callable(f):
                f = f()
            result.append(f)
        return result

    def _get_all(self):
        """Return all repository formats, even those not usable in metadirs.
        """
        return [self.get(k) for k in self.keys()] + self._get_extra()
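# Usage sketch for RepositoryFormatRegistry above (illustrative, not part of
# bzrlib): normal formats are keyed by their on-disk format string, while
# register_extra()/register_extra_lazy() hold formats that cannot live in a
# metadir, for example older or foreign formats exercised by the test suite.
# 'MyFormat' and 'myplugin.repofmt' are hypothetical names.
#
#     my_registry = RepositoryFormatRegistry()
#     my_registry.register(MyFormat())    # keyed by MyFormat.get_format_string()
#     my_registry.register_extra_lazy('myplugin.repofmt', 'OldWeaveFormat')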
network_format_registry = registry.FormatRegistry()
"""Registry of formats indexed by their network name.

The network name for a repository format is an identifier that can be used when
referring to formats with smart server operations. See
RepositoryFormat.network_name() for more detail.
"""
    'bzrlib.repofmt.pack_repo',
    'RepositoryFormatKnitPack6RichRoot',
    )
format_registry.register_lazy(
    'Bazaar repository format 2a (needs bzr 1.16 or later)\n',
    'bzrlib.repofmt.groupcompress_repo',
    'RepositoryFormat2a',
    )

# Development formats.
# Check their docstrings to see if/when they are obsolete.
format_registry.register_lazy(
    ("Bazaar development format 2 with subtree support "
        "(needs bzr.dev from before 1.8)\n"),
    'bzrlib.repofmt.pack_repo',
    'RepositoryFormatPackDevelopment2Subtree',
    )
format_registry.register_lazy(
    'Bazaar development format 8\n',
    'bzrlib.repofmt.groupcompress_repo',
    'RepositoryFormat2aSubtree',
    )
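# Lookup sketch (illustrative, not part of bzrlib): the registrations above are
# lazy, so asking for a format by its format string is what triggers the import
# of the implementing module.
#
#     fmt = format_registry.get(
#         'Bazaar repository format 2a (needs bzr 1.16 or later)\n')
#     # fmt should now be a RepositoryFormat2a instance, since
#     # registry.FormatRegistry.get() instantiates lazily registered classes.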
        return searcher.get_result()
    @needs_read_lock
    def search_missing_revision_ids(self,
            revision_id=symbol_versioning.DEPRECATED_PARAMETER,
            find_ghosts=True, revision_ids=None, if_present_ids=None):
        """Return the revision ids that source has that target does not.

        :param revision_id: only return revision ids included by this
            revision_id.
        :param revision_ids: return revision ids included by these
            revision_ids. NoSuchRevision will be raised if any of these
            revisions are not present.
        :param if_present_ids: like revision_ids, but will not cause
            NoSuchRevision if any of these are absent, instead they will simply
            not be in the result. This is useful for e.g. finding revisions
            to fetch for tags, which may reference absent revisions.
        :param find_ghosts: If True find missing revisions in deep history
            rather than just finding the surface difference.
        :return: A bzrlib.graph.SearchResult.
        """
        if symbol_versioning.deprecated_passed(revision_id):
            symbol_versioning.warn(
                'search_missing_revision_ids(revision_id=...) was '
                'deprecated in 2.4. Use revision_ids=[...] instead.',
                DeprecationWarning, stacklevel=2)
            if revision_ids is not None:
                raise AssertionError(
                    'revision_ids is mutually exclusive with revision_id')
            if revision_id is not None:
                revision_ids = [revision_id]
        # stop searching at found target revisions.
        if not find_ghosts and (revision_ids is not None or
                if_present_ids is not None):
            return self._walk_to_common_revisions(revision_ids,
                    if_present_ids=if_present_ids)
        # generic, possibly worst case, slow code path.
        target_ids = set(self.target.all_revision_ids())
        source_ids = self._present_source_revisions_for(
            revision_ids, if_present_ids)
        result_set = set(source_ids).difference(target_ids)
        return self.source.revision_ids_to_search_result(result_set)
    def _present_source_revisions_for(self, revision_ids, if_present_ids=None):
        """Returns set of all revisions in ancestry of revision_ids present in
        the source repo.

        :param revision_ids: if None, all revisions in source are returned.
        :param if_present_ids: like revision_ids, but if any/all of these are
            absent no error is raised.
        """
        if revision_ids is not None or if_present_ids is not None:
            # First, ensure all specified revisions exist. Callers expect
            # NoSuchRevision when they pass absent revision_ids here.
            if revision_ids is None:
                revision_ids = set()
            if if_present_ids is None:
                if_present_ids = set()
            revision_ids = set(revision_ids)
            if_present_ids = set(if_present_ids)
            all_wanted_ids = revision_ids.union(if_present_ids)
            graph = self.source.get_graph()
            present_revs = set(graph.get_parent_map(all_wanted_ids))
            missing = revision_ids.difference(present_revs)
            if missing:
                raise errors.NoSuchRevision(self.source, missing.pop())
            found_ids = all_wanted_ids.intersection(present_revs)
            source_ids = [rev_id for (rev_id, parents) in
                          graph.iter_ancestry(found_ids)
                          if rev_id != _mod_revision.NULL_REVISION
                          and parents is not None]
        else:
            source_ids = self.source.all_revision_ids()
        return set(source_ids)
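    # Illustrative sketch (not from bzrlib): the presence check above relies on
    # Graph.get_parent_map() returning entries only for revisions that actually
    # exist, so plain set arithmetic separates the two kinds of argument. The
    # revision ids below are made up, and 'graph' stands for
    # self.source.get_graph().
    #
    #     revision_ids   = set(['rev-a'])        # must exist, else NoSuchRevision
    #     if_present_ids = set(['rev-ghost'])    # allowed to be absent
    #     present = set(graph.get_parent_map(revision_ids | if_present_ids))
    #     # say present == set(['rev-a'])
    #     missing = revision_ids - present       # empty, so no error is raised
    #     found = (revision_ids | if_present_ids) & present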
    def _same_model(source, target):
        """True if source and target have the same data representation.

        return InterRepository._same_model(source, target)
class InterWeaveRepo(InterSameDataRepository):
    """Optimised code paths between Weave based repositories.

    This should be in bzrlib/repofmt/weaverepo.py but we have not yet
    implemented lazy inter-object optimisation.
    """

    @classmethod
    def _get_repo_format_to_test(self):
        from bzrlib.repofmt import weaverepo
        return weaverepo.RepositoryFormat7()

    @staticmethod
    def is_compatible(source, target):
        """Be compatible with known Weave formats.

        We don't test for the stores being of specific types because that
        could lead to confusing results, and there is no need to be
        overly general.
        """
        from bzrlib.repofmt.weaverepo import (
            RepositoryFormat5,
            RepositoryFormat6,
            RepositoryFormat7,
            )
        try:
            return (isinstance(source._format, (RepositoryFormat5,
                                                RepositoryFormat6,
                                                RepositoryFormat7)) and
                    isinstance(target._format, (RepositoryFormat5,
                                                RepositoryFormat6,
                                                RepositoryFormat7)))
        except AttributeError:
            return False

    @needs_write_lock
    def copy_content(self, revision_id=None):
        """See InterRepository.copy_content()."""
        # weave specific optimised path:
        try:
            self.target.set_make_working_trees(self.source.make_working_trees())
        except (errors.RepositoryUpgradeRequired, NotImplemented):
            pass
        # FIXME do not peek!
        if self.source._transport.listable():
            pb = ui.ui_factory.nested_progress_bar()
            try:
                self.target.texts.insert_record_stream(
                    self.source.texts.get_record_stream(
                        self.source.texts.keys(), 'topological', False))
                pb.update('Copying inventory', 0, 1)
                self.target.inventories.insert_record_stream(
                    self.source.inventories.get_record_stream(
                        self.source.inventories.keys(), 'topological', False))
                self.target.signatures.insert_record_stream(
                    self.source.signatures.get_record_stream(
                        self.source.signatures.keys(),
                        'unordered', True))
                self.target.revisions.insert_record_stream(
                    self.source.revisions.get_record_stream(
                        self.source.revisions.keys(),
                        'topological', True))
            finally:
                pb.finished()
        else:
            self.target.fetch(self.source, revision_id=revision_id)

    @needs_read_lock
    def search_missing_revision_ids(self, revision_id=None, find_ghosts=True):
        """See InterRepository.missing_revision_ids()."""
        # we want all revisions to satisfy revision_id in source.
        # but we don't want to stat every file here and there.
        # we want, then, all revisions other needs to satisfy revision_id
        # checked, but not those that we have locally.
        # so the first thing is to get a subset of the revisions to
        # satisfy revision_id in source, and then eliminate those that
        # we do already have.
        # this is slow on high latency connections to self, but as this
        # disk format scales terribly for push anyway due to rewriting
        # inventory.weave, this is considered acceptable.
        if revision_id is not None:
            source_ids = self.source.get_ancestry(revision_id)
            if source_ids[0] is not None:
                raise AssertionError()
            source_ids.pop(0)
        else:
            source_ids = self.source._all_possible_ids()
        source_ids_set = set(source_ids)
        # source_ids is the worst possible case we may need to pull.
        # now we want to filter source_ids against what we actually
        # have in target, but don't try to check for existence where we know
        # we do not have a revision as that would be pointless.
        target_ids = set(self.target._all_possible_ids())
        possibly_present_revisions = target_ids.intersection(source_ids_set)
        actually_present_revisions = set(
            self.target._eliminate_revisions_not_present(possibly_present_revisions))
        required_revisions = source_ids_set.difference(actually_present_revisions)
        if revision_id is not None:
            # we used get_ancestry to determine source_ids, so we are assured
            # all revisions referenced are present as they are installed in
            # topological order, and the tip revision was validated by
            # get_ancestry.
            result_set = required_revisions
        else:
            # if we just grabbed the possibly available ids, then
            # we only have an estimate of what's available and need to validate
            # that against the revision records.
            result_set = set(
                self.source._eliminate_revisions_not_present(required_revisions))
        return self.source.revision_ids_to_search_result(result_set)
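# Worked example (illustrative, made-up revision ids) of the filtering used by
# both InterWeaveRepo above and InterKnitRepo below: start from everything the
# source might need to send, then subtract what the target provably has.
#
#     source_ids_set = set(['A', 'B', 'C'])
#     target_ids = set(['A', 'B', 'D'])
#     possibly_present = target_ids.intersection(source_ids_set)   # {'A', 'B'}
#     actually_present = possibly_present     # assuming the existence check passes
#     required = source_ids_set.difference(actually_present)       # {'C'}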
class InterKnitRepo(InterSameDataRepository):
    """Optimised code paths between Knit based repositories."""

    @classmethod
    def _get_repo_format_to_test(self):
        from bzrlib.repofmt import knitrepo
        return knitrepo.RepositoryFormatKnit1()

    @staticmethod
    def is_compatible(source, target):
        """Be compatible with known Knit formats.

        We don't test for the stores being of specific types because that
        could lead to confusing results, and there is no need to be
        overly general.
        """
        from bzrlib.repofmt.knitrepo import RepositoryFormatKnit
        try:
            are_knits = (isinstance(source._format, RepositoryFormatKnit) and
                isinstance(target._format, RepositoryFormatKnit))
        except AttributeError:
            return False
        return are_knits and InterRepository._same_model(source, target)

    @needs_read_lock
    def search_missing_revision_ids(self, revision_id=None, find_ghosts=True):
        """See InterRepository.missing_revision_ids()."""
        if revision_id is not None:
            source_ids = self.source.get_ancestry(revision_id)
            if source_ids[0] is not None:
                raise AssertionError()
            source_ids.pop(0)
        else:
            source_ids = self.source.all_revision_ids()
        source_ids_set = set(source_ids)
        # source_ids is the worst possible case we may need to pull.
        # now we want to filter source_ids against what we actually
        # have in target, but don't try to check for existence where we know
        # we do not have a revision as that would be pointless.
        target_ids = set(self.target.all_revision_ids())
        possibly_present_revisions = target_ids.intersection(source_ids_set)
        actually_present_revisions = set(
            self.target._eliminate_revisions_not_present(possibly_present_revisions))
        required_revisions = source_ids_set.difference(actually_present_revisions)
        if revision_id is not None:
            # we used get_ancestry to determine source_ids, so we are assured
            # all revisions referenced are present as they are installed in
            # topological order, and the tip revision was validated by
            # get_ancestry.
            result_set = required_revisions
        else:
            # if we just grabbed the possibly available ids, then
            # we only have an estimate of what's available and need to validate
            # that against the revision records.
            result_set = set(
                self.source._eliminate_revisions_not_present(required_revisions))
        return self.source.revision_ids_to_search_result(result_set)
class InterDifferingSerializer(InterRepository):
                is_resume = False
            try:
                # insert_stream_without_locking does the insertion; we then
                # commit or suspend the write group depending on missing keys.
                missing_keys = self.insert_stream_without_locking(stream,
                                    src_format, is_resume)
                if missing_keys:
                    # suspend the write group and tell the caller what we are
                    # missing. We know we can suspend or else we would not have
                    # entered this code path. (All repositories that can handle
                    # missing keys can handle suspending a write group).
                    write_group_tokens = self.target_repo.suspend_write_group()
                    return write_group_tokens, missing_keys
                hint = self.target_repo.commit_write_group()
                to_serializer = self.target_repo._format._serializer
                src_serializer = src_format._serializer
                if (to_serializer != src_serializer and
                    self.target_repo._format.pack_compresses):
                    self.target_repo.pack(hint=hint)
                return [], set()
            except:
                self.target_repo.abort_write_group(suppress_errors=True)
                raise
        finally:
            self.target_repo.unlock()

    def insert_stream_without_locking(self, stream, src_format,
                                      is_resume=False):
        """Insert a stream's content into the target repository.

        This assumes that you already have a locked repository and an active
        write group.

        :param src_format: a bzr repository format.
        :param is_resume: Passed down to get_missing_parent_inventories to
            indicate if we should be checking for missing texts at the same
            time.

        :return: A set of keys that are missing.
        """
        if not self.target_repo.is_write_locked():
            raise errors.ObjectNotLocked(self)
        if not self.target_repo.is_in_write_group():
            raise errors.BzrError('you must already be in a write group')
        to_serializer = self.target_repo._format._serializer
        src_serializer = src_format._serializer
        new_pack = None
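        # Caller-side sketch (illustrative, not from bzrlib): this method leaves
        # locking and write-group management to the caller, roughly the dance
        # insert_stream() performs above. 'sink', 'stream' and 'src_format' are
        # assumed to exist already.
        #
        #     sink.target_repo.lock_write()
        #     try:
        #         sink.target_repo.start_write_group()
        #         try:
        #             missing = sink.insert_stream_without_locking(
        #                 stream, src_format)
        #             if missing:
        #                 tokens = sink.target_repo.suspend_write_group()
        #             else:
        #                 sink.target_repo.commit_write_group()
        #         except:
        #             sink.target_repo.abort_write_group(suppress_errors=True)
        #             raise
        #     finally:
        #         sink.target_repo.unlock()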