                self._validate_unicode_text(value,
                                            'revision property (%s)' % (key,))

    def _ensure_fallback_inventories(self):
        """Ensure that appropriate inventories are available.

        This only applies to repositories that are stacked, and is about
        ensuring the stacking invariants. Namely, that for any revision that is
        present, we either have all of the file content, or we have the parent
        inventory and the delta file content.
        """
        if not self.repository._fallback_repositories:
            return
        if not self.repository._format.supports_chks:
            raise errors.BzrError("Cannot commit directly to a stacked branch"
                " in pre-2a formats. See "
                "https://bugs.launchpad.net/bzr/+bug/375013 for details.")
        # This is a stacked repo, we need to make sure we have the parent
        # inventories for the parents.
        parent_keys = [(p,) for p in self.parents]
        parent_map = self.repository.inventories._index.get_parent_map(parent_keys)
        missing_parent_keys = set([pk for pk in parent_keys
                                   if pk not in parent_map])
        fallback_repos = list(reversed(self.repository._fallback_repositories))
        missing_keys = [('inventories', pk[0])
                        for pk in missing_parent_keys]

        while missing_keys and fallback_repos:
            fallback_repo = fallback_repos.pop()
            source = fallback_repo._get_source(self.repository._format)
            sink = self.repository._get_sink()
            stream = source.get_stream_for_missing_keys(missing_keys)
            missing_keys = sink.insert_stream_without_locking(stream,
                self.repository._format)
        if missing_keys:
            raise errors.BzrError('Unable to fill in parent inventories for a'
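
# Illustrative sketch (not part of the original file): how the missing
# parent-inventory keys above take shape. The revision ids and the `known`
# map are made-up stand-ins for the inventories index's get_parent_map(),
# which returns only the keys it actually has.
parents = ['rev-a', 'rev-b']
parent_keys = [(p,) for p in parents]           # inventory keys are 1-tuples of revision ids
known = {('rev-a',): ()}                        # pretend only rev-a's inventory is present locally
missing_parent_keys = set(pk for pk in parent_keys if pk not in known)
missing_keys = [('inventories', pk[0]) for pk in missing_parent_keys]
assert missing_keys == [('inventories', 'rev-b')]
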
    def commit(self, message):
        """Make the actual commit.

    @needs_read_lock
    def search_missing_revision_ids(self, other, revision_id=None, find_ghosts=True):
    def search_missing_revision_ids(self, other,
            revision_id=symbol_versioning.DEPRECATED_PARAMETER,
            find_ghosts=True, revision_ids=None, if_present_ids=None):
        """Return the revision ids that other has that this does not.

        These are returned in topological order.

        revision_id: only return revision ids included by revision_id.
        """
        if symbol_versioning.deprecated_passed(revision_id):
            symbol_versioning.warn(
                'search_missing_revision_ids(revision_id=...) was '
                'deprecated in 2.4. Use revision_ids=[...] instead.',
                DeprecationWarning, stacklevel=3)
            if revision_ids is not None:
                raise AssertionError(
                    'revision_ids is mutually exclusive with revision_id')
            if revision_id is not None:
                revision_ids = [revision_id]
        return InterRepository.get(other, self).search_missing_revision_ids(
            revision_id, find_ghosts)
            find_ghosts=find_ghosts, revision_ids=revision_ids,
            if_present_ids=if_present_ids)
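
# Hedged usage sketch (repositories and revision ids below are placeholders):
# callers should move from the deprecated revision_id keyword to revision_ids,
# optionally adding if_present_ids for ids that are allowed to be absent.
missing = repo.search_missing_revision_ids(
    other_repo,
    revision_ids=['rev-id-we-require'],          # replaces revision_id='rev-id-we-require'
    if_present_ids=['rev-id-that-may-be-absent'],
    find_ghosts=False)
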
    def open(base):

class RepositoryFormatRegistry(registry.FormatRegistry):
    """Repository format registry."""

    def __init__(self, other_registry=None):
        super(RepositoryFormatRegistry, self).__init__(other_registry)
        self._extra_formats = []

    def register(self, format):
        """Register a new repository format."""
        super(RepositoryFormatRegistry, self).register(
            format.get_format_string(), format)

    def remove(self, format):
        """Remove a registered repository format."""
        super(RepositoryFormatRegistry, self).remove(
            format.get_format_string())

    def register_extra(self, format):
        """Register a repository format that can not be used in a metadir.

        This is mainly useful to allow custom repository formats, such as older
        Bazaar formats and foreign formats, to be tested.
        """
        self._extra_formats.append(registry._ObjectGetter(format))

    def remove_extra(self, format):
        """Remove an extra repository format.
        """
        self._extra_formats.remove(registry._ObjectGetter(format))

    def register_extra_lazy(self, module_name, member_name):
        """Register a repository format lazily.
        """
        self._extra_formats.append(
            registry._LazyObjectGetter(module_name, member_name))

    def get_default(self):
        """Return the current default format."""
        from bzrlib import bzrdir
        return bzrdir.format_registry.make_bzrdir('default').repository_format

    def _get_extra(self):
        for getter in self._extra_formats:
            f = getter.get_obj()

        """Return all repository formats, even those not usable in metadirs.
        """
        return [self.get(k) for k in self.keys()] + self._get_extra()
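
# Hedged usage sketch of the registry above. MyToyFormat, 'myplugin.repofmt'
# and 'AncientFormat' are hypothetical names used only for illustration:
# formats register under their format string, and extras registered lazily are
# only imported when first asked for.
class MyToyFormat(object):
    def get_format_string(self):
        return 'My toy repository format 1\n'

fmt = MyToyFormat()
my_registry = RepositoryFormatRegistry()
my_registry.register(fmt)                          # keyed by fmt.get_format_string()
looked_up = my_registry.get('My toy repository format 1\n')
my_registry.register_extra_lazy('myplugin.repofmt', 'AncientFormat')
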
network_format_registry = registry.FormatRegistry()
"""Registry of formats indexed by their network name.

    'bzrlib.repofmt.pack_repo',
    'RepositoryFormatKnitPack6RichRoot',
    )
format_registry.register_lazy(
    'Bazaar repository format 2a (needs bzr 1.16 or later)\n',
    'bzrlib.repofmt.groupcompress_repo',
    'RepositoryFormat2a',
    )

# Development formats.
# Obsolete but kept pending a CHK based subtree format.
# Check their docstrings to see if/when they are obsolete.
format_registry.register_lazy(
    ("Bazaar development format 2 with subtree support "
        "(needs bzr.dev from before 1.8)\n"),
    'bzrlib.repofmt.pack_repo',
    'RepositoryFormatPackDevelopment2Subtree',
    )
# 1.14->1.16 go below here
format_registry.register_lazy(
    'Bazaar development format - group compression and chk inventory'
    ' (needs bzr.dev from 1.14)\n',
    'bzrlib.repofmt.groupcompress_repo',
    'RepositoryFormatCHK1',
    )
format_registry.register_lazy(
    'Bazaar development format - chk repository with bencode revision '
    'serialization (needs bzr.dev from 1.16)\n',
    'bzrlib.repofmt.groupcompress_repo',
    'RepositoryFormatCHK2',
    )
format_registry.register_lazy(
    'Bazaar repository format 2a (needs bzr 1.16 or later)\n',
    'bzrlib.repofmt.groupcompress_repo',
    'RepositoryFormat2a',
    )
format_registry.register_lazy(
    'Bazaar development format 8\n',
    'bzrlib.repofmt.groupcompress_repo',
    'RepositoryFormat2aSubtree',
    )
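
# Illustrative sketch of how these lazy registrations are consumed: the
# on-disk format string is the registry key, and the implementing module is
# only imported on first lookup. The key below is taken verbatim from the
# registration above.
from bzrlib import repository

fmt_2a = repository.format_registry.get(
    'Bazaar repository format 2a (needs bzr 1.16 or later)\n')
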
        return searcher.get_result()

    @needs_read_lock
    def search_missing_revision_ids(self, revision_id=None, find_ghosts=True):
    def search_missing_revision_ids(self,
            revision_id=symbol_versioning.DEPRECATED_PARAMETER,
            find_ghosts=True, revision_ids=None, if_present_ids=None):
        """Return the revision ids that source has that target does not.

        :param revision_id: only return revision ids included by this
            revision_id.
        :param revision_ids: return revision ids included by these
            revision_ids. NoSuchRevision will be raised if any of these
            revisions are not present.
        :param if_present_ids: like revision_ids, but will not cause
            NoSuchRevision if any of these are absent; instead they will simply
            not be in the result. This is useful for e.g. finding revisions
            to fetch for tags, which may reference absent revisions.
        :param find_ghosts: If True find missing revisions in deep history
            rather than just finding the surface difference.
        :return: A bzrlib.graph.SearchResult.
        """
        if symbol_versioning.deprecated_passed(revision_id):
            symbol_versioning.warn(
                'search_missing_revision_ids(revision_id=...) was '
                'deprecated in 2.4. Use revision_ids=[...] instead.',
                DeprecationWarning, stacklevel=2)
            if revision_ids is not None:
                raise AssertionError(
                    'revision_ids is mutually exclusive with revision_id')
            if revision_id is not None:
                revision_ids = [revision_id]

        # stop searching at found target revisions.
        if not find_ghosts and revision_id is not None:
            return self._walk_to_common_revisions([revision_id])
        if not find_ghosts and (revision_ids is not None or if_present_ids is
                not None):
            return self._walk_to_common_revisions(revision_ids,
                if_present_ids=if_present_ids)
        # generic, possibly worst case, slow code path.
        target_ids = set(self.target.all_revision_ids())
        if revision_id is not None:
            source_ids = self.source.get_ancestry(revision_id)
            if source_ids[0] is not None:
                raise AssertionError()

            source_ids = self.source.all_revision_ids()
        source_ids = self._present_source_revisions_for(
            revision_ids, if_present_ids)
        result_set = set(source_ids).difference(target_ids)
        return self.source.revision_ids_to_search_result(result_set)
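
# Hedged sketch of the tag-fetching case the docstring mentions; `inter` and
# the ids are placeholders. Ids in revision_ids must exist in the source
# (otherwise NoSuchRevision), while tag targets that may have vanished go in
# if_present_ids and simply drop out of the result.
result = inter.search_missing_revision_ids(
    revision_ids=['branch-tip-revision-id'],
    if_present_ids=['tag-target-that-may-be-absent'],
    find_ghosts=False)
to_fetch = result.get_keys()        # the SearchResult's revision ids
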
    def _present_source_revisions_for(self, revision_ids, if_present_ids=None):
        """Returns set of all revisions in ancestry of revision_ids present in
        the source repo.

        :param revision_ids: if None, all revisions in source are returned.
        :param if_present_ids: like revision_ids, but if any/all of these are
            absent no error is raised.
        """
        if revision_ids is not None or if_present_ids is not None:
            # First, ensure all specified revisions exist. Callers expect
            # NoSuchRevision when they pass absent revision_ids here.
            if revision_ids is None:
                revision_ids = set()
            if if_present_ids is None:
                if_present_ids = set()
            revision_ids = set(revision_ids)
            if_present_ids = set(if_present_ids)
            all_wanted_ids = revision_ids.union(if_present_ids)
            graph = self.source.get_graph()
            present_revs = set(graph.get_parent_map(all_wanted_ids))
            missing = revision_ids.difference(present_revs)
            if missing:
                raise errors.NoSuchRevision(self.source, missing.pop())
            found_ids = all_wanted_ids.intersection(present_revs)
            source_ids = [rev_id for (rev_id, parents) in
                          graph.iter_ancestry(found_ids)
                          if rev_id != _mod_revision.NULL_REVISION
                          and parents is not None]
        else:
            source_ids = self.source.all_revision_ids()
        return set(source_ids)
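
# Toy illustration (not part of the original file) of the presence and
# ancestry filtering above, using bzrlib's graph helpers over an in-memory
# parents map instead of a real repository; the revision ids are made up.
from bzrlib.graph import DictParentsProvider, Graph
from bzrlib.revision import NULL_REVISION

toy_parents = {'A': (NULL_REVISION,), 'B': ('A',), 'C': ('B',)}
toy_graph = Graph(DictParentsProvider(toy_parents))
wanted = set(['C', 'ghost'])
present = set(toy_graph.get_parent_map(wanted))         # {'C'}: 'ghost' is simply absent
ancestry = set(rev_id for (rev_id, ps) in toy_graph.iter_ancestry(present)
               if rev_id != NULL_REVISION and ps is not None)
# ancestry is now {'A', 'B', 'C'}
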
    def _same_model(source, target):
        """True if source and target have the same data representation.

        return InterRepository._same_model(source, target)
class InterWeaveRepo(InterSameDataRepository):
    """Optimised code paths between Weave based repositories.

    This should be in bzrlib/repofmt/weaverepo.py but we have not yet
    implemented lazy inter-object optimisation.
    """

    def _get_repo_format_to_test(self):
        from bzrlib.repofmt import weaverepo
        return weaverepo.RepositoryFormat7()

    def is_compatible(source, target):
        """Be compatible with known Weave formats.

        We don't test for the stores being of specific types because that
        could lead to confusing results, and there is no need to be
        overly general.
        """
        from bzrlib.repofmt.weaverepo import (

        try:
            return (isinstance(source._format, (RepositoryFormat5,
                                                RepositoryFormat7)) and
                    isinstance(target._format, (RepositoryFormat5,
                                                RepositoryFormat7)))
        except AttributeError:
            return False

    def copy_content(self, revision_id=None):
        """See InterRepository.copy_content()."""
        # weave specific optimised path:
        try:
            self.target.set_make_working_trees(self.source.make_working_trees())
        except (errors.RepositoryUpgradeRequired, NotImplemented):
            pass
        # FIXME do not peek!
        if self.source._transport.listable():
            pb = ui.ui_factory.nested_progress_bar()
            try:
                self.target.texts.insert_record_stream(
                    self.source.texts.get_record_stream(
                        self.source.texts.keys(), 'topological', False))
                pb.update('Copying inventory', 0, 1)
                self.target.inventories.insert_record_stream(
                    self.source.inventories.get_record_stream(
                        self.source.inventories.keys(), 'topological', False))
                self.target.signatures.insert_record_stream(
                    self.source.signatures.get_record_stream(
                        self.source.signatures.keys(),
                        'topological', False))
                self.target.revisions.insert_record_stream(
                    self.source.revisions.get_record_stream(
                        self.source.revisions.keys(),
                        'topological', True))

            self.target.fetch(self.source, revision_id=revision_id)
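
# Hedged usage sketch (placeholder paths): the optimiser object is obtained
# from InterRepository, and copy_content streams texts, inventories and
# signatures before revisions, falling back to fetch() when it cannot simply
# enumerate the source. The weave-optimised path above only applies when both
# ends are weave-format repositories.
from bzrlib.repository import Repository, InterRepository

source = Repository.open('/path/to/source-repo')
target = Repository.open('/path/to/target-repo')
inter = InterRepository.get(source, target)
inter.copy_content()        # copies everything; pass revision_id=... to limit it
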
    def search_missing_revision_ids(self, revision_id=None, find_ghosts=True):
        """See InterRepository.missing_revision_ids()."""
        # We want all revisions needed to satisfy revision_id in source,
        # but we don't want to stat every file here and there. That is, we
        # want all the revisions the other repository needs to satisfy
        # revision_id, checked, but not those that we already have locally.
        # So the first thing is to get a subset of the revisions that
        # satisfy revision_id in source, and then eliminate those that
        # we do already have.
        # This is slow on a high-latency connection to self, but as this
        # disk format scales terribly for push anyway due to rewriting
        # inventory.weave, this is considered acceptable.
        if revision_id is not None:
            source_ids = self.source.get_ancestry(revision_id)
            if source_ids[0] is not None:
                raise AssertionError()

            source_ids = self.source._all_possible_ids()
        source_ids_set = set(source_ids)
        # source_ids is the worst possible case we may need to pull.
        # Now we want to filter source_ids against what we actually
        # have in target, but don't try to check for existence where we know
        # we do not have a revision as that would be pointless.
        target_ids = set(self.target._all_possible_ids())
        possibly_present_revisions = target_ids.intersection(source_ids_set)
        actually_present_revisions = set(
            self.target._eliminate_revisions_not_present(possibly_present_revisions))
        required_revisions = source_ids_set.difference(actually_present_revisions)
        if revision_id is not None:
            # We used get_ancestry to determine source_ids, so we are assured
            # that all revisions referenced are present, as they are installed
            # in topological order, and the tip revision was validated by
            # get_ancestry.
            result_set = required_revisions
        else:
            # If we just grabbed the possibly available ids, then
            # we only have an estimate of what's available and need to validate
            # that against the revision records.
            result_set = set(
                self.source._eliminate_revisions_not_present(required_revisions))
        return self.source.revision_ids_to_search_result(result_set)
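
# The set arithmetic above, worked through with plain Python sets standing in
# for revision ids (purely illustrative values).
source_ids_set = set(['A', 'B', 'C', 'D'])      # worst case of what we may need to pull
target_ids = set(['A', 'B', 'X'])               # what the target claims to have
possibly_present = target_ids.intersection(source_ids_set)     # {'A', 'B'}
actually_present = set(['A'])                   # suppose 'B' fails the existence check
required = source_ids_set.difference(actually_present)          # {'B', 'C', 'D'}
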
class InterKnitRepo(InterSameDataRepository):
    """Optimised code paths between Knit based repositories."""

    def _get_repo_format_to_test(self):
        from bzrlib.repofmt import knitrepo
        return knitrepo.RepositoryFormatKnit1()

    def is_compatible(source, target):
        """Be compatible with known Knit formats.

        We don't test for the stores being of specific types because that
        could lead to confusing results, and there is no need to be
        overly general.
        """
        from bzrlib.repofmt.knitrepo import RepositoryFormatKnit
        try:
            are_knits = (isinstance(source._format, RepositoryFormatKnit) and
                isinstance(target._format, RepositoryFormatKnit))
        except AttributeError:
            return False
        return are_knits and InterRepository._same_model(source, target)

    def search_missing_revision_ids(self, revision_id=None, find_ghosts=True):
        """See InterRepository.missing_revision_ids()."""
        if revision_id is not None:
            source_ids = self.source.get_ancestry(revision_id)
            if source_ids[0] is not None:
                raise AssertionError()

            source_ids = self.source.all_revision_ids()
        source_ids_set = set(source_ids)
        # source_ids is the worst possible case we may need to pull.
        # Now we want to filter source_ids against what we actually
        # have in target, but don't try to check for existence where we know
        # we do not have a revision as that would be pointless.
        target_ids = set(self.target.all_revision_ids())
        possibly_present_revisions = target_ids.intersection(source_ids_set)
        actually_present_revisions = set(
            self.target._eliminate_revisions_not_present(possibly_present_revisions))
        required_revisions = source_ids_set.difference(actually_present_revisions)
        if revision_id is not None:
            # We used get_ancestry to determine source_ids, so we are assured
            # that all revisions referenced are present, as they are installed
            # in topological order, and the tip revision was validated by
            # get_ancestry.
            result_set = required_revisions
        else:
            # If we just grabbed the possibly available ids, then
            # we only have an estimate of what's available and need to validate
            # that against the revision records.
            result_set = set(
                self.source._eliminate_revisions_not_present(required_revisions))
        return self.source.revision_ids_to_search_result(result_set)
class InterDifferingSerializer(InterRepository):

            is_resume = False
            # locked_insert_stream performs a commit|suspend.
            return self._locked_insert_stream(stream, src_format, is_resume)
            missing_keys = self.insert_stream_without_locking(stream,
                src_format, is_resume)
            if missing_keys:
                # Suspend the write group and tell the caller what is
                # missing. We know we can suspend or else we would not have
                # entered this code path. (All repositories that can handle
                # missing keys can handle suspending a write group.)
                write_group_tokens = self.target_repo.suspend_write_group()
                return write_group_tokens, missing_keys
            hint = self.target_repo.commit_write_group()
            to_serializer = self.target_repo._format._serializer
            src_serializer = src_format._serializer
            if (to_serializer != src_serializer and
                    self.target_repo._format.pack_compresses):
                self.target_repo.pack(hint=hint)

            self.target_repo.abort_write_group(suppress_errors=True)

            self.target_repo.unlock()
    def _locked_insert_stream(self, stream, src_format, is_resume):
    def insert_stream_without_locking(self, stream, src_format,
                                      is_resume=False):
        """Insert a stream's content into the target repository.

        This assumes that you already have a locked repository and an active
        write group.

        :param src_format: a bzr repository format.
        :param is_resume: Passed down to get_missing_parent_inventories to
            indicate if we should be checking for missing texts at the same
        :return: A set of keys that are missing.
        """
        if not self.target_repo.is_write_locked():
            raise errors.ObjectNotLocked(self)
        if not self.target_repo.is_in_write_group():
            raise errors.BzrError('you must already be in a write group')
        to_serializer = self.target_repo._format._serializer
        src_serializer = src_format._serializer
        new_pack = None
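
# Hedged sketch of the caller contract spelled out above; target_repo, stream
# and src_format are placeholders. The target must be write-locked and inside
# a write group before insert_stream_without_locking is called, and the caller
# decides whether to commit or, if keys are missing, suspend the write group
# and come back later, mirroring the locked insert_stream path.
target_repo.lock_write()
try:
    target_repo.start_write_group()
    try:
        sink = target_repo._get_sink()
        missing = sink.insert_stream_without_locking(stream, src_format)
        if missing:
            # Keep the partially-written data around; the caller can fetch
            # the missing keys and resume with these tokens later.
            tokens = target_repo.suspend_write_group()
        else:
            target_repo.commit_write_group()
    except:
        target_repo.abort_write_group(suppress_errors=True)
        raise
finally:
    target_repo.unlock()
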