        return self.get_format_string()


# Pre-0.8 formats that don't have a disk format string (because they are
# versioned by the matching control directory). We use the control directory's
# disk format string as a key for the network_name because they meet the
# constraints (simple string, unique, immutable).
network_format_registry.register_lazy(
    "Bazaar-NG branch, format 5\n",
    'bzrlib.repofmt.weaverepo',
    'RepositoryFormat5',
)
network_format_registry.register_lazy(
    "Bazaar-NG branch, format 6\n",
    'bzrlib.repofmt.weaverepo',
    'RepositoryFormat6',
)
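
# Illustrative sketch (not part of the original source): because these pre-0.8
# repository formats are keyed by their control directory's disk format
# string, a network-name lookup resolves lazily to the matching class,
# importing bzrlib.repofmt.weaverepo only on first use.  The exact call below
# is an assumption for illustration only.
#
#   weave_format = network_format_registry.get("Bazaar-NG branch, format 5\n")
#   # `weave_format` is the lazily loaded RepositoryFormat5 entry.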

# formats which have no format string are not discoverable or independently
# creatable on disk, so are not registered in format_registry.  They're
# all in bzrlib.repofmt.weaverepo now.  When an instance of one of these is
# needed, it's constructed directly by the BzrDir.  Non-native formats where
# the repository is not separately opened are similar.

format_registry.register_lazy(
    'Bazaar-NG Repository format 7',
    'bzrlib.repofmt.weaverepo',
    'RepositoryFormat7',
    )

format_registry.register_lazy(
    'Bazaar-NG Knit Repository Format 1',
    'bzrlib.repofmt.knitrepo',
    'RepositoryFormatKnit1',
    )

format_registry.register_lazy(
    'Bazaar RepositoryFormatKnitPack6RichRoot (bzr 1.9)\n',
    'bzrlib.repofmt.pack_repo',
    'RepositoryFormatKnitPack6RichRoot',
    )

# Development formats.
# Obsolete but kept pending a CHK based subtree format.
# Check their docstrings to see if/when they are obsolete.
format_registry.register_lazy(
    ("Bazaar development format 2 with subtree support "
        "(needs bzr.dev from before 1.8)\n"),
    'bzrlib.repofmt.pack_repo',
    'RepositoryFormatPackDevelopment2Subtree',
    )

# 1.14->1.16 go below here
format_registry.register_lazy(
    'Bazaar development format - group compression and chk inventory'
    ' (needs bzr.dev from 1.14)\n',
    'bzrlib.repofmt.groupcompress_repo',
    'RepositoryFormatCHK1',
    )

format_registry.register_lazy(
    'Bazaar development format - chk repository with bencode revision '
    'serialization (needs bzr.dev from 1.16)\n',
    'bzrlib.repofmt.groupcompress_repo',
    'RepositoryFormatCHK2',
    )

format_registry.register_lazy(
    'Bazaar repository format 2a (needs bzr 1.16 or later)\n',
    'bzrlib.repofmt.groupcompress_repo',
    'RepositoryFormat2a',
    )

format_registry.register_lazy(
    'Bazaar development format 8\n',
    'bzrlib.repofmt.groupcompress_repo',
    'RepositoryFormat2aSubtree',
    )
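
# Illustrative sketch (not part of the original source): format_registry maps
# an on-disk format string to a lazily imported format, so resolving a format
# only imports its defining module when first needed.  The exact call below is
# an assumption for illustration only.
#
#   fmt_2a = format_registry.get(
#       'Bazaar repository format 2a (needs bzr 1.16 or later)\n')
#   # `fmt_2a` is the lazily loaded RepositoryFormat2a entry from
#   # bzrlib.repofmt.groupcompress_repo.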


    @staticmethod
    def is_compatible(source, target):
        return InterRepository._same_model(source, target)


class InterWeaveRepo(InterSameDataRepository):
    """Optimised code paths between Weave based repositories.

    This should be in bzrlib/repofmt/weaverepo.py but we have not yet
    implemented lazy inter-object optimisation.
    """

    @classmethod
    def _get_repo_format_to_test(self):
        from bzrlib.repofmt import weaverepo
        return weaverepo.RepositoryFormat7()
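
    # Illustrative sketch (not part of the original source): InterRepository.get()
    # is expected to select this optimiser when both repositories use the old
    # Weave formats; `weave_source` and `weave_target` below are hypothetical
    # names used for illustration only.
    #
    #   inter = InterRepository.get(weave_source, weave_target)
    #   # isinstance(inter, InterWeaveRepo) holds when is_compatible() is True.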

    @staticmethod
    def is_compatible(source, target):
        """Be compatible with known Weave formats.

        We don't test for the stores being of specific types because that
        could lead to confusing results, and there is no need to be
        overly general.
        """
        from bzrlib.repofmt.weaverepo import (
            RepositoryFormat5, RepositoryFormat6, RepositoryFormat7)
        try:
            return (isinstance(source._format, (RepositoryFormat5,
                        RepositoryFormat6, RepositoryFormat7)) and
                    isinstance(target._format, (RepositoryFormat5,
                        RepositoryFormat6, RepositoryFormat7)))
        except AttributeError:
            return False

    @needs_write_lock
    def copy_content(self, revision_id=None):
        """See InterRepository.copy_content()."""
        # weave specific optimised path:
        try:
            self.target.set_make_working_trees(self.source.make_working_trees())
        except (errors.RepositoryUpgradeRequired, NotImplemented):
            pass
        # FIXME do not peek!
        if self.source._transport.listable():
            pb = ui.ui_factory.nested_progress_bar()
            try:
                self.target.texts.insert_record_stream(
                    self.source.texts.get_record_stream(
                        self.source.texts.keys(), 'topological', False))
                pb.update('Copying inventory', 0, 1)
                self.target.inventories.insert_record_stream(
                    self.source.inventories.get_record_stream(
                        self.source.inventories.keys(), 'topological', False))
                self.target.signatures.insert_record_stream(
                    self.source.signatures.get_record_stream(
                        self.source.signatures.keys(),
                        'unordered', True))
                self.target.revisions.insert_record_stream(
                    self.source.revisions.get_record_stream(
                        self.source.revisions.keys(),
                        'topological', True))
            finally:
                pb.finished()
        else:
            self.target.fetch(self.source, revision_id=revision_id)

    @needs_read_lock
    def search_missing_revision_ids(self, revision_id=None, find_ghosts=True):
        """See InterRepository.missing_revision_ids()."""
        # We want all revisions to satisfy revision_id in source, but we don't
        # want to stat every file here and there.  What we want, then, is all
        # the revisions the other side needs in order to satisfy revision_id,
        # checked, but not those that we already have locally.  So the first
        # thing is to get a subset of the revisions that satisfy revision_id
        # in source, and then eliminate those that we already have.
        # This is slow on a high latency connection to self, but as this disk
        # format scales terribly for push anyway due to rewriting
        # inventory.weave, this is considered acceptable.
        if revision_id is not None:
            source_ids = self.source.get_ancestry(revision_id)
            if source_ids[0] is not None:
                raise AssertionError()
            source_ids.pop(0)
        else:
            source_ids = self.source._all_possible_ids()
        source_ids_set = set(source_ids)
        # source_ids is the worst possible case we may need to pull.
        # Now we want to filter source_ids against what we actually have in
        # target, but don't try to check for existence where we know we do
        # not have a revision as that would be pointless.
        target_ids = set(self.target._all_possible_ids())
        possibly_present_revisions = target_ids.intersection(source_ids_set)
        actually_present_revisions = set(
            self.target._eliminate_revisions_not_present(possibly_present_revisions))
        required_revisions = source_ids_set.difference(actually_present_revisions)
        if revision_id is not None:
            # Since we used get_ancestry to determine source_ids, we are
            # assured that all referenced revisions are present, as they are
            # installed in topological order, and the tip revision was
            # validated by get_ancestry.
            result_set = required_revisions
        else:
            # If we just grabbed the possibly available ids, then we only have
            # an estimate of what's available and need to validate that
            # against the revision records.
            result_set = set(
                self.source._eliminate_revisions_not_present(required_revisions))
        return self.source.revision_ids_to_search_result(result_set)
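
    # Illustrative sketch (not part of the original source): the filtering above
    # is plain set arithmetic over hypothetical revision ids.  If the source
    # ancestry is {A, B, C} and the target may already hold {B, C, D}, then only
    # {B, C} are worth verifying, and whatever is not actually present must be
    # fetched.
    #
    #   source_ids_set = set(['A', 'B', 'C'])
    #   target_ids = set(['B', 'C', 'D'])
    #   possibly_present = target_ids.intersection(source_ids_set)  # {'B', 'C'}
    #   actually_present = set(['B'])        # e.g. 'C' turned out to be absent
    #   required = source_ids_set.difference(actually_present)      # {'A', 'C'}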


class InterKnitRepo(InterSameDataRepository):
    """Optimised code paths between Knit based repositories."""

    @classmethod
    def _get_repo_format_to_test(self):
        from bzrlib.repofmt import knitrepo
        return knitrepo.RepositoryFormatKnit1()

    @staticmethod
    def is_compatible(source, target):
        """Be compatible with known Knit formats.

        We don't test for the stores being of specific types because that
        could lead to confusing results, and there is no need to be
        overly general.
        """
        from bzrlib.repofmt.knitrepo import RepositoryFormatKnit
        try:
            are_knits = (isinstance(source._format, RepositoryFormatKnit) and
                isinstance(target._format, RepositoryFormatKnit))
        except AttributeError:
            return False
        return are_knits and InterRepository._same_model(source, target)

    @needs_read_lock
    def search_missing_revision_ids(self, revision_id=None, find_ghosts=True):
        """See InterRepository.missing_revision_ids()."""
        if revision_id is not None:
            source_ids = self.source.get_ancestry(revision_id)
            if source_ids[0] is not None:
                raise AssertionError()
            source_ids.pop(0)
        else:
            source_ids = self.source.all_revision_ids()
        source_ids_set = set(source_ids)
        # source_ids is the worst possible case we may need to pull.
        # Now we want to filter source_ids against what we actually have in
        # target, but don't try to check for existence where we know we do
        # not have a revision as that would be pointless.
        target_ids = set(self.target.all_revision_ids())
        possibly_present_revisions = target_ids.intersection(source_ids_set)
        actually_present_revisions = set(
            self.target._eliminate_revisions_not_present(possibly_present_revisions))
        required_revisions = source_ids_set.difference(actually_present_revisions)
        if revision_id is not None:
            # Since we used get_ancestry to determine source_ids, we are
            # assured that all referenced revisions are present, as they are
            # installed in topological order, and the tip revision was
            # validated by get_ancestry.
            result_set = required_revisions
        else:
            # If we just grabbed the possibly available ids, then we only have
            # an estimate of what's available and need to validate that
            # against the revision records.
            result_set = set(
                self.source._eliminate_revisions_not_present(required_revisions))
        return self.source.revision_ids_to_search_result(result_set)


class InterDifferingSerializer(InterRepository):