~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/repository.py

  • Committer: Canonical.com Patch Queue Manager
  • Date: 2009-03-17 02:35:51 UTC
  • mfrom: (4144.3.12 simplify-interrepo-stack)
  • Revision ID: pqm@pqm.ubuntu.com-20090317023551-f1jm1klbuy1eg4hv
(andrew) Remove InterPackToRemotePack too.

@@ -34,7 +34,6 @@
     lockdir,
     lru_cache,
     osutils,
-    remote,
     revision as _mod_revision,
     symbol_versioning,
     tsort,
@@ -2627,13 +2626,6 @@
     _optimisers = []
     """The available optimised InterRepository types."""

-    def __init__(self, source, target):
-        InterObject.__init__(self, source, target)
-        # These two attributes may be overridden by e.g. InterOtherToRemote to
-        # provide a faster implementation.
-        self.target_get_graph = self.target.get_graph
-        self.target_get_parent_map = self.target.get_parent_map
-
     @needs_write_lock
     def copy_content(self, revision_id=None):
         """Make a complete copy of the content in self into destination.
@@ -2676,7 +2668,7 @@
         :param revision_ids: The start point for the search.
         :return: A set of revision ids.
         """
-        target_graph = self.target_get_graph()
+        target_graph = self.target.get_graph()
         revision_ids = frozenset(revision_ids)
         # Fast path for the case where all the revisions are already in the
         # target repo.
@@ -3037,10 +3029,9 @@
             # till then:
             source_revision_ids = frozenset(self.source.all_revision_ids())
             revision_ids = source_revision_ids - \
-                frozenset(self.target_get_parent_map(source_revision_ids))
+                frozenset(self.target.get_parent_map(source_revision_ids))
             revision_keys = [(revid,) for revid in revision_ids]
-            target_pack_collection = self._get_target_pack_collection()
-            index = target_pack_collection.revision_index.combined_index
+            index = self.target._pack_collection.revision_index.combined_index
             present_revision_ids = set(item[1][0] for item in
                 index.iter_entries(revision_keys))
             revision_ids = set(revision_ids) - present_revision_ids
@@ -3066,27 +3057,20 @@

     def _pack(self, source, target, revision_ids):
         from bzrlib.repofmt.pack_repo import Packer
-        target_pack_collection = self._get_target_pack_collection()
         packs = source._pack_collection.all_packs()
-        pack = Packer(target_pack_collection, packs, '.fetch',
+        pack = Packer(self.target._pack_collection, packs, '.fetch',
             revision_ids).pack()
         if pack is not None:
-            target_pack_collection._save_pack_names()
+            self.target._pack_collection._save_pack_names()
             copied_revs = pack.get_revision_count()
             # Trigger an autopack. This may duplicate effort as we've just done
             # a pack creation, but for now it is simpler to think about as
             # 'upload data, then repack if needed'.
-            self._autopack()
+            self.target._pack_collection.autopack()
             return (copied_revs, [])
         else:
             return (0, [])

-    def _autopack(self):
-        self.target._pack_collection.autopack()
-
-    def _get_target_pack_collection(self):
-        return self.target._pack_collection
-
     @needs_read_lock
     def search_missing_revision_ids(self, revision_id=None, find_ghosts=True):
         """See InterRepository.missing_revision_ids().
@@ -3099,7 +3083,7 @@
         elif revision_id is not None:
             # Find ghosts: search for revisions pointing from one repository to
             # the other, and vice versa, anywhere in the history of revision_id.
-            graph = self.target_get_graph(other_repository=self.source)
+            graph = self.target.get_graph(other_repository=self.source)
             searcher = graph._make_breadth_first_searcher([revision_id])
             found_ids = set()
             while True:
@@ -3115,7 +3099,7 @@
             # Double query here: should be able to avoid this by changing the
             # graph api further.
             result_set = found_ids - frozenset(
-                self.target_get_parent_map(found_ids))
+                self.target.get_parent_map(found_ids))
         else:
             source_ids = self.source.all_revision_ids()
             # source_ids is the worst possible case we may need to pull.
@@ -3299,59 +3283,11 @@
         return basis_id, basis_tree


-class InterPackToRemotePack(InterPackRepo):
-    """A specialisation of InterPackRepo for a target that is a
-    RemoteRepository.
-
-    This will use the get_parent_map RPC rather than plain readvs, and also
-    uses an RPC for autopacking.
-    """
-
-    @staticmethod
-    def is_compatible(source, target):
-        from bzrlib.repofmt.pack_repo import RepositoryFormatPack
-        if isinstance(source._format, RepositoryFormatPack):
-            if isinstance(target, remote.RemoteRepository):
-                target._format._ensure_real()
-                if isinstance(target._format._custom_format,
-                              RepositoryFormatPack):
-                    if InterRepository._same_model(source, target):
-                        return True
-        return False
-
-    def _autopack(self):
-        self.target.autopack()
-
-    @needs_write_lock
-    def fetch(self, revision_id=None, pb=None, find_ghosts=False,
-            fetch_spec=None):
-        """See InterRepository.fetch()."""
-        if self.target._client._medium._is_remote_before((1, 13)):
-            # The server won't support the insert_stream RPC, so just use
-            # regular InterPackRepo logic.  This avoids a bug that causes many
-            # round-trips for small append calls.
-            return InterPackRepo.fetch(self, revision_id=revision_id, pb=pb,
-                find_ghosts=find_ghosts, fetch_spec=fetch_spec)
-        # Always fetch using the generic streaming fetch code, to allow
-        # streaming fetching into remote servers.
-        from bzrlib.fetch import RepoFetcher
-        fetcher = RepoFetcher(self.target, self.source, revision_id,
-                              pb, find_ghosts, fetch_spec=fetch_spec)
-
-    def _get_target_pack_collection(self):
-        return self.target._real_repository._pack_collection
-
-    @classmethod
-    def _get_repo_format_to_test(self):
-        return None
-
-
 InterRepository.register_optimiser(InterDifferingSerializer)
 InterRepository.register_optimiser(InterSameDataRepository)
 InterRepository.register_optimiser(InterWeaveRepo)
 InterRepository.register_optimiser(InterKnitRepo)
 InterRepository.register_optimiser(InterPackRepo)
-InterRepository.register_optimiser(InterPackToRemotePack)


 class CopyConverter(object):
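
A note on the register_optimiser lines above: InterRepository keeps its registered specialisations in _optimisers, and when an operation needs a (source, target) pair it asks each registered class whether it is_compatible with that pair, falling back to the generic implementation otherwise. A simplified sketch of that selection pattern (the real logic lives in bzrlib's InterObject; registration order and locking details are glossed over):

# Simplified sketch of the optimiser registry; not bzrlib's exact code.
class InterSketch(object):
    _optimisers = []    # populated via register_optimiser()

    def __init__(self, source, target):
        self.source = source
        self.target = target

    @classmethod
    def register_optimiser(cls, optimiser):
        cls._optimisers.append(optimiser)

    @classmethod
    def get(cls, source, target):
        # Hand the pair to the first specialisation that claims it.
        for optimiser in cls._optimisers:
            if optimiser.is_compatible(source, target):
                return optimiser(source, target)
        return cls(source, target)    # generic fallback

With InterPackToRemotePack unregistered, a pack-format source fetching into a RemoteRepository no longer matches a dedicated optimiser; as the deleted fetch() already did for 1.13+ servers, such fetches presumably go through the generic streaming fetch code.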