~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/branch.py

  • Committer: Andrew Bennetts
  • Date: 2011-05-18 15:45:07 UTC
  • mto: This revision was merged to the branch mainline in revision 5895.
  • Revision ID: andrew.bennetts@canonical.com-20110518154507-t3qudgcb7fj3omjc
Use pydoctor in api-docs make target.

=== modified file 'bzrlib/branch.py'
@@ -24,7 +24,6 @@
 from bzrlib import (
     bzrdir,
     cache_utf8,
-    cleanup,
     config as _mod_config,
     debug,
     errors,
@@ -455,7 +454,6 @@
             after. If None, the rest of history is included.
         :param stop_rule: if stop_revision_id is not None, the precise rule
             to use for termination:
-
             * 'exclude' - leave the stop revision out of the result (default)
             * 'include' - the stop revision is the last item in the result
             * 'with-merges' - include the stop revision and all of its
@@ -463,7 +461,6 @@
             * 'with-merges-without-common-ancestry' - filter out revisions
               that are in both ancestries
         :param direction: either 'reverse' or 'forward':
-
             * reverse means return the start_revision_id first, i.e.
               start at the most recent revision and go backwards in history
             * forward returns tuples in the opposite order to reverse.
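The docstring in the hunk above belongs to Branch.iter_merge_sorted_revisions, which (as the later hunks show) yields (revision_id, merge_depth, revno, end_of_merge) tuples. A minimal usage sketch, not part of the diff; 'a_branch' and 'some_revid' are assumed to exist::

    a_branch.lock_read()
    try:
        # Walk history newest-first and stop at (and include) some_revid.
        merge_sorted = a_branch.iter_merge_sorted_revisions(
            stop_revision_id=some_revid, stop_rule='include',
            direction='reverse')
        for rev_id, depth, revno, end_of_merge in merge_sorted:
            print rev_id, depth, revno
    finally:
        a_branch.unlock()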
@@ -513,7 +510,7 @@
         rev_iter = iter(merge_sorted_revisions)
         if start_revision_id is not None:
             for node in rev_iter:
-                rev_id = node.key
+                rev_id = node.key[-1]
                 if rev_id != start_revision_id:
                     continue
                 else:
@@ -525,19 +522,19 @@
         if stop_revision_id is None:
             # Yield everything
             for node in rev_iter:
-                rev_id = node.key
+                rev_id = node.key[-1]
                 yield (rev_id, node.merge_depth, node.revno,
                        node.end_of_merge)
         elif stop_rule == 'exclude':
             for node in rev_iter:
-                rev_id = node.key
+                rev_id = node.key[-1]
                 if rev_id == stop_revision_id:
                     return
                 yield (rev_id, node.merge_depth, node.revno,
                        node.end_of_merge)
         elif stop_rule == 'include':
             for node in rev_iter:
-                rev_id = node.key
+                rev_id = node.key[-1]
                 yield (rev_id, node.merge_depth, node.revno,
                        node.end_of_merge)
                 if rev_id == stop_revision_id:
@@ -549,7 +546,7 @@
             ancestors = graph.find_unique_ancestors(start_revision_id,
                                                     [stop_revision_id])
             for node in rev_iter:
-                rev_id = node.key
+                rev_id = node.key[-1]
                 if rev_id not in ancestors:
                     continue
                 yield (rev_id, node.merge_depth, node.revno,
@@ -565,7 +562,7 @@
             reached_stop_revision_id = False
             revision_id_whitelist = []
             for node in rev_iter:
-                rev_id = node.key
+                rev_id = node.key[-1]
                 if rev_id == left_parent:
                     # reached the left parent after the stop_revision
                     return
@@ -670,16 +667,15 @@
         raise errors.UnsupportedOperation(self.get_reference_info, self)
 
     @needs_write_lock
-    def fetch(self, from_branch, last_revision=None, limit=None):
+    def fetch(self, from_branch, last_revision=None):
         """Copy revisions from from_branch into this branch.
 
         :param from_branch: Where to copy from.
         :param last_revision: What revision to stop at (None for at the end
                               of the branch.
-        :param limit: Optional rough limit of revisions to fetch
        :return: None
         """
-        return InterBranch.get(from_branch, self).fetch(last_revision, limit=limit)
+        return InterBranch.get(from_branch, self).fetch(last_revision)
 
     def get_bound_location(self):
         """Return the URL of the branch we are bound to.
@@ -778,7 +774,7 @@
         configured to check constraints on history, in which case this may not
         be permitted.
         """
-        raise NotImplementedError(self.set_last_revision_info)
+        raise NotImplementedError(self.last_revision_info)
 
     @needs_write_lock
     def generate_revision_history(self, revision_id, last_rev=None,
@@ -932,9 +928,8 @@
 
         :seealso: Branch._get_tags_bytes.
         """
-        op = cleanup.OperationWithCleanups(self._set_tags_bytes_locked)
-        op.add_cleanup(self.lock_write().unlock)
-        return op.run_simple(bytes)
+        return _run_with_write_locked_target(self, self._set_tags_bytes_locked,
+                bytes)
 
     def _set_tags_bytes_locked(self, bytes):
         self._tags_bytes = bytes
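The hunk above trades one write-locking idiom for another. A side-by-side sketch of the two shapes, not part of the diff; 'target', 'mutate' and 'argument' are placeholders for illustration only::

    # Left-hand idiom: lock now, register unlock as a cleanup, then run.
    op = cleanup.OperationWithCleanups(mutate)
    op.add_cleanup(target.lock_write().unlock)
    op.run_simple(argument)          # calls mutate(argument)

    # Right-hand idiom: the helper write-locks target, calls
    # mutate(argument), and always unlocks, re-raising any exception.
    _run_with_write_locked_target(target, mutate, argument)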
@@ -1422,7 +1417,7 @@
         :param to_location: The url to produce the checkout at
         :param revision_id: The revision to check out
         :param lightweight: If True, produce a lightweight checkout, otherwise,
-            produce a bound branch (heavyweight checkout)
+        produce a bound branch (heavyweight checkout)
         :param accelerator_tree: A tree which can be used for retrieving file
             contents more quickly than the revision tree, i.e. a workingtree.
             The revision tree will be used for cases where accelerator_tree's
@@ -1474,7 +1469,6 @@
 
     def reference_parent(self, file_id, path, possible_transports=None):
         """Return the parent branch for a tree-reference file_id
-
         :param file_id: The file_id of the tree reference
         :param path: The path of the file_id in the tree
         :return: A branch associated with the file_id
@@ -1870,7 +1864,7 @@
 
 
 class ChangeBranchTipParams(object):
-    """Object holding parameters passed to `*_change_branch_tip` hooks.
+    """Object holding parameters passed to *_change_branch_tip hooks.
 
     There are 5 fields that hooks may wish to access:
 
@@ -1908,7 +1902,7 @@
 
 
 class BranchInitHookParams(object):
-    """Object holding parameters passed to `*_branch_init` hooks.
+    """Object holding parameters passed to *_branch_init hooks.
 
     There are 4 fields that hooks may wish to access:
 
@@ -1948,7 +1942,7 @@
 
 
 class SwitchHookParams(object):
-    """Object holding parameters passed to `*_switch` hooks.
+    """Object holding parameters passed to *_switch hooks.
 
     There are 4 fields that hooks may wish to access:
 
@@ -2942,11 +2936,7 @@
         # you can always ask for the URL; but you might not be able to use it
         # if the repo can't support stacking.
         ## self._check_stackable_repo()
-        # stacked_on_location is only ever defined in branch.conf, so don't
-        # waste effort reading the whole stack of config files.
-        config = self.get_config()._get_branch_data_config()
-        stacked_url = self._get_config_location('stacked_on_location',
-            config=config)
+        stacked_url = self._get_config_location('stacked_on_location')
         if stacked_url is None:
             raise errors.NotStacked(self)
         return stacked_url
@@ -3174,6 +3164,39 @@
         branch._transport.put_bytes('format', format.get_format_string())
 
 
+def _run_with_write_locked_target(target, callable, *args, **kwargs):
+    """Run ``callable(*args, **kwargs)``, write-locking target for the
+    duration.
+
+    _run_with_write_locked_target will attempt to release the lock it acquires.
+
+    If an exception is raised by callable, then that exception *will* be
+    propagated, even if the unlock attempt raises its own error.  Thus
+    _run_with_write_locked_target should be preferred to simply doing::
+
+        target.lock_write()
+        try:
+            return callable(*args, **kwargs)
+        finally:
+            target.unlock()
+
+    """
+    # This is very similar to bzrlib.decorators.needs_write_lock.  Perhaps they
+    # should share code?
+    target.lock_write()
+    try:
+        result = callable(*args, **kwargs)
+    except:
+        exc_info = sys.exc_info()
+        try:
+            target.unlock()
+        finally:
+            raise exc_info[0], exc_info[1], exc_info[2]
+    else:
+        target.unlock()
+        return result
+
+
 class InterBranch(InterObject):
     """This class represents operations taking place between two branches.
 
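For reference, a minimal sketch of how the helper added above is meant to be called, not part of the diff; 'a_branch', 'revid' and the mutator are illustrative assumptions only::

    def _bump_tip(branch, revno, revid):
        # Illustrative mutator; set_last_revision_info needs a write lock.
        branch.set_last_revision_info(revno, revid)

    # Write-locks a_branch for the duration of the call and unlocks it even
    # if _bump_tip raises; the original exception is re-raised in that case.
    _run_with_write_locked_target(a_branch, _bump_tip, a_branch, 42, revid)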
@@ -3225,11 +3248,10 @@
         raise NotImplementedError(self.copy_content_into)
 
     @needs_write_lock
-    def fetch(self, stop_revision=None, limit=None):
+    def fetch(self, stop_revision=None):
         """Fetch revisions.
 
         :param stop_revision: Last revision to fetch
-        :param limit: Optional rough limit of revisions to fetch
         """
         raise NotImplementedError(self.fetch)
 
@@ -3273,7 +3295,7 @@
             self.source.tags.merge_to(self.target.tags)
 
     @needs_write_lock
-    def fetch(self, stop_revision=None, limit=None):
+    def fetch(self, stop_revision=None):
         if self.target.base == self.source.base:
             return (0, [])
         self.source.lock_read()
@@ -3284,7 +3306,6 @@
             fetch_spec_factory.source_repo = self.source.repository
             fetch_spec_factory.target_repo = self.target.repository
             fetch_spec_factory.target_repo_kind = fetch.TargetRepoKinds.PREEXISTING
-            fetch_spec_factory.limit = limit
             fetch_spec = fetch_spec_factory.make_fetch_spec()
             return self.target.repository.fetch(self.source.repository,
                 fetch_spec=fetch_spec)
@@ -3369,21 +3390,23 @@
 
         This is the basic concrete implementation of push()
 
-        :param _override_hook_source_branch: If specified, run the hooks
-            passing this Branch as the source, rather than self.  This is for
-            use of RemoteBranch, where push is delegated to the underlying
-            vfs-based Branch.
+        :param _override_hook_source_branch: If specified, run
+        the hooks passing this Branch as the source, rather than self.
+        This is for use of RemoteBranch, where push is delegated to the
+        underlying vfs-based Branch.
         """
         if lossy:
             raise errors.LossyPushToSameVCS(self.source, self.target)
         # TODO: Public option to disable running hooks - should be trivial but
         # needs tests.
-
-        op = cleanup.OperationWithCleanups(self._push_with_bound_branches)
-        op.add_cleanup(self.source.lock_read().unlock)
-        op.add_cleanup(self.target.lock_write().unlock)
-        return op.run(overwrite, stop_revision,
-            _override_hook_source_branch=_override_hook_source_branch)
+        self.source.lock_read()
+        try:
+            return _run_with_write_locked_target(
+                self.target, self._push_with_bound_branches, overwrite,
+                stop_revision,
+                _override_hook_source_branch=_override_hook_source_branch)
+        finally:
+            self.source.unlock()
 
     def _basic_push(self, overwrite, stop_revision):
         """Basic implementation of push without bound branches or hooks.
@@ -3407,7 +3430,7 @@
         result.new_revno, result.new_revid = self.target.last_revision_info()
         return result
 
-    def _push_with_bound_branches(self, operation, overwrite, stop_revision,
+    def _push_with_bound_branches(self, overwrite, stop_revision,
             _override_hook_source_branch=None):
         """Push from source into target, and into target's master if any.
         """
@@ -3425,18 +3448,21 @@
             # be bound to itself? -- mbp 20070507
             master_branch = self.target.get_master_branch()
             master_branch.lock_write()
-            operation.add_cleanup(master_branch.unlock)
-            # push into the master from the source branch.
-            master_inter = InterBranch.get(self.source, master_branch)
-            master_inter._basic_push(overwrite, stop_revision)
-            # and push into the target branch from the source. Note that
-            # we push from the source branch again, because it's considered
-            # the highest bandwidth repository.
-            result = self._basic_push(overwrite, stop_revision)
-            result.master_branch = master_branch
-            result.local_branch = self.target
+            try:
+                # push into the master from the source branch.
+                master_inter = InterBranch.get(self.source, master_branch)
+                master_inter._basic_push(overwrite, stop_revision)
+                # and push into the target branch from the source. Note that
+                # we push from the source branch again, because it's considered
+                # the highest bandwidth repository.
+                result = self._basic_push(overwrite, stop_revision)
+                result.master_branch = master_branch
+                result.local_branch = self.target
+                _run_hooks()
+                return result
+            finally:
+                master_branch.unlock()
         else:
-            master_branch = None
             # no master branch
             result = self._basic_push(overwrite, stop_revision)
             # TODO: Why set master_branch and local_branch if there's no
@@ -3444,8 +3470,8 @@
             # 20070504
             result.master_branch = self.target
             result.local_branch = None
-        _run_hooks()
-        return result
+            _run_hooks()
+            return result
 
     def _pull(self, overwrite=False, stop_revision=None,
              possible_transports=None, _hook_master=None, run_hooks=True,