        direction='reverse',
        start_revision=None,
        end_revision=None,
    """Write out human-readable log of commits to this branch.

    This function is being retained for backwards compatibility but
    should not be extended with new parameters. Use the new Logger class
    instead, e.g. Logger(branch, rqst).show(lf), adding parameters to the
    make_log_request_dict function.

    :param lf: The LogFormatter object showing the output.

    :param specific_fileid: If not None, list only the commits affecting the
        specified file, rather than all commits.

    :param verbose: If True show added/changed/deleted/renamed files.

    :param direction: 'reverse' (default) is latest to earliest; 'forward' is
        earliest to latest.

    :param start_revision: If not None, only show revisions >= start_revision.

    :param end_revision: If not None, only show revisions <= end_revision.

    :param search: If not None, only show revisions with matching commit
        messages.

    :param limit: If set, shows only 'limit' revisions, all revisions are shown
        if None or 0.

    :param show_diff: If True, output a diff after each revision.
    """
    # Convert old-style parameters to new-style parameters
    if specific_fileid is not None:
        file_ids = [specific_fileid]
    else:
        file_ids = None
    delta_type = 'partial'
    diff_type = 'partial'

    # Build the request and execute it
    rqst = make_log_request_dict(direction=direction, specific_fileids=file_ids,
        start_revision=start_revision, end_revision=end_revision,
        limit=limit, message_search=search,
        delta_type=delta_type, diff_type=diff_type)
    Logger(branch, rqst).show(lf)
# Note: This needs to be kept in sync with the defaults in
# make_log_request_dict() below
_DEFAULT_REQUEST_PARAMS = {
    'direction': 'reverse',
    'generate_tags': True,
    'exclude_common_ancestry': False,
    '_match_using_deltas': True,
    }
def make_log_request_dict(direction='reverse', specific_fileids=None,
                          start_revision=None, end_revision=None, limit=None,
                          message_search=None, levels=1, generate_tags=True,
                          delta_type=None,
                          diff_type=None, _match_using_deltas=True,
                          exclude_common_ancestry=False,
                          signature=False):
    """Convenience function for making a logging request dictionary.

    Using this function may make code slightly safer by ensuring
    parameters have the correct names. It also provides a reference
    point for documenting the supported parameters.

    :param direction: 'reverse' (default) is latest to earliest;
        'forward' is earliest to latest.

    :param specific_fileids: If not None, only include revisions
        affecting the specified files, rather than all revisions.

    :param start_revision: If not None, only generate
        revisions >= start_revision.

    :param end_revision: If not None, only generate
        revisions <= end_revision.

    :param limit: If set, generate only 'limit' revisions, all revisions
        are shown if None or 0.

    :param message_search: If not None, only include revisions with
        matching commit messages.

    :param levels: the number of levels of revisions to
        generate; 1 for just the mainline; 0 for all levels.

    :param generate_tags: If True, include tags for matched revisions.

    :param delta_type: Either 'full', 'partial' or None.
        'full' means generate the complete delta - adds/deletes/modifies/etc;
        'partial' means filter the delta using specific_fileids;
        None means do not generate any delta.

    :param diff_type: Either 'full', 'partial' or None.
        'full' means generate the complete diff - adds/deletes/modifies/etc;
        'partial' means filter the diff using specific_fileids;
        None means do not generate any diff.

    :param _match_using_deltas: a private parameter controlling the
        algorithm used for matching specific_fileids. This parameter
        may be removed in the future so bzrlib client code should NOT
        use it.

    :param exclude_common_ancestry: Whether -rX..Y should be interpreted as a
        range operator or as a graph difference.

    :param signature: show digital signature information
    """
    return {
        'direction': direction,
        'specific_fileids': specific_fileids,
        'start_revision': start_revision,
        'end_revision': end_revision,
        'limit': limit,
        'message_search': message_search,
        'levels': levels,
        'generate_tags': generate_tags,
        'delta_type': delta_type,
        'diff_type': diff_type,
        'exclude_common_ancestry': exclude_common_ancestry,
        'signature': signature,
        # Add 'private' attributes for features that may be deprecated
        '_match_using_deltas': _match_using_deltas,
        }
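
# --- Illustrative sketch (not part of the original module) ------------------
# How a caller is expected to combine make_log_request_dict() with the Logger
# class defined below; 'branch' is assumed to be an open bzrlib Branch and
# 'lf' one of the LogFormatter implementations from this module.
def _example_new_style_logging(branch, lf):
    rqst = make_log_request_dict(direction='reverse', limit=5, levels=1,
                                 delta_type=None, diff_type=None)
    Logger(branch, rqst).show(lf)
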
def _apply_log_request_defaults(rqst):
    """Apply default values to a request dictionary."""
    result = _DEFAULT_REQUEST_PARAMS.copy()
    if rqst:
        result.update(rqst)
    return result
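
# --- Illustrative sketch (not part of the original module) ------------------
# What _apply_log_request_defaults() amounts to: unspecified keys fall back to
# _DEFAULT_REQUEST_PARAMS, while caller-supplied keys win.
def _example_apply_defaults():
    rqst = _apply_log_request_defaults({'direction': 'forward', 'limit': 3})
    assert rqst['direction'] == 'forward'       # caller-supplied value wins
    assert rqst['_match_using_deltas'] is True  # default preserved
    assert rqst['limit'] == 3
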
def format_signature_validity(rev_id, repo):
    """Get the signature validity.
    :param rev_id: revision id to validate
    :param repo: repository of revision
    :return: human readable string to print to log
    """
    from bzrlib import gpg

    gpg_strategy = gpg.GPGStrategy(None)
    result = repo.verify_revision(rev_id, gpg_strategy)
    if result[0] == gpg.SIGNATURE_VALID:
        return "valid signature from {0}".format(result[1])
    if result[0] == gpg.SIGNATURE_KEY_MISSING:
        return "unknown key {0}".format(result[1])
    if result[0] == gpg.SIGNATURE_NOT_VALID:
        return "invalid signature!"
    if result[0] == gpg.SIGNATURE_NOT_SIGNED:
        return "no signature"
class LogGenerator(object):
    """A generator of log revisions."""

    def iter_log_revisions(self):
        """Iterate over LogRevision objects.

        :return: An iterator yielding LogRevision objects.
        """
        raise NotImplementedError(self.iter_log_revisions)


class Logger(object):
    """An object that generates, formats and displays a log."""

    def __init__(self, branch, rqst):
        """Create a Logger.

        :param branch: the branch to log
        :param rqst: A dictionary specifying the query parameters.
            See make_log_request_dict() for supported values.
        """
        self.branch = branch
        self.rqst = _apply_log_request_defaults(rqst)
    def show(self, lf):
        """Display the log.

        :param lf: The LogFormatter object to send the output to.
        """
        if not isinstance(lf, LogFormatter):
            warn("not a LogFormatter instance: %r" % lf)

        self.branch.lock_read()
        try:
            if getattr(lf, 'begin_log', None):
                lf.begin_log()
            self._show_body(lf)
            if getattr(lf, 'end_log', None):
                lf.end_log()
        finally:
            self.branch.unlock()

    def _show_body(self, lf):
        """Show the main log output.

        Subclasses may wish to override this.
        """
        # Tweak the LogRequest based on what the LogFormatter can handle.
        # (There's no point generating stuff if the formatter can't display it.)
        rqst = self.rqst
        rqst['levels'] = lf.get_levels()
        if not getattr(lf, 'supports_tags', False):
            rqst['generate_tags'] = False
        if not getattr(lf, 'supports_delta', False):
            rqst['delta_type'] = None
        if not getattr(lf, 'supports_diff', False):
            rqst['diff_type'] = None
        if not getattr(lf, 'supports_signatures', False):
            rqst['signature'] = False

        # Find and print the interesting revisions
        generator = self._generator_factory(self.branch, rqst)
        for lr in generator.iter_log_revisions():
            lf.log_revision(lr)

    def _generator_factory(self, branch, rqst):
        """Make the LogGenerator object to use.

        Subclasses may wish to override this.
        """
        return _DefaultLogGenerator(branch, rqst)
class _StartNotLinearAncestor(Exception):
    """Raised when a start revision is not found walking left-hand history."""


class _DefaultLogGenerator(LogGenerator):
    """The default generator of log revisions."""

    def __init__(self, branch, rqst):
        self.branch = branch
        self.rqst = rqst
        if rqst.get('generate_tags') and branch.supports_tags():
            self.rev_tag_dict = branch.tags.get_reverse_tag_dict()
        else:
            self.rev_tag_dict = {}
    def iter_log_revisions(self):
        """Iterate over LogRevision objects.

        :return: An iterator yielding LogRevision objects.
        """
        rqst = self.rqst
        levels = rqst.get('levels')
        limit = rqst.get('limit')
        diff_type = rqst.get('diff_type')
        show_signature = rqst.get('signature')
        log_count = 0
        revision_iterator = self._create_log_revision_iterator()
        for revs in revision_iterator:
            for (rev_id, revno, merge_depth), rev, delta in revs:
                # 0 levels means show everything; merge_depth counts from 0
                if levels != 0 and merge_depth >= levels:
                    continue
                if diff_type is None:
                    diff = None
                else:
                    diff = self._format_diff(rev, rev_id, diff_type)
                if show_signature:
                    signature = format_signature_validity(rev_id,
                        self.branch.repository)
                else:
                    signature = None
                yield LogRevision(rev, revno, merge_depth, delta,
                    self.rev_tag_dict.get(rev_id), diff, signature)
                if limit:
                    log_count += 1
                    if log_count >= limit:
                        return
    def _format_diff(self, rev, rev_id, diff_type):
        repo = self.branch.repository
        if len(rev.parent_ids) == 0:
            ancestor_id = _mod_revision.NULL_REVISION
        else:
            ancestor_id = rev.parent_ids[0]
        tree_1 = repo.revision_tree(ancestor_id)
        tree_2 = repo.revision_tree(rev_id)
        file_ids = self.rqst.get('specific_fileids')
        if diff_type == 'partial' and file_ids is not None:
            specific_files = [tree_2.id2path(id) for id in file_ids]
        else:
            specific_files = None
        s = StringIO()
        path_encoding = get_diff_header_encoding()
        diff.show_diff_trees(tree_1, tree_2, s, specific_files, old_label='',
            new_label='', path_encoding=path_encoding)
        return s.getvalue()
    def _create_log_revision_iterator(self):
        """Create a revision iterator for log.

        :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
            delta).
        """
        self.start_rev_id, self.end_rev_id = _get_revision_limits(
            self.branch, self.rqst.get('start_revision'),
            self.rqst.get('end_revision'))
        if self.rqst.get('_match_using_deltas'):
            return self._log_revision_iterator_using_delta_matching()
        else:
            # We're using the per-file-graph algorithm. This scales really
            # well but only makes sense if there is a single file and it's
            # not a directory
            file_count = len(self.rqst.get('specific_fileids'))
            if file_count != 1:
                raise BzrError("illegal LogRequest: must match-using-deltas "
                    "when logging %d files" % file_count)
            return self._log_revision_iterator_using_per_file_graph()
    def _log_revision_iterator_using_delta_matching(self):
        # Get the base revisions, filtering by the revision range
        rqst = self.rqst
        generate_merge_revisions = rqst.get('levels') != 1
        delayed_graph_generation = not rqst.get('specific_fileids') and (
                rqst.get('limit') or self.start_rev_id or self.end_rev_id)
        view_revisions = _calc_view_revisions(
            self.branch, self.start_rev_id, self.end_rev_id,
            rqst.get('direction'),
            generate_merge_revisions=generate_merge_revisions,
            delayed_graph_generation=delayed_graph_generation,
            exclude_common_ancestry=rqst.get('exclude_common_ancestry'))

        # Apply the other filters
        return make_log_rev_iterator(self.branch, view_revisions,
            rqst.get('delta_type'), rqst.get('message_search'),
            file_ids=rqst.get('specific_fileids'),
            direction=rqst.get('direction'))
    def _log_revision_iterator_using_per_file_graph(self):
        # Get the base revisions, filtering by the revision range.
        # Note that we always generate the merge revisions because
        # filter_revisions_touching_file_id() requires them ...
        rqst = self.rqst
        view_revisions = _calc_view_revisions(
            self.branch, self.start_rev_id, self.end_rev_id,
            rqst.get('direction'), generate_merge_revisions=True,
            exclude_common_ancestry=rqst.get('exclude_common_ancestry'))
        if not isinstance(view_revisions, list):
            view_revisions = list(view_revisions)
        view_revisions = _filter_revisions_touching_file_id(self.branch,
            rqst.get('specific_fileids')[0], view_revisions,
            include_merges=rqst.get('levels') != 1)
        return make_log_rev_iterator(self.branch, view_revisions,
            rqst.get('delta_type'), rqst.get('message_search'))
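
# --- Illustrative sketch (not part of the original module) ------------------
# Consuming _DefaultLogGenerator directly instead of going through a
# LogFormatter; 'branch' is assumed to be a read-locked bzrlib Branch.
def _example_collect_summaries(branch):
    rqst = make_log_request_dict(limit=10)
    generator = _DefaultLogGenerator(branch, rqst)
    return [(lr.revno, lr.rev.get_summary())
            for lr in generator.iter_log_revisions()]
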
532
def _calc_view_revisions(branch, start_rev_id, end_rev_id, direction,
533
generate_merge_revisions,
534
delayed_graph_generation=False,
535
exclude_common_ancestry=False,
537
"""Calculate the revisions to view.
539
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
540
a list of the same tuples.
542
if (exclude_common_ancestry and start_rev_id == end_rev_id):
543
raise errors.BzrCommandError(
544
'--exclude-common-ancestry requires two different revisions')
545
if direction not in ('reverse', 'forward'):
546
raise ValueError('invalid direction %r' % direction)
547
br_revno, br_rev_id = branch.last_revision_info()
551
if (end_rev_id and start_rev_id == end_rev_id
552
and (not generate_merge_revisions
553
or not _has_merges(branch, end_rev_id))):
554
# If a single revision is requested, check we can handle it
555
iter_revs = _generate_one_revision(branch, end_rev_id, br_rev_id,
557
elif not generate_merge_revisions:
558
# If we only want to see linear revisions, we can iterate ...
559
iter_revs = _generate_flat_revisions(branch, start_rev_id, end_rev_id,
560
direction, exclude_common_ancestry)
561
if direction == 'forward':
562
iter_revs = reversed(iter_revs)
564
iter_revs = _generate_all_revisions(branch, start_rev_id, end_rev_id,
565
direction, delayed_graph_generation,
566
exclude_common_ancestry)
567
if direction == 'forward':
568
iter_revs = _rebase_merge_depth(reverse_by_depth(list(iter_revs)))
572
def _generate_one_revision(branch, rev_id, br_rev_id, br_revno):
573
if rev_id == br_rev_id:
575
return [(br_rev_id, br_revno, 0)]
577
revno_str = _compute_revno_str(branch, rev_id)
578
return [(rev_id, revno_str, 0)]
581
def _generate_flat_revisions(branch, start_rev_id, end_rev_id, direction,
582
exclude_common_ancestry=False):
583
result = _linear_view_revisions(
584
branch, start_rev_id, end_rev_id,
585
exclude_common_ancestry=exclude_common_ancestry)
586
# If a start limit was given and it's not obviously an
587
# ancestor of the end limit, check it before outputting anything
588
if direction == 'forward' or (start_rev_id
589
and not _is_obvious_ancestor(branch, start_rev_id, end_rev_id)):
591
result = list(result)
592
except _StartNotLinearAncestor:
593
raise errors.BzrCommandError('Start revision not found in'
594
' left-hand history of end revision.')
598
def _generate_all_revisions(branch, start_rev_id, end_rev_id, direction,
599
delayed_graph_generation,
600
exclude_common_ancestry=False):
601
# On large trees, generating the merge graph can take 30-60 seconds
602
# so we delay doing it until a merge is detected, incrementally
603
# returning initial (non-merge) revisions while we can.
605
# The above is only true for old formats (<= 0.92), for newer formats, a
606
# couple of seconds only should be needed to load the whole graph and the
607
# other graph operations needed are even faster than that -- vila 100201
608
initial_revisions = []
609
if delayed_graph_generation:
611
for rev_id, revno, depth in _linear_view_revisions(
612
branch, start_rev_id, end_rev_id, exclude_common_ancestry):
613
if _has_merges(branch, rev_id):
614
# The end_rev_id can be nested down somewhere. We need an
615
# explicit ancestry check. There is an ambiguity here as we
616
# may not raise _StartNotLinearAncestor for a revision that
617
# is an ancestor but not a *linear* one. But since we have
618
# loaded the graph to do the check (or calculate a dotted
619
# revno), we may as well accept to show the log... We need
620
# the check only if start_rev_id is not None as all
621
# revisions have _mod_revision.NULL_REVISION as an ancestor
623
graph = branch.repository.get_graph()
624
if (start_rev_id is not None
625
and not graph.is_ancestor(start_rev_id, end_rev_id)):
626
raise _StartNotLinearAncestor()
627
# Since we collected the revisions so far, we need to
632
initial_revisions.append((rev_id, revno, depth))
634
# No merged revisions found
635
return initial_revisions
636
except _StartNotLinearAncestor:
637
# A merge was never detected so the lower revision limit can't
638
# be nested down somewhere
639
raise errors.BzrCommandError('Start revision not found in'
640
' history of end revision.')
642
# We exited the loop above because we encountered a revision with merges; from
# this revision on, we need to switch to _graph_view_revisions.
645
# A log including nested merges is required. If the direction is reverse,
646
# we rebase the initial merge depths so that the development line is
647
# shown naturally, i.e. just like it is for linear logging. We can easily
648
# make forward the exact opposite display, but showing the merge revisions
649
# indented at the end seems slightly nicer in that case.
650
view_revisions = chain(iter(initial_revisions),
651
_graph_view_revisions(branch, start_rev_id, end_rev_id,
652
rebase_initial_depths=(direction == 'reverse'),
653
exclude_common_ancestry=exclude_common_ancestry))
654
return view_revisions
657
def _has_merges(branch, rev_id):
658
"""Does a revision have multiple parents or not?"""
659
parents = branch.repository.get_parent_map([rev_id]).get(rev_id, [])
660
return len(parents) > 1
663
def _compute_revno_str(branch, rev_id):
664
"""Compute the revno string from a rev_id.
666
:return: The revno string, or None if the revision is not in the supplied branch.
670
revno = branch.revision_id_to_dotted_revno(rev_id)
671
except errors.NoSuchRevision:
672
# The revision must be outside of this branch
675
return '.'.join(str(n) for n in revno)
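
# --- Illustrative sketch (not part of the original module) ------------------
# The join above turns a dotted revno tuple such as (1267, 1, 2) into
# '1267.1.2', and a plain mainline revno (1267,) into '1267'.
def _example_dotted_revno_strings():
    assert '.'.join(str(n) for n in (1267, 1, 2)) == '1267.1.2'
    assert '.'.join(str(n) for n in (1267,)) == '1267'
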
678
def _is_obvious_ancestor(branch, start_rev_id, end_rev_id):
679
"""Is start_rev_id an obvious ancestor of end_rev_id?"""
680
if start_rev_id and end_rev_id:
682
start_dotted = branch.revision_id_to_dotted_revno(start_rev_id)
683
end_dotted = branch.revision_id_to_dotted_revno(end_rev_id)
684
except errors.NoSuchRevision:
685
# one or both is not in the branch; not obvious
687
if len(start_dotted) == 1 and len(end_dotted) == 1:
689
return start_dotted[0] <= end_dotted[0]
690
elif (len(start_dotted) == 3 and len(end_dotted) == 3 and
691
start_dotted[0:1] == end_dotted[0:1]):
692
# both on same development line
693
return start_dotted[2] <= end_dotted[2]
697
# if either start or end is not specified then we use either the first or
698
# the last revision and *they* are obvious ancestors.
702
def _linear_view_revisions(branch, start_rev_id, end_rev_id,
703
exclude_common_ancestry=False):
704
"""Calculate a sequence of revisions to view, newest to oldest.
706
:param start_rev_id: the lower revision-id
707
:param end_rev_id: the upper revision-id
708
:param exclude_common_ancestry: Whether the start_rev_id should be part of
709
the iterated revisions.
710
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
711
:raises _StartNotLinearAncestor: if a start_rev_id is specified but
712
is not found walking the left-hand history
714
br_revno, br_rev_id = branch.last_revision_info()
715
repo = branch.repository
716
graph = repo.get_graph()
717
if start_rev_id is None and end_rev_id is None:
719
for revision_id in graph.iter_lefthand_ancestry(br_rev_id,
720
(_mod_revision.NULL_REVISION,)):
721
yield revision_id, str(cur_revno), 0
724
if end_rev_id is None:
725
end_rev_id = br_rev_id
726
found_start = start_rev_id is None
727
for revision_id in graph.iter_lefthand_ancestry(end_rev_id,
728
(_mod_revision.NULL_REVISION,)):
729
revno_str = _compute_revno_str(branch, revision_id)
730
if not found_start and revision_id == start_rev_id:
731
if not exclude_common_ancestry:
732
yield revision_id, revno_str, 0
736
yield revision_id, revno_str, 0
739
raise _StartNotLinearAncestor()
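
# --- Illustrative sketch (not part of the original module) ------------------
# The left-hand ('first parent') walk that iter_lefthand_ancestry performs,
# spelled out over a plain {rev_id: parents} map with hypothetical ids.
def _example_lefthand_walk():
    parent_map = {
        'rev-3': ('rev-2', 'merged-x'),  # merge: extra parents are ignored
        'rev-2': ('rev-1',),
        'rev-1': (),
    }
    history, rev_id = [], 'rev-3'
    while rev_id is not None:
        history.append(rev_id)
        parents = parent_map[rev_id]
        rev_id = parents[0] if parents else None
    assert history == ['rev-3', 'rev-2', 'rev-1']
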
742
def _graph_view_revisions(branch, start_rev_id, end_rev_id,
743
rebase_initial_depths=True,
744
exclude_common_ancestry=False):
745
"""Calculate revisions to view including merges, newest to oldest.
747
:param branch: the branch
748
:param start_rev_id: the lower revision-id
749
:param end_rev_id: the upper revision-id
750
:param rebase_initial_depths: should depths be rebased until a mainline revision is found?
752
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
754
if exclude_common_ancestry:
755
stop_rule = 'with-merges-without-common-ancestry'
757
stop_rule = 'with-merges'
758
view_revisions = branch.iter_merge_sorted_revisions(
759
start_revision_id=end_rev_id, stop_revision_id=start_rev_id,
761
if not rebase_initial_depths:
762
for (rev_id, merge_depth, revno, end_of_merge
764
yield rev_id, '.'.join(map(str, revno)), merge_depth
766
# We're following a development line starting at a merged revision.
767
# We need to adjust depths down by the initial depth until we find
768
# a depth less than it. Then we use that depth as the adjustment.
769
# If and when we reach the mainline, depth adjustment ends.
770
depth_adjustment = None
771
for (rev_id, merge_depth, revno, end_of_merge
773
if depth_adjustment is None:
774
depth_adjustment = merge_depth
776
if merge_depth < depth_adjustment:
777
# From now on we reduce the depth adjustment; this can be
# surprising for users. The alternative requires two passes
# which breaks the fast display of the first revision
781
depth_adjustment = merge_depth
782
merge_depth -= depth_adjustment
783
yield rev_id, '.'.join(map(str, revno)), merge_depth
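
# --- Illustrative sketch (not part of the original module) ------------------
# The depth rebasing performed above: when the walk starts at a merged
# revision (initial depth 2 here), depths are shifted down so the followed
# line reads like a mainline, until a shallower revision resets the
# adjustment.
def _example_depth_adjustment():
    adjusted, depth_adjustment = [], None
    for merge_depth in [2, 3, 3, 2, 1]:
        if depth_adjustment is None:
            depth_adjustment = merge_depth
        if depth_adjustment:
            if merge_depth < depth_adjustment:
                depth_adjustment = merge_depth
            merge_depth -= depth_adjustment
        adjusted.append(merge_depth)
    assert adjusted == [0, 1, 1, 0, 0]
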
786
@deprecated_function(deprecated_in((2, 2, 0)))
787
def calculate_view_revisions(branch, start_revision, end_revision, direction,
788
specific_fileid, generate_merge_revisions):
789
"""Calculate the revisions to view.
791
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
792
a list of the same tuples.
794
start_rev_id, end_rev_id = _get_revision_limits(branch, start_revision,
796
view_revisions = list(_calc_view_revisions(branch, start_rev_id, end_rev_id,
797
direction, generate_merge_revisions or specific_fileid))
if specific_fileid:
799
view_revisions = _filter_revisions_touching_file_id(branch,
800
specific_fileid, view_revisions,
801
include_merges=generate_merge_revisions)
802
return _rebase_merge_depth(view_revisions)
805
def _rebase_merge_depth(view_revisions):
    """Adjust depths upwards so the top level is 0."""
    # If either the first or last revision has a merge_depth of 0, we're done
    if view_revisions and view_revisions[0][2] and view_revisions[-1][2]:
        min_depth = min([d for r, n, d in view_revisions])
        if min_depth != 0:
            view_revisions = [(r, n, d-min_depth) for r, n, d in view_revisions]
    return view_revisions
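
# --- Illustrative sketch (not part of the original module) ------------------
# _rebase_merge_depth() in action: a view containing only merged revisions
# (minimum depth 1, hypothetical ids and revnos) is shifted up to start at 0.
def _example_rebase_merge_depth():
    view = [('rev-a', '2.1.1', 1), ('rev-b', '2.1.1.1.1', 2),
            ('rev-c', '2.1.2', 1)]
    assert _rebase_merge_depth(view) == [
        ('rev-a', '2.1.1', 0), ('rev-b', '2.1.1.1.1', 1), ('rev-c', '2.1.2', 0)]
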
815
def make_log_rev_iterator(branch, view_revisions, generate_delta, search,
816
file_ids=None, direction='reverse'):
817
"""Create a revision iterator for log.
819
:param branch: The branch being logged.
820
:param view_revisions: The revisions being viewed.
821
:param generate_delta: Whether to generate a delta for each revision.
822
Permitted values are None, 'full' and 'partial'.
823
:param search: A user text search string.
824
:param file_ids: If non-empty, only revisions matching one or more of
825
the file-ids are to be kept.
826
:param direction: the direction in which view_revisions is sorted
827
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
830
# Convert view_revisions into (view, None, None) groups to fit with
831
# the standard interface here.
832
if type(view_revisions) == list:
833
# A single batch conversion is faster than many incremental ones.
834
# As we have all the data, do a batch conversion.
835
nones = [None] * len(view_revisions)
836
log_rev_iterator = iter([zip(view_revisions, nones, nones)])
839
for view in view_revisions:
840
yield (view, None, None)
841
log_rev_iterator = iter([_convert()])
842
for adapter in log_adapters:
843
# It would be nicer if log adapters were first class objects
844
# with custom parameters. This will do for now. IGC 20090127
845
if adapter == _make_delta_filter:
846
log_rev_iterator = adapter(branch, generate_delta,
847
search, log_rev_iterator, file_ids, direction)
849
log_rev_iterator = adapter(branch, generate_delta,
850
search, log_rev_iterator)
851
return log_rev_iterator
854
def _make_search_filter(branch, generate_delta, search, log_rev_iterator):
855
"""Create a filtered iterator of log_rev_iterator matching on a regex.
857
:param branch: The branch being logged.
858
:param generate_delta: Whether to generate a delta for each revision.
859
:param search: A user text search string.
860
:param log_rev_iterator: An input iterator containing all revisions that
861
could be displayed, in lists.
862
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
866
return log_rev_iterator
867
searchRE = lazy_regex.lazy_compile(search, re.IGNORECASE)
868
return _filter_message_re(searchRE, log_rev_iterator)
871
def _filter_message_re(searchRE, log_rev_iterator):
872
for revs in log_rev_iterator:
874
for (rev_id, revno, merge_depth), rev, delta in revs:
875
if searchRE.search(rev.message):
876
new_revs.append(((rev_id, revno, merge_depth), rev, delta))
880
def _make_delta_filter(branch, generate_delta, search, log_rev_iterator,
881
fileids=None, direction='reverse'):
882
"""Add revision deltas to a log iterator if needed.
884
:param branch: The branch being logged.
885
:param generate_delta: Whether to generate a delta for each revision.
886
Permitted values are None, 'full' and 'partial'.
887
:param search: A user text search string.
888
:param log_rev_iterator: An input iterator containing all revisions that
889
could be displayed, in lists.
890
:param fileids: If non-empty, only revisions matching one or more of
891
the file-ids are to be kept.
892
:param direction: the direction in which view_revisions is sorted
893
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
896
if not generate_delta and not fileids:
897
return log_rev_iterator
898
return _generate_deltas(branch.repository, log_rev_iterator,
899
generate_delta, fileids, direction)
902
def _generate_deltas(repository, log_rev_iterator, delta_type, fileids,
904
"""Create deltas for each batch of revisions in log_rev_iterator.
906
If we're only generating deltas for the sake of filtering against
907
file-ids, we stop generating deltas once all file-ids reach the
908
appropriate life-cycle point. If we're receiving data newest to
909
oldest, then that life-cycle point is 'add', otherwise it's 'remove'.
911
check_fileids = fileids is not None and len(fileids) > 0
913
fileid_set = set(fileids)
914
if direction == 'reverse':
920
for revs in log_rev_iterator:
921
# If we were matching against fileids and we've run out,
922
# there's nothing left to do
923
if check_fileids and not fileid_set:
925
revisions = [rev[1] for rev in revs]
927
if delta_type == 'full' and not check_fileids:
928
deltas = repository.get_deltas_for_revisions(revisions)
929
for rev, delta in izip(revs, deltas):
930
new_revs.append((rev[0], rev[1], delta))
932
deltas = repository.get_deltas_for_revisions(revisions, fileid_set)
933
for rev, delta in izip(revs, deltas):
935
if delta is None or not delta.has_changed():
938
_update_fileids(delta, fileid_set, stop_on)
939
if delta_type is None:
941
elif delta_type == 'full':
942
# If the file matches all the time, rebuilding
943
# a full delta like this in addition to a partial
944
# one could be slow. However, it's likely that
945
# most revisions won't get this far, making it
946
# faster to filter on the partial deltas and
947
# build the occasional full delta than always
948
# building full deltas and filtering those.
950
delta = repository.get_revision_delta(rev_id)
951
new_revs.append((rev[0], rev[1], delta))
955
def _update_fileids(delta, fileids, stop_on):
    """Update the set of file-ids to search based on file lifecycle events.

    :param fileids: a set of fileids to update
    :param stop_on: either 'add' or 'delete' - take file-ids out of the
        fileids set once their add or delete entry is detected respectively
    """
    if stop_on == 'add':
        for item in delta.added:
            if item[1] in fileids:
                fileids.remove(item[1])
    elif stop_on == 'delete':
        for item in delta.removed:
            if item[1] in fileids:
                fileids.remove(item[1])
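
# --- Illustrative sketch (not part of the original module) ------------------
# How _update_fileids() narrows the search set: walking newest to oldest
# (stop_on='add'), a file-id is dropped once its 'added' entry is seen,
# because no older revision can touch it. Uses a stand-in delta object.
class _ExampleDelta(object):
    def __init__(self, added=(), removed=()):
        self.added = list(added)      # items are (path, file_id, kind)
        self.removed = list(removed)

def _example_update_fileids():
    fileids = set(['file-a', 'file-b'])
    delta = _ExampleDelta(added=[('a.txt', 'file-a', 'file')])
    _update_fileids(delta, fileids, stop_on='add')
    assert fileids == set(['file-b'])
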
972
def _make_revision_objects(branch, generate_delta, search, log_rev_iterator):
973
"""Extract revision objects from the repository
975
:param branch: The branch being logged.
976
:param generate_delta: Whether to generate a delta for each revision.
977
:param search: A user text search string.
978
:param log_rev_iterator: An input iterator containing all revisions that
979
could be displayed, in lists.
980
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
983
repository = branch.repository
984
for revs in log_rev_iterator:
985
# r = revision_id, n = revno, d = merge depth
986
revision_ids = [view[0] for view, _, _ in revs]
987
revisions = repository.get_revisions(revision_ids)
988
revs = [(rev[0], revision, rev[2]) for rev, revision in
989
izip(revs, revisions)]
993
def _make_batch_filter(branch, generate_delta, search, log_rev_iterator):
994
"""Group up a single large batch into smaller ones.
996
:param branch: The branch being logged.
997
:param generate_delta: Whether to generate a delta for each revision.
998
:param search: A user text search string.
999
:param log_rev_iterator: An input iterator containing all revisions that
1000
could be displayed, in lists.
1001
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
1004
repository = branch.repository
1006
for batch in log_rev_iterator:
1009
step = [detail for _, detail in zip(range(num), batch)]
1013
num = min(int(num * 1.5), 200)
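
# --- Illustrative sketch (not part of the original module) ------------------
# The batch sizes produced by the growth rule above (the starting size here
# is chosen only for illustration): small batches first for fast initial
# output, then growing by 1.5x and capped at 200.
def _example_batch_growth(num=9, batches=6):
    sizes = []
    for _ in range(batches):
        sizes.append(num)
        num = min(int(num * 1.5), 200)
    return sizes  # [9, 13, 19, 28, 42, 63] for a starting size of 9
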
1016
def _get_revision_limits(branch, start_revision, end_revision):
1017
"""Get and check revision limits.
1019
:param branch: The branch containing the revisions.
1021
:param start_revision: The first revision to be logged.
1022
For backwards compatibility this may be a mainline integer revno,
1023
but for merge revision support a RevisionInfo is expected.
1025
:param end_revision: The last revision to be logged.
1026
For backwards compatibility this may be a mainline integer revno,
1027
but for merge revision support a RevisionInfo is expected.
1029
:return: (start_rev_id, end_rev_id) tuple.
1031
branch_revno, branch_rev_id = branch.last_revision_info()
1033
if start_revision is None:
1036
if isinstance(start_revision, revisionspec.RevisionInfo):
1037
start_rev_id = start_revision.rev_id
1038
start_revno = start_revision.revno or 1
1040
branch.check_real_revno(start_revision)
1041
start_revno = start_revision
1042
start_rev_id = branch.get_rev_id(start_revno)
1045
if end_revision is None:
1046
end_revno = branch_revno
1048
if isinstance(end_revision, revisionspec.RevisionInfo):
1049
end_rev_id = end_revision.rev_id
1050
end_revno = end_revision.revno or branch_revno
1052
branch.check_real_revno(end_revision)
1053
end_revno = end_revision
1054
end_rev_id = branch.get_rev_id(end_revno)
1056
if branch_revno != 0:
1057
if (start_rev_id == _mod_revision.NULL_REVISION
1058
or end_rev_id == _mod_revision.NULL_REVISION):
1059
raise errors.BzrCommandError('Logging revision 0 is invalid.')
1060
if start_revno > end_revno:
1061
raise errors.BzrCommandError("Start revision must be older than "
1062
"the end revision.")
1063
return (start_rev_id, end_rev_id)
1066
def _get_mainline_revs(branch, start_revision, end_revision):
1067
"""Get the mainline revisions from the branch.
1069
Generates the list of mainline revisions for the branch.
1071
:param branch: The branch containing the revisions.
1073
:param start_revision: The first revision to be logged.
1074
For backwards compatibility this may be a mainline integer revno,
1075
but for merge revision support a RevisionInfo is expected.
1077
:param end_revision: The last revision to be logged.
1078
For backwards compatibility this may be a mainline integer revno,
1079
but for merge revision support a RevisionInfo is expected.
1081
:return: A (mainline_revs, rev_nos, start_rev_id, end_rev_id) tuple.
1083
branch_revno, branch_last_revision = branch.last_revision_info()
1084
if branch_revno == 0:
1085
return None, None, None, None
1087
# For mainline generation, map start_revision and end_revision to
1088
# mainline revnos. If the revision is not on the mainline choose the
1089
# appropriate extreme of the mainline instead - the extra will be
1091
# Also map the revisions to rev_ids, to be used in the later filtering
1094
if start_revision is None:
1097
if isinstance(start_revision, revisionspec.RevisionInfo):
1098
start_rev_id = start_revision.rev_id
1099
start_revno = start_revision.revno or 1
1101
branch.check_real_revno(start_revision)
1102
start_revno = start_revision
1105
if end_revision is None:
1106
end_revno = branch_revno
1108
if isinstance(end_revision, revisionspec.RevisionInfo):
1109
end_rev_id = end_revision.rev_id
1110
end_revno = end_revision.revno or branch_revno
1112
branch.check_real_revno(end_revision)
1113
end_revno = end_revision
1115
if ((start_rev_id == _mod_revision.NULL_REVISION)
1116
or (end_rev_id == _mod_revision.NULL_REVISION)):
1117
raise errors.BzrCommandError('Logging revision 0 is invalid.')
1118
if start_revno > end_revno:
1119
raise errors.BzrCommandError("Start revision must be older than "
1120
"the end revision.")
1122
if end_revno < start_revno:
1123
return None, None, None, None
1124
cur_revno = branch_revno
1127
graph = branch.repository.get_graph()
1128
for revision_id in graph.iter_lefthand_ancestry(
1129
branch_last_revision, (_mod_revision.NULL_REVISION,)):
1130
if cur_revno < start_revno:
1131
# We have gone far enough, but we always add 1 more revision
1132
rev_nos[revision_id] = cur_revno
1133
mainline_revs.append(revision_id)
1135
if cur_revno <= end_revno:
1136
rev_nos[revision_id] = cur_revno
1137
mainline_revs.append(revision_id)
1140
# We walked off the edge of all revisions, so we add a 'None' marker
1141
mainline_revs.append(None)
1143
mainline_revs.reverse()
1145
# override the mainline to look like the revision history.
1146
return mainline_revs, rev_nos, start_rev_id, end_rev_id
1149
@deprecated_function(deprecated_in((2, 2, 0)))
1150
def _filter_revision_range(view_revisions, start_rev_id, end_rev_id):
1151
"""Filter view_revisions based on revision ranges.
1153
:param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
1154
tuples to be filtered.
1156
:param start_rev_id: If not None, specifies the first revision to be logged.
    If None then all revisions up to the end_rev_id are logged.

:param end_rev_id: If not None, specifies the last revision to be logged.
    If None then all revisions up to the end of the log are logged.
1162
:return: The filtered view_revisions.
1164
if start_rev_id or end_rev_id:
1165
revision_ids = [r for r, n, d in view_revisions]
1167
start_index = revision_ids.index(start_rev_id)
1170
if start_rev_id == end_rev_id:
1171
end_index = start_index
1174
end_index = revision_ids.index(end_rev_id)
1176
end_index = len(view_revisions) - 1
1177
# To include the revisions merged into the last revision,
1178
# extend end_rev_id down to, but not including, the next rev
1179
# with the same or lesser merge_depth
1180
end_merge_depth = view_revisions[end_index][2]
1182
for index in xrange(end_index+1, len(view_revisions)+1):
1183
if view_revisions[index][2] <= end_merge_depth:
1184
end_index = index - 1
1187
# if the search falls off the end then log to the end as well
1188
end_index = len(view_revisions) - 1
1189
view_revisions = view_revisions[start_index:end_index+1]
1190
return view_revisions
1193
def _filter_revisions_touching_file_id(branch, file_id, view_revisions,
1194
include_merges=True):
1195
r"""Return the list of revision ids which touch a given file id.
1197
The function filters view_revisions and returns a subset.
1198
This includes the revisions which directly change the file id,
1199
and the revisions which merge these changes. So if the
1212
And 'C' changes a file, then both C and D will be returned. F will not be
1213
returned even though it brings the changes to C into the branch starting
1214
with E. (Note that if we were using F as the tip instead of G, then we
1217
This will also be restricted based on a subset of the mainline.
1219
:param branch: The branch where we can get text revision information.
1221
:param file_id: Filter out revisions that do not touch file_id.
1223
:param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
1224
tuples. This is the list of revisions which will be filtered. It is
1225
assumed that view_revisions is in merge_sort order (i.e. newest
1228
:param include_merges: include merge revisions in the result or not
1230
:return: A list of (revision_id, dotted_revno, merge_depth) tuples.
1232
# Lookup all possible text keys to determine which ones actually modified
1234
graph = branch.repository.get_file_graph()
1235
get_parent_map = graph.get_parent_map
1236
text_keys = [(file_id, rev_id) for rev_id, revno, depth in view_revisions]
1238
# Looking up keys in batches of 1000 can cut the time in half, as well as
1239
# memory consumption. GraphIndex *does* like to look for a few keys in
1240
# parallel, it just doesn't like looking for *lots* of keys in parallel.
1241
# TODO: This code needs to be re-evaluated periodically as we tune the
1242
# indexing layer. We might consider passing in hints as to the known
1243
# access pattern (sparse/clustered, high success rate/low success
1244
# rate). This particular access is clustered with a low success rate.
1245
modified_text_revisions = set()
1247
for start in xrange(0, len(text_keys), chunk_size):
1248
next_keys = text_keys[start:start + chunk_size]
1249
# Only keep the revision_id portion of the key
1250
modified_text_revisions.update(
1251
[k[1] for k in get_parent_map(next_keys)])
1252
del text_keys, next_keys
1255
# Track what revisions will merge the current revision, replace entries
1256
# with 'None' when they have been added to result
1257
current_merge_stack = [None]
1258
for info in view_revisions:
1259
rev_id, revno, depth = info
1260
if depth == len(current_merge_stack):
1261
current_merge_stack.append(info)
1263
del current_merge_stack[depth + 1:]
1264
current_merge_stack[-1] = info
1266
if rev_id in modified_text_revisions:
1267
# This needs to be logged, along with the extra revisions
1268
for idx in xrange(len(current_merge_stack)):
1269
node = current_merge_stack[idx]
1270
if node is not None:
1271
if include_merges or node[2] == 0:
1273
current_merge_stack[idx] = None
1277
@deprecated_function(deprecated_in((2, 2, 0)))
1278
def get_view_revisions(mainline_revs, rev_nos, branch, direction,
1279
include_merges=True):
1280
"""Produce an iterator of revisions to show
1281
:return: an iterator of (revision_id, revno, merge_depth)
1282
(if there is no revno for a revision, None is supplied)
1284
if not include_merges:
1285
revision_ids = mainline_revs[1:]
1286
if direction == 'reverse':
1287
revision_ids.reverse()
1288
for revision_id in revision_ids:
1289
yield revision_id, str(rev_nos[revision_id]), 0
1291
graph = branch.repository.get_graph()
1292
# This asks for all mainline revisions, which means we only have to spider
1293
# sideways, rather than depth history. That said, it's still size-of-history
1294
# and should be addressed.
1295
# mainline_revisions always includes an extra revision at the beginning, so
1297
parent_map = dict(((key, value) for key, value in
1298
graph.iter_ancestry(mainline_revs[1:]) if value is not None))
1299
# filter out ghosts; merge_sort errors on ghosts.
1300
rev_graph = _mod_repository._strip_NULL_ghosts(parent_map)
1301
merge_sorted_revisions = tsort.merge_sort(
1305
generate_revno=True)
1307
if direction == 'forward':
1308
# forward means oldest first.
1309
merge_sorted_revisions = reverse_by_depth(merge_sorted_revisions)
1310
elif direction != 'reverse':
1311
raise ValueError('invalid direction %r' % direction)
1313
for (sequence, rev_id, merge_depth, revno, end_of_merge
1314
) in merge_sorted_revisions:
1315
yield rev_id, '.'.join(map(str, revno)), merge_depth
1318
def reverse_by_depth(merge_sorted_revisions, _depth=0):
1319
"""Reverse revisions by depth.
1321
Revisions with a different depth are sorted as a group with the previous
1322
revision of that depth. There may be no topological justification for this,
1323
but it looks much nicer.
1325
# Add a fake revision at start so that we can always attach sub revisions
1326
merge_sorted_revisions = [(None, None, _depth)] + merge_sorted_revisions
1328
for val in merge_sorted_revisions:
1329
if val[2] == _depth:
1330
# Each revision at the current depth becomes a chunk grouping all
1331
# higher depth revisions.
1332
zd_revisions.append([val])
1334
zd_revisions[-1].append(val)
1335
for revisions in zd_revisions:
1336
if len(revisions) > 1:
1337
# We have higher depth revisions, let's reverse them locally
1338
revisions[1:] = reverse_by_depth(revisions[1:], _depth + 1)
1339
zd_revisions.reverse()
1341
for chunk in zd_revisions:
1342
result.extend(chunk)
1344
# Top level call, get rid of the fake revisions that have been added
1345
result = [r for r in result if r[0] is not None and r[1] is not None]
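
# --- Illustrative sketch (not part of the original module) ------------------
# What reverse_by_depth() produces for a 'forward' display (hypothetical
# revision names): top-level revisions are reversed, but each keeps its own
# merged revisions grouped directly after it.
def _example_reverse_by_depth():
    view = [('C', '3', 0), ('B', '2', 0), ('b1', '1.1.1', 1), ('A', '1', 0)]
    assert reverse_by_depth(view) == [
        ('A', '1', 0), ('B', '2', 0), ('b1', '1.1.1', 1), ('C', '3', 0)]
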
1349
class LogRevision(object):
1350
"""A revision to be logged (by LogFormatter.log_revision).
1352
A simple wrapper for the attributes of a revision to be logged.
1353
The attributes may or may not be populated, as determined by the
1354
logging options and the log formatter capabilities.
1357
def __init__(self, rev=None, revno=None, merge_depth=0, delta=None,
1358
tags=None, diff=None, signature=None):
1363
self.revno = str(revno)
1364
self.merge_depth = merge_depth
1368
self.signature = signature
class LogFormatter(object):
1372
"""Abstract class to display log messages.
1374
At a minimum, a derived class must implement the log_revision method.
1376
If the LogFormatter needs to be informed of the beginning or end of
1377
a log it should implement the begin_log and/or end_log hook methods.
1379
A LogFormatter should define the following supports_XXX flags
1380
to indicate which LogRevision attributes it supports:
1382
- supports_delta must be True if this log formatter supports delta.
1383
Otherwise the delta attribute may not be populated. The 'delta_format'
1384
attribute describes whether the 'short_status' format (1) or the long
1385
one (2) should be used.
1387
- supports_merge_revisions must be True if this log formatter supports
1388
merge revisions. If not, then only mainline revisions will be passed
1391
- preferred_levels is the number of levels this formatter defaults to.
1392
The default value is zero meaning display all levels.
1393
This value is only relevant if supports_merge_revisions is True.
1395
- supports_tags must be True if this log formatter supports tags.
1396
Otherwise the tags attribute may not be populated.
1398
- supports_diff must be True if this log formatter supports diffs.
1399
Otherwise the diff attribute may not be populated.
1401
- supports_signatures must be True if this log formatter supports GPG signatures.
1404
Plugins can register functions to show custom revision properties using
1405
the properties_handler_registry. The registered function
1406
must respect the following interface description::
1408
def my_show_properties(properties_dict):
1409
# code that returns a dict {'name':'value'} of the properties
1412
preferred_levels = 0
1414
def __init__(self, to_file, show_ids=False, show_timezone='original',
1415
delta_format=None, levels=None, show_advice=False,
1416
to_exact_file=None, author_list_handler=None):
1417
"""Create a LogFormatter.
1419
:param to_file: the file to output to
1420
:param to_exact_file: if set, gives an output stream to which
1421
non-Unicode diffs are written.
1422
:param show_ids: if True, revision-ids are to be displayed
1423
:param show_timezone: the timezone to use
1424
:param delta_format: the level of delta information to display
1425
or None to leave it to the formatter to decide
1426
:param levels: the number of levels to display; None or -1 to
1427
let the log formatter decide.
1428
:param show_advice: whether to show advice at the end of the
1430
:param author_list_handler: callable generating a list of
1431
authors to display for a given revision
287
"""Abstract class to display log messages."""
288
def __init__(self, to_file, show_ids=False, show_timezone='original'):
1433
289
self.to_file = to_file
1434
# 'exact' stream used to show diff, it should print content 'as is'
1435
# and should not try to decode/encode it to unicode to avoid bug #328007
1436
if to_exact_file is not None:
1437
self.to_exact_file = to_exact_file
1439
# XXX: somewhat hacky; this assumes it's a codec writer; it's better
1440
# for code that expects to get diffs to pass in the exact file
1442
self.to_exact_file = getattr(to_file, 'stream', to_file)
1443
290
self.show_ids = show_ids
1444
291
self.show_timezone = show_timezone
1445
if delta_format is None:
1446
# Ensures backward compatibility
1447
delta_format = 2 # long format
1448
self.delta_format = delta_format
1449
self.levels = levels
1450
self._show_advice = show_advice
1451
self._merge_count = 0
1452
self._author_list_handler = author_list_handler
1454
def get_levels(self):
1455
"""Get the number of levels to display or 0 for all."""
1456
if getattr(self, 'supports_merge_revisions', False):
1457
if self.levels is None or self.levels == -1:
1458
self.levels = self.preferred_levels
1463
def log_revision(self, revision):
1466
:param revision: The LogRevision to be logged.
1468
295
raise NotImplementedError('not implemented in abstract base')
1470
def show_advice(self):
1471
"""Output user advice, if any, when the log is completed."""
1472
if self._show_advice and self.levels == 1 and self._merge_count > 0:
1473
advice_sep = self.get_advice_separator()
1475
self.to_file.write(advice_sep)
1477
"Use --include-merges or -n0 to see merged revisions.\n")
1479
def get_advice_separator(self):
1480
"""Get the text separating the log from the closing advice."""
1483
def short_committer(self, rev):
1484
name, address = config.parse_username(rev.committer)
1489
def short_author(self, rev):
1490
return self.authors(rev, 'first', short=True, sep=', ')
1492
def authors(self, rev, who, short=False, sep=None):
1493
"""Generate list of authors, taking --authors option into account.
1495
The caller has to specify the name of an author list handler,
as provided by the author list registry, using the ``who``
argument. That name only sets a default, though: when the
user selected a different author list generation using the
``--authors`` command line switch, as represented by the
``author_list_handler`` constructor argument, that value takes
precedence.
1503
:param rev: The revision for which to generate the list of authors.
1504
:param who: Name of the default handler.
1505
:param short: Whether to shorten names to either name or address.
1506
:param sep: What separator to use for automatic concatenation.
1508
if self._author_list_handler is not None:
1509
# The user did specify --authors, which overrides the default
1510
author_list_handler = self._author_list_handler
1512
# The user didn't specify --authors, so we use the caller's default
1513
author_list_handler = author_list_registry.get(who)
1514
names = author_list_handler(rev)
1516
for i in range(len(names)):
1517
name, address = config.parse_username(names[i])
1523
names = sep.join(names)
1526
def merge_marker(self, revision):
1527
"""Get the merge marker to include in the output or '' if none."""
1528
if len(revision.rev.parent_ids) > 1:
1529
self._merge_count += 1
1534
def show_properties(self, revision, indent):
1535
"""Displays the custom properties returned by each registered handler.
1537
If a registered handler raises an error it is propagated.
1539
for line in self.custom_properties(revision):
1540
self.to_file.write("%s%s\n" % (indent, line))
1542
def custom_properties(self, revision):
1543
"""Format the custom properties returned by each registered handler.
1545
If a registered handler raises an error it is propagated.
1547
:return: a list of formatted lines (excluding trailing newlines)
1549
lines = self._foreign_info_properties(revision)
1550
for key, handler in properties_handler_registry.iteritems():
1551
lines.extend(self._format_properties(handler(revision)))
1554
def _foreign_info_properties(self, rev):
1555
"""Custom log displayer for foreign revision identifiers.
1557
:param rev: Revision object.
1559
# Revision comes directly from a foreign repository
1560
if isinstance(rev, foreign.ForeignRevision):
1561
return self._format_properties(
1562
rev.mapping.vcs.show_foreign_revid(rev.foreign_revid))
1564
# Imported foreign revision ids always contain :
1565
if not ":" in rev.revision_id:
1568
# Revision was once imported from a foreign repository
1570
foreign_revid, mapping = \
1571
foreign.foreign_vcs_registry.parse_revision_id(rev.revision_id)
1572
except errors.InvalidRevisionId:
1575
return self._format_properties(
1576
mapping.vcs.show_foreign_revid(foreign_revid))
1578
def _format_properties(self, properties):
1580
for key, value in properties.items():
1581
lines.append(key + ': ' + value)
1584
def show_diff(self, to_file, diff, indent):
1585
for l in diff.rstrip().split('\n'):
1586
to_file.write(indent + '%s\n' % (l,))
1589
# Separator between revisions in long format
1590
_LONG_SEP = '-' * 60
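
# --- Illustrative sketch (not part of the original module) ------------------
# The smallest useful LogFormatter subclass per the class docstring above: it
# only implements log_revision() and declares no optional capabilities beyond
# merge revisions, so the Logger strips deltas, diffs, tags and signatures
# from the request before generating revisions.
class _ExampleOneLineFormatter(LogFormatter):

    supports_merge_revisions = True

    def log_revision(self, revision):
        self.to_file.write('%s %s\n' % (revision.revno or '?',
                                        revision.rev.get_summary()))
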
1593
302
class LongLogFormatter(LogFormatter):
1595
supports_merge_revisions = True
1596
preferred_levels = 1
1597
supports_delta = True
1598
supports_tags = True
1599
supports_diff = True
1600
supports_signatures = True
1602
def __init__(self, *args, **kwargs):
1603
super(LongLogFormatter, self).__init__(*args, **kwargs)
1604
if self.show_timezone == 'original':
1605
self.date_string = self._date_string_original_timezone
1607
self.date_string = self._date_string_with_timezone
1609
def _date_string_with_timezone(self, rev):
1610
return format_date(rev.timestamp, rev.timezone or 0,
1613
def _date_string_original_timezone(self, rev):
1614
return format_date_with_offset_in_original_timezone(rev.timestamp,
1617
def log_revision(self, revision):
1618
"""Log a revision, either merged or not."""
1619
indent = ' ' * revision.merge_depth
1621
if revision.revno is not None:
1622
lines.append('revno: %s%s' % (revision.revno,
1623
self.merge_marker(revision)))
1625
lines.append('tags: %s' % (', '.join(revision.tags)))
1626
if self.show_ids or revision.revno is None:
1627
lines.append('revision-id: %s' % (revision.rev.revision_id,))
if self.show_ids:
1629
for parent_id in revision.rev.parent_ids:
1630
lines.append('parent: %s' % (parent_id,))
1631
lines.extend(self.custom_properties(revision.rev))
1633
committer = revision.rev.committer
1634
authors = self.authors(revision.rev, 'all')
1635
if authors != [committer]:
1636
lines.append('author: %s' % (", ".join(authors),))
1637
lines.append('committer: %s' % (committer,))
1639
branch_nick = revision.rev.properties.get('branch-nick', None)
1640
if branch_nick is not None:
1641
lines.append('branch nick: %s' % (branch_nick,))
1643
lines.append('timestamp: %s' % (self.date_string(revision.rev),))
1645
if revision.signature is not None:
1646
lines.append('signature: ' + revision.signature)
1648
lines.append('message:')
1649
if not revision.rev.message:
1650
lines.append(' (no message)')
1652
message = revision.rev.message.rstrip('\r\n')
1653
for l in message.split('\n'):
1654
lines.append(' %s' % (l,))
1656
# Dump the output, appending the delta and diff if requested
1657
to_file = self.to_file
1658
to_file.write("%s%s\n" % (indent, ('\n' + indent).join(lines)))
1659
if revision.delta is not None:
1660
# Use the standard status output to display changes
1661
from bzrlib.delta import report_delta
1662
report_delta(to_file, revision.delta, short_status=False,
1663
show_ids=self.show_ids, indent=indent)
1664
if revision.diff is not None:
1665
to_file.write(indent + 'diff:\n')
1667
# Note: we explicitly don't indent the diff (relative to the
1668
# revision information) so that the output can be fed to patch -p0
1669
self.show_diff(self.to_exact_file, revision.diff, indent)
1670
self.to_exact_file.flush()
1672
def get_advice_separator(self):
1673
"""Get the text separating the log from the closing advice."""
1674
return '-' * 60 + '\n'

class ShortLogFormatter(LogFormatter):

    supports_merge_revisions = True
    preferred_levels = 1
    supports_delta = True
    supports_tags = True
    supports_diff = True

    def __init__(self, *args, **kwargs):
        super(ShortLogFormatter, self).__init__(*args, **kwargs)
        self.revno_width_by_depth = {}

    def log_revision(self, revision):
        # We need two indents: one per depth and one for the information
        # relative to that indent. Most mainline revnos are 5 chars or
        # less while dotted revnos are typically 11 chars or less. Once
        # calculated, we need to remember the offset for a given depth
        # as we might be starting from a dotted revno in the first column
        # and we want subsequent mainline revisions to line up.
        depth = revision.merge_depth
        indent = '    ' * depth
        revno_width = self.revno_width_by_depth.get(depth)
        if revno_width is None:
            if revision.revno is None or revision.revno.find('.') == -1:
                # mainline revno, e.g. 12345
                revno_width = 5
            else:
                # dotted revno, e.g. 12345.10.55
                revno_width = 11
            self.revno_width_by_depth[depth] = revno_width
        offset = ' ' * (revno_width + 1)

        to_file = self.to_file
        tags = ''
        if revision.tags:
            tags = ' {%s}' % (', '.join(revision.tags))
        to_file.write(indent + "%*s %s\t%s%s%s\n" % (revno_width,
                revision.revno or "", self.short_author(revision.rev),
                format_date(revision.rev.timestamp,
                            revision.rev.timezone or 0,
                            self.show_timezone, date_fmt="%Y-%m-%d",
                            show_offset=False),
                tags, self.merge_marker(revision)))
        self.show_properties(revision.rev, indent+offset)
        if self.show_ids or revision.revno is None:
            to_file.write(indent + offset + 'revision-id:%s\n'
                          % (revision.rev.revision_id,))
        if not revision.rev.message:
            to_file.write(indent + offset + '(no message)\n')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                to_file.write(indent + offset + '%s\n' % (l,))

        if revision.delta is not None:
            # Use the standard status output to display changes
            from bzrlib.delta import report_delta
            report_delta(to_file, revision.delta,
                         short_status=self.delta_format==1,
                         show_ids=self.show_ids, indent=indent + offset)
        if revision.diff is not None:
            self.show_diff(self.to_exact_file, revision.diff, '      ')
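
# Worked example for ShortLogFormatter.log_revision above (illustrative, not
# part of the original module):
#
#   revision.revno == '12345'        -> revno_width == 5,  offset == ' ' * 6
#   revision.revno == '12345.10.55'  -> revno_width == 11, offset == ' ' * 12
#
# The width chosen for the first revision at a given merge depth is remembered,
# so later revisions at that depth keep their columns aligned.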

class LineLogFormatter(LogFormatter):

    supports_merge_revisions = True
    preferred_levels = 1
    supports_tags = True

    def __init__(self, *args, **kwargs):
        super(LineLogFormatter, self).__init__(*args, **kwargs)
        width = terminal_width()
        if width is not None:
            # we need one extra space for terminals that wrap on last char
            width = width - 1
        self._max_chars = width

    def truncate(self, str, max_len):
        if max_len is None or len(str) <= max_len:
            return str
        return str[:max_len-3] + '...'

    def date_string(self, rev):
        return format_date(rev.timestamp, rev.timezone or 0,
                           self.show_timezone, date_fmt="%Y-%m-%d",
                           show_offset=False)

    def message(self, rev):
        if not rev.message:
            return '(no message)'
        else:
            return rev.message

    def log_revision(self, revision):
        indent = '  ' * revision.merge_depth
        self.to_file.write(self.log_string(revision.revno, revision.rev,
            self._max_chars, revision.tags, indent))
        self.to_file.write('\n')

    def log_string(self, revno, rev, max_chars, tags=None, prefix=''):
        """Format log info into one string. Truncate tail of string

        :param revno: revision number or None.
            Revision numbers count from 1.
        :param rev: revision object
        :param max_chars: maximum length of resulting string
        :param tags: list of tags or None
        :param prefix: string to prefix each line
        :return: formatted truncated string
        """
        out = []
        if revno:
            # show revno only when is not None
            out.append("%s:" % revno)
        if max_chars is not None:
            out.append(self.truncate(self.short_author(rev), (max_chars+3)/4))
        else:
            out.append(self.short_author(rev))
        out.append(self.date_string(rev))
        if len(rev.parent_ids) > 1:
            out.append('[merge]')
        if tags:
            tag_str = '{%s}' % (', '.join(tags))
            out.append(tag_str)
        out.append(rev.get_summary())
        return self.truncate(prefix + " ".join(out).rstrip('\n'), max_chars)

class GnuChangelogLogFormatter(LogFormatter):

    supports_merge_revisions = True
    supports_delta = True

    def log_revision(self, revision):
        """Log a revision, either merged or not."""
        to_file = self.to_file

        date_str = format_date(revision.rev.timestamp,
                               revision.rev.timezone or 0,
                               self.show_timezone,
                               date_fmt='%Y-%m-%d',
                               show_offset=False)
        committer_str = self.authors(revision.rev, 'first', sep=', ')
        committer_str = committer_str.replace(' <', '  <')
        to_file.write('%s  %s\n\n' % (date_str, committer_str))

        if revision.delta is not None and revision.delta.has_changed():
            for c in revision.delta.added + revision.delta.removed + revision.delta.modified:
                path, = c[:1]
                to_file.write('\t* %s:\n' % (path,))
            for c in revision.delta.renamed:
                oldpath, newpath = c[:2]
                # For renamed files, show both the old and the new path
                to_file.write('\t* %s:\n\t* %s:\n' % (oldpath, newpath))

        if not revision.rev.message:
            to_file.write('\tNo commit message\n')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                to_file.write('\t%s\n' % (l.lstrip(),))
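
# Illustrative example (not part of the original module): for a commit by
# "Jane Doe <jane@example.com>" touching foo.c, this formatter writes a GNU
# ChangeLog style entry --
#
#   2011-01-01  Jane Doe  <jane@example.com>
#
#           * foo.c:
#           Fix frobnication.
#
# with the touched paths and the message lines tab-indented. The name, email
# and file here are hypothetical.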

def line_log(rev, max_chars):
    lf = LineLogFormatter(None)
    return lf.log_string(None, rev, max_chars)
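
# Illustrative usage (not part of the original module):
#
#     summary = line_log(rev, 60)
#
# returns a single one-line summary of `rev` (a bzrlib Revision object),
# truncated to at most 60 characters.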

class LogFormatterRegistry(registry.Registry):
    """Registry for log formatters"""

    def make_formatter(self, name, *args, **kwargs):
        """Construct a formatter from arguments.

        :param name: Name of the formatter to construct. 'short', 'long' and
            'line' are built-in.
        """
        return self.get(name)(*args, **kwargs)

    def get_default(self, branch):
        return self.get(branch.get_config().log_format())

log_formatter_registry = LogFormatterRegistry()

log_formatter_registry.register('short', ShortLogFormatter,
                                'Moderately short log format')
log_formatter_registry.register('long', LongLogFormatter,
                                'Detailed log format')
log_formatter_registry.register('line', LineLogFormatter,
                                'Log format with one line per revision')
log_formatter_registry.register('gnu-changelog', GnuChangelogLogFormatter,
                                'Format used by GNU ChangeLog files')


def register_formatter(name, formatter):
    log_formatter_registry.register(name, formatter)
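
# Illustrative sketch (not part of the original module): a plugin can register
# its own formatter under a new name. MyLogFormatter is a hypothetical subclass
# of the LogFormatter base class defined earlier in this module:
#
#     class MyLogFormatter(LogFormatter):
#         def log_revision(self, revision):
#             self.to_file.write('%s\n' % (revision.rev.revision_id,))
#
#     register_formatter('my-format', MyLogFormatter)
#
# after which log_formatter('my-format', to_file=...) constructs it.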

def log_formatter(name, *args, **kwargs):
    """Construct a formatter from arguments.

    name -- Name of the formatter to construct; currently 'long', 'short' and
        'line' are supported.
    """
    try:
        return log_formatter_registry.make_formatter(name, *args, **kwargs)
    except KeyError:
        raise errors.BzrCommandError("unknown log formatter: %r" % name)
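
# Illustrative usage (not part of the original module):
#
#     lf = log_formatter('short', to_file=sys.stdout, show_timezone='original')
#
# constructs a ShortLogFormatter writing to stdout; an unknown name raises
# BzrCommandError.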

def author_list_all(rev):
    return rev.get_apparent_authors()[:]


def author_list_first(rev):
    lst = rev.get_apparent_authors()
    try:
        return [lst[0]]
    except IndexError:
        return []


def author_list_committer(rev):
    return [rev.committer]


author_list_registry = registry.Registry()

author_list_registry.register('all', author_list_all,
                              'All authors')
author_list_registry.register('first', author_list_first,
                              'The first author')
author_list_registry.register('committer', author_list_committer,
                              'The committer')
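
# Illustrative usage (not part of the original module): a caller can look up
# one of the handlers registered above by name and apply it to a revision, e.g.
#
#     authors = author_list_registry.get('first')(rev)
#
# which returns a list containing just the first apparent author of `rev`.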

def show_one_log(revno, rev, delta, verbose, to_file, show_timezone):
    # deprecated; for compatibility
    lf = LongLogFormatter(to_file=to_file, show_timezone=show_timezone)
    lf.show(revno, rev, delta)

def show_changed_revisions(branch, old_rh, new_rh, to_file=None,
                           log_format='long'):
    """Show the change in revision history comparing the old revision history to the new one.

    :param branch: The branch where the revisions exist
    :param old_rh: The old revision history
    :param new_rh: The new revision history
    :param to_file: A file to write the results to. If None, stdout will be used
    """
    if to_file is None:
        to_file = codecs.getwriter(get_terminal_encoding())(sys.stdout,
            errors='replace')
    lf = log_formatter(log_format,
                       show_ids=False,
                       to_file=to_file,
                       show_timezone='original')

    # This is the first index which is different between
    # old and new
    base_idx = None
    for i in xrange(max(len(new_rh), len(old_rh))):
        if (len(new_rh) <= i
                or len(old_rh) <= i
                or new_rh[i] != old_rh[i]):
            base_idx = i
            break

    if base_idx is None:
        to_file.write('Nothing seems to have changed\n')
        return
    ## TODO: It might be nice to do something like show_log
    ## and show the merged entries. But since this is the
    ## removed revisions, it shouldn't be as important
    if base_idx < len(old_rh):
        to_file.write('*'*60)
        to_file.write('\nRemoved Revisions:\n')
        for i in range(base_idx, len(old_rh)):
            rev = branch.repository.get_revision(old_rh[i])
            lr = LogRevision(rev, i+1, 0, None)
            lf.log_revision(lr)
        to_file.write('*'*60)
        to_file.write('\n\n')
    if base_idx < len(new_rh):
        to_file.write('Added Revisions:\n')
        show_log(branch,
                 lf,
                 None,
                 verbose=False,
                 direction='forward',
                 start_revision=base_idx+1,
                 end_revision=len(new_rh),
                 search=None)

def get_history_change(old_revision_id, new_revision_id, repository):
    """Calculate the uncommon lefthand history between two revisions.

    :param old_revision_id: The original revision id.
    :param new_revision_id: The new revision id.
    :param repository: The repository to use for the calculation.

    return old_history, new_history
    """
    old_history = []
    old_revisions = set()
    new_history = []
    new_revisions = set()
    graph = repository.get_graph()
    new_iter = graph.iter_lefthand_ancestry(new_revision_id)
    old_iter = graph.iter_lefthand_ancestry(old_revision_id)
    stop_revision = None
    do_old = True
    do_new = True
    while do_new or do_old:
        if do_new:
            try:
                new_revision = new_iter.next()
            except StopIteration:
                do_new = False
            else:
                new_history.append(new_revision)
                new_revisions.add(new_revision)
                if new_revision in old_revisions:
                    stop_revision = new_revision
                    break
        if do_old:
            try:
                old_revision = old_iter.next()
            except StopIteration:
                do_old = False
            else:
                old_history.append(old_revision)
                old_revisions.add(old_revision)
                if old_revision in new_revisions:
                    stop_revision = old_revision
                    break
    new_history.reverse()
    old_history.reverse()
    if stop_revision is not None:
        new_history = new_history[new_history.index(stop_revision) + 1:]
        old_history = old_history[old_history.index(stop_revision) + 1:]
    return old_history, new_history
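
# Illustrative usage (not part of the original module): after an operation that
# moved the branch tip, the divergence can be computed as
#
#     old_history, new_history = get_history_change(old_tip, new_tip,
#                                                   branch.repository)
#
# where `old_tip` and `new_tip` are revision ids; revisions only reachable from
# the old tip end up in old_history, newly added ones in new_history, both
# ordered oldest to newest.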

def show_branch_change(branch, output, old_revno, old_revision_id):
    """Show the changes made to a branch.

    :param branch: The branch to show changes about.
    :param output: A file-like object to write changes to.
    :param old_revno: The revno of the old tip.
    :param old_revision_id: The revision_id of the old tip.
    """
    new_revno, new_revision_id = branch.last_revision_info()
    old_history, new_history = get_history_change(old_revision_id,
                                                  new_revision_id,
                                                  branch.repository)
    if old_history == [] and new_history == []:
        output.write('Nothing seems to have changed\n')
        return

    log_format = log_formatter_registry.get_default(branch)
    lf = log_format(show_ids=False, to_file=output, show_timezone='original')
    if old_history != []:
        output.write('*'*60)
        output.write('\nRemoved Revisions:\n')
        show_flat_log(branch.repository, old_history, old_revno, lf)
        output.write('*'*60)
        output.write('\n\n')
    if new_history != []:
        output.write('Added Revisions:\n')
        start_revno = new_revno - len(new_history) + 1
        show_log(branch, lf, None, verbose=False, direction='forward',
                 start_revision=start_revno,)
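
# Illustrative usage (not part of the original module): record the old tip,
# change the branch, then report the difference, e.g.
#
#     old_revno, old_revision_id = branch.last_revision_info()
#     # ... pull or update the branch ...
#     show_branch_change(branch, sys.stdout, old_revno, old_revision_id)
#
# which writes the removed and added revisions to stdout using the branch's
# default log format.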

def show_flat_log(repository, history, last_revno, lf):
    """Show a simple log of the specified history.

    :param repository: The repository to retrieve revisions from.
    :param history: A list of revision_ids indicating the lefthand history.
    :param last_revno: The revno of the last revision_id in the history.
    :param lf: The log formatter to use.
    """
    start_revno = last_revno - len(history) + 1
    revisions = repository.get_revisions(history)
    for i, rev in enumerate(revisions):
        lr = LogRevision(rev, i + last_revno, 0, None)
        lf.log_revision(lr)

def _get_info_for_log_files(revisionspec_list, file_list, add_cleanup):
    """Find file-ids and kinds given a list of files and a revision range.

    We search for files at the end of the range. If not found there,
    we try the start of the range.

    :param revisionspec_list: revision range as parsed on the command line
    :param file_list: the list of paths given on the command line;
      the first of these can be a branch location or a file path,
      the remainder must be file paths
    :param add_cleanup: When the branch returned is read locked,
      an unlock call will be queued to the cleanup.
    :return: (branch, info_list, start_rev_info, end_rev_info) where
      info_list is a list of (relative_path, file_id, kind) tuples where
      kind is one of values 'directory', 'file', 'symlink', 'tree-reference'.
      branch will be read-locked.
    """
    from bzrlib.builtins import _get_revision_range
    tree, b, path = bzrdir.BzrDir.open_containing_tree_or_branch(file_list[0])
    add_cleanup(b.lock_read().unlock)
    # XXX: It's damn messy converting a list of paths to relative paths when
    # those paths might be deleted ones, they might be on a case-insensitive
    # filesystem and/or they might be in silly locations (like another branch).
    # For example, what should "log bzr://branch/dir/file1 file2" do? (Is
    # file2 implicitly in the same dir as file1 or should its directory be
    # taken from the current tree somehow?) For now, this solves the common
    # case of running log in a nested directory, assuming paths beyond the
    # first one haven't been deleted ...
    if tree:
        relpaths = [path] + tree.safe_relpath_files(file_list[1:])
    else:
        relpaths = [path] + file_list[1:]
    info_list = []
    start_rev_info, end_rev_info = _get_revision_range(revisionspec_list, b,
        "log")
    if relpaths in ([], [u'']):
        return b, [], start_rev_info, end_rev_info
    if start_rev_info is None and end_rev_info is None:
        if tree is None:
            tree = b.basis_tree()
        tree1 = None
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, file_id)
            if file_id is None:
                # go back to when time began
                if tree1 is None:
                    try:
                        rev1 = b.get_rev_id(1)
                    except errors.NoSuchRevision:
                        # No history at all
                        file_id = None
                        kind = None
                    else:
                        tree1 = b.repository.revision_tree(rev1)
                if tree1 is not None:
                    file_id = tree1.path2id(fp)
                    kind = _get_kind_for_file_id(tree1, file_id)
            info_list.append((fp, file_id, kind))

    elif start_rev_info == end_rev_info:
        # One revision given - file must exist in it
        tree = b.repository.revision_tree(end_rev_info.rev_id)
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, file_id)
            info_list.append((fp, file_id, kind))

    else:
        # Revision range given. Get the file-id from the end tree.
        # If that fails, try the start tree.
        rev_id = end_rev_info.rev_id
        if rev_id is None:
            tree = b.basis_tree()
        else:
            tree = b.repository.revision_tree(rev_id)
        tree1 = None
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, file_id)
            if file_id is None:
                if tree1 is None:
                    rev_id = start_rev_info.rev_id
                    if rev_id is None:
                        rev1 = b.get_rev_id(1)
                        tree1 = b.repository.revision_tree(rev1)
                    else:
                        tree1 = b.repository.revision_tree(rev_id)
                file_id = tree1.path2id(fp)
                kind = _get_kind_for_file_id(tree1, file_id)
            info_list.append((fp, file_id, kind))
    return b, info_list, start_rev_info, end_rev_info

def _get_kind_for_file_id(tree, file_id):
    """Return the kind of a file-id or None if it doesn't exist."""
    if file_id is not None:
        return tree.kind(file_id)
    else:
        return None

properties_handler_registry = registry.Registry()

# Use the properties handlers to print out bug information if available
def _bugs_properties_handler(revision):
    if revision.properties.has_key('bugs'):
        bug_lines = revision.properties['bugs'].split('\n')
        bug_rows = [line.split(' ', 1) for line in bug_lines]
        fixed_bug_urls = [row[0] for row in bug_rows if
                          len(row) > 1 and row[1] == 'fixed']

        if fixed_bug_urls:
            return {'fixes bug(s)': ' '.join(fixed_bug_urls)}
    return {}

properties_handler_registry.register('bugs_properties_handler',
                                     _bugs_properties_handler)
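
# Illustrative sketch (not part of the original module): plugins can register
# additional handlers in the same way; each handler receives a Revision and
# returns a dict of extra properties to display, e.g.
#
#     def _reviewer_properties_handler(revision):
#         reviewer = revision.properties.get('reviewer', None)
#         if reviewer is not None:
#             return {'reviewed by': reviewer}
#         return {}
#
#     properties_handler_registry.register('reviewer_properties_handler',
#                                          _reviewer_properties_handler)
#
# The 'reviewer' property and the handler name here are hypothetical examples.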

# Adapters through which the revision ids to log are filtered. When log is
# called, the log_rev_iterator is adapted through each of these factory
# methods. Plugins are welcome to mutate this list in any way they like - as
# long as the overall behaviour is preserved. At this point there is no
# extensible mechanism for getting parameters to each factory method, and
# until there is this won't be considered a stable api.
log_adapters = [
    # core log logic
    _make_batch_filter,
    # read revision objects
    _make_revision_objects,
    # filter on log messages
    _make_search_filter,
    # generate deltas for things we will show