    Logger(branch, rqst).show(lf)


# Note: This needs to be kept in sync with the defaults in
# make_log_request_dict() below
_DEFAULT_REQUEST_PARAMS = {
    'direction': 'reverse',
    'generate_tags': True,
    'exclude_common_ancestry': False,
    '_match_using_deltas': True,
def make_log_request_dict(direction='reverse', specific_fileids=None,
                          start_revision=None, end_revision=None, limit=None,
                          message_search=None, levels=None, generate_tags=True,
                          delta_type=None,
                          diff_type=None, _match_using_deltas=True,
                          exclude_common_ancestry=False, match=None,
                          signature=False, omit_merges=False,
                          ):
    """Convenience function for making a logging request dictionary.

    Using this function may make code slightly safer by ensuring
      algorithm used for matching specific_fileids. This parameter
      may be removed in the future so bzrlib client code should NOT
      use it.

    :param exclude_common_ancestry: Whether -rX..Y should be interpreted as a
      range operator or as a graph difference.

    :param signature: show digital signature information

    :param match: Dictionary of list of search strings to use when filtering
      revisions. Keys can be 'message', 'author', 'committer', 'bugs' or
      the empty string to match any of the preceding properties.

    :param omit_merges: If True, commits with more than one parent are
      omitted.
    """
    # Take care of old style message_search parameter
    if message_search:
        if match:
            if 'message' in match:
                match['message'].append(message_search)
            else:
                match['message'] = [message_search]
        else:
            match = {'message': [message_search]}
    return {
        'direction': direction,
        'specific_fileids': specific_fileids,
        'start_revision': start_revision,
        'end_revision': end_revision,
        'message_search': message_search,
        'levels': levels,
        'generate_tags': generate_tags,
        'delta_type': delta_type,
        'diff_type': diff_type,
        'exclude_common_ancestry': exclude_common_ancestry,
        'signature': signature,
        'omit_merges': omit_merges,
        # Add 'private' attributes for features that may be deprecated
        '_match_using_deltas': _match_using_deltas,
        }
def _apply_log_request_defaults(rqst):
    """Apply default values to a request dictionary."""
    result = _DEFAULT_REQUEST_PARAMS.copy()
    result.update(rqst)
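    # Illustrative sketch (not part of this module): a caller would normally
    # build the request with make_log_request_dict() and let the defaults
    # above cover anything it leaves out.  The values used here are assumed
    # for the example only.
    #
    #   rqst = make_log_request_dict(limit=10, levels=1,
    #                                match={'author': ['jrandom']})
    #   # keys the caller did not set carry the _DEFAULT_REQUEST_PARAMS values
    #   # once _apply_log_request_defaults() has merged them in.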
def format_signature_validity(rev_id, repo):
    """Get the signature validity.

    :param rev_id: revision id to validate
    :param repo: repository of revision
    :return: human readable string to print to log
    """
    from bzrlib import gpg

    gpg_strategy = gpg.GPGStrategy(None)
    result = repo.verify_revision_signature(rev_id, gpg_strategy)
    if result[0] == gpg.SIGNATURE_VALID:
        return u"valid signature from {0}".format(result[1])
    if result[0] == gpg.SIGNATURE_KEY_MISSING:
        return "unknown key {0}".format(result[1])
    if result[0] == gpg.SIGNATURE_NOT_VALID:
        return "invalid signature!"
    if result[0] == gpg.SIGNATURE_NOT_SIGNED:
        return "no signature"
class LogGenerator(object):
    """A generator of log revisions."""
        # Tweak the LogRequest based on what the LogFormatter can handle.
        # (There's no point generating stuff if the formatter can't display it.)
        if rqst['levels'] is None or lf.get_levels() > rqst['levels']:
            # user didn't specify levels, use whatever the LF can handle:
            rqst['levels'] = lf.get_levels()
        if not getattr(lf, 'supports_tags', False):
            rqst['generate_tags'] = False
        if not getattr(lf, 'supports_delta', False):
            rqst['delta_type'] = None
        if not getattr(lf, 'supports_diff', False):
            rqst['diff_type'] = None
        if not getattr(lf, 'supports_signatures', False):
            rqst['signature'] = False

        # Find and print the interesting revisions
        generator = self._generator_factory(self.branch, rqst)
        generate_merge_revisions = rqst.get('levels') != 1
        delayed_graph_generation = not rqst.get('specific_fileids') and (
                rqst.get('limit') or self.start_rev_id or self.end_rev_id)
        view_revisions = _calc_view_revisions(
            self.branch, self.start_rev_id, self.end_rev_id,
            rqst.get('direction'),
            generate_merge_revisions=generate_merge_revisions,
            delayed_graph_generation=delayed_graph_generation,
            exclude_common_ancestry=rqst.get('exclude_common_ancestry'))

        # Apply the other filters
        return make_log_rev_iterator(self.branch, view_revisions,
            rqst.get('delta_type'), rqst.get('match'),
            file_ids=rqst.get('specific_fileids'),
            direction=rqst.get('direction'))
        # Note that we always generate the merge revisions because
        # filter_revisions_touching_file_id() requires them ...
        view_revisions = _calc_view_revisions(
            self.branch, self.start_rev_id, self.end_rev_id,
            rqst.get('direction'), generate_merge_revisions=True,
            exclude_common_ancestry=rqst.get('exclude_common_ancestry'))
        if not isinstance(view_revisions, list):
            view_revisions = list(view_revisions)
        view_revisions = _filter_revisions_touching_file_id(self.branch,
            rqst.get('specific_fileids')[0], view_revisions,
            include_merges=rqst.get('levels') != 1)
        return make_log_rev_iterator(self.branch, view_revisions,
            rqst.get('delta_type'), rqst.get('match'))
def _calc_view_revisions(branch, start_rev_id, end_rev_id, direction,
                         generate_merge_revisions,
                         delayed_graph_generation=False,
                         exclude_common_ancestry=False,
                         ):
    """Calculate the revisions to view.

    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
             a list of the same tuples.
    """
    if (exclude_common_ancestry and start_rev_id == end_rev_id):
        raise errors.BzrCommandError(gettext(
            '--exclude-common-ancestry requires two different revisions'))
    if direction not in ('reverse', 'forward'):
        raise ValueError(gettext('invalid direction %r') % direction)
    br_revno, br_rev_id = branch.last_revision_info()
    if br_revno == 0:
        return []

    if (end_rev_id and start_rev_id == end_rev_id
        and (not generate_merge_revisions
             or not _has_merges(branch, end_rev_id))):
        # If a single revision is requested, check we can handle it
        return _generate_one_revision(branch, end_rev_id, br_rev_id,
                                      br_revno)
    if not generate_merge_revisions:
        try:
            # If we only want to see linear revisions, we can iterate ...
            iter_revs = _linear_view_revisions(
                branch, start_rev_id, end_rev_id,
                exclude_common_ancestry=exclude_common_ancestry)
            # If a start limit was given and it's not obviously an
            # ancestor of the end limit, check it before outputting anything
            if (direction == 'forward'
                or (start_rev_id and not _is_obvious_ancestor(
                        branch, start_rev_id, end_rev_id))):
                iter_revs = list(iter_revs)
            if direction == 'forward':
                iter_revs = reversed(iter_revs)
            return iter_revs
        except _StartNotLinearAncestor:
            # Switch to the slower implementation that may be able to find a
            # non-obvious ancestor out of the left-hand history.
            pass
    iter_revs = _generate_all_revisions(branch, start_rev_id, end_rev_id,
                                        direction, delayed_graph_generation,
                                        exclude_common_ancestry)
    if direction == 'forward':
        iter_revs = _rebase_merge_depth(reverse_by_depth(list(iter_revs)))
def _generate_one_revision(branch, rev_id, br_rev_id, br_revno):
    if rev_id == br_rev_id:
        # It's the tip
        return [(br_rev_id, br_revno, 0)]
    else:
        revno_str = _compute_revno_str(branch, rev_id)
        return [(rev_id, revno_str, 0)]
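# Each view revision produced throughout this module is a
# (revision_id, revno_str, merge_depth) tuple, e.g. (illustrative values only)
# ('jrandom@example.com-20100101000000-abcdef', '3.1.2', 1) for a revision
# merged one level below the mainline.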
def _generate_flat_revisions(branch, start_rev_id, end_rev_id, direction):
    result = _linear_view_revisions(branch, start_rev_id, end_rev_id)
    # If a start limit was given and it's not obviously an
    # ancestor of the end limit, check it before outputting anything
    if direction == 'forward' or (start_rev_id
        and not _is_obvious_ancestor(branch, start_rev_id, end_rev_id)):
        try:
            result = list(result)
        except _StartNotLinearAncestor:
            raise errors.BzrCommandError('Start revision not found in'
                ' left-hand history of end revision.')
    if direction == 'forward':
        result = reversed(result)
def _generate_all_revisions(branch, start_rev_id, end_rev_id, direction,
                            delayed_graph_generation,
                            exclude_common_ancestry=False):
    # On large trees, generating the merge graph can take 30-60 seconds
    # so we delay doing it until a merge is detected, incrementally
    # returning initial (non-merge) revisions while we can.

    initial_revisions = []
    if delayed_graph_generation:
        try:
            for rev_id, revno, depth in _linear_view_revisions(
                    branch, start_rev_id, end_rev_id, exclude_common_ancestry):
                if _has_merges(branch, rev_id):
                    # The end_rev_id can be nested down somewhere. We need an
                    # explicit ancestry check. There is an ambiguity here as we
                    # may not raise _StartNotLinearAncestor for a revision that
                    # is an ancestor but not a *linear* one. But since we have
                    # loaded the graph to do the check (or calculate a dotted
                    # revno), we may as well accept to show the log... We need
                    # the check only if start_rev_id is not None as all
                    # revisions have _mod_revision.NULL_REVISION as an ancestor
                    graph = branch.repository.get_graph()
                    if (start_rev_id is not None
                        and not graph.is_ancestor(start_rev_id, end_rev_id)):
                        raise _StartNotLinearAncestor()
                    # Since we collected the revisions so far, we need to
                    # adjust end_rev_id.
                    end_rev_id = rev_id
                    break
                else:
                    initial_revisions.append((rev_id, revno, depth))
            else:
                # No merged revisions found
                return initial_revisions
        except _StartNotLinearAncestor:
            # A merge was never detected so the lower revision limit can't
            # be nested down somewhere
            raise errors.BzrCommandError(gettext('Start revision not found in'
                ' history of end revision.'))

    # We exit the loop above because we encounter a revision with merges, from
    # this revision, we need to switch to _graph_view_revisions.

    # A log including nested merges is required. If the direction is reverse,
    # we rebase the initial merge depths so that the development line is
    # indented at the end seems slightly nicer in that case.
    view_revisions = chain(iter(initial_revisions),
        _graph_view_revisions(branch, start_rev_id, end_rev_id,
                              rebase_initial_depths=(direction == 'reverse'),
                              exclude_common_ancestry=exclude_common_ancestry))
    return view_revisions
def _has_merges(branch, rev_id):
    """Does a revision have multiple parents or not?"""
    parents = branch.repository.get_parent_map([rev_id]).get(rev_id, [])
    return len(parents) > 1


def _compute_revno_str(branch, rev_id):
    """Compute the revno string from a rev_id.

    :return: The revno string, or None if the revision is not in the supplied
        branch.
    """
    try:
        revno = branch.revision_id_to_dotted_revno(rev_id)
    except errors.NoSuchRevision:
        # The revision must be outside of this branch
        return None
    else:
        return '.'.join(str(n) for n in revno)
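# For example (values assumed), a mainline dotted revno of (3,) is rendered as
# the string "3" and a merged revision at (3, 1, 2) as "3.1.2"; revisions the
# branch does not know about come back as None, so callers can fall back to
# printing no revno at all.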
def _is_obvious_ancestor(branch, start_rev_id, end_rev_id):
    """Is start_rev_id an obvious ancestor of end_rev_id?"""
    if start_rev_id and end_rev_id:
        try:
            start_dotted = branch.revision_id_to_dotted_revno(start_rev_id)
            end_dotted = branch.revision_id_to_dotted_revno(end_rev_id)
        except errors.NoSuchRevision:
            # one or both is not in the branch; not obvious
            return False
        if len(start_dotted) == 1 and len(end_dotted) == 1:
            # both on mainline
            return start_dotted[0] <= end_dotted[0]
def _linear_view_revisions(branch, start_rev_id, end_rev_id,
                           exclude_common_ancestry=False):
    """Calculate a sequence of revisions to view, newest to oldest.

    :param start_rev_id: the lower revision-id
    :param end_rev_id: the upper revision-id
    :param exclude_common_ancestry: Whether the start_rev_id should be part of
        the iterated revisions.
    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
    :raises _StartNotLinearAncestor: if a start_rev_id is specified but
        is not found walking the left-hand history
    """
    br_revno, br_rev_id = branch.last_revision_info()
    repo = branch.repository
    graph = repo.get_graph()
    if start_rev_id is None and end_rev_id is None:
        cur_revno = br_revno
        for revision_id in graph.iter_lefthand_ancestry(br_rev_id,
                (_mod_revision.NULL_REVISION,)):
            yield revision_id, str(cur_revno), 0
            cur_revno -= 1
    else:
        if end_rev_id is None:
            end_rev_id = br_rev_id
        found_start = start_rev_id is None
        for revision_id in graph.iter_lefthand_ancestry(end_rev_id,
                (_mod_revision.NULL_REVISION,)):
            revno_str = _compute_revno_str(branch, revision_id)
            if not found_start and revision_id == start_rev_id:
                if not exclude_common_ancestry:
                    yield revision_id, revno_str, 0
                found_start = True


        yield rev_id, '.'.join(map(str, revno)), merge_depth
@deprecated_function(deprecated_in((2, 2, 0)))
def calculate_view_revisions(branch, start_revision, end_revision, direction,
                             specific_fileid, generate_merge_revisions):
    """Calculate the revisions to view.

    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
             a list of the same tuples.
    """
    start_rev_id, end_rev_id = _get_revision_limits(branch, start_revision,
        end_revision)
    view_revisions = list(_calc_view_revisions(branch, start_rev_id, end_rev_id,
        direction, generate_merge_revisions or specific_fileid))
    if specific_fileid:
        view_revisions = _filter_revisions_touching_file_id(branch,
            specific_fileid, view_revisions,
            include_merges=generate_merge_revisions)
    return _rebase_merge_depth(view_revisions)
def _rebase_merge_depth(view_revisions):
    """Adjust depths upwards so the top level is 0."""
    # If either the first or last revision has a merge_depth of 0, we're done


    return log_rev_iterator
def _make_search_filter(branch, generate_delta, match, log_rev_iterator):
    """Create a filtered iterator of log_rev_iterator matching on a regex.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
    :param match: A dictionary with properties as keys and lists of strings
        as values. To match, a revision may match any of the supplied strings
        within a single property but must match at least one string for each
        property.
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    if match is None:
        return log_rev_iterator
    searchRE = [(k, [re.compile(x, re.IGNORECASE) for x in v])
                for (k, v) in match.iteritems()]
    return _filter_re(searchRE, log_rev_iterator)


def _filter_re(searchRE, log_rev_iterator):
    for revs in log_rev_iterator:
        new_revs = [rev for rev in revs if _match_filter(searchRE, rev[1])]
        if new_revs:
            yield new_revs


def _match_filter(searchRE, rev):
    strings = {
        'message': (rev.message,),
        'committer': (rev.committer,),
        'author': (rev.get_apparent_authors()),
        'bugs': list(rev.iter_bugs())
        }
    strings[''] = [item for inner_list in strings.itervalues()
                   for item in inner_list]
    for (k, v) in searchRE:
        if k in strings and not _match_any_filter(strings[k], v):
            return False
    return True


def _match_any_filter(strings, res):
    return any([filter(None, map(re.search, strings)) for re in res])
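# Sketch of the matching semantics above (the values are assumed for
# illustration): a revision survives the filter when, for every key present in
# the match dict, at least one of that key's patterns matches one of the
# revision's strings for that property; the '' key is checked against all of
# the properties pooled together.
#
#   match = {'author': ['jrandom'], 'message': ['fix', 'bug']}
#   # keeps revisions authored by jrandom whose message mentions "fix" or "bug"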
def _make_delta_filter(branch, generate_delta, search, log_rev_iterator,
                       fileids=None, direction='reverse'):
    if ((start_rev_id == _mod_revision.NULL_REVISION)
        or (end_rev_id == _mod_revision.NULL_REVISION)):
        raise errors.BzrCommandError(gettext('Logging revision 0 is invalid.'))
    if start_revno > end_revno:
        raise errors.BzrCommandError(gettext("Start revision must be older "
            "than the end revision."))

    if end_revno < start_revno:
        return None, None, None, None
    cur_revno = branch_revno
    mainline_revs = []
    graph = branch.repository.get_graph()
    for revision_id in graph.iter_lefthand_ancestry(
            branch_last_revision, (_mod_revision.NULL_REVISION,)):
        if cur_revno < start_revno:
            # We have gone far enough, but we always add 1 more revision
            rev_nos[revision_id] = cur_revno

    return mainline_revs, rev_nos, start_rev_id, end_rev_id
@deprecated_function(deprecated_in((2, 2, 0)))
def _filter_revision_range(view_revisions, start_rev_id, end_rev_id):
    """Filter view_revisions based on revision ranges.

    :param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
        tuples to be filtered.

    :param start_rev_id: If not NONE specifies the first revision to be logged.
        If NONE then all revisions up to the end_rev_id are logged.

    :param end_rev_id: If not NONE specifies the last revision to be logged.
        If NONE then all revisions up to the end of the log are logged.

    :return: The filtered view_revisions.
    """
    if start_rev_id or end_rev_id:
        revision_ids = [r for r, n, d in view_revisions]
        if start_rev_id:
            start_index = revision_ids.index(start_rev_id)
        else:
            start_index = 0
        if start_rev_id == end_rev_id:
            end_index = start_index
        else:
            if end_rev_id:
                end_index = revision_ids.index(end_rev_id)
            else:
                end_index = len(view_revisions) - 1
        # To include the revisions merged into the last revision,
        # extend end_rev_id down to, but not including, the next rev
        # with the same or lesser merge_depth
        end_merge_depth = view_revisions[end_index][2]
        try:
            for index in xrange(end_index+1, len(view_revisions)+1):
                if view_revisions[index][2] <= end_merge_depth:
                    end_index = index - 1
                    break
        except IndexError:
            # if the search falls off the end then log to the end as well
            end_index = len(view_revisions) - 1
        view_revisions = view_revisions[start_index:end_index+1]
    return view_revisions
def _filter_revisions_touching_file_id(branch, file_id, view_revisions,
                                       include_merges=True):
    r"""Return the list of revision ids which touch a given file id.
@deprecated_function(deprecated_in((2, 2, 0)))
def get_view_revisions(mainline_revs, rev_nos, branch, direction,
                       include_merges=True):
    """Produce an iterator of revisions to show
    :return: an iterator of (revision_id, revno, merge_depth)
    (if there is no revno for a revision, None is supplied)
    """
    if not include_merges:
        revision_ids = mainline_revs[1:]
        if direction == 'reverse':
            revision_ids.reverse()
        for revision_id in revision_ids:
            yield revision_id, str(rev_nos[revision_id]), 0
        return
    graph = branch.repository.get_graph()
    # This asks for all mainline revisions, which means we only have to spider
    # sideways, rather than depth history. That said, it's still size-of-history
    # and should be addressed.
    # mainline_revisions always includes an extra revision at the beginning, so
    # pull it off
    parent_map = dict(((key, value) for key, value in
        graph.iter_ancestry(mainline_revs[1:]) if value is not None))
    # filter out ghosts; merge_sort errors on ghosts.
    rev_graph = _mod_repository._strip_NULL_ghosts(parent_map)
    merge_sorted_revisions = tsort.merge_sort(
        rev_graph,
        mainline_revs[-1],
        mainline_revs,
        generate_revno=True)

    if direction == 'forward':
        # forward means oldest first.
        merge_sorted_revisions = reverse_by_depth(merge_sorted_revisions)
    elif direction != 'reverse':
        raise ValueError('invalid direction %r' % direction)

    for (sequence, rev_id, merge_depth, revno, end_of_merge
         ) in merge_sorted_revisions:
        yield rev_id, '.'.join(map(str, revno)), merge_depth
def reverse_by_depth(merge_sorted_revisions, _depth=0):
    """Reverse revisions by depth.
    to indicate which LogRevision attributes it supports:

    - supports_delta must be True if this log formatter supports delta.
      Otherwise the delta attribute may not be populated. The 'delta_format'
      attribute describes whether the 'short_status' format (1) or the long
      one (2) should be used.

    - supports_merge_revisions must be True if this log formatter supports
      merge revisions. If not, then only mainline revisions will be passed
      to the formatter.

    - preferred_levels is the number of levels this formatter defaults to.
      The default value is zero meaning display all levels.
      This value is only relevant if supports_merge_revisions is True.

    - supports_tags must be True if this log formatter supports tags.
      Otherwise the tags attribute may not be populated.

    - supports_diff must be True if this log formatter supports diffs.
      Otherwise the diff attribute may not be populated.

    - supports_signatures must be True if this log formatter supports GPG
      signatures.

    Plugins can register functions to show custom revision properties using
    the properties_handler_registry. The registered function
    must respect the following interface description::

        def my_show_properties(properties_dict):
            # code that returns a dict {'name':'value'} of the properties
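
    A plugin-side sketch (the handler and property names here are assumptions
    used only for illustration)::

        def show_flavor_property(properties_dict):
            # return just the subset of properties this handler wants shown
            return {'flavor': properties_dict.get('flavor', '')}

        properties_handler_registry.register('show_flavor_property',
                                              show_flavor_property)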
    def short_author(self, rev):
        return self.authors(rev, 'first', short=True, sep=', ')
    def authors(self, rev, who, short=False, sep=None):
        """Generate list of authors, taking --authors option into account.

        The caller has to specify the name of an author list handler,
        as provided by the author list registry, using the ``who``
        argument. That name only sets a default, though: when the
        user selected a different author list generation using the
        ``--authors`` command line switch, as represented by the
        ``author_list_handler`` constructor argument, that value takes
        precedence.

        :param rev: The revision for which to generate the list of authors.
        :param who: Name of the default handler.
        :param short: Whether to shorten names to either name or address.
        :param sep: What separator to use for automatic concatenation.
        """
        if self._author_list_handler is not None:
            # The user did specify --authors, which overrides the default
            author_list_handler = self._author_list_handler
        else:
            # The user didn't specify --authors, so we use the caller's default
            author_list_handler = author_list_registry.get(who)
        names = author_list_handler(rev)
        if short:
            for i in range(len(names)):
                name, address = config.parse_username(names[i])
                if address:
                    names[i] = address
                else:
                    names[i] = name
        if sep is not None:
            names = sep.join(names)
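        # Sketch (values assumed): with the handlers registered later in this
        # module, self.authors(rev, 'all', short=True, sep=', ') renders every
        # apparent author as a short name or address joined with ", ", while
        # who='first' keeps only the first author unless the user overrode the
        # choice with --authors.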
    def merge_marker(self, revision):
        """Get the merge marker to include in the output or '' if none."""
            self.merge_marker(revision)))
        if revision.tags:
            lines.append('tags: %s' % (', '.join(revision.tags)))
        if self.show_ids or revision.revno is None:
            lines.append('revision-id: %s' % (revision.rev.revision_id,))
        if self.show_ids:
            for parent_id in revision.rev.parent_ids:
                lines.append('parent: %s' % (parent_id,))
        lines.extend(self.custom_properties(revision.rev))

        committer = revision.rev.committer
        authors = self.authors(revision.rev, 'all')
        if authors != [committer]:
            lines.append('author: %s' % (", ".join(authors),))
        lines.append('committer: %s' % (committer,))
        if revision.tags:
            tags = ' {%s}' % (', '.join(revision.tags))
        to_file.write(indent + "%*s %s\t%s%s%s\n" % (revno_width,
                revision.revno or "", self.short_author(revision.rev),
                format_date(revision.rev.timestamp,
                            revision.rev.timezone or 0,
                            self.show_timezone, date_fmt="%Y-%m-%d",
                            show_offset=False),
                tags, self.merge_marker(revision)))
        self.show_properties(revision.rev, indent+offset)
        if self.show_ids or revision.revno is None:
            to_file.write(indent + offset + 'revision-id:%s\n'
                          % (revision.rev.revision_id,))
        if not revision.rev.message:
    def log_string(self, revno, rev, max_chars, tags=None, prefix=''):
        """Format log info into one string. Truncate tail of string

        :param revno: revision number or None.
            Revision numbers count from 1.
        :param rev: revision object
        :param max_chars: maximum length of resulting string
        :param tags: list of tags or None
        :param prefix: string to prefix each line
        :return: formatted truncated string
        """
        out = []
        if revno:
            # show revno only when it is not None
            out.append("%s:" % revno)
        if max_chars is not None:
            out.append(self.truncate(self.short_author(rev), (max_chars+3)/4))
        else:
            out.append(self.short_author(rev))
        out.append(self.date_string(rev))
        if len(rev.parent_ids) > 1:
            out.append('[merge]')
        return self.get(name)(*args, **kwargs)

    def get_default(self, branch):
        c = branch.get_config_stack()
        return self.get(c.get('log_format'))


log_formatter_registry = LogFormatterRegistry()
log_formatter_registry.register('short', ShortLogFormatter,
                                'Moderately short log format.')
log_formatter_registry.register('long', LongLogFormatter,
                                'Detailed log format.')
log_formatter_registry.register('line', LineLogFormatter,
                                'Log format with one line per revision.')
log_formatter_registry.register('gnu-changelog', GnuChangelogLogFormatter,
                                'Format used by GNU ChangeLog files.')
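# Example (sketch): callers normally obtain a formatter through the registry;
# the to_file keyword mirrors the formatter constructors used elsewhere in
# this module, and the stream chosen here is only an assumption.
#
#   import sys
#   lf = log_formatter_registry.make_formatter('line', to_file=sys.stdout)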
def register_formatter(name, formatter):
    log_formatter_registry.register(name, formatter)


def log_formatter(name, *args, **kwargs):
    try:
        return log_formatter_registry.make_formatter(name, *args, **kwargs)
    except KeyError:
        raise errors.BzrCommandError(gettext("unknown log formatter: %r") % name)


def show_one_log(revno, rev, delta, verbose, to_file, show_timezone):
    # deprecated; for compatibility
    lf = LongLogFormatter(to_file=to_file, show_timezone=show_timezone)
    lf.show(revno, rev, delta)
def author_list_all(rev):
    return rev.get_apparent_authors()[:]


def author_list_first(rev):
    lst = rev.get_apparent_authors()
    try:
        return lst[:1]
    except IndexError:
        return []


def author_list_committer(rev):
    return [rev.committer]


author_list_registry = registry.Registry()

author_list_registry.register('all', author_list_all,
author_list_registry.register('first', author_list_first,
author_list_registry.register('committer', author_list_committer,
def show_changed_revisions(branch, old_rh, new_rh, to_file=None,
    :param file_list: the list of paths given on the command line;
      the first of these can be a branch location or a file path,
      the remainder must be file paths
    :param add_cleanup: When the branch returned is read locked,
      an unlock call will be queued to the cleanup.
    :return: (branch, info_list, start_rev_info, end_rev_info) where
      info_list is a list of (relative_path, file_id, kind) tuples where
      kind is one of values 'directory', 'file', 'symlink', 'tree-reference'.
      branch will be read-locked.
    """
    from bzrlib.builtins import _get_revision_range
    tree, b, path = controldir.ControlDir.open_containing_tree_or_branch(
        file_list[0])
    add_cleanup(b.lock_read().unlock)
    # XXX: It's damn messy converting a list of paths to relative paths when
    # those paths might be deleted ones, they might be on a case-insensitive
    # filesystem and/or they might be in silly locations (like another branch).