# make_log_request_dict() below
_DEFAULT_REQUEST_PARAMS = {
    'direction': 'reverse',
    'generate_tags': True,
    'exclude_common_ancestry': False,
    '_match_using_deltas': True,
    }


def make_log_request_dict(direction='reverse', specific_fileids=None,
                          start_revision=None, end_revision=None, limit=None,
                          message_search=None, levels=None, generate_tags=True,
                          delta_type=None,
                          diff_type=None, _match_using_deltas=True,
                          exclude_common_ancestry=False, match=None,
                          signature=False):
    """Convenience function for making a logging request dictionary.

    Using this function may make code slightly safer by ensuring
      algorithm used for matching specific_fileids. This parameter
      may be removed in the future so bzrlib client code should NOT
      use it.
    :param exclude_common_ancestry: Whether -rX..Y should be interpreted as a
      range operator or as a graph difference.

    :param signature: show digital signature information

    :param match: Dictionary of lists of search strings to use when filtering
      revisions. Keys can be 'message', 'author', 'committer', 'bugs' or
      the empty string to match any of the preceding properties.
    """
    # Take care of old style message_search parameter
    if message_search:
        if match:
            if 'message' in match:
                match['message'].append(message_search)
            else:
                match['message'] = [message_search]
        else:
            match = {'message': [message_search]}
    return {
        'direction': direction,
        'specific_fileids': specific_fileids,
        'start_revision': start_revision,
        'end_revision': end_revision,
        'limit': limit,
        'levels': levels,
        'generate_tags': generate_tags,
        'delta_type': delta_type,
        'diff_type': diff_type,
        'exclude_common_ancestry': exclude_common_ancestry,
        'signature': signature,
        'match': match,
        # Add 'private' attributes for features that may be deprecated
        '_match_using_deltas': _match_using_deltas,
    }


def _apply_log_request_defaults(rqst):
    """Apply default values to a request dictionary."""
    result = _DEFAULT_REQUEST_PARAMS.copy()
    if rqst:
        result.update(rqst)
    return result
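

def _example_build_request():
    # Illustrative sketch, not part of bzrlib: the limit and search strings
    # below are made up.  make_log_request_dict() returns a fully-populated
    # request, while _apply_log_request_defaults() is the helper used to
    # normalise a hand-built partial dict against _DEFAULT_REQUEST_PARAMS.
    full = make_log_request_dict(levels=1,
                                 match={'message': ['fix'],
                                        'author': ['jane@example.com']})
    partial = _apply_log_request_defaults({'limit': 10})
    return full, partial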


def format_signature_validity(rev_id, repo):
    """Get the signature validity.

    :param rev_id: revision id to validate
    :param repo: repository of revision
    :return: human readable string to print to log
    """
    from bzrlib import gpg

    gpg_strategy = gpg.GPGStrategy(None)
    result = repo.verify_revision(rev_id, gpg_strategy)
    if result[0] == gpg.SIGNATURE_VALID:
        return "valid signature from {0}".format(result[1])
    if result[0] == gpg.SIGNATURE_KEY_MISSING:
        return "unknown key {0}".format(result[1])
    if result[0] == gpg.SIGNATURE_NOT_VALID:
        return "invalid signature!"
    if result[0] == gpg.SIGNATURE_NOT_SIGNED:
        return "no signature"
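

def _example_signature_line(branch, rev_id):
    # Illustrative sketch, not part of bzrlib: how a caller with an open
    # branch might turn the verification result above into a printable line
    # when rqst['signature'] is set.
    return 'signature: %s' % format_signature_validity(rev_id,
                                                       branch.repository)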


class LogGenerator(object):
    """A generator of log revisions."""

        # Tweak the LogRequest based on what the LogFormatter can handle.
        # (There's no point generating stuff if the formatter can't display it.)
        if rqst['levels'] is None or lf.get_levels() > rqst['levels']:
            # user didn't specify levels, use whatever the LF can handle:
            rqst['levels'] = lf.get_levels()
        if not getattr(lf, 'supports_tags', False):
            rqst['generate_tags'] = False
        if not getattr(lf, 'supports_delta', False):
            rqst['delta_type'] = None
        if not getattr(lf, 'supports_diff', False):
            rqst['diff_type'] = None
        if not getattr(lf, 'supports_signatures', False):
            rqst['signature'] = False

        # Find and print the interesting revisions
        generator = self._generator_factory(self.branch, rqst)

        generate_merge_revisions = rqst.get('levels') != 1
        delayed_graph_generation = not rqst.get('specific_fileids') and (
                rqst.get('limit') or self.start_rev_id or self.end_rev_id)
        view_revisions = _calc_view_revisions(
            self.branch, self.start_rev_id, self.end_rev_id,
            rqst.get('direction'),
            generate_merge_revisions=generate_merge_revisions,
            delayed_graph_generation=delayed_graph_generation,
            exclude_common_ancestry=rqst.get('exclude_common_ancestry'))

        # Apply the other filters
        return make_log_rev_iterator(self.branch, view_revisions,
            rqst.get('delta_type'), rqst.get('match'),
            file_ids=rqst.get('specific_fileids'),
            direction=rqst.get('direction'))

        # Note that we always generate the merge revisions because
        # filter_revisions_touching_file_id() requires them ...
        view_revisions = _calc_view_revisions(
            self.branch, self.start_rev_id, self.end_rev_id,
            rqst.get('direction'), generate_merge_revisions=True,
            exclude_common_ancestry=rqst.get('exclude_common_ancestry'))
        if not isinstance(view_revisions, list):
            view_revisions = list(view_revisions)
        view_revisions = _filter_revisions_touching_file_id(self.branch,
            rqst.get('specific_fileids')[0], view_revisions,
            include_merges=rqst.get('levels') != 1)
        return make_log_rev_iterator(self.branch, view_revisions,
            rqst.get('delta_type'), rqst.get('match'))


def _calc_view_revisions(branch, start_rev_id, end_rev_id, direction,
                         generate_merge_revisions,
                         delayed_graph_generation=False,
                         exclude_common_ancestry=False,
                         ):
    """Calculate the revisions to view.

    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
             a list of the same tuples.
    """
    if (exclude_common_ancestry and start_rev_id == end_rev_id):
        raise errors.BzrCommandError(
            '--exclude-common-ancestry requires two different revisions')
    if direction not in ('reverse', 'forward'):
        raise ValueError('invalid direction %r' % direction)
    br_revno, br_rev_id = branch.last_revision_info()
    if br_revno == 0:
        return []
    if (end_rev_id and start_rev_id == end_rev_id
        and (not generate_merge_revisions
             or not _has_merges(branch, end_rev_id))):
        # If a single revision is requested, check we can handle it
        iter_revs = _generate_one_revision(branch, end_rev_id, br_rev_id,
                                           br_revno)
    elif not generate_merge_revisions:
        # If we only want to see linear revisions, we can iterate ...
        iter_revs = _generate_flat_revisions(branch, start_rev_id, end_rev_id,
                                             direction, exclude_common_ancestry)
        if direction == 'forward':
            iter_revs = reversed(iter_revs)
    else:
        iter_revs = _generate_all_revisions(branch, start_rev_id, end_rev_id,
                                            direction, delayed_graph_generation,
                                            exclude_common_ancestry)
        if direction == 'forward':
            iter_revs = _rebase_merge_depth(reverse_by_depth(list(iter_revs)))
    return iter_revs
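
# Illustrative note (not part of bzrlib): the view tuples handled above are
# (revision_id, dotted_revno_string, merge_depth), for example:
#
#     [('rev-3', '3', 0), ('rev-2.1.1', '2.1.1', 1), ('rev-2', '2', 0)]
#
# 'forward' ordering re-uses the same tuples, only re-ordered oldest first.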


def _generate_one_revision(branch, rev_id, br_rev_id, br_revno):
    if rev_id == br_rev_id:
        # It's the tip
        return [(br_rev_id, br_revno, 0)]
    else:
        revno_str = _compute_revno_str(branch, rev_id)
        return [(rev_id, revno_str, 0)]


def _generate_flat_revisions(branch, start_rev_id, end_rev_id, direction,
                             exclude_common_ancestry=False):
    result = _linear_view_revisions(
        branch, start_rev_id, end_rev_id,
        exclude_common_ancestry=exclude_common_ancestry)
    # If a start limit was given and it's not obviously an
    # ancestor of the end limit, check it before outputting anything
    if direction == 'forward' or (start_rev_id
            and not _is_obvious_ancestor(branch, start_rev_id, end_rev_id)):
        try:
            result = list(result)
        except _StartNotLinearAncestor:
            raise errors.BzrCommandError('Start revision not found in'
                ' left-hand history of end revision.')
    return result


def _generate_all_revisions(branch, start_rev_id, end_rev_id, direction,
                            delayed_graph_generation,
                            exclude_common_ancestry=False):
    # On large trees, generating the merge graph can take 30-60 seconds
    # so we delay doing it until a merge is detected, incrementally
    # returning initial (non-merge) revisions while we can.

    # The above is only true for old formats (<= 0.92), for newer formats, a
    # couple of seconds only should be needed to load the whole graph and the
    # other graph operations needed are even faster than that -- vila 100201
    initial_revisions = []
    if delayed_graph_generation:
        try:
            for rev_id, revno, depth in _linear_view_revisions(
                    branch, start_rev_id, end_rev_id, exclude_common_ancestry):
                if _has_merges(branch, rev_id):
                    # The end_rev_id can be nested down somewhere. We need an
                    # explicit ancestry check. There is an ambiguity here as we
                    # may not raise _StartNotLinearAncestor for a revision that
                    # is an ancestor but not a *linear* one. But since we have
                    # loaded the graph to do the check (or calculate a dotted
                    # revno), we may as well accept to show the log... We need
                    # the check only if start_rev_id is not None as all
                    # revisions have _mod_revision.NULL_REVISION as an ancestor
                    graph = branch.repository.get_graph()
                    if (start_rev_id is not None
                            and not graph.is_ancestor(start_rev_id, end_rev_id)):
                        raise _StartNotLinearAncestor()
                    # Since we collected the revisions so far, we need to
                    # adjust end_rev_id.
                    end_rev_id = rev_id
                    break
                else:
                    initial_revisions.append((rev_id, revno, depth))
            else:
                # No merged revisions found
                return initial_revisions
        except _StartNotLinearAncestor:
            # A merge was never detected so the lower revision limit can't
            # be nested down somewhere
            raise errors.BzrCommandError('Start revision not found in'
                ' history of end revision.')

    # We exit the loop above because we encounter a revision with merges, from
    # this revision, we need to switch to _graph_view_revisions.

    # A log including nested merges is required. If the direction is reverse,
    # we rebase the initial merge depths so that the development line is
    # shown naturally, i.e. just like it is for linear logging. We can easily
    # make forward the exact opposite display, but showing the merge revisions
    # indented at the end seems slightly nicer in that case.
    view_revisions = chain(iter(initial_revisions),
        _graph_view_revisions(branch, start_rev_id, end_rev_id,
                              rebase_initial_depths=(direction == 'reverse'),
                              exclude_common_ancestry=exclude_common_ancestry))
    return view_revisions


def _has_merges(branch, rev_id):
    parents = branch.repository.get_parent_map([rev_id]).get(rev_id, [])
    return len(parents) > 1


def _compute_revno_str(branch, rev_id):
    """Compute the revno string from a rev_id.

    :return: The revno string, or None if the revision is not in the supplied
        branch.
    """
    try:
        revno = branch.revision_id_to_dotted_revno(rev_id)
    except errors.NoSuchRevision:
        # The revision must be outside of this branch
        return None
    else:
        return '.'.join(str(n) for n in revno)
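
# Illustrative note (not part of bzrlib): revision_id_to_dotted_revno()
# returns a tuple such as (3,) for a mainline revision or (2, 1, 1) for a
# merged one, so the helper above renders '3' or '2.1.1', and None when the
# revision is not present in the branch at all.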


def _is_obvious_ancestor(branch, start_rev_id, end_rev_id):
    """Is start_rev_id an obvious ancestor of end_rev_id?"""
    if start_rev_id and end_rev_id:
        try:
            start_dotted = branch.revision_id_to_dotted_revno(start_rev_id)
            end_dotted = branch.revision_id_to_dotted_revno(end_rev_id)
        except errors.NoSuchRevision:
            # one or both is not in the branch; not obvious
            return False
        if len(start_dotted) == 1 and len(end_dotted) == 1:
            # both on mainline
            return start_dotted[0] <= end_dotted[0]
    # if either start or end is not specified then we use either the first or
    # the last revision and *they* are obvious ancestors.


def _linear_view_revisions(branch, start_rev_id, end_rev_id,
                           exclude_common_ancestry=False):
    """Calculate a sequence of revisions to view, newest to oldest.

    :param start_rev_id: the lower revision-id
    :param end_rev_id: the upper revision-id
    :param exclude_common_ancestry: Whether the start_rev_id should be part of
        the iterated revisions.
    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
    :raises _StartNotLinearAncestor: if a start_rev_id is specified but
        is not found walking the left-hand history
    """
    br_revno, br_rev_id = branch.last_revision_info()
    repo = branch.repository
    graph = repo.get_graph()
    if start_rev_id is None and end_rev_id is None:
        cur_revno = br_revno
        for revision_id in graph.iter_lefthand_ancestry(br_rev_id,
                (_mod_revision.NULL_REVISION,)):
            yield revision_id, str(cur_revno), 0
            cur_revno -= 1
    else:
        if end_rev_id is None:
            end_rev_id = br_rev_id
        found_start = start_rev_id is None
        for revision_id in graph.iter_lefthand_ancestry(end_rev_id,
                (_mod_revision.NULL_REVISION,)):
            revno_str = _compute_revno_str(branch, revision_id)
            if not found_start and revision_id == start_rev_id:
                if not exclude_common_ancestry:
                    yield revision_id, revno_str, 0
                found_start = True

                    depth_adjustment = merge_depth
                if depth_adjustment:
                    if merge_depth < depth_adjustment:
                        # From now on we reduce the depth adjustment, this can be
                        # surprising for users. The alternative requires two passes
                        # which breaks the fast display of the first revision
                        depth_adjustment = merge_depth
                    merge_depth -= depth_adjustment
        yield rev_id, '.'.join(map(str, revno)), merge_depth
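

def _example_mainline_revision_ids(branch):
    # Illustrative sketch, not part of bzrlib: the same left-hand-history walk
    # that _linear_view_revisions() above relies on, collected into a list
    # running from the branch tip back to its origin.
    graph = branch.repository.get_graph()
    return list(graph.iter_lefthand_ancestry(branch.last_revision(),
                                             (_mod_revision.NULL_REVISION,)))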


def calculate_view_revisions(branch, start_revision, end_revision, direction,
                             specific_fileid, generate_merge_revisions):
    """Calculate the revisions to view.

    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
             a list of the same tuples.
    """
    # This method is no longer called by the main code path.
    # It is retained for API compatibility and may be deprecated
    start_rev_id, end_rev_id = _get_revision_limits(branch, start_revision,
        end_revision)
    view_revisions = list(_calc_view_revisions(branch, start_rev_id, end_rev_id,
        direction, generate_merge_revisions or specific_fileid))
    if specific_fileid:
        view_revisions = _filter_revisions_touching_file_id(branch,
            specific_fileid, view_revisions,
            include_merges=generate_merge_revisions)
    return _rebase_merge_depth(view_revisions)


def _rebase_merge_depth(view_revisions):
    """Adjust depths upwards so the top level is 0."""
    # If either the first or last revision has a merge_depth of 0, we're done

    return log_rev_iterator


def _make_search_filter(branch, generate_delta, match, log_rev_iterator):
    """Create a filtered iterator of log_rev_iterator matching on a regex.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
    :param match: A dictionary with properties as keys and lists of strings
        as values. To match, a revision may match any of the supplied strings
        within a single property but must match at least one string for each
        property.
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    if match is None:
        return log_rev_iterator
    searchRE = [(k, [re.compile(x, re.IGNORECASE) for x in v])
                for (k, v) in match.iteritems()]
    return _filter_re(searchRE, log_rev_iterator)
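

def _example_compiled_match():
    # Illustrative sketch, not part of bzrlib: the shape of the 'match'
    # dictionary accepted above and the compiled form it is turned into.
    # The search strings are made up.
    match = {'message': ['fix', 'bug'], 'author': ['jane']}
    return [(k, [re.compile(x, re.IGNORECASE) for x in v])
            for (k, v) in match.iteritems()]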


def _filter_re(searchRE, log_rev_iterator):
    for revs in log_rev_iterator:
        new_revs = [rev for rev in revs if _match_filter(searchRE, rev[1])]
        if new_revs:
            yield new_revs


def _match_filter(searchRE, rev):
    strings = {
               'message': (rev.message,),
               'committer': (rev.committer,),
               'author': (rev.get_apparent_authors()),
               'bugs': list(rev.iter_bugs())
               }
    strings[''] = [item for inner_list in strings.itervalues()
                   for item in inner_list]
    for (k, v) in searchRE:
        if k in strings and not _match_any_filter(strings[k], v):
            return False
    return True


def _match_any_filter(strings, res):
    return any([filter(None, map(re.search, strings)) for re in res])
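

def _example_match(rev):
    # Illustrative sketch, not part of bzrlib: keep a revision only if its
    # message mentions a bug number AND its author is at example.com.  Within
    # one property any pattern may match; across properties every listed
    # property must match, mirroring _match_filter() above.
    searchRE = [('message', [re.compile(r'#\d+')]),
                ('author', [re.compile(r'@example\.com', re.IGNORECASE)])]
    return _match_filter(searchRE, rev)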


def _make_delta_filter(branch, generate_delta, search, log_rev_iterator,
                       fileids=None, direction='reverse'):
    return mainline_revs, rev_nos, start_rev_id, end_rev_id


def _filter_revision_range(view_revisions, start_rev_id, end_rev_id):
    """Filter view_revisions based on revision ranges.

    :param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
         tuples to be filtered.

    :param start_rev_id: If not NONE specifies the first revision to be logged.
        If NONE then all revisions up to the end_rev_id are logged.

    :param end_rev_id: If not NONE specifies the last revision to be logged.
        If NONE then all revisions up to the end of the log are logged.

    :return: The filtered view_revisions.
    """
    # This method is no longer called by the main code path.
    # It may be removed soon. IGC 20090127
    if start_rev_id or end_rev_id:
        revision_ids = [r for r, n, d in view_revisions]
        if start_rev_id:
            start_index = revision_ids.index(start_rev_id)
        else:
            start_index = 0
        if start_rev_id == end_rev_id:
            end_index = start_index
        else:
            if end_rev_id:
                end_index = revision_ids.index(end_rev_id)
            else:
                end_index = len(view_revisions) - 1
        # To include the revisions merged into the last revision,
        # extend end_rev_id down to, but not including, the next rev
        # with the same or lesser merge_depth
        end_merge_depth = view_revisions[end_index][2]
        try:
            for index in xrange(end_index+1, len(view_revisions)+1):
                if view_revisions[index][2] <= end_merge_depth:
                    end_index = index - 1
                    break
        except IndexError:
            # if the search falls off the end then log to the end as well
            end_index = len(view_revisions) - 1
        view_revisions = view_revisions[start_index:end_index+1]
    return view_revisions


def _filter_revisions_touching_file_id(branch, file_id, view_revisions,
    include_merges=True):
    r"""Return the list of revision ids which touch a given file id.


def get_view_revisions(mainline_revs, rev_nos, branch, direction,
                       include_merges=True):
    """Produce an iterator of revisions to show
    :return: an iterator of (revision_id, revno, merge_depth)
    (if there is no revno for a revision, None is supplied)
    """
    # This method is no longer called by the main code path.
    # It is retained for API compatibility and may be deprecated
    # soon. IGC 20090127
    if not include_merges:
        revision_ids = mainline_revs[1:]
        if direction == 'reverse':
            revision_ids.reverse()
        for revision_id in revision_ids:
            yield revision_id, str(rev_nos[revision_id]), 0
        return
    graph = branch.repository.get_graph()
    # This asks for all mainline revisions, which means we only have to spider
    # sideways, rather than depth history. That said, it's still size-of-history
    # and should be addressed.
    # mainline_revisions always includes an extra revision at the beginning, so
    # don't request it.
    parent_map = dict(((key, value) for key, value in
        graph.iter_ancestry(mainline_revs[1:]) if value is not None))
    # filter out ghosts; merge_sort errors on ghosts.
    rev_graph = _mod_repository._strip_NULL_ghosts(parent_map)
    merge_sorted_revisions = tsort.merge_sort(
        rev_graph,
        mainline_revs[-1],
        mainline_revs,
        generate_revno=True)

    if direction == 'forward':
        # forward means oldest first.
        merge_sorted_revisions = reverse_by_depth(merge_sorted_revisions)
    elif direction != 'reverse':
        raise ValueError('invalid direction %r' % direction)

    for (sequence, rev_id, merge_depth, revno, end_of_merge
         ) in merge_sorted_revisions:
        yield rev_id, '.'.join(map(str, revno)), merge_depth


def reverse_by_depth(merge_sorted_revisions, _depth=0):
    """Reverse revisions by depth.
    to indicate which LogRevision attributes it supports:

    - supports_delta must be True if this log formatter supports delta.
      Otherwise the delta attribute may not be populated. The 'delta_format'
      attribute describes whether the 'short_status' format (1) or the long
      one (2) should be used.

    - supports_merge_revisions must be True if this log formatter supports
      merge revisions. If not, then only mainline revisions will be passed
      to the formatter.

    - preferred_levels is the number of levels this formatter defaults to.
      The default value is zero meaning display all levels.
      This value is only relevant if supports_merge_revisions is True.

    - supports_tags must be True if this log formatter supports tags.
      Otherwise the tags attribute may not be populated.

    - supports_diff must be True if this log formatter supports diffs.
      Otherwise the diff attribute may not be populated.

    - supports_signatures must be True if this log formatter supports GPG
      signatures.

    Plugins can register functions to show custom revision properties using
    the properties_handler_registry. The registered function
    must respect the following interface description::

        def my_show_properties(properties_dict):
            # code that returns a dict {'name':'value'} of the properties
            # to be shown
    """

    preferred_levels = 0

    def __init__(self, to_file, show_ids=False, show_timezone='original',
                 delta_format=None, levels=None, show_advice=False,
                 to_exact_file=None, author_list_handler=None):
        """Create a LogFormatter.

        :param to_file: the file to output to
        :param to_exact_file: if set, gives an output stream to which
            non-Unicode diffs are written.
        :param show_ids: if True, revision-ids are to be displayed
        :param show_timezone: the timezone to use

    def short_author(self, rev):
        return self.authors(rev, 'first', short=True, sep=', ')

    def authors(self, rev, who, short=False, sep=None):
        """Generate list of authors, taking --authors option into account.

        The caller has to specify the name of an author list handler,
        as provided by the author list registry, using the ``who``
        argument. That name only sets a default, though: when the
        user selected a different author list generation using the
        ``--authors`` command line switch, as represented by the
        ``author_list_handler`` constructor argument, that value takes
        precedence.

        :param rev: The revision for which to generate the list of authors.
        :param who: Name of the default handler.
        :param short: Whether to shorten names to either name or address.
        :param sep: What separator to use for automatic concatenation.
        """
        if self._author_list_handler is not None:
            # The user did specify --authors, which overrides the default
            author_list_handler = self._author_list_handler
        else:
            # The user didn't specify --authors, so we use the caller's default
            author_list_handler = author_list_registry.get(who)
        names = author_list_handler(rev)
        if short:
            for i in range(len(names)):
                name, address = config.parse_username(names[i])
                if name:
                    names[i] = name
                else:
                    names[i] = address
        if sep is not None:
            names = sep.join(names)
        return names
1373
def merge_marker(self, revision):
1458
1374
"""Get the merge marker to include in the output or '' if none."""

                self.merge_marker(revision)))
        if revision.tags:
            lines.append('tags: %s' % (', '.join(revision.tags)))
        if self.show_ids or revision.revno is None:
            lines.append('revision-id: %s' % (revision.rev.revision_id,))
        if self.show_ids:
            for parent_id in revision.rev.parent_ids:
                lines.append('parent: %s' % (parent_id,))
        lines.extend(self.custom_properties(revision.rev))

        committer = revision.rev.committer
        authors = self.authors(revision.rev, 'all')
        if authors != [committer]:
            lines.append('author: %s' % (", ".join(authors),))
        lines.append('committer: %s' % (committer,))

        to_file = self.to_file
        to_file.write("%s%s\n" % (indent, ('\n' + indent).join(lines)))
        if revision.delta is not None:
            # Use the standard status output to display changes
            from bzrlib.delta import report_delta
            report_delta(to_file, revision.delta, short_status=False,
                         show_ids=self.show_ids, indent=indent)
        if revision.diff is not None:
            to_file.write(indent + 'diff:\n')
            to_file.flush()

        if revision.tags:
            tags = ' {%s}' % (', '.join(revision.tags))
        to_file.write(indent + "%*s %s\t%s%s%s\n" % (revno_width,
                revision.revno or "", self.short_author(revision.rev),
                format_date(revision.rev.timestamp,
                            revision.rev.timezone or 0,
                            self.show_timezone, date_fmt="%Y-%m-%d",
                            show_offset=False),
                tags, self.merge_marker(revision)))
        self.show_properties(revision.rev, indent+offset)
        if self.show_ids or revision.revno is None:
            to_file.write(indent + offset + 'revision-id:%s\n'
                          % (revision.rev.revision_id,))
        if not revision.rev.message:
            to_file.write(indent + offset + '(no message)\n')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                to_file.write(indent + offset + '%s\n' % (l,))

        if revision.delta is not None:
            # Use the standard status output to display changes
            from bzrlib.delta import report_delta
            report_delta(to_file, revision.delta,
                         short_status=self.delta_format==1,
                         show_ids=self.show_ids, indent=indent + offset)
        if revision.diff is not None:
            self.show_diff(self.to_exact_file, revision.diff, ' ')
        to_file.write('\n')

    def log_string(self, revno, rev, max_chars, tags=None, prefix=''):
        """Format log info into one string. Truncate tail of string

        :param revno: revision number or None.
            Revision numbers count from 1.
        :param rev: revision object
        :param max_chars: maximum length of resulting string
        :param tags: list of tags or None
        :param prefix: string to prefix each line
        :return: formatted truncated string
        """
        out = []
        if revno:
            # show revno only when it is not None
            out.append("%s:" % revno)
        if max_chars is not None:
            out.append(self.truncate(self.short_author(rev), (max_chars+3)/4))
        else:
            out.append(self.short_author(rev))
        out.append(self.date_string(rev))
        if len(rev.parent_ids) > 1:
            out.append('[merge]')
        raise errors.BzrCommandError("unknown log formatter: %r" % name)
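

def _example_show_log(branch, to_file):
    # Illustrative sketch, not part of bzrlib: wiring a registered formatter,
    # a request dictionary and the Logger class (defined elsewhere in this
    # module) together.  The limit and search string are made up.
    lf = log_formatter('long', to_file=to_file)
    rqst = make_log_request_dict(limit=10, levels=1,
                                 match={'message': ['fix']})
    Logger(branch, rqst).show(lf)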


def author_list_all(rev):
    return rev.get_apparent_authors()[:]


def author_list_first(rev):
    lst = rev.get_apparent_authors()
    try:
        return [lst[0]]
    except IndexError:
        return []


def author_list_committer(rev):
    return [rev.committer]
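

def _example_author_list_domains(rev):
    # Illustrative sketch, not part of bzrlib: a handler in the same style as
    # those above, returning only the mail domain of each apparent author.
    # It could be registered under a made-up key, for example:
    #   author_list_registry.register('domains', _example_author_list_domains,
    #                                 'Author mail domains')
    return [config.parse_username(a)[1].split('@')[-1]
            for a in rev.get_apparent_authors()]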


author_list_registry = registry.Registry()

author_list_registry.register('all', author_list_all,
                              'All authors')

author_list_registry.register('first', author_list_first,
                              'The first author')

author_list_registry.register('committer', author_list_committer,
                              'The committer')


def show_one_log(revno, rev, delta, verbose, to_file, show_timezone):
    # deprecated; for compatibility
    lf = LongLogFormatter(to_file=to_file, show_timezone=show_timezone)
    lf.show(revno, rev, delta)
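

class _ExampleOneLineFormatter(LogFormatter):
    """Illustrative sketch, not part of bzrlib: a minimal custom formatter.

    It declares its capabilities through the supports_* attributes documented
    on LogFormatter above and prints a single line per revision.
    """

    supports_merge_revisions = True
    preferred_levels = 1
    supports_tags = True

    def log_revision(self, revision):
        # 'revision' is a LogRevision; the underlying Revision is revision.rev
        tags = ''
        if revision.tags:
            tags = ' {%s}' % (', '.join(revision.tags))
        self.to_file.write('%s %s%s\n' % (revision.revno or '',
            self.short_author(revision.rev), tags))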


def show_changed_revisions(branch, old_rh, new_rh, to_file=None,
    :param file_list: the list of paths given on the command line;
      the first of these can be a branch location or a file path,
      the remainder must be file paths
    :param add_cleanup: When the branch returned is read locked,
      an unlock call will be queued to the cleanup.
    :return: (branch, info_list, start_rev_info, end_rev_info) where
      info_list is a list of (relative_path, file_id, kind) tuples where
      kind is one of values 'directory', 'file', 'symlink', 'tree-reference'.
      branch will be read-locked.
    """
    from builtins import _get_revision_range
    tree, b, path = bzrdir.BzrDir.open_containing_tree_or_branch(file_list[0])
    add_cleanup(b.lock_read().unlock)
    # XXX: It's damn messy converting a list of paths to relative paths when
    # those paths might be deleted ones, they might be on a case-insensitive
    # filesystem and/or they might be in silly locations (like another branch).