120
154
direction='reverse',
121
155
start_revision=None,
122
156
end_revision=None,
124
161
"""Write out human-readable log of commits to this branch.
127
LogFormatter object to show the output.
130
If true, list only the commits affecting the specified
131
file, rather than all commits.
134
If true show added/changed/deleted/renamed files.
137
'reverse' (default) is latest to earliest;
138
'forward' is earliest to latest.
141
If not None, only show revisions >= start_revision
144
If not None, only show revisions <= end_revision
163
This function is being retained for backwards compatibility but
164
should not be extended with new parameters. Use the new Logger class
165
instead, eg. Logger(branch, rqst).show(lf), adding parameters to the
166
make_log_request_dict function.
168
:param lf: The LogFormatter object showing the output.
170
:param specific_fileid: If not None, list only the commits affecting the
171
specified file, rather than all commits.
173
:param verbose: If True show added/changed/deleted/renamed files.
175
:param direction: 'reverse' (default) is latest to earliest; 'forward' is
178
:param start_revision: If not None, only show revisions >= start_revision
180
:param end_revision: If not None, only show revisions <= end_revision
182
:param search: If not None, only show revisions with matching commit
185
:param limit: If set, shows only 'limit' revisions, all revisions are shown
188
:param show_diff: If True, output a diff after each revision.
190
:param match: Dictionary of search lists to use when matching revision
193
# Convert old-style parameters to new-style parameters
194
if specific_fileid is not None:
195
file_ids = [specific_fileid]
200
delta_type = 'partial'
207
diff_type = 'partial'
213
# Build the request and execute it
214
rqst = make_log_request_dict(direction=direction, specific_fileids=file_ids,
215
start_revision=start_revision, end_revision=end_revision,
216
limit=limit, message_search=search,
217
delta_type=delta_type, diff_type=diff_type)
218
Logger(branch, rqst).show(lf)
221
# Note: This needs to be kept in sync with the defaults in
222
# make_log_request_dict() below
223
_DEFAULT_REQUEST_PARAMS = {
224
'direction': 'reverse',
226
'generate_tags': True,
227
'exclude_common_ancestry': False,
228
'_match_using_deltas': True,
232
def make_log_request_dict(direction='reverse', specific_fileids=None,
233
start_revision=None, end_revision=None, limit=None,
234
message_search=None, levels=None, generate_tags=True,
236
diff_type=None, _match_using_deltas=True,
237
exclude_common_ancestry=False, match=None,
238
signature=False, omit_merges=False,
240
"""Convenience function for making a logging request dictionary.
242
Using this function may make code slightly safer by ensuring
243
parameters have the correct names. It also provides a reference
244
point for documenting the supported parameters.
246
:param direction: 'reverse' (default) is latest to earliest;
247
'forward' is earliest to latest.
249
:param specific_fileids: If not None, only include revisions
250
affecting the specified files, rather than all revisions.
252
:param start_revision: If not None, only generate
253
revisions >= start_revision
255
:param end_revision: If not None, only generate
256
revisions <= end_revision
258
:param limit: If set, generate only 'limit' revisions, all revisions
259
are shown if None or 0.
261
:param message_search: If not None, only include revisions with
262
matching commit messages
264
:param levels: the number of levels of revisions to
265
generate; 1 for just the mainline; 0 for all levels, or None for
268
:param generate_tags: If True, include tags for matched revisions.
270
:param delta_type: Either 'full', 'partial' or None.
271
'full' means generate the complete delta - adds/deletes/modifies/etc;
272
'partial' means filter the delta using specific_fileids;
273
None means do not generate any delta.
275
:param diff_type: Either 'full', 'partial' or None.
276
'full' means generate the complete diff - adds/deletes/modifies/etc;
277
'partial' means filter the diff using specific_fileids;
278
None means do not generate any diff.
280
:param _match_using_deltas: a private parameter controlling the
281
algorithm used for matching specific_fileids. This parameter
282
may be removed in the future so bzrlib client code should NOT
285
:param exclude_common_ancestry: Whether -rX..Y should be interpreted as a
286
range operator or as a graph difference.
288
:param signature: show digital signature information
290
:param match: Dictionary of list of search strings to use when filtering
291
revisions. Keys can be 'message', 'author', 'committer', 'bugs' or
292
the empty string to match any of the preceding properties.
294
:param omit_merges: If True, commits with more than one parent are
298
# Take care of old style message_search parameter
301
if 'message' in match:
302
match['message'].append(message_search)
304
match['message'] = [message_search]
306
match={ 'message': [message_search] }
308
'direction': direction,
309
'specific_fileids': specific_fileids,
310
'start_revision': start_revision,
311
'end_revision': end_revision,
314
'generate_tags': generate_tags,
315
'delta_type': delta_type,
316
'diff_type': diff_type,
317
'exclude_common_ancestry': exclude_common_ancestry,
318
'signature': signature,
320
'omit_merges': omit_merges,
321
# Add 'private' attributes for features that may be deprecated
322
'_match_using_deltas': _match_using_deltas,
326
def _apply_log_request_defaults(rqst):
327
"""Apply default values to a request dictionary."""
328
result = _DEFAULT_REQUEST_PARAMS.copy()
334
def format_signature_validity(rev_id, repo):
    """Describe the validity of a revision's digital signature.

    :param rev_id: revision id to validate
    :param repo: repository of revision
    :return: human readable string to print to log, or None if the
        verification result is not one of the recognised statuses
    """
    from bzrlib import gpg

    strategy = gpg.GPGStrategy(None)
    result = repo.verify_revision_signature(rev_id, strategy)
    status = result[0]
    if status == gpg.SIGNATURE_VALID:
        return "valid signature from {0}".format(result[1])
    elif status == gpg.SIGNATURE_KEY_MISSING:
        return "unknown key {0}".format(result[1])
    elif status == gpg.SIGNATURE_NOT_VALID:
        return "invalid signature!"
    elif status == gpg.SIGNATURE_NOT_SIGNED:
        return "no signature"
355
class LogGenerator(object):
    """Abstract base for objects that produce log revisions.

    Concrete subclasses must implement iter_log_revisions().
    """

    def iter_log_revisions(self):
        """Iterate over LogRevision objects.

        :return: An iterator yielding LogRevision objects.
        """
        raise NotImplementedError(self.iter_log_revisions)
366
class Logger(object):
367
"""An object that generates, formats and displays a log."""
369
def __init__(self, branch, rqst):
372
:param branch: the branch to log
373
:param rqst: A dictionary specifying the query parameters.
374
See make_log_request_dict() for supported values.
377
self.rqst = _apply_log_request_defaults(rqst)
382
:param lf: The LogFormatter object to send the output to.
384
if not isinstance(lf, LogFormatter):
385
warn("not a LogFormatter instance: %r" % lf)
387
self.branch.lock_read()
389
if getattr(lf, 'begin_log', None):
392
if getattr(lf, 'end_log', None):
397
def _show_body(self, lf):
398
"""Show the main log output.
400
Subclasses may wish to override this.
402
# Tweak the LogRequest based on what the LogFormatter can handle.
403
# (There's no point generating stuff if the formatter can't display it.)
405
if rqst['levels'] is None or lf.get_levels() > rqst['levels']:
406
# user didn't specify levels, use whatever the LF can handle:
407
rqst['levels'] = lf.get_levels()
409
if not getattr(lf, 'supports_tags', False):
410
rqst['generate_tags'] = False
411
if not getattr(lf, 'supports_delta', False):
412
rqst['delta_type'] = None
413
if not getattr(lf, 'supports_diff', False):
414
rqst['diff_type'] = None
415
if not getattr(lf, 'supports_signatures', False):
416
rqst['signature'] = False
418
# Find and print the interesting revisions
419
generator = self._generator_factory(self.branch, rqst)
420
for lr in generator.iter_log_revisions():
424
def _generator_factory(self, branch, rqst):
425
"""Make the LogGenerator object to use.
427
Subclasses may wish to override this.
429
return _DefaultLogGenerator(branch, rqst)
432
class _StartNotLinearAncestor(Exception):
433
"""Raised when a start revision is not found walking left-hand history."""
436
class _DefaultLogGenerator(LogGenerator):
437
"""The default generator of log revisions."""
439
def __init__(self, branch, rqst):
442
if rqst.get('generate_tags') and branch.supports_tags():
443
self.rev_tag_dict = branch.tags.get_reverse_tag_dict()
445
self.rev_tag_dict = {}
447
def iter_log_revisions(self):
448
"""Iterate over LogRevision objects.
450
:return: An iterator yielding LogRevision objects.
453
levels = rqst.get('levels')
454
limit = rqst.get('limit')
455
diff_type = rqst.get('diff_type')
456
show_signature = rqst.get('signature')
457
omit_merges = rqst.get('omit_merges')
459
revision_iterator = self._create_log_revision_iterator()
460
for revs in revision_iterator:
461
for (rev_id, revno, merge_depth), rev, delta in revs:
462
# 0 levels means show everything; merge_depth counts from 0
463
if levels != 0 and merge_depth >= levels:
465
if omit_merges and len(rev.parent_ids) > 1:
467
if diff_type is None:
470
diff = self._format_diff(rev, rev_id, diff_type)
472
signature = format_signature_validity(rev_id,
473
self.branch.repository)
476
yield LogRevision(rev, revno, merge_depth, delta,
477
self.rev_tag_dict.get(rev_id), diff, signature)
480
if log_count >= limit:
483
def _format_diff(self, rev, rev_id, diff_type):
484
repo = self.branch.repository
485
if len(rev.parent_ids) == 0:
486
ancestor_id = _mod_revision.NULL_REVISION
488
ancestor_id = rev.parent_ids[0]
489
tree_1 = repo.revision_tree(ancestor_id)
490
tree_2 = repo.revision_tree(rev_id)
491
file_ids = self.rqst.get('specific_fileids')
492
if diff_type == 'partial' and file_ids is not None:
493
specific_files = [tree_2.id2path(id) for id in file_ids]
495
specific_files = None
497
path_encoding = get_diff_header_encoding()
498
diff.show_diff_trees(tree_1, tree_2, s, specific_files, old_label='',
499
new_label='', path_encoding=path_encoding)
502
def _create_log_revision_iterator(self):
503
"""Create a revision iterator for log.
505
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
508
self.start_rev_id, self.end_rev_id = _get_revision_limits(
509
self.branch, self.rqst.get('start_revision'),
510
self.rqst.get('end_revision'))
511
if self.rqst.get('_match_using_deltas'):
512
return self._log_revision_iterator_using_delta_matching()
514
# We're using the per-file-graph algorithm. This scales really
515
# well but only makes sense if there is a single file and it's
517
file_count = len(self.rqst.get('specific_fileids'))
519
raise BzrError("illegal LogRequest: must match-using-deltas "
520
"when logging %d files" % file_count)
521
return self._log_revision_iterator_using_per_file_graph()
523
def _log_revision_iterator_using_delta_matching(self):
524
# Get the base revisions, filtering by the revision range
526
generate_merge_revisions = rqst.get('levels') != 1
527
delayed_graph_generation = not rqst.get('specific_fileids') and (
528
rqst.get('limit') or self.start_rev_id or self.end_rev_id)
529
view_revisions = _calc_view_revisions(
530
self.branch, self.start_rev_id, self.end_rev_id,
531
rqst.get('direction'),
532
generate_merge_revisions=generate_merge_revisions,
533
delayed_graph_generation=delayed_graph_generation,
534
exclude_common_ancestry=rqst.get('exclude_common_ancestry'))
536
# Apply the other filters
537
return make_log_rev_iterator(self.branch, view_revisions,
538
rqst.get('delta_type'), rqst.get('match'),
539
file_ids=rqst.get('specific_fileids'),
540
direction=rqst.get('direction'))
542
def _log_revision_iterator_using_per_file_graph(self):
543
# Get the base revisions, filtering by the revision range.
544
# Note that we always generate the merge revisions because
545
# filter_revisions_touching_file_id() requires them ...
547
view_revisions = _calc_view_revisions(
548
self.branch, self.start_rev_id, self.end_rev_id,
549
rqst.get('direction'), generate_merge_revisions=True,
550
exclude_common_ancestry=rqst.get('exclude_common_ancestry'))
551
if not isinstance(view_revisions, list):
552
view_revisions = list(view_revisions)
553
view_revisions = _filter_revisions_touching_file_id(self.branch,
554
rqst.get('specific_fileids')[0], view_revisions,
555
include_merges=rqst.get('levels') != 1)
556
return make_log_rev_iterator(self.branch, view_revisions,
557
rqst.get('delta_type'), rqst.get('match'))
560
def _calc_view_revisions(branch, start_rev_id, end_rev_id, direction,
561
generate_merge_revisions,
562
delayed_graph_generation=False,
563
exclude_common_ancestry=False,
565
"""Calculate the revisions to view.
567
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
568
a list of the same tuples.
570
if (exclude_common_ancestry and start_rev_id == end_rev_id):
571
raise errors.BzrCommandError(gettext(
572
'--exclude-common-ancestry requires two different revisions'))
573
if direction not in ('reverse', 'forward'):
574
raise ValueError(gettext('invalid direction %r') % direction)
575
br_revno, br_rev_id = branch.last_revision_info()
579
if (end_rev_id and start_rev_id == end_rev_id
580
and (not generate_merge_revisions
581
or not _has_merges(branch, end_rev_id))):
582
# If a single revision is requested, check we can handle it
583
return _generate_one_revision(branch, end_rev_id, br_rev_id,
585
if not generate_merge_revisions:
587
# If we only want to see linear revisions, we can iterate ...
588
iter_revs = _linear_view_revisions(
589
branch, start_rev_id, end_rev_id,
590
exclude_common_ancestry=exclude_common_ancestry)
591
# If a start limit was given and it's not obviously an
592
# ancestor of the end limit, check it before outputting anything
593
if (direction == 'forward'
594
or (start_rev_id and not _is_obvious_ancestor(
595
branch, start_rev_id, end_rev_id))):
596
iter_revs = list(iter_revs)
597
if direction == 'forward':
598
iter_revs = reversed(iter_revs)
600
except _StartNotLinearAncestor:
601
# Switch to the slower implementation that may be able to find a
602
# non-obvious ancestor out of the left-hand history.
604
iter_revs = _generate_all_revisions(branch, start_rev_id, end_rev_id,
605
direction, delayed_graph_generation,
606
exclude_common_ancestry)
607
if direction == 'forward':
608
iter_revs = _rebase_merge_depth(reverse_by_depth(list(iter_revs)))
612
def _generate_one_revision(branch, rev_id, br_rev_id, br_revno):
613
if rev_id == br_rev_id:
615
return [(br_rev_id, br_revno, 0)]
617
revno_str = _compute_revno_str(branch, rev_id)
618
return [(rev_id, revno_str, 0)]
621
def _generate_all_revisions(branch, start_rev_id, end_rev_id, direction,
622
delayed_graph_generation,
623
exclude_common_ancestry=False):
624
# On large trees, generating the merge graph can take 30-60 seconds
625
# so we delay doing it until a merge is detected, incrementally
626
# returning initial (non-merge) revisions while we can.
628
# The above is only true for old formats (<= 0.92), for newer formats, a
629
# couple of seconds only should be needed to load the whole graph and the
630
# other graph operations needed are even faster than that -- vila 100201
631
initial_revisions = []
632
if delayed_graph_generation:
634
for rev_id, revno, depth in _linear_view_revisions(
635
branch, start_rev_id, end_rev_id, exclude_common_ancestry):
636
if _has_merges(branch, rev_id):
637
# The end_rev_id can be nested down somewhere. We need an
638
# explicit ancestry check. There is an ambiguity here as we
639
# may not raise _StartNotLinearAncestor for a revision that
640
# is an ancestor but not a *linear* one. But since we have
641
# loaded the graph to do the check (or calculate a dotted
642
# revno), we may as well accept to show the log... We need
643
# the check only if start_rev_id is not None as all
644
# revisions have _mod_revision.NULL_REVISION as an ancestor
646
graph = branch.repository.get_graph()
647
if (start_rev_id is not None
648
and not graph.is_ancestor(start_rev_id, end_rev_id)):
649
raise _StartNotLinearAncestor()
650
# Since we collected the revisions so far, we need to
655
initial_revisions.append((rev_id, revno, depth))
657
# No merged revisions found
658
return initial_revisions
659
except _StartNotLinearAncestor:
660
# A merge was never detected so the lower revision limit can't
661
# be nested down somewhere
662
raise errors.BzrCommandError(gettext('Start revision not found in'
663
' history of end revision.'))
665
# We exit the loop above because we encounter a revision with merges, from
666
# this revision, we need to switch to _graph_view_revisions.
668
# A log including nested merges is required. If the direction is reverse,
669
# we rebase the initial merge depths so that the development line is
670
# shown naturally, i.e. just like it is for linear logging. We can easily
671
# make forward the exact opposite display, but showing the merge revisions
672
# indented at the end seems slightly nicer in that case.
673
view_revisions = chain(iter(initial_revisions),
674
_graph_view_revisions(branch, start_rev_id, end_rev_id,
675
rebase_initial_depths=(direction == 'reverse'),
676
exclude_common_ancestry=exclude_common_ancestry))
677
return view_revisions
680
def _has_merges(branch, rev_id):
681
"""Does a revision have multiple parents or not?"""
682
parents = branch.repository.get_parent_map([rev_id]).get(rev_id, [])
683
return len(parents) > 1
686
def _compute_revno_str(branch, rev_id):
687
"""Compute the revno string from a rev_id.
689
:return: The revno string, or None if the revision is not in the supplied
148
_show_log(branch, lf, specific_fileid, verbose, direction,
149
start_revision, end_revision, search)
153
def _show_log(branch,
155
specific_fileid=None,
161
"""Worker function for show_log - see show_log."""
162
from bzrlib.osutils import format_date
163
from bzrlib.errors import BzrCheckError
165
from warnings import warn
167
if not isinstance(lf, LogFormatter):
168
warn("not a LogFormatter instance: %r" % lf)
171
mutter('get log for file_id %r', specific_fileid)
173
if search is not None:
175
searchRE = re.compile(search, re.IGNORECASE)
179
which_revs = _enumerate_history(branch)
181
if start_revision is None:
184
branch.check_real_revno(start_revision)
186
if end_revision is None:
187
end_revision = len(which_revs)
189
branch.check_real_revno(end_revision)
191
# list indexes are 0-based; revisions are 1-based
192
cut_revs = which_revs[(start_revision-1):(end_revision)]
196
# convert the revision history to a dictionary:
197
rev_nos = dict((k, v) for v, k in cut_revs)
693
revno = branch.revision_id_to_dotted_revno(rev_id)
694
except errors.NoSuchRevision:
695
# The revision must be outside of this branch
698
return '.'.join(str(n) for n in revno)
701
def _is_obvious_ancestor(branch, start_rev_id, end_rev_id):
702
"""Is start_rev_id an obvious ancestor of end_rev_id?"""
703
if start_rev_id and end_rev_id:
705
start_dotted = branch.revision_id_to_dotted_revno(start_rev_id)
706
end_dotted = branch.revision_id_to_dotted_revno(end_rev_id)
707
except errors.NoSuchRevision:
708
# one or both is not in the branch; not obvious
710
if len(start_dotted) == 1 and len(end_dotted) == 1:
712
return start_dotted[0] <= end_dotted[0]
713
elif (len(start_dotted) == 3 and len(end_dotted) == 3 and
714
start_dotted[0:1] == end_dotted[0:1]):
715
# both on same development line
716
return start_dotted[2] <= end_dotted[2]
720
# if either start or end is not specified then we use either the first or
721
# the last revision and *they* are obvious ancestors.
725
def _linear_view_revisions(branch, start_rev_id, end_rev_id,
726
exclude_common_ancestry=False):
727
"""Calculate a sequence of revisions to view, newest to oldest.
729
:param start_rev_id: the lower revision-id
730
:param end_rev_id: the upper revision-id
731
:param exclude_common_ancestry: Whether the start_rev_id should be part of
732
the iterated revisions.
733
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
734
:raises _StartNotLinearAncestor: if a start_rev_id is specified but
735
is not found walking the left-hand history
737
br_revno, br_rev_id = branch.last_revision_info()
738
repo = branch.repository
739
graph = repo.get_graph()
740
if start_rev_id is None and end_rev_id is None:
742
for revision_id in graph.iter_lefthand_ancestry(br_rev_id,
743
(_mod_revision.NULL_REVISION,)):
744
yield revision_id, str(cur_revno), 0
747
if end_rev_id is None:
748
end_rev_id = br_rev_id
749
found_start = start_rev_id is None
750
for revision_id in graph.iter_lefthand_ancestry(end_rev_id,
751
(_mod_revision.NULL_REVISION,)):
752
revno_str = _compute_revno_str(branch, revision_id)
753
if not found_start and revision_id == start_rev_id:
754
if not exclude_common_ancestry:
755
yield revision_id, revno_str, 0
759
yield revision_id, revno_str, 0
762
raise _StartNotLinearAncestor()
765
def _graph_view_revisions(branch, start_rev_id, end_rev_id,
766
rebase_initial_depths=True,
767
exclude_common_ancestry=False):
768
"""Calculate revisions to view including merges, newest to oldest.
770
:param branch: the branch
771
:param start_rev_id: the lower revision-id
772
:param end_rev_id: the upper revision-id
773
:param rebase_initial_depth: should depths be rebased until a mainline
775
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
777
if exclude_common_ancestry:
778
stop_rule = 'with-merges-without-common-ancestry'
780
stop_rule = 'with-merges'
781
view_revisions = branch.iter_merge_sorted_revisions(
782
start_revision_id=end_rev_id, stop_revision_id=start_rev_id,
784
if not rebase_initial_depths:
785
for (rev_id, merge_depth, revno, end_of_merge
787
yield rev_id, '.'.join(map(str, revno)), merge_depth
789
# We're following a development line starting at a merged revision.
790
# We need to adjust depths down by the initial depth until we find
791
# a depth less than it. Then we use that depth as the adjustment.
792
# If and when we reach the mainline, depth adjustment ends.
793
depth_adjustment = None
794
for (rev_id, merge_depth, revno, end_of_merge
796
if depth_adjustment is None:
797
depth_adjustment = merge_depth
799
if merge_depth < depth_adjustment:
800
# From now on we reduce the depth adjustement, this can be
801
# surprising for users. The alternative requires two passes
802
# which breaks the fast display of the first revision
804
depth_adjustment = merge_depth
805
merge_depth -= depth_adjustment
806
yield rev_id, '.'.join(map(str, revno)), merge_depth
809
def _rebase_merge_depth(view_revisions):
810
"""Adjust depths upwards so the top level is 0."""
811
# If either the first or last revision have a merge_depth of 0, we're done
812
if view_revisions and view_revisions[0][2] and view_revisions[-1][2]:
813
min_depth = min([d for r,n,d in view_revisions])
815
view_revisions = [(r,n,d-min_depth) for r,n,d in view_revisions]
816
return view_revisions
819
def make_log_rev_iterator(branch, view_revisions, generate_delta, search,
820
file_ids=None, direction='reverse'):
821
"""Create a revision iterator for log.
823
:param branch: The branch being logged.
824
:param view_revisions: The revisions being viewed.
825
:param generate_delta: Whether to generate a delta for each revision.
826
Permitted values are None, 'full' and 'partial'.
827
:param search: A user text search string.
828
:param file_ids: If non empty, only revisions matching one or more of
829
the file-ids are to be kept.
830
:param direction: the direction in which view_revisions is sorted
831
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
834
# Convert view_revisions into (view, None, None) groups to fit with
835
# the standard interface here.
836
if type(view_revisions) == list:
837
# A single batch conversion is faster than many incremental ones.
838
# As we have all the data, do a batch conversion.
839
nones = [None] * len(view_revisions)
840
log_rev_iterator = iter([zip(view_revisions, nones, nones)])
843
for view in view_revisions:
844
yield (view, None, None)
845
log_rev_iterator = iter([_convert()])
846
for adapter in log_adapters:
847
# It would be nicer if log adapters were first class objects
848
# with custom parameters. This will do for now. IGC 20090127
849
if adapter == _make_delta_filter:
850
log_rev_iterator = adapter(branch, generate_delta,
851
search, log_rev_iterator, file_ids, direction)
853
log_rev_iterator = adapter(branch, generate_delta,
854
search, log_rev_iterator)
855
return log_rev_iterator
858
def _make_search_filter(branch, generate_delta, match, log_rev_iterator):
859
"""Create a filtered iterator of log_rev_iterator matching on a regex.
861
:param branch: The branch being logged.
862
:param generate_delta: Whether to generate a delta for each revision.
863
:param match: A dictionary with properties as keys and lists of strings
864
as values. To match, a revision may match any of the supplied strings
865
within a single property but must match at least one string for each
867
:param log_rev_iterator: An input iterator containing all revisions that
868
could be displayed, in lists.
869
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
873
return log_rev_iterator
874
searchRE = [(k, [re.compile(x, re.IGNORECASE) for x in v])
875
for (k,v) in match.iteritems()]
876
return _filter_re(searchRE, log_rev_iterator)
879
def _filter_re(searchRE, log_rev_iterator):
880
for revs in log_rev_iterator:
881
new_revs = [rev for rev in revs if _match_filter(searchRE, rev[1])]
885
def _match_filter(searchRE, rev):
887
'message': (rev.message,),
888
'committer': (rev.committer,),
889
'author': (rev.get_apparent_authors()),
890
'bugs': list(rev.iter_bugs())
892
strings[''] = [item for inner_list in strings.itervalues()
893
for item in inner_list]
894
for (k,v) in searchRE:
895
if k in strings and not _match_any_filter(strings[k], v):
899
def _match_any_filter(strings, res):
900
return any([filter(None, map(re.search, strings)) for re in res])
902
def _make_delta_filter(branch, generate_delta, search, log_rev_iterator,
903
fileids=None, direction='reverse'):
904
"""Add revision deltas to a log iterator if needed.
906
:param branch: The branch being logged.
907
:param generate_delta: Whether to generate a delta for each revision.
908
Permitted values are None, 'full' and 'partial'.
909
:param search: A user text search string.
910
:param log_rev_iterator: An input iterator containing all revisions that
911
could be displayed, in lists.
912
:param fileids: If non empty, only revisions matching one or more of
913
the file-ids are to be kept.
914
:param direction: the direction in which view_revisions is sorted
915
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
918
if not generate_delta and not fileids:
919
return log_rev_iterator
920
return _generate_deltas(branch.repository, log_rev_iterator,
921
generate_delta, fileids, direction)
924
def _generate_deltas(repository, log_rev_iterator, delta_type, fileids,
926
"""Create deltas for each batch of revisions in log_rev_iterator.
928
If we're only generating deltas for the sake of filtering against
929
file-ids, we stop generating deltas once all file-ids reach the
930
appropriate life-cycle point. If we're receiving data newest to
931
oldest, then that life-cycle point is 'add', otherwise it's 'remove'.
933
check_fileids = fileids is not None and len(fileids) > 0
935
fileid_set = set(fileids)
936
if direction == 'reverse':
942
for revs in log_rev_iterator:
943
# If we were matching against fileids and we've run out,
944
# there's nothing left to do
945
if check_fileids and not fileid_set:
947
revisions = [rev[1] for rev in revs]
949
if delta_type == 'full' and not check_fileids:
950
deltas = repository.get_deltas_for_revisions(revisions)
951
for rev, delta in izip(revs, deltas):
952
new_revs.append((rev[0], rev[1], delta))
954
deltas = repository.get_deltas_for_revisions(revisions, fileid_set)
955
for rev, delta in izip(revs, deltas):
957
if delta is None or not delta.has_changed():
960
_update_fileids(delta, fileid_set, stop_on)
961
if delta_type is None:
963
elif delta_type == 'full':
964
# If the file matches all the time, rebuilding
965
# a full delta like this in addition to a partial
966
# one could be slow. However, it's likely that
967
# most revisions won't get this far, making it
968
# faster to filter on the partial deltas and
969
# build the occasional full delta than always
970
# building full deltas and filtering those.
972
delta = repository.get_revision_delta(rev_id)
973
new_revs.append((rev[0], rev[1], delta))
977
def _update_fileids(delta, fileids, stop_on):
978
"""Update the set of file-ids to search based on file lifecycle events.
980
:param fileids: a set of fileids to update
981
:param stop_on: either 'add' or 'remove' - take file-ids out of the
982
fileids set once their add or remove entry is detected respectively
985
for item in delta.added:
986
if item[1] in fileids:
987
fileids.remove(item[1])
988
elif stop_on == 'delete':
989
for item in delta.removed:
990
if item[1] in fileids:
991
fileids.remove(item[1])
994
def _make_revision_objects(branch, generate_delta, search, log_rev_iterator):
995
"""Extract revision objects from the repository
997
:param branch: The branch being logged.
998
:param generate_delta: Whether to generate a delta for each revision.
999
:param search: A user text search string.
1000
:param log_rev_iterator: An input iterator containing all revisions that
1001
could be displayed, in lists.
1002
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
1005
repository = branch.repository
1006
for revs in log_rev_iterator:
1007
# r = revision_id, n = revno, d = merge depth
1008
revision_ids = [view[0] for view, _, _ in revs]
1009
revisions = repository.get_revisions(revision_ids)
1010
revs = [(rev[0], revision, rev[2]) for rev, revision in
1011
izip(revs, revisions)]
1015
def _make_batch_filter(branch, generate_delta, search, log_rev_iterator):
1016
"""Group up a single large batch into smaller ones.
1018
:param branch: The branch being logged.
1019
:param generate_delta: Whether to generate a delta for each revision.
1020
:param search: A user text search string.
1021
:param log_rev_iterator: An input iterator containing all revisions that
1022
could be displayed, in lists.
1023
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
1027
for batch in log_rev_iterator:
1030
step = [detail for _, detail in zip(range(num), batch)]
1034
num = min(int(num * 1.5), 200)
1037
def _get_revision_limits(branch, start_revision, end_revision):
1038
"""Get and check revision limits.
1040
:param branch: The branch containing the revisions.
1042
:param start_revision: The first revision to be logged.
1043
For backwards compatibility this may be a mainline integer revno,
1044
but for merge revision support a RevisionInfo is expected.
1046
:param end_revision: The last revision to be logged.
1047
For backwards compatibility this may be a mainline integer revno,
1048
but for merge revision support a RevisionInfo is expected.
1050
:return: (start_rev_id, end_rev_id) tuple.
1052
branch_revno, branch_rev_id = branch.last_revision_info()
1054
if start_revision is None:
1057
if isinstance(start_revision, revisionspec.RevisionInfo):
1058
start_rev_id = start_revision.rev_id
1059
start_revno = start_revision.revno or 1
1061
branch.check_real_revno(start_revision)
1062
start_revno = start_revision
1063
start_rev_id = branch.get_rev_id(start_revno)
1066
if end_revision is None:
1067
end_revno = branch_revno
1069
if isinstance(end_revision, revisionspec.RevisionInfo):
1070
end_rev_id = end_revision.rev_id
1071
end_revno = end_revision.revno or branch_revno
1073
branch.check_real_revno(end_revision)
1074
end_revno = end_revision
1075
end_rev_id = branch.get_rev_id(end_revno)
1077
if branch_revno != 0:
1078
if (start_rev_id == _mod_revision.NULL_REVISION
1079
or end_rev_id == _mod_revision.NULL_REVISION):
1080
raise errors.BzrCommandError(gettext('Logging revision 0 is invalid.'))
1081
if start_revno > end_revno:
1082
raise errors.BzrCommandError(gettext("Start revision must be "
1083
"older than the end revision."))
1084
return (start_rev_id, end_rev_id)
1087
def _get_mainline_revs(branch, start_revision, end_revision):
1088
"""Get the mainline revisions from the branch.
1090
Generates the list of mainline revisions for the branch.
1092
:param branch: The branch containing the revisions.
1094
:param start_revision: The first revision to be logged.
1095
For backwards compatibility this may be a mainline integer revno,
1096
but for merge revision support a RevisionInfo is expected.
1098
:param end_revision: The last revision to be logged.
1099
For backwards compatibility this may be a mainline integer revno,
1100
but for merge revision support a RevisionInfo is expected.
1102
:return: A (mainline_revs, rev_nos, start_rev_id, end_rev_id) tuple.
1104
branch_revno, branch_last_revision = branch.last_revision_info()
1105
if branch_revno == 0:
1106
return None, None, None, None
1108
# For mainline generation, map start_revision and end_revision to
1109
# mainline revnos. If the revision is not on the mainline choose the
1110
# appropriate extreme of the mainline instead - the extra will be
1112
# Also map the revisions to rev_ids, to be used in the later filtering
1115
if start_revision is None:
1118
if isinstance(start_revision, revisionspec.RevisionInfo):
1119
start_rev_id = start_revision.rev_id
1120
start_revno = start_revision.revno or 1
1122
branch.check_real_revno(start_revision)
1123
start_revno = start_revision
1126
if end_revision is None:
1127
end_revno = branch_revno
1129
if isinstance(end_revision, revisionspec.RevisionInfo):
1130
end_rev_id = end_revision.rev_id
1131
end_revno = end_revision.revno or branch_revno
1133
branch.check_real_revno(end_revision)
1134
end_revno = end_revision
1136
if ((start_rev_id == _mod_revision.NULL_REVISION)
1137
or (end_rev_id == _mod_revision.NULL_REVISION)):
1138
raise errors.BzrCommandError(gettext('Logging revision 0 is invalid.'))
1139
if start_revno > end_revno:
1140
raise errors.BzrCommandError(gettext("Start revision must be older "
1141
"than the end revision."))
1143
if end_revno < start_revno:
1144
return None, None, None, None
1145
cur_revno = branch_revno
1148
graph = branch.repository.get_graph()
1149
for revision_id in graph.iter_lefthand_ancestry(
1150
branch_last_revision, (_mod_revision.NULL_REVISION,)):
1151
if cur_revno < start_revno:
1152
# We have gone far enough, but we always add 1 more revision
1153
rev_nos[revision_id] = cur_revno
1154
mainline_revs.append(revision_id)
1156
if cur_revno <= end_revno:
1157
rev_nos[revision_id] = cur_revno
1158
mainline_revs.append(revision_id)
1161
# We walked off the edge of all revisions, so we add a 'None' marker
1162
mainline_revs.append(None)
1164
mainline_revs.reverse()
199
1166
# override the mainline to look like the revision history.
200
mainline_revs = [revision_id for index, revision_id in cut_revs]
201
if cut_revs[0][0] == 1:
202
mainline_revs.insert(0, None)
204
mainline_revs.insert(0, which_revs[start_revision-2][1])
205
if getattr(lf, 'show_merge', None) is not None:
206
include_merges = True
208
include_merges = False
209
view_revisions = list(get_view_revisions(mainline_revs, rev_nos, branch,
210
direction, include_merges=include_merges))
212
def iter_revisions():
213
# r = revision, n = revno, d = merge depth
214
revision_ids = [r for r, n, d in view_revisions]
215
zeros = set(r for r, n, d in view_revisions if d == 0)
217
repository = branch.repository
220
revisions = repository.get_revisions(revision_ids[:num])
221
if verbose or specific_fileid:
222
delta_revisions = [r for r in revisions if
223
r.revision_id in zeros]
224
deltas = repository.get_deltas_for_revisions(delta_revisions)
225
cur_deltas = dict(izip((r.revision_id for r in
226
delta_revisions), deltas))
227
for revision in revisions:
228
# The delta value will be None unless
229
# 1. verbose or specific_fileid is specified, and
230
# 2. the revision is a mainline revision
231
yield revision, cur_deltas.get(revision.revision_id)
232
revision_ids = revision_ids[num:]
235
# now we just print all the revisions
236
for ((rev_id, revno, merge_depth), (rev, delta)) in \
237
izip(view_revisions, iter_revisions()):
240
if not searchRE.search(rev.message):
244
# a mainline revision.
247
if not delta.touches_file_id(specific_fileid):
251
# although we calculated it, throw it away without display
254
lf.show(revno, rev, delta)
1167
return mainline_revs, rev_nos, start_rev_id, end_rev_id
1170
def _filter_revisions_touching_file_id(branch, file_id, view_revisions,
1171
include_merges=True):
1172
r"""Return the list of revision ids which touch a given file id.
1174
The function filters view_revisions and returns a subset.
1175
This includes the revisions which directly change the file id,
1176
and the revisions which merge these changes. So if the
1189
And 'C' changes a file, then both C and D will be returned. F will not be
1190
returned even though it brings the changes to C into the branch starting
1191
with E. (Note that if we were using F as the tip instead of G, then we
1194
This will also be restricted based on a subset of the mainline.
1196
:param branch: The branch where we can get text revision information.
1198
:param file_id: Filter out revisions that do not touch file_id.
1200
:param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
1201
tuples. This is the list of revisions which will be filtered. It is
1202
assumed that view_revisions is in merge_sort order (i.e. newest
1205
:param include_merges: include merge revisions in the result or not
1207
:return: A list of (revision_id, dotted_revno, merge_depth) tuples.
1209
# Lookup all possible text keys to determine which ones actually modified
1211
graph = branch.repository.get_file_graph()
1212
get_parent_map = graph.get_parent_map
1213
text_keys = [(file_id, rev_id) for rev_id, revno, depth in view_revisions]
1215
# Looking up keys in batches of 1000 can cut the time in half, as well as
1216
# memory consumption. GraphIndex *does* like to look for a few keys in
1217
# parallel, it just doesn't like looking for *lots* of keys in parallel.
1218
# TODO: This code needs to be re-evaluated periodically as we tune the
1219
# indexing layer. We might consider passing in hints as to the known
1220
# access pattern (sparse/clustered, high success rate/low success
1221
# rate). This particular access is clustered with a low success rate.
1222
modified_text_revisions = set()
1224
for start in xrange(0, len(text_keys), chunk_size):
1225
next_keys = text_keys[start:start + chunk_size]
1226
# Only keep the revision_id portion of the key
1227
modified_text_revisions.update(
1228
[k[1] for k in get_parent_map(next_keys)])
1229
del text_keys, next_keys
1232
# Track what revisions will merge the current revision, replace entries
1233
# with 'None' when they have been added to result
1234
current_merge_stack = [None]
1235
for info in view_revisions:
1236
rev_id, revno, depth = info
1237
if depth == len(current_merge_stack):
1238
current_merge_stack.append(info)
256
lf.show_merge(rev, merge_depth)
259
def get_view_revisions(mainline_revs, rev_nos, branch, direction,
260
include_merges=True):
261
"""Produce an iterator of revisions to show
262
:return: an iterator of (revision_id, revno, merge_depth)
263
(if there is no revno for a revision, None is supplied)
265
if include_merges is False:
266
revision_ids = mainline_revs[1:]
267
if direction == 'reverse':
268
revision_ids.reverse()
269
for revision_id in revision_ids:
270
yield revision_id, rev_nos[revision_id], 0
272
merge_sorted_revisions = merge_sort(
273
branch.repository.get_revision_graph(mainline_revs[-1]),
277
if direction == 'forward':
278
# forward means oldest first.
279
merge_sorted_revisions = reverse_by_depth(merge_sorted_revisions)
280
elif direction != 'reverse':
281
raise ValueError('invalid direction %r' % direction)
283
revision_history = branch.revision_history()
285
for sequence, rev_id, merge_depth, end_of_merge in merge_sorted_revisions:
286
yield rev_id, rev_nos.get(rev_id), merge_depth
1240
del current_merge_stack[depth + 1:]
1241
current_merge_stack[-1] = info
1243
if rev_id in modified_text_revisions:
1244
# This needs to be logged, along with the extra revisions
1245
for idx in xrange(len(current_merge_stack)):
1246
node = current_merge_stack[idx]
1247
if node is not None:
1248
if include_merges or node[2] == 0:
1250
current_merge_stack[idx] = None
289
1254
def reverse_by_depth(merge_sorted_revisions, _depth=0):
293
1258
revision of that depth. There may be no topological justification for this,
294
1259
but it looks much nicer.
1261
# Add a fake revision at start so that we can always attach sub revisions
1262
merge_sorted_revisions = [(None, None, _depth)] + merge_sorted_revisions
296
1263
zd_revisions = []
297
1264
for val in merge_sorted_revisions:
298
1265
if val[2] == _depth:
1266
# Each revision at the current depth becomes a chunk grouping all
1267
# higher depth revisions.
299
1268
zd_revisions.append([val])
301
assert val[2] > _depth
302
1270
zd_revisions[-1].append(val)
303
1271
for revisions in zd_revisions:
304
1272
if len(revisions) > 1:
1273
# We have higher depth revisions, let reverse them locally
305
1274
revisions[1:] = reverse_by_depth(revisions[1:], _depth + 1)
306
1275
zd_revisions.reverse()
308
1277
for chunk in zd_revisions:
309
1278
result.extend(chunk)
1280
# Top level call, get rid of the fake revisions that have been added
1281
result = [r for r in result if r[0] is not None and r[1] is not None]
1285
class LogRevision(object):
1286
"""A revision to be logged (by LogFormatter.log_revision).
1288
A simple wrapper for the attributes of a revision to be logged.
1289
The attributes may or may not be populated, as determined by the
1290
logging options and the log formatter capabilities.
1293
def __init__(self, rev=None, revno=None, merge_depth=0, delta=None,
1294
tags=None, diff=None, signature=None):
1299
self.revno = str(revno)
1300
self.merge_depth = merge_depth
1304
self.signature = signature
313
1307
class LogFormatter(object):
314
"""Abstract class to display log messages."""
316
def __init__(self, to_file, show_ids=False, show_timezone='original'):
1308
"""Abstract class to display log messages.
1310
At a minimum, a derived class must implement the log_revision method.
1312
If the LogFormatter needs to be informed of the beginning or end of
1313
a log it should implement the begin_log and/or end_log hook methods.
1315
A LogFormatter should define the following supports_XXX flags
1316
to indicate which LogRevision attributes it supports:
1318
- supports_delta must be True if this log formatter supports delta.
1319
Otherwise the delta attribute may not be populated. The 'delta_format'
1320
attribute describes whether the 'short_status' format (1) or the long
1321
one (2) should be used.
1323
- supports_merge_revisions must be True if this log formatter supports
1324
merge revisions. If not, then only mainline revisions will be passed
1327
- preferred_levels is the number of levels this formatter defaults to.
1328
The default value is zero meaning display all levels.
1329
This value is only relevant if supports_merge_revisions is True.
1331
- supports_tags must be True if this log formatter supports tags.
1332
Otherwise the tags attribute may not be populated.
1334
- supports_diff must be True if this log formatter supports diffs.
1335
Otherwise the diff attribute may not be populated.
1337
- supports_signatures must be True if this log formatter supports GPG
1340
Plugins can register functions to show custom revision properties using
1341
the properties_handler_registry. The registered function
1342
must respect the following interface description::
1344
def my_show_properties(properties_dict):
1345
# code that returns a dict {'name':'value'} of the properties
1348
preferred_levels = 0
1350
def __init__(self, to_file, show_ids=False, show_timezone='original',
1351
delta_format=None, levels=None, show_advice=False,
1352
to_exact_file=None, author_list_handler=None):
1353
"""Create a LogFormatter.
1355
:param to_file: the file to output to
1356
:param to_exact_file: if set, gives an output stream to which
1357
non-Unicode diffs are written.
1358
:param show_ids: if True, revision-ids are to be displayed
1359
:param show_timezone: the timezone to use
1360
:param delta_format: the level of delta information to display
1361
or None to leave it to the formatter to decide
1362
:param levels: the number of levels to display; None or -1 to
1363
let the log formatter decide.
1364
:param show_advice: whether to show advice at the end of the
1366
:param author_list_handler: callable generating a list of
1367
authors to display for a given revision
317
1369
self.to_file = to_file
1370
# 'exact' stream used to show diff, it should print content 'as is'
1371
# and should not try to decode/encode it to unicode to avoid bug #328007
1372
if to_exact_file is not None:
1373
self.to_exact_file = to_exact_file
1375
# XXX: somewhat hacky; this assumes it's a codec writer; it's better
1376
# for code that expects to get diffs to pass in the exact file
1378
self.to_exact_file = getattr(to_file, 'stream', to_file)
318
1379
self.show_ids = show_ids
319
1380
self.show_timezone = show_timezone
321
def show(self, revno, rev, delta):
1381
if delta_format is None:
1382
# Ensures backward compatibility
1383
delta_format = 2 # long format
1384
self.delta_format = delta_format
1385
self.levels = levels
1386
self._show_advice = show_advice
1387
self._merge_count = 0
1388
self._author_list_handler = author_list_handler
1390
def get_levels(self):
1391
"""Get the number of levels to display or 0 for all."""
1392
if getattr(self, 'supports_merge_revisions', False):
1393
if self.levels is None or self.levels == -1:
1394
self.levels = self.preferred_levels
1399
def log_revision(self, revision):
1402
:param revision: The LogRevision to be logged.
322
1404
raise NotImplementedError('not implemented in abstract base')
1406
def show_advice(self):
1407
"""Output user advice, if any, when the log is completed."""
1408
if self._show_advice and self.levels == 1 and self._merge_count > 0:
1409
advice_sep = self.get_advice_separator()
1411
self.to_file.write(advice_sep)
1413
"Use --include-merged or -n0 to see merged revisions.\n")
1415
def get_advice_separator(self):
1416
"""Get the text separating the log from the closing advice."""
324
1419
def short_committer(self, rev):
325
return re.sub('<.*@.*>', '', rev.committer).strip(' ')
1420
name, address = config.parse_username(rev.committer)
1425
def short_author(self, rev):
1426
return self.authors(rev, 'first', short=True, sep=', ')
1428
def authors(self, rev, who, short=False, sep=None):
1429
"""Generate list of authors, taking --authors option into account.
1431
The caller has to specify the name of a author list handler,
1432
as provided by the author list registry, using the ``who``
1433
argument. That name only sets a default, though: when the
1434
user selected a different author list generation using the
1435
``--authors`` command line switch, as represented by the
1436
``author_list_handler`` constructor argument, that value takes
1439
:param rev: The revision for which to generate the list of authors.
1440
:param who: Name of the default handler.
1441
:param short: Whether to shorten names to either name or address.
1442
:param sep: What separator to use for automatic concatenation.
1444
if self._author_list_handler is not None:
1445
# The user did specify --authors, which overrides the default
1446
author_list_handler = self._author_list_handler
1448
# The user didn't specify --authors, so we use the caller's default
1449
author_list_handler = author_list_registry.get(who)
1450
names = author_list_handler(rev)
1452
for i in range(len(names)):
1453
name, address = config.parse_username(names[i])
1459
names = sep.join(names)
1462
def merge_marker(self, revision):
1463
"""Get the merge marker to include in the output or '' if none."""
1464
if len(revision.rev.parent_ids) > 1:
1465
self._merge_count += 1
1470
def show_properties(self, revision, indent):
1471
"""Displays the custom properties returned by each registered handler.
1473
If a registered handler raises an error it is propagated.
1475
for line in self.custom_properties(revision):
1476
self.to_file.write("%s%s\n" % (indent, line))
1478
def custom_properties(self, revision):
1479
"""Format the custom properties returned by each registered handler.
1481
If a registered handler raises an error it is propagated.
1483
:return: a list of formatted lines (excluding trailing newlines)
1485
lines = self._foreign_info_properties(revision)
1486
for key, handler in properties_handler_registry.iteritems():
1487
lines.extend(self._format_properties(handler(revision)))
1490
def _foreign_info_properties(self, rev):
1491
"""Custom log displayer for foreign revision identifiers.
1493
:param rev: Revision object.
1495
# Revision comes directly from a foreign repository
1496
if isinstance(rev, foreign.ForeignRevision):
1497
return self._format_properties(
1498
rev.mapping.vcs.show_foreign_revid(rev.foreign_revid))
1500
# Imported foreign revision revision ids always contain :
1501
if not ":" in rev.revision_id:
1504
# Revision was once imported from a foreign repository
1506
foreign_revid, mapping = \
1507
foreign.foreign_vcs_registry.parse_revision_id(rev.revision_id)
1508
except errors.InvalidRevisionId:
1511
return self._format_properties(
1512
mapping.vcs.show_foreign_revid(foreign_revid))
1514
def _format_properties(self, properties):
1516
for key, value in properties.items():
1517
lines.append(key + ': ' + value)
1520
def show_diff(self, to_file, diff, indent):
1521
for l in diff.rstrip().split('\n'):
1522
to_file.write(indent + '%s\n' % (l,))
1525
# Separator between revisions in long format
1526
_LONG_SEP = '-' * 60
328
1529
class LongLogFormatter(LogFormatter):
329
def show(self, revno, rev, delta):
330
return self._show_helper(revno=revno, rev=rev, delta=delta)
332
def show_merge(self, rev, merge_depth):
333
return self._show_helper(rev=rev, indent=' '*merge_depth, merged=True, delta=None)
335
def _show_helper(self, rev=None, revno=None, indent='', merged=False, delta=None):
336
"""Show a revision, either merged or not."""
337
from bzrlib.osutils import format_date
338
to_file = self.to_file
339
print >>to_file, indent+'-' * 60
340
if revno is not None:
341
print >>to_file, 'revno:', revno
343
print >>to_file, indent+'merged:', rev.revision_id
345
print >>to_file, indent+'revision-id:', rev.revision_id
1531
supports_merge_revisions = True
1532
preferred_levels = 1
1533
supports_delta = True
1534
supports_tags = True
1535
supports_diff = True
1536
supports_signatures = True
1538
def __init__(self, *args, **kwargs):
1539
super(LongLogFormatter, self).__init__(*args, **kwargs)
1540
if self.show_timezone == 'original':
1541
self.date_string = self._date_string_original_timezone
1543
self.date_string = self._date_string_with_timezone
1545
def _date_string_with_timezone(self, rev):
1546
return format_date(rev.timestamp, rev.timezone or 0,
1549
def _date_string_original_timezone(self, rev):
1550
return format_date_with_offset_in_original_timezone(rev.timestamp,
1553
def log_revision(self, revision):
1554
"""Log a revision, either merged or not."""
1555
indent = ' ' * revision.merge_depth
1557
if revision.revno is not None:
1558
lines.append('revno: %s%s' % (revision.revno,
1559
self.merge_marker(revision)))
1561
lines.append('tags: %s' % (', '.join(revision.tags)))
1562
if self.show_ids or revision.revno is None:
1563
lines.append('revision-id: %s' % (revision.rev.revision_id,))
346
1564
if self.show_ids:
347
for parent_id in rev.parent_ids:
348
print >>to_file, indent+'parent:', parent_id
349
print >>to_file, indent+'committer:', rev.committer
351
print >>to_file, indent+'branch nick: %s' % \
352
rev.properties['branch-nick']
355
date_str = format_date(rev.timestamp,
358
print >>to_file, indent+'timestamp: %s' % date_str
360
print >>to_file, indent+'message:'
362
print >>to_file, indent+' (no message)'
1565
for parent_id in revision.rev.parent_ids:
1566
lines.append('parent: %s' % (parent_id,))
1567
lines.extend(self.custom_properties(revision.rev))
1569
committer = revision.rev.committer
1570
authors = self.authors(revision.rev, 'all')
1571
if authors != [committer]:
1572
lines.append('author: %s' % (", ".join(authors),))
1573
lines.append('committer: %s' % (committer,))
1575
branch_nick = revision.rev.properties.get('branch-nick', None)
1576
if branch_nick is not None:
1577
lines.append('branch nick: %s' % (branch_nick,))
1579
lines.append('timestamp: %s' % (self.date_string(revision.rev),))
1581
if revision.signature is not None:
1582
lines.append('signature: ' + revision.signature)
1584
lines.append('message:')
1585
if not revision.rev.message:
1586
lines.append(' (no message)')
364
message = rev.message.rstrip('\r\n')
1588
message = revision.rev.message.rstrip('\r\n')
365
1589
for l in message.split('\n'):
366
print >>to_file, indent+' ' + l
368
delta.show(to_file, self.show_ids)
1590
lines.append(' %s' % (l,))
1592
# Dump the output, appending the delta and diff if requested
1593
to_file = self.to_file
1594
to_file.write("%s%s\n" % (indent, ('\n' + indent).join(lines)))
1595
if revision.delta is not None:
1596
# Use the standard status output to display changes
1597
from bzrlib.delta import report_delta
1598
report_delta(to_file, revision.delta, short_status=False,
1599
show_ids=self.show_ids, indent=indent)
1600
if revision.diff is not None:
1601
to_file.write(indent + 'diff:\n')
1603
# Note: we explicitly don't indent the diff (relative to the
1604
# revision information) so that the output can be fed to patch -p0
1605
self.show_diff(self.to_exact_file, revision.diff, indent)
1606
self.to_exact_file.flush()
1608
def get_advice_separator(self):
1609
"""Get the text separating the log from the closing advice."""
1610
return '-' * 60 + '\n'
371
1613
class ShortLogFormatter(LogFormatter):
372
def show(self, revno, rev, delta):
373
from bzrlib.osutils import format_date
1615
supports_merge_revisions = True
1616
preferred_levels = 1
1617
supports_delta = True
1618
supports_tags = True
1619
supports_diff = True
1621
def __init__(self, *args, **kwargs):
1622
super(ShortLogFormatter, self).__init__(*args, **kwargs)
1623
self.revno_width_by_depth = {}
1625
def log_revision(self, revision):
1626
# We need two indents: one per depth and one for the information
1627
# relative to that indent. Most mainline revnos are 5 chars or
1628
# less while dotted revnos are typically 11 chars or less. Once
1629
# calculated, we need to remember the offset for a given depth
1630
# as we might be starting from a dotted revno in the first column
1631
# and we want subsequent mainline revisions to line up.
1632
depth = revision.merge_depth
1633
indent = ' ' * depth
1634
revno_width = self.revno_width_by_depth.get(depth)
1635
if revno_width is None:
1636
if revision.revno is None or revision.revno.find('.') == -1:
1637
# mainline revno, e.g. 12345
1640
# dotted revno, e.g. 12345.10.55
1642
self.revno_width_by_depth[depth] = revno_width
1643
offset = ' ' * (revno_width + 1)
375
1645
to_file = self.to_file
376
date_str = format_date(rev.timestamp, rev.timezone or 0,
378
print >>to_file, "%5d %s\t%s" % (revno, self.short_committer(rev),
379
format_date(rev.timestamp, rev.timezone or 0,
1648
tags = ' {%s}' % (', '.join(revision.tags))
1649
to_file.write(indent + "%*s %s\t%s%s%s\n" % (revno_width,
1650
revision.revno or "", self.short_author(revision.rev),
1651
format_date(revision.rev.timestamp,
1652
revision.rev.timezone or 0,
380
1653
self.show_timezone, date_fmt="%Y-%m-%d",
383
print >>to_file, ' revision-id:', rev.revision_id
385
print >>to_file, ' (no message)'
1655
tags, self.merge_marker(revision)))
1656
self.show_properties(revision.rev, indent+offset)
1657
if self.show_ids or revision.revno is None:
1658
to_file.write(indent + offset + 'revision-id:%s\n'
1659
% (revision.rev.revision_id,))
1660
if not revision.rev.message:
1661
to_file.write(indent + offset + '(no message)\n')
387
message = rev.message.rstrip('\r\n')
1663
message = revision.rev.message.rstrip('\r\n')
388
1664
for l in message.split('\n'):
389
print >>to_file, ' ' + l
1665
to_file.write(indent + offset + '%s\n' % (l,))
391
# TODO: Why not show the modified files in a shorter form as
392
# well? rewrap them single lines of appropriate length
394
delta.show(to_file, self.show_ids)
1667
if revision.delta is not None:
1668
# Use the standard status output to display changes
1669
from bzrlib.delta import report_delta
1670
report_delta(to_file, revision.delta,
1671
short_status=self.delta_format==1,
1672
show_ids=self.show_ids, indent=indent + offset)
1673
if revision.diff is not None:
1674
self.show_diff(self.to_exact_file, revision.diff, ' ')
398
1678
class LineLogFormatter(LogFormatter):
1680
supports_merge_revisions = True
1681
preferred_levels = 1
1682
supports_tags = True
1684
def __init__(self, *args, **kwargs):
1685
super(LineLogFormatter, self).__init__(*args, **kwargs)
1686
width = terminal_width()
1687
if width is not None:
1688
# we need one extra space for terminals that wrap on last char
1690
self._max_chars = width
399
1692
def truncate(self, str, max_len):
400
if len(str) <= max_len:
1693
if max_len is None or len(str) <= max_len:
402
return str[:max_len-3]+'...'
1695
return str[:max_len-3] + '...'
404
1697
def date_string(self, rev):
405
from bzrlib.osutils import format_date
406
return format_date(rev.timestamp, rev.timezone or 0,
1698
return format_date(rev.timestamp, rev.timezone or 0,
407
1699
self.show_timezone, date_fmt="%Y-%m-%d",
408
1700
show_offset=False)