# Copyright (C) 2005-2011 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Code to show logs of changes.

Various flavors of log can be produced:

* for one file, or the whole tree, and (not done yet) for
  files in a given directory

* in "verbose" mode with a description of what changed from one
  version to the next

* with file-ids and revision-ids shown

Logs are actually written out through an abstract LogFormatter
interface, which allows for different preferred formats. Plugins can
register formats too.

Logs can be produced in either forward (oldest->newest) or reverse
(newest->oldest) order.

Logs can be filtered to show only revisions matching a particular
search string, or within a particular range of revisions. The range
can be given as date/times, which are reduced to revisions before
calling the log functions.

In verbose mode we show a summary of what changed in each particular
revision. Note that this is the delta for changes in that revision
relative to its left-most parent, not the delta relative to the last
logged revision. So for example if you ask for a verbose log of
changes touching hello.c you will get a list of those revisions also
listing other things that were changed in the same revision, but not
all the changes since the previous revision that touched hello.c.
"""
from cStringIO import StringIO
from itertools import (
    chain,
    izip,
    )
import re
from warnings import (
    warn,
    )

from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """

from bzrlib import (
    config,
    diff,
    errors,
    foreign,
    repository as _mod_repository,
    revision as _mod_revision,
    revisionspec,
    )
""")

from bzrlib import (
    registry,
    )
from bzrlib.i18n import gettext, ngettext
from bzrlib.osutils import (
    format_date,
    format_date_with_offset_in_original_timezone,
    get_diff_header_encoding,
    get_terminal_encoding,
    terminal_width,
    )

def find_touching_revisions(branch, file_id):
    """Yield a description of revisions which affect the file_id.

    Each returned element is (revno, revision_id, description)

    This is the list of revisions where the file is either added,
    modified, renamed or deleted.

    TODO: Perhaps some way to limit this to only particular revisions,
        or to traverse a non-mainline set of revisions?
    """
    last_ie = None
    last_path = None
    revno = 0
    graph = branch.repository.get_graph()
    history = list(graph.iter_lefthand_ancestry(branch.last_revision(),
        [_mod_revision.NULL_REVISION]))
    for revision_id in reversed(history):
        this_inv = branch.repository.get_inventory(revision_id)
        if this_inv.has_id(file_id):
            this_ie = this_inv[file_id]
            this_path = this_inv.id2path(file_id)
        else:
            this_ie = this_path = None

        # now we know how it was last time, and how it is in this revision.
        # are those two states effectively the same or not?
        if not this_ie and not last_ie:
            # not present in either
            pass
        elif this_ie and not last_ie:
            yield revno, revision_id, "added " + this_path
        elif not this_ie and last_ie:
            yield revno, revision_id, "deleted " + last_path
        elif this_path != last_path:
            yield revno, revision_id, ("renamed %s => %s" % (last_path, this_path))
        elif (this_ie.text_size != last_ie.text_size
              or this_ie.text_sha1 != last_ie.text_sha1):
            yield revno, revision_id, "modified " + this_path

        last_ie = this_ie
        last_path = this_path
        revno += 1

def _enumerate_history(branch):
    rh = []
    revno = 1
    for rev_id in branch.revision_history():
        rh.append((revno, rev_id))
        revno += 1
    return rh

def show_log(branch,
             lf,
             specific_fileid=None,
             verbose=False,
             direction='reverse',
             start_revision=None,
             end_revision=None,
             search=None,
             limit=None,
             show_diff=False,
             match=None):
    """Write out human-readable log of commits to this branch.

    This function is being retained for backwards compatibility but
    should not be extended with new parameters. Use the new Logger class
    instead, eg. Logger(branch, rqst).show(lf), adding parameters to the
    make_log_request_dict function.

    :param lf: The LogFormatter object showing the output.
    :param specific_fileid: If not None, list only the commits affecting the
        specified file, rather than all commits.
    :param verbose: If True show added/changed/deleted/renamed files.
    :param direction: 'reverse' (default) is latest to earliest; 'forward' is
        earliest to latest.
    :param start_revision: If not None, only show revisions >= start_revision
    :param end_revision: If not None, only show revisions <= end_revision
    :param search: If not None, only show revisions with matching commit
        messages.
    :param limit: If set, shows only 'limit' revisions, all revisions are shown
        if None or 0.
    :param show_diff: If True, output a diff after each revision.
    :param match: Dictionary of search lists to use when matching revision
        properties.
    """
    # Convert old-style parameters to new-style parameters
    if specific_fileid is not None:
        file_ids = [specific_fileid]
    else:
        file_ids = None
    if verbose:
        if file_ids:
            delta_type = 'partial'
        else:
            delta_type = 'full'
    else:
        delta_type = None
    if show_diff:
        if file_ids:
            diff_type = 'partial'
        else:
            diff_type = 'full'
    else:
        diff_type = None

    # Build the request and execute it
    rqst = make_log_request_dict(direction=direction, specific_fileids=file_ids,
        start_revision=start_revision, end_revision=end_revision,
        limit=limit, message_search=search,
        delta_type=delta_type, diff_type=diff_type)
    Logger(branch, rqst).show(lf)
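
# Illustrative usage sketch (not part of the original module): the
# request-based API that show_log() wraps. `branch` is assumed to come from
# bzrlib.branch.Branch.open() and `to_file` is any file-like object.
def _example_logger_usage(branch, to_file):
    # Newest 10 mainline revisions, newest first, with a full delta per
    # revision, rendered by the built-in 'long' formatter.
    rqst = make_log_request_dict(direction='reverse', limit=10, levels=1,
                                 delta_type='full')
    lf = log_formatter('long', to_file=to_file)
    Logger(branch, rqst).show(lf)
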
# Note: This needs to be kept in sync with the defaults in
# make_log_request_dict() below
_DEFAULT_REQUEST_PARAMS = {
    'direction': 'reverse',
    'levels': None,
    'generate_tags': True,
    'exclude_common_ancestry': False,
    '_match_using_deltas': True,
    }

def make_log_request_dict(direction='reverse', specific_fileids=None,
                          start_revision=None, end_revision=None, limit=None,
                          message_search=None, levels=None, generate_tags=True,
                          delta_type=None,
                          diff_type=None, _match_using_deltas=True,
                          exclude_common_ancestry=False, match=None,
                          signature=False, omit_merges=False,
                          ):
    """Convenience function for making a logging request dictionary.

    Using this function may make code slightly safer by ensuring
    parameters have the correct names. It also provides a reference
    point for documenting the supported parameters.

    :param direction: 'reverse' (default) is latest to earliest;
        'forward' is earliest to latest.
    :param specific_fileids: If not None, only include revisions
        affecting the specified files, rather than all revisions.
    :param start_revision: If not None, only generate
        revisions >= start_revision
    :param end_revision: If not None, only generate
        revisions <= end_revision
    :param limit: If set, generate only 'limit' revisions, all revisions
        are shown if None or 0.
    :param message_search: If not None, only include revisions with
        matching commit messages
    :param levels: the number of levels of revisions to
        generate; 1 for just the mainline; 0 for all levels, or None for
        a sensible default.
    :param generate_tags: If True, include tags for matched revisions.
    :param delta_type: Either 'full', 'partial' or None.
        'full' means generate the complete delta - adds/deletes/modifies/etc;
        'partial' means filter the delta using specific_fileids;
        None means do not generate any delta.
    :param diff_type: Either 'full', 'partial' or None.
        'full' means generate the complete diff - adds/deletes/modifies/etc;
        'partial' means filter the diff using specific_fileids;
        None means do not generate any diff.
    :param _match_using_deltas: a private parameter controlling the
        algorithm used for matching specific_fileids. This parameter
        may be removed in the future so bzrlib client code should NOT
        use it.
    :param exclude_common_ancestry: Whether -rX..Y should be interpreted as a
        range operator or as a graph difference.
    :param signature: show digital signature information
    :param match: Dictionary of list of search strings to use when filtering
        revisions. Keys can be 'message', 'author', 'committer', 'bugs' or
        the empty string to match any of the preceding properties.
    :param omit_merges: If True, commits with more than one parent are
        omitted.
    """
    # Take care of old style message_search parameter
    if message_search:
        if match:
            if 'message' in match:
                match['message'].append(message_search)
            else:
                match['message'] = [message_search]
        else:
            match = {'message': [message_search]}
    return {
        'direction': direction,
        'specific_fileids': specific_fileids,
        'start_revision': start_revision,
        'end_revision': end_revision,
        'limit': limit,
        'levels': levels,
        'generate_tags': generate_tags,
        'delta_type': delta_type,
        'diff_type': diff_type,
        'exclude_common_ancestry': exclude_common_ancestry,
        'signature': signature,
        'match': match,
        'omit_merges': omit_merges,
        # Add 'private' attributes for features that may be deprecated
        '_match_using_deltas': _match_using_deltas,
        }
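
# Illustrative sketch of the 'match' dictionary documented above: within one
# property any listed pattern may match, but every listed property must match
# at least once. The patterns and the limit below are arbitrary examples.
def _example_match_request():
    return make_log_request_dict(
        match={'message': ['fix', 'bug'],          # message matches 'fix' or 'bug'
               'author': ['jane@example\\.com']},  # and author matches this
        limit=20)
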
def _apply_log_request_defaults(rqst):
    """Apply default values to a request dictionary."""
    result = _DEFAULT_REQUEST_PARAMS.copy()
    if rqst:
        result.update(rqst)
    return result

def format_signature_validity(rev_id, repo):
    """Get the signature validity.

    :param rev_id: revision id to validate
    :param repo: repository of revision
    :return: human readable string to print to log
    """
    from bzrlib import gpg

    gpg_strategy = gpg.GPGStrategy(None)
    result = repo.verify_revision_signature(rev_id, gpg_strategy)
    if result[0] == gpg.SIGNATURE_VALID:
        return "valid signature from {0}".format(result[1])
    if result[0] == gpg.SIGNATURE_KEY_MISSING:
        return "unknown key {0}".format(result[1])
    if result[0] == gpg.SIGNATURE_NOT_VALID:
        return "invalid signature!"
    if result[0] == gpg.SIGNATURE_NOT_SIGNED:
        return "no signature"

class LogGenerator(object):
    """A generator of log revisions."""

    def iter_log_revisions(self):
        """Iterate over LogRevision objects.

        :return: An iterator yielding LogRevision objects.
        """
        raise NotImplementedError(self.iter_log_revisions)


class Logger(object):
    """An object that generates, formats and displays a log."""

    def __init__(self, branch, rqst):
        """Create a Logger.

        :param branch: the branch to log
        :param rqst: A dictionary specifying the query parameters.
            See make_log_request_dict() for supported values.
        """
        self.branch = branch
        self.rqst = _apply_log_request_defaults(rqst)

    def show(self, lf):
        """Display the log.

        :param lf: The LogFormatter object to send the output to.
        """
        if not isinstance(lf, LogFormatter):
            warn("not a LogFormatter instance: %r" % lf)

        self.branch.lock_read()
        try:
            if getattr(lf, 'begin_log', None):
                lf.begin_log()
            self._show_body(lf)
            if getattr(lf, 'end_log', None):
                lf.end_log()
        finally:
            self.branch.unlock()

    def _show_body(self, lf):
        """Show the main log output.

        Subclasses may wish to override this.
        """
        # Tweak the LogRequest based on what the LogFormatter can handle.
        # (There's no point generating stuff if the formatter can't display it.)
        rqst = self.rqst
        if rqst['levels'] is None or lf.get_levels() > rqst['levels']:
            # user didn't specify levels, use whatever the LF can handle:
            rqst['levels'] = lf.get_levels()
        if not getattr(lf, 'supports_tags', False):
            rqst['generate_tags'] = False
        if not getattr(lf, 'supports_delta', False):
            rqst['delta_type'] = None
        if not getattr(lf, 'supports_diff', False):
            rqst['diff_type'] = None
        if not getattr(lf, 'supports_signatures', False):
            rqst['signature'] = False

        # Find and print the interesting revisions
        generator = self._generator_factory(self.branch, rqst)
        for lr in generator.iter_log_revisions():
            lf.log_revision(lr)
        lf.show_advice()

    def _generator_factory(self, branch, rqst):
        """Make the LogGenerator object to use.

        Subclasses may wish to override this.
        """
        return _DefaultLogGenerator(branch, rqst)

class _StartNotLinearAncestor(Exception):
    """Raised when a start revision is not found walking left-hand history."""


class _DefaultLogGenerator(LogGenerator):
    """The default generator of log revisions."""

    def __init__(self, branch, rqst):
        self.branch = branch
        self.rqst = rqst
        if rqst.get('generate_tags') and branch.supports_tags():
            self.rev_tag_dict = branch.tags.get_reverse_tag_dict()
        else:
            self.rev_tag_dict = {}

    def iter_log_revisions(self):
        """Iterate over LogRevision objects.

        :return: An iterator yielding LogRevision objects.
        """
        rqst = self.rqst
        levels = rqst.get('levels')
        limit = rqst.get('limit')
        diff_type = rqst.get('diff_type')
        show_signature = rqst.get('signature')
        omit_merges = rqst.get('omit_merges')
        log_count = 0
        revision_iterator = self._create_log_revision_iterator()
        for revs in revision_iterator:
            for (rev_id, revno, merge_depth), rev, delta in revs:
                # 0 levels means show everything; merge_depth counts from 0
                if levels != 0 and merge_depth >= levels:
                    continue
                if omit_merges and len(rev.parent_ids) > 1:
                    continue
                if diff_type is None:
                    diff = None
                else:
                    diff = self._format_diff(rev, rev_id, diff_type)
                if show_signature:
                    signature = format_signature_validity(rev_id,
                        self.branch.repository)
                else:
                    signature = None
                yield LogRevision(rev, revno, merge_depth, delta,
                    self.rev_tag_dict.get(rev_id), diff, signature)
                if limit:
                    log_count += 1
                    if log_count >= limit:
                        return

    def _format_diff(self, rev, rev_id, diff_type):
        repo = self.branch.repository
        if len(rev.parent_ids) == 0:
            ancestor_id = _mod_revision.NULL_REVISION
        else:
            ancestor_id = rev.parent_ids[0]
        tree_1 = repo.revision_tree(ancestor_id)
        tree_2 = repo.revision_tree(rev_id)
        file_ids = self.rqst.get('specific_fileids')
        if diff_type == 'partial' and file_ids is not None:
            specific_files = [tree_2.id2path(id) for id in file_ids]
        else:
            specific_files = None
        s = StringIO()
        path_encoding = get_diff_header_encoding()
        diff.show_diff_trees(tree_1, tree_2, s, specific_files, old_label='',
            new_label='', path_encoding=path_encoding)
        return s.getvalue()

    def _create_log_revision_iterator(self):
        """Create a revision iterator for log.

        :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
            delta).
        """
        self.start_rev_id, self.end_rev_id = _get_revision_limits(
            self.branch, self.rqst.get('start_revision'),
            self.rqst.get('end_revision'))
        if self.rqst.get('_match_using_deltas'):
            return self._log_revision_iterator_using_delta_matching()
        else:
            # We're using the per-file-graph algorithm. This scales really
            # well but only makes sense if there is a single file and it's
            # not a directory
            file_count = len(self.rqst.get('specific_fileids'))
            if file_count != 1:
                raise BzrError("illegal LogRequest: must match-using-deltas "
                    "when logging %d files" % file_count)
            return self._log_revision_iterator_using_per_file_graph()

    def _log_revision_iterator_using_delta_matching(self):
        # Get the base revisions, filtering by the revision range
        rqst = self.rqst
        generate_merge_revisions = rqst.get('levels') != 1
        delayed_graph_generation = not rqst.get('specific_fileids') and (
                rqst.get('limit') or self.start_rev_id or self.end_rev_id)
        view_revisions = _calc_view_revisions(
            self.branch, self.start_rev_id, self.end_rev_id,
            rqst.get('direction'),
            generate_merge_revisions=generate_merge_revisions,
            delayed_graph_generation=delayed_graph_generation,
            exclude_common_ancestry=rqst.get('exclude_common_ancestry'))

        # Apply the other filters
        return make_log_rev_iterator(self.branch, view_revisions,
            rqst.get('delta_type'), rqst.get('match'),
            file_ids=rqst.get('specific_fileids'),
            direction=rqst.get('direction'))

    def _log_revision_iterator_using_per_file_graph(self):
        # Get the base revisions, filtering by the revision range.
        # Note that we always generate the merge revisions because
        # filter_revisions_touching_file_id() requires them ...
        rqst = self.rqst
        view_revisions = _calc_view_revisions(
            self.branch, self.start_rev_id, self.end_rev_id,
            rqst.get('direction'), generate_merge_revisions=True,
            exclude_common_ancestry=rqst.get('exclude_common_ancestry'))
        if not isinstance(view_revisions, list):
            view_revisions = list(view_revisions)
        view_revisions = _filter_revisions_touching_file_id(self.branch,
            rqst.get('specific_fileids')[0], view_revisions,
            include_merges=rqst.get('levels') != 1)
        return make_log_rev_iterator(self.branch, view_revisions,
            rqst.get('delta_type'), rqst.get('match'))

def _calc_view_revisions(branch, start_rev_id, end_rev_id, direction,
                         generate_merge_revisions,
                         delayed_graph_generation=False,
                         exclude_common_ancestry=False,
                         ):
    """Calculate the revisions to view.

    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
        a list of the same tuples.
    """
    if (exclude_common_ancestry and start_rev_id == end_rev_id):
        raise errors.BzrCommandError(gettext(
            '--exclude-common-ancestry requires two different revisions'))
    if direction not in ('reverse', 'forward'):
        raise ValueError(gettext('invalid direction %r') % direction)
    br_revno, br_rev_id = branch.last_revision_info()
    if (end_rev_id and start_rev_id == end_rev_id
        and (not generate_merge_revisions
             or not _has_merges(branch, end_rev_id))):
        # If a single revision is requested, check we can handle it
        iter_revs = _generate_one_revision(branch, end_rev_id, br_rev_id,
                                           br_revno)
    elif not generate_merge_revisions:
        # If we only want to see linear revisions, we can iterate ...
        iter_revs = _generate_flat_revisions(branch, start_rev_id, end_rev_id,
                                             direction, exclude_common_ancestry)
        if direction == 'forward':
            iter_revs = reversed(iter_revs)
    else:
        iter_revs = _generate_all_revisions(branch, start_rev_id, end_rev_id,
                                            direction, delayed_graph_generation,
                                            exclude_common_ancestry)
        if direction == 'forward':
            iter_revs = _rebase_merge_depth(reverse_by_depth(list(iter_revs)))
    return iter_revs

def _generate_one_revision(branch, rev_id, br_rev_id, br_revno):
    if rev_id == br_rev_id:
        # It's the tip
        return [(br_rev_id, br_revno, 0)]
    else:
        revno_str = _compute_revno_str(branch, rev_id)
        return [(rev_id, revno_str, 0)]


def _generate_flat_revisions(branch, start_rev_id, end_rev_id, direction,
                             exclude_common_ancestry=False):
    result = _linear_view_revisions(
        branch, start_rev_id, end_rev_id,
        exclude_common_ancestry=exclude_common_ancestry)
    # If a start limit was given and it's not obviously an
    # ancestor of the end limit, check it before outputting anything
    if direction == 'forward' or (start_rev_id
        and not _is_obvious_ancestor(branch, start_rev_id, end_rev_id)):
        try:
            result = list(result)
        except _StartNotLinearAncestor:
            raise errors.BzrCommandError(gettext('Start revision not found in'
                ' left-hand history of end revision.'))
    return result

def _generate_all_revisions(branch, start_rev_id, end_rev_id, direction,
                            delayed_graph_generation,
                            exclude_common_ancestry=False):
    # On large trees, generating the merge graph can take 30-60 seconds
    # so we delay doing it until a merge is detected, incrementally
    # returning initial (non-merge) revisions while we can.

    # The above is only true for old formats (<= 0.92), for newer formats, a
    # couple of seconds only should be needed to load the whole graph and the
    # other graph operations needed are even faster than that -- vila 100201
    initial_revisions = []
    if delayed_graph_generation:
        try:
            for rev_id, revno, depth in _linear_view_revisions(
                branch, start_rev_id, end_rev_id, exclude_common_ancestry):
                if _has_merges(branch, rev_id):
                    # The end_rev_id can be nested down somewhere. We need an
                    # explicit ancestry check. There is an ambiguity here as we
                    # may not raise _StartNotLinearAncestor for a revision that
                    # is an ancestor but not a *linear* one. But since we have
                    # loaded the graph to do the check (or calculate a dotted
                    # revno), we may as well accept to show the log... We need
                    # the check only if start_rev_id is not None as all
                    # revisions have _mod_revision.NULL_REVISION as an ancestor
                    graph = branch.repository.get_graph()
                    if (start_rev_id is not None
                        and not graph.is_ancestor(start_rev_id, end_rev_id)):
                        raise _StartNotLinearAncestor()
                    # Since we collected the revisions so far, we need to
                    # adjust end_rev_id.
                    end_rev_id = rev_id
                    break
                else:
                    initial_revisions.append((rev_id, revno, depth))
            else:
                # No merged revisions found
                return initial_revisions
        except _StartNotLinearAncestor:
            # A merge was never detected so the lower revision limit can't
            # be nested down somewhere
            raise errors.BzrCommandError(gettext('Start revision not found in'
                ' history of end revision.'))

    # We exited the loop above because we encountered a revision with merges;
    # from this revision on, we need to switch to _graph_view_revisions.

    # A log including nested merges is required. If the direction is reverse,
    # we rebase the initial merge depths so that the development line is
    # shown naturally, i.e. just like it is for linear logging. We can easily
    # make forward the exact opposite display, but showing the merge revisions
    # indented at the end seems slightly nicer in that case.
    view_revisions = chain(iter(initial_revisions),
        _graph_view_revisions(branch, start_rev_id, end_rev_id,
                              rebase_initial_depths=(direction == 'reverse'),
                              exclude_common_ancestry=exclude_common_ancestry))
    return view_revisions

def _has_merges(branch, rev_id):
    """Does a revision have multiple parents or not?"""
    parents = branch.repository.get_parent_map([rev_id]).get(rev_id, [])
    return len(parents) > 1


def _compute_revno_str(branch, rev_id):
    """Compute the revno string from a rev_id.

    :return: The revno string, or None if the revision is not in the supplied
        branch.
    """
    try:
        revno = branch.revision_id_to_dotted_revno(rev_id)
    except errors.NoSuchRevision:
        # The revision must be outside of this branch
        return None
    else:
        return '.'.join(str(n) for n in revno)

def _is_obvious_ancestor(branch, start_rev_id, end_rev_id):
    """Is start_rev_id an obvious ancestor of end_rev_id?"""
    if start_rev_id and end_rev_id:
        try:
            start_dotted = branch.revision_id_to_dotted_revno(start_rev_id)
            end_dotted = branch.revision_id_to_dotted_revno(end_rev_id)
        except errors.NoSuchRevision:
            # one or both is not in the branch; not obvious
            return False
        if len(start_dotted) == 1 and len(end_dotted) == 1:
            # both on mainline
            return start_dotted[0] <= end_dotted[0]
        elif (len(start_dotted) == 3 and len(end_dotted) == 3 and
            start_dotted[0:1] == end_dotted[0:1]):
            # both on same development line
            return start_dotted[2] <= end_dotted[2]
        else:
            # not obvious
            return False
    # if either start or end is not specified then we use either the first or
    # the last revision and *they* are obvious ancestors.
    return True

def _linear_view_revisions(branch, start_rev_id, end_rev_id,
                           exclude_common_ancestry=False):
    """Calculate a sequence of revisions to view, newest to oldest.

    :param start_rev_id: the lower revision-id
    :param end_rev_id: the upper revision-id
    :param exclude_common_ancestry: Whether the start_rev_id should be part of
        the iterated revisions.
    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
    :raises _StartNotLinearAncestor: if a start_rev_id is specified but
        is not found walking the left-hand history
    """
    br_revno, br_rev_id = branch.last_revision_info()
    repo = branch.repository
    graph = repo.get_graph()
    if start_rev_id is None and end_rev_id is None:
        cur_revno = br_revno
        for revision_id in graph.iter_lefthand_ancestry(br_rev_id,
            (_mod_revision.NULL_REVISION,)):
            yield revision_id, str(cur_revno), 0
            cur_revno -= 1
    else:
        if end_rev_id is None:
            end_rev_id = br_rev_id
        found_start = start_rev_id is None
        for revision_id in graph.iter_lefthand_ancestry(end_rev_id,
                (_mod_revision.NULL_REVISION,)):
            revno_str = _compute_revno_str(branch, revision_id)
            if not found_start and revision_id == start_rev_id:
                if not exclude_common_ancestry:
                    yield revision_id, revno_str, 0
                found_start = True
                break
            else:
                yield revision_id, revno_str, 0
        else:
            if not found_start:
                raise _StartNotLinearAncestor()

def _graph_view_revisions(branch, start_rev_id, end_rev_id,
                          rebase_initial_depths=True,
                          exclude_common_ancestry=False):
    """Calculate revisions to view including merges, newest to oldest.

    :param branch: the branch
    :param start_rev_id: the lower revision-id
    :param end_rev_id: the upper revision-id
    :param rebase_initial_depths: should depths be rebased until a mainline
        revision is found?
    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
    """
    if exclude_common_ancestry:
        stop_rule = 'with-merges-without-common-ancestry'
    else:
        stop_rule = 'with-merges'
    view_revisions = branch.iter_merge_sorted_revisions(
        start_revision_id=end_rev_id, stop_revision_id=start_rev_id,
        stop_rule=stop_rule)
    if not rebase_initial_depths:
        for (rev_id, merge_depth, revno, end_of_merge
             ) in view_revisions:
            yield rev_id, '.'.join(map(str, revno)), merge_depth
    else:
        # We're following a development line starting at a merged revision.
        # We need to adjust depths down by the initial depth until we find
        # a depth less than it. Then we use that depth as the adjustment.
        # If and when we reach the mainline, depth adjustment ends.
        depth_adjustment = None
        for (rev_id, merge_depth, revno, end_of_merge
             ) in view_revisions:
            if depth_adjustment is None:
                depth_adjustment = merge_depth
            if depth_adjustment:
                if merge_depth < depth_adjustment:
                    # From now on we reduce the depth adjustment, this can be
                    # surprising for users. The alternative requires two passes
                    # which breaks the fast display of the first revision
                    # anyway.
                    depth_adjustment = merge_depth
                merge_depth -= depth_adjustment
            yield rev_id, '.'.join(map(str, revno)), merge_depth

def _rebase_merge_depth(view_revisions):
    """Adjust depths upwards so the top level is 0."""
    # If either the first or last revision has a merge_depth of 0, we're done
    if view_revisions and view_revisions[0][2] and view_revisions[-1][2]:
        min_depth = min([d for r, n, d in view_revisions])
        if min_depth != 0:
            view_revisions = [(r, n, d - min_depth) for r, n, d in view_revisions]
    return view_revisions

def make_log_rev_iterator(branch, view_revisions, generate_delta, search,
                          file_ids=None, direction='reverse'):
    """Create a revision iterator for log.

    :param branch: The branch being logged.
    :param view_revisions: The revisions being viewed.
    :param generate_delta: Whether to generate a delta for each revision.
        Permitted values are None, 'full' and 'partial'.
    :param search: A user text search string.
    :param file_ids: If non empty, only revisions matching one or more of
        the file-ids are to be kept.
    :param direction: the direction in which view_revisions is sorted
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    # Convert view_revisions into (view, None, None) groups to fit with
    # the standard interface here.
    if type(view_revisions) == list:
        # A single batch conversion is faster than many incremental ones.
        # As we have all the data, do a batch conversion.
        nones = [None] * len(view_revisions)
        log_rev_iterator = iter([zip(view_revisions, nones, nones)])
    else:
        def _convert():
            for view in view_revisions:
                yield (view, None, None)
        log_rev_iterator = iter([_convert()])
    for adapter in log_adapters:
        # It would be nicer if log adapters were first class objects
        # with custom parameters. This will do for now. IGC 20090127
        if adapter == _make_delta_filter:
            log_rev_iterator = adapter(branch, generate_delta,
                search, log_rev_iterator, file_ids, direction)
        else:
            log_rev_iterator = adapter(branch, generate_delta,
                search, log_rev_iterator)
    return log_rev_iterator
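
# Illustrative sketch: entries in log_adapters share the signature used above
# (branch, generate_delta, search, log_rev_iterator) and yield the same
# batched ((rev_id, revno, merge_depth), rev, delta) lists. This hypothetical
# adapter passes every batch through unchanged.
def _example_identity_log_adapter(branch, generate_delta, search,
                                  log_rev_iterator):
    for revs in log_rev_iterator:
        yield revs
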
def _make_search_filter(branch, generate_delta, match, log_rev_iterator):
    """Create a filtered iterator of log_rev_iterator matching on a regex.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
    :param match: A dictionary with properties as keys and lists of strings
        as values. To match, a revision may match any of the supplied strings
        within a single property but must match at least one string for each
        property.
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    if match is None:
        return log_rev_iterator
    searchRE = [(k, [re.compile(x, re.IGNORECASE) for x in v])
                for (k, v) in match.iteritems()]
    return _filter_re(searchRE, log_rev_iterator)

def _filter_re(searchRE, log_rev_iterator):
    for revs in log_rev_iterator:
        new_revs = [rev for rev in revs if _match_filter(searchRE, rev[1])]
        if new_revs:
            yield new_revs


def _match_filter(searchRE, rev):
    strings = {
        'message': (rev.message,),
        'committer': (rev.committer,),
        'author': (rev.get_apparent_authors()),
        'bugs': list(rev.iter_bugs())
        }
    strings[''] = [item for inner_list in strings.itervalues()
                   for item in inner_list]
    for (k, v) in searchRE:
        if k in strings and not _match_any_filter(strings[k], v):
            return False
    return True


def _match_any_filter(strings, res):
    return any([filter(None, map(re.search, strings)) for re in res])
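
# Illustrative sketch of the matching rule above: _match_any_filter() is true
# as soon as one compiled pattern hits one of the candidate strings. The
# sample strings and pattern are arbitrary.
def _example_match_any():
    patterns = [re.compile('fix', re.IGNORECASE)]
    return _match_any_filter(['Fixed the parser', 'unrelated cleanup'],
                             patterns)  # -> True
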
def _make_delta_filter(branch, generate_delta, search, log_rev_iterator,
                       fileids=None, direction='reverse'):
    """Add revision deltas to a log iterator if needed.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
        Permitted values are None, 'full' and 'partial'.
    :param search: A user text search string.
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :param fileids: If non empty, only revisions matching one or more of
        the file-ids are to be kept.
    :param direction: the direction in which view_revisions is sorted
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    if not generate_delta and not fileids:
        return log_rev_iterator
    return _generate_deltas(branch.repository, log_rev_iterator,
                            generate_delta, fileids, direction)

def _generate_deltas(repository, log_rev_iterator, delta_type, fileids,
                     direction):
    """Create deltas for each batch of revisions in log_rev_iterator.

    If we're only generating deltas for the sake of filtering against
    file-ids, we stop generating deltas once all file-ids reach the
    appropriate life-cycle point. If we're receiving data newest to
    oldest, then that life-cycle point is 'add', otherwise it's 'remove'.
    """
    check_fileids = fileids is not None and len(fileids) > 0
    if check_fileids:
        fileid_set = set(fileids)
        if direction == 'reverse':
            stop_on = 'add'
        else:
            stop_on = 'remove'
    else:
        fileid_set = None
    for revs in log_rev_iterator:
        # If we were matching against fileids and we've run out,
        # there's nothing left to do
        if check_fileids and not fileid_set:
            return
        revisions = [rev[1] for rev in revs]
        new_revs = []
        if delta_type == 'full' and not check_fileids:
            deltas = repository.get_deltas_for_revisions(revisions)
            for rev, delta in izip(revs, deltas):
                new_revs.append((rev[0], rev[1], delta))
        else:
            deltas = repository.get_deltas_for_revisions(revisions, fileid_set)
            for rev, delta in izip(revs, deltas):
                if check_fileids:
                    if delta is None or not delta.has_changed():
                        continue
                    else:
                        _update_fileids(delta, fileid_set, stop_on)
                        if delta_type is None:
                            delta = None
                        elif delta_type == 'full':
                            # If the file matches all the time, rebuilding
                            # a full delta like this in addition to a partial
                            # one could be slow. However, it's likely that
                            # most revisions won't get this far, making it
                            # faster to filter on the partial deltas and
                            # build the occasional full delta than always
                            # building full deltas and filtering those.
                            rev_id = rev[0][0]
                            delta = repository.get_revision_delta(rev_id)
                new_revs.append((rev[0], rev[1], delta))
        yield new_revs

def _update_fileids(delta, fileids, stop_on):
    """Update the set of file-ids to search based on file lifecycle events.

    :param fileids: a set of fileids to update
    :param stop_on: either 'add' or 'remove' - take file-ids out of the
        fileids set once their add or remove entry is detected respectively
    """
    if stop_on == 'add':
        for item in delta.added:
            if item[1] in fileids:
                fileids.remove(item[1])
    elif stop_on == 'remove':
        for item in delta.removed:
            if item[1] in fileids:
                fileids.remove(item[1])

def _make_revision_objects(branch, generate_delta, search, log_rev_iterator):
    """Extract revision objects from the repository.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
    :param search: A user text search string.
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    repository = branch.repository
    for revs in log_rev_iterator:
        # r = revision_id, n = revno, d = merge depth
        revision_ids = [view[0] for view, _, _ in revs]
        revisions = repository.get_revisions(revision_ids)
        revs = [(rev[0], revision, rev[2]) for rev, revision in
            izip(revs, revisions)]
        yield revs

def _make_batch_filter(branch, generate_delta, search, log_rev_iterator):
    """Group up a single large batch into smaller ones.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
    :param search: A user text search string.
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    num = 9
    for batch in log_rev_iterator:
        batch = iter(batch)
        while True:
            step = [detail for _, detail in zip(range(num), batch)]
            if len(step) == 0:
                break
            yield step
            num = min(int(num * 1.5), 200)
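
# Illustrative sketch of the batching behaviour above: batches start small so
# the first revision is displayed quickly, then grow by 1.5x up to a cap of
# 200. For example, 50 revisions are delivered in batches of 9, 13, 19 and 9.
def _example_batch_sizes(total, num=9):
    sizes = []
    while total > 0:
        step = min(num, total)
        sizes.append(step)
        total -= step
        num = min(int(num * 1.5), 200)
    return sizes
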
def _get_revision_limits(branch, start_revision, end_revision):
    """Get and check revision limits.

    :param branch: The branch containing the revisions.
    :param start_revision: The first revision to be logged.
        For backwards compatibility this may be a mainline integer revno,
        but for merge revision support a RevisionInfo is expected.
    :param end_revision: The last revision to be logged.
        For backwards compatibility this may be a mainline integer revno,
        but for merge revision support a RevisionInfo is expected.
    :return: (start_rev_id, end_rev_id) tuple.
    """
    branch_revno, branch_rev_id = branch.last_revision_info()
    start_rev_id = None
    if start_revision is None:
        start_revno = 1
    else:
        if isinstance(start_revision, revisionspec.RevisionInfo):
            start_rev_id = start_revision.rev_id
            start_revno = start_revision.revno or 1
        else:
            branch.check_real_revno(start_revision)
            start_revno = start_revision
            start_rev_id = branch.get_rev_id(start_revno)

    end_rev_id = None
    if end_revision is None:
        end_revno = branch_revno
    else:
        if isinstance(end_revision, revisionspec.RevisionInfo):
            end_rev_id = end_revision.rev_id
            end_revno = end_revision.revno or branch_revno
        else:
            branch.check_real_revno(end_revision)
            end_revno = end_revision
            end_rev_id = branch.get_rev_id(end_revno)

    if branch_revno != 0:
        if (start_rev_id == _mod_revision.NULL_REVISION
            or end_rev_id == _mod_revision.NULL_REVISION):
            raise errors.BzrCommandError(gettext('Logging revision 0 is invalid.'))
        if start_revno > end_revno:
            raise errors.BzrCommandError(gettext("Start revision must be "
                "older than the end revision."))
    return (start_rev_id, end_rev_id)

def _get_mainline_revs(branch, start_revision, end_revision):
    """Get the mainline revisions from the branch.

    Generates the list of mainline revisions for the branch.

    :param branch: The branch containing the revisions.
    :param start_revision: The first revision to be logged.
        For backwards compatibility this may be a mainline integer revno,
        but for merge revision support a RevisionInfo is expected.
    :param end_revision: The last revision to be logged.
        For backwards compatibility this may be a mainline integer revno,
        but for merge revision support a RevisionInfo is expected.
    :return: A (mainline_revs, rev_nos, start_rev_id, end_rev_id) tuple.
    """
    branch_revno, branch_last_revision = branch.last_revision_info()
    if branch_revno == 0:
        return None, None, None, None

    # For mainline generation, map start_revision and end_revision to
    # mainline revnos. If the revision is not on the mainline choose the
    # appropriate extreme of the mainline instead - the extra will be
    # filtered later.
    # Also map the revisions to rev_ids, to be used in the later filtering
    # stage.
    start_rev_id = None
    if start_revision is None:
        start_revno = 1
    else:
        if isinstance(start_revision, revisionspec.RevisionInfo):
            start_rev_id = start_revision.rev_id
            start_revno = start_revision.revno or 1
        else:
            branch.check_real_revno(start_revision)
            start_revno = start_revision

    end_rev_id = None
    if end_revision is None:
        end_revno = branch_revno
    else:
        if isinstance(end_revision, revisionspec.RevisionInfo):
            end_rev_id = end_revision.rev_id
            end_revno = end_revision.revno or branch_revno
        else:
            branch.check_real_revno(end_revision)
            end_revno = end_revision

    if ((start_rev_id == _mod_revision.NULL_REVISION)
        or (end_rev_id == _mod_revision.NULL_REVISION)):
        raise errors.BzrCommandError(gettext('Logging revision 0 is invalid.'))
    if start_revno > end_revno:
        raise errors.BzrCommandError(gettext("Start revision must be older "
            "than the end revision."))

    if end_revno < start_revno:
        return None, None, None, None
    cur_revno = branch_revno
    rev_nos = {}
    mainline_revs = []
    graph = branch.repository.get_graph()
    for revision_id in graph.iter_lefthand_ancestry(
            branch_last_revision, (_mod_revision.NULL_REVISION,)):
        if cur_revno < start_revno:
            # We have gone far enough, but we always add 1 more revision
            rev_nos[revision_id] = cur_revno
            mainline_revs.append(revision_id)
            break
        if cur_revno <= end_revno:
            rev_nos[revision_id] = cur_revno
            mainline_revs.append(revision_id)
        cur_revno -= 1
    else:
        # We walked off the edge of all revisions, so we add a 'None' marker
        mainline_revs.append(None)

    mainline_revs.reverse()

    # override the mainline to look like the revision history.
    return mainline_revs, rev_nos, start_rev_id, end_rev_id

def _filter_revisions_touching_file_id(branch, file_id, view_revisions,
                                       include_merges=True):
    r"""Return the list of revision ids which touch a given file id.

    The function filters view_revisions and returns a subset.
    This includes the revisions which directly change the file id,
    and the revisions which merge these changes. So if the
    revision graph is::

        A-.
        |\ \
        B C E
        |/ /
        D |
        |\|
        | F
        |/
        G

    And 'C' changes a file, then both C and D will be returned. F will not be
    returned even though it brings the changes to C into the branch starting
    with E. (Note that if we were using F as the tip instead of G, then we
    would see C, D, F.)

    This will also be restricted based on a subset of the mainline.

    :param branch: The branch where we can get text revision information.

    :param file_id: Filter out revisions that do not touch file_id.

    :param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
        tuples. This is the list of revisions which will be filtered. It is
        assumed that view_revisions is in merge_sort order (i.e. newest
        revision first).

    :param include_merges: include merge revisions in the result or not

    :return: A list of (revision_id, dotted_revno, merge_depth) tuples.
    """
    # Lookup all possible text keys to determine which ones actually modified
    # the file.
    graph = branch.repository.get_file_graph()
    get_parent_map = graph.get_parent_map
    text_keys = [(file_id, rev_id) for rev_id, revno, depth in view_revisions]
    # Looking up keys in batches of 1000 can cut the time in half, as well as
    # memory consumption. GraphIndex *does* like to look for a few keys in
    # parallel, it just doesn't like looking for *lots* of keys in parallel.
    # TODO: This code needs to be re-evaluated periodically as we tune the
    #       indexing layer. We might consider passing in hints as to the known
    #       access pattern (sparse/clustered, high success rate/low success
    #       rate). This particular access is clustered with a low success rate.
    modified_text_revisions = set()
    chunk_size = 1000
    for start in xrange(0, len(text_keys), chunk_size):
        next_keys = text_keys[start:start + chunk_size]
        # Only keep the revision_id portion of the key
        modified_text_revisions.update(
            [k[1] for k in get_parent_map(next_keys)])
    del text_keys, next_keys

    result = []
    # Track what revisions will merge the current revision, replace entries
    # with 'None' when they have been added to result
    current_merge_stack = [None]
    for info in view_revisions:
        rev_id, revno, depth = info
        if depth == len(current_merge_stack):
            current_merge_stack.append(info)
        else:
            del current_merge_stack[depth + 1:]
            current_merge_stack[-1] = info

        if rev_id in modified_text_revisions:
            # This needs to be logged, along with the extra revisions
            for idx in xrange(len(current_merge_stack)):
                node = current_merge_stack[idx]
                if node is not None:
                    if include_merges or node[2] == 0:
                        result.append(node)
                        current_merge_stack[idx] = None
    return result

def reverse_by_depth(merge_sorted_revisions, _depth=0):
    """Reverse revisions by depth.

    Revisions with a different depth are sorted as a group with the previous
    revision of that depth. There may be no topological justification for this,
    but it looks much nicer.
    """
    # Add a fake revision at start so that we can always attach sub revisions
    merge_sorted_revisions = [(None, None, _depth)] + merge_sorted_revisions
    zd_revisions = []
    for val in merge_sorted_revisions:
        if val[2] == _depth:
            # Each revision at the current depth becomes a chunk grouping all
            # higher depth revisions.
            zd_revisions.append([val])
        else:
            zd_revisions[-1].append(val)
    for revisions in zd_revisions:
        if len(revisions) > 1:
            # We have higher depth revisions, let's reverse them locally
            revisions[1:] = reverse_by_depth(revisions[1:], _depth + 1)
    zd_revisions.reverse()
    result = []
    for chunk in zd_revisions:
        result.extend(chunk)
    if _depth == 0:
        # Top level call, get rid of the fake revisions that have been added
        result = [r for r in result if r[0] is not None and r[1] is not None]
    return result
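
# Illustrative sketch: reversing a merge-sorted list by depth keeps merged
# revisions attached to the mainline revision that merged them. The tuples
# are (revision_id, revno, merge_depth) as elsewhere in this module; the
# revision ids below are made up.
def _example_reverse_by_depth():
    merge_sorted = [('rev-3', '3', 0),
                    ('rev-2.1.1', '2.1.1', 1),
                    ('rev-2', '2', 0),
                    ('rev-1', '1', 0)]
    # Returns oldest-first mainline order, with rev-2.1.1 still grouped after
    # the revision that merged it:
    # [('rev-1', '1', 0), ('rev-2', '2', 0), ('rev-3', '3', 0),
    #  ('rev-2.1.1', '2.1.1', 1)]
    return reverse_by_depth(merge_sorted)
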
class LogRevision(object):
    """A revision to be logged (by LogFormatter.log_revision).

    A simple wrapper for the attributes of a revision to be logged.
    The attributes may or may not be populated, as determined by the
    logging options and the log formatter capabilities.
    """

    def __init__(self, rev=None, revno=None, merge_depth=0, delta=None,
                 tags=None, diff=None, signature=None):
        self.rev = rev
        if revno is None:
            self.revno = None
        else:
            self.revno = str(revno)
        self.merge_depth = merge_depth
        self.delta = delta
        self.tags = tags
        self.diff = diff
        self.signature = signature

class LogFormatter(object):
    """Abstract class to display log messages.

    At a minimum, a derived class must implement the log_revision method.

    If the LogFormatter needs to be informed of the beginning or end of
    a log it should implement the begin_log and/or end_log hook methods.

    A LogFormatter should define the following supports_XXX flags
    to indicate which LogRevision attributes it supports:

    - supports_delta must be True if this log formatter supports delta.
      Otherwise the delta attribute may not be populated. The 'delta_format'
      attribute describes whether the 'short_status' format (1) or the long
      one (2) should be used.

    - supports_merge_revisions must be True if this log formatter supports
      merge revisions. If not, then only mainline revisions will be passed
      to the formatter.

    - preferred_levels is the number of levels this formatter defaults to.
      The default value is zero meaning display all levels.
      This value is only relevant if supports_merge_revisions is True.

    - supports_tags must be True if this log formatter supports tags.
      Otherwise the tags attribute may not be populated.

    - supports_diff must be True if this log formatter supports diffs.
      Otherwise the diff attribute may not be populated.

    - supports_signatures must be True if this log formatter supports GPG
      signatures.

    Plugins can register functions to show custom revision properties using
    the properties_handler_registry. The registered function
    must respect the following interface description::

        def my_show_properties(properties_dict):
            # code that returns a dict {'name':'value'} of the properties
            # to be shown
    """
    preferred_levels = 0
    def __init__(self, to_file, show_ids=False, show_timezone='original',
                 delta_format=None, levels=None, show_advice=False,
                 to_exact_file=None, author_list_handler=None):
        """Create a LogFormatter.

        :param to_file: the file to output to
        :param to_exact_file: if set, gives an output stream to which
            non-Unicode diffs are written.
        :param show_ids: if True, revision-ids are to be displayed
        :param show_timezone: the timezone to use
        :param delta_format: the level of delta information to display
            or None to leave it to the formatter to decide
        :param levels: the number of levels to display; None or -1 to
            let the log formatter decide.
        :param show_advice: whether to show advice at the end of the
            log or not
        :param author_list_handler: callable generating a list of
            authors to display for a given revision
        """
        self.to_file = to_file
        # 'exact' stream used to show diff, it should print content 'as is'
        # and should not try to decode/encode it to unicode to avoid bug #328007
        if to_exact_file is not None:
            self.to_exact_file = to_exact_file
        else:
            # XXX: somewhat hacky; this assumes it's a codec writer; it's better
            # for code that expects to get diffs to pass in the exact file
            # stream
            self.to_exact_file = getattr(to_file, 'stream', to_file)
        self.show_ids = show_ids
        self.show_timezone = show_timezone
        if delta_format is None:
            # Ensures backward compatibility
            delta_format = 2  # long format
        self.delta_format = delta_format
        self.levels = levels
        self._show_advice = show_advice
        self._merge_count = 0
        self._author_list_handler = author_list_handler

    def get_levels(self):
        """Get the number of levels to display or 0 for all."""
        if getattr(self, 'supports_merge_revisions', False):
            if self.levels is None or self.levels == -1:
                self.levels = self.preferred_levels
        else:
            self.levels = 1
        return self.levels

    def log_revision(self, revision):
        """Log a revision.

        :param revision: The LogRevision to be logged.
        """
        raise NotImplementedError('not implemented in abstract base')

    def show_advice(self):
        """Output user advice, if any, when the log is completed."""
        if self._show_advice and self.levels == 1 and self._merge_count > 0:
            advice_sep = self.get_advice_separator()
            if advice_sep:
                self.to_file.write(advice_sep)
            self.to_file.write(
                "Use --include-merged or -n0 to see merged revisions.\n")

    def get_advice_separator(self):
        """Get the text separating the log from the closing advice."""
        return ''

    def short_committer(self, rev):
        name, address = config.parse_username(rev.committer)
        if name:
            return name
        return address

    def short_author(self, rev):
        return self.authors(rev, 'first', short=True, sep=', ')

    def authors(self, rev, who, short=False, sep=None):
        """Generate list of authors, taking --authors option into account.

        The caller has to specify the name of an author list handler,
        as provided by the author list registry, using the ``who``
        argument. That name only sets a default, though: when the
        user selected a different author list generation using the
        ``--authors`` command line switch, as represented by the
        ``author_list_handler`` constructor argument, that value takes
        precedence.

        :param rev: The revision for which to generate the list of authors.
        :param who: Name of the default handler.
        :param short: Whether to shorten names to either name or address.
        :param sep: What separator to use for automatic concatenation.
        """
        if self._author_list_handler is not None:
            # The user did specify --authors, which overrides the default
            author_list_handler = self._author_list_handler
        else:
            # The user didn't specify --authors, so we use the caller's default
            author_list_handler = author_list_registry.get(who)

        names = author_list_handler(rev)

        if short:
            for i in range(len(names)):
                name, address = config.parse_username(names[i])
                if name:
                    names[i] = name
                else:
                    names[i] = address

        if sep is not None:
            names = sep.join(names)

        return names

    def merge_marker(self, revision):
        """Get the merge marker to include in the output or '' if none."""
        if len(revision.rev.parent_ids) > 1:
            self._merge_count += 1
            return ' [merge]'
        else:
            return ''

    def show_properties(self, revision, indent):
        """Displays the custom properties returned by each registered handler.

        If a registered handler raises an error it is propagated.
        """
        for line in self.custom_properties(revision):
            self.to_file.write("%s%s\n" % (indent, line))

    def custom_properties(self, revision):
        """Format the custom properties returned by each registered handler.

        If a registered handler raises an error it is propagated.

        :return: a list of formatted lines (excluding trailing newlines)
        """
        lines = self._foreign_info_properties(revision)
        for key, handler in properties_handler_registry.iteritems():
            lines.extend(self._format_properties(handler(revision)))
        return lines

    def _foreign_info_properties(self, rev):
        """Custom log displayer for foreign revision identifiers.

        :param rev: Revision object.
        """
        # Revision comes directly from a foreign repository
        if isinstance(rev, foreign.ForeignRevision):
            return self._format_properties(
                rev.mapping.vcs.show_foreign_revid(rev.foreign_revid))

        # Imported foreign revision ids always contain :
        if not ":" in rev.revision_id:
            return []

        # Revision was once imported from a foreign repository
        try:
            foreign_revid, mapping = \
                foreign.foreign_vcs_registry.parse_revision_id(rev.revision_id)
        except errors.InvalidRevisionId:
            return []

        return self._format_properties(
            mapping.vcs.show_foreign_revid(foreign_revid))

    def _format_properties(self, properties):
        lines = []
        for key, value in properties.items():
            lines.append(key + ': ' + value)
        return lines

    def show_diff(self, to_file, diff, indent):
        for l in diff.rstrip().split('\n'):
            to_file.write(indent + '%s\n' % (l,))
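
# Illustrative sketch: the smallest useful LogFormatter subclass. It only
# implements log_revision(), declares no supports_XXX flags, and therefore
# receives mainline revisions only, without deltas, diffs or tags. It could
# be exposed to users with register_formatter() further down in this module;
# the name is hypothetical.
class _ExampleOneLinerLogFormatter(LogFormatter):

    def log_revision(self, revision):
        self.to_file.write('%s %s\n' % (revision.revno or '',
                                        revision.rev.get_summary()))
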
# Separator between revisions in long format
_LONG_SEP = '-' * 60


class LongLogFormatter(LogFormatter):

    supports_merge_revisions = True
    preferred_levels = 1
    supports_delta = True
    supports_tags = True
    supports_diff = True
    supports_signatures = True

    def __init__(self, *args, **kwargs):
        super(LongLogFormatter, self).__init__(*args, **kwargs)
        if self.show_timezone == 'original':
            self.date_string = self._date_string_original_timezone
        else:
            self.date_string = self._date_string_with_timezone

    def _date_string_with_timezone(self, rev):
        return format_date(rev.timestamp, rev.timezone or 0,
                           self.show_timezone)

    def _date_string_original_timezone(self, rev):
        return format_date_with_offset_in_original_timezone(rev.timestamp,
            rev.timezone or 0)

    def log_revision(self, revision):
        """Log a revision, either merged or not."""
        indent = '    ' * revision.merge_depth
        lines = []
        if revision.revno is not None:
            lines.append('revno: %s%s' % (revision.revno,
                self.merge_marker(revision)))
        if revision.tags:
            lines.append('tags: %s' % (', '.join(revision.tags)))
        if self.show_ids or revision.revno is None:
            lines.append('revision-id: %s' % (revision.rev.revision_id,))
        if self.show_ids:
            for parent_id in revision.rev.parent_ids:
                lines.append('parent: %s' % (parent_id,))
        lines.extend(self.custom_properties(revision.rev))

        committer = revision.rev.committer
        authors = self.authors(revision.rev, 'all')
        if authors != [committer]:
            lines.append('author: %s' % (", ".join(authors),))
        lines.append('committer: %s' % (committer,))

        branch_nick = revision.rev.properties.get('branch-nick', None)
        if branch_nick is not None:
            lines.append('branch nick: %s' % (branch_nick,))

        lines.append('timestamp: %s' % (self.date_string(revision.rev),))

        if revision.signature is not None:
            lines.append('signature: ' + revision.signature)

        lines.append('message:')
        if not revision.rev.message:
            lines.append('  (no message)')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                lines.append('  %s' % (l,))

        # Dump the output, appending the delta and diff if requested
        to_file = self.to_file
        to_file.write("%s%s\n" % (indent, ('\n' + indent).join(lines)))
        if revision.delta is not None:
            # Use the standard status output to display changes
            from bzrlib.delta import report_delta
            report_delta(to_file, revision.delta, short_status=False,
                         show_ids=self.show_ids, indent=indent)
        if revision.diff is not None:
            to_file.write(indent + 'diff:\n')
            to_file.flush()
            # Note: we explicitly don't indent the diff (relative to the
            # revision information) so that the output can be fed to patch -p0
            self.show_diff(self.to_exact_file, revision.diff, indent)
            self.to_exact_file.flush()

    def get_advice_separator(self):
        """Get the text separating the log from the closing advice."""
        return '-' * 60 + '\n'

class ShortLogFormatter(LogFormatter):

    supports_merge_revisions = True
    preferred_levels = 1
    supports_delta = True
    supports_tags = True
    supports_diff = True

    def __init__(self, *args, **kwargs):
        super(ShortLogFormatter, self).__init__(*args, **kwargs)
        self.revno_width_by_depth = {}

    def log_revision(self, revision):
        # We need two indents: one per depth and one for the information
        # relative to that indent. Most mainline revnos are 5 chars or
        # less while dotted revnos are typically 11 chars or less. Once
        # calculated, we need to remember the offset for a given depth
        # as we might be starting from a dotted revno in the first column
        # and we want subsequent mainline revisions to line up.
        depth = revision.merge_depth
        indent = '    ' * depth
        revno_width = self.revno_width_by_depth.get(depth)
        if revno_width is None:
            if revision.revno is None or revision.revno.find('.') == -1:
                # mainline revno, e.g. 12345
                revno_width = 5
            else:
                # dotted revno, e.g. 12345.10.55
                revno_width = 11
            self.revno_width_by_depth[depth] = revno_width
        offset = ' ' * (revno_width + 1)

        to_file = self.to_file
        tags = ''
        if revision.tags:
            tags = ' {%s}' % (', '.join(revision.tags))
        to_file.write(indent + "%*s %s\t%s%s%s\n" % (revno_width,
                revision.revno or "", self.short_author(revision.rev),
                format_date(revision.rev.timestamp,
                            revision.rev.timezone or 0,
                            self.show_timezone, date_fmt="%Y-%m-%d",
                            show_offset=False),
                tags, self.merge_marker(revision)))
        self.show_properties(revision.rev, indent + offset)
        if self.show_ids or revision.revno is None:
            to_file.write(indent + offset + 'revision-id:%s\n'
                          % (revision.rev.revision_id,))
        if not revision.rev.message:
            to_file.write(indent + offset + '(no message)\n')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                to_file.write(indent + offset + '%s\n' % (l,))

        if revision.delta is not None:
            # Use the standard status output to display changes
            from bzrlib.delta import report_delta
            report_delta(to_file, revision.delta,
                         short_status=self.delta_format == 1,
                         show_ids=self.show_ids, indent=indent + offset)
        if revision.diff is not None:
            self.show_diff(self.to_exact_file, revision.diff, '      ')
        to_file.write('\n')

class LineLogFormatter(LogFormatter):

    supports_merge_revisions = True
    preferred_levels = 1
    supports_tags = True

    def __init__(self, *args, **kwargs):
        super(LineLogFormatter, self).__init__(*args, **kwargs)
        width = terminal_width()
        if width is not None:
            # we need one extra space for terminals that wrap on last char
            width = width - 1
        self._max_chars = width

    def truncate(self, str, max_len):
        if max_len is None or len(str) <= max_len:
            return str
        return str[:max_len - 3] + '...'

    def date_string(self, rev):
        return format_date(rev.timestamp, rev.timezone or 0,
                           self.show_timezone, date_fmt="%Y-%m-%d",
                           show_offset=False)

    def message(self, rev):
        if not rev.message:
            return '(no message)'
        else:
            return rev.message

    def log_revision(self, revision):
        indent = '  ' * revision.merge_depth
        self.to_file.write(self.log_string(revision.revno, revision.rev,
            self._max_chars, revision.tags, indent))
        self.to_file.write('\n')

    def log_string(self, revno, rev, max_chars, tags=None, prefix=''):
        """Format log info into one string. Truncate tail of string.

        :param revno: revision number or None.
            Revision numbers count from 1.
        :param rev: revision object
        :param max_chars: maximum length of resulting string
        :param tags: list of tags or None
        :param prefix: string to prefix each line
        :return: formatted truncated string
        """
        out = []
        if revno:
            # show revno only when it is not None
            out.append("%s:" % revno)
        if max_chars is not None:
            out.append(self.truncate(self.short_author(rev), (max_chars + 3) / 4))
        else:
            out.append(self.short_author(rev))
        out.append(self.date_string(rev))
        if len(rev.parent_ids) > 1:
            out.append('[merge]')
        if tags:
            tag_str = '{%s}' % (', '.join(tags))
            out.append(tag_str)
        out.append(rev.get_summary())
        return self.truncate(prefix + " ".join(out).rstrip('\n'), max_chars)

1748
class GnuChangelogLogFormatter(LogFormatter):

    supports_merge_revisions = True
    supports_delta = True

    def log_revision(self, revision):
        """Log a revision, either merged or not."""
        to_file = self.to_file

        date_str = format_date(revision.rev.timestamp,
                               revision.rev.timezone or 0,
                               self.show_timezone,
                               date_fmt='%Y-%m-%d',
                               show_offset=False)
        committer_str = self.authors(revision.rev, 'first', sep=', ')
        # GNU ChangeLog convention: two spaces between the name and the email
        committer_str = committer_str.replace(' <', '  <')
        to_file.write('%s  %s\n\n' % (date_str, committer_str))

        if revision.delta is not None and revision.delta.has_changed():
            for c in revision.delta.added + revision.delta.removed + revision.delta.modified:
                path, = c[:1]
                to_file.write('\t* %s:\n' % (path,))
            for c in revision.delta.renamed:
                oldpath, newpath = c[:2]
                # For renamed files, show both the old and the new path
                to_file.write('\t* %s:\n\t* %s:\n' % (oldpath, newpath))
            to_file.write('\n')

        if not revision.rev.message:
            to_file.write('\tNo commit message\n')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                to_file.write('\t%s\n' % (l.lstrip(),))
            to_file.write('\n')


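# Illustrative sample of the output produced by GnuChangelogLogFormatter above
# (hypothetical revision data; tab-indented lines shown as indentation):
#
#   2011-04-01  Jane Doe  <jane@example.com>
#
#       * src/foo.c:
#       Fix the frobnicator.

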
def line_log(rev, max_chars):
    lf = LineLogFormatter(None)
    return lf.log_string(None, rev, max_chars)


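# Illustrative usage (not part of the original module): line_log() renders a
# single revision object as a one-line summary, e.g. for the tip of a branch:
#
#   rev = branch.repository.get_revision(branch.last_revision())
#   print line_log(rev, 80)
#
# which yields something like (hypothetical values):
#
#   Jane Doe 2011-04-01 Fix the frobnicator

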
class LogFormatterRegistry(registry.Registry):
    """Registry for log formatters"""

    def make_formatter(self, name, *args, **kwargs):
        """Construct a formatter from arguments.

        :param name: Name of the formatter to construct. 'short', 'long' and
            'line' are built-in.
        """
        return self.get(name)(*args, **kwargs)

    def get_default(self, branch):
        c = branch.get_config_stack()
        return self.get(c.get('log_format'))


log_formatter_registry = LogFormatterRegistry()

log_formatter_registry.register('short', ShortLogFormatter,
                                'Moderately short log format.')
log_formatter_registry.register('long', LongLogFormatter,
                                'Detailed log format.')
log_formatter_registry.register('line', LineLogFormatter,
                                'Log format with one line per revision.')
log_formatter_registry.register('gnu-changelog', GnuChangelogLogFormatter,
                                'Format used by GNU ChangeLog files.')


def register_formatter(name, formatter):
    log_formatter_registry.register(name, formatter)


def log_formatter(name, *args, **kwargs):
    """Construct a formatter from arguments.

    name -- Name of the formatter to construct; currently 'long', 'short' and
        'line' are supported.
    """
    try:
        return log_formatter_registry.make_formatter(name, *args, **kwargs)
    except KeyError:
        raise errors.BzrCommandError(gettext("unknown log formatter: %r") % name)


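# Illustrative usage (not part of the original module): a plugin can register
# its own formatter and construct one through the registry. The class name
# below is hypothetical.
#
#   class TsvLogFormatter(LogFormatter):
#       ...
#
#   register_formatter('tsv', TsvLogFormatter)
#   lf = log_formatter('tsv', to_file=sys.stdout, show_timezone='original')

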
def author_list_all(rev):
    return rev.get_apparent_authors()[:]


def author_list_first(rev):
    lst = rev.get_apparent_authors()
    try:
        return [lst[0]]
    except IndexError:
        return []


def author_list_committer(rev):
    return [rev.committer]


author_list_registry = registry.Registry()

author_list_registry.register('all', author_list_all,
                              'All authors')

author_list_registry.register('first', author_list_first,
                              'The first author')

author_list_registry.register('committer', author_list_committer,
                              'The committer')


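# Illustrative example (not part of the original module): the registered
# handlers map a revision to the list of names used in log output, e.g.
#
#   rev = branch.repository.get_revision(branch.last_revision())
#   author_list_registry.get('first')(rev)      # -> ['Jane Doe <jane@example.com>']
#   author_list_registry.get('committer')(rev)  # -> [rev.committer]

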
def show_changed_revisions(branch, old_rh, new_rh, to_file=None,
                           log_format='long'):
    """Show the change in revision history, comparing the old history to the new one.

    :param branch: The branch where the revisions exist
    :param old_rh: The old revision history
    :param new_rh: The new revision history
    :param to_file: A file to write the results to. If None, stdout will be used
    """
    if to_file is None:
        to_file = codecs.getwriter(get_terminal_encoding())(sys.stdout,
            errors='replace')
    lf = log_formatter(log_format,
                       show_ids=False,
                       to_file=to_file,
                       show_timezone='original')

    # This is the first index which is different between
    # old and new
    base_idx = None
    for i in xrange(max(len(new_rh), len(old_rh))):
        if (len(new_rh) <= i
            or len(old_rh) <= i
            or new_rh[i] != old_rh[i]):
            base_idx = i
            break

    if base_idx is None:
        to_file.write('Nothing seems to have changed\n')
        return
    ## TODO: It might be nice to do something like show_log
    ## and show the merged entries. But since this is the
    ## removed revisions, it shouldn't be as important
    if base_idx < len(old_rh):
        to_file.write('*'*60)
        to_file.write('\nRemoved Revisions:\n')
        for i in range(base_idx, len(old_rh)):
            rev = branch.repository.get_revision(old_rh[i])
            lr = LogRevision(rev, i+1, 0, None)
            lf.log_revision(lr)
        to_file.write('*'*60)
        to_file.write('\n\n')
    if base_idx < len(new_rh):
        to_file.write('Added Revisions:\n')
        show_log(branch,
                 lf,
                 None,
                 verbose=False,
                 direction='forward',
                 start_revision=base_idx+1,
                 end_revision=len(new_rh),
                 search=None)


def get_history_change(old_revision_id, new_revision_id, repository):
    """Calculate the uncommon lefthand history between two revisions.

    :param old_revision_id: The original revision id.
    :param new_revision_id: The new revision id.
    :param repository: The repository to use for the calculation.

    :return: old_history, new_history
    """
    old_history = []
    old_revisions = set()
    new_history = []
    new_revisions = set()
    graph = repository.get_graph()
    new_iter = graph.iter_lefthand_ancestry(new_revision_id)
    old_iter = graph.iter_lefthand_ancestry(old_revision_id)
    stop_revision = None
    do_old = True
    do_new = True
    while do_new or do_old:
        if do_new:
            try:
                new_revision = new_iter.next()
            except StopIteration:
                do_new = False
            else:
                new_history.append(new_revision)
                new_revisions.add(new_revision)
                if new_revision in old_revisions:
                    stop_revision = new_revision
                    break
        if do_old:
            try:
                old_revision = old_iter.next()
            except StopIteration:
                do_old = False
            else:
                old_history.append(old_revision)
                old_revisions.add(old_revision)
                if old_revision in new_revisions:
                    stop_revision = old_revision
                    break
    new_history.reverse()
    old_history.reverse()
    if stop_revision is not None:
        new_history = new_history[new_history.index(stop_revision) + 1:]
        old_history = old_history[old_history.index(stop_revision) + 1:]
    return old_history, new_history


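# Illustrative example (assumed revision ids, not from the original source):
# with lefthand ancestries
#
#   old tip C:  A -> B -> C
#   new tip E:  A -> B -> D -> E
#
# get_history_change(C, E, repository) returns ([C], [D, E]): the revisions
# unique to each side, oldest first.

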
def show_branch_change(branch, output, old_revno, old_revision_id):
    """Show the changes made to a branch.

    :param branch: The branch to show changes about.
    :param output: A file-like object to write changes to.
    :param old_revno: The revno of the old tip.
    :param old_revision_id: The revision_id of the old tip.
    """
    new_revno, new_revision_id = branch.last_revision_info()
    old_history, new_history = get_history_change(old_revision_id,
                                                  new_revision_id,
                                                  branch.repository)
    if old_history == [] and new_history == []:
        output.write('Nothing seems to have changed\n')
        return

    log_format = log_formatter_registry.get_default(branch)
    lf = log_format(show_ids=False, to_file=output, show_timezone='original')
    if old_history != []:
        output.write('*'*60)
        output.write('\nRemoved Revisions:\n')
        show_flat_log(branch.repository, old_history, old_revno, lf)
        output.write('*'*60)
        output.write('\n\n')
    if new_history != []:
        output.write('Added Revisions:\n')
        start_revno = new_revno - len(new_history) + 1
        show_log(branch, lf, None, verbose=False, direction='forward',
                 start_revision=start_revno,)


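# Illustrative usage (not part of the original module): record the tip before
# an operation that may move it, then report what changed afterwards:
#
#   old_revno, old_revision_id = branch.last_revision_info()
#   # ... pull, push or uncommit here ...
#   show_branch_change(branch, sys.stdout, old_revno, old_revision_id)

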
def show_flat_log(repository, history, last_revno, lf):
    """Show a simple log of the specified history.

    :param repository: The repository to retrieve revisions from.
    :param history: A list of revision_ids indicating the lefthand history.
    :param last_revno: The revno of the last revision_id in the history.
    :param lf: The log formatter to use.
    """
    start_revno = last_revno - len(history) + 1
    revisions = repository.get_revisions(history)
    for i, rev in enumerate(revisions):
        lr = LogRevision(rev, i + last_revno, 0, None)
        lf.log_revision(lr)


def _get_info_for_log_files(revisionspec_list, file_list, add_cleanup):
    """Find file-ids and kinds given a list of files and a revision range.

    We search for files at the end of the range. If not found there,
    we try the start of the range.

    :param revisionspec_list: revision range as parsed on the command line
    :param file_list: the list of paths given on the command line;
      the first of these can be a branch location or a file path,
      the remainder must be file paths
    :param add_cleanup: When the branch returned is read locked,
      an unlock call will be queued to the cleanup.
    :return: (branch, info_list, start_rev_info, end_rev_info) where
      info_list is a list of (relative_path, file_id, kind) tuples where
      kind is one of values 'directory', 'file', 'symlink', 'tree-reference'.
      branch will be read-locked.
    """
    from bzrlib.builtins import _get_revision_range
    tree, b, path = controldir.ControlDir.open_containing_tree_or_branch(
        file_list[0])
    add_cleanup(b.lock_read().unlock)
    # XXX: It's damn messy converting a list of paths to relative paths when
    # those paths might be deleted ones, they might be on a case-insensitive
    # filesystem and/or they might be in silly locations (like another branch).
    # For example, what should "log bzr://branch/dir/file1 file2" do? (Is
    # file2 implicitly in the same dir as file1 or should its directory be
    # taken from the current tree somehow?) For now, this solves the common
    # case of running log in a nested directory, assuming paths beyond the
    # first one haven't been deleted ...
    if tree:
        relpaths = [path] + tree.safe_relpath_files(file_list[1:])
    else:
        relpaths = [path] + file_list[1:]
    info_list = []
    start_rev_info, end_rev_info = _get_revision_range(revisionspec_list, b,
        "log")
    if relpaths in ([], [u'']):
        return b, [], start_rev_info, end_rev_info
    if start_rev_info is None and end_rev_info is None:
        if tree is None:
            tree = b.basis_tree()
        tree1 = None
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, file_id)
            if file_id is None:
                # go back to when time began
                if tree1 is None:
                    try:
                        rev1 = b.get_rev_id(1)
                    except errors.NoSuchRevision:
                        # No history at all
                        file_id = None
                        kind = None
                    else:
                        tree1 = b.repository.revision_tree(rev1)
                if tree1:
                    file_id = tree1.path2id(fp)
                    kind = _get_kind_for_file_id(tree1, file_id)
            info_list.append((fp, file_id, kind))

    elif start_rev_info == end_rev_info:
        # One revision given - file must exist in it
        tree = b.repository.revision_tree(end_rev_info.rev_id)
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, file_id)
            info_list.append((fp, file_id, kind))

    else:
        # Revision range given. Get the file-id from the end tree.
        # If that fails, try the start tree.
        rev_id = end_rev_info.rev_id
        if rev_id is None:
            tree = b.basis_tree()
        else:
            tree = b.repository.revision_tree(rev_id)
        tree1 = None
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, file_id)
            if file_id is None:
                if tree1 is None:
                    rev_id = start_rev_info.rev_id
                    if rev_id is None:
                        rev1 = b.get_rev_id(1)
                        tree1 = b.repository.revision_tree(rev1)
                    else:
                        tree1 = b.repository.revision_tree(rev_id)
                file_id = tree1.path2id(fp)
                kind = _get_kind_for_file_id(tree1, file_id)
            info_list.append((fp, file_id, kind))
    return b, info_list, start_rev_info, end_rev_info


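# Illustrative example (hypothetical values, not from the original source):
# for a command line like `bzr log -r -10.. src/foo.c`, the helper above might
# return an info_list of
#
#   [('src/foo.c', 'foo.c-20090618034902-abc123-1', 'file')]
#
# with the branch read-locked until the queued cleanup releases it.

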
def _get_kind_for_file_id(tree, file_id):
    """Return the kind of a file-id or None if it doesn't exist."""
    if file_id is not None:
        return tree.kind(file_id)
    else:
        return None


properties_handler_registry = registry.Registry()

# Use the properties handlers to print out bug information if available
def _bugs_properties_handler(revision):
    if revision.properties.has_key('bugs'):
        bug_lines = revision.properties['bugs'].split('\n')
        bug_rows = [line.split(' ', 1) for line in bug_lines]
        fixed_bug_urls = [row[0] for row in bug_rows if
                          len(row) > 1 and row[1] == 'fixed']

        if fixed_bug_urls:
            return {ngettext('fixes bug', 'fixes bugs', len(fixed_bug_urls)):\
                    ' '.join(fixed_bug_urls)}
    return {}

properties_handler_registry.register('bugs_properties_handler',
                                     _bugs_properties_handler)


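# Illustrative example (hypothetical values): a revision whose 'bugs' property
# is 'https://bugs.example.org/123 fixed\nhttps://bugs.example.org/124 wontfix'
# makes the handler above return
#
#   {'fixes bug': 'https://bugs.example.org/123'}

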
# Adapters through which the revision ids to be logged are filtered. When log
# is called, the log_rev_iterator is adapted through each of these factory
# methods. Plugins are welcome to mutate this list in any way they like - as
# long as the overall behaviour is preserved. At this point there is no
# extensible mechanism for getting parameters to each factory method, and
# until there is this won't be considered a stable api.
log_adapters = [
    # core log logic
    _make_batch_filter,
    # read revision objects
    _make_revision_objects,
    # filter on log messages
    _make_search_filter,
    # generate deltas for things we will show