def show_log(branch, lf, specific_fileid=None, verbose=False,
             direction='reverse', start_revision=None, end_revision=None,
             search=None, limit=None, show_diff=False):
    """Write out human-readable log of commits to this branch.

    This function is being retained for backwards compatibility but
    should not be extended with new parameters. Use the new Logger class
    instead, e.g. Logger(branch, rqst).show(lf), adding parameters to the
    make_log_request_dict function.

    :param lf: The LogFormatter object showing the output.

    :param specific_fileid: If not None, list only the commits affecting the
        specified file, rather than all commits.

    :param verbose: If True, show added/changed/deleted/renamed files.

    :param direction: 'reverse' (default) is latest to earliest; 'forward' is
        earliest to latest.

    :param start_revision: If not None, only show revisions >= start_revision

    :param end_revision: If not None, only show revisions <= end_revision

    :param search: If not None, only show revisions with matching commit
        messages

    :param limit: If set, shows only 'limit' revisions, all revisions are shown
        if None or 0.

    :param show_diff: If True, output a diff after each revision.
    """
    # Convert old-style parameters to new-style parameters
    if specific_fileid is not None:
        file_ids = [specific_fileid]
    else:
        file_ids = None
    if verbose:
        if file_ids:
            delta_type = 'partial'
        else:
            delta_type = 'full'
    else:
        delta_type = None
    if show_diff:
        if file_ids:
            diff_type = 'partial'
        else:
            diff_type = 'full'
    else:
        diff_type = None

    # Build the request and execute it
    rqst = make_log_request_dict(direction=direction, specific_fileids=file_ids,
        start_revision=start_revision, end_revision=end_revision,
        limit=limit, message_search=search,
        delta_type=delta_type, diff_type=diff_type)
    Logger(branch, rqst).show(lf)


# Note: This needs to be kept in sync with the defaults in
# make_log_request_dict() below
_DEFAULT_REQUEST_PARAMS = {
    'direction': 'reverse',
    'levels': 1,
    'generate_tags': True,
    'exclude_common_ancestry': False,
    '_match_using_deltas': True,
    }


def make_log_request_dict(direction='reverse', specific_fileids=None,
                          start_revision=None, end_revision=None, limit=None,
                          message_search=None, levels=1, generate_tags=True,
                          delta_type=None,
                          diff_type=None, _match_using_deltas=True,
                          exclude_common_ancestry=False,
                          ):
    """Convenience function for making a logging request dictionary.

    Using this function may make code slightly safer by ensuring
    parameters have the correct names. It also provides a reference
    point for documenting the supported parameters.

    :param direction: 'reverse' (default) is latest to earliest;
      'forward' is earliest to latest.

    :param specific_fileids: If not None, only include revisions
      affecting the specified files, rather than all revisions.

    :param start_revision: If not None, only generate
      revisions >= start_revision

    :param end_revision: If not None, only generate
      revisions <= end_revision

    :param limit: If set, generate only 'limit' revisions, all revisions
      are shown if None or 0.

    :param message_search: If not None, only include revisions with
      matching commit messages

    :param levels: the number of levels of revisions to
      generate; 1 for just the mainline; 0 for all levels.

    :param generate_tags: If True, include tags for matched revisions.

    :param delta_type: Either 'full', 'partial' or None.
      'full' means generate the complete delta - adds/deletes/modifies/etc;
      'partial' means filter the delta using specific_fileids;
      None means do not generate any delta.

    :param diff_type: Either 'full', 'partial' or None.
      'full' means generate the complete diff - adds/deletes/modifies/etc;
      'partial' means filter the diff using specific_fileids;
      None means do not generate any diff.

    :param _match_using_deltas: a private parameter controlling the
      algorithm used for matching specific_fileids. This parameter
      may be removed in the future so bzrlib client code should NOT
      use it.

    :param exclude_common_ancestry: Whether -rX..Y should be interpreted as a
      range operator or as a graph difference.
    """
    return {
        'direction': direction,
        'specific_fileids': specific_fileids,
        'start_revision': start_revision,
        'end_revision': end_revision,
        'limit': limit,
        'message_search': message_search,
        'levels': levels,
        'generate_tags': generate_tags,
        'delta_type': delta_type,
        'diff_type': diff_type,
        'exclude_common_ancestry': exclude_common_ancestry,
        # Add 'private' attributes for features that may be deprecated
        '_match_using_deltas': _match_using_deltas,
        }
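

# Illustrative sketch (not part of the original module): the new-style calling
# convention recommended by show_log()'s docstring above.  `branch` is assumed
# to be an open Branch and `lf` a LogFormatter instance.
def _example_new_style_log(branch, lf):
    rqst = make_log_request_dict(direction='reverse', limit=10,
                                 delta_type='full', levels=1)
    Logger(branch, rqst).show(lf)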


def _apply_log_request_defaults(rqst):
    """Apply default values to a request dictionary."""
    result = _DEFAULT_REQUEST_PARAMS
    if rqst:
        result = result.copy()
        result.update(rqst)
    return result


class LogGenerator(object):
    """A generator of log revisions."""

    def iter_log_revisions(self):
        """Iterate over LogRevision objects.

        :return: An iterator yielding LogRevision objects.
        """
        raise NotImplementedError(self.iter_log_revisions)


class Logger(object):
    """An object that generates, formats and displays a log."""

    def __init__(self, branch, rqst):
        """Create a Logger.

        :param branch: the branch to log
        :param rqst: A dictionary specifying the query parameters.
          See make_log_request_dict() for supported values.
        """
        self.branch = branch
        self.rqst = _apply_log_request_defaults(rqst)

    def show(self, lf):
        """Display the log.

        :param lf: The LogFormatter object to send the output to.
        """
        if not isinstance(lf, LogFormatter):
            warn("not a LogFormatter instance: %r" % lf)

        self.branch.lock_read()
        try:
            if getattr(lf, 'begin_log', None):
                lf.begin_log()
            self._show_body(lf)
            if getattr(lf, 'end_log', None):
                lf.end_log()
        finally:
            self.branch.unlock()

    def _show_body(self, lf):
        """Show the main log output.

        Subclasses may wish to override this.
        """
        # Tweak the LogRequest based on what the LogFormatter can handle.
        # (There's no point generating stuff if the formatter can't display it.)
        rqst = self.rqst
        rqst['levels'] = lf.get_levels()
        if not getattr(lf, 'supports_tags', False):
            rqst['generate_tags'] = False
        if not getattr(lf, 'supports_delta', False):
            rqst['delta_type'] = None
        if not getattr(lf, 'supports_diff', False):
            rqst['diff_type'] = None

        # Find and print the interesting revisions
        generator = self._generator_factory(self.branch, rqst)
        for lr in generator.iter_log_revisions():
            lf.log_revision(lr)
        lf.show_advice()

    def _generator_factory(self, branch, rqst):
        """Make the LogGenerator object to use.

        Subclasses may wish to override this.
        """
        return _DefaultLogGenerator(branch, rqst)
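

# Illustrative sketch (not part of the original module): the _generator_factory
# docstring above says subclasses may override it.  `_ExampleLogger` is a
# hypothetical subclass; returning _DefaultLogGenerator keeps stock behaviour,
# and any LogGenerator-compatible object could be swapped in instead.
class _ExampleLogger(Logger):

    def _generator_factory(self, branch, rqst):
        # Plug in a custom LogGenerator here if needed.
        return _DefaultLogGenerator(branch, rqst)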


class _StartNotLinearAncestor(Exception):
    """Raised when a start revision is not found walking left-hand history."""


class _DefaultLogGenerator(LogGenerator):
    """The default generator of log revisions."""

    def __init__(self, branch, rqst):
        self.branch = branch
        self.rqst = rqst
        if rqst.get('generate_tags') and branch.supports_tags():
            self.rev_tag_dict = branch.tags.get_reverse_tag_dict()
        else:
            self.rev_tag_dict = {}

    def iter_log_revisions(self):
        """Iterate over LogRevision objects.

        :return: An iterator yielding LogRevision objects.
        """
        rqst = self.rqst
        levels = rqst.get('levels')
        limit = rqst.get('limit')
        diff_type = rqst.get('diff_type')
        log_count = 0
        revision_iterator = self._create_log_revision_iterator()
        for revs in revision_iterator:
            for (rev_id, revno, merge_depth), rev, delta in revs:
                # 0 levels means show everything; merge_depth counts from 0
                if levels != 0 and merge_depth >= levels:
                    continue
                if diff_type is None:
                    diff = None
                else:
                    diff = self._format_diff(rev, rev_id, diff_type)
                yield LogRevision(rev, revno, merge_depth, delta,
                    self.rev_tag_dict.get(rev_id), diff)
                if limit:
                    log_count += 1
                    if log_count >= limit:
                        return

    def _format_diff(self, rev, rev_id, diff_type):
        repo = self.branch.repository
        if len(rev.parent_ids) == 0:
            ancestor_id = _mod_revision.NULL_REVISION
        else:
            ancestor_id = rev.parent_ids[0]
        tree_1 = repo.revision_tree(ancestor_id)
        tree_2 = repo.revision_tree(rev_id)
        file_ids = self.rqst.get('specific_fileids')
        if diff_type == 'partial' and file_ids is not None:
            specific_files = [tree_2.id2path(id) for id in file_ids]
        else:
            specific_files = None
        s = StringIO()
        path_encoding = osutils.get_diff_header_encoding()
        diff.show_diff_trees(tree_1, tree_2, s, specific_files, old_label='',
            new_label='', path_encoding=path_encoding)
        return s.getvalue()

    def _create_log_revision_iterator(self):
        """Create a revision iterator for log.

        :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
            delta).
        """
        self.start_rev_id, self.end_rev_id = _get_revision_limits(
            self.branch, self.rqst.get('start_revision'),
            self.rqst.get('end_revision'))
        if self.rqst.get('_match_using_deltas'):
            return self._log_revision_iterator_using_delta_matching()
        else:
            # We're using the per-file-graph algorithm. This scales really
            # well but only makes sense if there is a single file and it's
            # not a directory
            file_count = len(self.rqst.get('specific_fileids'))
            if file_count != 1:
                raise BzrError("illegal LogRequest: must match-using-deltas "
                    "when logging %d files" % file_count)
            return self._log_revision_iterator_using_per_file_graph()

    def _log_revision_iterator_using_delta_matching(self):
        # Get the base revisions, filtering by the revision range
        rqst = self.rqst
        generate_merge_revisions = rqst.get('levels') != 1
        delayed_graph_generation = not rqst.get('specific_fileids') and (
                rqst.get('limit') or self.start_rev_id or self.end_rev_id)
        view_revisions = _calc_view_revisions(
            self.branch, self.start_rev_id, self.end_rev_id,
            rqst.get('direction'),
            generate_merge_revisions=generate_merge_revisions,
            delayed_graph_generation=delayed_graph_generation,
            exclude_common_ancestry=rqst.get('exclude_common_ancestry'))

        # Apply the other filters
        return make_log_rev_iterator(self.branch, view_revisions,
            rqst.get('delta_type'), rqst.get('message_search'),
            file_ids=rqst.get('specific_fileids'),
            direction=rqst.get('direction'))

    def _log_revision_iterator_using_per_file_graph(self):
        # Get the base revisions, filtering by the revision range.
        # Note that we always generate the merge revisions because
        # filter_revisions_touching_file_id() requires them ...
        rqst = self.rqst
        view_revisions = _calc_view_revisions(
            self.branch, self.start_rev_id, self.end_rev_id,
            rqst.get('direction'), generate_merge_revisions=True,
            exclude_common_ancestry=rqst.get('exclude_common_ancestry'))
        if not isinstance(view_revisions, list):
            view_revisions = list(view_revisions)
        view_revisions = _filter_revisions_touching_file_id(self.branch,
            rqst.get('specific_fileids')[0], view_revisions,
            include_merges=rqst.get('levels') != 1)
        return make_log_rev_iterator(self.branch, view_revisions,
            rqst.get('delta_type'), rqst.get('message_search'))
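

# Illustrative sketch (not part of the original module): driving the generator
# directly instead of going through Logger.show().  `branch` is assumed to be
# an open Branch; printing to stdout is purely for illustration.
def _example_print_summaries(branch):
    rqst = make_log_request_dict(limit=5, levels=0)
    branch.lock_read()
    try:
        for lr in _DefaultLogGenerator(branch, rqst).iter_log_revisions():
            print '%s%s %s' % ('  ' * lr.merge_depth, lr.revno,
                               lr.rev.get_summary())
    finally:
        branch.unlock()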
498
def _calc_view_revisions(branch, start_rev_id, end_rev_id, direction,
499
generate_merge_revisions,
500
delayed_graph_generation=False,
501
exclude_common_ancestry=False,
503
"""Calculate the revisions to view.
505
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
506
a list of the same tuples.
508
if (exclude_common_ancestry and start_rev_id == end_rev_id):
509
raise errors.BzrCommandError(
510
'--exclude-common-ancestry requires two different revisions')
511
if direction not in ('reverse', 'forward'):
512
raise ValueError('invalid direction %r' % direction)
513
br_revno, br_rev_id = branch.last_revision_info()
517
if (end_rev_id and start_rev_id == end_rev_id
518
and (not generate_merge_revisions
519
or not _has_merges(branch, end_rev_id))):
520
# If a single revision is requested, check we can handle it
521
iter_revs = _generate_one_revision(branch, end_rev_id, br_rev_id,
523
elif not generate_merge_revisions:
524
# If we only want to see linear revisions, we can iterate ...
525
iter_revs = _generate_flat_revisions(branch, start_rev_id, end_rev_id,
526
direction, exclude_common_ancestry)
527
if direction == 'forward':
528
iter_revs = reversed(iter_revs)
530
iter_revs = _generate_all_revisions(branch, start_rev_id, end_rev_id,
531
direction, delayed_graph_generation,
532
exclude_common_ancestry)
533
if direction == 'forward':
534
iter_revs = _rebase_merge_depth(reverse_by_depth(list(iter_revs)))
538
def _generate_one_revision(branch, rev_id, br_rev_id, br_revno):
539
if rev_id == br_rev_id:
541
return [(br_rev_id, br_revno, 0)]
543
revno = branch.revision_id_to_dotted_revno(rev_id)
544
revno_str = '.'.join(str(n) for n in revno)
545
return [(rev_id, revno_str, 0)]
548
def _generate_flat_revisions(branch, start_rev_id, end_rev_id, direction,
549
exclude_common_ancestry=False):
550
result = _linear_view_revisions(
551
branch, start_rev_id, end_rev_id,
552
exclude_common_ancestry=exclude_common_ancestry)
553
# If a start limit was given and it's not obviously an
554
# ancestor of the end limit, check it before outputting anything
555
if direction == 'forward' or (start_rev_id
556
and not _is_obvious_ancestor(branch, start_rev_id, end_rev_id)):
558
result = list(result)
559
except _StartNotLinearAncestor:
560
raise errors.BzrCommandError('Start revision not found in'
561
' left-hand history of end revision.')
565
def _generate_all_revisions(branch, start_rev_id, end_rev_id, direction,
566
delayed_graph_generation,
567
exclude_common_ancestry=False):
568
# On large trees, generating the merge graph can take 30-60 seconds
569
# so we delay doing it until a merge is detected, incrementally
570
# returning initial (non-merge) revisions while we can.
572
# The above is only true for old formats (<= 0.92), for newer formats, a
573
# couple of seconds only should be needed to load the whole graph and the
574
# other graph operations needed are even faster than that -- vila 100201
575
initial_revisions = []
576
if delayed_graph_generation:
578
for rev_id, revno, depth in _linear_view_revisions(
579
branch, start_rev_id, end_rev_id, exclude_common_ancestry):
580
if _has_merges(branch, rev_id):
581
# The end_rev_id can be nested down somewhere. We need an
582
# explicit ancestry check. There is an ambiguity here as we
583
# may not raise _StartNotLinearAncestor for a revision that
584
# is an ancestor but not a *linear* one. But since we have
585
# loaded the graph to do the check (or calculate a dotted
586
# revno), we may as well accept to show the log... We need
587
# the check only if start_rev_id is not None as all
588
# revisions have _mod_revision.NULL_REVISION as an ancestor
590
graph = branch.repository.get_graph()
591
if (start_rev_id is not None
592
and not graph.is_ancestor(start_rev_id, end_rev_id)):
593
raise _StartNotLinearAncestor()
594
# Since we collected the revisions so far, we need to
599
initial_revisions.append((rev_id, revno, depth))
601
# No merged revisions found
602
return initial_revisions
603
except _StartNotLinearAncestor:
604
# A merge was never detected so the lower revision limit can't
605
# be nested down somewhere
606
raise errors.BzrCommandError('Start revision not found in'
607
' history of end revision.')
609
# We exit the loop above because we encounter a revision with merges, from
610
# this revision, we need to switch to _graph_view_revisions.
612
# A log including nested merges is required. If the direction is reverse,
613
# we rebase the initial merge depths so that the development line is
614
# shown naturally, i.e. just like it is for linear logging. We can easily
615
# make forward the exact opposite display, but showing the merge revisions
616
# indented at the end seems slightly nicer in that case.
617
view_revisions = chain(iter(initial_revisions),
618
_graph_view_revisions(branch, start_rev_id, end_rev_id,
619
rebase_initial_depths=(direction == 'reverse'),
620
exclude_common_ancestry=exclude_common_ancestry))
621
return view_revisions
624
def _has_merges(branch, rev_id):
625
"""Does a revision have multiple parents or not?"""
626
parents = branch.repository.get_parent_map([rev_id]).get(rev_id, [])
627
return len(parents) > 1


def _is_obvious_ancestor(branch, start_rev_id, end_rev_id):
    """Is start_rev_id an obvious ancestor of end_rev_id?"""
    if start_rev_id and end_rev_id:
        start_dotted = branch.revision_id_to_dotted_revno(start_rev_id)
        end_dotted = branch.revision_id_to_dotted_revno(end_rev_id)
        if len(start_dotted) == 1 and len(end_dotted) == 1:
            # both on mainline
            return start_dotted[0] <= end_dotted[0]
        elif (len(start_dotted) == 3 and len(end_dotted) == 3 and
              start_dotted[0:1] == end_dotted[0:1]):
            # both on same development line
            return start_dotted[2] <= end_dotted[2]
        else:
            # not obvious
            return False
    # if either start or end is not specified then we use either the first or
    # the last revision and *they* are obvious ancestors.
    return True
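
# Illustrative examples (not part of the original module) of the dotted-revno
# shortcuts above:
#   start '3',     end '7'      -> both mainline, 3 <= 7, obviously an ancestor
#   start '3.1.2', end '3.1.5'  -> same development line, 2 <= 5, obvious
#   start '3.1.2', end '4.2.1'  -> different lines: not obvious, so the caller
#                                  falls back to walking the left-hand history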
650
def _linear_view_revisions(branch, start_rev_id, end_rev_id,
651
exclude_common_ancestry=False):
652
"""Calculate a sequence of revisions to view, newest to oldest.
654
:param start_rev_id: the lower revision-id
655
:param end_rev_id: the upper revision-id
656
:param exclude_common_ancestry: Whether the start_rev_id should be part of
657
the iterated revisions.
658
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
659
:raises _StartNotLinearAncestor: if a start_rev_id is specified but
660
is not found walking the left-hand history
662
br_revno, br_rev_id = branch.last_revision_info()
663
repo = branch.repository
664
if start_rev_id is None and end_rev_id is None:
666
for revision_id in repo.iter_reverse_revision_history(br_rev_id):
667
yield revision_id, str(cur_revno), 0
670
if end_rev_id is None:
671
end_rev_id = br_rev_id
672
found_start = start_rev_id is None
673
for revision_id in repo.iter_reverse_revision_history(end_rev_id):
674
revno = branch.revision_id_to_dotted_revno(revision_id)
675
revno_str = '.'.join(str(n) for n in revno)
676
if not found_start and revision_id == start_rev_id:
677
if not exclude_common_ancestry:
678
yield revision_id, revno_str, 0
682
yield revision_id, revno_str, 0
685
raise _StartNotLinearAncestor()
688
def _graph_view_revisions(branch, start_rev_id, end_rev_id,
689
rebase_initial_depths=True,
690
exclude_common_ancestry=False):
691
"""Calculate revisions to view including merges, newest to oldest.
693
:param branch: the branch
694
:param start_rev_id: the lower revision-id
695
:param end_rev_id: the upper revision-id
696
:param rebase_initial_depth: should depths be rebased until a mainline
698
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
700
if exclude_common_ancestry:
701
stop_rule = 'with-merges-without-common-ancestry'
703
stop_rule = 'with-merges'
704
view_revisions = branch.iter_merge_sorted_revisions(
705
start_revision_id=end_rev_id, stop_revision_id=start_rev_id,
707
if not rebase_initial_depths:
708
for (rev_id, merge_depth, revno, end_of_merge
710
yield rev_id, '.'.join(map(str, revno)), merge_depth
712
# We're following a development line starting at a merged revision.
713
# We need to adjust depths down by the initial depth until we find
714
# a depth less than it. Then we use that depth as the adjustment.
715
# If and when we reach the mainline, depth adjustment ends.
716
depth_adjustment = None
717
for (rev_id, merge_depth, revno, end_of_merge
719
if depth_adjustment is None:
720
depth_adjustment = merge_depth
722
if merge_depth < depth_adjustment:
723
                    # From now on we reduce the depth adjustment, this can be
724
# surprising for users. The alternative requires two passes
725
# which breaks the fast display of the first revision
727
depth_adjustment = merge_depth
728
merge_depth -= depth_adjustment
729
yield rev_id, '.'.join(map(str, revno)), merge_depth
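
# Illustrative note (not part of the original module): with
# rebase_initial_depths=True and a log that starts inside a merge, raw
# merge-sorted depths such as
#     2, 3, 3, 2, 1, 0, 1, 0
# are shifted down by the first depth seen (2), and the adjustment shrinks
# whenever a shallower revision appears, yielding
#     0, 1, 1, 0, 0, 0, 1, 0
# so the followed development line reads like an ordinary linear log.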
732
@deprecated_function(deprecated_in((2, 2, 0)))
733
def calculate_view_revisions(branch, start_revision, end_revision, direction,
734
specific_fileid, generate_merge_revisions):
735
"""Calculate the revisions to view.
737
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
738
a list of the same tuples.
740
start_rev_id, end_rev_id = _get_revision_limits(branch, start_revision,
742
view_revisions = list(_calc_view_revisions(branch, start_rev_id, end_rev_id,
743
direction, generate_merge_revisions or specific_fileid))
744
125
if specific_fileid:
745
view_revisions = _filter_revisions_touching_file_id(branch,
746
specific_fileid, view_revisions,
747
include_merges=generate_merge_revisions)
748
return _rebase_merge_depth(view_revisions)


def _rebase_merge_depth(view_revisions):
    """Adjust depths upwards so the top level is 0."""
    # If either the first or last revision have a merge_depth of 0, we're done
    if view_revisions and view_revisions[0][2] and view_revisions[-1][2]:
        min_depth = min([d for r,n,d in view_revisions])
        if min_depth != 0:
            view_revisions = [(r,n,d-min_depth) for r,n,d in view_revisions]
    return view_revisions
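
# Illustrative example (not part of the original module): a view whose
# shallowest entries are at depth 1, e.g.
#     [(rev_a, '1.2.3', 1), (rev_b, '1.2.2', 2), (rev_c, '1.2.1', 1)]
# has min_depth 1 and is rebased to
#     [(rev_a, '1.2.3', 0), (rev_b, '1.2.2', 1), (rev_c, '1.2.1', 0)]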


def make_log_rev_iterator(branch, view_revisions, generate_delta, search,
    file_ids=None, direction='reverse'):
    """Create a revision iterator for log.

    :param branch: The branch being logged.
    :param view_revisions: The revisions being viewed.
    :param generate_delta: Whether to generate a delta for each revision.
      Permitted values are None, 'full' and 'partial'.
    :param search: A user text search string.
    :param file_ids: If non empty, only revisions matching one or more of
      the file-ids are to be kept.
    :param direction: the direction in which view_revisions is sorted
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    # Convert view_revisions into (view, None, None) groups to fit with
    # the standard interface here.
    if type(view_revisions) == list:
        # A single batch conversion is faster than many incremental ones.
        # As we have all the data, do a batch conversion.
        nones = [None] * len(view_revisions)
        log_rev_iterator = iter([zip(view_revisions, nones, nones)])
    else:
        def _convert():
            for view in view_revisions:
                yield (view, None, None)
        log_rev_iterator = iter([_convert()])
    for adapter in log_adapters:
        # It would be nicer if log adapters were first class objects
        # with custom parameters. This will do for now. IGC 20090127
        if adapter == _make_delta_filter:
            log_rev_iterator = adapter(branch, generate_delta,
                search, log_rev_iterator, file_ids, direction)
        else:
            log_rev_iterator = adapter(branch, generate_delta,
                search, log_rev_iterator)
    return log_rev_iterator
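

# Illustrative sketch (not part of the original module): the calling convention
# the adapter loop above expects.  `_drop_empty_messages` is hypothetical, not
# a bzrlib API; it would have to be appended to log_adapters to take effect.
# Depending on where it sits in the chain, `rev` may still be None, hence the
# guard.
def _drop_empty_messages(branch, generate_delta, search, log_rev_iterator):
    def _filtered():
        for revs in log_rev_iterator:
            yield [item for item in revs
                   if item[1] is None or item[1].message]
    return _filtered()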
800
def _make_search_filter(branch, generate_delta, search, log_rev_iterator):
801
"""Create a filtered iterator of log_rev_iterator matching on a regex.
803
:param branch: The branch being logged.
804
:param generate_delta: Whether to generate a delta for each revision.
805
:param search: A user text search string.
806
:param log_rev_iterator: An input iterator containing all revisions that
807
could be displayed, in lists.
808
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
812
return log_rev_iterator
813
searchRE = re.compile(search, re.IGNORECASE)
814
return _filter_message_re(searchRE, log_rev_iterator)
817
def _filter_message_re(searchRE, log_rev_iterator):
818
for revs in log_rev_iterator:
820
for (rev_id, revno, merge_depth), rev, delta in revs:
821
if searchRE.search(rev.message):
822
new_revs.append(((rev_id, revno, merge_depth), rev, delta))
826
def _make_delta_filter(branch, generate_delta, search, log_rev_iterator,
827
fileids=None, direction='reverse'):
828
"""Add revision deltas to a log iterator if needed.
830
:param branch: The branch being logged.
831
:param generate_delta: Whether to generate a delta for each revision.
832
Permitted values are None, 'full' and 'partial'.
833
:param search: A user text search string.
834
:param log_rev_iterator: An input iterator containing all revisions that
835
could be displayed, in lists.
836
:param fileids: If non empty, only revisions matching one or more of
837
the file-ids are to be kept.
838
:param direction: the direction in which view_revisions is sorted
839
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
842
if not generate_delta and not fileids:
843
return log_rev_iterator
844
return _generate_deltas(branch.repository, log_rev_iterator,
845
generate_delta, fileids, direction)
848
def _generate_deltas(repository, log_rev_iterator, delta_type, fileids,
850
"""Create deltas for each batch of revisions in log_rev_iterator.
852
If we're only generating deltas for the sake of filtering against
853
file-ids, we stop generating deltas once all file-ids reach the
854
appropriate life-cycle point. If we're receiving data newest to
855
oldest, then that life-cycle point is 'add', otherwise it's 'remove'.
857
check_fileids = fileids is not None and len(fileids) > 0
859
fileid_set = set(fileids)
860
if direction == 'reverse':
866
for revs in log_rev_iterator:
867
# If we were matching against fileids and we've run out,
868
# there's nothing left to do
869
if check_fileids and not fileid_set:
871
revisions = [rev[1] for rev in revs]
873
if delta_type == 'full' and not check_fileids:
874
deltas = repository.get_deltas_for_revisions(revisions)
875
for rev, delta in izip(revs, deltas):
876
new_revs.append((rev[0], rev[1], delta))
878
deltas = repository.get_deltas_for_revisions(revisions, fileid_set)
879
for rev, delta in izip(revs, deltas):
881
if delta is None or not delta.has_changed():
884
_update_fileids(delta, fileid_set, stop_on)
885
if delta_type is None:
887
elif delta_type == 'full':
888
# If the file matches all the time, rebuilding
889
# a full delta like this in addition to a partial
890
# one could be slow. However, it's likely that
891
# most revisions won't get this far, making it
892
# faster to filter on the partial deltas and
893
# build the occasional full delta than always
894
# building full deltas and filtering those.
896
delta = repository.get_revision_delta(rev_id)
897
new_revs.append((rev[0], rev[1], delta))


def _update_fileids(delta, fileids, stop_on):
    """Update the set of file-ids to search based on file lifecycle events.

    :param fileids: a set of fileids to update
    :param stop_on: either 'add' or 'remove' - take file-ids out of the
      fileids set once their add or remove entry is detected respectively
    """
    if stop_on == 'add':
        for item in delta.added:
            if item[1] in fileids:
                fileids.remove(item[1])
    elif stop_on == 'delete':
        for item in delta.removed:
            if item[1] in fileids:
                fileids.remove(item[1])
918
def _make_revision_objects(branch, generate_delta, search, log_rev_iterator):
919
"""Extract revision objects from the repository
921
:param branch: The branch being logged.
922
:param generate_delta: Whether to generate a delta for each revision.
923
:param search: A user text search string.
924
:param log_rev_iterator: An input iterator containing all revisions that
925
could be displayed, in lists.
926
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
929
repository = branch.repository
930
for revs in log_rev_iterator:
931
# r = revision_id, n = revno, d = merge depth
932
revision_ids = [view[0] for view, _, _ in revs]
933
revisions = repository.get_revisions(revision_ids)
934
revs = [(rev[0], revision, rev[2]) for rev, revision in
935
izip(revs, revisions)]
939
def _make_batch_filter(branch, generate_delta, search, log_rev_iterator):
940
"""Group up a single large batch into smaller ones.
942
:param branch: The branch being logged.
943
:param generate_delta: Whether to generate a delta for each revision.
944
:param search: A user text search string.
945
:param log_rev_iterator: An input iterator containing all revisions that
946
could be displayed, in lists.
947
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
950
repository = branch.repository
952
for batch in log_rev_iterator:
955
step = [detail for _, detail in zip(range(num), batch)]
959
num = min(int(num * 1.5), 200)
962
def _get_revision_limits(branch, start_revision, end_revision):
963
"""Get and check revision limits.
965
:param branch: The branch containing the revisions.
967
:param start_revision: The first revision to be logged.
968
For backwards compatibility this may be a mainline integer revno,
969
but for merge revision support a RevisionInfo is expected.
971
:param end_revision: The last revision to be logged.
972
For backwards compatibility this may be a mainline integer revno,
973
but for merge revision support a RevisionInfo is expected.
975
:return: (start_rev_id, end_rev_id) tuple.
977
branch_revno, branch_rev_id = branch.last_revision_info()
979
if start_revision is None:
982
if isinstance(start_revision, revisionspec.RevisionInfo):
983
start_rev_id = start_revision.rev_id
984
start_revno = start_revision.revno or 1
986
branch.check_real_revno(start_revision)
987
start_revno = start_revision
988
start_rev_id = branch.get_rev_id(start_revno)
991
if end_revision is None:
992
end_revno = branch_revno
994
if isinstance(end_revision, revisionspec.RevisionInfo):
995
end_rev_id = end_revision.rev_id
996
end_revno = end_revision.revno or branch_revno
998
branch.check_real_revno(end_revision)
999
end_revno = end_revision
1000
end_rev_id = branch.get_rev_id(end_revno)
1002
if branch_revno != 0:
1003
if (start_rev_id == _mod_revision.NULL_REVISION
1004
or end_rev_id == _mod_revision.NULL_REVISION):
1005
raise errors.BzrCommandError('Logging revision 0 is invalid.')
1006
if start_revno > end_revno:
1007
raise errors.BzrCommandError("Start revision must be older than "
1008
"the end revision.")
1009
return (start_rev_id, end_rev_id)
1012
def _get_mainline_revs(branch, start_revision, end_revision):
1013
"""Get the mainline revisions from the branch.
1015
Generates the list of mainline revisions for the branch.
1017
:param branch: The branch containing the revisions.
1019
:param start_revision: The first revision to be logged.
1020
For backwards compatibility this may be a mainline integer revno,
1021
but for merge revision support a RevisionInfo is expected.
1023
:param end_revision: The last revision to be logged.
1024
For backwards compatibility this may be a mainline integer revno,
1025
but for merge revision support a RevisionInfo is expected.
1027
:return: A (mainline_revs, rev_nos, start_rev_id, end_rev_id) tuple.
1029
branch_revno, branch_last_revision = branch.last_revision_info()
1030
if branch_revno == 0:
1031
return None, None, None, None
1033
# For mainline generation, map start_revision and end_revision to
1034
# mainline revnos. If the revision is not on the mainline choose the
1035
# appropriate extreme of the mainline instead - the extra will be
1037
# Also map the revisions to rev_ids, to be used in the later filtering
1040
if start_revision is None:
1043
if isinstance(start_revision, revisionspec.RevisionInfo):
1044
start_rev_id = start_revision.rev_id
1045
start_revno = start_revision.revno or 1
1047
branch.check_real_revno(start_revision)
1048
start_revno = start_revision
1051
if end_revision is None:
1052
end_revno = branch_revno
1054
if isinstance(end_revision, revisionspec.RevisionInfo):
1055
end_rev_id = end_revision.rev_id
1056
end_revno = end_revision.revno or branch_revno
1058
branch.check_real_revno(end_revision)
1059
end_revno = end_revision
1061
if ((start_rev_id == _mod_revision.NULL_REVISION)
1062
or (end_rev_id == _mod_revision.NULL_REVISION)):
1063
raise errors.BzrCommandError('Logging revision 0 is invalid.')
1064
if start_revno > end_revno:
1065
raise errors.BzrCommandError("Start revision must be older than "
1066
"the end revision.")
1068
if end_revno < start_revno:
1069
return None, None, None, None
1070
cur_revno = branch_revno
1073
for revision_id in branch.repository.iter_reverse_revision_history(
1074
branch_last_revision):
1075
if cur_revno < start_revno:
1076
# We have gone far enough, but we always add 1 more revision
1077
rev_nos[revision_id] = cur_revno
1078
mainline_revs.append(revision_id)
1080
if cur_revno <= end_revno:
1081
rev_nos[revision_id] = cur_revno
1082
mainline_revs.append(revision_id)
1085
# We walked off the edge of all revisions, so we add a 'None' marker
1086
mainline_revs.append(None)
1088
mainline_revs.reverse()
1090
# override the mainline to look like the revision history.
1091
return mainline_revs, rev_nos, start_rev_id, end_rev_id
1094
@deprecated_function(deprecated_in((2, 2, 0)))
1095
def _filter_revision_range(view_revisions, start_rev_id, end_rev_id):
1096
"""Filter view_revisions based on revision ranges.
1098
:param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
1099
tuples to be filtered.
1101
:param start_rev_id: If not NONE specifies the first revision to be logged.
1102
If NONE then all revisions up to the end_rev_id are logged.
1104
:param end_rev_id: If not NONE specifies the last revision to be logged.
1105
If NONE then all revisions up to the end of the log are logged.
1107
:return: The filtered view_revisions.
1109
if start_rev_id or end_rev_id:
1110
revision_ids = [r for r, n, d in view_revisions]
1112
start_index = revision_ids.index(start_rev_id)
1115
if start_rev_id == end_rev_id:
1116
end_index = start_index
1119
end_index = revision_ids.index(end_rev_id)
1121
end_index = len(view_revisions) - 1
1122
# To include the revisions merged into the last revision,
1123
# extend end_rev_id down to, but not including, the next rev
1124
# with the same or lesser merge_depth
1125
end_merge_depth = view_revisions[end_index][2]
1127
for index in xrange(end_index+1, len(view_revisions)+1):
1128
if view_revisions[index][2] <= end_merge_depth:
1129
end_index = index - 1
1132
# if the search falls off the end then log to the end as well
1133
end_index = len(view_revisions) - 1
1134
view_revisions = view_revisions[start_index:end_index+1]
1135
return view_revisions
1138
def _filter_revisions_touching_file_id(branch, file_id, view_revisions,
1139
include_merges=True):
1140
r"""Return the list of revision ids which touch a given file id.
1142
The function filters view_revisions and returns a subset.
1143
This includes the revisions which directly change the file id,
1144
and the revisions which merge these changes. So if the
1156
And 'C' changes a file, then both C and D will be returned. F will not be
1157
returned even though it brings the changes to C into the branch starting
1158
with E. (Note that if we were using F as the tip instead of G, then we
1161
This will also be restricted based on a subset of the mainline.
1163
:param branch: The branch where we can get text revision information.
1165
:param file_id: Filter out revisions that do not touch file_id.
1167
:param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
1168
tuples. This is the list of revisions which will be filtered. It is
1169
assumed that view_revisions is in merge_sort order (i.e. newest
1172
:param include_merges: include merge revisions in the result or not
1174
:return: A list of (revision_id, dotted_revno, merge_depth) tuples.
1176
# Lookup all possible text keys to determine which ones actually modified
1178
text_keys = [(file_id, rev_id) for rev_id, revno, depth in view_revisions]
1180
# Looking up keys in batches of 1000 can cut the time in half, as well as
1181
# memory consumption. GraphIndex *does* like to look for a few keys in
1182
# parallel, it just doesn't like looking for *lots* of keys in parallel.
1183
# TODO: This code needs to be re-evaluated periodically as we tune the
1184
# indexing layer. We might consider passing in hints as to the known
1185
# access pattern (sparse/clustered, high success rate/low success
1186
# rate). This particular access is clustered with a low success rate.
1187
get_parent_map = branch.repository.texts.get_parent_map
1188
modified_text_revisions = set()
1190
for start in xrange(0, len(text_keys), chunk_size):
1191
next_keys = text_keys[start:start + chunk_size]
1192
# Only keep the revision_id portion of the key
1193
modified_text_revisions.update(
1194
[k[1] for k in get_parent_map(next_keys)])
1195
del text_keys, next_keys
1198
# Track what revisions will merge the current revision, replace entries
1199
# with 'None' when they have been added to result
1200
current_merge_stack = [None]
1201
for info in view_revisions:
1202
rev_id, revno, depth = info
1203
if depth == len(current_merge_stack):
1204
current_merge_stack.append(info)
1206
del current_merge_stack[depth + 1:]
1207
current_merge_stack[-1] = info
1209
if rev_id in modified_text_revisions:
1210
# This needs to be logged, along with the extra revisions
1211
for idx in xrange(len(current_merge_stack)):
1212
node = current_merge_stack[idx]
1213
if node is not None:
1214
if include_merges or node[2] == 0:
1216
current_merge_stack[idx] = None
1220
@deprecated_function(deprecated_in((2, 2, 0)))
1221
def get_view_revisions(mainline_revs, rev_nos, branch, direction,
1222
include_merges=True):
1223
"""Produce an iterator of revisions to show
1224
:return: an iterator of (revision_id, revno, merge_depth)
1225
(if there is no revno for a revision, None is supplied)
1227
if not include_merges:
1228
revision_ids = mainline_revs[1:]
1229
if direction == 'reverse':
1230
revision_ids.reverse()
1231
for revision_id in revision_ids:
1232
yield revision_id, str(rev_nos[revision_id]), 0
1234
graph = branch.repository.get_graph()
1235
# This asks for all mainline revisions, which means we only have to spider
1236
    # sideways, rather than depth history. That said, it's still size-of-history
1237
# and should be addressed.
1238
# mainline_revisions always includes an extra revision at the beginning, so
1240
parent_map = dict(((key, value) for key, value in
1241
graph.iter_ancestry(mainline_revs[1:]) if value is not None))
1242
# filter out ghosts; merge_sort errors on ghosts.
1243
rev_graph = _mod_repository._strip_NULL_ghosts(parent_map)
1244
merge_sorted_revisions = tsort.merge_sort(
1248
generate_revno=True)
1250
if direction == 'forward':
1251
# forward means oldest first.
1252
merge_sorted_revisions = reverse_by_depth(merge_sorted_revisions)
1253
elif direction != 'reverse':
1254
raise ValueError('invalid direction %r' % direction)
1256
for (sequence, rev_id, merge_depth, revno, end_of_merge
1257
) in merge_sorted_revisions:
1258
yield rev_id, '.'.join(map(str, revno)), merge_depth
1261
def reverse_by_depth(merge_sorted_revisions, _depth=0):
1262
"""Reverse revisions by depth.
1264
Revisions with a different depth are sorted as a group with the previous
1265
revision of that depth. There may be no topological justification for this,
1266
but it looks much nicer.
1268
# Add a fake revision at start so that we can always attach sub revisions
1269
merge_sorted_revisions = [(None, None, _depth)] + merge_sorted_revisions
1271
for val in merge_sorted_revisions:
1272
if val[2] == _depth:
1273
# Each revision at the current depth becomes a chunk grouping all
1274
# higher depth revisions.
1275
zd_revisions.append([val])
1277
zd_revisions[-1].append(val)
1278
for revisions in zd_revisions:
1279
if len(revisions) > 1:
1280
            # We have higher depth revisions, let's reverse them locally
1281
revisions[1:] = reverse_by_depth(revisions[1:], _depth + 1)
1282
zd_revisions.reverse()
1284
for chunk in zd_revisions:
1285
result.extend(chunk)
1287
# Top level call, get rid of the fake revisions that have been added
1288
result = [r for r in result if r[0] is not None and r[1] is not None]
1292
class LogRevision(object):
1293
"""A revision to be logged (by LogFormatter.log_revision).
1295
A simple wrapper for the attributes of a revision to be logged.
1296
The attributes may or may not be populated, as determined by the
1297
logging options and the log formatter capabilities.
1300
def __init__(self, rev=None, revno=None, merge_depth=0, delta=None,
1301
tags=None, diff=None):
1303
self.revno = str(revno)
1304
self.merge_depth = merge_depth
1310
238
class LogFormatter(object):
1311
"""Abstract class to display log messages.
1313
At a minimum, a derived class must implement the log_revision method.
1315
If the LogFormatter needs to be informed of the beginning or end of
1316
a log it should implement the begin_log and/or end_log hook methods.
1318
A LogFormatter should define the following supports_XXX flags
1319
to indicate which LogRevision attributes it supports:
1321
- supports_delta must be True if this log formatter supports delta.
1322
Otherwise the delta attribute may not be populated. The 'delta_format'
1323
attribute describes whether the 'short_status' format (1) or the long
1324
one (2) should be used.
1326
- supports_merge_revisions must be True if this log formatter supports
1327
merge revisions. If not, then only mainline revisions will be passed
1330
- preferred_levels is the number of levels this formatter defaults to.
1331
The default value is zero meaning display all levels.
1332
This value is only relevant if supports_merge_revisions is True.
1334
- supports_tags must be True if this log formatter supports tags.
1335
Otherwise the tags attribute may not be populated.
1337
- supports_diff must be True if this log formatter supports diffs.
1338
Otherwise the diff attribute may not be populated.
1340

    Plugins can register functions to show custom revision properties using
    the properties_handler_registry. The registered function
    must respect the following interface description:

        def my_show_properties(properties_dict):
            # code that returns a dict {'name':'value'} of the properties
            # to be shown
    """
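
    # Illustrative sketch (not part of the original class): a plugin-style
    # properties handler.  The 'ticket' property and handler name are
    # hypothetical; custom_properties() below calls each registered handler
    # with the revision and formats the dict it returns.
    #
    #     def show_ticket_property(rev):
    #         ticket = rev.properties.get('ticket')
    #         return {'ticket': ticket} if ticket else {}
    #
    #     properties_handler_registry.register('ticket', show_ticket_property)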
1347
preferred_levels = 0
1349
def __init__(self, to_file, show_ids=False, show_timezone='original',
1350
delta_format=None, levels=None, show_advice=False,
1351
to_exact_file=None, author_list_handler=None):
1352
"""Create a LogFormatter.
1354
:param to_file: the file to output to
1355
:param to_exact_file: if set, gives an output stream to which
1356
non-Unicode diffs are written.
1357
:param show_ids: if True, revision-ids are to be displayed
1358
:param show_timezone: the timezone to use
1359
:param delta_format: the level of delta information to display
1360
or None to leave it to the formatter to decide
1361
:param levels: the number of levels to display; None or -1 to
1362
let the log formatter decide.
1363
:param show_advice: whether to show advice at the end of the
1365
:param author_list_handler: callable generating a list of
1366
          authors to display for a given revision
        """
1368
241
self.to_file = to_file
1369
# 'exact' stream used to show diff, it should print content 'as is'
1370
# and should not try to decode/encode it to unicode to avoid bug #328007
1371
if to_exact_file is not None:
1372
self.to_exact_file = to_exact_file
1374
# XXX: somewhat hacky; this assumes it's a codec writer; it's better
1375
# for code that expects to get diffs to pass in the exact file
1377
self.to_exact_file = getattr(to_file, 'stream', to_file)
1378
242
self.show_ids = show_ids
1379
243
self.show_timezone = show_timezone
1380
if delta_format is None:
1381
# Ensures backward compatibility
1382
delta_format = 2 # long format
1383
self.delta_format = delta_format
1384
self.levels = levels
1385
self._show_advice = show_advice
1386
self._merge_count = 0
1387
self._author_list_handler = author_list_handler
1389
def get_levels(self):
1390
"""Get the number of levels to display or 0 for all."""
1391
if getattr(self, 'supports_merge_revisions', False):
1392
if self.levels is None or self.levels == -1:
1393
self.levels = self.preferred_levels
1398
def log_revision(self, revision):
1401
:param revision: The LogRevision to be logged.
1403
raise NotImplementedError('not implemented in abstract base')
1405
def show_advice(self):
1406
"""Output user advice, if any, when the log is completed."""
1407
if self._show_advice and self.levels == 1 and self._merge_count > 0:
1408
advice_sep = self.get_advice_separator()
1410
self.to_file.write(advice_sep)
1412
"Use --include-merges or -n0 to see merged revisions.\n")
1414
def get_advice_separator(self):
1415
"""Get the text separating the log from the closing advice."""
1418
def short_committer(self, rev):
1419
name, address = config.parse_username(rev.committer)
1424
def short_author(self, rev):
1425
return self.authors(rev, 'first', short=True, sep=', ')
1427
def authors(self, rev, who, short=False, sep=None):
1428
"""Generate list of authors, taking --authors option into account.
1430
        The caller has to specify the name of an author list handler,
1431
as provided by the author list registry, using the ``who``
1432
argument. That name only sets a default, though: when the
1433
user selected a different author list generation using the
1434
``--authors`` command line switch, as represented by the
1435
        ``author_list_handler`` constructor argument, that value takes
        precedence.
1438
:param rev: The revision for which to generate the list of authors.
1439
:param who: Name of the default handler.
1440
:param short: Whether to shorten names to either name or address.
1441
:param sep: What separator to use for automatic concatenation.
1443
if self._author_list_handler is not None:
1444
# The user did specify --authors, which overrides the default
1445
author_list_handler = self._author_list_handler
1447
# The user didn't specify --authors, so we use the caller's default
1448
author_list_handler = author_list_registry.get(who)
1449
names = author_list_handler(rev)
1451
for i in range(len(names)):
1452
name, address = config.parse_username(names[i])
1458
names = sep.join(names)
1461
def merge_marker(self, revision):
1462
"""Get the merge marker to include in the output or '' if none."""
1463
if len(revision.rev.parent_ids) > 1:
1464
self._merge_count += 1
1469
def show_properties(self, revision, indent):
1470
"""Displays the custom properties returned by each registered handler.
1472
If a registered handler raises an error it is propagated.
1474
for line in self.custom_properties(revision):
1475
self.to_file.write("%s%s\n" % (indent, line))
1477
def custom_properties(self, revision):
1478
"""Format the custom properties returned by each registered handler.
1480
If a registered handler raises an error it is propagated.
1482
:return: a list of formatted lines (excluding trailing newlines)
1484
lines = self._foreign_info_properties(revision)
1485
for key, handler in properties_handler_registry.iteritems():
1486
lines.extend(self._format_properties(handler(revision)))
1489
def _foreign_info_properties(self, rev):
1490
"""Custom log displayer for foreign revision identifiers.
1492
:param rev: Revision object.
1494
# Revision comes directly from a foreign repository
1495
if isinstance(rev, foreign.ForeignRevision):
1496
return self._format_properties(
1497
rev.mapping.vcs.show_foreign_revid(rev.foreign_revid))
1499
# Imported foreign revision revision ids always contain :
1500
if not ":" in rev.revision_id:
1503
# Revision was once imported from a foreign repository
1505
foreign_revid, mapping = \
1506
foreign.foreign_vcs_registry.parse_revision_id(rev.revision_id)
1507
except errors.InvalidRevisionId:
1510
return self._format_properties(
1511
mapping.vcs.show_foreign_revid(foreign_revid))
1513
def _format_properties(self, properties):
1515
for key, value in properties.items():
1516
lines.append(key + ': ' + value)
1519
def show_diff(self, to_file, diff, indent):
1520
for l in diff.rstrip().split('\n'):
1521
to_file.write(indent + '%s\n' % (l,))
1524
# Separator between revisions in long format
1525
_LONG_SEP = '-' * 60
1528
250
class LongLogFormatter(LogFormatter):
1530
supports_merge_revisions = True
1531
preferred_levels = 1
1532
supports_delta = True
1533
supports_tags = True
1534
supports_diff = True
1536
def __init__(self, *args, **kwargs):
1537
super(LongLogFormatter, self).__init__(*args, **kwargs)
1538
if self.show_timezone == 'original':
1539
self.date_string = self._date_string_original_timezone
1541
self.date_string = self._date_string_with_timezone
1543
def _date_string_with_timezone(self, rev):
1544
return format_date(rev.timestamp, rev.timezone or 0,
1547
def _date_string_original_timezone(self, rev):
1548
return format_date_with_offset_in_original_timezone(rev.timestamp,
1551
def log_revision(self, revision):
1552
"""Log a revision, either merged or not."""
1553
indent = ' ' * revision.merge_depth
1555
if revision.revno is not None:
1556
lines.append('revno: %s%s' % (revision.revno,
1557
self.merge_marker(revision)))
1559
lines.append('tags: %s' % (', '.join(revision.tags)))
1560
258
if self.show_ids:
1561
lines.append('revision-id: %s' % (revision.rev.revision_id,))
1562
for parent_id in revision.rev.parent_ids:
1563
lines.append('parent: %s' % (parent_id,))
1564
lines.extend(self.custom_properties(revision.rev))
1566
committer = revision.rev.committer
1567
authors = self.authors(revision.rev, 'all')
1568
if authors != [committer]:
1569
lines.append('author: %s' % (", ".join(authors),))
1570
lines.append('committer: %s' % (committer,))
1572
branch_nick = revision.rev.properties.get('branch-nick', None)
1573
if branch_nick is not None:
1574
lines.append('branch nick: %s' % (branch_nick,))
1576
lines.append('timestamp: %s' % (self.date_string(revision.rev),))
1578
lines.append('message:')
1579
if not revision.rev.message:
1580
lines.append(' (no message)')
1582
message = revision.rev.message.rstrip('\r\n')
1583
for l in message.split('\n'):
1584
lines.append(' %s' % (l,))
1586
# Dump the output, appending the delta and diff if requested
1587
to_file = self.to_file
1588
to_file.write("%s%s\n" % (indent, ('\n' + indent).join(lines)))
1589
if revision.delta is not None:
1590
# Use the standard status output to display changes
1591
from bzrlib.delta import report_delta
1592
report_delta(to_file, revision.delta, short_status=False,
1593
show_ids=self.show_ids, indent=indent)
1594
if revision.diff is not None:
1595
to_file.write(indent + 'diff:\n')
1597
# Note: we explicitly don't indent the diff (relative to the
1598
# revision information) so that the output can be fed to patch -p0
1599
self.show_diff(self.to_exact_file, revision.diff, indent)
1600
self.to_exact_file.flush()
1602
def get_advice_separator(self):
1603
"""Get the text separating the log from the closing advice."""
1604
return '-' * 60 + '\n'
1607
276
class ShortLogFormatter(LogFormatter):
1609
supports_merge_revisions = True
1610
preferred_levels = 1
1611
supports_delta = True
1612
supports_tags = True
1613
supports_diff = True
1615
def __init__(self, *args, **kwargs):
1616
super(ShortLogFormatter, self).__init__(*args, **kwargs)
1617
self.revno_width_by_depth = {}
1619
def log_revision(self, revision):
1620
# We need two indents: one per depth and one for the information
1621
# relative to that indent. Most mainline revnos are 5 chars or
1622
# less while dotted revnos are typically 11 chars or less. Once
1623
# calculated, we need to remember the offset for a given depth
1624
# as we might be starting from a dotted revno in the first column
1625
# and we want subsequent mainline revisions to line up.
1626
depth = revision.merge_depth
1627
indent = ' ' * depth
1628
revno_width = self.revno_width_by_depth.get(depth)
1629
if revno_width is None:
1630
if revision.revno.find('.') == -1:
1631
# mainline revno, e.g. 12345
1634
# dotted revno, e.g. 12345.10.55
1636
self.revno_width_by_depth[depth] = revno_width
1637
offset = ' ' * (revno_width + 1)
1639
280
to_file = self.to_file
1642
tags = ' {%s}' % (', '.join(revision.tags))
1643
to_file.write(indent + "%*s %s\t%s%s%s\n" % (revno_width,
1644
revision.revno, self.short_author(revision.rev),
1645
format_date(revision.rev.timestamp,
1646
revision.rev.timezone or 0,
1647
self.show_timezone, date_fmt="%Y-%m-%d",
1649
tags, self.merge_marker(revision)))
1650
self.show_properties(revision.rev, indent+offset)
1651
285
if self.show_ids:
1652
to_file.write(indent + offset + 'revision-id:%s\n'
1653
% (revision.rev.revision_id,))
1654
if not revision.rev.message:
1655
to_file.write(indent + offset + '(no message)\n')
1657
message = revision.rev.message.rstrip('\r\n')
1658
for l in message.split('\n'):
1659
to_file.write(indent + offset + '%s\n' % (l,))
1661
if revision.delta is not None:
1662
# Use the standard status output to display changes
1663
from bzrlib.delta import report_delta
1664
report_delta(to_file, revision.delta,
1665
short_status=self.delta_format==1,
1666
show_ids=self.show_ids, indent=indent + offset)
1667
if revision.diff is not None:
1668
self.show_diff(self.to_exact_file, revision.diff, ' ')
1672

class LineLogFormatter(LogFormatter):

    supports_merge_revisions = True
    preferred_levels = 1
    supports_tags = True

    def __init__(self, *args, **kwargs):
        super(LineLogFormatter, self).__init__(*args, **kwargs)
        width = terminal_width()
        if width is not None:
            # we need one extra space for terminals that wrap on last char
            width = width - 1
        self._max_chars = width

    def truncate(self, str, max_len):
        if max_len is None or len(str) <= max_len:
            return str
        return str[:max_len-3] + '...'

    def date_string(self, rev):
        return format_date(rev.timestamp, rev.timezone or 0,
                           self.show_timezone, date_fmt="%Y-%m-%d",
                           show_offset=False)

    def message(self, rev):
        if not rev.message:
            return '(no message)'
        else:
            return rev.message

    def log_revision(self, revision):
        indent = ' ' * revision.merge_depth
        self.to_file.write(self.log_string(revision.revno, revision.rev,
            self._max_chars, revision.tags, indent))
        self.to_file.write('\n')

    def log_string(self, revno, rev, max_chars, tags=None, prefix=''):
        """Format log info into one string. Truncate tail of string

        :param revno: revision number or None.
            Revision numbers counts from 1.
        :param rev: revision object
        :param max_chars: maximum length of resulting string
        :param tags: list of tags or None
        :param prefix: string to prefix each line
        :return: formatted truncated string
        """
        out = []
        if revno:
            # show revno only when is not None
            out.append("%s:" % revno)
        out.append(self.truncate(self.short_author(rev), 20))
        out.append(self.date_string(rev))
        if len(rev.parent_ids) > 1:
            out.append('[merge]')
        if tags:
            tag_str = '{%s}' % (', '.join(tags))
            out.append(tag_str)
        out.append(rev.get_summary())
        return self.truncate(prefix + " ".join(out).rstrip('\n'), max_chars)

class GnuChangelogLogFormatter(LogFormatter):

    supports_merge_revisions = True
    supports_delta = True

    def log_revision(self, revision):
        """Log a revision, either merged or not."""
        to_file = self.to_file

        date_str = format_date(revision.rev.timestamp,
                               revision.rev.timezone or 0,
                               self.show_timezone,
                               date_fmt='%Y-%m-%d',
                               show_offset=False)
        committer_str = self.authors(revision.rev, 'first', sep=', ')
        committer_str = committer_str.replace(' <', '  <')
        to_file.write('%s  %s\n\n' % (date_str, committer_str))

        if revision.delta is not None and revision.delta.has_changed():
            for c in revision.delta.added + revision.delta.removed + revision.delta.modified:
                path, = c[:1]
                to_file.write('\t* %s:\n' % (path,))
            for c in revision.delta.renamed:
                oldpath, newpath = c[:2]
                # For renamed files, show both the old and the new path
                to_file.write('\t* %s:\n\t* %s:\n' % (oldpath, newpath))

        if not revision.rev.message:
            to_file.write('\tNo commit message\n')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                to_file.write('\t%s\n' % (l.lstrip(),))

def line_log(rev, max_chars):
    lf = LineLogFormatter(None)
    return lf.log_string(None, rev, max_chars)


class LogFormatterRegistry(registry.Registry):
    """Registry for log formatters"""

    def make_formatter(self, name, *args, **kwargs):
        """Construct a formatter from arguments.

        :param name: Name of the formatter to construct. 'short', 'long' and
            'line' are built-in.
        """
        return self.get(name)(*args, **kwargs)

    def get_default(self, branch):
        return self.get(branch.get_config().log_format())


log_formatter_registry = LogFormatterRegistry()


log_formatter_registry.register('short', ShortLogFormatter,
                                'Moderately short log format')
log_formatter_registry.register('long', LongLogFormatter,
                                'Detailed log format')
log_formatter_registry.register('line', LineLogFormatter,
                                'Log format with one line per revision')
log_formatter_registry.register('gnu-changelog', GnuChangelogLogFormatter,
                                'Format used by GNU ChangeLog files')


def register_formatter(name, formatter):
    log_formatter_registry.register(name, formatter)
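
# Illustrative sketch (not part of the original module): register_formatter()
# is how a plugin would add its own format under a new name; once registered,
# the formatter can be requested with log_formatter('wide-long') or selected
# through the branch's log_format configuration. The subclass and the name
# 'wide-long' below are made up for this example.
class _ExampleWideLongLogFormatter(LongLogFormatter):

    def get_advice_separator(self):
        # Use a wider separator than the stock 60 dashes.
        return '-' * 72 + '\n'


def _example_register_wide_long():
    # A plugin would normally do this from its __init__.
    register_formatter('wide-long', _ExampleWideLongLogFormatter)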

def log_formatter(name, *args, **kwargs):
    """Construct a formatter from arguments.

    name -- Name of the formatter to construct; currently 'long', 'short' and
        'line' are supported.
    """
    try:
        return log_formatter_registry.make_formatter(name, *args, **kwargs)
    except KeyError:
        raise errors.BzrCommandError("unknown log formatter: %r" % name)


def author_list_all(rev):
    return rev.get_apparent_authors()[:]


def author_list_first(rev):
    lst = rev.get_apparent_authors()
    try:
        return [lst[0]]
    except IndexError:
        return [rev.committer]


def author_list_committer(rev):
    return [rev.committer]
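
# Illustrative sketch (not part of the original module): the usual pairing of
# log_formatter() with show_log() to render a branch's history; 'branch' is
# assumed to be an already-opened bzrlib Branch and 'to_file' a writable file.
def _example_show_short_history(branch, to_file):
    lf = log_formatter('short', to_file=to_file, show_timezone='original')
    show_log(branch, lf, verbose=False, direction='reverse')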

author_list_registry = registry.Registry()

author_list_registry.register('all', author_list_all,
                              'All authors')

author_list_registry.register('first', author_list_first,
                              'The first author')

author_list_registry.register('committer', author_list_committer,
                              'The committer')
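
# Illustrative sketch (not part of the original module): the registered author
# policies are looked up by name and applied to a Revision to decide which
# names a formatter should print; 'all', 'first' and 'committer' are the
# built-in policies registered above.
def _example_authors_for(rev, policy='all'):
    return author_list_registry.get(policy)(rev)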

def show_one_log(revno, rev, delta, verbose, to_file, show_timezone):
    # deprecated; for compatibility
    lf = LongLogFormatter(to_file=to_file, show_timezone=show_timezone)
    lf.show(revno, rev, delta)

def show_changed_revisions(branch, old_rh, new_rh, to_file=None,
                           log_format='long'):
    """Show the change in revision history comparing the old revision history to the new one.

    :param branch: The branch where the revisions exist
    :param old_rh: The old revision history
    :param new_rh: The new revision history
    :param to_file: A file to write the results to. If None, stdout will be used
    """
    if to_file is None:
        to_file = codecs.getwriter(get_terminal_encoding())(sys.stdout,
            errors='replace')
    lf = log_formatter(log_format,
                       show_ids=False,
                       to_file=to_file,
                       show_timezone='original')

    # This is the first index which is different between
    # old and new
    base_idx = None
    for i in xrange(max(len(new_rh), len(old_rh))):
        if (len(new_rh) <= i
            or len(old_rh) <= i
            or new_rh[i] != old_rh[i]):
            base_idx = i
            break

    if base_idx is None:
        to_file.write('Nothing seems to have changed\n')
        return
    ## TODO: It might be nice to do something like show_log
    ## and show the merged entries. But since this is the
    ## removed revisions, it shouldn't be as important
    if base_idx < len(old_rh):
        to_file.write('*'*60)
        to_file.write('\nRemoved Revisions:\n')
        for i in range(base_idx, len(old_rh)):
            rev = branch.repository.get_revision(old_rh[i])
            lr = LogRevision(rev, i+1, 0, None)
            lf.log_revision(lr)
        to_file.write('*'*60)
        to_file.write('\n\n')
    if base_idx < len(new_rh):
        to_file.write('Added Revisions:\n')
        show_log(branch,
                 lf,
                 None,
                 verbose=False,
                 direction='forward',
                 start_revision=base_idx+1,
                 end_revision=len(new_rh))

def get_history_change(old_revision_id, new_revision_id, repository):
    """Calculate the uncommon lefthand history between two revisions.

    :param old_revision_id: The original revision id.
    :param new_revision_id: The new revision id.
    :param repository: The repository to use for the calculation.

    return old_history, new_history
    """
    old_history = []
    old_revisions = set()
    new_history = []
    new_revisions = set()
    new_iter = repository.iter_reverse_revision_history(new_revision_id)
    old_iter = repository.iter_reverse_revision_history(old_revision_id)
    stop_revision = None
    do_old = True
    do_new = True
    while do_new or do_old:
        if do_new:
            try:
                new_revision = new_iter.next()
            except StopIteration:
                do_new = False
            else:
                new_history.append(new_revision)
                new_revisions.add(new_revision)
                if new_revision in old_revisions:
                    stop_revision = new_revision
                    break
        if do_old:
            try:
                old_revision = old_iter.next()
            except StopIteration:
                do_old = False
            else:
                old_history.append(old_revision)
                old_revisions.add(old_revision)
                if old_revision in new_revisions:
                    stop_revision = old_revision
                    break
    new_history.reverse()
    old_history.reverse()
    if stop_revision is not None:
        new_history = new_history[new_history.index(stop_revision) + 1:]
        old_history = old_history[old_history.index(stop_revision) + 1:]
    return old_history, new_history
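
# Illustrative sketch (not part of the original module): get_history_change()
# works purely on revision ids, so a caller holding two tips can ask what is
# unique to each side; 'repository' is assumed to be read-locked already.
def _example_count_uncommon(old_tip, new_tip, repository):
    old_only, new_only = get_history_change(old_tip, new_tip, repository)
    return len(old_only), len(new_only)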

def show_branch_change(branch, output, old_revno, old_revision_id):
    """Show the changes made to a branch.

    :param branch: The branch to show changes about.
    :param output: A file-like object to write changes to.
    :param old_revno: The revno of the old tip.
    :param old_revision_id: The revision_id of the old tip.
    """
    new_revno, new_revision_id = branch.last_revision_info()
    old_history, new_history = get_history_change(old_revision_id,
                                                  new_revision_id,
                                                  branch.repository)
    if old_history == [] and new_history == []:
        output.write('Nothing seems to have changed\n')
        return

    log_format = log_formatter_registry.get_default(branch)
    lf = log_format(show_ids=False, to_file=output, show_timezone='original')
    if old_history != []:
        output.write('*'*60)
        output.write('\nRemoved Revisions:\n')
        show_flat_log(branch.repository, old_history, old_revno, lf)
        output.write('*'*60)
        output.write('\n\n')
    if new_history != []:
        output.write('Added Revisions:\n')
        start_revno = new_revno - len(new_history) + 1
        show_log(branch, lf, None, verbose=False, direction='forward',
                 start_revision=start_revno,)
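
# Illustrative sketch (not part of the original module): show_branch_change()
# is the sort of helper a post-pull hook could call, capturing the report in
# a StringIO; 'branch', 'old_revno' and 'old_revision_id' are assumed to come
# from the hook parameters.
def _example_pull_report(branch, old_revno, old_revision_id):
    from StringIO import StringIO
    out = StringIO()
    show_branch_change(branch, out, old_revno, old_revision_id)
    return out.getvalue()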

def show_flat_log(repository, history, last_revno, lf):
    """Show a simple log of the specified history.

    :param repository: The repository to retrieve revisions from.
    :param history: A list of revision_ids indicating the lefthand history.
    :param last_revno: The revno of the last revision_id in the history.
    :param lf: The log formatter to use.
    """
    start_revno = last_revno - len(history) + 1
    revisions = repository.get_revisions(history)
    for i, rev in enumerate(revisions):
        lr = LogRevision(rev, i + last_revno, 0, None)
        lf.log_revision(lr)

def _get_info_for_log_files(revisionspec_list, file_list, add_cleanup):
    """Find file-ids and kinds given a list of files and a revision range.

    We search for files at the end of the range. If not found there,
    we try the start of the range.

    :param revisionspec_list: revision range as parsed on the command line
    :param file_list: the list of paths given on the command line;
      the first of these can be a branch location or a file path,
      the remainder must be file paths
    :param add_cleanup: When the branch returned is read locked,
      an unlock call will be queued to the cleanup.
    :return: (branch, info_list, start_rev_info, end_rev_info) where
      info_list is a list of (relative_path, file_id, kind) tuples where
      kind is one of values 'directory', 'file', 'symlink', 'tree-reference'.
      branch will be read-locked.
    """
    from builtins import _get_revision_range
    tree, b, path = bzrdir.BzrDir.open_containing_tree_or_branch(file_list[0])
    add_cleanup(b.lock_read().unlock)
    # XXX: It's damn messy converting a list of paths to relative paths when
    # those paths might be deleted ones, they might be on a case-insensitive
    # filesystem and/or they might be in silly locations (like another branch).
    # For example, what should "log bzr://branch/dir/file1 file2" do? (Is
    # file2 implicitly in the same dir as file1 or should its directory be
    # taken from the current tree somehow?) For now, this solves the common
    # case of running log in a nested directory, assuming paths beyond the
    # first one haven't been deleted ...
    if tree:
        relpaths = [path] + tree.safe_relpath_files(file_list[1:])
    else:
        relpaths = [path] + file_list[1:]
    info_list = []
    start_rev_info, end_rev_info = _get_revision_range(revisionspec_list, b,
        "log")
    if relpaths in ([], [u'']):
        return b, [], start_rev_info, end_rev_info
    if start_rev_info is None and end_rev_info is None:
        if tree is None:
            tree = b.basis_tree()
        tree1 = None
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, file_id)
            if file_id is None:
                # go back to when time began
                if tree1 is None:
                    try:
                        rev1 = b.get_rev_id(1)
                    except errors.NoSuchRevision:
                        # No history at all
                        file_id = None
                        kind = None
                    else:
                        tree1 = b.repository.revision_tree(rev1)
                if tree1:
                    file_id = tree1.path2id(fp)
                    kind = _get_kind_for_file_id(tree1, file_id)
            info_list.append((fp, file_id, kind))

    elif start_rev_info == end_rev_info:
        # One revision given - file must exist in it
        tree = b.repository.revision_tree(end_rev_info.rev_id)
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, file_id)
            info_list.append((fp, file_id, kind))

    else:
        # Revision range given. Get the file-id from the end tree.
        # If that fails, try the start tree.
        rev_id = end_rev_info.rev_id
        if rev_id is None:
            tree = b.basis_tree()
        else:
            tree = b.repository.revision_tree(rev_id)
        tree1 = None
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, file_id)
            if file_id is None:
                if tree1 is None:
                    rev_id = start_rev_info.rev_id
                    if rev_id is None:
                        rev1 = b.get_rev_id(1)
                        tree1 = b.repository.revision_tree(rev1)
                    else:
                        tree1 = b.repository.revision_tree(rev_id)
                file_id = tree1.path2id(fp)
                kind = _get_kind_for_file_id(tree1, file_id)
            info_list.append((fp, file_id, kind))
    return b, info_list, start_rev_info, end_rev_info

def _get_kind_for_file_id(tree, file_id):
    """Return the kind of a file-id or None if it doesn't exist."""
    if file_id is not None:
        return tree.kind(file_id)
    else:
        return None

properties_handler_registry = registry.Registry()

# Use the properties handlers to print out bug information if available
def _bugs_properties_handler(revision):
    if revision.properties.has_key('bugs'):
        bug_lines = revision.properties['bugs'].split('\n')
        bug_rows = [line.split(' ', 1) for line in bug_lines]
        fixed_bug_urls = [row[0] for row in bug_rows if
                          len(row) > 1 and row[1] == 'fixed']

        return {'fixes bug(s)': ' '.join(fixed_bug_urls)}
    return {}

properties_handler_registry.register('bugs_properties_handler',
                                     _bugs_properties_handler)
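
# Illustrative sketch (not part of the original module): a plugin can surface
# extra revision properties in the 'short' and 'long' outputs by registering
# its own handler; the dictionary it returns is rendered by show_properties().
# 'branch-nick' is a standard bzr revision property; the handler name is made
# up for this example.
def _example_nick_properties_handler(revision):
    if revision.properties.has_key('branch-nick'):
        return {'branch nick': revision.properties['branch-nick']}
    return {}


def _example_register_nick_handler():
    # A plugin would normally do this from its __init__.
    properties_handler_registry.register('example_nick_handler',
                                         _example_nick_properties_handler)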

# Adapters through which the revision ids to log are filtered. When log is
# called, the log_rev_iterator is adapted through each of these factory
# methods. Plugins are welcome to mutate this list in any way they like - as
# long as the overall behaviour is preserved. At this point there is no
# extensible mechanism for getting parameters to each factory method, and
# until there is this won't be considered a stable api.
log_adapters = [
    # read revision objects
    _make_revision_objects,
    # filter on log messages
    _make_search_filter,
    # generate deltas for things we will show