             direction='reverse',
             start_revision=None,
             end_revision=None,
             search=None,
             limit=None,
             show_diff=False):
    """Write out human-readable log of commits to this branch.

    :param lf: The LogFormatter object showing the output.

    :param specific_fileid: If not None, list only the commits affecting the
        specified file, rather than all commits.

    :param verbose: If True show added/changed/deleted/renamed files.

    :param direction: 'reverse' (default) is latest to earliest; 'forward' is
        earliest to latest.

    :param start_revision: If not None, only show revisions >= start_revision

    :param end_revision: If not None, only show revisions <= end_revision

    :param search: If not None, only show revisions with matching commit
        messages

    :param limit: If set, shows only 'limit' revisions, all revisions are shown
        if None or 0.

    :param show_diff: If True, output a diff after each revision.
    """
    if getattr(lf, 'begin_log', None):
        lf.begin_log()

    _show_log(branch, lf, specific_fileid, verbose, direction,
              start_revision, end_revision, search, limit, show_diff)

    if getattr(lf, 'end_log', None):
        lf.end_log()
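
# Illustrative usage sketch only (not part of bzrlib): one way a caller can
# drive show_log() with a formatter obtained from log_formatter(). The
# location '.' and the 'long' format are assumptions made for the example.
def _example_show_recent_log(location='.', limit=10):
    import sys
    from bzrlib import branch as _mod_branch
    b = _mod_branch.Branch.open(location)
    lf = log_formatter('long', to_file=sys.stdout)
    b.lock_read()
    try:
        show_log(b, lf, verbose=True, direction='reverse', limit=limit)
    finally:
        b.unlock()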
def _show_log(branch,
              lf,
              specific_fileid=None,
              verbose=False,
              direction='reverse',
              start_revision=None,
              end_revision=None,
              search=None,
              limit=None,
              show_diff=False):
    """Worker function for show_log - see show_log."""
    if not isinstance(lf, LogFormatter):
        warn("not a LogFormatter instance: %r" % lf)

    trace.mutter('get log for file_id %r', specific_fileid)
206
# Consult the LogFormatter about what it needs and can handle
207
levels_to_display = lf.get_levels()
208
generate_merge_revisions = levels_to_display != 1
209
allow_single_merge_revision = True
210
if not getattr(lf, 'supports_merge_revisions', False):
211
allow_single_merge_revision = getattr(lf,
212
'supports_single_merge_revision', False)
213
generate_tags = getattr(lf, 'supports_tags', False)
214
if generate_tags and branch.supports_tags():
215
rev_tag_dict = branch.tags.get_reverse_tag_dict()
218
generate_delta = verbose and getattr(lf, 'supports_delta', False)
219
generate_diff = show_diff and getattr(lf, 'supports_diff', False)
221
# Find and print the interesting revisions
222
repo = branch.repository
224
revision_iterator = _create_log_revision_iterator(branch,
225
start_revision, end_revision, direction, specific_fileid, search,
226
generate_merge_revisions, allow_single_merge_revision,
227
generate_delta, limited_output=limit > 0)
228
for revs in revision_iterator:
229
for (rev_id, revno, merge_depth), rev, delta in revs:
230
# Note: 0 levels means show everything; merge_depth counts from 0
231
if levels_to_display != 0 and merge_depth >= levels_to_display:
234
diff = _format_diff(repo, rev, rev_id, specific_fileid)
237
lr = LogRevision(rev, revno, merge_depth, delta,
238
rev_tag_dict.get(rev_id), diff)
242
if log_count >= limit:
246
def _format_diff(repo, rev, rev_id, specific_fileid):
247
if len(rev.parent_ids) == 0:
248
ancestor_id = _mod_revision.NULL_REVISION
250
ancestor_id = rev.parent_ids[0]
251
tree_1 = repo.revision_tree(ancestor_id)
252
tree_2 = repo.revision_tree(rev_id)
254
specific_files = [tree_2.id2path(specific_fileid)]
256
specific_files = None
258
diff.show_diff_trees(tree_1, tree_2, s, specific_files, old_label='',
263
class _StartNotLinearAncestor(Exception):
264
"""Raised when a start revision is not found walking left-hand history."""
267
def _create_log_revision_iterator(branch, start_revision, end_revision,
268
direction, specific_fileid, search, generate_merge_revisions,
269
allow_single_merge_revision, generate_delta, limited_output=False):
270
"""Create a revision iterator for log.
272
:param branch: The branch being logged.
273
:param start_revision: If not None, only show revisions >= start_revision
274
:param end_revision: If not None, only show revisions <= end_revision
275
:param direction: 'reverse' (default) is latest to earliest; 'forward' is
277
:param specific_fileid: If not None, list only the commits affecting the
279
:param search: If not None, only show revisions with matching commit
281
:param generate_merge_revisions: If False, show only mainline revisions.
282
:param allow_single_merge_revision: If True, logging of a single
283
revision off the mainline is to be allowed
284
:param generate_delta: Whether to generate a delta for each revision.
285
:param limited_output: if True, the user only wants a limited result
287
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
290
start_rev_id, end_rev_id = _get_revision_limits(branch, start_revision,
293
# Decide how file-ids are matched: delta-filtering vs per-file graph.
294
# Delta filtering allows revisions to be displayed incrementally
295
# though the total time is much slower for huge repositories: log -v
296
# is the *lower* performance bound. At least until the split
297
# inventory format arrives, per-file-graph needs to remain the
298
# default except in verbose mode. Delta filtering should give more
299
# accurate results (e.g. inclusion of FILE deletions) so arguably
300
# it should always be used in the future.
301
use_deltas_for_matching = specific_fileid and generate_delta
302
delayed_graph_generation = not specific_fileid and (
303
start_rev_id or end_rev_id or limited_output)
304
generate_merges = generate_merge_revisions or (specific_fileid and
305
not use_deltas_for_matching)
306
view_revisions = _calc_view_revisions(branch, start_rev_id, end_rev_id,
307
direction, generate_merges, allow_single_merge_revision,
308
delayed_graph_generation=delayed_graph_generation)
309
search_deltas_for_fileids = None
310
if use_deltas_for_matching:
311
search_deltas_for_fileids = set([specific_fileid])
312
elif specific_fileid:
313
if not isinstance(view_revisions, list):
314
view_revisions = list(view_revisions)
315
view_revisions = _filter_revisions_touching_file_id(branch,
316
specific_fileid, view_revisions,
317
include_merges=generate_merge_revisions)
318
return make_log_rev_iterator(branch, view_revisions, generate_delta,
319
search, file_ids=search_deltas_for_fileids, direction=direction)
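
# Illustrative note (added, not from the original source): each item yielded
# by the returned iterator is a list (one batch) whose elements look like
#   ((rev_id, revno_string, merge_depth), Revision, TreeDelta-or-None)
# which is exactly the shape unpacked by the display loop in _show_log() above.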
322
def _calc_view_revisions(branch, start_rev_id, end_rev_id, direction,
323
generate_merge_revisions, allow_single_merge_revision,
324
delayed_graph_generation=False):
325
"""Calculate the revisions to view.
327
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
328
a list of the same tuples.
330
br_revno, br_rev_id = branch.last_revision_info()
334
# If a single revision is requested, check we can handle it
335
generate_single_revision = (end_rev_id and start_rev_id == end_rev_id and
336
(not generate_merge_revisions or not _has_merges(branch, end_rev_id)))
337
if generate_single_revision:
338
if end_rev_id == br_rev_id:
340
return [(br_rev_id, br_revno, 0)]
342
revno = branch.revision_id_to_dotted_revno(end_rev_id)
343
if len(revno) > 1 and not allow_single_merge_revision:
344
# It's a merge revision and the log formatter is
345
# completely brain dead. This "feature" of allowing
346
# log formatters incapable of displaying dotted revnos
347
# ought to be deprecated IMNSHO. IGC 20091022
348
raise errors.BzrCommandError('Selected log formatter only'
349
' supports mainline revisions.')
350
revno_str = '.'.join(str(n) for n in revno)
351
return [(end_rev_id, revno_str, 0)]
353
# If we only want to see linear revisions, we can iterate ...
354
if not generate_merge_revisions:
355
result = _linear_view_revisions(branch, start_rev_id, end_rev_id)
356
# If a start limit was given and it's not obviously an
357
# ancestor of the end limit, check it before outputting anything
358
if direction == 'forward' or (start_rev_id
359
and not _is_obvious_ancestor(branch, start_rev_id, end_rev_id)):
            try:
                result = list(result)
            except _StartNotLinearAncestor:
363
raise errors.BzrCommandError('Start revision not found in'
364
' left-hand history of end revision.')
365
if direction == 'forward':
366
result = reversed(list(result))
369
# On large trees, generating the merge graph can take 30-60 seconds
370
# so we delay doing it until a merge is detected, incrementally
371
# returning initial (non-merge) revisions while we can.
372
initial_revisions = []
373
if delayed_graph_generation:
375
for rev_id, revno, depth in \
376
_linear_view_revisions(branch, start_rev_id, end_rev_id):
377
if _has_merges(branch, rev_id):
381
initial_revisions.append((rev_id, revno, depth))
383
# No merged revisions found
384
if direction == 'reverse':
385
return initial_revisions
386
elif direction == 'forward':
387
return reversed(initial_revisions)
389
raise ValueError('invalid direction %r' % direction)
390
except _StartNotLinearAncestor:
391
# A merge was never detected so the lower revision limit can't
392
# be nested down somewhere
393
raise errors.BzrCommandError('Start revision not found in'
394
' history of end revision.')
396
# A log including nested merges is required. If the direction is reverse,
397
# we rebase the initial merge depths so that the development line is
398
# shown naturally, i.e. just like it is for linear logging. We can easily
399
# make forward the exact opposite display, but showing the merge revisions
400
# indented at the end seems slightly nicer in that case.
401
view_revisions = chain(iter(initial_revisions),
402
_graph_view_revisions(branch, start_rev_id, end_rev_id,
403
rebase_initial_depths=direction == 'reverse'))
176
404
if direction == 'reverse':
405
return view_revisions
178
406
    elif direction == 'forward':
        # Forward means oldest first, adjusting for depth.
        view_revisions = reverse_by_depth(list(view_revisions))
        return _rebase_merge_depth(view_revisions)
    else:
        raise ValueError('invalid direction %r' % direction)
205
def deltas_for_log_dummy(branch, which_revs):
206
"""Return all the revisions without intermediate deltas.
208
Useful for log commands that won't need the delta information.
211
for revno, revision_id in which_revs:
212
yield revno, branch.get_revision(revision_id), None
215
def deltas_for_log_reverse(branch, which_revs):
216
"""Compute deltas for display in latest-to-earliest order.
222
Sequence of (revno, revision_id) for the subset of history to examine
225
Sequence of (revno, rev, delta)
227
The delta is from the given revision to the next one in the
228
sequence, which makes sense if the log is being displayed from
231
last_revno = last_revision_id = last_tree = None
232
for revno, revision_id in which_revs:
233
this_tree = branch.revision_tree(revision_id)
234
this_revision = branch.get_revision(revision_id)
237
yield last_revno, last_revision, compare_trees(this_tree, last_tree, False)
239
this_tree = EmptyTree(branch.get_root_id())
242
last_revision = this_revision
243
last_tree = this_tree
247
this_tree = EmptyTree(branch.get_root_id())
249
this_revno = last_revno - 1
250
this_revision_id = branch.revision_history()[this_revno]
251
this_tree = branch.revision_tree(this_revision_id)
252
yield last_revno, last_revision, compare_trees(this_tree, last_tree, False)
255
def deltas_for_log_forward(branch, which_revs):
256
"""Compute deltas for display in forward log.
258
Given a sequence of (revno, revision_id) pairs, return
261
The delta is from the given revision to the next one in the
262
sequence, which makes sense if the log is being displayed from
265
last_revno = last_revision_id = last_tree = None
266
prev_tree = EmptyTree(branch.get_root_id())
268
for revno, revision_id in which_revs:
269
this_tree = branch.revision_tree(revision_id)
270
this_revision = branch.get_revision(revision_id)
274
last_tree = EmptyTree(branch.get_root_id())
276
last_revno = revno - 1
277
last_revision_id = branch.revision_history()[last_revno]
278
last_tree = branch.revision_tree(last_revision_id)
280
yield revno, this_revision, compare_trees(last_tree, this_tree, False)
283
last_revision = this_revision
284
last_tree = this_tree
414
def _has_merges(branch, rev_id):
415
"""Does a revision have multiple parents or not?"""
416
parents = branch.repository.get_parent_map([rev_id]).get(rev_id, [])
417
return len(parents) > 1
420
def _is_obvious_ancestor(branch, start_rev_id, end_rev_id):
421
"""Is start_rev_id an obvious ancestor of end_rev_id?"""
422
if start_rev_id and end_rev_id:
423
start_dotted = branch.revision_id_to_dotted_revno(start_rev_id)
424
end_dotted = branch.revision_id_to_dotted_revno(end_rev_id)
425
if len(start_dotted) == 1 and len(end_dotted) == 1:
427
return start_dotted[0] <= end_dotted[0]
428
elif (len(start_dotted) == 3 and len(end_dotted) == 3 and
429
start_dotted[0:1] == end_dotted[0:1]):
430
# both on same development line
431
return start_dotted[2] <= end_dotted[2]
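
# Worked example (added for clarity, not in the original): dotted revnos are
# tuples of ints, e.g. (3,) for mainline revision 3 and (3, 1, 2) for the
# second revision of a line merged after mainline revision 3. The checks above
# treat (2,) <= (5,) and (3, 1, 1) <= (3, 1, 4) as "obvious"; any other shapes
# are not decided here.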
438
def _linear_view_revisions(branch, start_rev_id, end_rev_id):
439
"""Calculate a sequence of revisions to view, newest to oldest.
441
:param start_rev_id: the lower revision-id
442
:param end_rev_id: the upper revision-id
443
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
444
:raises _StartNotLinearAncestor: if a start_rev_id is specified but
445
is not found walking the left-hand history
447
br_revno, br_rev_id = branch.last_revision_info()
448
repo = branch.repository
449
if start_rev_id is None and end_rev_id is None:
451
for revision_id in repo.iter_reverse_revision_history(br_rev_id):
452
yield revision_id, str(cur_revno), 0
455
if end_rev_id is None:
456
end_rev_id = br_rev_id
457
found_start = start_rev_id is None
458
for revision_id in repo.iter_reverse_revision_history(end_rev_id):
459
revno = branch.revision_id_to_dotted_revno(revision_id)
460
revno_str = '.'.join(str(n) for n in revno)
461
if not found_start and revision_id == start_rev_id:
462
yield revision_id, revno_str, 0
466
yield revision_id, revno_str, 0
469
raise _StartNotLinearAncestor()
472
def _graph_view_revisions(branch, start_rev_id, end_rev_id,
473
rebase_initial_depths=True):
474
"""Calculate revisions to view including merges, newest to oldest.
476
:param branch: the branch
477
:param start_rev_id: the lower revision-id
478
:param end_rev_id: the upper revision-id
479
    :param rebase_initial_depths: should depths be rebased until a mainline
481
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
483
view_revisions = branch.iter_merge_sorted_revisions(
484
start_revision_id=end_rev_id, stop_revision_id=start_rev_id,
485
stop_rule="with-merges")
486
if not rebase_initial_depths:
487
for (rev_id, merge_depth, revno, end_of_merge
489
yield rev_id, '.'.join(map(str, revno)), merge_depth
491
# We're following a development line starting at a merged revision.
492
# We need to adjust depths down by the initial depth until we find
493
# a depth less than it. Then we use that depth as the adjustment.
494
# If and when we reach the mainline, depth adjustment ends.
495
depth_adjustment = None
496
for (rev_id, merge_depth, revno, end_of_merge
498
if depth_adjustment is None:
499
depth_adjustment = merge_depth
501
if merge_depth < depth_adjustment:
502
depth_adjustment = merge_depth
503
merge_depth -= depth_adjustment
504
yield rev_id, '.'.join(map(str, revno)), merge_depth
507
def calculate_view_revisions(branch, start_revision, end_revision, direction,
508
specific_fileid, generate_merge_revisions, allow_single_merge_revision):
509
"""Calculate the revisions to view.
511
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
512
a list of the same tuples.
514
# This method is no longer called by the main code path.
515
# It is retained for API compatibility and may be deprecated
517
start_rev_id, end_rev_id = _get_revision_limits(branch, start_revision,
519
view_revisions = list(_calc_view_revisions(branch, start_rev_id, end_rev_id,
520
direction, generate_merge_revisions or specific_fileid,
521
allow_single_merge_revision))
523
view_revisions = _filter_revisions_touching_file_id(branch,
524
specific_fileid, view_revisions,
525
include_merges=generate_merge_revisions)
526
return _rebase_merge_depth(view_revisions)
529
def _rebase_merge_depth(view_revisions):
530
"""Adjust depths upwards so the top level is 0."""
531
    # If either the first or last revision has a merge_depth of 0, we're done
532
if view_revisions and view_revisions[0][2] and view_revisions[-1][2]:
533
min_depth = min([d for r,n,d in view_revisions])
535
view_revisions = [(r,n,d-min_depth) for r,n,d in view_revisions]
536
return view_revisions
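
# Small example (illustrative, not in the original): if a view starts inside a
# merge, every depth is shifted so the shallowest revision sits at 0:
#   [('rev-a', '1.2.2', 2), ('rev-b', '1.2.1', 2), ('rev-c', '1.1.1', 1)]
# becomes
#   [('rev-a', '1.2.2', 1), ('rev-b', '1.2.1', 1), ('rev-c', '1.1.1', 0)]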
539
def make_log_rev_iterator(branch, view_revisions, generate_delta, search,
540
file_ids=None, direction='reverse'):
541
"""Create a revision iterator for log.
543
:param branch: The branch being logged.
544
:param view_revisions: The revisions being viewed.
545
:param generate_delta: Whether to generate a delta for each revision.
546
:param search: A user text search string.
547
:param file_ids: If non empty, only revisions matching one or more of
548
the file-ids are to be kept.
549
:param direction: the direction in which view_revisions is sorted
550
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
553
# Convert view_revisions into (view, None, None) groups to fit with
554
# the standard interface here.
555
if type(view_revisions) == list:
556
# A single batch conversion is faster than many incremental ones.
557
# As we have all the data, do a batch conversion.
558
nones = [None] * len(view_revisions)
559
log_rev_iterator = iter([zip(view_revisions, nones, nones)])
562
for view in view_revisions:
563
yield (view, None, None)
564
log_rev_iterator = iter([_convert()])
565
for adapter in log_adapters:
566
# It would be nicer if log adapters were first class objects
567
# with custom parameters. This will do for now. IGC 20090127
568
if adapter == _make_delta_filter:
569
log_rev_iterator = adapter(branch, generate_delta,
570
search, log_rev_iterator, file_ids, direction)
572
log_rev_iterator = adapter(branch, generate_delta,
573
search, log_rev_iterator)
574
return log_rev_iterator
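
# Sketch of a custom adapter (illustrative only, not part of bzrlib): every
# adapter in log_adapters is called with (branch, generate_delta, search,
# log_rev_iterator) and must return an iterator with the same batch shape, so
# a plugin could append something like this to log_adapters.
def _example_drop_empty_messages(branch, generate_delta, search,
                                 log_rev_iterator):
    for revs in log_rev_iterator:
        # Keep only revisions whose commit message is non-blank.
        yield [(view, rev, delta) for view, rev, delta in revs
               if rev.message and rev.message.strip()]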
577
def _make_search_filter(branch, generate_delta, search, log_rev_iterator):
578
"""Create a filtered iterator of log_rev_iterator matching on a regex.
580
:param branch: The branch being logged.
581
:param generate_delta: Whether to generate a delta for each revision.
582
:param search: A user text search string.
583
:param log_rev_iterator: An input iterator containing all revisions that
584
could be displayed, in lists.
585
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
589
return log_rev_iterator
590
# Compile the search now to get early errors.
591
searchRE = re.compile(search, re.IGNORECASE)
592
return _filter_message_re(searchRE, log_rev_iterator)
595
def _filter_message_re(searchRE, log_rev_iterator):
596
for revs in log_rev_iterator:
598
for (rev_id, revno, merge_depth), rev, delta in revs:
599
if searchRE.search(rev.message):
600
new_revs.append(((rev_id, revno, merge_depth), rev, delta))
604
def _make_delta_filter(branch, generate_delta, search, log_rev_iterator,
605
fileids=None, direction='reverse'):
606
"""Add revision deltas to a log iterator if needed.
608
:param branch: The branch being logged.
609
:param generate_delta: Whether to generate a delta for each revision.
610
:param search: A user text search string.
611
:param log_rev_iterator: An input iterator containing all revisions that
612
could be displayed, in lists.
613
:param fileids: If non empty, only revisions matching one or more of
614
the file-ids are to be kept.
615
:param direction: the direction in which view_revisions is sorted
616
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
619
if not generate_delta and not fileids:
620
return log_rev_iterator
621
return _generate_deltas(branch.repository, log_rev_iterator,
622
generate_delta, fileids, direction)
625
def _generate_deltas(repository, log_rev_iterator, always_delta, fileids,
627
"""Create deltas for each batch of revisions in log_rev_iterator.
629
If we're only generating deltas for the sake of filtering against
630
file-ids, we stop generating deltas once all file-ids reach the
631
appropriate life-cycle point. If we're receiving data newest to
632
oldest, then that life-cycle point is 'add', otherwise it's 'remove'.
634
check_fileids = fileids is not None and len(fileids) > 0
636
fileid_set = set(fileids)
637
if direction == 'reverse':
643
for revs in log_rev_iterator:
644
# If we were matching against fileids and we've run out,
645
# there's nothing left to do
646
if check_fileids and not fileid_set:
648
revisions = [rev[1] for rev in revs]
649
deltas = repository.get_deltas_for_revisions(revisions)
651
for rev, delta in izip(revs, deltas):
653
if not _delta_matches_fileids(delta, fileid_set, stop_on):
655
elif not always_delta:
656
# Delta was created just for matching - ditch it
657
# Note: It would probably be a better UI to return
658
# a delta filtered by the file-ids, rather than
659
# None at all. That functional enhancement can
662
new_revs.append((rev[0], rev[1], delta))
666
def _delta_matches_fileids(delta, fileids, stop_on='add'):
667
"""Check is a delta matches one of more file-ids.
669
:param fileids: a set of fileids to match against.
670
    :param stop_on: either 'add' or 'delete' - take file-ids out of the
        fileids set once their add or delete entry is detected respectively
676
for item in delta.added:
677
if item[1] in fileids:
679
fileids.remove(item[1])
681
for item in delta.removed:
682
if item[1] in fileids:
683
if stop_on == 'delete':
684
fileids.remove(item[1])
688
for l in (delta.modified, delta.renamed, delta.kind_changed):
690
if item[1] in fileids:
695
def _make_revision_objects(branch, generate_delta, search, log_rev_iterator):
696
"""Extract revision objects from the repository
698
:param branch: The branch being logged.
699
:param generate_delta: Whether to generate a delta for each revision.
700
:param search: A user text search string.
701
:param log_rev_iterator: An input iterator containing all revisions that
702
could be displayed, in lists.
703
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
706
repository = branch.repository
707
for revs in log_rev_iterator:
708
# r = revision_id, n = revno, d = merge depth
709
revision_ids = [view[0] for view, _, _ in revs]
710
revisions = repository.get_revisions(revision_ids)
711
revs = [(rev[0], revision, rev[2]) for rev, revision in
712
izip(revs, revisions)]
716
def _make_batch_filter(branch, generate_delta, search, log_rev_iterator):
717
"""Group up a single large batch into smaller ones.
719
:param branch: The branch being logged.
720
:param generate_delta: Whether to generate a delta for each revision.
721
:param search: A user text search string.
722
:param log_rev_iterator: An input iterator containing all revisions that
723
could be displayed, in lists.
724
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
727
repository = branch.repository
729
for batch in log_rev_iterator:
732
step = [detail for _, detail in zip(range(num), batch)]
736
num = min(int(num * 1.5), 200)
739
def _get_revision_limits(branch, start_revision, end_revision):
740
"""Get and check revision limits.
742
:param branch: The branch containing the revisions.
744
:param start_revision: The first revision to be logged.
745
For backwards compatibility this may be a mainline integer revno,
746
but for merge revision support a RevisionInfo is expected.
748
:param end_revision: The last revision to be logged.
749
For backwards compatibility this may be a mainline integer revno,
750
but for merge revision support a RevisionInfo is expected.
752
:return: (start_rev_id, end_rev_id) tuple.
754
branch_revno, branch_rev_id = branch.last_revision_info()
756
if start_revision is None:
759
if isinstance(start_revision, revisionspec.RevisionInfo):
760
start_rev_id = start_revision.rev_id
761
start_revno = start_revision.revno or 1
763
branch.check_real_revno(start_revision)
764
start_revno = start_revision
765
start_rev_id = branch.get_rev_id(start_revno)
768
if end_revision is None:
769
end_revno = branch_revno
771
if isinstance(end_revision, revisionspec.RevisionInfo):
772
end_rev_id = end_revision.rev_id
773
end_revno = end_revision.revno or branch_revno
775
branch.check_real_revno(end_revision)
776
end_revno = end_revision
777
end_rev_id = branch.get_rev_id(end_revno)
779
if branch_revno != 0:
780
if (start_rev_id == _mod_revision.NULL_REVISION
781
or end_rev_id == _mod_revision.NULL_REVISION):
782
raise errors.BzrCommandError('Logging revision 0 is invalid.')
783
if start_revno > end_revno:
784
raise errors.BzrCommandError("Start revision must be older than "
786
return (start_rev_id, end_rev_id)
789
def _get_mainline_revs(branch, start_revision, end_revision):
790
"""Get the mainline revisions from the branch.
792
Generates the list of mainline revisions for the branch.
794
:param branch: The branch containing the revisions.
796
:param start_revision: The first revision to be logged.
797
For backwards compatibility this may be a mainline integer revno,
798
but for merge revision support a RevisionInfo is expected.
800
:param end_revision: The last revision to be logged.
801
For backwards compatibility this may be a mainline integer revno,
802
but for merge revision support a RevisionInfo is expected.
804
:return: A (mainline_revs, rev_nos, start_rev_id, end_rev_id) tuple.
806
branch_revno, branch_last_revision = branch.last_revision_info()
807
if branch_revno == 0:
808
return None, None, None, None
810
# For mainline generation, map start_revision and end_revision to
811
# mainline revnos. If the revision is not on the mainline choose the
812
# appropriate extreme of the mainline instead - the extra will be
814
# Also map the revisions to rev_ids, to be used in the later filtering
817
if start_revision is None:
820
if isinstance(start_revision, revisionspec.RevisionInfo):
821
start_rev_id = start_revision.rev_id
822
start_revno = start_revision.revno or 1
824
branch.check_real_revno(start_revision)
825
start_revno = start_revision
828
if end_revision is None:
829
end_revno = branch_revno
831
if isinstance(end_revision, revisionspec.RevisionInfo):
832
end_rev_id = end_revision.rev_id
833
end_revno = end_revision.revno or branch_revno
835
branch.check_real_revno(end_revision)
836
end_revno = end_revision
838
if ((start_rev_id == _mod_revision.NULL_REVISION)
839
or (end_rev_id == _mod_revision.NULL_REVISION)):
840
raise errors.BzrCommandError('Logging revision 0 is invalid.')
841
if start_revno > end_revno:
842
raise errors.BzrCommandError("Start revision must be older than "
845
if end_revno < start_revno:
846
return None, None, None, None
847
cur_revno = branch_revno
850
for revision_id in branch.repository.iter_reverse_revision_history(
851
branch_last_revision):
852
if cur_revno < start_revno:
853
# We have gone far enough, but we always add 1 more revision
854
rev_nos[revision_id] = cur_revno
855
mainline_revs.append(revision_id)
857
if cur_revno <= end_revno:
858
rev_nos[revision_id] = cur_revno
859
mainline_revs.append(revision_id)
862
# We walked off the edge of all revisions, so we add a 'None' marker
863
mainline_revs.append(None)
865
mainline_revs.reverse()
867
# override the mainline to look like the revision history.
868
return mainline_revs, rev_nos, start_rev_id, end_rev_id
871
def _filter_revision_range(view_revisions, start_rev_id, end_rev_id):
872
"""Filter view_revisions based on revision ranges.
874
:param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
875
tuples to be filtered.
877
    :param start_rev_id: If not None, specifies the first revision to be logged.
        If None then all revisions up to the end_rev_id are logged.

    :param end_rev_id: If not None, specifies the last revision to be logged.
        If None then all revisions up to the end of the log are logged.
883
:return: The filtered view_revisions.
885
# This method is no longer called by the main code path.
886
# It may be removed soon. IGC 20090127
887
if start_rev_id or end_rev_id:
888
revision_ids = [r for r, n, d in view_revisions]
890
start_index = revision_ids.index(start_rev_id)
893
if start_rev_id == end_rev_id:
894
end_index = start_index
897
end_index = revision_ids.index(end_rev_id)
899
end_index = len(view_revisions) - 1
900
# To include the revisions merged into the last revision,
901
# extend end_rev_id down to, but not including, the next rev
902
# with the same or lesser merge_depth
903
end_merge_depth = view_revisions[end_index][2]
905
for index in xrange(end_index+1, len(view_revisions)+1):
906
if view_revisions[index][2] <= end_merge_depth:
907
end_index = index - 1
910
# if the search falls off the end then log to the end as well
911
end_index = len(view_revisions) - 1
912
view_revisions = view_revisions[start_index:end_index+1]
913
return view_revisions
916
def _filter_revisions_touching_file_id(branch, file_id, view_revisions,
917
include_merges=True):
918
r"""Return the list of revision ids which touch a given file id.
920
The function filters view_revisions and returns a subset.
921
This includes the revisions which directly change the file id,
922
and the revisions which merge these changes. So if the
934
And 'C' changes a file, then both C and D will be returned. F will not be
935
returned even though it brings the changes to C into the branch starting
936
with E. (Note that if we were using F as the tip instead of G, then we
939
This will also be restricted based on a subset of the mainline.
941
:param branch: The branch where we can get text revision information.
943
:param file_id: Filter out revisions that do not touch file_id.
945
:param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
946
tuples. This is the list of revisions which will be filtered. It is
947
assumed that view_revisions is in merge_sort order (i.e. newest
950
:param include_merges: include merge revisions in the result or not
952
:return: A list of (revision_id, dotted_revno, merge_depth) tuples.
954
# Lookup all possible text keys to determine which ones actually modified
956
text_keys = [(file_id, rev_id) for rev_id, revno, depth in view_revisions]
957
# Looking up keys in batches of 1000 can cut the time in half, as well as
958
# memory consumption. GraphIndex *does* like to look for a few keys in
959
# parallel, it just doesn't like looking for *lots* of keys in parallel.
960
# TODO: This code needs to be re-evaluated periodically as we tune the
961
# indexing layer. We might consider passing in hints as to the known
962
# access pattern (sparse/clustered, high success rate/low success
963
# rate). This particular access is clustered with a low success rate.
964
get_parent_map = branch.repository.texts.get_parent_map
965
modified_text_revisions = set()
967
for start in xrange(0, len(text_keys), chunk_size):
968
next_keys = text_keys[start:start + chunk_size]
969
# Only keep the revision_id portion of the key
970
modified_text_revisions.update(
971
[k[1] for k in get_parent_map(next_keys)])
972
del text_keys, next_keys
975
# Track what revisions will merge the current revision, replace entries
976
# with 'None' when they have been added to result
977
current_merge_stack = [None]
978
for info in view_revisions:
979
rev_id, revno, depth = info
980
if depth == len(current_merge_stack):
981
current_merge_stack.append(info)
983
del current_merge_stack[depth + 1:]
984
current_merge_stack[-1] = info
986
if rev_id in modified_text_revisions:
987
# This needs to be logged, along with the extra revisions
988
for idx in xrange(len(current_merge_stack)):
989
node = current_merge_stack[idx]
991
if include_merges or node[2] == 0:
993
current_merge_stack[idx] = None
997
def get_view_revisions(mainline_revs, rev_nos, branch, direction,
998
include_merges=True):
999
"""Produce an iterator of revisions to show
1000
:return: an iterator of (revision_id, revno, merge_depth)
1001
(if there is no revno for a revision, None is supplied)
1003
# This method is no longer called by the main code path.
1004
# It is retained for API compatibility and may be deprecated
1005
# soon. IGC 20090127
1006
if not include_merges:
1007
revision_ids = mainline_revs[1:]
1008
if direction == 'reverse':
1009
revision_ids.reverse()
1010
for revision_id in revision_ids:
1011
yield revision_id, str(rev_nos[revision_id]), 0
1013
graph = branch.repository.get_graph()
1014
# This asks for all mainline revisions, which means we only have to spider
1015
# sideways, rather than depth history. That said, it's still size-of-history
1016
# and should be addressed.
1017
# mainline_revisions always includes an extra revision at the beginning, so
1019
parent_map = dict(((key, value) for key, value in
1020
graph.iter_ancestry(mainline_revs[1:]) if value is not None))
1021
# filter out ghosts; merge_sort errors on ghosts.
1022
rev_graph = _mod_repository._strip_NULL_ghosts(parent_map)
1023
merge_sorted_revisions = tsort.merge_sort(
1027
generate_revno=True)
1029
if direction == 'forward':
1030
# forward means oldest first.
1031
merge_sorted_revisions = reverse_by_depth(merge_sorted_revisions)
1032
elif direction != 'reverse':
1033
raise ValueError('invalid direction %r' % direction)
1035
for (sequence, rev_id, merge_depth, revno, end_of_merge
1036
) in merge_sorted_revisions:
1037
yield rev_id, '.'.join(map(str, revno)), merge_depth
1040
def reverse_by_depth(merge_sorted_revisions, _depth=0):
1041
"""Reverse revisions by depth.
1043
Revisions with a different depth are sorted as a group with the previous
1044
revision of that depth. There may be no topological justification for this,
1045
but it looks much nicer.
1047
# Add a fake revision at start so that we can always attach sub revisions
1048
merge_sorted_revisions = [(None, None, _depth)] + merge_sorted_revisions
1050
for val in merge_sorted_revisions:
1051
if val[2] == _depth:
1052
# Each revision at the current depth becomes a chunk grouping all
1053
# higher depth revisions.
1054
zd_revisions.append([val])
1056
zd_revisions[-1].append(val)
1057
for revisions in zd_revisions:
1058
if len(revisions) > 1:
1059
            # We have higher depth revisions, so reverse them locally
1060
revisions[1:] = reverse_by_depth(revisions[1:], _depth + 1)
1061
zd_revisions.reverse()
1063
for chunk in zd_revisions:
1064
result.extend(chunk)
1066
# Top level call, get rid of the fake revisions that have been added
1067
result = [r for r in result if r[0] is not None and r[1] is not None]
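
# Small example (illustrative, not in the original): given merge-sorted data
# in reverse (newest-first) order,
#   [('m2', '2', 0), ('n2', '1.1.2', 1), ('n1', '1.1.1', 1), ('m1', '1', 0)]
# reverse_by_depth() reverses each depth level separately, so the mainline
# comes out oldest first with the revisions merged by '2' following it:
#   [('m1', '1', 0), ('m2', '2', 0), ('n1', '1.1.1', 1), ('n2', '1.1.2', 1)]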
1071
class LogRevision(object):
1072
"""A revision to be logged (by LogFormatter.log_revision).
1074
A simple wrapper for the attributes of a revision to be logged.
1075
The attributes may or may not be populated, as determined by the
1076
logging options and the log formatter capabilities.
1079
def __init__(self, rev=None, revno=None, merge_depth=0, delta=None,
1080
tags=None, diff=None):
1082
self.revno = str(revno)
1083
self.merge_depth = merge_depth
287
1089
class LogFormatter(object):
"""Abstract class to display log messages.
1092
At a minimum, a derived class must implement the log_revision method.
1094
If the LogFormatter needs to be informed of the beginning or end of
1095
a log it should implement the begin_log and/or end_log hook methods.
1097
A LogFormatter should define the following supports_XXX flags
1098
to indicate which LogRevision attributes it supports:
1100
- supports_delta must be True if this log formatter supports delta.
1101
Otherwise the delta attribute may not be populated. The 'delta_format'
1102
attribute describes whether the 'short_status' format (1) or the long
1103
one (2) should be used.
1105
- supports_merge_revisions must be True if this log formatter supports
1106
merge revisions. If not, and if supports_single_merge_revision is
1107
also not True, then only mainline revisions will be passed to the
1110
- preferred_levels is the number of levels this formatter defaults to.
1111
The default value is zero meaning display all levels.
1112
This value is only relevant if supports_merge_revisions is True.
1114
- supports_single_merge_revision must be True if this log formatter
1115
supports logging only a single merge revision. This flag is
1116
only relevant if supports_merge_revisions is not True.
1118
- supports_tags must be True if this log formatter supports tags.
1119
Otherwise the tags attribute may not be populated.
1121
- supports_diff must be True if this log formatter supports diffs.
1122
Otherwise the diff attribute may not be populated.
1124
Plugins can register functions to show custom revision properties using
1125
the properties_handler_registry. The registered function
1126
must respect the following interface description:
1127
def my_show_properties(properties_dict):
1128
# code that returns a dict {'name':'value'} of the properties
1131
preferred_levels = 0
1133
def __init__(self, to_file, show_ids=False, show_timezone='original',
1134
delta_format=None, levels=None):
1135
"""Create a LogFormatter.
1137
:param to_file: the file to output to
1138
:param show_ids: if True, revision-ids are to be displayed
1139
:param show_timezone: the timezone to use
1140
:param delta_format: the level of delta information to display
1141
            or None to leave it up to the formatter to decide
1142
:param levels: the number of levels to display; None or -1 to
1143
let the log formatter decide.
290
1145
self.to_file = to_file
1146
# 'exact' stream used to show diff, it should print content 'as is'
1147
# and should not try to decode/encode it to unicode to avoid bug #328007
1148
self.to_exact_file = getattr(to_file, 'stream', to_file)
291
1149
self.show_ids = show_ids
292
1150
self.show_timezone = show_timezone
1151
if delta_format is None:
1152
# Ensures backward compatibility
1153
delta_format = 2 # long format
1154
self.delta_format = delta_format
1155
self.levels = levels
1157
def get_levels(self):
1158
"""Get the number of levels to display or 0 for all."""
1159
if getattr(self, 'supports_merge_revisions', False):
1160
if self.levels is None or self.levels == -1:
1161
return self.preferred_levels
1166
def log_revision(self, revision):
1169
:param revision: The LogRevision to be logged.
296
1171
raise NotImplementedError('not implemented in abstract base')
1173
def short_committer(self, rev):
1174
name, address = config.parse_username(rev.committer)
1179
def short_author(self, rev):
1180
name, address = config.parse_username(rev.get_apparent_authors()[0])
1185
def show_properties(self, revision, indent):
1186
"""Displays the custom properties returned by each registered handler.
1188
If a registered handler raises an error it is propagated.
1190
for key, handler in properties_handler_registry.iteritems():
1191
for key, value in handler(revision).items():
1192
self.to_file.write(indent + key + ': ' + value + '\n')
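
    # Example handler sketch (illustrative, not part of bzrlib): a plugin can
    # register a function mapping a revision to {'name': 'value'} pairs, which
    # show_properties() above will then print. The 'reviewed-by' property name
    # is a made-up example.
    #
    #   def _show_reviewed_by(rev):
    #       value = rev.properties.get('reviewed-by')
    #       return {'reviewed-by': value} if value else {}
    #
    #   properties_handler_registry.register('reviewed-by', _show_reviewed_by)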
1194
def show_diff(self, to_file, diff, indent):
1195
for l in diff.rstrip().split('\n'):
1196
to_file.write(indent + '%s\n' % (l,))
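
# Minimal custom formatter sketch (illustrative only, not part of bzrlib): it
# declares no supports_* capabilities, so _show_log() will only feed it
# mainline revisions, without deltas, tags or diffs.
class _ExampleOneLineLogFormatter(LogFormatter):

    def log_revision(self, revision):
        self.to_file.write('%s %s\n'
                           % (revision.revno, revision.rev.get_summary()))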
class LongLogFormatter(LogFormatter):

    supports_merge_revisions = True
    supports_delta = True
    supports_tags = True
    supports_diff = True

    def log_revision(self, revision):
        """Log a revision, either merged or not."""
        indent = ' ' * revision.merge_depth
        to_file = self.to_file
        to_file.write(indent + '-' * 60 + '\n')
        if revision.revno is not None:
            to_file.write(indent + 'revno: %s\n' % (revision.revno,))
        if revision.tags:
            to_file.write(indent + 'tags: %s\n' % (', '.join(revision.tags)))
        if self.show_ids:
            to_file.write(indent + 'revision-id: ' + revision.rev.revision_id)
            to_file.write('\n')
            for parent_id in revision.rev.parent_ids:
                to_file.write(indent + 'parent: %s\n' % (parent_id,))
        self.show_properties(revision.rev, indent)

        committer = revision.rev.committer
        authors = revision.rev.get_apparent_authors()
        if authors != [committer]:
            to_file.write(indent + 'author: %s\n' % (", ".join(authors),))
        to_file.write(indent + 'committer: %s\n' % (committer,))

        branch_nick = revision.rev.properties.get('branch-nick', None)
        if branch_nick is not None:
            to_file.write(indent + 'branch nick: %s\n' % (branch_nick,))

        date_str = format_date(revision.rev.timestamp,
                               revision.rev.timezone or 0,
                               self.show_timezone)
        to_file.write(indent + 'timestamp: %s\n' % (date_str,))

        to_file.write(indent + 'message:\n')
        if not revision.rev.message:
            to_file.write(indent + ' (no message)\n')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                to_file.write(indent + ' %s\n' % (l,))
        if revision.delta is not None:
            # We don't respect delta_format for compatibility
            revision.delta.show(to_file, self.show_ids, indent=indent,
                                short_status=False)
        if revision.diff is not None:
            to_file.write(indent + 'diff:\n')
            # Note: we explicitly don't indent the diff (relative to the
            # revision information) so that the output can be fed to patch -p0
            self.show_diff(self.to_exact_file, revision.diff, indent)
332
1255
class ShortLogFormatter(LogFormatter):
1257
supports_merge_revisions = True
1258
preferred_levels = 1
1259
supports_delta = True
1260
supports_tags = True
1261
supports_diff = True
1263
def __init__(self, *args, **kwargs):
1264
super(ShortLogFormatter, self).__init__(*args, **kwargs)
1265
self.revno_width_by_depth = {}
1267
def log_revision(self, revision):
1268
# We need two indents: one per depth and one for the information
1269
# relative to that indent. Most mainline revnos are 5 chars or
1270
# less while dotted revnos are typically 11 chars or less. Once
1271
# calculated, we need to remember the offset for a given depth
1272
# as we might be starting from a dotted revno in the first column
1273
# and we want subsequent mainline revisions to line up.
1274
depth = revision.merge_depth
1275
indent = ' ' * depth
1276
revno_width = self.revno_width_by_depth.get(depth)
1277
if revno_width is None:
1278
if revision.revno.find('.') == -1:
1279
# mainline revno, e.g. 12345
1282
# dotted revno, e.g. 12345.10.55
1284
self.revno_width_by_depth[depth] = revno_width
1285
offset = ' ' * (revno_width + 1)
336
1287
to_file = self.to_file
1289
if len(revision.rev.parent_ids) > 1:
1290
is_merge = ' [merge]'
1293
tags = ' {%s}' % (', '.join(revision.tags))
1294
to_file.write(indent + "%*s %s\t%s%s%s\n" % (revno_width,
1295
revision.revno, self.short_author(revision.rev),
1296
format_date(revision.rev.timestamp,
1297
revision.rev.timezone or 0,
1298
self.show_timezone, date_fmt="%Y-%m-%d",
1301
self.show_properties(revision.rev, indent+offset)
341
1302
if self.show_ids:
1303
to_file.write(indent + offset + 'revision-id:%s\n'
1304
% (revision.rev.revision_id,))
1305
if not revision.rev.message:
1306
to_file.write(indent + offset + '(no message)\n')
1308
message = revision.rev.message.rstrip('\r\n')
1309
for l in message.split('\n'):
1310
to_file.write(indent + offset + '%s\n' % (l,))
1312
if revision.delta is not None:
1313
revision.delta.show(to_file, self.show_ids, indent=indent + offset,
1314
short_status=self.delta_format==1)
1315
if revision.diff is not None:
1316
self.show_diff(self.to_exact_file, revision.diff, ' ')
1320
class LineLogFormatter(LogFormatter):
1322
supports_merge_revisions = True
1323
preferred_levels = 1
1324
supports_tags = True
1326
def __init__(self, *args, **kwargs):
1327
super(LineLogFormatter, self).__init__(*args, **kwargs)
1328
self._max_chars = terminal_width() - 1
1330
def truncate(self, str, max_len):
1331
if len(str) <= max_len:
1333
return str[:max_len-3]+'...'
1335
def date_string(self, rev):
1336
return format_date(rev.timestamp, rev.timezone or 0,
1337
self.show_timezone, date_fmt="%Y-%m-%d",
1340
def message(self, rev):
343
1341
if not rev.message:
return '(no message)'
1346
def log_revision(self, revision):
1347
indent = ' ' * revision.merge_depth
1348
self.to_file.write(self.log_string(revision.revno, revision.rev,
1349
self._max_chars, revision.tags, indent))
1350
self.to_file.write('\n')
1352
def log_string(self, revno, rev, max_chars, tags=None, prefix=''):
1353
"""Format log info into one string. Truncate tail of string
1354
:param revno: revision number or None.
1355
Revision numbers counts from 1.
1356
:param rev: revision object
1357
:param max_chars: maximum length of resulting string
1358
:param tags: list of tags or None
1359
:param prefix: string to prefix each line
1360
:return: formatted truncated string
1364
            # show revno only when it is not None
1365
out.append("%s:" % revno)
1366
out.append(self.truncate(self.short_author(rev), 20))
1367
out.append(self.date_string(rev))
1368
if len(rev.parent_ids) > 1:
1369
out.append('[merge]')
1371
tag_str = '{%s}' % (', '.join(tags))
1373
out.append(rev.get_summary())
1374
return self.truncate(prefix + " ".join(out).rstrip('\n'), max_chars)
1377
class GnuChangelogLogFormatter(LogFormatter):
1379
supports_merge_revisions = True
1380
supports_delta = True
1382
def log_revision(self, revision):
1383
"""Log a revision, either merged or not."""
1384
to_file = self.to_file
1386
date_str = format_date(revision.rev.timestamp,
1387
revision.rev.timezone or 0,
1389
date_fmt='%Y-%m-%d',
1391
        committer_str = revision.rev.committer.replace(' <', '  <')
1392
to_file.write('%s %s\n\n' % (date_str,committer_str))
1394
if revision.delta is not None and revision.delta.has_changed():
1395
for c in revision.delta.added + revision.delta.removed + revision.delta.modified:
1397
to_file.write('\t* %s:\n' % (path,))
1398
for c in revision.delta.renamed:
1399
oldpath,newpath = c[:2]
1400
# For renamed files, show both the old and the new path
1401
to_file.write('\t* %s:\n\t* %s:\n' % (oldpath,newpath))
1404
if not revision.rev.message:
1405
to_file.write('\tNo commit message\n')
1407
message = revision.rev.message.rstrip('\r\n')
1408
for l in message.split('\n'):
1409
to_file.write('\t%s\n' % (l.lstrip(),))
1413
def line_log(rev, max_chars):
1414
lf = LineLogFormatter(None)
1415
return lf.log_string(None, rev, max_chars)
1418
class LogFormatterRegistry(registry.Registry):
1419
"""Registry for log formatters"""
1421
def make_formatter(self, name, *args, **kwargs):
1422
"""Construct a formatter from arguments.
1424
:param name: Name of the formatter to construct. 'short', 'long' and
1425
'line' are built-in.
1427
return self.get(name)(*args, **kwargs)
1429
def get_default(self, branch):
1430
return self.get(branch.get_config().log_format())
1433
log_formatter_registry = LogFormatterRegistry()
1436
log_formatter_registry.register('short', ShortLogFormatter,
1437
'Moderately short log format')
1438
log_formatter_registry.register('long', LongLogFormatter,
1439
'Detailed log format')
1440
log_formatter_registry.register('line', LineLogFormatter,
1441
'Log format with one line per revision')
1442
log_formatter_registry.register('gnu-changelog', GnuChangelogLogFormatter,
1443
'Format used by GNU ChangeLog files')
1446
def register_formatter(name, formatter):
1447
log_formatter_registry.register(name, formatter)
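
# Example (illustrative): a plugin could register the sketch formatter defined
# above under its own name, after which the registry machinery (and hence
# log_formatter()) can find it:
#
#   register_formatter('example-one-line', _ExampleOneLineLogFormatter)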
362
1450
def log_formatter(name, *args, **kwargs):
    """Construct a formatter from arguments.

    name -- Name of the formatter to construct; currently 'long', 'short' and
        'line' are supported.
    """
    try:
        return log_formatter_registry.make_formatter(name, *args, **kwargs)
    except KeyError:
        raise errors.BzrCommandError("unknown log formatter: %r" % name)
370
1462
def show_one_log(revno, rev, delta, verbose, to_file, show_timezone):
    # deprecated; for compatibility
372
1464
lf = LongLogFormatter(to_file=to_file, show_timezone=show_timezone)
373
1465
lf.show(revno, rev, delta)
1468
def show_changed_revisions(branch, old_rh, new_rh, to_file=None,
1470
"""Show the change in revision history comparing the old revision history to the new one.
1472
:param branch: The branch where the revisions exist
1473
:param old_rh: The old revision history
1474
:param new_rh: The new revision history
1475
:param to_file: A file to write the results to. If None, stdout will be used
1478
to_file = codecs.getwriter(get_terminal_encoding())(sys.stdout,
1480
lf = log_formatter(log_format,
1483
show_timezone='original')
1485
# This is the first index which is different between
1488
for i in xrange(max(len(new_rh),
1490
if (len(new_rh) <= i
1492
or new_rh[i] != old_rh[i]):
1496
if base_idx is None:
1497
to_file.write('Nothing seems to have changed\n')
1499
## TODO: It might be nice to do something like show_log
1500
## and show the merged entries. But since this is the
1501
## removed revisions, it shouldn't be as important
1502
if base_idx < len(old_rh):
1503
to_file.write('*'*60)
1504
to_file.write('\nRemoved Revisions:\n')
1505
for i in range(base_idx, len(old_rh)):
1506
rev = branch.repository.get_revision(old_rh[i])
1507
lr = LogRevision(rev, i+1, 0, None)
1509
to_file.write('*'*60)
1510
to_file.write('\n\n')
1511
if base_idx < len(new_rh):
1512
to_file.write('Added Revisions:\n')
1517
direction='forward',
1518
start_revision=base_idx+1,
1519
end_revision=len(new_rh),
1523
def get_history_change(old_revision_id, new_revision_id, repository):
1524
"""Calculate the uncommon lefthand history between two revisions.
1526
:param old_revision_id: The original revision id.
1527
:param new_revision_id: The new revision id.
1528
:param repository: The repository to use for the calculation.
1530
return old_history, new_history
1533
old_revisions = set()
1535
new_revisions = set()
1536
new_iter = repository.iter_reverse_revision_history(new_revision_id)
1537
old_iter = repository.iter_reverse_revision_history(old_revision_id)
1538
stop_revision = None
1541
while do_new or do_old:
1544
new_revision = new_iter.next()
1545
except StopIteration:
1548
new_history.append(new_revision)
1549
new_revisions.add(new_revision)
1550
if new_revision in old_revisions:
1551
stop_revision = new_revision
1555
old_revision = old_iter.next()
1556
except StopIteration:
1559
old_history.append(old_revision)
1560
old_revisions.add(old_revision)
1561
if old_revision in new_revisions:
1562
stop_revision = old_revision
1564
new_history.reverse()
1565
old_history.reverse()
1566
if stop_revision is not None:
1567
new_history = new_history[new_history.index(stop_revision) + 1:]
1568
old_history = old_history[old_history.index(stop_revision) + 1:]
1569
return old_history, new_history
1572
def show_branch_change(branch, output, old_revno, old_revision_id):
1573
"""Show the changes made to a branch.
1575
:param branch: The branch to show changes about.
1576
:param output: A file-like object to write changes to.
1577
:param old_revno: The revno of the old tip.
1578
:param old_revision_id: The revision_id of the old tip.
1580
new_revno, new_revision_id = branch.last_revision_info()
1581
old_history, new_history = get_history_change(old_revision_id,
1584
if old_history == [] and new_history == []:
1585
output.write('Nothing seems to have changed\n')
1588
log_format = log_formatter_registry.get_default(branch)
1589
lf = log_format(show_ids=False, to_file=output, show_timezone='original')
1590
if old_history != []:
1591
output.write('*'*60)
1592
output.write('\nRemoved Revisions:\n')
1593
show_flat_log(branch.repository, old_history, old_revno, lf)
1594
output.write('*'*60)
1595
output.write('\n\n')
1596
if new_history != []:
1597
output.write('Added Revisions:\n')
1598
start_revno = new_revno - len(new_history) + 1
1599
show_log(branch, lf, None, verbose=False, direction='forward',
1600
start_revision=start_revno,)
1603
def show_flat_log(repository, history, last_revno, lf):
1604
"""Show a simple log of the specified history.
1606
:param repository: The repository to retrieve revisions from.
1607
:param history: A list of revision_ids indicating the lefthand history.
1608
:param last_revno: The revno of the last revision_id in the history.
1609
:param lf: The log formatter to use.
1611
start_revno = last_revno - len(history) + 1
1612
revisions = repository.get_revisions(history)
1613
for i, rev in enumerate(revisions):
1614
lr = LogRevision(rev, i + last_revno, 0, None)
1618
def _get_fileid_to_log(revision, tree, b, fp):
1619
"""Find the file-id to log for a file path in a revision range.
1621
:param revision: the revision range as parsed on the command line
1622
:param tree: the working tree, if any
1623
:param b: the branch
1624
:param fp: file path
1626
if revision is None:
1628
tree = b.basis_tree()
1629
file_id = tree.path2id(fp)
1631
# go back to when time began
1633
rev1 = b.get_rev_id(1)
1634
except errors.NoSuchRevision:
1638
tree = b.repository.revision_tree(rev1)
1639
file_id = tree.path2id(fp)
1641
elif len(revision) == 1:
1642
# One revision given - file must exist in it
1643
tree = revision[0].as_tree(b)
1644
file_id = tree.path2id(fp)
1646
elif len(revision) == 2:
1647
# Revision range given. Get the file-id from the end tree.
1648
# If that fails, try the start tree.
1649
rev_id = revision[1].as_revision_id(b)
1651
tree = b.basis_tree()
1653
tree = revision[1].as_tree(b)
1654
file_id = tree.path2id(fp)
1656
rev_id = revision[0].as_revision_id(b)
1658
rev1 = b.get_rev_id(1)
1659
tree = b.repository.revision_tree(rev1)
1661
tree = revision[0].as_tree(b)
1662
file_id = tree.path2id(fp)
1664
raise errors.BzrCommandError(
1665
'bzr log --revision takes one or two values.')
1669
properties_handler_registry = registry.Registry()
1670
properties_handler_registry.register_lazy("foreign",
1672
"show_foreign_properties")
1675
# Adapters through which the revision ids to log are filtered. When log is
# called, the log_rev_iterator is adapted through each of these factory
# methods.
1677
# Plugins are welcome to mutate this list in any way they like - as long
1678
# as the overall behaviour is preserved. At this point there is no extensible
1679
# mechanism for getting parameters to each factory method, and until there is
1680
# this won't be considered a stable api.
1684
# read revision objects
1685
_make_revision_objects,
1686
# filter on log messages
1687
_make_search_filter,
1688
# generate deltas for things we will show