        direction='reverse',
        start_revision=None,
        end_revision=None,
    """Worker function for show_log - see show_log."""
    from bzrlib.osutils import format_date
    from bzrlib.errors import BzrCheckError
    from bzrlib.textui import show_status
    from warnings import warn

    if not isinstance(lf, LogFormatter):
        warn("not a LogFormatter instance: %r" % lf)

    if specific_fileid:
        trace.mutter('get log for file_id %r', specific_fileid)
    if search is not None:
        searchRE = re.compile(search, re.IGNORECASE)

    generate_merge_revisions = getattr(lf, 'supports_merge_revisions', False)
    allow_single_merge_revision = getattr(lf,
        'supports_single_merge_revision', False)
    view_revisions = calculate_view_revisions(branch, start_revision,
                                              end_revision, direction,
                                              generate_merge_revisions,
                                              allow_single_merge_revision)

    generate_tags = getattr(lf, 'supports_tags', False)
    if branch.supports_tags():
        rev_tag_dict = branch.tags.get_reverse_tag_dict()

    generate_delta = verbose and getattr(lf, 'supports_delta', False)

    # now we just print all the revisions
    revision_iterator = make_log_rev_iterator(branch, view_revisions,
                                              generate_delta, search)
    for revs in revision_iterator:
        for (rev_id, revno, merge_depth), rev, delta in revs:
            lr = LogRevision(rev, revno, merge_depth, delta,
                             rev_tag_dict.get(rev_id))
            if log_count >= limit:
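# Illustrative usage sketch (not part of this module): _show_log above is the
# worker behind show_log, so callers normally construct a LogFormatter and let
# show_log drive it.  The exact show_log signature is an assumption here,
# inferred from the parameter names used above.
#
#     lf = LongLogFormatter(to_file=sys.stdout)
#     show_log(branch, lf, verbose=True, direction='reverse', limit=10)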
def calculate_view_revisions(branch, start_revision, end_revision, direction,
                             specific_fileid, generate_merge_revisions,
                             allow_single_merge_revision):
    if (not generate_merge_revisions and start_revision is end_revision is
        None and direction == 'reverse' and specific_fileid is None):
        return _linear_view_revisions(branch)

    mainline_revs, rev_nos, start_rev_id, end_rev_id = \
        _get_mainline_revs(branch, start_revision, end_revision)
    if not mainline_revs:
        return []

    if direction == 'reverse':
        start_rev_id, end_rev_id = end_rev_id, start_rev_id

    generate_single_revision = False
    if ((not generate_merge_revisions)
        and ((start_rev_id and (start_rev_id not in rev_nos))
            or (end_rev_id and (end_rev_id not in rev_nos)))):
        generate_single_revision = ((start_rev_id == end_rev_id)
            and allow_single_merge_revision)
        if not generate_single_revision:
            raise errors.BzrCommandError('Selected log formatter only supports'
                ' mainline revisions.')
        generate_merge_revisions = generate_single_revision
    view_revs_iter = get_view_revisions(mainline_revs, rev_nos, branch,
                          direction, include_merges=generate_merge_revisions)
    view_revisions = _filter_revision_range(list(view_revs_iter),
    if view_revisions and generate_single_revision:
        view_revisions = view_revisions[0:1]
    view_revisions = _filter_revisions_touching_file_id(branch,

    # rebase merge_depth - unless there are no revisions or
    # either the first or last revision has merge_depth = 0.
    if view_revisions and view_revisions[0][2] and view_revisions[-1][2]:
        min_depth = min([d for r, n, d in view_revisions])
        view_revisions = [(r, n, d-min_depth) for r, n, d in view_revisions]
    return view_revisions
def _linear_view_revisions(branch):
    start_revno, start_revision_id = branch.last_revision_info()
    repo = branch.repository
    revision_ids = repo.iter_reverse_revision_history(start_revision_id)
    for num, revision_id in enumerate(revision_ids):
        yield revision_id, str(start_revno - num), 0
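# Worked example (not part of bzrlib): for a branch whose last_revision_info()
# is (3, 'rev-c') with mainline rev-a <- rev-b <- rev-c, _linear_view_revisions
# yields ('rev-c', '3', 0), ('rev-b', '2', 0), ('rev-a', '1', 0) - newest
# first, always at merge depth 0.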
def make_log_rev_iterator(branch, view_revisions, generate_delta, search):
    """Create a revision iterator for log.

    :param branch: The branch being logged.
    :param view_revisions: The revisions being viewed.
    :param generate_delta: Whether to generate a delta for each revision.
    :param search: A user text search string.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    # Convert view_revisions into (view, None, None) groups to fit with
    # the standard interface here.
    if type(view_revisions) == list:
        # A single batch conversion is faster than many incremental ones.
        # As we have all the data, do a batch conversion.
        nones = [None] * len(view_revisions)
        log_rev_iterator = iter([zip(view_revisions, nones, nones)])
    else:
        def _convert():
            for view in view_revisions:
                yield (view, None, None)
        log_rev_iterator = iter([_convert()])
    for adapter in log_adapters:
        log_rev_iterator = adapter(branch, generate_delta, search,
                                   log_rev_iterator)
    return log_rev_iterator
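# Illustrative sketch (not part of bzrlib) of the adapter pipeline built by
# make_log_rev_iterator above: every adapter shares the signature
# (branch, generate_delta, search, log_rev_iterator) and returns a new
# iterator over batches, so additional stages can be chained exactly like the
# entries of log_adapters.  The function name below is hypothetical.
def _example_identity_adapter(branch, generate_delta, search, log_rev_iterator):
    # Pass each batch of ((rev_id, revno, merge_depth), rev, delta) tuples
    # through unchanged; a real adapter would filter or annotate them.
    for revs in log_rev_iterator:
        yield revs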
def _make_search_filter(branch, generate_delta, search, log_rev_iterator):
    """Create a filtered iterator of log_rev_iterator matching on a regex.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
    :param search: A user text search string.
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    if search is None:
        return log_rev_iterator
    # Compile the search now to get early errors.
    searchRE = re.compile(search, re.IGNORECASE)
    return _filter_message_re(searchRE, log_rev_iterator)


def _filter_message_re(searchRE, log_rev_iterator):
    for revs in log_rev_iterator:
        new_revs = []
        for (rev_id, revno, merge_depth), rev, delta in revs:
            if searchRE.search(rev.message):
                new_revs.append(((rev_id, revno, merge_depth), rev, delta))
        yield new_revs
def _make_delta_filter(branch, generate_delta, search, log_rev_iterator):
    """Add revision deltas to a log iterator if needed.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
    :param search: A user text search string.
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    if not generate_delta:
        return log_rev_iterator
    return _generate_deltas(branch.repository, log_rev_iterator)


def _generate_deltas(repository, log_rev_iterator):
    """Create deltas for each batch of revisions in log_rev_iterator."""
    for revs in log_rev_iterator:
        revisions = [rev[1] for rev in revs]
        deltas = repository.get_deltas_for_revisions(revisions)
        revs = [(rev[0], rev[1], delta) for rev, delta in izip(revs, deltas)]
        yield revs
def _make_revision_objects(branch, generate_delta, search, log_rev_iterator):
    """Extract revision objects from the repository.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
    :param search: A user text search string.
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    repository = branch.repository
    for revs in log_rev_iterator:
        # r = revision_id, n = revno, d = merge depth
        revision_ids = [view[0] for view, _, _ in revs]
        revisions = repository.get_revisions(revision_ids)
        revs = [(rev[0], revision, rev[2]) for rev, revision in
            izip(revs, revisions)]
        yield revs
def _make_batch_filter(branch, generate_delta, search, log_rev_iterator):
    """Group up a single large batch into smaller ones.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
    :param search: A user text search string.
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev, delta).
    """
    repository = branch.repository
    for batch in log_rev_iterator:
        step = [detail for _, detail in zip(range(num), batch)]
        num = min(int(num * 1.5), 200)
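# Illustrative sketch (not part of bzrlib): the batching loop above grows the
# chunk size by 1.5x per step and caps it at 200, so the first revisions are
# displayed quickly while later batches stay efficient.  The starting size
# used here is an assumption for the example.
def _example_batch_sizes(start=9, steps=8):
    sizes = []
    num = start
    for _ in range(steps):
        sizes.append(num)
        num = min(int(num * 1.5), 200)
    return sizes  # [9, 13, 19, 28, 42, 63, 94, 141] for the defaults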
def _get_mainline_revs(branch, start_revision, end_revision):
    """Get the mainline revisions from the branch.

    Generates the list of mainline revisions for the branch.

    :param branch: The branch containing the revisions.
    :param start_revision: The first revision to be logged.
        For backwards compatibility this may be a mainline integer revno,
        but for merge revision support a RevisionInfo is expected.
    :param end_revision: The last revision to be logged.
        For backwards compatibility this may be a mainline integer revno,
        but for merge revision support a RevisionInfo is expected.
    :return: A (mainline_revs, rev_nos, start_rev_id, end_rev_id) tuple.
    """
    branch_revno, branch_last_revision = branch.last_revision_info()
    if branch_revno == 0:
        return None, None, None, None

    # For mainline generation, map start_revision and end_revision to
    # mainline revnos. If the revision is not on the mainline choose the
    # appropriate extreme of the mainline instead - the extra will be
    # Also map the revisions to rev_ids, to be used in the later filtering
    if start_revision is None:
        branch.check_real_revno(start_revision)
    if isinstance(start_revision, revisionspec.RevisionInfo):
        start_rev_id = start_revision.rev_id
        start_revno = start_revision.revno or 1
        branch.check_real_revno(start_revision)
        start_revno = start_revision

    if end_revision is None:
        end_revision = len(which_revs)
        branch.check_real_revno(end_revision)
        # list indexes are 0-based; revisions are 1-based
        cut_revs = which_revs[(start_revision-1):(end_revision)]
        end_revno = branch_revno
    if isinstance(end_revision, revisionspec.RevisionInfo):
        end_rev_id = end_revision.rev_id
        end_revno = end_revision.revno or branch_revno
        branch.check_real_revno(end_revision)
        end_revno = end_revision

    if ((start_rev_id == _mod_revision.NULL_REVISION)
        or (end_rev_id == _mod_revision.NULL_REVISION)):
        raise errors.BzrCommandError('Logging revision 0 is invalid.')
    if start_revno > end_revno:
        raise errors.BzrCommandError("Start revision must be older than "
    if end_revno < start_revno:
        return None, None, None, None

    cur_revno = branch_revno
    for revision_id in branch.repository.iter_reverse_revision_history(
            branch_last_revision):
        if cur_revno < start_revno:
            # We have gone far enough, but we always add 1 more revision
            rev_nos[revision_id] = cur_revno
            mainline_revs.append(revision_id)
        if cur_revno <= end_revno:
            rev_nos[revision_id] = cur_revno
            mainline_revs.append(revision_id)
    # We walked off the edge of all revisions, so we add a 'None' marker
    mainline_revs.append(None)

    mainline_revs.reverse()

    # override the mainline to look like the revision history.
    mainline_revs = [revision_id for index, revision_id in cut_revs]
    if cut_revs[0][0] == 1:
        mainline_revs.insert(0, None)
        mainline_revs.insert(0, which_revs[start_revision-2][1])
    merge_sorted_revisions = merge_sort(
        branch.repository.get_revision_graph(mainline_revs[-1]),
    return mainline_revs, rev_nos, start_rev_id, end_rev_id
def _filter_revision_range(view_revisions, start_rev_id, end_rev_id):
    """Filter view_revisions based on revision ranges.

    :param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
        tuples to be filtered.
    :param start_rev_id: If not None, specifies the first revision to be logged.
        If None, then all revisions up to the end_rev_id are logged.
    :param end_rev_id: If not None, specifies the last revision to be logged.
        If None, then all revisions up to the end of the log are logged.
    :return: The filtered view_revisions.
    """
    if start_rev_id or end_rev_id:
        revision_ids = [r for r, n, d in view_revisions]
        start_index = revision_ids.index(start_rev_id)
        if start_rev_id == end_rev_id:
            end_index = start_index
            end_index = revision_ids.index(end_rev_id)
            end_index = len(view_revisions) - 1
        # To include the revisions merged into the last revision,
        # extend end_rev_id down to, but not including, the next rev
        # with the same or lesser merge_depth
        end_merge_depth = view_revisions[end_index][2]
        for index in xrange(end_index+1, len(view_revisions)+1):
            if view_revisions[index][2] <= end_merge_depth:
                end_index = index - 1
        # if the search falls off the end then log to the end as well
        end_index = len(view_revisions) - 1
        view_revisions = view_revisions[start_index:end_index+1]
    return view_revisions
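# Worked example (not part of bzrlib): given view_revisions
#     [(r4, '4', 0), (r3, '3', 0), (m1, '2.1.1', 1), (r2, '2', 0), (r1, '1', 0)]
# a call with start_rev_id=r4 and end_rev_id=r3 keeps
#     [(r4, '4', 0), (r3, '3', 0), (m1, '2.1.1', 1)]
# because the end index is extended past every following revision whose
# merge_depth is greater than that of the end revision.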
def _filter_revisions_touching_file_id(branch, file_id, view_revisions,
                                       direction):
    r"""Return the list of revision ids which touch a given file id.

    The function filters view_revisions and returns a subset.
    This includes the revisions which directly change the file id,
    and the revisions which merge these changes. So if the
    And 'C' changes a file, then both C and D will be returned. F will not be
    returned even though it brings the changes to C into the branch starting
    with E. (Note that if we were using F as the tip instead of G, then we

    This will also be restricted based on a subset of the mainline.

    :param branch: The branch where we can get text revision information.
    :param file_id: Filter out revisions that do not touch file_id.
    :param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
        tuples. This is the list of revisions which will be filtered. It is
        assumed that view_revisions is in merge_sort order (either forward or
    :param direction: The direction of view_revisions. See also
        reverse_by_depth, and get_view_revisions
    :return: A list of (revision_id, dotted_revno, merge_depth) tuples.
    """
    # Lookup all possible text keys to determine which ones actually modified
    text_keys = [(file_id, rev_id) for rev_id, revno, depth in view_revisions]
    # Looking up keys in batches of 1000 can cut the time in half, as well as
    # memory consumption. GraphIndex *does* like to look for a few keys in
    # parallel, it just doesn't like looking for *lots* of keys in parallel.
    # TODO: This code needs to be re-evaluated periodically as we tune the
    #       indexing layer. We might consider passing in hints as to the known
    #       access pattern (sparse/clustered, high success rate/low success
    #       rate). This particular access is clustered with a low success rate.
    get_parent_map = branch.repository.texts.get_parent_map
    modified_text_revisions = set()
    chunk_size = 1000
    for start in xrange(0, len(text_keys), chunk_size):
        next_keys = text_keys[start:start + chunk_size]
        # Only keep the revision_id portion of the key
        modified_text_revisions.update(
            [k[1] for k in get_parent_map(next_keys)])
    del text_keys, next_keys

    if direction == 'forward':
        # TODO: The algorithm for finding 'merges' of file changes expects
        #       'reverse' order (the default from 'merge_sort()'). Instead of
        #       forcing this, we could just use the reverse_by_depth order.
        view_revisions = reverse_by_depth(view_revisions)
    # Track what revisions will merge the current revision, replace entries
    # with 'None' when they have been added to result
    current_merge_stack = [None]
    for info in view_revisions:
        rev_id, revno, depth = info
        if depth == len(current_merge_stack):
            current_merge_stack.append(info)
        else:
            del current_merge_stack[depth + 1:]
            current_merge_stack[-1] = info

        if rev_id in modified_text_revisions:
            # This needs to be logged, along with the extra revisions
            for idx in xrange(len(current_merge_stack)):
                node = current_merge_stack[idx]
                current_merge_stack[idx] = None
    if direction == 'forward':
        result = reverse_by_depth(result)
    return result
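# Illustrative sketch (not part of bzrlib) of the chunked lookup used above:
# text_keys is walked in fixed-size slices so that each get_parent_map call
# only asks for a bounded number of keys at once.
def _example_chunks(keys, chunk_size=1000):
    for start in xrange(0, len(keys), chunk_size):
        yield keys[start:start + chunk_size]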
def get_view_revisions(mainline_revs, rev_nos, branch, direction,
                       include_merges=True):
    """Produce an iterator of revisions to show
    :return: an iterator of (revision_id, revno, merge_depth)
    (if there is no revno for a revision, None is supplied)
    """
    if include_merges is False:
        revision_ids = mainline_revs[1:]
        if direction == 'reverse':
            revision_ids.reverse()
        for revision_id in revision_ids:
            yield revision_id, str(rev_nos[revision_id]), 0

    graph = branch.repository.get_graph()
    # This asks for all mainline revisions, which means we only have to spider
    # sideways, rather than depth history. That said, it's still size-of-history
    # and should be addressed.
    # mainline_revisions always includes an extra revision at the beginning, so
    parent_map = dict(((key, value) for key, value in
        graph.iter_ancestry(mainline_revs[1:]) if value is not None))
    # filter out ghosts; merge_sort errors on ghosts.
    rev_graph = _mod_repository._strip_NULL_ghosts(parent_map)
    merge_sorted_revisions = tsort.merge_sort(
        mainline_revs[-1],
    if direction == 'forward':
        # forward means oldest first.
        merge_sorted_revisions = reverse_by_depth(merge_sorted_revisions)
    elif direction != 'reverse':
        raise ValueError('invalid direction %r' % direction)

    for sequence, rev_id, merge_depth, revno, end_of_merge in merge_sorted_revisions:
        yield rev_id, '.'.join(map(str, revno)), merge_depth


    if direction == 'reverse':
    elif direction == 'forward':
        # forward means oldest first.
        merge_sorted_revisions.reverse()

    revision_history = branch.revision_history()
    # convert the revision history to a dictionary:
    for index, rev_id in cut_revs:
        rev_nos[rev_id] = index

    # now we just print all the revisions
    for sequence, rev_id, merge_depth, end_of_merge in merge_sorted_revisions:
        rev = branch.repository.get_revision(rev_id)

        if not searchRE.search(rev.message):

        # a mainline revision.
        if verbose or specific_fileid:
            delta = _get_revision_delta(branch, rev_nos[rev_id])
            if not delta.touches_file_id(specific_fileid):
                # although we calculated it, throw it away without display
            lf.show(rev_nos[rev_id], rev, delta)
        elif hasattr(lf, 'show_merge'):
            lf.show_merge(rev, merge_depth)


def deltas_for_log_dummy(branch, which_revs):
    """Return all the revisions without intermediate deltas.

    Useful for log commands that won't need the delta information.
    """
    for revno, revision_id in which_revs:
        yield revno, branch.get_revision(revision_id), None


def deltas_for_log_reverse(branch, which_revs):
    """Compute deltas for display in latest-to-earliest order.

    Sequence of (revno, revision_id) for the subset of history to examine
    Sequence of (revno, rev, delta)

    The delta is from the given revision to the next one in the
    sequence, which makes sense if the log is being displayed from
    """
    last_revno = last_revision_id = last_tree = None
    for revno, revision_id in which_revs:
        this_tree = branch.revision_tree(revision_id)
        this_revision = branch.get_revision(revision_id)
        yield last_revno, last_revision, compare_trees(this_tree, last_tree, False)
        this_tree = EmptyTree(branch.get_root_id())
        last_revision = this_revision
        last_tree = this_tree

    this_tree = EmptyTree(branch.get_root_id())
    this_revno = last_revno - 1
    this_revision_id = branch.revision_history()[this_revno]
    this_tree = branch.revision_tree(this_revision_id)
    yield last_revno, last_revision, compare_trees(this_tree, last_tree, False)


def deltas_for_log_forward(branch, which_revs):
    """Compute deltas for display in forward log.

    Given a sequence of (revno, revision_id) pairs, return
    The delta is from the given revision to the next one in the
    sequence, which makes sense if the log is being displayed from
    """
    last_revno = last_revision_id = last_tree = None
    prev_tree = EmptyTree(branch.get_root_id())

    for revno, revision_id in which_revs:
        this_tree = branch.revision_tree(revision_id)
        this_revision = branch.get_revision(revision_id)
        last_tree = EmptyTree(branch.get_root_id())
        last_revno = revno - 1
        last_revision_id = branch.revision_history()[last_revno]
        last_tree = branch.revision_tree(last_revision_id)
        yield revno, this_revision, compare_trees(last_tree, this_tree, False)

        last_revision = this_revision
        last_tree = this_tree


def reverse_by_depth(merge_sorted_revisions, _depth=0):
    """Reverse revisions by depth.

    Revisions with a different depth are sorted as a group with the previous
    revision of that depth. There may be no topological justification for this,
    but it looks much nicer.
    """
    zd_revisions = []
    for val in merge_sorted_revisions:
        if val[2] == _depth:
            zd_revisions.append([val])
        else:
            zd_revisions[-1].append(val)
    for revisions in zd_revisions:
        if len(revisions) > 1:
            revisions[1:] = reverse_by_depth(revisions[1:], _depth + 1)
    zd_revisions.reverse()
    result = []
    for chunk in zd_revisions:
        result.extend(chunk)
    return result


class LogRevision(object):
    """A revision to be logged (by LogFormatter.log_revision).

    A simple wrapper for the attributes of a revision to be logged.
    The attributes may or may not be populated, as determined by the
    logging options and the log formatter capabilities.
    """

    def __init__(self, rev=None, revno=None, merge_depth=0, delta=None,
                 tags=None):
        self.merge_depth = merge_depth
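# Worked example (not part of bzrlib) for reverse_by_depth above: merged
# revisions stay grouped behind the mainline revision that merged them while
# the top-level ordering is reversed.
#   input:  [(rev3, '3', 0), (rev2, '2', 0), (rev2_1_1, '1.1.1', 1), (rev1, '1', 0)]
#   output: [(rev1, '1', 0), (rev2, '2', 0), (rev2_1_1, '1.1.1', 1), (rev3, '3', 0)]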
class LogFormatter(object):
    """Abstract class to display log messages.

    At a minimum, a derived class must implement the log_revision method.

    If the LogFormatter needs to be informed of the beginning or end of
    a log it should implement the begin_log and/or end_log hook methods.

    A LogFormatter should define the following supports_XXX flags
    to indicate which LogRevision attributes it supports:

    - supports_delta must be True if this log formatter supports delta.
      Otherwise the delta attribute may not be populated.
    - supports_merge_revisions must be True if this log formatter supports
      merge revisions. If not, and if supports_single_merge_revision is
      also not True, then only mainline revisions will be passed to the
    - supports_single_merge_revision must be True if this log formatter
      supports logging only a single merge revision. This flag is
      only relevant if supports_merge_revisions is not True.
    - supports_tags must be True if this log formatter supports tags.
      Otherwise the tags attribute may not be populated.

    Plugins can register functions to show custom revision properties using
    the properties_handler_registry. The registered function
    must respect the following interface description:
        def my_show_properties(properties_dict):
            # code that returns a dict {'name':'value'} of the properties
    """

    def __init__(self, to_file, show_ids=False, show_timezone='original'):
        self.to_file = to_file
        self.show_ids = show_ids
        self.show_timezone = show_timezone

    def show(self, revno, rev, delta):
        raise NotImplementedError('not implemented in abstract base')

    # TODO: uncomment this block after show() has been removed.
    # Until then defining log_revision would prevent _show_log calling show()
    # in legacy formatters.
    # def log_revision(self, revision):
    #     :param revision: The LogRevision to be logged.
    #     raise NotImplementedError('not implemented in abstract base')

    def short_committer(self, rev):
        name, address = config.parse_username(rev.committer)
        return re.sub('<.*@.*>', '', rev.committer).strip(' ')

    def short_author(self, rev):
        name, address = config.parse_username(rev.get_apparent_author())

    def show_properties(self, revision, indent):
        """Displays the custom properties returned by each registered handler.

        If a registered handler raises an error it is propagated.
        """
        for key, handler in properties_handler_registry.iteritems():
            for key, value in handler(revision).items():
                self.to_file.write(indent + key + ': ' + value + '\n')
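# Illustrative sketch (not part of bzrlib) of a custom revision-properties
# handler following the my_show_properties interface documented in
# LogFormatter above: it returns a {'name': 'value'} dict of extra lines to
# display.  (Note that show_properties above invokes each handler with the
# revision object itself, so the exact argument a handler receives may differ
# from this sketch.)
def _example_show_reviewer(properties_dict):
    reviewer = properties_dict.get('reviewer')
    if reviewer:
        return {'reviewed by': reviewer}
    return {}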
class LongLogFormatter(LogFormatter):

    def show(self, revno, rev, delta):
        return self._show_helper(revno=revno, rev=rev, delta=delta)

    def show_merge(self, rev, merge_depth):
        return self._show_helper(rev=rev, indent=' '*merge_depth, merged=True, delta=None)

    def _show_helper(self, rev=None, revno=None, indent='', merged=False, delta=None):
        """Show a revision, either merged or not."""
        from bzrlib.osutils import format_date
        to_file = self.to_file
        print >>to_file, indent+'-' * 60
        if revno is not None:
            print >>to_file, 'revno:', revno
        print >>to_file, indent+'merged:', rev.revision_id
        print >>to_file, indent+'revision-id:', rev.revision_id
        if self.show_ids:
            for parent_id in rev.parent_ids:
                print >>to_file, indent+'parent:', parent_id
        print >>to_file, indent+'committer:', rev.committer
        print >>to_file, indent+'branch nick: %s' % \
            rev.properties['branch-nick']
        date_str = format_date(rev.timestamp,
                               self.show_timezone)
        print >>to_file, indent+'timestamp: %s' % date_str
        print >>to_file, indent+'message:'
        print >>to_file, indent+' (no message)'
        message = rev.message.rstrip('\r\n')
        for l in message.split('\n'):
            print >>to_file, indent+' ' + l
        delta.show(to_file, self.show_ids)

    supports_merge_revisions = True
    supports_delta = True

    def log_revision(self, revision):
        """Log a revision, either merged or not."""
        indent = ' ' * revision.merge_depth
        to_file = self.to_file
        to_file.write(indent + '-' * 60 + '\n')
        if revision.revno is not None:
            to_file.write(indent + 'revno: %s\n' % (revision.revno,))
        to_file.write(indent + 'tags: %s\n' % (', '.join(revision.tags)))
        if self.show_ids:
            to_file.write(indent + 'revision-id: ' + revision.rev.revision_id)
            for parent_id in revision.rev.parent_ids:
                to_file.write(indent + 'parent: %s\n' % (parent_id,))
        self.show_properties(revision.rev, indent)
        author = revision.rev.properties.get('author', None)
        if author is not None:
            to_file.write(indent + 'author: %s\n' % (author,))
        to_file.write(indent + 'committer: %s\n' % (revision.rev.committer,))
        branch_nick = revision.rev.properties.get('branch-nick', None)
        if branch_nick is not None:
            to_file.write(indent + 'branch nick: %s\n' % (branch_nick,))
        date_str = format_date(revision.rev.timestamp,
                               revision.rev.timezone or 0,
                               self.show_timezone)
        to_file.write(indent + 'timestamp: %s\n' % (date_str,))
        to_file.write(indent + 'message:\n')
        if not revision.rev.message:
            to_file.write(indent + ' (no message)\n')
        message = revision.rev.message.rstrip('\r\n')
        for l in message.split('\n'):
            to_file.write(indent + ' %s\n' % (l,))
        if revision.delta is not None:
            revision.delta.show(to_file, self.show_ids, indent=indent)
class ShortLogFormatter(LogFormatter):

    def show(self, revno, rev, delta):
        from bzrlib.osutils import format_date

        to_file = self.to_file
        date_str = format_date(rev.timestamp, rev.timezone or 0,
        print >>to_file, "%5d %s\t%s" % (revno, self.short_committer(rev),
            format_date(rev.timestamp, rev.timezone or 0,
                        self.show_timezone, date_fmt="%Y-%m-%d",
        if self.show_ids:
            print >>to_file, ' revision-id:', rev.revision_id
            print >>to_file, ' (no message)'
        message = rev.message.rstrip('\r\n')
        for l in message.split('\n'):
            print >>to_file, ' ' + l

        # TODO: Why not show the modified files in a shorter form as
        #       well? rewrap them as single lines of appropriate length
        delta.show(to_file, self.show_ids)

    supports_delta = True
    supports_single_merge_revision = True

    def log_revision(self, revision):
        to_file = self.to_file
        is_merge = ''
        if len(revision.rev.parent_ids) > 1:
            is_merge = ' [merge]'
        to_file.write("%5s %s\t%s%s\n" % (revision.revno,
                self.short_author(revision.rev),
                format_date(revision.rev.timestamp,
                            revision.rev.timezone or 0,
                            self.show_timezone, date_fmt="%Y-%m-%d",
        if self.show_ids:
            to_file.write(' revision-id:%s\n' % (revision.rev.revision_id,))
        if not revision.rev.message:
            to_file.write(' (no message)\n')
        message = revision.rev.message.rstrip('\r\n')
        for l in message.split('\n'):
            to_file.write(' %s\n' % (l,))

        # TODO: Why not show the modified files in a shorter form as
        #       well? rewrap them as single lines of appropriate length
        if revision.delta is not None:
            revision.delta.show(to_file, self.show_ids)
class LineLogFormatter(LogFormatter):

    supports_single_merge_revision = True

    def __init__(self, *args, **kwargs):
        super(LineLogFormatter, self).__init__(*args, **kwargs)
        self._max_chars = terminal_width() - 1

    def truncate(self, str, max_len):
        if len(str) <= max_len:
            return str
        return str[:max_len-3]+'...'

    def date_string(self, rev):
        from bzrlib.osutils import format_date
        return format_date(rev.timestamp, rev.timezone or 0,
                           self.show_timezone, date_fmt="%Y-%m-%d",
                           show_offset=False)