    :param show_diff: If True, output a diff after each revision.
        if getattr(lf, 'begin_log', None):
        _show_log(branch, lf, specific_fileid, verbose, direction,
                  start_revision, end_revision, search, limit, show_diff)
        if getattr(lf, 'end_log', None):


def _show_log(branch,
              specific_fileid=None,
    """Worker function for show_log - see show_log."""
    if not isinstance(lf, LogFormatter):
        warn("not a LogFormatter instance: %r" % lf)
    trace.mutter('get log for file_id %r', specific_fileid)

    # Consult the LogFormatter about what it needs and can handle
    levels_to_display = lf.get_levels()
    generate_merge_revisions = levels_to_display != 1
    allow_single_merge_revision = True
    if not getattr(lf, 'supports_merge_revisions', False):
        allow_single_merge_revision = getattr(lf,
            'supports_single_merge_revision', False)
    generate_tags = getattr(lf, 'supports_tags', False)
    if generate_tags and branch.supports_tags():
        rev_tag_dict = branch.tags.get_reverse_tag_dict()
    generate_delta = verbose and getattr(lf, 'supports_delta', False)
    generate_diff = show_diff and getattr(lf, 'supports_diff', False)

    # Find and print the interesting revisions
    repo = branch.repository
    revision_iterator = _create_log_revision_iterator(branch,
        start_revision, end_revision, direction, specific_fileid, search,
        generate_merge_revisions, allow_single_merge_revision,
        generate_delta, limited_output=limit > 0)
    for revs in revision_iterator:
        for (rev_id, revno, merge_depth), rev, delta in revs:
            # Note: 0 levels means show everything; merge_depth counts from 0
            if levels_to_display != 0 and merge_depth >= levels_to_display:
            diff = _format_diff(repo, rev, rev_id, specific_fileid)
            lr = LogRevision(rev, revno, merge_depth, delta,
                             rev_tag_dict.get(rev_id), diff)

    # Convert old-style parameters to new-style parameters
    if specific_fileid is not None:
        file_ids = [specific_fileid]
        delta_type = 'partial'
        diff_type = 'partial'

    # Build the request and execute it
    rqst = make_log_request_dict(direction=direction, specific_fileids=file_ids,
        start_revision=start_revision, end_revision=end_revision,
        limit=limit, message_search=search,
        delta_type=delta_type, diff_type=diff_type)
    Logger(branch, rqst).show(lf)
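

# --- Editor's example (illustrative sketch, not part of bzrlib): the public
# show_log() entry point above is normally driven with an open Branch and a
# LogFormatter. The branch path used here is hypothetical.
def _example_show_simple_log(branch_location='my-branch'):
    import sys
    from bzrlib.branch import Branch

    b = Branch.open(branch_location)
    lf = LongLogFormatter(to_file=sys.stdout)
    # verbose=True asks for per-revision deltas; they are only generated when
    # the formatter advertises supports_delta (see the capability checks above).
    show_log(b, lf, verbose=True, direction='reverse', limit=10)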
# Note: This needs to be kept in sync with the defaults in
# make_log_request_dict() below
_DEFAULT_REQUEST_PARAMS = {
    'direction': 'reverse',
    'generate_tags': True,
    'exclude_common_ancestry': False,
    '_match_using_deltas': True,


def make_log_request_dict(direction='reverse', specific_fileids=None,
                          start_revision=None, end_revision=None, limit=None,
                          message_search=None, levels=1, generate_tags=True,
                          diff_type=None, _match_using_deltas=True,
                          exclude_common_ancestry=False,
    """Convenience function for making a logging request dictionary.

    Using this function may make code slightly safer by ensuring
    parameters have the correct names. It also provides a reference
    point for documenting the supported parameters.

    :param direction: 'reverse' (default) is latest to earliest;
      'forward' is earliest to latest.

    :param specific_fileids: If not None, only include revisions
      affecting the specified files, rather than all revisions.

    :param start_revision: If not None, only generate
      revisions >= start_revision

    :param end_revision: If not None, only generate
      revisions <= end_revision

    :param limit: If set, generate only 'limit' revisions; all revisions
      are shown if None or 0.

    :param message_search: If not None, only include revisions with
      matching commit messages

    :param levels: the number of levels of revisions to
      generate; 1 for just the mainline; 0 for all levels.

    :param generate_tags: If True, include tags for matched revisions.

    :param delta_type: Either 'full', 'partial' or None.
      'full' means generate the complete delta - adds/deletes/modifies/etc;
      'partial' means filter the delta using specific_fileids;
      None means do not generate any delta.

    :param diff_type: Either 'full', 'partial' or None.
      'full' means generate the complete diff - adds/deletes/modifies/etc;
      'partial' means filter the diff using specific_fileids;
      None means do not generate any diff.

    :param _match_using_deltas: a private parameter controlling the
      algorithm used for matching specific_fileids. This parameter
      may be removed in the future so bzrlib client code should NOT
      use it.

    :param exclude_common_ancestry: Whether -rX..Y should be interpreted as a
      range operator or as a graph difference.
    """
        'direction': direction,
        'specific_fileids': specific_fileids,
        'start_revision': start_revision,
        'end_revision': end_revision,
        'message_search': message_search,
        'generate_tags': generate_tags,
        'delta_type': delta_type,
        'diff_type': diff_type,
        'exclude_common_ancestry': exclude_common_ancestry,
        # Add 'private' attributes for features that may be deprecated
        '_match_using_deltas': _match_using_deltas,
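

# --- Editor's example (illustrative, not part of bzrlib): building a request
# dictionary directly and feeding it to Logger. The file id shown is a
# hypothetical placeholder.
def _example_log_one_file(branch, to_file):
    lf = LongLogFormatter(to_file=to_file)
    rqst = make_log_request_dict(
        direction='reverse',
        specific_fileids=['hello.txt-20091001-abcdef-1'],  # hypothetical id
        limit=5,
        delta_type='partial',   # only report changes touching specific_fileids
        diff_type=None)         # skip diff generation entirely
    Logger(branch, rqst).show(lf)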
def _apply_log_request_defaults(rqst):
    """Apply default values to a request dictionary."""
    result = _DEFAULT_REQUEST_PARAMS


class LogGenerator(object):
    """A generator of log revisions."""

    def iter_log_revisions(self):
        """Iterate over LogRevision objects.

        :return: An iterator yielding LogRevision objects.
        """
        raise NotImplementedError(self.iter_log_revisions)


class Logger(object):
    """An object that generates, formats and displays a log."""

    def __init__(self, branch, rqst):
        :param branch: the branch to log
        :param rqst: A dictionary specifying the query parameters.
          See make_log_request_dict() for supported values.
        self.rqst = _apply_log_request_defaults(rqst)

        :param lf: The LogFormatter object to send the output to.
        if not isinstance(lf, LogFormatter):
            warn("not a LogFormatter instance: %r" % lf)
        self.branch.lock_read()
            if getattr(lf, 'begin_log', None):
            if getattr(lf, 'end_log', None):

    def _show_body(self, lf):
        """Show the main log output.

        Subclasses may wish to override this.
        """
        # Tweak the LogRequest based on what the LogFormatter can handle.
        # (There's no point generating stuff if the formatter can't display it.)
        rqst['levels'] = lf.get_levels()
        if not getattr(lf, 'supports_tags', False):
            rqst['generate_tags'] = False
        if not getattr(lf, 'supports_delta', False):
            rqst['delta_type'] = None
        if not getattr(lf, 'supports_diff', False):
            rqst['diff_type'] = None

        # Find and print the interesting revisions
        generator = self._generator_factory(self.branch, rqst)
        for lr in generator.iter_log_revisions():
            lf.log_revision(lr)
            if log_count >= limit:


def _format_diff(repo, rev, rev_id, specific_fileid):
    if len(rev.parent_ids) == 0:
        ancestor_id = _mod_revision.NULL_REVISION
        ancestor_id = rev.parent_ids[0]
    tree_1 = repo.revision_tree(ancestor_id)
    tree_2 = repo.revision_tree(rev_id)
        specific_files = [tree_2.id2path(specific_fileid)]
        specific_files = None
    diff.show_diff_trees(tree_1, tree_2, s, specific_files, old_label='',

    def _generator_factory(self, branch, rqst):
        """Make the LogGenerator object to use.

        Subclasses may wish to override this.
        """
        return _DefaultLogGenerator(branch, rqst)
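

# --- Editor's sketch (not part of bzrlib): _generator_factory() is the hook a
# Logger subclass can override to plug in its own LogGenerator. The filtering
# below is purely illustrative.
class _ExampleMergesOnlyGenerator(LogGenerator):
    """Yield only the revisions that are merges (illustrative only)."""

    def __init__(self, branch, rqst):
        self._delegate = _DefaultLogGenerator(branch, rqst)

    def iter_log_revisions(self):
        for lr in self._delegate.iter_log_revisions():
            # A revision with more than one parent is a merge commit.
            if len(lr.rev.parent_ids) > 1:
                yield lr


class _ExampleMergesOnlyLogger(Logger):
    """A Logger wired up to the illustrative generator above."""

    def _generator_factory(self, branch, rqst):
        return _ExampleMergesOnlyGenerator(branch, rqst)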
class _StartNotLinearAncestor(Exception):
    """Raised when a start revision is not found walking left-hand history."""


def _create_log_revision_iterator(branch, start_revision, end_revision,
    direction, specific_fileid, search, generate_merge_revisions,
    allow_single_merge_revision, generate_delta, limited_output=False):
    """Create a revision iterator for log.

    :param branch: The branch being logged.
    :param start_revision: If not None, only show revisions >= start_revision
    :param end_revision: If not None, only show revisions <= end_revision
    :param direction: 'reverse' (default) is latest to earliest; 'forward' is
    :param specific_fileid: If not None, list only the commits affecting the
    :param search: If not None, only show revisions with matching commit
    :param generate_merge_revisions: If False, show only mainline revisions.
    :param allow_single_merge_revision: If True, logging of a single
      revision off the mainline is to be allowed
    :param generate_delta: Whether to generate a delta for each revision.
    :param limited_output: if True, the user only wants a limited result
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
    """
    start_rev_id, end_rev_id = _get_revision_limits(branch, start_revision,

    # Decide how file-ids are matched: delta-filtering vs per-file graph.
    # Delta filtering allows revisions to be displayed incrementally
    # though the total time is much slower for huge repositories: log -v
    # is the *lower* performance bound. At least until the split
    # inventory format arrives, per-file-graph needs to remain the
    # default except in verbose mode. Delta filtering should give more
    # accurate results (e.g. inclusion of FILE deletions) so arguably
    # it should always be used in the future.
    use_deltas_for_matching = specific_fileid and generate_delta
    delayed_graph_generation = not specific_fileid and (
        start_rev_id or end_rev_id or limited_output)
    generate_merges = generate_merge_revisions or (specific_fileid and
        not use_deltas_for_matching)
    view_revisions = _calc_view_revisions(branch, start_rev_id, end_rev_id,
        direction, generate_merges, allow_single_merge_revision,
        delayed_graph_generation=delayed_graph_generation)
    search_deltas_for_fileids = None
    if use_deltas_for_matching:
        search_deltas_for_fileids = set([specific_fileid])
    elif specific_fileid:
class _DefaultLogGenerator(LogGenerator):
    """The default generator of log revisions."""

    def __init__(self, branch, rqst):
        if rqst.get('generate_tags') and branch.supports_tags():
            self.rev_tag_dict = branch.tags.get_reverse_tag_dict()
            self.rev_tag_dict = {}

    def iter_log_revisions(self):
        """Iterate over LogRevision objects.

        :return: An iterator yielding LogRevision objects.
        """
        levels = rqst.get('levels')
        limit = rqst.get('limit')
        diff_type = rqst.get('diff_type')
        revision_iterator = self._create_log_revision_iterator()
        for revs in revision_iterator:
            for (rev_id, revno, merge_depth), rev, delta in revs:
                # 0 levels means show everything; merge_depth counts from 0
                if levels != 0 and merge_depth >= levels:
                if diff_type is None:
                    diff = self._format_diff(rev, rev_id, diff_type)
                yield LogRevision(rev, revno, merge_depth, delta,
                    self.rev_tag_dict.get(rev_id), diff)
                if log_count >= limit:

    def _format_diff(self, rev, rev_id, diff_type):
        repo = self.branch.repository
        if len(rev.parent_ids) == 0:
            ancestor_id = _mod_revision.NULL_REVISION
            ancestor_id = rev.parent_ids[0]
        tree_1 = repo.revision_tree(ancestor_id)
        tree_2 = repo.revision_tree(rev_id)
        file_ids = self.rqst.get('specific_fileids')
        if diff_type == 'partial' and file_ids is not None:
            specific_files = [tree_2.id2path(id) for id in file_ids]
            specific_files = None
        path_encoding = osutils.get_diff_header_encoding()
        diff.show_diff_trees(tree_1, tree_2, s, specific_files, old_label='',
            new_label='', path_encoding=path_encoding)
    def _create_log_revision_iterator(self):
        """Create a revision iterator for log.

        :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        """
        self.start_rev_id, self.end_rev_id = _get_revision_limits(
            self.branch, self.rqst.get('start_revision'),
            self.rqst.get('end_revision'))
        if self.rqst.get('_match_using_deltas'):
            return self._log_revision_iterator_using_delta_matching()
        # We're using the per-file-graph algorithm. This scales really
        # well but only makes sense if there is a single file and it's
        file_count = len(self.rqst.get('specific_fileids'))
        raise BzrError("illegal LogRequest: must match-using-deltas "
            "when logging %d files" % file_count)
        return self._log_revision_iterator_using_per_file_graph()

    def _log_revision_iterator_using_delta_matching(self):
        # Get the base revisions, filtering by the revision range
        generate_merge_revisions = rqst.get('levels') != 1
        delayed_graph_generation = not rqst.get('specific_fileids') and (
            rqst.get('limit') or self.start_rev_id or self.end_rev_id)
        view_revisions = _calc_view_revisions(
            self.branch, self.start_rev_id, self.end_rev_id,
            rqst.get('direction'),
            generate_merge_revisions=generate_merge_revisions,
            delayed_graph_generation=delayed_graph_generation,
            exclude_common_ancestry=rqst.get('exclude_common_ancestry'))

        # Apply the other filters
        return make_log_rev_iterator(self.branch, view_revisions,
            rqst.get('delta_type'), rqst.get('message_search'),
            file_ids=rqst.get('specific_fileids'),
            direction=rqst.get('direction'))

    def _log_revision_iterator_using_per_file_graph(self):
        # Get the base revisions, filtering by the revision range.
        # Note that we always generate the merge revisions because
        # filter_revisions_touching_file_id() requires them ...
        view_revisions = _calc_view_revisions(
            self.branch, self.start_rev_id, self.end_rev_id,
            rqst.get('direction'), generate_merge_revisions=True,
            exclude_common_ancestry=rqst.get('exclude_common_ancestry'))
        if not isinstance(view_revisions, list):
            view_revisions = list(view_revisions)
        view_revisions = _filter_revisions_touching_file_id(branch,
            specific_fileid, view_revisions,
            include_merges=generate_merge_revisions)
        return make_log_rev_iterator(branch, view_revisions, generate_delta,
            search, file_ids=search_deltas_for_fileids, direction=direction)
        view_revisions = _filter_revisions_touching_file_id(self.branch,
            rqst.get('specific_fileids')[0], view_revisions,
            include_merges=rqst.get('levels') != 1)
        return make_log_rev_iterator(self.branch, view_revisions,
            rqst.get('delta_type'), rqst.get('message_search'))
def _calc_view_revisions(branch, start_rev_id, end_rev_id, direction,
                         generate_merge_revisions, allow_single_merge_revision,
                         delayed_graph_generation=False):
                         generate_merge_revisions,
                         delayed_graph_generation=False,
                         exclude_common_ancestry=False,
    """Calculate the revisions to view.

    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
      a list of the same tuples.
    """
    if (exclude_common_ancestry and start_rev_id == end_rev_id):
        raise errors.BzrCommandError(
            '--exclude-common-ancestry requires two different revisions')
    if direction not in ('reverse', 'forward'):
        raise ValueError('invalid direction %r' % direction)
    br_revno, br_rev_id = branch.last_revision_info()
    if br_revno == 0:

    # If a single revision is requested, check we can handle it
    generate_single_revision = (end_rev_id and start_rev_id == end_rev_id and
        (not generate_merge_revisions or not _has_merges(branch, end_rev_id)))
    if generate_single_revision:
        if end_rev_id == br_rev_id:
            return [(br_rev_id, br_revno, 0)]
        revno = branch.revision_id_to_dotted_revno(end_rev_id)
        if len(revno) > 1 and not allow_single_merge_revision:
            # It's a merge revision and the log formatter is
            # completely brain dead. This "feature" of allowing
            # log formatters incapable of displaying dotted revnos
            # ought to be deprecated IMNSHO. IGC 20091022
            raise errors.BzrCommandError('Selected log formatter only'
                ' supports mainline revisions.')
        revno_str = '.'.join(str(n) for n in revno)
        return [(end_rev_id, revno_str, 0)]

    # If we only want to see linear revisions, we can iterate ...
    if not generate_merge_revisions:
        result = _linear_view_revisions(branch, start_rev_id, end_rev_id)
        # If a start limit was given and it's not obviously an
        # ancestor of the end limit, check it before outputting anything
        if direction == 'forward' or (start_rev_id
            and not _is_obvious_ancestor(branch, start_rev_id, end_rev_id)):
                result = list(result)
            except _StartNotLinearAncestor:
                raise errors.BzrCommandError('Start revision not found in'
                    ' left-hand history of end revision.')
        if direction == 'forward':
            result = reversed(list(result))

    if (end_rev_id and start_rev_id == end_rev_id
        and (not generate_merge_revisions
             or not _has_merges(branch, end_rev_id))):
        # If a single revision is requested, check we can handle it
        iter_revs = _generate_one_revision(branch, end_rev_id, br_rev_id,
    elif not generate_merge_revisions:
        # If we only want to see linear revisions, we can iterate ...
        iter_revs = _generate_flat_revisions(branch, start_rev_id, end_rev_id,
            direction, exclude_common_ancestry)
        if direction == 'forward':
            iter_revs = reversed(iter_revs)
        iter_revs = _generate_all_revisions(branch, start_rev_id, end_rev_id,
            direction, delayed_graph_generation,
            exclude_common_ancestry)
        if direction == 'forward':
            iter_revs = _rebase_merge_depth(reverse_by_depth(list(iter_revs)))
def _generate_one_revision(branch, rev_id, br_rev_id, br_revno):
    if rev_id == br_rev_id:
        return [(br_rev_id, br_revno, 0)]
    revno = branch.revision_id_to_dotted_revno(rev_id)
    revno_str = '.'.join(str(n) for n in revno)
    return [(rev_id, revno_str, 0)]


def _generate_flat_revisions(branch, start_rev_id, end_rev_id, direction,
                             exclude_common_ancestry=False):
    result = _linear_view_revisions(
        branch, start_rev_id, end_rev_id,
        exclude_common_ancestry=exclude_common_ancestry)
    # If a start limit was given and it's not obviously an
    # ancestor of the end limit, check it before outputting anything
    if direction == 'forward' or (start_rev_id
        and not _is_obvious_ancestor(branch, start_rev_id, end_rev_id)):
            result = list(result)
        except _StartNotLinearAncestor:
            raise errors.BzrCommandError('Start revision not found in'
                ' left-hand history of end revision.')
def _generate_all_revisions(branch, start_rev_id, end_rev_id, direction,
                            delayed_graph_generation,
                            exclude_common_ancestry=False):
    # On large trees, generating the merge graph can take 30-60 seconds
    # so we delay doing it until a merge is detected, incrementally
    # returning initial (non-merge) revisions while we can.

    # The above is only true for old formats (<= 0.92); for newer formats, a
    # couple of seconds only should be needed to load the whole graph and the
    # other graph operations needed are even faster than that -- vila 100201
    initial_revisions = []
    if delayed_graph_generation:
            for rev_id, revno, depth in \
                    _linear_view_revisions(branch, start_rev_id, end_rev_id):
            for rev_id, revno, depth in _linear_view_revisions(
                    branch, start_rev_id, end_rev_id, exclude_common_ancestry):
                if _has_merges(branch, rev_id):
                    # The end_rev_id can be nested down somewhere. We need an
                    # explicit ancestry check. There is an ambiguity here as we
                    # may not raise _StartNotLinearAncestor for a revision that
                    # is an ancestor but not a *linear* one. But since we have
                    # loaded the graph to do the check (or calculate a dotted
                    # revno), we may as well show the log... We need
                    # the check only if start_rev_id is not None as all
                    # revisions have _mod_revision.NULL_REVISION as an ancestor
                    graph = branch.repository.get_graph()
                    if (start_rev_id is not None
                        and not graph.is_ancestor(start_rev_id, end_rev_id)):
                        raise _StartNotLinearAncestor()
                    # Since we collected the revisions so far, we need to
                    end_rev_id = rev_id
                initial_revisions.append((rev_id, revno, depth))
                # No merged revisions found
                if direction == 'reverse':
                    return initial_revisions
                elif direction == 'forward':
                    return reversed(initial_revisions)
                raise ValueError('invalid direction %r' % direction)
                return initial_revisions
        except _StartNotLinearAncestor:
            # A merge was never detected so the lower revision limit can't
            # be nested down somewhere
            raise errors.BzrCommandError('Start revision not found in'
                ' history of end revision.')

    # We exit the loop above because we encounter a revision with merges; from
    # this revision on, we need to switch to _graph_view_revisions.

    # A log including nested merges is required. If the direction is reverse,
    # we rebase the initial merge depths so that the development line is
    # shown naturally, i.e. just like it is for linear logging. We can easily
    def short_author(self, rev):
        name, address = config.parse_username(rev.get_apparent_author())
        return self.authors(rev, 'first', short=True, sep=', ')

    def authors(self, rev, who, short=False, sep=None):
        """Generate list of authors, taking --authors option into account.

        The caller has to specify the name of an author list handler,
        as provided by the author list registry, using the ``who``
        argument. That name only sets a default, though: when the
        user has selected a different author list generation using the
        ``--authors`` command line switch, as represented by the
        ``author_list_handler`` constructor argument, that value takes
        precedence.

        :param rev: The revision for which to generate the list of authors.
        :param who: Name of the default handler.
        :param short: Whether to shorten names to either name or address.
        :param sep: What separator to use for automatic concatenation.
        """
        if self._author_list_handler is not None:
            # The user did specify --authors, which overrides the default
            author_list_handler = self._author_list_handler
            # The user didn't specify --authors, so we use the caller's default
            author_list_handler = author_list_registry.get(who)
        names = author_list_handler(rev)
        for i in range(len(names)):
            name, address = config.parse_username(names[i])
        names = sep.join(names)
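
    # --- Editor's sketch (not part of bzrlib): the ``who`` name used above is
    # looked up in author_list_registry, so a plugin can register its own
    # handler and select it via --authors. The handler name below is purely
    # illustrative and assumes the usual Registry.register(key, obj, help)
    # signature; a handler takes a Revision and returns a list of strings.
    #
    #     def _only_committer(rev):
    #         """Return just the committer as the author list."""
    #         return [rev.committer]
    #
    #     author_list_registry.register('committer-only', _only_committer,
    #                                   'The committer only')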
    def merge_marker(self, revision):
        """Get the merge marker to include in the output or '' if none."""
        if len(revision.rev.parent_ids) > 1:
            self._merge_count += 1

    def show_properties(self, revision, indent):
        """Displays the custom properties returned by each registered handler.

        If a registered handler raises an error it is propagated.
        """
        for line in self.custom_properties(revision):
            self.to_file.write("%s%s\n" % (indent, line))

    def custom_properties(self, revision):
        """Format the custom properties returned by each registered handler.

        If a registered handler raises an error it is propagated.

        :return: a list of formatted lines (excluding trailing newlines)
        """
        lines = self._foreign_info_properties(revision)
        for key, handler in properties_handler_registry.iteritems():
            for key, value in handler(revision).items():
                self.to_file.write(indent + key + ': ' + value + '\n')
            lines.extend(self._format_properties(handler(revision)))
    def _foreign_info_properties(self, rev):
        """Custom log displayer for foreign revision identifiers.

        :param rev: Revision object.
        """
        # Revision comes directly from a foreign repository
        if isinstance(rev, foreign.ForeignRevision):
            return self._format_properties(
                rev.mapping.vcs.show_foreign_revid(rev.foreign_revid))

        # Imported foreign revision ids always contain :
        if not ":" in rev.revision_id:

        # Revision was once imported from a foreign repository
            foreign_revid, mapping = \
                foreign.foreign_vcs_registry.parse_revision_id(rev.revision_id)
        except errors.InvalidRevisionId:

        return self._format_properties(
            mapping.vcs.show_foreign_revid(foreign_revid))

    def _format_properties(self, properties):
        for key, value in properties.items():
            lines.append(key + ': ' + value)

    def show_diff(self, to_file, diff, indent):
        for l in diff.rstrip().split('\n'):
            to_file.write(indent + '%s\n' % (l,))
# Separator between revisions in long format
_LONG_SEP = '-' * 60


class LongLogFormatter(LogFormatter):

    supports_merge_revisions = True
    preferred_levels = 1
    supports_delta = True
    supports_tags = True
    supports_diff = True

    def __init__(self, *args, **kwargs):
        super(LongLogFormatter, self).__init__(*args, **kwargs)
        if self.show_timezone == 'original':
            self.date_string = self._date_string_original_timezone
            self.date_string = self._date_string_with_timezone

    def _date_string_with_timezone(self, rev):
        return format_date(rev.timestamp, rev.timezone or 0,

    def _date_string_original_timezone(self, rev):
        return format_date_with_offset_in_original_timezone(rev.timestamp,
    def log_revision(self, revision):
        """Log a revision, either merged or not."""
        indent = ' ' * revision.merge_depth
        to_file = self.to_file
        to_file.write(indent + '-' * 60 + '\n')
        if revision.revno is not None:
            to_file.write(indent + 'revno: %s\n' % (revision.revno,))
            lines.append('revno: %s%s' % (revision.revno,
                self.merge_marker(revision)))
        if revision.tags:
            to_file.write(indent + 'tags: %s\n' % (', '.join(revision.tags)))
            lines.append('tags: %s' % (', '.join(revision.tags)))
        if self.show_ids:
            to_file.write(indent + 'revision-id: ' + revision.rev.revision_id)
            lines.append('revision-id: %s' % (revision.rev.revision_id,))
            for parent_id in revision.rev.parent_ids:
                to_file.write(indent + 'parent: %s\n' % (parent_id,))
                lines.append('parent: %s' % (parent_id,))
        self.show_properties(revision.rev, indent)
        lines.extend(self.custom_properties(revision.rev))

        author = revision.rev.properties.get('author', None)
        if author is not None:
            to_file.write(indent + 'author: %s\n' % (author,))
        to_file.write(indent + 'committer: %s\n' % (revision.rev.committer,))
        committer = revision.rev.committer
        authors = self.authors(revision.rev, 'all')
        if authors != [committer]:
            lines.append('author: %s' % (", ".join(authors),))
        lines.append('committer: %s' % (committer,))

        branch_nick = revision.rev.properties.get('branch-nick', None)
        if branch_nick is not None:
            to_file.write(indent + 'branch nick: %s\n' % (branch_nick,))
            lines.append('branch nick: %s' % (branch_nick,))
        date_str = format_date(revision.rev.timestamp,
                               revision.rev.timezone or 0,
        to_file.write(indent + 'timestamp: %s\n' % (date_str,))
        lines.append('timestamp: %s' % (self.date_string(revision.rev),))
        to_file.write(indent + 'message:\n')
        lines.append('message:')
        if not revision.rev.message:
            to_file.write(indent + ' (no message)\n')
            lines.append(' (no message)')
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                to_file.write(indent + ' %s\n' % (l,))
                lines.append(' %s' % (l,))

        # Dump the output, appending the delta and diff if requested
        to_file = self.to_file
        to_file.write("%s%s\n" % (indent, ('\n' + indent).join(lines)))
        if revision.delta is not None:
            # We don't respect delta_format for compatibility
            revision.delta.show(to_file, self.show_ids, indent=indent,
            # Use the standard status output to display changes
            from bzrlib.delta import report_delta
            report_delta(to_file, revision.delta, short_status=False,
                show_ids=self.show_ids, indent=indent)
        if revision.diff is not None:
            to_file.write(indent + 'diff:\n')
            # Note: we explicitly don't indent the diff (relative to the
            # revision information) so that the output can be fed to patch -p0
            self.show_diff(to_file, revision.diff, indent)
            self.show_diff(self.to_exact_file, revision.diff, indent)
            self.to_exact_file.flush()

    def get_advice_separator(self):
        """Get the text separating the log from the closing advice."""
        return '-' * 60 + '\n'
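

# --- Editor's sketch (not part of bzrlib): a minimal custom formatter. The
# supports_* class attributes are how a formatter advertises what the Logger
# should bother generating for it; anything it does not declare is simply not
# produced. The class below is illustrative only.
class _ExampleOneLineLogFormatter(LogFormatter):
    """Write one line per mainline revision (illustrative only)."""

    supports_merge_revisions = False
    preferred_levels = 1
    supports_delta = False
    supports_tags = False
    supports_diff = False

    def log_revision(self, revision):
        # revision is a LogRevision; rev is the underlying Revision object.
        self.to_file.write('%s %s\n' % (revision.revno,
            revision.rev.get_summary()))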
class ShortLogFormatter(LogFormatter):

        lf.log_revision(lr)
def _get_fileid_to_log(revision, tree, b, fp):
    """Find the file-id to log for a file path in a revision range.

    :param revision: the revision range as parsed on the command line
    :param tree: the working tree, if any
    :param b: the branch
    :param fp: file path
    """


def _get_info_for_log_files(revisionspec_list, file_list, add_cleanup):
    """Find file-ids and kinds given a list of files and a revision range.

    We search for files at the end of the range. If not found there,
    we try the start of the range.

    :param revisionspec_list: revision range as parsed on the command line
    :param file_list: the list of paths given on the command line;
      the first of these can be a branch location or a file path,
      the remainder must be file paths
    :param add_cleanup: When the branch returned is read locked,
      an unlock call will be queued to the cleanup.
    :return: (branch, info_list, start_rev_info, end_rev_info) where
      info_list is a list of (relative_path, file_id, kind) tuples where
      kind is one of the values 'directory', 'file', 'symlink', 'tree-reference'.
      branch will be read-locked.
    """
    if revision is None:
    from builtins import _get_revision_range
    tree, b, path = bzrdir.BzrDir.open_containing_tree_or_branch(file_list[0])
    add_cleanup(b.lock_read().unlock)
    # XXX: It's damn messy converting a list of paths to relative paths when
    # those paths might be deleted ones, they might be on a case-insensitive
    # filesystem and/or they might be in silly locations (like another branch).
    # For example, what should "log bzr://branch/dir/file1 file2" do? (Is
    # file2 implicitly in the same dir as file1 or should its directory be
    # taken from the current tree somehow?) For now, this solves the common
    # case of running log in a nested directory, assuming paths beyond the
    # first one haven't been deleted ...
        relpaths = [path] + tree.safe_relpath_files(file_list[1:])
        relpaths = [path] + file_list[1:]
    start_rev_info, end_rev_info = _get_revision_range(revisionspec_list, b,
    if relpaths in ([], [u'']):
        return b, [], start_rev_info, end_rev_info
    if start_rev_info is None and end_rev_info is None:
        if tree is None:
            tree = b.basis_tree()
            file_id = tree.path2id(fp)
            # go back to when time began
                rev1 = b.get_rev_id(1)
            except errors.NoSuchRevision:
            tree = b.repository.revision_tree(rev1)
            file_id = tree.path2id(fp)
        file_id = tree.path2id(fp)
        kind = _get_kind_for_file_id(tree, file_id)
            # go back to when time began
                rev1 = b.get_rev_id(1)
            except errors.NoSuchRevision:
            tree1 = b.repository.revision_tree(rev1)
            file_id = tree1.path2id(fp)
            kind = _get_kind_for_file_id(tree1, file_id)
        info_list.append((fp, file_id, kind))
    elif len(revision) == 1:
    elif start_rev_info == end_rev_info:
        # One revision given - file must exist in it
        tree = revision[0].as_tree(b)
        file_id = tree.path2id(fp)
        tree = b.repository.revision_tree(end_rev_info.rev_id)
        file_id = tree.path2id(fp)
        kind = _get_kind_for_file_id(tree, file_id)
        info_list.append((fp, file_id, kind))
    elif len(revision) == 2:
        # Revision range given. Get the file-id from the end tree.
        # If that fails, try the start tree.
        rev_id = revision[1].as_revision_id(b)
        rev_id = end_rev_info.rev_id
        if rev_id is None:
            tree = b.basis_tree()
            tree = revision[1].as_tree(b)
        file_id = tree.path2id(fp)
            rev_id = revision[0].as_revision_id(b)
                rev1 = b.get_rev_id(1)
                tree = b.repository.revision_tree(rev1)
                tree = revision[0].as_tree(b)
            tree = b.repository.revision_tree(rev_id)
        file_id = tree.path2id(fp)
        kind = _get_kind_for_file_id(tree, file_id)
            rev_id = start_rev_info.rev_id
                rev1 = b.get_rev_id(1)
                tree1 = b.repository.revision_tree(rev1)
                tree1 = b.repository.revision_tree(rev_id)
            file_id = tree1.path2id(fp)
            kind = _get_kind_for_file_id(tree1, file_id)
            info_list.append((fp, file_id, kind))
    return b, info_list, start_rev_info, end_rev_info
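

# --- Editor's example (illustrative, not part of bzrlib): a hypothetical
# caller resolving command-line paths into file ids before building a log
# request. The collected cleanups (including the branch unlock queued by
# _get_info_for_log_files) are run before returning.
def _example_collect_log_file_ids(revisionspec_list, file_list):
    cleanups = []
    try:
        b, info_list, start_rev_info, end_rev_info = _get_info_for_log_files(
            revisionspec_list, file_list, add_cleanup=cleanups.append)
        # Only keep paths that exist somewhere in the requested range.
        return [file_id for path, file_id, kind in info_list
                if file_id is not None]
    finally:
        for cleanup in reversed(cleanups):
            cleanup()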
def _get_kind_for_file_id(tree, file_id):
    """Return the kind of a file-id or None if it doesn't exist."""
    if file_id is not None:
        return tree.kind(file_id)

    raise errors.BzrCommandError(
        'bzr log --revision takes one or two values.')


properties_handler_registry = registry.Registry()
properties_handler_registry.register_lazy("foreign",
    "show_foreign_properties")


# Use the properties handlers to print out bug information if available
def _bugs_properties_handler(revision):
    if revision.properties.has_key('bugs'):
        bug_lines = revision.properties['bugs'].split('\n')
        bug_rows = [line.split(' ', 1) for line in bug_lines]
        fixed_bug_urls = [row[0] for row in bug_rows if
                          len(row) > 1 and row[1] == 'fixed']
        return {'fixes bug(s)': ' '.join(fixed_bug_urls)}

properties_handler_registry.register('bugs_properties_handler',
                                     _bugs_properties_handler)
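

# --- Editor's sketch (not part of bzrlib): a plugin can surface extra
# per-revision data the same way as the bugs handler above. The property name
# 'ticket' is hypothetical; a handler must accept a Revision and return a
# dict of label -> value (possibly empty).
def _example_ticket_properties_handler(revision):
    if 'ticket' in revision.properties:
        return {'ticket': revision.properties['ticket']}
    return {}

# Registering it would make the long formatter print a "ticket:" line for any
# revision carrying that property:
#   properties_handler_registry.register('example_ticket_handler',
#                                        _example_ticket_properties_handler)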
# adapters which revision ids to log are filtered. When log is called, the