    :param show_diff: If True, output a diff after each revision.

    # Convert old-style parameters to new-style parameters
    if specific_fileid is not None:
        file_ids = [specific_fileid]
        delta_type = 'partial'
        diff_type = 'partial'

    # Build the request and execute it
    rqst = make_log_request_dict(direction=direction, specific_fileids=file_ids,
        start_revision=start_revision, end_revision=end_revision,
        limit=limit, message_search=search,
        delta_type=delta_type, diff_type=diff_type)
    Logger(branch, rqst).show(lf)
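# A rough usage sketch (not part of bzrlib itself): callers can skip the
# old-style keyword arguments above and drive the request-based API directly.
# The `branch`, `lf` and `some_file_id` names below are placeholders supplied
# by the caller.
#
#   rqst = make_log_request_dict(direction='reverse', limit=10,
#       specific_fileids=[some_file_id], delta_type='partial')
#   Logger(branch, rqst).show(lf)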
# Note: This needs to be kept in sync with the defaults in
# make_log_request_dict() below
_DEFAULT_REQUEST_PARAMS = {
    'direction': 'reverse',
    'generate_tags': True,
    '_match_using_deltas': True,
    }
def make_log_request_dict(direction='reverse', specific_fileids=None,
        start_revision=None, end_revision=None, limit=None,
        message_search=None, levels=1, generate_tags=True, delta_type=None,
        diff_type=None, _match_using_deltas=True):
    """Convenience function for making a logging request dictionary.

    Using this function may make code slightly safer by ensuring
    parameters have the correct names. It also provides a reference
    point for documenting the supported parameters.

    :param direction: 'reverse' (default) is latest to earliest;
      'forward' is earliest to latest.

    :param specific_fileids: If not None, only include revisions
      affecting the specified files, rather than all revisions.

    :param start_revision: If not None, only generate
      revisions >= start_revision

    :param end_revision: If not None, only generate
      revisions <= end_revision

    :param limit: If set, generate only 'limit' revisions, all revisions
      are shown if None or 0.

    :param message_search: If not None, only include revisions with
      matching commit messages

    :param levels: the number of levels of revisions to
      generate; 1 for just the mainline; 0 for all levels.

    :param generate_tags: If True, include tags for matched revisions.

    :param delta_type: Either 'full', 'partial' or None.
      'full' means generate the complete delta - adds/deletes/modifies/etc;
      'partial' means filter the delta using specific_fileids;
      None means do not generate any delta.

    :param diff_type: Either 'full', 'partial' or None.
      'full' means generate the complete diff - adds/deletes/modifies/etc;
      'partial' means filter the diff using specific_fileids;
      None means do not generate any diff.

    :param _match_using_deltas: a private parameter controlling the
      algorithm used for matching specific_fileids. This parameter
      may be removed in the future so bzrlib client code should NOT
      use it.
    """
    return {
        'direction': direction,
        'specific_fileids': specific_fileids,
        'start_revision': start_revision,
        'end_revision': end_revision,
        'message_search': message_search,
        'generate_tags': generate_tags,
        'delta_type': delta_type,
        'diff_type': diff_type,
        # Add 'private' attributes for features that may be deprecated
        '_match_using_deltas': _match_using_deltas,
        }
def _apply_log_request_defaults(rqst):
    """Apply default values to a request dictionary."""
    result = _DEFAULT_REQUEST_PARAMS
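# Illustrative use of the defaults (a sketch; it assumes the elided body copies
# _DEFAULT_REQUEST_PARAMS before applying the caller's overrides):
#
#   rqst = _apply_log_request_defaults({'limit': 5})
#   # rqst['limit'] == 5               (caller-supplied)
#   # rqst['direction'] == 'reverse'   (from _DEFAULT_REQUEST_PARAMS)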
class LogGenerator(object):
    """A generator of log revisions."""

    def iter_log_revisions(self):
        """Iterate over LogRevision objects.

        :return: An iterator yielding LogRevision objects.
        """
        raise NotImplementedError(self.iter_log_revisions)
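# Sketch of a custom generator (hypothetical, not part of bzrlib): a subclass
# only needs to yield LogRevision objects from iter_log_revisions().
#
#   class _PrebuiltLogGenerator(LogGenerator):
#       def __init__(self, log_revisions):
#           self._log_revisions = list(log_revisions)
#
#       def iter_log_revisions(self):
#           return iter(self._log_revisions)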
class Logger(object):
    """An object that generates, formats and displays a log."""

    def __init__(self, branch, rqst):
        """
        :param branch: the branch to log
        :param rqst: A dictionary specifying the query parameters.
          See make_log_request_dict() for supported values.
        """
        self.branch = branch
        self.rqst = _apply_log_request_defaults(rqst)
    def show(self, lf):
        """Display the log.

        :param lf: The LogFormatter object to send the output to.
        """
        if not isinstance(lf, LogFormatter):
            warn("not a LogFormatter instance: %r" % lf)
        self.branch.lock_read()
        if getattr(lf, 'begin_log', None):
            lf.begin_log()
        if getattr(lf, 'end_log', None):
            lf.end_log()
    def _show_body(self, lf):
        """Show the main log output.

        Subclasses may wish to override this.
        """
        # Tweak the LogRequest based on what the LogFormatter can handle.
        # (There's no point generating stuff if the formatter can't display it.)
        rqst = self.rqst
        rqst['levels'] = lf.get_levels()
        if not getattr(lf, 'supports_tags', False):
            rqst['generate_tags'] = False
        if not getattr(lf, 'supports_delta', False):
            rqst['delta_type'] = None
        if not getattr(lf, 'supports_diff', False):
            rqst['diff_type'] = None

        # Find and print the interesting revisions
        generator = self._generator_factory(self.branch, rqst)
        for lr in generator.iter_log_revisions():
        if getattr(lf, 'begin_log', None):
            lf.begin_log()
        _show_log(branch, lf, specific_fileid, verbose, direction,
                  start_revision, end_revision, search, limit, show_diff)
        if getattr(lf, 'end_log', None):
            lf.end_log()


def _show_log(branch,
              specific_fileid=None,
    """Worker function for show_log - see show_log."""
    if not isinstance(lf, LogFormatter):
        warn("not a LogFormatter instance: %r" % lf)

    trace.mutter('get log for file_id %r', specific_fileid)

    # Consult the LogFormatter about what it needs and can handle
    levels_to_display = lf.get_levels()
    generate_merge_revisions = levels_to_display != 1
    allow_single_merge_revision = True
    if not getattr(lf, 'supports_merge_revisions', False):
        allow_single_merge_revision = getattr(lf,
            'supports_single_merge_revision', False)
    generate_tags = getattr(lf, 'supports_tags', False)
    if generate_tags and branch.supports_tags():
        rev_tag_dict = branch.tags.get_reverse_tag_dict()
    generate_delta = verbose and getattr(lf, 'supports_delta', False)
    generate_diff = show_diff and getattr(lf, 'supports_diff', False)

    # Find and print the interesting revisions
    repo = branch.repository
    revision_iterator = _create_log_revision_iterator(branch,
        start_revision, end_revision, direction, specific_fileid, search,
        generate_merge_revisions, allow_single_merge_revision,
        generate_delta, limited_output=limit > 0)
    for revs in revision_iterator:
        for (rev_id, revno, merge_depth), rev, delta in revs:
            # Note: 0 levels means show everything; merge_depth counts from 0
            if levels_to_display != 0 and merge_depth >= levels_to_display:
            diff = _format_diff(repo, rev, rev_id, specific_fileid)
            lr = LogRevision(rev, revno, merge_depth, delta,
                             rev_tag_dict.get(rev_id), diff)
            lf.log_revision(lr)
    def _generator_factory(self, branch, rqst):
        """Make the LogGenerator object to use.

        Subclasses may wish to override this.
        """
        return _DefaultLogGenerator(branch, rqst)
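    # Hypothetical override (not part of bzrlib): _generator_factory is the
    # hook a Logger subclass can use to plug in a different generator. Both
    # FilteringLogger and _PrebuiltLogGenerator here are illustrative names.
    #
    #   class FilteringLogger(Logger):
    #       def _generator_factory(self, branch, rqst):
    #           return _PrebuiltLogGenerator(compute_revisions(branch, rqst))
    #
    # (compute_revisions is an assumed helper supplied by the subclass.)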
            if log_count >= limit:


def _format_diff(repo, rev, rev_id, specific_fileid):
    if len(rev.parent_ids) == 0:
        ancestor_id = _mod_revision.NULL_REVISION
    else:
        ancestor_id = rev.parent_ids[0]
    tree_1 = repo.revision_tree(ancestor_id)
    tree_2 = repo.revision_tree(rev_id)
    if specific_fileid:
        specific_files = [tree_2.id2path(specific_fileid)]
    else:
        specific_files = None
    s = StringIO()
    diff.show_diff_trees(tree_1, tree_2, s, specific_files, old_label='',
class _StartNotLinearAncestor(Exception):
    """Raised when a start revision is not found walking left-hand history."""
class _DefaultLogGenerator(LogGenerator):
    """The default generator of log revisions."""

    def __init__(self, branch, rqst):
        if rqst.get('generate_tags') and branch.supports_tags():
            self.rev_tag_dict = branch.tags.get_reverse_tag_dict()
        else:
            self.rev_tag_dict = {}

    def iter_log_revisions(self):
        """Iterate over LogRevision objects.

        :return: An iterator yielding LogRevision objects.
        """
        rqst = self.rqst
        levels = rqst.get('levels')
        limit = rqst.get('limit')
        diff_type = rqst.get('diff_type')
        revision_iterator = self._create_log_revision_iterator()
        for revs in revision_iterator:
            for (rev_id, revno, merge_depth), rev, delta in revs:
                # 0 levels means show everything; merge_depth counts from 0
                if levels != 0 and merge_depth >= levels:
                    continue
                if diff_type is None:
                    diff = None
                else:
                    diff = self._format_diff(rev, rev_id, diff_type)
                yield LogRevision(rev, revno, merge_depth, delta,
                    self.rev_tag_dict.get(rev_id), diff)
                if log_count >= limit:
    def _format_diff(self, rev, rev_id, diff_type):
        repo = self.branch.repository
        if len(rev.parent_ids) == 0:
            ancestor_id = _mod_revision.NULL_REVISION
        else:
            ancestor_id = rev.parent_ids[0]
        tree_1 = repo.revision_tree(ancestor_id)
        tree_2 = repo.revision_tree(rev_id)
        file_ids = self.rqst.get('specific_fileids')
        if diff_type == 'partial' and file_ids is not None:
            specific_files = [tree_2.id2path(id) for id in file_ids]
        else:
            specific_files = None
        s = StringIO()
        diff.show_diff_trees(tree_1, tree_2, s, specific_files, old_label='',
    def _create_log_revision_iterator(self):
        """Create a revision iterator for log.

        :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
            delta).
        """
        self.start_rev_id, self.end_rev_id = _get_revision_limits(
            self.branch, self.rqst.get('start_revision'),
            self.rqst.get('end_revision'))
        if self.rqst.get('_match_using_deltas'):
            return self._log_revision_iterator_using_delta_matching()
        else:
            # We're using the per-file-graph algorithm. This scales really
            # well but only makes sense if there is a single file and it's
            # not a directory
            file_count = len(self.rqst.get('specific_fileids'))
            if file_count != 1:
                raise BzrError("illegal LogRequest: must match-using-deltas "
                    "when logging %d files" % file_count)
            return self._log_revision_iterator_using_per_file_graph()
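    # Illustration (assumed values): a request built with
    # make_log_request_dict(specific_fileids=[fid]) keeps the default
    # _match_using_deltas=True and therefore takes the delta-matching path
    # above, while make_log_request_dict(specific_fileids=[fid],
    # _match_using_deltas=False) falls through to the single-file
    # per-file-graph path.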
    def _log_revision_iterator_using_delta_matching(self):
        # Get the base revisions, filtering by the revision range
        rqst = self.rqst
        generate_merge_revisions = rqst.get('levels') != 1
        delayed_graph_generation = not rqst.get('specific_fileids') and (
                rqst.get('limit') or self.start_rev_id or self.end_rev_id)
        view_revisions = _calc_view_revisions(self.branch, self.start_rev_id,
            self.end_rev_id, rqst.get('direction'), generate_merge_revisions,
            delayed_graph_generation=delayed_graph_generation)

        # Apply the other filters
        return make_log_rev_iterator(self.branch, view_revisions,
            rqst.get('delta_type'), rqst.get('message_search'),
            file_ids=rqst.get('specific_fileids'),
            direction=rqst.get('direction'))
    def _log_revision_iterator_using_per_file_graph(self):
        # Get the base revisions, filtering by the revision range.
        # Note that we always generate the merge revisions because
        # filter_revisions_touching_file_id() requires them ...
        rqst = self.rqst
        view_revisions = _calc_view_revisions(self.branch, self.start_rev_id,
            self.end_rev_id, rqst.get('direction'), True)
def _create_log_revision_iterator(branch, start_revision, end_revision,
        direction, specific_fileid, search, generate_merge_revisions,
        allow_single_merge_revision, generate_delta, limited_output=False):
    """Create a revision iterator for log.

    :param branch: The branch being logged.
    :param start_revision: If not None, only show revisions >= start_revision
    :param end_revision: If not None, only show revisions <= end_revision
    :param direction: 'reverse' (default) is latest to earliest; 'forward' is
      earliest to latest.
    :param specific_fileid: If not None, list only the commits affecting the
      specified file, rather than all revisions.
    :param search: If not None, only show revisions with matching commit
      messages.
    :param generate_merge_revisions: If False, show only mainline revisions.
    :param allow_single_merge_revision: If True, logging of a single
      revision off the mainline is to be allowed
    :param generate_delta: Whether to generate a delta for each revision.
    :param limited_output: if True, the user only wants a limited result

    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
      delta).
    """
    start_rev_id, end_rev_id = _get_revision_limits(branch, start_revision,
        end_revision)

    # Decide how file-ids are matched: delta-filtering vs per-file graph.
    # Delta filtering allows revisions to be displayed incrementally
    # though the total time is much slower for huge repositories: log -v
    # is the *lower* performance bound. At least until the split
    # inventory format arrives, per-file-graph needs to remain the
    # default except in verbose mode. Delta filtering should give more
    # accurate results (e.g. inclusion of FILE deletions) so arguably
    # it should always be used in the future.
    use_deltas_for_matching = specific_fileid and generate_delta
    delayed_graph_generation = not specific_fileid and (
        start_rev_id or end_rev_id or limited_output)
    generate_merges = generate_merge_revisions or (specific_fileid and
        not use_deltas_for_matching)
    view_revisions = _calc_view_revisions(branch, start_rev_id, end_rev_id,
        direction, generate_merges, allow_single_merge_revision,
        delayed_graph_generation=delayed_graph_generation)
    search_deltas_for_fileids = None
    if use_deltas_for_matching:
        search_deltas_for_fileids = set([specific_fileid])
    elif specific_fileid:
        if not isinstance(view_revisions, list):
            view_revisions = list(view_revisions)
        view_revisions = _filter_revisions_touching_file_id(self.branch,
            rqst.get('specific_fileids')[0], view_revisions,
            include_merges=rqst.get('levels') != 1)
        return make_log_rev_iterator(self.branch, view_revisions,
            rqst.get('delta_type'), rqst.get('message_search'))
        view_revisions = _filter_revisions_touching_file_id(branch,
            specific_fileid, view_revisions,
            include_merges=generate_merge_revisions)
    return make_log_rev_iterator(branch, view_revisions, generate_delta,
        search, file_ids=search_deltas_for_fileids, direction=direction)
def _calc_view_revisions(branch, start_rev_id, end_rev_id, direction,
        generate_merge_revisions, delayed_graph_generation=False):
        generate_merge_revisions, allow_single_merge_revision,
        delayed_graph_generation=False):
    """Calculate the revisions to view.

    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
    """
    generate_single_revision = (end_rev_id and start_rev_id == end_rev_id and
        (not generate_merge_revisions or not _has_merges(branch, end_rev_id)))
    if generate_single_revision:
        return _generate_one_revision(branch, end_rev_id, br_rev_id, br_revno)
        if end_rev_id == br_rev_id:
            return [(br_rev_id, br_revno, 0)]
        else:
            revno = branch.revision_id_to_dotted_revno(end_rev_id)
            if len(revno) > 1 and not allow_single_merge_revision:
                # It's a merge revision and the log formatter is
                # completely brain dead. This "feature" of allowing
                # log formatters incapable of displaying dotted revnos
                # ought to be deprecated IMNSHO. IGC 20091022
                raise errors.BzrCommandError('Selected log formatter only'
                    ' supports mainline revisions.')
            revno_str = '.'.join(str(n) for n in revno)
            return [(end_rev_id, revno_str, 0)]

    # If we only want to see linear revisions, we can iterate ...
    if not generate_merge_revisions:
        return _generate_flat_revisions(branch, start_rev_id, end_rev_id,
            direction)
    return _generate_all_revisions(branch, start_rev_id, end_rev_id,
        direction, delayed_graph_generation)


def _generate_one_revision(branch, rev_id, br_rev_id, br_revno):
    if rev_id == br_rev_id:
        return [(br_rev_id, br_revno, 0)]
    else:
        revno = branch.revision_id_to_dotted_revno(rev_id)
        revno_str = '.'.join(str(n) for n in revno)
        return [(rev_id, revno_str, 0)]
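# Worked example of the dotted-revno formatting above (values illustrative):
# revision_id_to_dotted_revno() returns a tuple of ints such as (1545, 2, 7),
# and '.'.join(str(n) for n in (1545, 2, 7)) yields '1545.2.7'; a mainline
# revision gives a single-element tuple like (1546,), formatted as '1546'.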
def _generate_flat_revisions(branch, start_rev_id, end_rev_id, direction):
    result = _linear_view_revisions(branch, start_rev_id, end_rev_id)
    # If a start limit was given and it's not obviously an
    # ancestor of the end limit, check it before outputting anything
    if direction == 'forward' or (start_rev_id
        and not _is_obvious_ancestor(branch, start_rev_id, end_rev_id)):
        try:
            result = list(result)
        except _StartNotLinearAncestor:
            raise errors.BzrCommandError('Start revision not found in'
                ' left-hand history of end revision.')
    if direction == 'forward':
        result = reversed(result)
    return result
def _generate_all_revisions(branch, start_rev_id, end_rev_id, direction,
        delayed_graph_generation):
    result = _linear_view_revisions(branch, start_rev_id, end_rev_id)
    # If a start limit was given and it's not obviously an
    # ancestor of the end limit, check it before outputting anything
    if direction == 'forward' or (start_rev_id
        and not _is_obvious_ancestor(branch, start_rev_id, end_rev_id)):
        try:
            result = list(result)
        except _StartNotLinearAncestor:
            raise errors.BzrCommandError('Start revision not found in'
                ' left-hand history of end revision.')
    if direction == 'forward':
        result = reversed(list(result))

    # On large trees, generating the merge graph can take 30-60 seconds
    # so we delay doing it until a merge is detected, incrementally
    # returning initial (non-merge) revisions while we can.
    # The above is only true for old formats (<= 0.92); for newer formats,
    # only a couple of seconds should be needed to load the whole graph and
    # the other graph operations needed are even faster than that -- vila 100201
    initial_revisions = []
    if delayed_graph_generation:
        try:
            for rev_id, revno, depth in _linear_view_revisions(
                    branch, start_rev_id, end_rev_id):
                if _has_merges(branch, rev_id):
                    # The end_rev_id can be nested down somewhere. We need an
                    # explicit ancestry check. There is an ambiguity here as we
                    # may not raise _StartNotLinearAncestor for a revision that
                    # is an ancestor but not a *linear* one. But since we have
                    # loaded the graph to do the check (or calculate a dotted
                    # revno), we may as well accept it and show the log...
                    graph = branch.repository.get_graph()
                    if not graph.is_ancestor(start_rev_id, end_rev_id):
                        raise _StartNotLinearAncestor()
                    end_rev_id = rev_id
    def short_author(self, rev):
        name, address = config.parse_username(rev.get_apparent_authors()[0])
        name, address = config.parse_username(rev.get_apparent_author())
    def merge_marker(self, revision):
        """Get the merge marker to include in the output or '' if none."""
        if len(revision.rev.parent_ids) > 1:
            self._merge_count += 1
    def show_properties(self, revision, indent):
        """Displays the custom properties returned by each registered handler.

        If a registered handler raises an error it is propagated.
        """
        for line in self.custom_properties(revision):
            self.to_file.write("%s%s\n" % (indent, line))
    def custom_properties(self, revision):
        """Format the custom properties returned by each registered handler.

        If a registered handler raises an error it is propagated.

        :return: a list of formatted lines (excluding trailing newlines)
        """
        lines = self._foreign_info_properties(revision)
        for key, handler in properties_handler_registry.iteritems():
            lines.extend(self._format_properties(handler(revision)))
        return lines
    def _foreign_info_properties(self, rev):
        """Custom log displayer for foreign revision identifiers.

        :param rev: Revision object.
        """
        # Revision comes directly from a foreign repository
        if isinstance(rev, foreign.ForeignRevision):
            return rev.mapping.vcs.show_foreign_revid(rev.foreign_revid)

        # Imported foreign revision ids always contain ':'
        if not ":" in rev.revision_id:
            return []

        # Revision was once imported from a foreign repository
        try:
            foreign_revid, mapping = \
                foreign.foreign_vcs_registry.parse_revision_id(rev.revision_id)
        except errors.InvalidRevisionId:
            return []

        return self._format_properties(
            mapping.vcs.show_foreign_revid(foreign_revid))
    def _format_properties(self, properties):
        lines = []
        for key, value in properties.items():
            lines.append(key + ': ' + value)
        for key, value in handler(revision).items():
            self.to_file.write(indent + key + ': ' + value + '\n')
    def show_diff(self, to_file, diff, indent):
        for l in diff.rstrip().split('\n'):
            to_file.write(indent + '%s\n' % (l,))


# Separator between revisions in long format
_LONG_SEP = '-' * 60
class LongLogFormatter(LogFormatter):

    supports_merge_revisions = True
    preferred_levels = 1
    supports_delta = True
    supports_tags = True
    supports_diff = True

    def __init__(self, *args, **kwargs):
        super(LongLogFormatter, self).__init__(*args, **kwargs)
        if self.show_timezone == 'original':
            self.date_string = self._date_string_original_timezone
        else:
            self.date_string = self._date_string_with_timezone

    def _date_string_with_timezone(self, rev):
        return format_date(rev.timestamp, rev.timezone or 0,
                           self.show_timezone)

    def _date_string_original_timezone(self, rev):
        return format_date_with_offset_in_original_timezone(rev.timestamp,
            rev.timezone or 0)
    def log_revision(self, revision):
        """Log a revision, either merged or not."""
        indent = ' ' * revision.merge_depth
        to_file = self.to_file
        to_file.write(indent + '-' * 60 + '\n')
        lines = [_LONG_SEP]
        if revision.revno is not None:
            lines.append('revno: %s%s' % (revision.revno,
                self.merge_marker(revision)))
            to_file.write(indent + 'revno: %s\n' % (revision.revno,))
        if revision.tags:
            lines.append('tags: %s' % (', '.join(revision.tags)))
            to_file.write(indent + 'tags: %s\n' % (', '.join(revision.tags)))
        if self.show_ids:
            lines.append('revision-id: %s' % (revision.rev.revision_id,))
            to_file.write(indent + 'revision-id: ' + revision.rev.revision_id)
            for parent_id in revision.rev.parent_ids:
                lines.append('parent: %s' % (parent_id,))
                to_file.write(indent + 'parent: %s\n' % (parent_id,))
        lines.extend(self.custom_properties(revision.rev))
        self.show_properties(revision.rev, indent)

        committer = revision.rev.committer
        authors = revision.rev.get_apparent_authors()
        if authors != [committer]:
            lines.append('author: %s' % (", ".join(authors),))
        lines.append('committer: %s' % (committer,))
        author = revision.rev.properties.get('author', None)
        if author is not None:
            to_file.write(indent + 'author: %s\n' % (author,))
        to_file.write(indent + 'committer: %s\n' % (revision.rev.committer,))

        branch_nick = revision.rev.properties.get('branch-nick', None)
        if branch_nick is not None:
            lines.append('branch nick: %s' % (branch_nick,))
            to_file.write(indent + 'branch nick: %s\n' % (branch_nick,))

        lines.append('timestamp: %s' % (self.date_string(revision.rev),))
        lines.append('message:')
        date_str = format_date(revision.rev.timestamp,
                               revision.rev.timezone or 0,
                               self.show_timezone)
        to_file.write(indent + 'timestamp: %s\n' % (date_str,))

        to_file.write(indent + 'message:\n')
        if not revision.rev.message:
            lines.append(' (no message)')
            to_file.write(indent + ' (no message)\n')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                lines.append(' %s' % (l,))
                to_file.write(indent + ' %s\n' % (l,))

        # Dump the output, appending the delta and diff if requested
        to_file = self.to_file
        to_file.write("%s%s\n" % (indent, ('\n' + indent).join(lines)))
        if revision.delta is not None:
            # We don't respect delta_format for compatibility
            revision.delta.show(to_file, self.show_ids, indent=indent,
                                short_status=False)
        if revision.diff is not None:
            to_file.write(indent + 'diff:\n')
            # Note: we explicitly don't indent the diff (relative to the
            # revision information) so that the output can be fed to patch -p0
            self.show_diff(self.to_exact_file, revision.diff, indent)
            self.to_exact_file.flush()
            self.show_diff(to_file, revision.diff, indent)

    def get_advice_separator(self):
        """Get the text separating the log from the closing advice."""
        return '-' * 60 + '\n'
class ShortLogFormatter(LogFormatter):

            lf.log_revision(lr)


def _get_info_for_log_files(revisionspec_list, file_list):
    """Find file-ids and kinds given a list of files and a revision range.

    We search for files at the end of the range. If not found there,
    we try the start of the range.

    :param revisionspec_list: revision range as parsed on the command line
    :param file_list: the list of paths given on the command line;
      the first of these can be a branch location or a file path,
      the remainder must be file paths
    :return: (branch, info_list, start_rev_info, end_rev_info) where
      info_list is a list of (relative_path, file_id, kind) tuples where
      kind is one of the values 'directory', 'file', 'symlink' or
      'tree-reference'.
      branch will be read-locked.
    """
def _get_fileid_to_log(revision, tree, b, fp):
    """Find the file-id to log for a file path in a revision range.

    :param revision: the revision range as parsed on the command line
    :param tree: the working tree, if any
    :param b: the branch
    :param fp: file path
    """
    from builtins import _get_revision_range, safe_relpath_files
    tree, b, path = bzrdir.BzrDir.open_containing_tree_or_branch(file_list[0])
    # XXX: It's damn messy converting a list of paths to relative paths when
    # those paths might be deleted ones, they might be on a case-insensitive
    # filesystem and/or they might be in silly locations (like another branch).
    # For example, what should "log bzr://branch/dir/file1 file2" do? (Is
    # file2 implicitly in the same dir as file1 or should its directory be
    # taken from the current tree somehow?) For now, this solves the common
    # case of running log in a nested directory, assuming paths beyond the
    # first one haven't been deleted ...
    if tree:
        relpaths = [path] + safe_relpath_files(tree, file_list[1:])
    else:
        relpaths = [path] + file_list[1:]
    start_rev_info, end_rev_info = _get_revision_range(revisionspec_list, b,
        'log')
    if relpaths in ([], [u'']):
        return b, [], start_rev_info, end_rev_info
    if start_rev_info is None and end_rev_info is None:
    if revision is None:
        if tree is None:
            tree = b.basis_tree()
        file_id = tree.path2id(fp)
        kind = _get_kind_for_file_id(tree, file_id)
        # go back to when time began
        try:
            rev1 = b.get_rev_id(1)
        except errors.NoSuchRevision:
        tree1 = b.repository.revision_tree(rev1)
        file_id = tree1.path2id(fp)
        kind = _get_kind_for_file_id(tree1, file_id)
        info_list.append((fp, file_id, kind))
        file_id = tree.path2id(fp)
        # go back to when time began
        try:
            rev1 = b.get_rev_id(1)
        except errors.NoSuchRevision:
        tree = b.repository.revision_tree(rev1)
        file_id = tree.path2id(fp)
    elif start_rev_info == end_rev_info:
    elif len(revision) == 1:
        # One revision given - file must exist in it
        tree = b.repository.revision_tree(end_rev_info.rev_id)
        file_id = tree.path2id(fp)
        kind = _get_kind_for_file_id(tree, file_id)
        info_list.append((fp, file_id, kind))
        tree = revision[0].as_tree(b)
        file_id = tree.path2id(fp)
    elif len(revision) == 2:
        # Revision range given. Get the file-id from the end tree.
        # If that fails, try the start tree.
        rev_id = end_rev_info.rev_id
        rev_id = revision[1].as_revision_id(b)
        if rev_id is None:
            tree = b.basis_tree()
        else:
            tree = b.repository.revision_tree(rev_id)
            tree = revision[1].as_tree(b)
        file_id = tree.path2id(fp)
        if file_id is None:
            rev_id = revision[0].as_revision_id(b)
            if rev_id is None:
                rev1 = b.get_rev_id(1)
                tree = b.repository.revision_tree(rev1)
            else:
                tree = revision[0].as_tree(b)
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, file_id)
            rev_id = start_rev_info.rev_id
            if rev_id is None:
                rev1 = b.get_rev_id(1)
                tree1 = b.repository.revision_tree(rev1)
            else:
                tree1 = b.repository.revision_tree(rev_id)
            file_id = tree1.path2id(fp)
            kind = _get_kind_for_file_id(tree1, file_id)
        info_list.append((fp, file_id, kind))
    return b, info_list, start_rev_info, end_rev_info
def _get_kind_for_file_id(tree, file_id):
    """Return the kind of a file-id or None if it doesn't exist."""
    if file_id is not None:
        return tree.kind(file_id)

    raise errors.BzrCommandError(
        'bzr log --revision takes one or two values.')
properties_handler_registry = registry.Registry()

# Use the properties handlers to print out bug information if available
def _bugs_properties_handler(revision):
    if revision.properties.has_key('bugs'):
        bug_lines = revision.properties['bugs'].split('\n')
        bug_rows = [line.split(' ', 1) for line in bug_lines]
        fixed_bug_urls = [row[0] for row in bug_rows if
                          len(row) > 1 and row[1] == 'fixed']
        return {'fixes bug(s)': ' '.join(fixed_bug_urls)}

properties_handler_registry.register('bugs_properties_handler',
                                     _bugs_properties_handler)
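# Sketch of a third-party handler (hypothetical, not part of bzrlib): each
# registered handler receives a Revision and returns a dict mapping a display
# label to a value; the 'reviewer' property below is an assumed example.
#
#   def _reviewer_properties_handler(revision):
#       reviewer = revision.properties.get('reviewer')
#       if reviewer:
#           return {'reviewed by': reviewer}
#       return {}
#
#   properties_handler_registry.register('reviewer_properties_handler',
#                                        _reviewer_properties_handler)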
properties_handler_registry.register_lazy("foreign",
                                          "bzrlib.foreign",
                                          "show_foreign_properties")

# adapters which revision ids to log are filtered. When log is called, the