262
260
return s.getvalue()
265
def calculate_view_revisions(branch, start_revision, end_revision, direction,
266
specific_fileid, generate_merge_revisions,
267
allow_single_merge_revision):
268
if ( not generate_merge_revisions
269
and start_revision is end_revision is None
270
and direction == 'reverse'
271
and specific_fileid is None):
272
return _linear_view_revisions(branch)
274
mainline_revs, rev_nos, start_rev_id, end_rev_id = _get_mainline_revs(
275
branch, start_revision, end_revision)
276
if not mainline_revs:
263
class _StartNotLinearAncestor(Exception):
264
"""Raised when a start revision is not found walking left-hand history."""
267
def _create_log_revision_iterator(branch, start_revision, end_revision,
    direction, specific_fileid, search, generate_merge_revisions,
    allow_single_merge_revision, generate_delta, limited_output=False):
    """Create a revision iterator for log.

    :param branch: The branch being logged.
    :param start_revision: If not None, only show revisions >= start_revision
    :param end_revision: If not None, only show revisions <= end_revision
    :param direction: 'reverse' (default) is latest to earliest; 'forward' is
      the opposite order.
    :param specific_fileid: If not None, list only the commits affecting the
      specified file.
    :param search: If not None, only show revisions with matching commit
      messages.
    :param generate_merge_revisions: If False, show only mainline revisions.
    :param allow_single_merge_revision: If True, logging of a single
      revision off the mainline is to be allowed
    :param generate_delta: Whether to generate a delta for each revision.
    :param limited_output: if True, the user only wants a limited result

    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
      delta) tuples.
    """
    start_rev_id, end_rev_id = _get_revision_limits(branch, start_revision,
        end_revision)

    # Decide how file-ids are matched: delta-filtering vs per-file graph.
    # Delta filtering allows revisions to be displayed incrementally
    # though the total time is much slower for huge repositories: log -v
    # is the *lower* performance bound. At least until the split
    # inventory format arrives, per-file-graph needs to remain the
    # default except in verbose mode. Delta filtering should give more
    # accurate results (e.g. inclusion of FILE deletions) so arguably
    # it should always be used in the future.
    use_deltas_for_matching = specific_fileid and generate_delta
    # Building the full merge graph is expensive; delay it when a revision
    # limit or output limit means we may not need all of it.
    delayed_graph_generation = not specific_fileid and (
        start_rev_id or end_rev_id or limited_output)
    # Per-file-graph matching needs merge revisions available to walk.
    generate_merges = generate_merge_revisions or (specific_fileid and
        not use_deltas_for_matching)
    view_revisions = _calc_view_revisions(branch, start_rev_id, end_rev_id,
        direction, generate_merges, allow_single_merge_revision,
        delayed_graph_generation=delayed_graph_generation)
    search_deltas_for_fileids = None
    if use_deltas_for_matching:
        search_deltas_for_fileids = set([specific_fileid])
    elif specific_fileid:
        # Per-file-graph filtering needs a materialized list of revisions.
        if not isinstance(view_revisions, list):
            view_revisions = list(view_revisions)
        view_revisions = _filter_revisions_touching_file_id(branch,
            specific_fileid, view_revisions,
            include_merges=generate_merge_revisions)
    return make_log_rev_iterator(branch, view_revisions, generate_delta,
        search, file_ids=search_deltas_for_fileids, direction=direction)
322
def _calc_view_revisions(branch, start_rev_id, end_rev_id, direction,
                         generate_merge_revisions, allow_single_merge_revision,
                         delayed_graph_generation=False):
    """Calculate the revisions to view.

    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
      a list of the same tuples.
    """
    br_revno, br_rev_id = branch.last_revision_info()

    # If a single revision is requested, check we can handle it
    generate_single_revision = (end_rev_id and start_rev_id == end_rev_id and
        (not generate_merge_revisions or not _has_merges(branch, end_rev_id)))
    if generate_single_revision:
        if end_rev_id == br_rev_id:
            # It's the tip - a plain integer revno is enough
            return [(br_rev_id, br_revno, 0)]
        else:
            revno = branch.revision_id_to_dotted_revno(end_rev_id)
            if len(revno) > 1 and not allow_single_merge_revision:
                # It's a merge revision and the log formatter is
                # completely brain dead. This "feature" of allowing
                # log formatters incapable of displaying dotted revnos
                # ought to be deprecated IMNSHO. IGC 20091022
                raise errors.BzrCommandError('Selected log formatter only'
                    ' supports mainline revisions.')
            revno_str = '.'.join(str(n) for n in revno)
            return [(end_rev_id, revno_str, 0)]

    # If we only want to see linear revisions, we can iterate ...
    if not generate_merge_revisions:
        result = _linear_view_revisions(branch, start_rev_id, end_rev_id)
        # If a start limit was given and it's not obviously an
        # ancestor of the end limit, check it before outputting anything
        if direction == 'forward' or (start_rev_id
            and not _is_obvious_ancestor(branch, start_rev_id, end_rev_id)):
            try:
                result = list(result)
            except _StartNotLinearAncestor:
                raise errors.BzrCommandError('Start revision not found in'
                    ' left-hand history of end revision.')
        if direction == 'forward':
            result = reversed(list(result))
        return result

    # On large trees, generating the merge graph can take 30-60 seconds
    # so we delay doing it until a merge is detected, incrementally
    # returning initial (non-merge) revisions while we can.
    initial_revisions = []
    if delayed_graph_generation:
        try:
            for rev_id, revno, depth in \
                _linear_view_revisions(branch, start_rev_id, end_rev_id):
                if _has_merges(branch, rev_id):
                    # Found a merge - switch to graph generation from here
                    end_rev_id = rev_id
                    break
                else:
                    initial_revisions.append((rev_id, revno, depth))
            else:
                # No merged revisions found
                if direction == 'reverse':
                    return initial_revisions
                elif direction == 'forward':
                    return reversed(initial_revisions)
                else:
                    raise ValueError('invalid direction %r' % direction)
        except _StartNotLinearAncestor:
            # A merge was never detected so the lower revision limit can't
            # be nested down somewhere
            raise errors.BzrCommandError('Start revision not found in'
                ' history of end revision.')

    # A log including nested merges is required. If the direction is reverse,
    # we rebase the initial merge depths so that the development line is
    # shown naturally, i.e. just like it is for linear logging. We can easily
    # make forward the exact opposite display, but showing the merge revisions
    # indented at the end seems slightly nicer in that case.
    view_revisions = chain(iter(initial_revisions),
        _graph_view_revisions(branch, start_rev_id, end_rev_id,
            rebase_initial_depths=direction == 'reverse'))
    if direction == 'reverse':
        return view_revisions
    elif direction == 'forward':
        # Forward means oldest first, adjusting for depth.
        view_revisions = reverse_by_depth(list(view_revisions))
        return _rebase_merge_depth(view_revisions)
    else:
        raise ValueError('invalid direction %r' % direction)
414
def _has_merges(branch, rev_id):
415
"""Does a revision have multiple parents or not?"""
416
parents = branch.repository.get_parent_map([rev_id]).get(rev_id, [])
417
return len(parents) > 1
420
def _is_obvious_ancestor(branch, start_rev_id, end_rev_id):
421
"""Is start_rev_id an obvious ancestor of end_rev_id?"""
422
if start_rev_id and end_rev_id:
423
start_dotted = branch.revision_id_to_dotted_revno(start_rev_id)
424
end_dotted = branch.revision_id_to_dotted_revno(end_rev_id)
425
if len(start_dotted) == 1 and len(end_dotted) == 1:
427
return start_dotted[0] <= end_dotted[0]
428
elif (len(start_dotted) == 3 and len(end_dotted) == 3 and
429
start_dotted[0:1] == end_dotted[0:1]):
430
# both on same development line
431
return start_dotted[2] <= end_dotted[2]
438
def _linear_view_revisions(branch, start_rev_id, end_rev_id):
439
"""Calculate a sequence of revisions to view, newest to oldest.
441
:param start_rev_id: the lower revision-id
442
:param end_rev_id: the upper revision-id
443
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
444
:raises _StartNotLinearAncestor: if a start_rev_id is specified but
445
is not found walking the left-hand history
447
br_revno, br_rev_id = branch.last_revision_info()
448
repo = branch.repository
449
if start_rev_id is None and end_rev_id is None:
451
for revision_id in repo.iter_reverse_revision_history(br_rev_id):
452
yield revision_id, str(cur_revno), 0
455
if end_rev_id is None:
456
end_rev_id = br_rev_id
457
found_start = start_rev_id is None
458
for revision_id in repo.iter_reverse_revision_history(end_rev_id):
459
revno = branch.revision_id_to_dotted_revno(revision_id)
460
revno_str = '.'.join(str(n) for n in revno)
461
if not found_start and revision_id == start_rev_id:
462
yield revision_id, revno_str, 0
466
yield revision_id, revno_str, 0
469
raise _StartNotLinearAncestor()
472
def _graph_view_revisions(branch, start_rev_id, end_rev_id,
473
rebase_initial_depths=True):
474
"""Calculate revisions to view including merges, newest to oldest.
476
:param branch: the branch
477
:param start_rev_id: the lower revision-id
478
:param end_rev_id: the upper revision-id
479
:param rebase_initial_depth: should depths be rebased until a mainline
481
:return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
483
view_revisions = branch.iter_merge_sorted_revisions(
484
start_revision_id=end_rev_id, stop_revision_id=start_rev_id,
485
stop_rule="with-merges")
486
if not rebase_initial_depths:
487
for (rev_id, merge_depth, revno, end_of_merge
489
yield rev_id, '.'.join(map(str, revno)), merge_depth
491
# We're following a development line starting at a merged revision.
492
# We need to adjust depths down by the initial depth until we find
493
# a depth less than it. Then we use that depth as the adjustment.
494
# If and when we reach the mainline, depth adjustment ends.
495
depth_adjustment = None
496
for (rev_id, merge_depth, revno, end_of_merge
498
if depth_adjustment is None:
499
depth_adjustment = merge_depth
501
if merge_depth < depth_adjustment:
502
depth_adjustment = merge_depth
503
merge_depth -= depth_adjustment
504
yield rev_id, '.'.join(map(str, revno)), merge_depth
507
def calculate_view_revisions(branch, start_revision, end_revision, direction,
    specific_fileid, generate_merge_revisions, allow_single_merge_revision):
    """Calculate the revisions to view.

    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
      a list of the same tuples.
    """
    # This method is no longer called by the main code path.
    # It is retained for API compatibility and may be deprecated
    # in the future.
    start_rev_id, end_rev_id = _get_revision_limits(branch, start_revision,
        end_revision)
    view_revisions = list(_calc_view_revisions(branch, start_rev_id, end_rev_id,
        direction, generate_merge_revisions or specific_fileid,
        allow_single_merge_revision))
    if specific_fileid:
        view_revisions = _filter_revisions_touching_file_id(branch,
            specific_fileid, view_revisions,
            include_merges=generate_merge_revisions)
    return _rebase_merge_depth(view_revisions)
529
def _rebase_merge_depth(view_revisions):
530
"""Adjust depths upwards so the top level is 0."""
531
# If either the first or last revision have a merge_depth of 0, we're done
307
532
if view_revisions and view_revisions[0][2] and view_revisions[-1][2]:
308
533
min_depth = min([d for r,n,d in view_revisions])
309
534
if min_depth != 0:
382
610
:param search: A user text search string.
383
611
:param log_rev_iterator: An input iterator containing all revisions that
384
612
could be displayed, in lists.
613
:param fileids: If non empty, only revisions matching one or more of
614
the file-ids are to be kept.
615
:param direction: the direction in which view_revisions is sorted
385
616
:return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
388
if not generate_delta:
619
if not generate_delta and not fileids:
389
620
return log_rev_iterator
390
return _generate_deltas(branch.repository, log_rev_iterator)
393
def _generate_deltas(repository, log_rev_iterator):
394
"""Create deltas for each batch of revisions in log_rev_iterator."""
621
return _generate_deltas(branch.repository, log_rev_iterator,
622
generate_delta, fileids, direction)
625
def _generate_deltas(repository, log_rev_iterator, always_delta, fileids,
    direction):
    """Create deltas for each batch of revisions in log_rev_iterator.

    If we're only generating deltas for the sake of filtering against
    file-ids, we stop generating deltas once all file-ids reach the
    appropriate life-cycle point. If we're receiving data newest to
    oldest, then that life-cycle point is 'add', otherwise it's 'remove'.
    """
    check_fileids = fileids is not None and len(fileids) > 0
    if check_fileids:
        fileid_set = set(fileids)
        # Newest to oldest: a file-id is finished once its add is seen;
        # oldest to newest: once its remove is seen.
        if direction == 'reverse':
            stop_on = 'add'
        else:
            stop_on = 'remove'
    else:
        fileid_set = None
    for revs in log_rev_iterator:
        # If we were matching against fileids and we've run out,
        # there's nothing left to do
        if check_fileids and not fileid_set:
            return
        revisions = [rev[1] for rev in revs]
        deltas = repository.get_deltas_for_revisions(revisions)
        new_revs = []
        for rev, delta in izip(revs, deltas):
            if check_fileids:
                if not _delta_matches_fileids(delta, fileid_set, stop_on):
                    # Revision doesn't touch any of the file-ids - drop it
                    continue
                elif not always_delta:
                    # Delta was created just for matching - ditch it
                    # Note: It would probably be a better UI to return
                    # a delta filtered by the file-ids, rather than
                    # None at all. That functional enhancement can
                    # be added later.
                    delta = None
            new_revs.append((rev[0], rev[1], delta))
        yield new_revs
666
def _delta_matches_fileids(delta, fileids, stop_on='add'):
667
"""Check is a delta matches one of more file-ids.
669
:param fileids: a set of fileids to match against.
670
:param stop_on: either 'add' or 'remove' - take file-ids out of the
671
fileids set once their add or remove entry is detected respectively
676
for item in delta.added:
677
if item[1] in fileids:
679
fileids.remove(item[1])
681
for item in delta.removed:
682
if item[1] in fileids:
683
if stop_on == 'delete':
684
fileids.remove(item[1])
688
for l in (delta.modified, delta.renamed, delta.kind_changed):
690
if item[1] in fileids:
402
695
def _make_revision_objects(branch, generate_delta, search, log_rev_iterator):