-    for revision_id in branch.revision_history():
+    graph = branch.repository.get_graph()
+    history = list(graph.iter_lefthand_ancestry(branch.last_revision(),
+        [_mod_revision.NULL_REVISION]))
+    for revision_id in reversed(history):
         this_inv = branch.repository.get_inventory(revision_id)
-        if file_id in this_inv:
+        if this_inv.has_id(file_id):
             this_ie = this_inv[file_id]
             this_path = this_inv.id2path(file_id)
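
The hunk above replaces the deprecated branch.revision_history() with a walk
over the repository graph. A minimal sketch of the replacement idiom, assuming
a readable local branch (the helper name and URL are illustrative, not part of
the patch):

    from bzrlib import branch as _mod_branch, revision as _mod_revision

    def left_hand_history(branch_url):
        # Walk from the branch tip back to NULL_REVISION, then reverse so the
        # result is oldest-first, like the old revision_history() was.
        b = _mod_branch.Branch.open(branch_url)
        b.lock_read()
        try:
            graph = b.repository.get_graph()
            history = list(graph.iter_lefthand_ancestry(
                b.last_revision(), [_mod_revision.NULL_REVISION]))
            history.reverse()
            return history
        finally:
            b.unlock()
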
     Logger(branch, rqst).show(lf)

-# Note: This needs to be kept this in sync with the defaults in
+# Note: This needs to be kept in sync with the defaults in
 # make_log_request_dict() below
 _DEFAULT_REQUEST_PARAMS = {
     'direction': 'reverse',
     'generate_tags': True,
     'exclude_common_ancestry': False,
     '_match_using_deltas': True,

 def make_log_request_dict(direction='reverse', specific_fileids=None,
                           start_revision=None, end_revision=None, limit=None,
-                          message_search=None, levels=1, generate_tags=True,
+                          message_search=None, levels=None, generate_tags=True,
                           diff_type=None, _match_using_deltas=True,
-                          exclude_common_ancestry=False,
+                          exclude_common_ancestry=False, match=None,
+                          signature=False, omit_merges=False,

     """Convenience function for making a logging request dictionary.
       matching commit messages

     :param levels: the number of levels of revisions to
-      generate; 1 for just the mainline; 0 for all levels.
+      generate; 1 for just the mainline; 0 for all levels, or None for

     :param generate_tags: If True, include tags for matched revisions.

     :param delta_type: Either 'full', 'partial' or None.
       'full' means generate the complete delta - adds/deletes/modifies/etc;
       'partial' means filter the delta using specific_fileids;

     :param exclude_common_ancestry: Whether -rX..Y should be interpreted as a
       range operator or as a graph difference.

+    :param signature: show digital signature information
+
+    :param match: Dictionary of list of search strings to use when filtering
+      revisions. Keys can be 'message', 'author', 'committer', 'bugs' or
+      the empty string to match any of the preceding properties.
+
+    :param omit_merges: If True, commits with more than one parent are
+    # Take care of old style message_search parameter
+    if message_search:
+        if match:
+            if 'message' in match:
+                match['message'].append(message_search)
+            else:
+                match['message'] = [message_search]
+        else:
+            match={ 'message': [message_search] }
         'direction': direction,
         'specific_fileids': specific_fileids,
         'start_revision': start_revision,
         'end_revision': end_revision,
-        'message_search': message_search,
         'levels': levels,
         'generate_tags': generate_tags,
         'delta_type': delta_type,
         'diff_type': diff_type,
         'exclude_common_ancestry': exclude_common_ancestry,
+        'signature': signature,
+        'omit_merges': omit_merges,
         # Add 'private' attributes for features that may be deprecated
         '_match_using_deltas': _match_using_deltas,
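
The widened signature above folds the old message_search string into the richer
match dictionary and adds signature/omit_merges switches. A hedged usage sketch
of the new request parameters (the property values are illustrative):

    from bzrlib import log

    rqst = log.make_log_request_dict(
        direction='reverse',
        levels=None,                    # None: let the formatter pick a default
        match={'author': ['jrandom'],   # any one string may match per property
               'message': ['fix']},
        signature=True,                 # ask for per-revision GPG validity
        omit_merges=False)
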
+def format_signature_validity(rev_id, repo):
+    """get the signature validity
+
+    :param rev_id: revision id to validate
+    :param repo: repository of revision
+    :return: human readable string to print to log
+    """
+    from bzrlib import gpg
+
+    gpg_strategy = gpg.GPGStrategy(None)
+    result = repo.verify_revision(rev_id, gpg_strategy)
+    if result[0] == gpg.SIGNATURE_VALID:
+        return "valid signature from {0}".format(result[1])
+    if result[0] == gpg.SIGNATURE_KEY_MISSING:
+        return "unknown key {0}".format(result[1])
+    if result[0] == gpg.SIGNATURE_NOT_VALID:
+        return "invalid signature!"
+    if result[0] == gpg.SIGNATURE_NOT_SIGNED:
+        return "no signature"
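
A minimal usage sketch for the helper added above, assuming the current
directory is a branch whose repository supports signature verification:

    from bzrlib import branch as _mod_branch
    from bzrlib.log import format_signature_validity

    b = _mod_branch.Branch.open('.')
    b.lock_read()
    try:
        print(format_signature_validity(b.last_revision(), b.repository))
    finally:
        b.unlock()
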
 class LogGenerator(object):
     """A generator of log revisions."""

         # Tweak the LogRequest based on what the LogFormatter can handle.
         # (There's no point generating stuff if the formatter can't display it.)
-        rqst['levels'] = lf.get_levels()
+        if rqst['levels'] is None or lf.get_levels() > rqst['levels']:
+            # user didn't specify levels, use whatever the LF can handle:
+            rqst['levels'] = lf.get_levels()
         if not getattr(lf, 'supports_tags', False):
             rqst['generate_tags'] = False
         if not getattr(lf, 'supports_delta', False):
             rqst['delta_type'] = None
         if not getattr(lf, 'supports_diff', False):
             rqst['diff_type'] = None
+        if not getattr(lf, 'supports_signatures', False):
+            rqst['signature'] = False

         # Find and print the interesting revisions
         generator = self._generator_factory(self.branch, rqst)
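
The capability handshake above only generates what the formatter can display.
A sketch of how a custom formatter could advertise its capabilities; the class
itself is hypothetical, only the flag names come from the code:

    from bzrlib import log

    class TersePlainFormatter(log.LogFormatter):
        supports_merge_revisions = True
        preferred_levels = 0
        supports_delta = False       # delta_type will be forced to None
        supports_tags = True
        supports_diff = False
        supports_signatures = False  # signature will be forced to False

        def log_revision(self, revision):
            # Only the attributes we claim to support are guaranteed populated.
            self.to_file.write('%s %s\n' % (revision.revno or '?',
                                            revision.rev.get_summary()))
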
                 # 0 levels means show everything; merge_depth counts from 0
                 if levels != 0 and merge_depth >= levels:
+                if omit_merges and len(rev.parent_ids) > 1:
                 if diff_type is None:
                     diff = self._format_diff(rev, rev_id, diff_type)
+                    signature = format_signature_validity(rev_id,
+                        self.branch.repository)
                 yield LogRevision(rev, revno, merge_depth, delta,
-                    self.rev_tag_dict.get(rev_id), diff)
+                    self.rev_tag_dict.get(rev_id), diff, signature)
                     if log_count >= limit:
         # Apply the other filters
         return make_log_rev_iterator(self.branch, view_revisions,
-            rqst.get('delta_type'), rqst.get('message_search'),
+            rqst.get('delta_type'), rqst.get('match'),
             file_ids=rqst.get('specific_fileids'),
             direction=rqst.get('direction'))

             rqst.get('specific_fileids')[0], view_revisions,
             include_merges=rqst.get('levels') != 1)
         return make_log_rev_iterator(self.branch, view_revisions,
-            rqst.get('delta_type'), rqst.get('message_search'))
+            rqst.get('delta_type'), rqst.get('match'))
 def _calc_view_revisions(branch, start_rev_id, end_rev_id, direction,

         a list of the same tuples.

     if (exclude_common_ancestry and start_rev_id == end_rev_id):
-        raise errors.BzrCommandError(
-            '--exclude-common-ancestry requires two different revisions')
+        raise errors.BzrCommandError(gettext(
+            '--exclude-common-ancestry requires two different revisions'))
     if direction not in ('reverse', 'forward'):
-        raise ValueError('invalid direction %r' % direction)
+        raise ValueError(gettext('invalid direction %r') % direction)
     br_revno, br_rev_id = branch.last_revision_info()
     if br_revno == 0:
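
The error-path changes in this and the surrounding hunks all follow the same
i18n pattern: user-facing BzrCommandError text is wrapped in gettext() so it
can be translated. A hedged sketch of the convention (the helper itself is
illustrative):

    from bzrlib import errors
    from bzrlib.i18n import gettext

    def check_range(start_rev_id, end_rev_id, exclude_common_ancestry):
        if exclude_common_ancestry and start_rev_id == end_rev_id:
            raise errors.BzrCommandError(gettext(
                '--exclude-common-ancestry requires two different revisions'))
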
         except _StartNotLinearAncestor:
             # A merge was never detected so the lower revision limit can't
             # be nested down somewhere
-            raise errors.BzrCommandError('Start revision not found in'
-                ' history of end revision.')
+            raise errors.BzrCommandError(gettext('Start revision not found in'
+                ' history of end revision.'))

     # We exit the loop above because we encounter a revision with merges, from
     # this revision, we need to switch to _graph_view_revisions.
     br_revno, br_rev_id = branch.last_revision_info()
     repo = branch.repository
+    graph = repo.get_graph()
     if start_rev_id is None and end_rev_id is None:
         cur_revno = br_revno
-        for revision_id in repo.iter_reverse_revision_history(br_rev_id):
+        for revision_id in graph.iter_lefthand_ancestry(br_rev_id,
+            (_mod_revision.NULL_REVISION,)):
             yield revision_id, str(cur_revno), 0

         if end_rev_id is None:
             end_rev_id = br_rev_id
         found_start = start_rev_id is None
-        for revision_id in repo.iter_reverse_revision_history(end_rev_id):
+        for revision_id in graph.iter_lefthand_ancestry(end_rev_id,
+            (_mod_revision.NULL_REVISION,)):
             revno_str = _compute_revno_str(branch, revision_id)
             if not found_start and revision_id == start_rev_id:
                 if not exclude_common_ancestry:

         yield rev_id, '.'.join(map(str, revno)), merge_depth
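
The dotted revno strings yielded above can also be obtained directly from a
Branch object; a small sketch, assuming the current directory is a branch:

    from bzrlib import branch as _mod_branch

    b = _mod_branch.Branch.open('.')
    b.lock_read()
    try:
        revno = b.revision_id_to_dotted_revno(b.last_revision())
        print('.'.join(map(str, revno)))   # e.g. "42" for a mainline revision
    finally:
        b.unlock()
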
-@deprecated_function(deprecated_in((2, 2, 0)))
-def calculate_view_revisions(branch, start_revision, end_revision, direction,
-                             specific_fileid, generate_merge_revisions):
-    """Calculate the revisions to view.
-    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
-        a list of the same tuples.
-    start_rev_id, end_rev_id = _get_revision_limits(branch, start_revision,
-    view_revisions = list(_calc_view_revisions(branch, start_rev_id, end_rev_id,
-        direction, generate_merge_revisions or specific_fileid))
-        view_revisions = _filter_revisions_touching_file_id(branch,
-            specific_fileid, view_revisions,
-            include_merges=generate_merge_revisions)
-    return _rebase_merge_depth(view_revisions)
 def _rebase_merge_depth(view_revisions):
     """Adjust depths upwards so the top level is 0."""
     # If either the first or last revision have a merge_depth of 0, we're done

     return log_rev_iterator
-def _make_search_filter(branch, generate_delta, search, log_rev_iterator):
+def _make_search_filter(branch, generate_delta, match, log_rev_iterator):
     """Create a filtered iterator of log_rev_iterator matching on a regex.

     :param branch: The branch being logged.
     :param generate_delta: Whether to generate a delta for each revision.
-    :param search: A user text search string.
+    :param match: A dictionary with properties as keys and lists of strings
+        as values. To match, a revision may match any of the supplied strings
+        within a single property but must match at least one string for each
     :param log_rev_iterator: An input iterator containing all revisions that
         could be displayed, in lists.
     :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,

         return log_rev_iterator
-    searchRE = re.compile(search, re.IGNORECASE)
-    return _filter_message_re(searchRE, log_rev_iterator)
+    searchRE = [(k, [re.compile(x, re.IGNORECASE) for x in v])
+                for (k,v) in match.iteritems()]
+    return _filter_re(searchRE, log_rev_iterator)


-def _filter_message_re(searchRE, log_rev_iterator):
+def _filter_re(searchRE, log_rev_iterator):
     for revs in log_rev_iterator:
-        for (rev_id, revno, merge_depth), rev, delta in revs:
-            if searchRE.search(rev.message):
-                new_revs.append(((rev_id, revno, merge_depth), rev, delta))
+        new_revs = [rev for rev in revs if _match_filter(searchRE, rev[1])]


+def _match_filter(searchRE, rev):
+    strings = {
+        'message': (rev.message,),
+        'committer': (rev.committer,),
+        'author': (rev.get_apparent_authors()),
+        'bugs': list(rev.iter_bugs())
+        }
+    strings[''] = [item for inner_list in strings.itervalues()
+                   for item in inner_list]
+    for (k,v) in searchRE:
+        if k in strings and not _match_any_filter(strings[k], v):
+            return False
+    return True


+def _match_any_filter(strings, res):
+    return any([filter(None, map(re.search, strings)) for re in res])
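
A standalone sketch (plain Python, not bzrlib code) of the matching rule the
new filter implements: within one property any of the supplied strings may
match, and every requested property that the revision actually has must be
satisfied:

    import re

    def matches(match, properties):
        # match: {'author': ['jrandom']}; properties: {'author': [...], ...}
        compiled = [(k, [re.compile(s, re.IGNORECASE) for s in v])
                    for k, v in match.items()]
        for key, regexes in compiled:
            candidates = properties.get(key)
            if candidates is None:
                continue   # properties the revision lacks are not checked
            if not any(r.search(text) for r in regexes for text in candidates):
                return False
        return True

    print(matches({'author': ['jrandom'], 'message': ['fix', 'bug']},
                  {'author': ['J. Random <jrandom@example.com>'],
                   'message': ['Fix the frobnicator']}))   # True
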
 def _make_delta_filter(branch, generate_delta, search, log_rev_iterator,
     fileids=None, direction='reverse'):

     if branch_revno != 0:
         if (start_rev_id == _mod_revision.NULL_REVISION
             or end_rev_id == _mod_revision.NULL_REVISION):
-            raise errors.BzrCommandError('Logging revision 0 is invalid.')
+            raise errors.BzrCommandError(gettext('Logging revision 0 is invalid.'))
         if start_revno > end_revno:
-            raise errors.BzrCommandError("Start revision must be older than "
-                "the end revision.")
+            raise errors.BzrCommandError(gettext("Start revision must be "
+                "older than the end revision."))
     return (start_rev_id, end_rev_id)

     if ((start_rev_id == _mod_revision.NULL_REVISION)
         or (end_rev_id == _mod_revision.NULL_REVISION)):
-        raise errors.BzrCommandError('Logging revision 0 is invalid.')
+        raise errors.BzrCommandError(gettext('Logging revision 0 is invalid.'))
     if start_revno > end_revno:
-        raise errors.BzrCommandError("Start revision must be older than "
-            "the end revision.")
+        raise errors.BzrCommandError(gettext("Start revision must be older "
+            "than the end revision."))

     if end_revno < start_revno:
         return None, None, None, None
     cur_revno = branch_revno
     mainline_revs = []
-    for revision_id in branch.repository.iter_reverse_revision_history(
-        branch_last_revision):
+    graph = branch.repository.get_graph()
+    for revision_id in graph.iter_lefthand_ancestry(
+            branch_last_revision, (_mod_revision.NULL_REVISION,)):
         if cur_revno < start_revno:
             # We have gone far enough, but we always add 1 more revision
             rev_nos[revision_id] = cur_revno

     return mainline_revs, rev_nos, start_rev_id, end_rev_id
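
The counting-down loop above pairs each mainline revision with its revno. The
same idea as a standalone helper, hedged (the function name is illustrative):

    from bzrlib import revision as _mod_revision

    def mainline_revnos(branch):
        # Map each left-hand-ancestry revision id to its integer revno.
        branch.lock_read()
        try:
            last_revno, last_rev_id = branch.last_revision_info()
            graph = branch.repository.get_graph()
            rev_nos = {}
            cur_revno = last_revno
            for revision_id in graph.iter_lefthand_ancestry(
                    last_rev_id, (_mod_revision.NULL_REVISION,)):
                rev_nos[revision_id] = cur_revno
                cur_revno -= 1
            return rev_nos
        finally:
            branch.unlock()
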
-@deprecated_function(deprecated_in((2, 2, 0)))
-def _filter_revision_range(view_revisions, start_rev_id, end_rev_id):
-    """Filter view_revisions based on revision ranges.
-    :param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
-        tuples to be filtered.
-    :param start_rev_id: If not NONE specifies the first revision to be logged.
-        If NONE then all revisions up to the end_rev_id are logged.
-    :param end_rev_id: If not NONE specifies the last revision to be logged.
-        If NONE then all revisions up to the end of the log are logged.
-    :return: The filtered view_revisions.
-    if start_rev_id or end_rev_id:
-        revision_ids = [r for r, n, d in view_revisions]
-            start_index = revision_ids.index(start_rev_id)
-        if start_rev_id == end_rev_id:
-            end_index = start_index
-                end_index = revision_ids.index(end_rev_id)
-                end_index = len(view_revisions) - 1
-        # To include the revisions merged into the last revision,
-        # extend end_rev_id down to, but not including, the next rev
-        # with the same or lesser merge_depth
-        end_merge_depth = view_revisions[end_index][2]
-            for index in xrange(end_index+1, len(view_revisions)+1):
-                if view_revisions[index][2] <= end_merge_depth:
-                    end_index = index - 1
-            # if the search falls off the end then log to the end as well
-            end_index = len(view_revisions) - 1
-        view_revisions = view_revisions[start_index:end_index+1]
-    return view_revisions
 def _filter_revisions_touching_file_id(branch, file_id, view_revisions,
     include_merges=True):
     r"""Return the list of revision ids which touch a given file id.

     # indexing layer. We might consider passing in hints as to the known
     # access pattern (sparse/clustered, high success rate/low success
     # rate). This particular access is clustered with a low success rate.
-    get_parent_map = branch.repository.texts.get_parent_map
     modified_text_revisions = set()
     chunk_size = 1000
     for start in xrange(0, len(text_keys), chunk_size):
-@deprecated_function(deprecated_in((2, 2, 0)))
-def get_view_revisions(mainline_revs, rev_nos, branch, direction,
-                       include_merges=True):
-    """Produce an iterator of revisions to show
-    :return: an iterator of (revision_id, revno, merge_depth)
-    (if there is no revno for a revision, None is supplied)
-    if not include_merges:
-        revision_ids = mainline_revs[1:]
-        if direction == 'reverse':
-            revision_ids.reverse()
-        for revision_id in revision_ids:
-            yield revision_id, str(rev_nos[revision_id]), 0
-    graph = branch.repository.get_graph()
-    # This asks for all mainline revisions, which means we only have to spider
-    # sideways, rather than depth history. That said, its still size-of-history
-    # and should be addressed.
-    # mainline_revisions always includes an extra revision at the beginning, so
-    parent_map = dict(((key, value) for key, value in
-        graph.iter_ancestry(mainline_revs[1:]) if value is not None))
-    # filter out ghosts; merge_sort errors on ghosts.
-    rev_graph = _mod_repository._strip_NULL_ghosts(parent_map)
-    merge_sorted_revisions = tsort.merge_sort(
-        generate_revno=True)
-    if direction == 'forward':
-        # forward means oldest first.
-        merge_sorted_revisions = reverse_by_depth(merge_sorted_revisions)
-    elif direction != 'reverse':
-        raise ValueError('invalid direction %r' % direction)
-    for (sequence, rev_id, merge_depth, revno, end_of_merge
-         ) in merge_sorted_revisions:
-        yield rev_id, '.'.join(map(str, revno)), merge_depth

 def reverse_by_depth(merge_sorted_revisions, _depth=0):
     """Reverse revisions by depth.
     to indicate which LogRevision attributes it supports:

     - supports_delta must be True if this log formatter supports delta.
-      Otherwise the delta attribute may not be populated. The 'delta_format'
-      attribute describes whether the 'short_status' format (1) or the long
-      one (2) should be used.
+      Otherwise the delta attribute may not be populated. The 'delta_format'
+      attribute describes whether the 'short_status' format (1) or the long
+      one (2) should be used.

     - supports_merge_revisions must be True if this log formatter supports
-      merge revisions. If not, then only mainline revisions will be passed
+      merge revisions. If not, then only mainline revisions will be passed

     - preferred_levels is the number of levels this formatter defaults to.
-      The default value is zero meaning display all levels.
-      This value is only relevant if supports_merge_revisions is True.
+      The default value is zero meaning display all levels.
+      This value is only relevant if supports_merge_revisions is True.

     - supports_tags must be True if this log formatter supports tags.
-      Otherwise the tags attribute may not be populated.
+      Otherwise the tags attribute may not be populated.

     - supports_diff must be True if this log formatter supports diffs.
-      Otherwise the diff attribute may not be populated.
+      Otherwise the diff attribute may not be populated.
+
+    - supports_signatures must be True if this log formatter supports GPG

     Plugins can register functions to show custom revision properties using
     the properties_handler_registry. The registered function
-    must respect the following interface description:
+    must respect the following interface description::

         def my_show_properties(properties_dict):
             # code that returns a dict {'name':'value'} of the properties
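
The registry hook described in the docstring above can be exercised as follows;
the property name and registry key are hypothetical:

    from bzrlib.log import properties_handler_registry

    def my_show_properties(properties_dict):
        # Return a {'name': 'value'} dict of extra fields to display.
        if 'reviewer' in properties_dict:      # hypothetical custom property
            return {'reviewed by': properties_dict['reviewer']}
        return {}

    properties_handler_registry.register('my_plugin_properties',
                                         my_show_properties)
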
         """Create a LogFormatter.

         :param to_file: the file to output to
-        :param to_exact_file: if set, gives an output stream to which
+        :param to_exact_file: if set, gives an output stream to which
             non-Unicode diffs are written.
         :param show_ids: if True, revision-ids are to be displayed
         :param show_timezone: the timezone to use
         if revision.delta is not None:
             # Use the standard status output to display changes
             from bzrlib.delta import report_delta
-            report_delta(to_file, revision.delta, short_status=False,
+            report_delta(to_file, revision.delta, short_status=False,
                          show_ids=self.show_ids, indent=indent)
         if revision.diff is not None:
             to_file.write(indent + 'diff:\n')

         if revision.delta is not None:
             # Use the standard status output to display changes
             from bzrlib.delta import report_delta
-            report_delta(to_file, revision.delta,
-                         short_status=self.delta_format==1,
+            report_delta(to_file, revision.delta,
+                         short_status=self.delta_format==1,
                          show_ids=self.show_ids, indent=indent + offset)
         if revision.diff is not None:
             self.show_diff(self.to_exact_file, revision.diff, ' ')
     def log_string(self, revno, rev, max_chars, tags=None, prefix=''):
         """Format log info into one string. Truncate tail of string

-        :param revno: revision number or None.
-            Revision numbers counts from 1.
-        :param rev: revision object
-        :param max_chars: maximum length of resulting string
-        :param tags: list of tags or None
-        :param prefix: string to prefix each line
-        :return: formatted truncated string
+        :param revno: revision number or None.
+            Revision numbers counts from 1.
+        :param rev: revision object
+        :param max_chars: maximum length of resulting string
+        :param tags: list of tags or None
+        :param prefix: string to prefix each line
+        :return: formatted truncated string

         # show revno only when is not None
             out.append("%s:" % revno)
-        out.append(self.truncate(self.short_author(rev), 20))
+        if max_chars is not None:
+            out.append(self.truncate(self.short_author(rev), (max_chars+3)/4))
+            out.append(self.short_author(rev))
         out.append(self.date_string(rev))
         if len(rev.parent_ids) > 1:
             out.append('[merge]')
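
A quick check of the width rule introduced above: with max_chars given, the
author column gets roughly a quarter of the budget, which under Python 2's
integer division reproduces the old hard-coded width for a standard terminal:

    max_chars = 80
    print((max_chars + 3) / 4)   # -> 20, the value previously hard-coded
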
                               'The committer')

-def show_one_log(revno, rev, delta, verbose, to_file, show_timezone):
-    # deprecated; for compatibility
-    lf = LongLogFormatter(to_file=to_file, show_timezone=show_timezone)
-    lf.show(revno, rev, delta)

 def show_changed_revisions(branch, old_rh, new_rh, to_file=None,
                            log_format='long'):
     """Show the change in revision history comparing the old revision history to the new one.

     branch will be read-locked.

     from builtins import _get_revision_range
-    tree, b, path = bzrdir.BzrDir.open_containing_tree_or_branch(file_list[0])
+    tree, b, path = controldir.ControlDir.open_containing_tree_or_branch(
     add_cleanup(b.lock_read().unlock)
     # XXX: It's damn messy converting a list of paths to relative paths when
     # those paths might be deleted ones, they might be on a case-insensitive