from bzrlib.merge3 import Merge3
from bzrlib.osutils import rename, pathjoin
from progress import DummyProgress, ProgressPhase
from bzrlib.revision import common_ancestor, is_ancestor, NULL_REVISION
from bzrlib.revision import (NULL_REVISION, ensure_null)
from bzrlib.textfile import check_text_lines
from bzrlib.trace import mutter, warning, note
from bzrlib.transform import (TreeTransform, resolve_conflicts, cook_conflicts,
                              FinalPaths, create_by_entry, unique_add,
from bzrlib.versionedfile import WeaveMerge
from bzrlib.trace import mutter, warning, note, is_quiet
from bzrlib.transform import (TransformPreview, TreeTransform,
                              resolve_conflicts, cook_conflicts,
                              conflict_pass, FinalPaths, create_by_entry,
                              unique_add, ROOT_PARENT)
from bzrlib.versionedfile import PlanWeaveMerge
from bzrlib import ui

# TODO: Report back as changes are merged in
def _get_tree(treespec, local_branch=None):
    from bzrlib import workingtree
    location, revno = treespec
        tree = workingtree.WorkingTree.open_containing(location)[0]
        return tree.branch, tree
    branch = Branch.open_containing(location)[0]
        revision_id = branch.last_revision()
        revision_id = branch.get_rev_id(revno)
    if revision_id is None:
        revision_id = NULL_REVISION
    return branch, _get_revid_tree(branch, revision_id, local_branch)


def _get_revid_tree(branch, revision_id, local_branch):
    if revision_id is None:
        base_tree = branch.bzrdir.open_workingtree()
        if local_branch is not None:
            if local_branch.base != branch.base:
                local_branch.fetch(branch, revision_id)
            base_tree = local_branch.repository.revision_tree(revision_id)
            base_tree = branch.repository.revision_tree(revision_id)


def _get_revid_tree_from_tree(tree, revision_id, local_branch):
    if revision_id is None:
    if local_branch is not None:
        if local_branch.base != tree.branch.base:
            local_branch.fetch(tree.branch, revision_id)
        return local_branch.repository.revision_tree(revision_id)
    return tree.branch.repository.revision_tree(revision_id)


def transform_tree(from_tree, to_tree, interesting_ids=None):
    merge_inner(from_tree.branch, to_tree, from_tree, ignore_zero=True,
        self.ignore_zero = False
        self.backup_files = False
        self.interesting_ids = None
        self.interesting_files = None
        self.show_base = False
        self.reprocess = False
        self.recurse = recurse
        self.change_reporter = change_reporter

    def revision_tree(self, revision_id):
        return self.this_branch.repository.revision_tree(revision_id)

        self._cached_trees = {}
        self._revision_graph = revision_graph
        self._base_is_ancestor = None
        self._base_is_other_ancestor = None

    def revision_graph(self):
        if self._revision_graph is None:
            self._revision_graph = self.this_branch.repository.get_graph()
        return self._revision_graph

    def _set_base_is_ancestor(self, value):
        self._base_is_ancestor = value

    def _get_base_is_ancestor(self):
        if self._base_is_ancestor is None:
            self._base_is_ancestor = self.revision_graph.is_ancestor(
                self.base_rev_id, self.this_basis)
        return self._base_is_ancestor

    base_is_ancestor = property(_get_base_is_ancestor, _set_base_is_ancestor)

    def _set_base_is_other_ancestor(self, value):
        self._base_is_other_ancestor = value

    def _get_base_is_other_ancestor(self):
        if self._base_is_other_ancestor is None:
            if self.other_basis is None:
            self._base_is_other_ancestor = self.revision_graph.is_ancestor(
                self.base_rev_id, self.other_basis)
        return self._base_is_other_ancestor

    base_is_other_ancestor = property(_get_base_is_other_ancestor,
                                      _set_base_is_other_ancestor)
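    # The two properties above lazily answer "is BASE an ancestor of THIS
    # (or OTHER)?" with a single graph query and cache the result.  A
    # standalone sketch of the same query, using only the repository/graph
    # calls already shown above (the variable names are illustrative):
    #
    #   graph = branch.repository.get_graph()
    #   base_already_merged = graph.is_ancestor(base_rev_id,
    #                                           branch.last_revision())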
    def from_uncommitted(tree, other_tree, pb):
        """Return a Merger for uncommitted changes in other_tree.
        :param tree: The tree to merge into
        :param other_tree: The tree to get uncommitted changes from
        :param pb: A progress indicator
        merger = Merger(tree.branch, other_tree, other_tree.basis_tree(), tree,
        merger.base_rev_id = merger.base_tree.get_revision_id()
        merger.other_rev_id = None
        merger.other_basis = merger.base_rev_id

    def from_mergeable(klass, tree, mergeable, pb):
        """Return a Merger for a bundle or merge directive.
        :param tree: The tree to merge changes into
        :param mergeable: A merge directive or bundle
        :param pb: A progress indicator
        mergeable.install_revisions(tree.branch.repository)
        base_revision_id, other_revision_id, verified =\
            mergeable.get_merge_request(tree.branch.repository)
        revision_graph = tree.branch.repository.get_graph()
        if (base_revision_id != _mod_revision.NULL_REVISION and
            revision_graph.is_ancestor(
                base_revision_id, tree.branch.last_revision())):
            base_revision_id = None
            warning('Performing cherrypick')
        merger = klass.from_revision_ids(pb, tree, other_revision_id,
                                         base_revision_id, revision_graph=
        return merger, verified

    def from_revision_ids(pb, tree, other, base=None, other_branch=None,
                          base_branch=None, revision_graph=None):
        """Return a Merger for revision-ids.
        :param tree: The tree to merge changes into
        :param other: The revision-id to use as OTHER
        :param base: The revision-id to use as BASE. If not specified, will
        :param other_branch: A branch containing the other revision-id. If
            not supplied, tree.branch is used.
        :param base_branch: A branch containing the base revision-id. If
            not supplied, other_branch or tree.branch will be used.
        :param revision_graph: If you have a revision_graph precomputed, pass
            it in, otherwise it will be created for you.
        :param pb: A progress indicator
        merger = Merger(tree.branch, this_tree=tree, pb=pb,
                        revision_graph=revision_graph)
        if other_branch is None:
            other_branch = tree.branch
        merger.set_other_revision(other, other_branch)
        if base_branch is None:
            base_branch = other_branch
        merger.set_base_revision(base, base_branch)
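    # Usage sketch for the constructors above, assuming a working tree `wt`
    # and a revision id `other_id`.  merge_type and do_merge() are not shown
    # in this hunk and are assumed from the wider Merger API:
    #
    #   merger = Merger.from_revision_ids(DummyProgress(), wt, other_id,
    #                                     other_branch=wt.branch)
    #   merger.merge_type = Merge3Merger
    #   conflict_count = merger.do_merge()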
    def revision_tree(self, revision_id, branch=None):
        if revision_id not in self._cached_trees:
                branch = self.this_branch
                tree = self.this_tree.revision_tree(revision_id)
            except errors.NoSuchRevisionInTree:
                tree = branch.repository.revision_tree(revision_id)
            self._cached_trees[revision_id] = tree
        return self._cached_trees[revision_id]

    def _get_tree(self, treespec, possible_transports=None):
        from bzrlib import workingtree
        location, revno = treespec
            tree = workingtree.WorkingTree.open_containing(location)[0]
            return tree.branch, tree
        branch = Branch.open_containing(location, possible_transports)[0]
            revision_id = branch.last_revision()
            revision_id = branch.get_rev_id(revno)
        revision_id = ensure_null(revision_id)
        return branch, self.revision_tree(revision_id, branch)
    def ensure_revision_trees(self):
        if self.this_revision_tree is None:
            self.this_basis_tree = self.this_branch.repository.revision_tree(
            self.this_basis_tree = self.revision_tree(self.this_basis)
            if self.this_basis == self.this_rev_id:
                self.this_revision_tree = self.this_basis_tree

            self.compare_basis()
            if self.this_basis != self.this_rev_id:
                raise BzrCommandError("Working tree has uncommitted changes.")
                raise errors.UncommittedChanges(self.this_tree)

    def compare_basis(self):
        changes = self.this_tree.changes_from(self.this_tree.basis_tree())
            basis_tree = self.revision_tree(self.this_tree.last_revision())
        except errors.RevisionNotPresent:
            basis_tree = self.this_tree.basis_tree()
        changes = self.this_tree.changes_from(basis_tree)
        if not changes.has_changed():
            self.this_rev_id = self.this_basis

    def set_interesting_files(self, file_list):
            self._set_interesting_files(file_list)
        except NotVersionedError, e:
            raise BzrCommandError("%s is not a source file in any"

    def _set_interesting_files(self, file_list):
        """Set the list of interesting ids from a list of files."""
        if file_list is None:
            self.interesting_ids = None
        interesting_ids = set()
        for path in file_list:
            # TODO: jam 20070226 The trees are not locked at this time,
            #       wouldn't it make merge faster if it locks everything in the
            #       beginning? It locks at do_merge time, but this happens
            for tree in (self.this_tree, self.base_tree, self.other_tree):
                file_id = tree.path2id(path)
                if file_id is not None:
                    interesting_ids.add(file_id)
                raise NotVersionedError(path=path)
        self.interesting_ids = interesting_ids
        self.interesting_files = file_list
    def set_pending(self):
        if not self.base_is_ancestor:
        if self.other_rev_id is None:
        ancestry = self.this_branch.repository.get_ancestry(self.this_basis)
        if self.other_rev_id in ancestry:
        self.this_tree.add_parent_tree((self.other_rev_id, self.other_tree))

    def set_other(self, other_revision):
        if not self.base_is_ancestor or not self.base_is_other_ancestor or self.other_rev_id is None:

    def _add_parent(self):
        new_parents = self.this_tree.get_parent_ids() + [self.other_rev_id]
        new_parent_trees = []
        for revision_id in new_parents:
                tree = self.revision_tree(revision_id)
            except errors.RevisionNotPresent:
            new_parent_trees.append((revision_id, tree))
            self.this_tree.set_parent_trees(new_parent_trees,
                                            allow_leftmost_as_ghost=True)
            for _revision_id, tree in new_parent_trees:

    def set_other(self, other_revision, possible_transports=None):
        """Set the revision and tree to merge from.
        This sets the other_tree, other_rev_id, other_basis attributes.
        :param other_revision: The [path, revision] list to merge from.
        self.other_branch, self.other_tree = _get_tree(other_revision,
        self.other_branch, self.other_tree = self._get_tree(other_revision,
        if other_revision[1] == -1:
            self.other_rev_id = self.other_branch.last_revision()
            if self.other_rev_id is None:
            self.other_rev_id = _mod_revision.ensure_null(
                self.other_branch.last_revision())
            if _mod_revision.is_null(self.other_rev_id):
                raise NoCommits(self.other_branch)
            self.other_basis = self.other_rev_id
        elif other_revision[1] is not None:

        self.other_rev_id = revision_id
        self.other_branch = other_branch
        self.this_branch.fetch(other_branch, self.other_rev_id)
        self._maybe_fetch(other_branch, self.this_branch, self.other_rev_id)
        self.other_tree = self.revision_tree(revision_id)
        self.other_basis = revision_id

    def set_base_revision(self, revision_id, branch):
        """Set 'base' based on a branch and revision id
        :param revision_id: The revision to use for a tree
        :param branch: The branch containing this tree
        self.base_rev_id = revision_id
        self.base_branch = branch
        self._maybe_fetch(branch, self.this_branch, revision_id)
        self.base_tree = self.revision_tree(revision_id)

    def _maybe_fetch(self, source, target, revision_id):
        if not source.repository.has_same_location(target.repository):
            target.fetch(source, revision_id)

    def find_base(self):
        self.set_base([None, None])
        revisions = [ensure_null(self.this_basis),
                     ensure_null(self.other_basis)]
        if NULL_REVISION in revisions:
            self.base_rev_id = NULL_REVISION
            self.base_rev_id, steps = self.revision_graph.find_unique_lca(
                revisions[0], revisions[1], count_steps=True)
            if self.base_rev_id == NULL_REVISION:
                raise UnrelatedBranches()
                warning('Warning: criss-cross merge encountered. See bzr'
                        ' help criss-cross.')
        self.base_tree = self.revision_tree(self.base_rev_id)
        self.base_is_ancestor = True
        self.base_is_other_ancestor = True
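    # find_base() picks BASE as the unique least common ancestor of THIS and
    # OTHER; when find_unique_lca() needs more than one step to narrow the
    # candidates down (steps > 1), the history is criss-crossed and the
    # warning above is emitted.  A standalone sketch of the same query
    # (variable names are illustrative):
    #
    #   graph = tree.branch.repository.get_graph()
    #   base_rev_id, steps = graph.find_unique_lca(this_basis, other_basis,
    #                                              count_steps=True)
    #   criss_cross = steps > 1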
    def set_base(self, base_revision):
        """Set the base revision to use for the merge.

            self.base_tree.unlock()
            self.this_tree.unlock()
        if len(merge.cooked_conflicts) == 0:
            if not self.ignore_zero:
            if not self.ignore_zero and not is_quiet():
                note("All changes applied successfully.")
            note("%d conflicts encountered." % len(merge.cooked_conflicts))
        return len(merge.cooked_conflicts)

    def regen_inventory(self, new_entries):
        old_entries = self.this_tree.read_working_inventory()
        for path, file_id in new_entries:
            new_entries_map[file_id] = path

        def id2path(file_id):
            path = new_entries_map.get(file_id)
            entry = old_entries[file_id]
            if entry.parent_id is None:
            return pathjoin(id2path(entry.parent_id), entry.name)
        for file_id in old_entries:
            entry = old_entries[file_id]
            path = id2path(file_id)
            if file_id in self.base_tree.inventory:
                executable = getattr(self.base_tree.inventory[file_id], 'executable', False)
                executable = getattr(entry, 'executable', False)
            new_inventory[file_id] = (path, file_id, entry.parent_id,
                                      entry.kind, executable)
            by_path[path] = file_id
        for path, file_id in new_entries:
                del new_inventory[file_id]
            new_path_list.append((path, file_id))
            if file_id not in old_entries:

        # Ensure no file is added before its parent
        for path, file_id in new_path_list:
                parent = by_path[os.path.dirname(path)]
            abspath = pathjoin(self.this_tree.basedir, path)
            kind = osutils.file_kind(abspath)
            if file_id in self.base_tree.inventory:
                executable = getattr(self.base_tree.inventory[file_id], 'executable', False)
            new_inventory[file_id] = (path, file_id, parent, kind, executable)
            by_path[path] = file_id

        # Get a list in insertion order
        new_inventory_list = new_inventory.values()
        mutter ("""Inventory regeneration:
old length: %i insertions: %i deletions: %i new_length: %i"""\
            % (len(old_entries), insertions, deletions,
               len(new_inventory_list)))
        assert len(new_inventory_list) == len(old_entries) + insertions\
        new_inventory_list.sort()
        return new_inventory_list
class Merge3Merger(object):
    """Three-way merger that uses the merge3 text merger"""
    supports_reprocess = True
    supports_show_base = True
    history_based = False
    supports_cherrypick = True
    supports_reverse_cherrypick = True
    winner_idx = {"this": 2, "other": 1, "conflict": 1}

    def __init__(self, working_tree, this_tree, base_tree, other_tree,
                 interesting_ids=None, reprocess=False, show_base=False,
                 pb=DummyProgress(), pp=None, change_reporter=None):
        """Initialize the merger object and perform the merge."""
                 pb=DummyProgress(), pp=None, change_reporter=None,
                 interesting_files=None, do_merge=True,
        """Initialize the merger object and perform the merge.
        :param working_tree: The working tree to apply the merge to
        :param this_tree: The local tree in the merge operation
        :param base_tree: The common tree in the merge operation
        :param other_tree: The other tree to merge changes from
        :param interesting_ids: The file_ids of files that should
            participate in the merge. May not be combined with
        :param reprocess: If True, perform conflict-reduction processing.
        :param show_base: If True, show the base revision in text conflicts.
            (incompatible with reprocess)
        :param pb: A Progress bar
        :param pp: A ProgressPhase object
        :param change_reporter: An object that should report changes made
        :param interesting_files: The tree-relative paths of files that should
            participate in the merge. If these paths refer to directories,
            the contents of those directories will also be included. May not
            be combined with interesting_ids. If neither interesting_files nor
            interesting_ids is specified, all files may participate in the
        object.__init__(self)
        if interesting_files is not None:
            assert interesting_ids is None
        self.interesting_ids = interesting_ids
        self.interesting_files = interesting_files
        self.this_tree = working_tree
        self.this_tree.lock_tree_write()
        self.base_tree = base_tree
        self.base_tree.lock_read()
        self.other_tree = other_tree
        self.other_tree.lock_read()
        self._raw_conflicts = []
        self.cooked_conflicts = []
        self.reprocess = reprocess

            self.this_tree.unlock()

    def make_preview_transform(self):
        self.base_tree.lock_read()
        self.other_tree.lock_read()
        self.tt = TransformPreview(self.this_tree)
            self._compute_transform()
            self.other_tree.unlock()
            self.base_tree.unlock()
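    # make_preview_transform() builds the merge result as a TransformPreview
    # instead of modifying the working tree.  A usage sketch, assuming the
    # constructor is called with do_merge=False so only the preview is built,
    # and that the preview supports the usual TreeTransform finalize() call:
    #
    #   merger = Merge3Merger(wt, wt, base_tree, other_tree, do_merge=False)
    #   tt = merger.make_preview_transform()
    #   try:
    #       for change in tt.iter_changes():
    #           pass  # inspect the would-be result without touching wt
    #   finally:
    #       tt.finalize()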
    def _compute_transform(self):
        entries = self._entries3()
        child_pb = ui.ui_factory.nested_progress_bar()
            for num, (file_id, changed, parents3, names3,
                      executable3) in enumerate(entries):
                child_pb.update('Preparing file merge', num, len(entries))
                self._merge_names(file_id, parents3, names3)
                    file_status = self.merge_contents(file_id)
                    file_status = 'unmodified'
                self._merge_executable(file_id,
                    executable3, file_status)
        child_pb = ui.ui_factory.nested_progress_bar()
            fs_conflicts = resolve_conflicts(self.tt, child_pb,
                lambda t, c: conflict_pass(t, c, self.other_tree))
        if self.change_reporter is not None:
            from bzrlib import delta
            delta.report_changes(
                self.tt.iter_changes(), self.change_reporter)
        self.cook_conflicts(fs_conflicts)
        for conflict in self.cooked_conflicts:

        """Gather data about files modified between three trees.
        Return a list of tuples of file_id, changed, parents3, names3,
        executable3. changed is a boolean indicating whether the file contents
        or kind were changed. parents3 is a tuple of parent ids for base,
        other and this. names3 is a tuple of names for base, other and this.
        executable3 is a tuple of execute-bit values for base, other and this.
        iterator = self.other_tree.iter_changes(self.base_tree,
                include_unchanged=True, specific_files=self.interesting_files,
                extra_trees=[self.this_tree])
        for (file_id, paths, changed, versioned, parents, names, kind,
             executable) in iterator:
            if (self.interesting_ids is not None and
                file_id not in self.interesting_ids):
            if file_id in self.this_tree.inventory:
                entry = self.this_tree.inventory[file_id]
                this_name = entry.name
                this_parent = entry.parent_id
                this_executable = entry.executable
                this_executable = None
            parents3 = parents + (this_parent,)
            names3 = names + (this_name,)
            executable3 = executable + (this_executable,)
            result.append((file_id, changed, parents3, names3, executable3))
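    # Shape of one _entries3() row, per the docstring above; the values here
    # are made up for illustration, and each 3-tuple is ordered
    # (base, other, this):
    #
    #   ('file-id-1',                        # file_id
    #    True,                               # changed: contents or kind differ
    #    ('root-id', 'root-id', 'root-id'),  # parents3
    #    ('a.txt', 'a.txt', 'a.txt'),        # names3
    #    (False, True, False))               # executable3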
    def fix_root(self):
            self.tt.final_kind(self.tt.root)

    """Three-way tree merger, text weave merger."""
    supports_reprocess = True
    supports_show_base = False

    def __init__(self, working_tree, this_tree, base_tree, other_tree,
                 interesting_ids=None, pb=DummyProgress(), pp=None,
                 reprocess=False, change_reporter=None):
        self.this_revision_tree = self._get_revision_tree(this_tree)
        self.other_revision_tree = self._get_revision_tree(other_tree)
        super(WeaveMerger, self).__init__(working_tree, this_tree,
                                          base_tree, other_tree,
                                          interesting_ids=interesting_ids,
                                          pb=pb, pp=pp, reprocess=reprocess,
                                          change_reporter=change_reporter)

    def _get_revision_tree(self, tree):
        """Return a revision tree related to this tree.
        If the tree is a WorkingTree, the basis will be returned.
        if getattr(tree, 'get_weave', False) is False:
            # If we have a WorkingTree, try using the basis
            return tree.branch.basis_tree()

    def _check_file(self, file_id):
        """Check that the revision tree's version of the file matches."""
        for tree, rt in ((self.this_tree, self.this_revision_tree),
                         (self.other_tree, self.other_revision_tree)):
            if tree.get_file_sha1(file_id) != rt.get_file_sha1(file_id):
                raise WorkingTreeNotRevision(self.this_tree)

    supports_reverse_cherrypick = False
    history_based = True

    def _merged_lines(self, file_id):
        """Generate the merged lines.
        There is no distinction between lines that are meant to contain <<<<<<<
        weave = self.this_revision_tree.get_weave(file_id)
        this_revision_id = self.this_revision_tree.inventory[file_id].revision
        other_revision_id = \
            self.other_revision_tree.inventory[file_id].revision
        wm = WeaveMerge(weave, this_revision_id, other_revision_id,
                        '<<<<<<< TREE\n', '>>>>>>> MERGE-SOURCE\n')
        return wm.merge_lines(self.reprocess)
            base = self.base_tree
        plan = self.this_tree.plan_file_merge(file_id, self.other_tree,
        if 'merge' in debug.debug_flags:
            trans_id = self.tt.trans_id_file_id(file_id)
            name = self.tt.final_name(trans_id) + '.plan'
            contents = ('%10s|%s' % l for l in plan)
            self.tt.new_file(name, self.tt.final_parent(trans_id), contents)
        textmerge = PlanWeaveMerge(plan, '<<<<<<< TREE\n',
                                   '>>>>>>> MERGE-SOURCE\n')
        return textmerge.merge_lines(self.reprocess)
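    # The plan consumed by PlanWeaveMerge is a sequence of (state, line)
    # pairs like the ones written to the '.plan' debug file above.  A minimal
    # sketch with a hand-built plan (states mirror those emitted by
    # plan_merge/_iter_plan below):
    #
    #   plan = [('unchanged', 'a\n'),
    #           ('killed-b', 'b\n'),
    #           ('new-a', 'c\n')]
    #   textmerge = PlanWeaveMerge(plan, '<<<<<<< TREE\n',
    #                              '>>>>>>> MERGE-SOURCE\n')
    #   lines, conflicted = textmerge.merge_lines(False)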
    def text_merge(self, file_id, trans_id):
        """Perform a (weave) text merge for a given file and file-id.
        If conflicts are encountered, .THIS and .OTHER files will be emitted,
        and a conflict will be noted.
        self._check_file(file_id)
        lines, conflicts = self._merged_lines(file_id)
        lines = list(lines)
        # Note we're checking whether the OUTPUT is binary in this case,

    from bzrlib import option
    return option._merge_type_registry
def _plan_annotate_merge(annotated_a, annotated_b, ancestors_a, ancestors_b):
    def status_a(revision, text):
        if revision in ancestors_b:
            return 'killed-b', text
            return 'new-a', text

    def status_b(revision, text):
        if revision in ancestors_a:
            return 'killed-a', text
            return 'new-b', text

    plain_a = [t for (a, t) in annotated_a]
    plain_b = [t for (a, t) in annotated_b]
    matcher = patiencediff.PatienceSequenceMatcher(None, plain_a, plain_b)
    blocks = matcher.get_matching_blocks()
    for ai, bi, l in blocks:
        # process all mismatched sections
        # (last mismatched section is handled because blocks always
        # includes a 0-length last block)
        for revision, text in annotated_a[a_cur:ai]:
            yield status_a(revision, text)
        for revision, text in annotated_b[b_cur:bi]:
            yield status_b(revision, text)
        # and now the matched section
        for text_a, text_b in zip(plain_a[ai:a_cur], plain_b[bi:b_cur]):
            assert text_a == text_b
            yield "unchanged", text_a
class _PlanMergeBase(object):

    def __init__(self, a_rev, b_rev, vf):
        :param a_rev: Revision-id of one revision to merge
        :param b_rev: Revision-id of the other revision to merge
        :param vf: A versionedfile containing both revisions
        self.lines_a = vf.get_lines(a_rev)
        self.lines_b = vf.get_lines(b_rev)
        self._last_lines = None
        self._last_lines_revision_id = None
        self._cached_matching_blocks = {}

    def plan_merge(self):
        """Generate a 'plan' for merging the two revisions.
        This involves comparing their texts and determining the cause of
        differences. If text A has a line and text B does not, then either the
        line was added to text A, or it was deleted from B. Once the causes
        are combined, they are written out in the format described in
        VersionedFile.plan_merge
        blocks = self._get_matching_blocks(self.a_rev, self.b_rev)
        unique_a, unique_b = self._unique_lines(blocks)
        new_a, killed_b = self._determine_status(self.a_rev, unique_a)
        new_b, killed_a = self._determine_status(self.b_rev, unique_b)
        return self._iter_plan(blocks, new_a, killed_b, new_b, killed_a)

    def _iter_plan(self, blocks, new_a, killed_b, new_b, killed_a):
        for i, j, n in blocks:
            for a_index in range(last_i, i):
                if a_index in new_a:
                    if a_index in killed_b:
                        yield 'conflicted-a', self.lines_a[a_index]
                        yield 'new-a', self.lines_a[a_index]
                    yield 'killed-b', self.lines_a[a_index]
            for b_index in range(last_j, j):
                if b_index in new_b:
                    if b_index in killed_a:
                        yield 'conflicted-b', self.lines_b[b_index]
                        yield 'new-b', self.lines_b[b_index]
                    yield 'killed-a', self.lines_b[b_index]
            # handle common lines
            for a_index in range(i, i+n):
                yield 'unchanged', self.lines_a[a_index]
    def _get_matching_blocks(self, left_revision, right_revision):
        """Return a description of which sections of two revisions match.
        See SequenceMatcher.get_matching_blocks
        cached = self._cached_matching_blocks.get((left_revision,
        if cached is not None:
        if self._last_lines_revision_id == left_revision:
            left_lines = self._last_lines
            left_lines = self.vf.get_lines(left_revision)
        right_lines = self.vf.get_lines(right_revision)
        self._last_lines = right_lines
        self._last_lines_revision_id = right_revision
        matcher = patiencediff.PatienceSequenceMatcher(None, left_lines,
        return matcher.get_matching_blocks()

    def _unique_lines(self, matching_blocks):
        """Analyse matching_blocks to determine which lines are unique
        :return: a tuple of (unique_left, unique_right), where the values are
            sets of line numbers of unique lines.
        for i, j, n in matching_blocks:
            unique_left.extend(range(last_i, i))
            unique_right.extend(range(last_j, j))
        return unique_left, unique_right
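    # _unique_lines() reads "unique" spans straight off the matching blocks:
    # whatever falls between the end of one matched block and the start of
    # the next is unique to that side.  An equivalent standalone sketch using
    # the stdlib matcher (bzrlib uses PatienceSequenceMatcher instead):
    #
    #   from difflib import SequenceMatcher
    #   a = ['x\n', 'common\n']
    #   b = ['common\n', 'y\n']
    #   unique_a, unique_b = [], []
    #   last_i = last_j = 0
    #   for i, j, n in SequenceMatcher(None, a, b).get_matching_blocks():
    #       unique_a.extend(range(last_i, i))
    #       unique_b.extend(range(last_j, j))
    #       last_i, last_j = i + n, j + n
    #   # unique_a == [0] (the 'x\n' line); unique_b == [1] (the 'y\n' line)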
    def _subtract_plans(old_plan, new_plan):
        """Remove changes from new_plan that came from old_plan.
        It is assumed that the difference between the old_plan and new_plan
        is their choice of 'b' text.
        All lines from new_plan that differ from old_plan are emitted
        verbatim. All lines from new_plan that match old_plan but are
        not about the 'b' revision are emitted verbatim.
        Lines that match and are about the 'b' revision are the lines we
        don't want, so we convert 'killed-b' -> 'unchanged', and 'new-b'
        is skipped entirely.
        matcher = patiencediff.PatienceSequenceMatcher(None, old_plan,
        for i, j, n in matcher.get_matching_blocks():
            for jj in range(last_j, j):
            for jj in range(j, j+n):
                plan_line = new_plan[jj]
                if plan_line[0] == 'new-b':
                elif plan_line[0] == 'killed-b':
                    yield 'unchanged', plan_line[1]
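    # Worked example for _subtract_plans(), assuming the elided branches emit
    # non-matching and non-'b' plan lines verbatim (and that the method keeps
    # its @staticmethod decorator, which is not shown in this hunk):
    #
    #   old_plan = [('unchanged', 'a\n'), ('new-b', 'x\n')]
    #   new_plan = [('unchanged', 'a\n'), ('new-b', 'x\n'), ('new-a', 'y\n')]
    #   list(_PlanMergeBase._subtract_plans(old_plan, new_plan))
    #   # -> [('unchanged', 'a\n'), ('new-a', 'y\n')]
    #   # the matching 'new-b' line is dropped; a matching 'killed-b' line
    #   # would have been rewritten as 'unchanged'.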
class _PlanMerge(_PlanMergeBase):
    """Plan an annotate merge using on-the-fly annotation"""

    def __init__(self, a_rev, b_rev, vf):
        _PlanMergeBase.__init__(self, a_rev, b_rev, vf)
        a_ancestry = set(vf.get_ancestry(a_rev, topo_sorted=False))
        b_ancestry = set(vf.get_ancestry(b_rev, topo_sorted=False))
        self.uncommon = a_ancestry.symmetric_difference(b_ancestry)

    def _determine_status(self, revision_id, unique_line_numbers):
        """Determines the status of unique lines versus all LCAs.
        Basically, determines why the line is unique to this revision.
        A line may be determined new or killed, but not both.
        :param revision_id: The id of the revision in which the lines are
        :param unique_line_numbers: The line numbers of unique lines.
        :return: a tuple of (new_this, killed_other)
        new = self._find_new(revision_id)
        killed = set(unique_line_numbers).difference(new)

    def _find_new(self, version_id):
        """Determine which lines are new in the ancestry of this version.
        If a line is present in this version, and not present in any
        common ancestor, it is considered new.
        if version_id not in self.uncommon:
        parents = self.vf.get_parent_map([version_id])[version_id]
        if len(parents) == 0:
            return set(range(len(self.vf.get_lines(version_id))))
        for parent in parents:
            blocks = self._get_matching_blocks(version_id, parent)
            result, unused = self._unique_lines(blocks)
            parent_new = self._find_new(parent)
            for i, j, n in blocks:
                for ii, jj in [(i+r, j+r) for r in range(n)]:
                    if jj in parent_new:
                new.intersection_update(result)
class _PlanLCAMerge(_PlanMergeBase):
    This merge algorithm differs from _PlanMerge in that:
    1. comparisons are done against LCAs only
    2. cases where a contested line is new versus one LCA but old versus
       another are marked as conflicts, by emitting the line as conflicted-a
    This is faster, and hopefully produces more useful output.

    def __init__(self, a_rev, b_rev, vf, graph):
        _PlanMergeBase.__init__(self, a_rev, b_rev, vf)
        self.lcas = graph.find_lca(a_rev, b_rev)
        for lca in self.lcas:
            lca_lines = self.vf.get_lines(lca)
            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_a,
            blocks = list(matcher.get_matching_blocks())
            self._cached_matching_blocks[(a_rev, lca)] = blocks
            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_b,
            blocks = list(matcher.get_matching_blocks())
            self._cached_matching_blocks[(b_rev, lca)] = blocks

    def _determine_status(self, revision_id, unique_line_numbers):
        """Determines the status of unique lines versus all LCAs.
        Basically, determines why the line is unique to this revision.
        A line may be determined new, killed, or both.
        If a line is determined new, that means it was not present in at least
        one LCA, and is not present in the other merge revision.
        If a line is determined killed, that means the line was present in
        If a line is killed and new, this indicates that the two merge
        revisions contain differing conflict resolutions.
        :param revision_id: The id of the revision in which the lines are
        :param unique_line_numbers: The line numbers of unique lines.
        :return: a tuple of (new_this, killed_other)
        unique_line_numbers = set(unique_line_numbers)
        for lca in self.lcas:
            blocks = self._get_matching_blocks(revision_id, lca)
            unique_vs_lca, _ignored = self._unique_lines(blocks)
            new.update(unique_line_numbers.intersection(unique_vs_lca))
            killed.update(unique_line_numbers.difference(unique_vs_lca))
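    # Classification sketch for the loop above: a unique line is counted
    # 'new' against any LCA it is absent from, and 'killed' against any LCA
    # it is present in, so criss-cross resolutions can mark one line both
    # ways.  With two LCAs (line numbers are illustrative):
    #
    #   unique_line_numbers = set([3, 7])
    #   unique_vs_lca1 = set([3])    # line 3 is absent from LCA 1
    #   unique_vs_lca2 = set([])     # both lines are present in LCA 2
    #   # after the loop: new == set([3]), killed == set([3, 7])
    #   # line 3 being both new and killed signals differing resolutions.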