# Copyright (C) 2005, 2006, 2008 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

import errno
import os
import shutil
import tempfile

import bzrlib.osutils
from merge_core import merge_flex
from changeset import generate_changeset, ExceptionConflictHandler
from changeset import Inventory
from bzrlib import find_branch
from bzrlib.errors import BzrCommandError
from bzrlib.diff import compare_trees
from trace import mutter, warning
class UnrelatedBranches(BzrCommandError):
    def __init__(self):
        msg = "Branches have no common ancestor, and no base revision"\
            " specified."
        BzrCommandError.__init__(self, msg)
class MergeConflictHandler(ExceptionConflictHandler):
    """Handle conflicts encountered while merging"""
    def __init__(self, dir, ignore_zero=False):
        ExceptionConflictHandler.__init__(self, dir)
        self.conflicts = 0
        self.ignore_zero = ignore_zero

    def copy(self, source, dest):
        """Copy the text and mode of a file

        :param source: The path of the file to copy
        :param dest: The destination file to create
        """
        s_file = file(source, "rb")
        d_file = file(dest, "wb")
        for line in s_file:
            d_file.write(line)
        os.chmod(dest, 0777 & os.stat(source).st_mode)
    def add_suffix(self, name, suffix, last_new_name=None):
        """Rename a file to append a suffix.  If the new name exists, the
        suffix is added repeatedly until a non-existent name is found.

        :param name: The path of the file
        :param suffix: The suffix to append
        :param last_new_name: (used for recursive calls) the last name tried
        """
        if last_new_name is None:
            last_new_name = name
        new_name = last_new_name+suffix
        try:
            os.rename(name, new_name)
            return new_name
        except OSError, e:
            if e.errno != errno.EEXIST and e.errno != errno.ENOTEMPTY:
                raise
            return self.add_suffix(name, suffix, last_new_name=new_name)
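    # Illustrative behaviour (example, not from the original source):
    # add_suffix("foo", ".moved") renames "foo" to "foo.moved"; if that name
    # is already taken it retries with "foo.moved.moved", and so on, returning
    # the name that was finally used.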
    def conflict(self, text):
        warning(text)
        self.conflicts += 1

    def merge_conflict(self, new_file, this_path, base_path, other_path):
        """
        Handle diff3 conflicts by producing a .THIS, .BASE and .OTHER.  The
        main file will be a version with diff3 conflicts.

        :param new_file: Path to the output file with diff3 markers
        :param this_path: Path to the file text for the THIS tree
        :param base_path: Path to the file text for the BASE tree
        :param other_path: Path to the file text for the OTHER tree
        """
        self.add_suffix(this_path, ".THIS")
        self.copy(base_path, this_path+".BASE")
        self.copy(other_path, this_path+".OTHER")
        os.rename(new_file, this_path)
        self.conflict("Diff3 conflict encountered in %s" % this_path)

    def target_exists(self, entry, target, old_path):
        """Handle the case when the target file or dir exists"""
        moved_path = self.add_suffix(target, ".moved")
        self.conflict("Moved existing %s to %s" % (target, moved_path))

    def rmdir_non_empty(self, filename):
        """Handle the case where the dir to be removed still has contents"""
        self.conflict("Directory %s not removed because it is not empty"\
            % filename)
        return "skip"

    def finalize(self):
        if not self.ignore_zero:
            print "%d conflicts encountered.\n" % self.conflicts
class SourceFile(object):
    def __init__(self, path, id, present=None, isdir=None):
        self.path = path
        self.id = id
        self.present = present
        self.isdir = isdir
        self.interesting = True

    def __repr__(self):
        return "SourceFile(%s, %s)" % (self.path, self.id)
def get_tree(treespec, temp_root, label):
    location, revno = treespec
    branch = find_branch(location)
    if revno is None:
        base_tree = branch.working_tree()
    elif revno == -1:
        base_tree = branch.basis_tree()
    else:
        base_tree = branch.revision_tree(branch.lookup_revision(revno))
    temp_path = os.path.join(temp_root, label)
    os.mkdir(temp_path)
    return branch, MergeTree(base_tree, temp_path)
def abspath(tree, file_id):
    path = tree.inventory.id2path(file_id)
    if path == "":
        return "./."
    return "./" + path


def file_exists(tree, file_id):
    return tree.has_filename(tree.id2path(file_id))


def inventory_map(tree):
    inventory = {}
    for file_id in tree.inventory:
        if not file_exists(tree, file_id):
            continue
        path = abspath(tree, file_id)
        inventory[path] = SourceFile(path, file_id)
    return inventory
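# Example of the mapping built by inventory_map (illustrative values only):
#   {"./.": SourceFile("./.", "TREE_ROOT"),
#    "./hello.txt": SourceFile("./hello.txt", "hello-id"), ...}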
class MergeTree(object):
136
def __init__(self, tree, tempdir):
from itertools import chain

from bzrlib import (
    debug,
    errors,
    osutils,
    patiencediff,
    revision as _mod_revision,
    tree as _mod_tree,
    ui,
    )
from bzrlib.branch import Branch
from bzrlib.conflicts import ConflictList, Conflict
from bzrlib.errors import (BzrCommandError,
                           BzrError,
                           NoCommits,
                           UnsupportedOperation,
                           WorkingTreeNotRevision,
                           )
from bzrlib.graph import Graph
from bzrlib.merge3 import Merge3
from bzrlib.osutils import rename, pathjoin
from progress import DummyProgress, ProgressPhase
from bzrlib.revision import (NULL_REVISION, ensure_null)
from bzrlib.textfile import check_text_lines
from bzrlib.trace import mutter, warning, note, is_quiet
from bzrlib.transform import (TransformPreview, TreeTransform,
                              resolve_conflicts, cook_conflicts,
                              conflict_pass, FinalPaths, create_from_tree,
                              unique_add, ROOT_PARENT)
from bzrlib.versionedfile import PlanWeaveMerge
63
# TODO: Report back as changes are merged in
66
def transform_tree(from_tree, to_tree, interesting_ids=None):
67
merge_inner(from_tree.branch, to_tree, from_tree, ignore_zero=True,
68
interesting_ids=interesting_ids, this_tree=from_tree)
72
def __init__(self, this_branch, other_tree=None, base_tree=None,
73
this_tree=None, pb=None, change_reporter=None,
74
recurse='down', revision_graph=None):
137
75
object.__init__(self)
138
if hasattr(tree, "basedir"):
139
self.root = tree.basedir
142
self.inventory = inventory_map(tree)
144
self.tempdir = tempdir
145
os.mkdir(os.path.join(self.tempdir, "texts"))
148
def readonly_path(self, id):
149
if id not in self.tree:
151
if self.root is not None:
152
return self.tree.abspath(self.tree.id2path(id))
154
if self.tree.inventory[id].kind in ("directory", "root_directory"):
156
if not self.cached.has_key(id):
157
path = os.path.join(self.tempdir, "texts", id)
158
outfile = file(path, "wb")
159
outfile.write(self.tree.get_file(id).read())
160
assert(os.path.exists(path))
161
self.cached[id] = path
162
return self.cached[id]
166
def merge(other_revision, base_revision,
167
check_clean=True, ignore_zero=False,
169
"""Merge changes into a tree.
172
Base for three-way merge.
174
Other revision for three-way merge.
176
Directory to merge changes into; '.' by default.
178
If true, this_dir must have no uncommitted changes before the
181
tempdir = tempfile.mkdtemp(prefix="bzr-")
185
this_branch = find_branch(this_dir)
76
self.this_branch = this_branch
77
self.this_basis = _mod_revision.ensure_null(
78
this_branch.last_revision())
79
self.this_rev_id = None
80
self.this_tree = this_tree
81
self.this_revision_tree = None
82
self.this_basis_tree = None
83
self.other_tree = other_tree
84
self.other_branch = None
85
self.base_tree = base_tree
86
self.ignore_zero = False
87
self.backup_files = False
88
self.interesting_ids = None
89
self.interesting_files = None
90
self.show_base = False
91
self.reprocess = False
96
self.recurse = recurse
97
self.change_reporter = change_reporter
98
self._cached_trees = {}
99
self._revision_graph = revision_graph
100
self._base_is_ancestor = None
101
self._base_is_other_ancestor = None
102
self._is_criss_cross = None
103
self._lca_trees = None
106
def revision_graph(self):
107
if self._revision_graph is None:
108
self._revision_graph = self.this_branch.repository.get_graph()
109
return self._revision_graph
111
def _set_base_is_ancestor(self, value):
112
self._base_is_ancestor = value
114
def _get_base_is_ancestor(self):
115
if self._base_is_ancestor is None:
116
self._base_is_ancestor = self.revision_graph.is_ancestor(
117
self.base_rev_id, self.this_basis)
118
return self._base_is_ancestor
120
base_is_ancestor = property(_get_base_is_ancestor, _set_base_is_ancestor)
122
def _set_base_is_other_ancestor(self, value):
123
self._base_is_other_ancestor = value
125
def _get_base_is_other_ancestor(self):
126
if self._base_is_other_ancestor is None:
127
if self.other_basis is None:
129
self._base_is_other_ancestor = self.revision_graph.is_ancestor(
130
self.base_rev_id, self.other_basis)
131
return self._base_is_other_ancestor
133
base_is_other_ancestor = property(_get_base_is_other_ancestor,
134
_set_base_is_other_ancestor)
137
def from_uncommitted(tree, other_tree, pb=None, base_tree=None):
138
"""Return a Merger for uncommitted changes in other_tree.
140
:param tree: The tree to merge into
141
:param other_tree: The tree to get uncommitted changes from
142
:param pb: A progress indicator
143
:param base_tree: The basis to use for the merge. If unspecified,
144
other_tree.basis_tree() will be used.
146
if base_tree is None:
147
base_tree = other_tree.basis_tree()
148
merger = Merger(tree.branch, other_tree, base_tree, tree, pb)
149
merger.base_rev_id = merger.base_tree.get_revision_id()
150
merger.other_rev_id = None
151
        merger.other_basis = merger.base_rev_id
        return merger
155
def from_mergeable(klass, tree, mergeable, pb):
156
"""Return a Merger for a bundle or merge directive.
158
:param tree: The tree to merge changes into
159
:param mergeable: A merge directive or bundle
160
:param pb: A progress indicator
162
mergeable.install_revisions(tree.branch.repository)
163
base_revision_id, other_revision_id, verified =\
164
mergeable.get_merge_request(tree.branch.repository)
165
revision_graph = tree.branch.repository.get_graph()
166
if base_revision_id is not None:
167
if (base_revision_id != _mod_revision.NULL_REVISION and
168
revision_graph.is_ancestor(
169
base_revision_id, tree.branch.last_revision())):
170
base_revision_id = None
172
warning('Performing cherrypick')
173
merger = klass.from_revision_ids(pb, tree, other_revision_id,
174
base_revision_id, revision_graph=
176
return merger, verified
179
def from_revision_ids(pb, tree, other, base=None, other_branch=None,
180
base_branch=None, revision_graph=None,
182
"""Return a Merger for revision-ids.
184
:param pb: A progress indicator
185
:param tree: The tree to merge changes into
186
:param other: The revision-id to use as OTHER
187
:param base: The revision-id to use as BASE. If not specified, will
189
:param other_branch: A branch containing the other revision-id. If
190
not supplied, tree.branch is used.
191
:param base_branch: A branch containing the base revision-id. If
192
not supplied, other_branch or tree.branch will be used.
193
:param revision_graph: If you have a revision_graph precomputed, pass
194
it in, otherwise it will be created for you.
195
:param tree_branch: The branch associated with tree. If not supplied,
196
tree.branch will be used.
198
if tree_branch is None:
199
tree_branch = tree.branch
200
merger = Merger(tree_branch, this_tree=tree, pb=pb,
201
revision_graph=revision_graph)
202
if other_branch is None:
203
other_branch = tree.branch
204
merger.set_other_revision(other, other_branch)
208
if base_branch is None:
209
base_branch = other_branch
210
merger.set_base_revision(base, base_branch)
213
def revision_tree(self, revision_id, branch=None):
214
if revision_id not in self._cached_trees:
216
branch = self.this_branch
218
tree = self.this_tree.revision_tree(revision_id)
219
except errors.NoSuchRevisionInTree:
220
tree = branch.repository.revision_tree(revision_id)
221
self._cached_trees[revision_id] = tree
222
return self._cached_trees[revision_id]
224
def _get_tree(self, treespec, possible_transports=None):
225
from bzrlib import workingtree
226
location, revno = treespec
228
tree = workingtree.WorkingTree.open_containing(location)[0]
229
return tree.branch, tree
230
branch = Branch.open_containing(location, possible_transports)[0]
232
revision_id = branch.last_revision()
234
revision_id = branch.get_rev_id(revno)
235
revision_id = ensure_null(revision_id)
236
return branch, self.revision_tree(revision_id, branch)
238
def ensure_revision_trees(self):
239
if self.this_revision_tree is None:
240
self.this_basis_tree = self.revision_tree(self.this_basis)
241
if self.this_basis == self.this_rev_id:
242
self.this_revision_tree = self.this_basis_tree
244
if self.other_rev_id is None:
245
other_basis_tree = self.revision_tree(self.other_basis)
246
changes = other_basis_tree.changes_from(self.other_tree)
247
if changes.has_changed():
248
raise WorkingTreeNotRevision(self.this_tree)
249
other_rev_id = self.other_basis
250
self.other_tree = other_basis_tree
252
def file_revisions(self, file_id):
253
self.ensure_revision_trees()
254
def get_id(tree, file_id):
255
revision_id = tree.inventory[file_id].revision
257
if self.this_rev_id is None:
258
if self.this_basis_tree.get_file_sha1(file_id) != \
259
self.this_tree.get_file_sha1(file_id):
260
raise WorkingTreeNotRevision(self.this_tree)
262
trees = (self.this_basis_tree, self.other_tree)
263
return [get_id(tree, file_id) for tree in trees]
265
def check_basis(self, check_clean, require_commits=True):
266
if self.this_basis is None and require_commits is True:
267
raise BzrCommandError("This branch has no commits."
268
" (perhaps you would prefer 'bzr pull')")
187
changes = compare_trees(this_branch.working_tree(),
188
this_branch.basis_tree(), False)
189
if changes.has_changed():
190
raise BzrCommandError("Working tree has uncommitted changes.")
191
other_branch, other_tree = get_tree(other_revision, tempdir, "other")
271
if self.this_basis != self.this_rev_id:
272
raise errors.UncommittedChanges(self.this_tree)
274
def compare_basis(self):
276
basis_tree = self.revision_tree(self.this_tree.last_revision())
277
except errors.NoSuchRevision:
278
basis_tree = self.this_tree.basis_tree()
279
changes = self.this_tree.changes_from(basis_tree)
280
if not changes.has_changed():
281
self.this_rev_id = self.this_basis
283
def set_interesting_files(self, file_list):
284
self.interesting_files = file_list
286
def set_pending(self):
287
if not self.base_is_ancestor or not self.base_is_other_ancestor or self.other_rev_id is None:
291
def _add_parent(self):
292
new_parents = self.this_tree.get_parent_ids() + [self.other_rev_id]
293
new_parent_trees = []
294
for revision_id in new_parents:
296
tree = self.revision_tree(revision_id)
297
except errors.NoSuchRevision:
301
new_parent_trees.append((revision_id, tree))
303
self.this_tree.set_parent_trees(new_parent_trees,
304
allow_leftmost_as_ghost=True)
306
for _revision_id, tree in new_parent_trees:
310
def set_other(self, other_revision, possible_transports=None):
311
"""Set the revision and tree to merge from.
313
This sets the other_tree, other_rev_id, other_basis attributes.
315
:param other_revision: The [path, revision] list to merge from.
317
self.other_branch, self.other_tree = self._get_tree(other_revision,
319
if other_revision[1] == -1:
320
self.other_rev_id = _mod_revision.ensure_null(
321
self.other_branch.last_revision())
322
if _mod_revision.is_null(self.other_rev_id):
323
raise NoCommits(self.other_branch)
324
self.other_basis = self.other_rev_id
325
elif other_revision[1] is not None:
326
self.other_rev_id = self.other_branch.get_rev_id(other_revision[1])
327
self.other_basis = self.other_rev_id
329
self.other_rev_id = None
330
self.other_basis = self.other_branch.last_revision()
331
if self.other_basis is None:
332
raise NoCommits(self.other_branch)
333
if self.other_rev_id is not None:
334
self._cached_trees[self.other_rev_id] = self.other_tree
335
self._maybe_fetch(self.other_branch,self.this_branch, self.other_basis)
337
def set_other_revision(self, revision_id, other_branch):
338
"""Set 'other' based on a branch and revision id
340
:param revision_id: The revision to use for a tree
341
:param other_branch: The branch containing this tree
343
self.other_rev_id = revision_id
344
self.other_branch = other_branch
345
self._maybe_fetch(other_branch, self.this_branch, self.other_rev_id)
346
self.other_tree = self.revision_tree(revision_id)
347
self.other_basis = revision_id
349
def set_base_revision(self, revision_id, branch):
350
"""Set 'base' based on a branch and revision id
352
:param revision_id: The revision to use for a tree
353
:param branch: The branch containing this tree
355
self.base_rev_id = revision_id
356
self.base_branch = branch
357
self._maybe_fetch(branch, self.this_branch, revision_id)
358
self.base_tree = self.revision_tree(revision_id)
360
def _maybe_fetch(self, source, target, revision_id):
361
if not source.repository.has_same_location(target.repository):
362
target.fetch(source, revision_id)
365
revisions = [ensure_null(self.this_basis),
366
ensure_null(self.other_basis)]
367
if NULL_REVISION in revisions:
368
self.base_rev_id = NULL_REVISION
369
self.base_tree = self.revision_tree(self.base_rev_id)
370
self._is_criss_cross = False
372
lcas = self.revision_graph.find_lca(revisions[0], revisions[1])
373
self._is_criss_cross = False
375
self.base_rev_id = NULL_REVISION
377
self.base_rev_id = list(lcas)[0]
378
else: # len(lcas) > 1
380
# find_unique_lca can only handle 2 nodes, so we have to
381
# start back at the beginning. It is a shame to traverse
382
# the graph again, but better than re-implementing
384
self.base_rev_id = self.revision_graph.find_unique_lca(
385
revisions[0], revisions[1])
387
self.base_rev_id = self.revision_graph.find_unique_lca(
389
self._is_criss_cross = True
390
if self.base_rev_id == NULL_REVISION:
391
raise UnrelatedBranches()
392
if self._is_criss_cross:
393
warning('Warning: criss-cross merge encountered. See bzr'
394
' help criss-cross.')
395
mutter('Criss-cross lcas: %r' % lcas)
396
interesting_revision_ids = [self.base_rev_id]
397
interesting_revision_ids.extend(lcas)
398
interesting_trees = dict((t.get_revision_id(), t)
399
for t in self.this_branch.repository.revision_trees(
400
interesting_revision_ids))
401
self._cached_trees.update(interesting_trees)
402
self.base_tree = interesting_trees.pop(self.base_rev_id)
403
sorted_lca_keys = self.revision_graph.find_merge_order(
405
self._lca_trees = [interesting_trees[key]
406
for key in sorted_lca_keys]
408
self.base_tree = self.revision_tree(self.base_rev_id)
409
self.base_is_ancestor = True
410
self.base_is_other_ancestor = True
411
mutter('Base revid: %r' % self.base_rev_id)
413
def set_base(self, base_revision):
414
"""Set the base revision to use for the merge.
416
:param base_revision: A 2-list containing a path and revision number.
418
mutter("doing merge() with no base_revision specified")
192
419
if base_revision == [None, None]:
193
if other_revision[1] == -1:
196
o_revno = other_revision[1]
197
base_revno = this_branch.common_ancestor(other_branch,
198
other_revno=o_revno)[0]
199
if base_revno is None:
200
raise UnrelatedBranches()
201
base_revision = ['.', base_revno]
202
base_branch, base_tree = get_tree(base_revision, tempdir, "base")
203
merge_inner(this_branch, other_tree, base_tree, tempdir,
204
ignore_zero=ignore_zero)
206
shutil.rmtree(tempdir)
209
def generate_cset_optimized(tree_a, tree_b, inventory_a, inventory_b):
210
"""Generate a changeset, using the text_id to mark really-changed files.
211
This permits blazing comparisons when text_ids are present. It also
212
disables metadata comparison for files with identical texts.
214
for file_id in tree_a.tree.inventory:
215
if file_id not in tree_b.tree.inventory:
217
entry_a = tree_a.tree.inventory[file_id]
218
entry_b = tree_b.tree.inventory[file_id]
219
if (entry_a.kind, entry_b.kind) != ("file", "file"):
221
if None in (entry_a.text_id, entry_b.text_id):
223
if entry_a.text_id != entry_b.text_id:
225
inventory_a[abspath(tree_a.tree, file_id)].interesting = False
226
inventory_b[abspath(tree_b.tree, file_id)].interesting = False
227
cset = generate_changeset(tree_a, tree_b, inventory_a, inventory_b)
228
for entry in cset.entries.itervalues():
229
entry.metadata_change = None
233
def merge_inner(this_branch, other_tree, base_tree, tempdir,
235
this_tree = get_tree((this_branch.base, None), tempdir, "this")[1]
237
def get_inventory(tree):
238
return tree.inventory
240
inv_changes = merge_flex(this_tree, base_tree, other_tree,
241
generate_cset_optimized, get_inventory,
242
MergeConflictHandler(base_tree.root,
243
ignore_zero=ignore_zero))
246
for id, path in inv_changes.iteritems():
251
assert path.startswith('./')
253
adjust_ids.append((path, id))
254
this_branch.set_inventory(regen_inventory(this_branch, this_tree.root, adjust_ids))
257
def regen_inventory(this_branch, root, new_entries):
258
old_entries = this_branch.read_working_inventory()
261
for file_id in old_entries:
262
entry = old_entries[file_id]
263
path = old_entries.id2path(file_id)
264
new_inventory[file_id] = (path, file_id, entry.parent_id, entry.kind)
265
by_path[path] = file_id
270
for path, file_id in new_entries:
272
del new_inventory[file_id]
275
new_path_list.append((path, file_id))
276
if file_id not in old_entries:
278
# Ensure no file is added before its parent
280
for path, file_id in new_path_list:
284
parent = by_path[os.path.dirname(path)]
285
kind = bzrlib.osutils.file_kind(os.path.join(root, path))
286
new_inventory[file_id] = (path, file_id, parent, kind)
287
by_path[path] = file_id
289
# Get a list in insertion order
290
new_inventory_list = new_inventory.values()
291
mutter ("""Inventory regeneration:
292
old length: %i insertions: %i deletions: %i new_length: %i"""\
293
% (len(old_entries), insertions, deletions, len(new_inventory_list)))
294
assert len(new_inventory_list) == len(old_entries) + insertions - deletions
295
new_inventory_list.sort()
296
return new_inventory_list
422
base_branch, self.base_tree = self._get_tree(base_revision)
423
if base_revision[1] == -1:
424
self.base_rev_id = base_branch.last_revision()
425
elif base_revision[1] is None:
426
self.base_rev_id = _mod_revision.NULL_REVISION
428
self.base_rev_id = _mod_revision.ensure_null(
429
base_branch.get_rev_id(base_revision[1]))
430
self._maybe_fetch(base_branch, self.this_branch, self.base_rev_id)
432
def make_merger(self):
433
kwargs = {'working_tree':self.this_tree, 'this_tree': self.this_tree,
434
'other_tree': self.other_tree,
435
'interesting_ids': self.interesting_ids,
436
'interesting_files': self.interesting_files,
439
if self.merge_type.requires_base:
440
kwargs['base_tree'] = self.base_tree
441
if self.merge_type.supports_reprocess:
442
kwargs['reprocess'] = self.reprocess
444
raise BzrError("Conflict reduction is not supported for merge"
445
" type %s." % self.merge_type)
446
if self.merge_type.supports_show_base:
447
kwargs['show_base'] = self.show_base
449
raise BzrError("Showing base is not supported for this"
450
" merge type. %s" % self.merge_type)
451
if (not getattr(self.merge_type, 'supports_reverse_cherrypick', True)
452
and not self.base_is_other_ancestor):
453
raise errors.CannotReverseCherrypick()
454
if self.merge_type.supports_cherrypick:
455
kwargs['cherrypick'] = (not self.base_is_ancestor or
456
not self.base_is_other_ancestor)
457
if self._is_criss_cross and getattr(self.merge_type,
458
'supports_lca_trees', False):
459
kwargs['lca_trees'] = self._lca_trees
460
return self.merge_type(pb=self._pb,
461
change_reporter=self.change_reporter,
464
def _do_merge_to(self, merge):
466
if self.recurse == 'down':
467
for relpath, file_id in self.this_tree.iter_references():
468
sub_tree = self.this_tree.get_nested_tree(file_id, relpath)
469
other_revision = self.other_tree.get_reference_revision(
471
if other_revision == sub_tree.last_revision():
473
sub_merge = Merger(sub_tree.branch, this_tree=sub_tree)
474
sub_merge.merge_type = self.merge_type
475
other_branch = self.other_branch.reference_parent(file_id, relpath)
476
sub_merge.set_other_revision(other_revision, other_branch)
477
base_revision = self.base_tree.get_reference_revision(file_id)
478
sub_merge.base_tree = \
479
sub_tree.branch.repository.revision_tree(base_revision)
480
sub_merge.base_rev_id = base_revision
484
self.this_tree.lock_tree_write()
486
if self.base_tree is not None:
487
self.base_tree.lock_read()
489
if self.other_tree is not None:
490
self.other_tree.lock_read()
492
merge = self.make_merger()
493
self._do_merge_to(merge)
495
if self.other_tree is not None:
496
self.other_tree.unlock()
498
if self.base_tree is not None:
499
self.base_tree.unlock()
501
self.this_tree.unlock()
502
if len(merge.cooked_conflicts) == 0:
503
if not self.ignore_zero and not is_quiet():
504
note("All changes applied successfully.")
506
note("%d conflicts encountered." % len(merge.cooked_conflicts))
508
return len(merge.cooked_conflicts)
511
class _InventoryNoneEntry(object):
512
"""This represents an inventory entry which *isn't there*.
514
It simplifies the merging logic if we always have an InventoryEntry, even
515
if it isn't actually present
522
symlink_target = None
525
_none_entry = _InventoryNoneEntry()
528
class Merge3Merger(object):
529
"""Three-way merger that uses the merge3 text merger"""
531
supports_reprocess = True
532
supports_show_base = True
533
history_based = False
534
supports_cherrypick = True
535
supports_reverse_cherrypick = True
536
winner_idx = {"this": 2, "other": 1, "conflict": 1}
537
supports_lca_trees = True
539
def __init__(self, working_tree, this_tree, base_tree, other_tree,
540
interesting_ids=None, reprocess=False, show_base=False,
541
pb=DummyProgress(), pp=None, change_reporter=None,
542
interesting_files=None, do_merge=True,
543
cherrypick=False, lca_trees=None):
544
"""Initialize the merger object and perform the merge.
546
:param working_tree: The working tree to apply the merge to
547
:param this_tree: The local tree in the merge operation
548
:param base_tree: The common tree in the merge operation
549
:param other_tree: The other tree to merge changes from
550
        :param interesting_ids: The file_ids of files that should
            participate in the merge.  May not be combined with
            interesting_files.
        :param reprocess: If True, perform conflict-reduction processing.
554
:param show_base: If True, show the base revision in text conflicts.
555
(incompatible with reprocess)
556
:param pb: A Progress bar
557
:param pp: A ProgressPhase object
558
:param change_reporter: An object that should report changes made
559
:param interesting_files: The tree-relative paths of files that should
560
participate in the merge. If these paths refer to directories,
561
the contents of those directories will also be included. May not
562
be combined with interesting_ids. If neither interesting_files nor
563
interesting_ids is specified, all files may participate in the
565
:param lca_trees: Can be set to a dictionary of {revision_id:rev_tree}
566
if the ancestry was found to include a criss-cross merge.
567
Otherwise should be None.
569
object.__init__(self)
570
if interesting_files is not None and interesting_ids is not None:
572
'specify either interesting_ids or interesting_files')
573
self.interesting_ids = interesting_ids
574
self.interesting_files = interesting_files
575
self.this_tree = working_tree
576
self.base_tree = base_tree
577
self.other_tree = other_tree
578
self._raw_conflicts = []
579
self.cooked_conflicts = []
580
self.reprocess = reprocess
581
self.show_base = show_base
582
self._lca_trees = lca_trees
583
# Uncommenting this will change the default algorithm to always use
584
# _entries_lca. This can be useful for running the test suite and
585
# making sure we haven't missed any corner cases.
586
# if lca_trees is None:
587
# self._lca_trees = [self.base_tree]
590
self.change_reporter = change_reporter
591
self.cherrypick = cherrypick
593
self.pp = ProgressPhase("Merge phase", 3, self.pb)
598
self.this_tree.lock_tree_write()
599
self.base_tree.lock_read()
600
self.other_tree.lock_read()
601
self.tt = TreeTransform(self.this_tree, self.pb)
604
self._compute_transform()
606
results = self.tt.apply(no_conflicts=True)
607
self.write_modified(results)
609
self.this_tree.add_conflicts(self.cooked_conflicts)
610
except UnsupportedOperation:
614
self.other_tree.unlock()
615
self.base_tree.unlock()
616
self.this_tree.unlock()
619
def make_preview_transform(self):
620
self.base_tree.lock_read()
621
self.other_tree.lock_read()
622
self.tt = TransformPreview(self.this_tree)
625
self._compute_transform()
628
self.other_tree.unlock()
629
self.base_tree.unlock()
633
def _compute_transform(self):
634
if self._lca_trees is None:
635
entries = self._entries3()
636
resolver = self._three_way
638
entries = self._entries_lca()
639
resolver = self._lca_multi_way
640
child_pb = ui.ui_factory.nested_progress_bar()
642
for num, (file_id, changed, parents3, names3,
643
executable3) in enumerate(entries):
644
child_pb.update('Preparing file merge', num, len(entries))
645
self._merge_names(file_id, parents3, names3, resolver=resolver)
647
file_status = self.merge_contents(file_id)
649
file_status = 'unmodified'
650
self._merge_executable(file_id,
651
executable3, file_status, resolver=resolver)
656
child_pb = ui.ui_factory.nested_progress_bar()
658
fs_conflicts = resolve_conflicts(self.tt, child_pb,
659
lambda t, c: conflict_pass(t, c, self.other_tree))
662
if self.change_reporter is not None:
663
from bzrlib import delta
664
delta.report_changes(
665
self.tt.iter_changes(), self.change_reporter)
666
self.cook_conflicts(fs_conflicts)
667
for conflict in self.cooked_conflicts:
671
"""Gather data about files modified between three trees.
673
Return a list of tuples of file_id, changed, parents3, names3,
674
executable3. changed is a boolean indicating whether the file contents
675
or kind were changed. parents3 is a tuple of parent ids for base,
676
other and this. names3 is a tuple of names for base, other and this.
677
executable3 is a tuple of execute-bit values for base, other and this.
680
iterator = self.other_tree.iter_changes(self.base_tree,
681
include_unchanged=True, specific_files=self.interesting_files,
682
extra_trees=[self.this_tree])
683
this_entries = dict((e.file_id, e) for p, e in
684
self.this_tree.iter_entries_by_dir(
685
self.interesting_ids))
686
for (file_id, paths, changed, versioned, parents, names, kind,
687
executable) in iterator:
688
if (self.interesting_ids is not None and
689
file_id not in self.interesting_ids):
691
entry = this_entries.get(file_id)
692
if entry is not None:
693
this_name = entry.name
694
this_parent = entry.parent_id
695
this_executable = entry.executable
699
this_executable = None
700
parents3 = parents + (this_parent,)
701
names3 = names + (this_name,)
702
executable3 = executable + (this_executable,)
703
result.append((file_id, changed, parents3, names3, executable3))
706
def _entries_lca(self):
707
"""Gather data about files modified between multiple trees.
709
This compares OTHER versus all LCA trees, and for interesting entries,
710
it then compares with THIS and BASE.
712
For the multi-valued entries, the format will be (BASE, [lca1, lca2])
713
:return: [(file_id, changed, parents, names, executable)]
714
file_id Simple file_id of the entry
715
changed Boolean, True if the kind or contents changed
717
parents ((base, [parent_id, in, lcas]), parent_id_other,
719
names ((base, [name, in, lcas]), name_in_other, name_in_this)
720
executable ((base, [exec, in, lcas]), exec_in_other, exec_in_this)
722
if self.interesting_files is not None:
723
lookup_trees = [self.this_tree, self.base_tree]
724
lookup_trees.extend(self._lca_trees)
725
# I think we should include the lca trees as well
726
interesting_ids = self.other_tree.paths2ids(self.interesting_files,
729
interesting_ids = self.interesting_ids
731
walker = _mod_tree.MultiWalker(self.other_tree, self._lca_trees)
733
base_inventory = self.base_tree.inventory
734
this_inventory = self.this_tree.inventory
735
for path, file_id, other_ie, lca_values in walker.iter_all():
736
# Is this modified at all from any of the other trees?
738
other_ie = _none_entry
739
if interesting_ids is not None and file_id not in interesting_ids:
742
# If other_revision is found in any of the lcas, that means this
743
# node is uninteresting. This is because when merging, if there are
744
# multiple heads(), we have to create a new node. So if we didn't,
745
# we know that the ancestry is linear, and that OTHER did not
747
# See doc/developers/lca_merge_resolution.txt for details
748
other_revision = other_ie.revision
749
if other_revision is not None:
750
# We can't use this shortcut when other_revision is None,
751
# because it may be None because things are WorkingTrees, and
752
# not because it is *actually* None.
753
is_unmodified = False
754
for lca_path, ie in lca_values:
755
if ie is not None and ie.revision == other_revision:
762
for lca_path, lca_ie in lca_values:
764
lca_entries.append(_none_entry)
766
lca_entries.append(lca_ie)
768
if file_id in base_inventory:
769
base_ie = base_inventory[file_id]
771
base_ie = _none_entry
773
if file_id in this_inventory:
774
this_ie = this_inventory[file_id]
776
this_ie = _none_entry
782
for lca_ie in lca_entries:
783
lca_kinds.append(lca_ie.kind)
784
lca_parent_ids.append(lca_ie.parent_id)
785
lca_names.append(lca_ie.name)
786
lca_executable.append(lca_ie.executable)
788
kind_winner = self._lca_multi_way(
789
(base_ie.kind, lca_kinds),
790
other_ie.kind, this_ie.kind)
791
parent_id_winner = self._lca_multi_way(
792
(base_ie.parent_id, lca_parent_ids),
793
other_ie.parent_id, this_ie.parent_id)
794
name_winner = self._lca_multi_way(
795
(base_ie.name, lca_names),
796
other_ie.name, this_ie.name)
798
content_changed = True
799
if kind_winner == 'this':
800
# No kind change in OTHER, see if there are *any* changes
801
if other_ie.kind == 'directory':
802
if parent_id_winner == 'this' and name_winner == 'this':
803
# No change for this directory in OTHER, skip
805
content_changed = False
806
elif other_ie.kind is None or other_ie.kind == 'file':
807
def get_sha1(ie, tree):
808
if ie.kind != 'file':
810
return tree.get_file_sha1(file_id)
811
base_sha1 = get_sha1(base_ie, self.base_tree)
812
lca_sha1s = [get_sha1(ie, tree) for ie, tree
813
in zip(lca_entries, self._lca_trees)]
814
this_sha1 = get_sha1(this_ie, self.this_tree)
815
other_sha1 = get_sha1(other_ie, self.other_tree)
816
sha1_winner = self._lca_multi_way(
817
(base_sha1, lca_sha1s), other_sha1, this_sha1,
818
allow_overriding_lca=False)
819
exec_winner = self._lca_multi_way(
820
(base_ie.executable, lca_executable),
821
other_ie.executable, this_ie.executable)
822
if (parent_id_winner == 'this' and name_winner == 'this'
823
and sha1_winner == 'this' and exec_winner == 'this'):
824
# No kind, parent, name, exec, or content change for
825
# OTHER, so this node is not considered interesting
827
if sha1_winner == 'this':
828
content_changed = False
829
elif other_ie.kind == 'symlink':
830
def get_target(ie, tree):
831
if ie.kind != 'symlink':
833
return tree.get_symlink_target(file_id)
834
base_target = get_target(base_ie, self.base_tree)
835
lca_targets = [get_target(ie, tree) for ie, tree
836
in zip(lca_entries, self._lca_trees)]
837
this_target = get_target(this_ie, self.this_tree)
838
other_target = get_target(other_ie, self.other_tree)
839
target_winner = self._lca_multi_way(
840
(base_target, lca_targets),
841
other_target, this_target)
842
if (parent_id_winner == 'this' and name_winner == 'this'
843
and target_winner == 'this'):
844
# No kind, parent, name, or symlink target change
847
if target_winner == 'this':
848
content_changed = False
849
elif other_ie.kind == 'tree-reference':
850
# The 'changed' information seems to be handled at a higher
851
# level. At least, _entries3 returns False for content
852
# changed, even when at a new revision_id.
853
content_changed = False
854
if (parent_id_winner == 'this' and name_winner == 'this'):
855
# Nothing interesting
858
raise AssertionError('unhandled kind: %s' % other_ie.kind)
859
# XXX: We need to handle kind == 'symlink'
861
# If we have gotten this far, that means something has changed
862
result.append((file_id, content_changed,
863
((base_ie.parent_id, lca_parent_ids),
864
other_ie.parent_id, this_ie.parent_id),
865
((base_ie.name, lca_names),
866
other_ie.name, this_ie.name),
867
((base_ie.executable, lca_executable),
868
other_ie.executable, this_ie.executable)
875
self.tt.final_kind(self.tt.root)
877
self.tt.cancel_deletion(self.tt.root)
878
if self.tt.final_file_id(self.tt.root) is None:
879
self.tt.version_file(self.tt.tree_file_id(self.tt.root),
881
other_root_file_id = self.other_tree.get_root_id()
882
if other_root_file_id is None:
884
other_root = self.tt.trans_id_file_id(other_root_file_id)
885
if other_root == self.tt.root:
888
self.tt.final_kind(other_root)
891
if self.other_tree.inventory.root.file_id in self.this_tree.inventory:
892
# the other tree's root is a non-root in the current tree
894
self.reparent_children(self.other_tree.inventory.root, self.tt.root)
895
self.tt.cancel_creation(other_root)
896
self.tt.cancel_versioning(other_root)
898
def reparent_children(self, ie, target):
899
for thing, child in ie.children.iteritems():
900
trans_id = self.tt.trans_id_file_id(child.file_id)
901
self.tt.adjust_path(self.tt.final_name(trans_id), target, trans_id)
903
def write_modified(self, results):
905
for path in results.modified_paths:
906
file_id = self.this_tree.path2id(self.this_tree.relpath(path))
909
hash = self.this_tree.get_file_sha1(file_id)
912
modified_hashes[file_id] = hash
913
self.this_tree.set_merge_modified(modified_hashes)
916
def parent(entry, file_id):
917
"""Determine the parent for a file_id (used as a key method)"""
920
return entry.parent_id
923
def name(entry, file_id):
924
"""Determine the name for a file_id (used as a key method)"""
930
def contents_sha1(tree, file_id):
931
"""Determine the sha1 of the file contents (used as a key method)."""
932
if file_id not in tree:
934
return tree.get_file_sha1(file_id)
937
def executable(tree, file_id):
938
"""Determine the executability of a file-id (used as a key method)."""
939
if file_id not in tree:
941
if tree.kind(file_id) != "file":
943
return tree.is_executable(file_id)
946
def kind(tree, file_id):
947
"""Determine the kind of a file-id (used as a key method)."""
948
if file_id not in tree:
950
return tree.kind(file_id)
953
    @staticmethod
    def _three_way(base, other, this):
        #if base == other, either they all agree, or only THIS has changed.
        if base == other:
            return 'this'
        elif this not in (base, other):
            return 'conflict'
        # "Ambiguous clean merge" -- both sides have made the same change.
        elif this == other:
            return "this"
        # this == base: only other has changed.
        else:
            return "other"
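    # For example (illustrative values):
    #   _three_way('a', 'a', 'b') -> 'this'      (only THIS changed)
    #   _three_way('a', 'b', 'a') -> 'other'     (only OTHER changed)
    #   _three_way('a', 'b', 'c') -> 'conflict'  (both changed, differently)
    #   _three_way('a', 'b', 'b') -> 'this'      (both made the same change)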
967
def _lca_multi_way(bases, other, this, allow_overriding_lca=True):
968
"""Consider LCAs when determining whether a change has occurred.
970
        If the LCAs are all identical, this is the same as a _three_way comparison.
972
:param bases: value in (BASE, [LCAS])
973
:param other: value in OTHER
974
:param this: value in THIS
975
:param allow_overriding_lca: If there is more than one unique lca
976
value, allow OTHER to override THIS if it has a new value, and
977
THIS only has an lca value, or vice versa. This is appropriate for
978
truly scalar values, not as much for non-scalars.
979
:return: 'this', 'other', or 'conflict' depending on whether an entry
982
        # See doc/developers/lca_tree_merging.txt for details about this
        # algorithm.
        if other == this:
            # Either Ambiguously clean, or nothing was actually changed. We
            # don't need to update anything
            return 'this'
        base_val, lca_vals = bases
989
# Remove 'base_val' from the lca_vals, because it is not interesting
990
filtered_lca_vals = [lca_val for lca_val in lca_vals
991
if lca_val != base_val]
992
if len(filtered_lca_vals) == 0:
993
return Merge3Merger._three_way(base_val, other, this)
995
unique_lca_vals = set(filtered_lca_vals)
996
if len(unique_lca_vals) == 1:
997
return Merge3Merger._three_way(unique_lca_vals.pop(), other, this)
999
        if allow_overriding_lca:
            if other in unique_lca_vals:
                if this in unique_lca_vals:
                    # Each side picked a different lca, conflict
                    return 'conflict'
                else:
                    # This has a value which supersedes both lca values, and
                    # other only has an lca value
                    return 'this'
            elif this in unique_lca_vals:
                # OTHER has a value which supersedes both lca values, and this
                # only has an lca value
                return 'other'

        # At this point, the lcas disagree, and the tips disagree
        return 'conflict'
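    # Worked examples (illustrative values):
    #   _lca_multi_way(('x', ['x', 'y']), 'y', 'x') -> 'this'
    #       OTHER matches one of the LCAs, so it introduces nothing new.
    #   _lca_multi_way(('x', ['y', 'z']), 'w', 'y') -> 'other'
    #       OTHER supersedes both LCA values, while THIS still holds one.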
1017
    @staticmethod
    def scalar_three_way(this_tree, base_tree, other_tree, file_id, key):
        """Do a three-way test on a scalar.
        Return "this", "other" or "conflict", depending on whether a value wins.
        """
        key_base = key(base_tree, file_id)
        key_other = key(other_tree, file_id)
        #if base == other, either they all agree, or only THIS has changed.
        if key_base == key_other:
            return "this"
        key_this = key(this_tree, file_id)
        # "Ambiguous clean merge"
        if key_this == key_other:
            return "this"
        elif key_this == key_base:
            return "other"
        else:
            return "conflict"
def merge_names(self, file_id):
1036
def get_entry(tree):
1037
if file_id in tree.inventory:
1038
return tree.inventory[file_id]
1041
this_entry = get_entry(self.this_tree)
1042
other_entry = get_entry(self.other_tree)
1043
base_entry = get_entry(self.base_tree)
1044
entries = (base_entry, other_entry, this_entry)
1047
for entry in entries:
1050
parents.append(None)
1052
names.append(entry.name)
1053
parents.append(entry.parent_id)
1054
return self._merge_names(file_id, parents, names,
1055
resolver=self._three_way)
1057
def _merge_names(self, file_id, parents, names, resolver):
1058
"""Perform a merge on file_id names and parents"""
1059
base_name, other_name, this_name = names
1060
base_parent, other_parent, this_parent = parents
1062
name_winner = resolver(*names)
1064
parent_id_winner = resolver(*parents)
1065
if this_name is None:
1066
if name_winner == "this":
1067
name_winner = "other"
1068
if parent_id_winner == "this":
1069
parent_id_winner = "other"
1070
if name_winner == "this" and parent_id_winner == "this":
1072
if name_winner == "conflict":
1073
trans_id = self.tt.trans_id_file_id(file_id)
1074
self._raw_conflicts.append(('name conflict', trans_id,
1075
this_name, other_name))
1076
if parent_id_winner == "conflict":
1077
trans_id = self.tt.trans_id_file_id(file_id)
1078
self._raw_conflicts.append(('parent conflict', trans_id,
1079
this_parent, other_parent))
1080
if other_name is None:
1081
# it doesn't matter whether the result was 'other' or
1082
# 'conflict'-- if there's no 'other', we leave it alone.
1084
# if we get here, name_winner and parent_winner are set to safe values.
1085
trans_id = self.tt.trans_id_file_id(file_id)
1086
parent_id = parents[self.winner_idx[parent_id_winner]]
1087
if parent_id is not None:
1088
parent_trans_id = self.tt.trans_id_file_id(parent_id)
1089
self.tt.adjust_path(names[self.winner_idx[name_winner]],
1090
parent_trans_id, trans_id)
1092
def merge_contents(self, file_id):
1093
"""Performs a merge on file_id contents."""
1094
def contents_pair(tree):
1095
if file_id not in tree:
1097
kind = tree.kind(file_id)
1099
contents = tree.get_file_sha1(file_id)
1100
elif kind == "symlink":
1101
contents = tree.get_symlink_target(file_id)
1104
return kind, contents
1106
def contents_conflict():
1107
trans_id = self.tt.trans_id_file_id(file_id)
1108
name = self.tt.final_name(trans_id)
1109
parent_id = self.tt.final_parent(trans_id)
1110
if file_id in self.this_tree.inventory:
1111
self.tt.unversion_file(trans_id)
1112
if file_id in self.this_tree:
1113
self.tt.delete_contents(trans_id)
1114
file_group = self._dump_conflicts(name, parent_id, file_id,
1116
self._raw_conflicts.append(('contents conflict', file_group))
1118
# See SPOT run. run, SPOT, run.
1119
# So we're not QUITE repeating ourselves; we do tricky things with
1121
base_pair = contents_pair(self.base_tree)
1122
other_pair = contents_pair(self.other_tree)
1124
this_pair = contents_pair(self.this_tree)
1125
lca_pairs = [contents_pair(tree) for tree in self._lca_trees]
1126
winner = self._lca_multi_way((base_pair, lca_pairs), other_pair,
1127
this_pair, allow_overriding_lca=False)
1129
if base_pair == other_pair:
1132
# We delayed evaluating this_pair as long as we can to avoid
1133
# unnecessary sha1 calculation
1134
this_pair = contents_pair(self.this_tree)
1135
winner = self._three_way(base_pair, other_pair, this_pair)
1136
if winner == 'this':
1137
# No interesting changes introduced by OTHER
1139
trans_id = self.tt.trans_id_file_id(file_id)
1140
if winner == 'other':
1141
# OTHER is a straight winner, so replace this contents with other
1142
file_in_this = file_id in self.this_tree
1144
# Remove any existing contents
1145
self.tt.delete_contents(trans_id)
1146
if file_id in self.other_tree:
1147
# OTHER changed the file
1148
create_from_tree(self.tt, trans_id,
1149
self.other_tree, file_id)
1150
if not file_in_this:
1151
self.tt.version_file(file_id, trans_id)
1154
# OTHER deleted the file
1155
self.tt.unversion_file(trans_id)
1158
# We have a hypothetical conflict, but if we have files, then we
1159
# can try to merge the content
1160
if this_pair[0] == 'file' and other_pair[0] == 'file':
1161
# THIS and OTHER are both files, so text merge. Either
1162
# BASE is a file, or both converted to files, so at least we
1163
# have agreement that output should be a file.
1165
self.text_merge(file_id, trans_id)
1167
return contents_conflict()
1168
if file_id not in self.this_tree:
1169
self.tt.version_file(file_id, trans_id)
1171
self.tt.tree_kind(trans_id)
1172
self.tt.delete_contents(trans_id)
1177
return contents_conflict()
1179
def get_lines(self, tree, file_id):
1180
"""Return the lines in a file, or an empty list."""
1182
return tree.get_file(file_id).readlines()
1186
def text_merge(self, file_id, trans_id):
1187
"""Perform a three-way text merge on a file_id"""
1188
# it's possible that we got here with base as a different type.
1189
# if so, we just want two-way text conflicts.
1190
if file_id in self.base_tree and \
1191
self.base_tree.kind(file_id) == "file":
1192
base_lines = self.get_lines(self.base_tree, file_id)
1195
other_lines = self.get_lines(self.other_tree, file_id)
1196
this_lines = self.get_lines(self.this_tree, file_id)
1197
m3 = Merge3(base_lines, this_lines, other_lines,
1198
is_cherrypick=self.cherrypick)
1199
start_marker = "!START OF MERGE CONFLICT!" + "I HOPE THIS IS UNIQUE"
1200
if self.show_base is True:
1201
base_marker = '|' * 7
1205
def iter_merge3(retval):
1206
retval["text_conflicts"] = False
1207
for line in m3.merge_lines(name_a = "TREE",
1208
name_b = "MERGE-SOURCE",
1209
name_base = "BASE-REVISION",
1210
start_marker=start_marker,
1211
base_marker=base_marker,
1212
reprocess=self.reprocess):
1213
if line.startswith(start_marker):
1214
retval["text_conflicts"] = True
1215
yield line.replace(start_marker, '<' * 7)
1219
merge3_iterator = iter_merge3(retval)
1220
self.tt.create_file(merge3_iterator, trans_id)
1221
if retval["text_conflicts"] is True:
1222
self._raw_conflicts.append(('text conflict', trans_id))
1223
name = self.tt.final_name(trans_id)
1224
parent_id = self.tt.final_parent(trans_id)
1225
file_group = self._dump_conflicts(name, parent_id, file_id,
1226
this_lines, base_lines,
1228
file_group.append(trans_id)
1230
def _dump_conflicts(self, name, parent_id, file_id, this_lines=None,
1231
base_lines=None, other_lines=None, set_version=False,
1233
"""Emit conflict files.
1234
If this_lines, base_lines, or other_lines are omitted, they will be
1235
determined automatically. If set_version is true, the .OTHER, .THIS
1236
or .BASE (in that order) will be created as versioned files.
1238
data = [('OTHER', self.other_tree, other_lines),
1239
('THIS', self.this_tree, this_lines)]
1241
data.append(('BASE', self.base_tree, base_lines))
1244
for suffix, tree, lines in data:
1246
trans_id = self._conflict_file(name, parent_id, tree, file_id,
1248
file_group.append(trans_id)
1249
if set_version and not versioned:
1250
self.tt.version_file(file_id, trans_id)
1254
def _conflict_file(self, name, parent_id, tree, file_id, suffix,
1256
"""Emit a single conflict file."""
1257
name = name + '.' + suffix
1258
trans_id = self.tt.create_path(name, parent_id)
1259
create_from_tree(self.tt, trans_id, tree, file_id, lines)
1262
def merge_executable(self, file_id, file_status):
1263
"""Perform a merge on the execute bit."""
1264
executable = [self.executable(t, file_id) for t in (self.base_tree,
1265
self.other_tree, self.this_tree)]
1266
self._merge_executable(file_id, executable, file_status,
1267
resolver=self._three_way)
1269
def _merge_executable(self, file_id, executable, file_status,
1271
"""Perform a merge on the execute bit."""
1272
base_executable, other_executable, this_executable = executable
1273
if file_status == "deleted":
1275
winner = resolver(*executable)
1276
if winner == "conflict":
1277
# There must be a None in here, if we have a conflict, but we
1278
# need executability since file status was not deleted.
1279
if self.executable(self.other_tree, file_id) is None:
1283
if winner == 'this' and file_status != "modified":
1285
trans_id = self.tt.trans_id_file_id(file_id)
1287
if self.tt.final_kind(trans_id) != "file":
1291
if winner == "this":
1292
executability = this_executable
1294
if file_id in self.other_tree:
1295
executability = other_executable
1296
elif file_id in self.this_tree:
1297
executability = this_executable
1298
elif file_id in self.base_tree:
1299
executability = base_executable
1300
if executability is not None:
1301
trans_id = self.tt.trans_id_file_id(file_id)
1302
self.tt.set_executability(executability, trans_id)
1304
def cook_conflicts(self, fs_conflicts):
1305
"""Convert all conflicts into a form that doesn't depend on trans_id"""
1306
from conflicts import Conflict
1308
self.cooked_conflicts.extend(cook_conflicts(fs_conflicts, self.tt))
1309
fp = FinalPaths(self.tt)
1310
for conflict in self._raw_conflicts:
1311
conflict_type = conflict[0]
1312
if conflict_type in ('name conflict', 'parent conflict'):
1313
trans_id = conflict[1]
1314
conflict_args = conflict[2:]
1315
if trans_id not in name_conflicts:
1316
name_conflicts[trans_id] = {}
1317
unique_add(name_conflicts[trans_id], conflict_type,
1319
if conflict_type == 'contents conflict':
1320
for trans_id in conflict[1]:
1321
file_id = self.tt.final_file_id(trans_id)
1322
if file_id is not None:
1324
path = fp.get_path(trans_id)
1325
for suffix in ('.BASE', '.THIS', '.OTHER'):
1326
if path.endswith(suffix):
1327
path = path[:-len(suffix)]
1329
c = Conflict.factory(conflict_type, path=path, file_id=file_id)
1330
self.cooked_conflicts.append(c)
1331
if conflict_type == 'text conflict':
1332
trans_id = conflict[1]
1333
path = fp.get_path(trans_id)
1334
file_id = self.tt.final_file_id(trans_id)
1335
c = Conflict.factory(conflict_type, path=path, file_id=file_id)
1336
self.cooked_conflicts.append(c)
1338
for trans_id, conflicts in name_conflicts.iteritems():
1340
this_parent, other_parent = conflicts['parent conflict']
1341
if this_parent == other_parent:
1342
raise AssertionError()
1344
this_parent = other_parent = \
1345
self.tt.final_file_id(self.tt.final_parent(trans_id))
1347
this_name, other_name = conflicts['name conflict']
1348
if this_name == other_name:
1349
raise AssertionError()
1351
this_name = other_name = self.tt.final_name(trans_id)
1352
other_path = fp.get_path(trans_id)
1353
if this_parent is not None and this_name is not None:
1354
this_parent_path = \
1355
fp.get_path(self.tt.trans_id_file_id(this_parent))
1356
this_path = pathjoin(this_parent_path, this_name)
1358
this_path = "<deleted>"
1359
file_id = self.tt.final_file_id(trans_id)
1360
c = Conflict.factory('path conflict', path=this_path,
1361
conflict_path=other_path, file_id=file_id)
1362
self.cooked_conflicts.append(c)
1363
self.cooked_conflicts.sort(key=Conflict.sort_key)
1366
class WeaveMerger(Merge3Merger):
1367
"""Three-way tree merger, text weave merger."""
1368
supports_reprocess = True
1369
supports_show_base = False
1370
supports_reverse_cherrypick = False
1371
history_based = True
1373
def _merged_lines(self, file_id):
1374
"""Generate the merged lines.
1375
There is no distinction between lines that are meant to contain <<<<<<<
1379
base = self.base_tree
1382
plan = self.this_tree.plan_file_merge(file_id, self.other_tree,
1384
if 'merge' in debug.debug_flags:
1386
trans_id = self.tt.trans_id_file_id(file_id)
1387
name = self.tt.final_name(trans_id) + '.plan'
1388
contents = ('%10s|%s' % l for l in plan)
1389
self.tt.new_file(name, self.tt.final_parent(trans_id), contents)
1390
textmerge = PlanWeaveMerge(plan, '<<<<<<< TREE\n',
1391
'>>>>>>> MERGE-SOURCE\n')
1392
return textmerge.merge_lines(self.reprocess)
1394
def text_merge(self, file_id, trans_id):
1395
"""Perform a (weave) text merge for a given file and file-id.
1396
If conflicts are encountered, .THIS and .OTHER files will be emitted,
1397
and a conflict will be noted.
1399
lines, conflicts = self._merged_lines(file_id)
1401
# Note we're checking whether the OUTPUT is binary in this case,
1402
# because we don't want to get into weave merge guts.
1403
check_text_lines(lines)
1404
self.tt.create_file(lines, trans_id)
1406
self._raw_conflicts.append(('text conflict', trans_id))
1407
name = self.tt.final_name(trans_id)
1408
parent_id = self.tt.final_parent(trans_id)
1409
file_group = self._dump_conflicts(name, parent_id, file_id,
1411
file_group.append(trans_id)
1414
class LCAMerger(WeaveMerger):
1416
def _merged_lines(self, file_id):
1417
"""Generate the merged lines.
1418
There is no distinction between lines that are meant to contain <<<<<<<
1422
base = self.base_tree
1425
plan = self.this_tree.plan_file_lca_merge(file_id, self.other_tree,
1427
if 'merge' in debug.debug_flags:
1429
trans_id = self.tt.trans_id_file_id(file_id)
1430
name = self.tt.final_name(trans_id) + '.plan'
1431
contents = ('%10s|%s' % l for l in plan)
1432
self.tt.new_file(name, self.tt.final_parent(trans_id), contents)
1433
textmerge = PlanWeaveMerge(plan, '<<<<<<< TREE\n',
1434
'>>>>>>> MERGE-SOURCE\n')
1435
return textmerge.merge_lines(self.reprocess)
1438
class Diff3Merger(Merge3Merger):
1439
"""Three-way merger using external diff3 for text merging"""
1441
    def dump_file(self, temp_dir, name, tree, file_id):
        out_path = pathjoin(temp_dir, name)
        out_file = open(out_path, "wb")
        try:
            in_file = tree.get_file(file_id)
            for line in in_file:
                out_file.write(line)
        finally:
            out_file.close()
        return out_path
1452
def text_merge(self, file_id, trans_id):
1453
"""Perform a diff3 merge using a specified file-id and trans-id.
1454
        If conflicts are encountered, .BASE, .THIS and .OTHER conflict files
        will be dumped, and a conflict will be noted.
        """
1458
temp_dir = osutils.mkdtemp(prefix="bzr-")
1460
new_file = pathjoin(temp_dir, "new")
1461
this = self.dump_file(temp_dir, "this", self.this_tree, file_id)
1462
base = self.dump_file(temp_dir, "base", self.base_tree, file_id)
1463
other = self.dump_file(temp_dir, "other", self.other_tree, file_id)
1464
status = bzrlib.patch.diff3(new_file, this, base, other)
1465
if status not in (0, 1):
1466
raise BzrError("Unhandled diff3 exit code")
1467
f = open(new_file, 'rb')
1469
self.tt.create_file(f, trans_id)
1473
name = self.tt.final_name(trans_id)
1474
parent_id = self.tt.final_parent(trans_id)
1475
self._dump_conflicts(name, parent_id, file_id)
1476
self._raw_conflicts.append(('text conflict', trans_id))
1478
osutils.rmtree(temp_dir)
1481
def merge_inner(this_branch, other_tree, base_tree, ignore_zero=False,
                backup_files=False,
                merge_type=Merge3Merger,
                interesting_ids=None,
                show_base=False,
                reprocess=False,
                other_rev_id=None,
                interesting_files=None,
                this_tree=None,
                pb=DummyProgress(),
                change_reporter=None):
1492
"""Primary interface for merging.
1494
typical use is probably
1495
'merge_inner(branch, branch.get_revision_tree(other_revision),
1496
branch.get_revision_tree(base_revision))'
1498
if this_tree is None:
1499
raise BzrError("bzrlib.merge.merge_inner requires a this_tree "
1500
"parameter as of bzrlib version 0.8.")
1501
merger = Merger(this_branch, other_tree, base_tree, this_tree=this_tree,
1502
pb=pb, change_reporter=change_reporter)
1503
merger.backup_files = backup_files
1504
merger.merge_type = merge_type
1505
merger.interesting_ids = interesting_ids
1506
merger.ignore_zero = ignore_zero
1507
if interesting_files:
1509
raise ValueError('Only supply interesting_ids'
1510
' or interesting_files')
1511
merger.interesting_files = interesting_files
1512
merger.show_base = show_base
1513
merger.reprocess = reprocess
1514
merger.other_rev_id = other_rev_id
1515
merger.other_basis = other_rev_id
1516
get_revision_id = getattr(base_tree, 'get_revision_id', None)
1517
if get_revision_id is None:
1518
get_revision_id = base_tree.last_revision
1519
merger.set_base_revision(get_revision_id(), this_branch)
1520
return merger.do_merge()
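# Example call (hypothetical names), mirroring the docstring above:
#   wt = workingtree.WorkingTree.open_containing('.')[0]
#   conflicts = merge_inner(wt.branch,
#                           wt.branch.repository.revision_tree(other_rev_id),
#                           wt.branch.repository.revision_tree(base_rev_id),
#                           this_tree=wt)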
1522
def get_merge_type_registry():
1523
"""Merge type registry is in bzrlib.option to avoid circular imports.
1525
This method provides a sanctioned way to retrieve it.
1527
from bzrlib import option
1528
return option._merge_type_registry
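# For instance, the command layer can resolve a merge-type name to a merger
# class via this registry (illustrative, assuming the standard names):
#   merge_type = get_merge_type_registry().get('merge3')   # -> Merge3Merger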
1531
def _plan_annotate_merge(annotated_a, annotated_b, ancestors_a, ancestors_b):
1532
    def status_a(revision, text):
        if revision in ancestors_b:
            return 'killed-b', text
        else:
            return 'new-a', text

    def status_b(revision, text):
        if revision in ancestors_a:
            return 'killed-a', text
        else:
            return 'new-b', text
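    # In other words (summarising the two helpers above): a line unique to A
    # whose revision is already in B's ancestry must have been deleted on the
    # B side ('killed-b'); otherwise it is genuinely new on the A side
    # ('new-a'), and symmetrically for B.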
1544
plain_a = [t for (a, t) in annotated_a]
1545
plain_b = [t for (a, t) in annotated_b]
1546
matcher = patiencediff.PatienceSequenceMatcher(None, plain_a, plain_b)
1547
blocks = matcher.get_matching_blocks()
1550
for ai, bi, l in blocks:
1551
# process all mismatched sections
1552
# (last mismatched section is handled because blocks always
1553
# includes a 0-length last block)
1554
for revision, text in annotated_a[a_cur:ai]:
1555
yield status_a(revision, text)
1556
for revision, text in annotated_b[b_cur:bi]:
1557
yield status_b(revision, text)
1558
# and now the matched section
1561
for text_a in plain_a[ai:a_cur]:
1562
yield "unchanged", text_a


class _PlanMergeBase(object):

    def __init__(self, a_rev, b_rev, vf, key_prefix):
        """Constructor.

        :param a_rev: Revision-id of one revision to merge
        :param b_rev: Revision-id of the other revision to merge
        :param vf: A VersionedFiles containing both revisions
        :param key_prefix: A prefix for accessing keys in vf, typically
            a (file_id,) tuple.
        """
        self.a_rev = a_rev
        self.b_rev = b_rev
        self.vf = vf
        self._last_lines = None
        self._last_lines_revision_id = None
        self._cached_matching_blocks = {}
        self._key_prefix = key_prefix
        self._precache_tip_lines()

    def _precache_tip_lines(self):
        lines = self.get_lines([self.a_rev, self.b_rev])
        self.lines_a = lines[self.a_rev]
        self.lines_b = lines[self.b_rev]

    def get_lines(self, revisions):
        """Get lines for revisions from the backing VersionedFiles.

        :raises RevisionNotPresent: on absent texts.
        """
        keys = [(self._key_prefix + (rev,)) for rev in revisions]
        result = {}
        for record in self.vf.get_record_stream(keys, 'unordered', True):
            if record.storage_kind == 'absent':
                raise errors.RevisionNotPresent(record.key, self.vf)
            result[record.key[-1]] = osutils.chunks_to_lines(
                record.get_bytes_as('chunked'))
        return result

    def plan_merge(self):
        """Generate a 'plan' for merging the two revisions.

        This involves comparing their texts and determining the cause of
        differences.  If text A has a line and text B does not, then either the
        line was added to text A, or it was deleted from B.  Once the causes
        are combined, they are written out in the format described in
        VersionedFile.plan_merge.
        """
        blocks = self._get_matching_blocks(self.a_rev, self.b_rev)
        unique_a, unique_b = self._unique_lines(blocks)
        new_a, killed_b = self._determine_status(self.a_rev, unique_a)
        new_b, killed_a = self._determine_status(self.b_rev, unique_b)
        return self._iter_plan(blocks, new_a, killed_b, new_b, killed_a)

    def _iter_plan(self, blocks, new_a, killed_b, new_b, killed_a):
        last_i = 0
        last_j = 0
        for i, j, n in blocks:
            for a_index in range(last_i, i):
                if a_index in new_a:
                    if a_index in killed_b:
                        yield 'conflicted-a', self.lines_a[a_index]
                    else:
                        yield 'new-a', self.lines_a[a_index]
                else:
                    yield 'killed-b', self.lines_a[a_index]
            for b_index in range(last_j, j):
                if b_index in new_b:
                    if b_index in killed_a:
                        yield 'conflicted-b', self.lines_b[b_index]
                    else:
                        yield 'new-b', self.lines_b[b_index]
                else:
                    yield 'killed-a', self.lines_b[b_index]
            # handle common lines
            for a_index in range(i, i + n):
                yield 'unchanged', self.lines_a[a_index]
            last_i = i + n
            last_j = j + n

    def _get_matching_blocks(self, left_revision, right_revision):
        """Return a description of which sections of two revisions match.

        See SequenceMatcher.get_matching_blocks
        """
        cached = self._cached_matching_blocks.get((left_revision,
                                                   right_revision))
        if cached is not None:
            return cached
        if self._last_lines_revision_id == left_revision:
            left_lines = self._last_lines
            right_lines = self.get_lines([right_revision])[right_revision]
        else:
            lines = self.get_lines([left_revision, right_revision])
            left_lines = lines[left_revision]
            right_lines = lines[right_revision]
        self._last_lines = right_lines
        self._last_lines_revision_id = right_revision
        matcher = patiencediff.PatienceSequenceMatcher(None, left_lines,
                                                       right_lines)
        return matcher.get_matching_blocks()

    def _unique_lines(self, matching_blocks):
        """Analyse matching_blocks to determine which lines are unique.

        :return: a tuple of (unique_left, unique_right), where the values are
            lists of line numbers of unique lines.
        """
        last_i = 0
        last_j = 0
        unique_left = []
        unique_right = []
        for i, j, n in matching_blocks:
            unique_left.extend(range(last_i, i))
            unique_right.extend(range(last_j, j))
            last_i = i + n
            last_j = j + n
        return unique_left, unique_right
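
    # Illustrative sketch, not part of the original module: a worked example of
    # the gap arithmetic above, using hand-written matching blocks such as
    # PatienceSequenceMatcher would produce for ['a', 'b', 'c'] versus
    # ['a', 'x', 'c'].
    def _example_unique_lines(self):
        # Lines 0 and 2 match on both sides; the trailing (3, 3, 0) entry is
        # the zero-length terminator that get_matching_blocks() always appends.
        blocks = [(0, 0, 1), (2, 2, 1), (3, 3, 0)]
        unique_left, unique_right = self._unique_lines(blocks)
        # Only line 1 of each text lacks a counterpart on the other side, so
        # this returns ([1], [1]).
        return unique_left, unique_right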

    @staticmethod
    def _subtract_plans(old_plan, new_plan):
        """Remove changes from new_plan that came from old_plan.

        It is assumed that the difference between the old_plan and new_plan
        is their choice of 'b' text.

        All lines from new_plan that differ from old_plan are emitted
        verbatim.  All lines from new_plan that match old_plan but are
        not about the 'b' revision are emitted verbatim.

        Lines that match and are about the 'b' revision are the lines we
        don't want, so we convert 'killed-b' -> 'unchanged', and 'new-b'
        is skipped entirely.
        """
        matcher = patiencediff.PatienceSequenceMatcher(None, old_plan,
                                                       new_plan)
        last_j = 0
        for i, j, n in matcher.get_matching_blocks():
            for jj in range(last_j, j):
                yield new_plan[jj]
            for jj in range(j, j + n):
                plan_line = new_plan[jj]
                if plan_line[0] == 'new-b':
                    pass
                elif plan_line[0] == 'killed-b':
                    yield 'unchanged', plan_line[1]
                else:
                    yield plan_line
            last_j = j + n
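
    # Illustrative sketch, not part of the original module: subtracting one
    # small plan from another.  The entries that both plans agree on and that
    # concern the 'b' revision are neutralised; everything else passes through.
    @staticmethod
    def _example_subtract_plans():
        old_plan = [('unchanged', 'a\n'), ('new-b', 'b\n'), ('killed-b', 'c\n')]
        new_plan = [('unchanged', 'a\n'), ('new-b', 'b\n'), ('killed-b', 'c\n'),
                    ('new-b', 'd\n')]
        result = list(_PlanMergeBase._subtract_plans(old_plan, new_plan))
        # The shared 'new-b' line is dropped, the shared 'killed-b' line turns
        # into 'unchanged', and the line only present in new_plan survives:
        # [('unchanged', 'a\n'), ('unchanged', 'c\n'), ('new-b', 'd\n')]
        return result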


class _PlanMerge(_PlanMergeBase):
    """Plan an annotate merge using on-the-fly annotation"""

    def __init__(self, a_rev, b_rev, vf, key_prefix):
        super(_PlanMerge, self).__init__(a_rev, b_rev, vf, key_prefix)
        self.a_key = self._key_prefix + (self.a_rev,)
        self.b_key = self._key_prefix + (self.b_rev,)
        self.graph = Graph(self.vf)
        heads = self.graph.heads((self.a_key, self.b_key))
        if len(heads) == 1:
            # one side dominates, so we can just return its values, yay for
            # confluence :)
            # Ideally we would know that before we get this far
            self._head_key = heads.pop()
            if self._head_key == self.a_key:
                other = b_rev
            else:
                other = a_rev
            mutter('found dominating revision for %s\n%s > %s', self.vf,
                   self._head_key[-1], other)
            self._weave = None
        else:
            self._head_key = None
            self._build_weave()

    def _precache_tip_lines(self):
        # Turn this into a no-op, because we will do this later
        pass

    def _find_recursive_lcas(self):
        """Find all the ancestors back to a unique lca"""
        cur_ancestors = (self.a_key, self.b_key)
        # graph.find_lca(uncommon, keys) now returns plain NULL_REVISION,
        # rather than a key tuple. We will just map that directly to no common
        # ancestors.
        parent_map = {}
        while True:
            next_lcas = self.graph.find_lca(*cur_ancestors)
            # Map a plain NULL_REVISION to a simple no-ancestors
            if next_lcas == set([NULL_REVISION]):
                next_lcas = ()
            # Order the lca's based on when they were merged into the tip
            # While the actual merge portion of weave merge uses a set() of
            # active revisions, the order of insertion *does* affect the
            # implicit ordering of the texts.
            for rev_key in cur_ancestors:
                ordered_parents = tuple(self.graph.find_merge_order(rev_key,
                                                                    next_lcas))
                parent_map[rev_key] = ordered_parents
            if len(next_lcas) == 0:
                break
            elif len(next_lcas) == 1:
                parent_map[list(next_lcas)[0]] = ()
                break
            elif len(next_lcas) > 2:
                # More than 2 lca's, fall back to grabbing all nodes between
                # this and the unique lca.
                mutter('More than 2 LCAs, falling back to all nodes for:'
                       ' %s, %s\n=> %s', self.a_key, self.b_key, cur_ancestors)
                cur_lcas = next_lcas
                while len(cur_lcas) > 1:
                    cur_lcas = self.graph.find_lca(*cur_lcas)
                if len(cur_lcas) == 0:
                    # No common base to find, use the full ancestry
                    unique_lca = None
                else:
                    unique_lca = list(cur_lcas)[0]
                    if unique_lca == NULL_REVISION:
                        # find_lca will return a plain 'NULL_REVISION' rather
                        # than a key tuple when there is no common ancestor, we
                        # prefer to just use None, because it doesn't confuse
                        # _get_interesting_texts()
                        unique_lca = None
                parent_map.update(self._find_unique_parents(next_lcas,
                                                            unique_lca))
                break
            cur_ancestors = next_lcas
        return parent_map

    def _find_unique_parents(self, tip_keys, base_key):
        """Find ancestors of tip that aren't ancestors of base.

        :param tip_keys: Nodes that are interesting
        :param base_key: Cull all ancestors of this node
        :return: The parent map for all revisions between tip_keys and
            base_key. base_key will be included. References to nodes outside of
            the ancestor set will also be removed.
        """
        # TODO: this would be simpler if find_unique_ancestors took a list
        #       instead of a single tip, internally it supports it, but it
        #       isn't a "backwards compatible" api change.
        if base_key is None:
            parent_map = dict(self.graph.iter_ancestry(tip_keys))
            # We remove NULL_REVISION because it isn't a proper tuple key, and
            # thus confuses things like _get_interesting_texts, and our logic
            # to add the texts into the memory weave.
            if NULL_REVISION in parent_map:
                parent_map.pop(NULL_REVISION)
        else:
            interesting = set()
            for tip in tip_keys:
                interesting.update(
                    self.graph.find_unique_ancestors(tip, [base_key]))
            parent_map = self.graph.get_parent_map(interesting)
            parent_map[base_key] = ()
        culled_parent_map, child_map, tails = self._remove_external_references(
            parent_map)
        # Remove all the tails but base_key
        if base_key is not None:
            tails.remove(base_key)
            self._prune_tails(culled_parent_map, child_map, tails)
        # Now remove all the uninteresting 'linear' regions
        simple_map = _mod_graph.collapse_linear_regions(culled_parent_map)
        return simple_map

    @staticmethod
    def _remove_external_references(parent_map):
        """Remove references that go outside of the parent map.

        :param parent_map: Something returned from Graph.get_parent_map(keys)
        :return: (filtered_parent_map, child_map, tails)
            filtered_parent_map is parent_map without external references
            child_map is the {parent_key: [child_keys]} mapping
            tails is a list of nodes that do not have any parents in the map
        """
        # TODO: The basic effect of this function seems more generic than
        #       _PlanMerge. But the specific details of building a child_map,
        #       and computing tails seems very specific to _PlanMerge.
        #       Still, should this be in Graph land?
        filtered_parent_map = {}
        child_map = {}
        tails = []
        for key, parent_keys in parent_map.iteritems():
            culled_parent_keys = [p for p in parent_keys if p in parent_map]
            if not culled_parent_keys:
                tails.append(key)
            for parent_key in culled_parent_keys:
                child_map.setdefault(parent_key, []).append(key)
            # TODO: Do we want to do this, it adds overhead for every node,
            #       just to say that the node has no children
            child_map.setdefault(key, [])
            filtered_parent_map[key] = culled_parent_keys
        return filtered_parent_map, child_map, tails
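
    # Illustrative sketch, not part of the original module: culling a made-up
    # parent map in which the tip also references a revision, ('outside',),
    # that is not itself in the map.
    @staticmethod
    def _example_remove_external_references():
        parent_map = {
            ('tip',): (('mid',), ('outside',)),
            ('mid',): (('base',),),
            ('base',): (),
        }
        filtered, children, tails = (
            _PlanMerge._remove_external_references(parent_map))
        # filtered drops the ('outside',) reference from ('tip',); children
        # maps ('base',) -> [('mid',)] and ('mid',) -> [('tip',)] (plus an
        # empty entry for the childless tip); tails == [('base',)] because
        # only ('base',) has no in-map parents.
        return filtered, children, tails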

    @staticmethod
    def _prune_tails(parent_map, child_map, tails_to_remove):
        """Remove tails from the parent map.

        This will remove the supplied revisions until no more children have 0
        parents.

        :param parent_map: A dict of {child: [parents]}, this dictionary will
            be modified in place.
        :param tails_to_remove: A list of tips that should be removed, this
            list will be consumed.
        :param child_map: The reverse dict of parent_map ({parent: [children]}),
            this dict will be modified as well.
        :return: None, parent_map will be modified in place.
        """
        while tails_to_remove:
            next = tails_to_remove.pop()
            parent_map.pop(next)
            children = child_map.pop(next)
            for child in children:
                child_parents = parent_map[child]
                child_parents.remove(next)
                if len(child_parents) == 0:
                    tails_to_remove.append(child)
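
    # Illustrative sketch, not part of the original module: pruning the
    # unwanted tail ('old',) from a made-up graph.  Its child ('tip',) keeps
    # its other parent, so the pruning stops there instead of cascading.
    @staticmethod
    def _example_prune_tails():
        parent_map = {('old',): [],
                      ('other',): [],
                      ('tip',): [('old',), ('other',)]}
        child_map = {('old',): [('tip',)],
                     ('other',): [('tip',)],
                     ('tip',): []}
        _PlanMerge._prune_tails(parent_map, child_map, [('old',)])
        # Afterwards parent_map == {('other',): [], ('tip',): [('other',)]}
        # and child_map == {('other',): [('tip',)], ('tip',): []}.
        return parent_map, child_map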

    def _get_interesting_texts(self, parent_map):
        """Return a dict of texts we are interested in.

        Note that the input is in key tuples, but the output is in plain
        revision ids.

        :param parent_map: The output from _find_recursive_lcas
        :return: A dict of {'revision_id':lines} as returned by
            _PlanMergeBase.get_lines()
        """
        all_revision_keys = set(parent_map)
        all_revision_keys.add(self.a_key)
        all_revision_keys.add(self.b_key)

        # Everything else is in 'keys' but get_lines is in 'revision_ids'
        all_texts = self.get_lines([k[-1] for k in all_revision_keys])
        return all_texts

    def _build_weave(self):
        from bzrlib import weave
        self._weave = weave.Weave(weave_name='in_memory_weave',
                                  allow_reserved=True)
        parent_map = self._find_recursive_lcas()
        all_texts = self._get_interesting_texts(parent_map)
        # Note: Unfortunately, the order given by topo_sort will affect the
        # ordering resolution in the output. Specifically, if you add A then B,
        # then in the output text A lines will show up before B lines. And, of
        # course, topo_sort doesn't guarantee any real ordering.
        # So we use merge_sort, and add a fake node on the tip.
        # This ensures that left-hand parents will always be inserted into the
        # weave before right-hand parents.
        tip_key = self._key_prefix + (_mod_revision.CURRENT_REVISION,)
        parent_map[tip_key] = (self.a_key, self.b_key)
        for seq_num, key, depth, eom in reversed(tsort.merge_sort(parent_map,
                                                                  tip_key)):
            if key == tip_key:
                continue
            # for key in tsort.topo_sort(parent_map):
            parent_keys = parent_map[key]
            revision_id = key[-1]
            parent_ids = [k[-1] for k in parent_keys]
            self._weave.add_lines(revision_id, parent_ids,
                                  all_texts[revision_id])

    def plan_merge(self):
        """Generate a 'plan' for merging the two revisions.

        This involves comparing their texts and determining the cause of
        differences.  If text A has a line and text B does not, then either the
        line was added to text A, or it was deleted from B.  Once the causes
        are combined, they are written out in the format described in
        VersionedFile.plan_merge.
        """
        if self._head_key is not None: # There was a single head
            if self._head_key == self.a_key:
                plan = 'new-a'
            else:
                if self._head_key != self.b_key:
                    raise AssertionError('There was an invalid head: %s != %s'
                                         % (self.b_key, self._head_key))
                plan = 'new-b'
            head_rev = self._head_key[-1]
            lines = self.get_lines([head_rev])[head_rev]
            return ((plan, line) for line in lines)
        return self._weave.plan_merge(self.a_rev, self.b_rev)


class _PlanLCAMerge(_PlanMergeBase):
    """
    This merge algorithm differs from _PlanMerge in that:

    1. comparisons are done against LCAs only
    2. cases where a contested line is new versus one LCA but old versus
       another are marked as conflicts, by emitting the line as conflicted-a
       or conflicted-b.

    This is faster, and hopefully produces more useful output.
    """

    def __init__(self, a_rev, b_rev, vf, key_prefix, graph):
        _PlanMergeBase.__init__(self, a_rev, b_rev, vf, key_prefix)
        lcas = graph.find_lca(key_prefix + (a_rev,), key_prefix + (b_rev,))
        self.lcas = set()
        for lca in lcas:
            if lca == NULL_REVISION:
                self.lcas.add(lca)
            else:
                self.lcas.add(lca[-1])
        for lca in self.lcas:
            if _mod_revision.is_null(lca):
                lca_lines = []
            else:
                lca_lines = self.get_lines([lca])[lca]
            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_a,
                                                           lca_lines)
            blocks = list(matcher.get_matching_blocks())
            self._cached_matching_blocks[(a_rev, lca)] = blocks
            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_b,
                                                           lca_lines)
            blocks = list(matcher.get_matching_blocks())
            self._cached_matching_blocks[(b_rev, lca)] = blocks

    def _determine_status(self, revision_id, unique_line_numbers):
        """Determine the status of unique lines versus all LCAs.

        Basically, determines why the line is unique to this revision.

        A line may be determined new, killed, or both.

        If a line is determined new, that means it was not present in at least
        one LCA, and is not present in the other merge revision.

        If a line is determined killed, that means the line was present in
        at least one LCA.

        If a line is killed and new, this indicates that the two merge
        revisions contain differing conflict resolutions.

        :param revision_id: The id of the revision in which the lines are
            unique
        :param unique_line_numbers: The line numbers of unique lines.
        :return: a tuple of (new_this, killed_other)
        """
        new = set()
        killed = set()
        unique_line_numbers = set(unique_line_numbers)
        for lca in self.lcas:
            blocks = self._get_matching_blocks(revision_id, lca)
            unique_vs_lca, _ignored = self._unique_lines(blocks)
            new.update(unique_line_numbers.intersection(unique_vs_lca))
            killed.update(unique_line_numbers.difference(unique_vs_lca))
        return new, killed
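
    # Illustrative sketch, not part of the original module: the set arithmetic
    # above, run by hand against a single LCA.  Unique lines that the LCA also
    # lacks are 'new'; unique lines the LCA still has are 'killed' (the other
    # side deleted them).
    @staticmethod
    def _example_determine_status():
        unique_line_numbers = set([3, 7])
        unique_vs_lca = set([3])        # line 3 is also absent from the LCA
        new = unique_line_numbers.intersection(unique_vs_lca)    # set([3])
        killed = unique_line_numbers.difference(unique_vs_lca)   # set([7])
        return new, killed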