import errno
import os
import shutil
import tempfile

from merge_core import merge_flex
from changeset import generate_changeset, ExceptionConflictHandler
from changeset import Inventory
from bzrlib import find_branch
from bzrlib.errors import BzrCommandError
from bzrlib.diff import compare_trees
from trace import mutter, warning

# Copyright (C) 2005, 2006 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA


class UnrelatedBranches(BzrCommandError):
    def __init__(self):
        msg = "Branches have no common ancestor, and no base revision" \
              " specified."
        BzrCommandError.__init__(self, msg)
class MergeConflictHandler(ExceptionConflictHandler):
    """Handle conflicts encountered while merging"""
    def __init__(self, dir, ignore_zero=False):
        ExceptionConflictHandler.__init__(self, dir)
        self.ignore_zero = ignore_zero

    def copy(self, source, dest):
        """Copy the text and mode of a file.

        :param source: The path of the file to copy
        :param dest: The destination file to create
        """
        s_file = file(source, "rb")
        d_file = file(dest, "wb")
        d_file.write(s_file.read())
        os.chmod(dest, 0777 & os.stat(source).st_mode)

    def add_suffix(self, name, suffix, last_new_name=None):
        """Rename a file to append a suffix.  If the new name exists, the
        suffix is added repeatedly until a non-existent name is found.

        :param name: The path of the file
        :param suffix: The suffix to append
        :param last_new_name: (used for recursive calls) the last name tried
        """
        if last_new_name is None:
            last_new_name = name
        new_name = last_new_name + suffix
        try:
            os.rename(name, new_name)
            return new_name
        except OSError, e:
            if e.errno != errno.EEXIST and e.errno != errno.ENOTEMPTY:
                raise
            return self.add_suffix(name, suffix, last_new_name=new_name)
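    # Illustrative sketch of the recursion above (assumed example, not part
    # of the original module): if "foo.moved" already exists, then
    # add_suffix("foo", ".moved") attempts "foo.moved", hits EEXIST, and
    # retries with last_new_name="foo.moved", so the next candidate becomes
    # "foo.moved.moved", and so on until a free name is found and returned.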
    def conflict(self, text):
        warning(text)
        self.conflicts += 1

    def merge_conflict(self, new_file, this_path, base_path, other_path):
        """
        Handle diff3 conflicts by producing a .THIS, .BASE and .OTHER.  The
        main file will be a version with diff3 conflicts.

        :param new_file: Path to the output file with diff3 markers
        :param this_path: Path to the file text for the THIS tree
        :param base_path: Path to the file text for the BASE tree
        :param other_path: Path to the file text for the OTHER tree
        """
        self.add_suffix(this_path, ".THIS")
        self.copy(base_path, this_path+".BASE")
        self.copy(other_path, this_path+".OTHER")
        os.rename(new_file, this_path)
        self.conflict("Diff3 conflict encountered in %s" % this_path)

    def target_exists(self, entry, target, old_path):
        """Handle the case when the target file or dir exists"""
        moved_path = self.add_suffix(target, ".moved")
        self.conflict("Moved existing %s to %s" % (target, moved_path))

    def rmdir_non_empty(self, filename):
        """Handle the case where the dir to be removed still has contents"""
        self.conflict("Directory %s not removed because it is not empty"
                      % filename)

    def finish(self):
        if not self.ignore_zero:
            print "%d conflicts encountered.\n" % self.conflicts
class SourceFile(object):
    def __init__(self, path, id, present=None, isdir=None):
        self.path = path
        self.id = id
        self.present = present
        self.interesting = True

    def __repr__(self):
        return "SourceFile(%s, %s)" % (self.path, self.id)


def get_tree(treespec, temp_root, label):
    location, revno = treespec
    branch = find_branch(location)
    if revno is None:
        base_tree = branch.working_tree()
    elif revno == -1:
        base_tree = branch.basis_tree()
    else:
        base_tree = branch.revision_tree(branch.lookup_revision(revno))
    temp_path = os.path.join(temp_root, label)
    return branch, MergeTree(base_tree, temp_path)


def abspath(tree, file_id):
    path = tree.inventory.id2path(file_id)


def file_exists(tree, file_id):
    return tree.has_filename(tree.id2path(file_id))


def inventory_map(tree):
    inventory = {}
    for file_id in tree.inventory:
        path = abspath(tree, file_id)
        inventory[path] = SourceFile(path, file_id)
    return inventory
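# Illustrative note (assumed example; the revno conventions follow the
# reconstruction of get_tree() above): a treespec is a (location, revno)
# pair, e.g. ('.', None) for the current working tree, ('.', -1) for the
# basis tree, or ('../other', 42) for revision 42 of another branch, and
# get_tree() maps it to a (branch, MergeTree) pair.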
from bzrlib import (
    debug,
    errors,
    osutils,
    patiencediff,
    revision as _mod_revision,
    ui,
    )
from bzrlib.branch import Branch
from bzrlib.conflicts import ConflictList, Conflict
from bzrlib.errors import (BzrCommandError,
                           BzrError,
                           NoCommits,
                           UnsupportedOperation,
                           WorkingTreeNotRevision,
                           )
from bzrlib.merge3 import Merge3
from bzrlib.osutils import rename, pathjoin
from bzrlib.progress import DummyProgress, ProgressPhase
from bzrlib.revision import (NULL_REVISION, ensure_null)
from bzrlib.textfile import check_text_lines
from bzrlib.trace import mutter, warning, note, is_quiet
from bzrlib.transform import (TransformPreview, TreeTransform,
                              resolve_conflicts, cook_conflicts,
                              conflict_pass, FinalPaths, create_by_entry,
                              unique_add, ROOT_PARENT)
from bzrlib.versionedfile import PlanWeaveMerge

# TODO: Report back as changes are merged in


def transform_tree(from_tree, to_tree, interesting_ids=None):
    merge_inner(from_tree.branch, to_tree, from_tree, ignore_zero=True,
                interesting_ids=interesting_ids, this_tree=from_tree)


class Merger(object):
    def __init__(self, this_branch, other_tree=None, base_tree=None,
                 this_tree=None, pb=DummyProgress(), change_reporter=None,
                 recurse='down', revision_graph=None):


class MergeTree(object):
    def __init__(self, tree, tempdir):
        object.__init__(self)
        if hasattr(tree, "basedir"):
            self.root = tree.basedir
        else:
            self.root = None
        self.inventory = inventory_map(tree)
        self.tree = tree
        self.tempdir = tempdir
        os.mkdir(os.path.join(self.tempdir, "texts"))
        self.cached = {}

    def readonly_path(self, id):
        if id not in self.tree:
            return None
        if self.root is not None:
            return self.tree.abspath(self.tree.id2path(id))
        if self.tree.inventory[id].kind in ("directory", "root_directory"):
            return self.tempdir
        if not self.cached.has_key(id):
            path = os.path.join(self.tempdir, "texts", id)
            outfile = file(path, "wb")
            outfile.write(self.tree.get_file(id).read())
            assert(os.path.exists(path))
            self.cached[id] = path
        return self.cached[id]
def merge(other_revision, base_revision,
          check_clean=True, ignore_zero=False,
          this_dir=None):
    """Merge changes into a tree.

    base_revision
        Base for three-way merge.
    other_revision
        Other revision for three-way merge.
    this_dir
        Directory to merge changes into; '.' by default.
    check_clean
        If true, this_dir must have no uncommitted changes before the
        merge begins.
    """
    tempdir = tempfile.mkdtemp(prefix="bzr-")
    this_branch = find_branch(this_dir)
        assert this_tree is not None, "this_tree is required"
        self.this_branch = this_branch
        self.this_basis = _mod_revision.ensure_null(
            this_branch.last_revision())
        self.this_rev_id = None
        self.this_tree = this_tree
        self.this_revision_tree = None
        self.this_basis_tree = None
        self.other_tree = other_tree
        self.other_branch = None
        self.base_tree = base_tree
        self.ignore_zero = False
        self.backup_files = False
        self.interesting_ids = None
        self.interesting_files = None
        self.show_base = False
        self.reprocess = False
        self._pb = pb
        self.recurse = recurse
        self.change_reporter = change_reporter
        self._cached_trees = {}
        self._revision_graph = revision_graph
        self._base_is_ancestor = None
        self._base_is_other_ancestor = None
    @property
    def revision_graph(self):
        if self._revision_graph is None:
            self._revision_graph = self.this_branch.repository.get_graph()
        return self._revision_graph

    def _set_base_is_ancestor(self, value):
        self._base_is_ancestor = value

    def _get_base_is_ancestor(self):
        if self._base_is_ancestor is None:
            self._base_is_ancestor = self.revision_graph.is_ancestor(
                self.base_rev_id, self.this_basis)
        return self._base_is_ancestor

    base_is_ancestor = property(_get_base_is_ancestor, _set_base_is_ancestor)

    def _set_base_is_other_ancestor(self, value):
        self._base_is_other_ancestor = value

    def _get_base_is_other_ancestor(self):
        if self._base_is_other_ancestor is None:
            self.base_is_other_ancestor = self.revision_graph.is_ancestor(
                self.base_rev_id, self.other_basis)
        return self._base_is_other_ancestor

    base_is_other_ancestor = property(_get_base_is_other_ancestor,
                                      _set_base_is_other_ancestor)
    @staticmethod
    def from_uncommitted(tree, other_tree, pb):
        """Return a Merger for uncommitted changes in other_tree.

        :param tree: The tree to merge into
        :param other_tree: The tree to get uncommitted changes from
        :param pb: A progress indicator
        """
        merger = Merger(tree.branch, other_tree, other_tree.basis_tree(), tree,
                        pb)
        merger.base_rev_id = merger.base_tree.get_revision_id()
        merger.other_rev_id = None
        merger.other_basis = merger.base_rev_id
        return merger

    @classmethod
    def from_mergeable(klass, tree, mergeable, pb):
        """Return a Merger for a bundle or merge directive.

        :param tree: The tree to merge changes into
        :param mergeable: A merge directive or bundle
        :param pb: A progress indicator
        """
        mergeable.install_revisions(tree.branch.repository)
        base_revision_id, other_revision_id, verified =\
            mergeable.get_merge_request(tree.branch.repository)
        revision_graph = tree.branch.repository.get_graph()
        if (base_revision_id != _mod_revision.NULL_REVISION and
            revision_graph.is_ancestor(
                base_revision_id, tree.branch.last_revision())):
            base_revision_id = None
        else:
            warning('Performing cherrypick')
        merger = klass.from_revision_ids(pb, tree, other_revision_id,
                                         base_revision_id, revision_graph=
                                         revision_graph)
        return merger, verified

    @staticmethod
    def from_revision_ids(pb, this, other, base=None, other_branch=None,
                          base_branch=None, revision_graph=None):
        """Return a Merger for revision-ids.

        :param tree: The tree to merge changes into
        :param other: The revision-id to use as OTHER
        :param base: The revision-id to use as BASE.  If not specified, will
        :param other_branch: A branch containing the other revision-id.  If
            not supplied, this.branch is used.
        :param base_branch: A branch containing the base revision-id.  If
            not supplied, other_branch or this.branch will be used.
        :param pb: A progress indicator
        """
        merger = Merger(this.branch, this_tree=this, pb=pb,
                        revision_graph=revision_graph)
        if other_branch is None:
            other_branch = this.branch
        merger.set_other_revision(other, other_branch)
        if base_branch is None:
            base_branch = other_branch
        merger.set_base_revision(base, base_branch)
        return merger
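    # Illustrative sketch of driving a Merger (assumed usage; the tree,
    # graph and revision id below are hypothetical placeholders):
    #
    #   merger = Merger.from_revision_ids(pb, work_tree, other_rev_id,
    #                                     revision_graph=graph)
    #   merger.merge_type = Merge3Merger
    #   conflict_count = merger.do_merge()
    #
    # i.e. OTHER (and optionally BASE) are set first, a merge_type is chosen,
    # and do_merge() then returns the number of cooked conflicts.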
    def revision_tree(self, revision_id, branch=None):
        if revision_id not in self._cached_trees:
            if branch is None:
                branch = self.this_branch
            try:
                tree = self.this_tree.revision_tree(revision_id)
            except errors.NoSuchRevisionInTree:
                tree = branch.repository.revision_tree(revision_id)
            self._cached_trees[revision_id] = tree
        return self._cached_trees[revision_id]

    def _get_tree(self, treespec, possible_transports=None):
        from bzrlib import workingtree
        location, revno = treespec
        if revno is None:
            tree = workingtree.WorkingTree.open_containing(location)[0]
            return tree.branch, tree
        branch = Branch.open_containing(location, possible_transports)[0]
        if revno == -1:
            revision_id = branch.last_revision()
        else:
            revision_id = branch.get_rev_id(revno)
        revision_id = ensure_null(revision_id)
        return branch, self.revision_tree(revision_id, branch)
    def ensure_revision_trees(self):
        if self.this_revision_tree is None:
            self.this_basis_tree = self.revision_tree(self.this_basis)
            if self.this_basis == self.this_rev_id:
                self.this_revision_tree = self.this_basis_tree

        if self.other_rev_id is None:
            other_basis_tree = self.revision_tree(self.other_basis)
            changes = other_basis_tree.changes_from(self.other_tree)
            if changes.has_changed():
                raise WorkingTreeNotRevision(self.this_tree)
            other_rev_id = self.other_basis
            self.other_tree = other_basis_tree

    def file_revisions(self, file_id):
        self.ensure_revision_trees()
        def get_id(tree, file_id):
            revision_id = tree.inventory[file_id].revision
            assert revision_id is not None
            return revision_id
        if self.this_rev_id is None:
            if self.this_basis_tree.get_file_sha1(file_id) != \
                self.this_tree.get_file_sha1(file_id):
                raise WorkingTreeNotRevision(self.this_tree)
        trees = (self.this_basis_tree, self.other_tree)
        return [get_id(tree, file_id) for tree in trees]

    def check_basis(self, check_clean, require_commits=True):
        if self.this_basis is None and require_commits is True:
            raise BzrCommandError("This branch has no commits."
                                  " (perhaps you would prefer 'bzr pull')")
    changes = compare_trees(this_branch.working_tree(),
                            this_branch.basis_tree(), False)
    if changes.has_changed():
        raise BzrCommandError("Working tree has uncommitted changes.")
    other_branch, other_tree = get_tree(other_revision, tempdir, "other")

        if self.this_basis != self.this_rev_id:
            raise errors.UncommittedChanges(self.this_tree)
    def compare_basis(self):
        try:
            basis_tree = self.revision_tree(self.this_tree.last_revision())
        except errors.RevisionNotPresent:
            basis_tree = self.this_tree.basis_tree()
        changes = self.this_tree.changes_from(basis_tree)
        if not changes.has_changed():
            self.this_rev_id = self.this_basis

    def set_interesting_files(self, file_list):
        self.interesting_files = file_list

    def set_pending(self):
        if not self.base_is_ancestor or not self.base_is_other_ancestor or self.other_rev_id is None:
            return
        self._add_parent()

    def _add_parent(self):
        new_parents = self.this_tree.get_parent_ids() + [self.other_rev_id]
        new_parent_trees = []
        for revision_id in new_parents:
            try:
                tree = self.revision_tree(revision_id)
            except errors.RevisionNotPresent:
                tree = None
            else:
                tree.lock_read()
            new_parent_trees.append((revision_id, tree))
        try:
            self.this_tree.set_parent_trees(new_parent_trees,
                                            allow_leftmost_as_ghost=True)
        finally:
            for _revision_id, tree in new_parent_trees:
                if tree is not None:
                    tree.unlock()
    def set_other(self, other_revision, possible_transports=None):
        """Set the revision and tree to merge from.

        This sets the other_tree, other_rev_id, other_basis attributes.

        :param other_revision: The [path, revision] list to merge from.
        """
        self.other_branch, self.other_tree = self._get_tree(other_revision,
                                                            possible_transports)
        if other_revision[1] == -1:
            self.other_rev_id = _mod_revision.ensure_null(
                self.other_branch.last_revision())
            if _mod_revision.is_null(self.other_rev_id):
                raise NoCommits(self.other_branch)
            self.other_basis = self.other_rev_id
        elif other_revision[1] is not None:
            self.other_rev_id = self.other_branch.get_rev_id(other_revision[1])
            self.other_basis = self.other_rev_id
        else:
            self.other_rev_id = None
            self.other_basis = self.other_branch.last_revision()
            if self.other_basis is None:
                raise NoCommits(self.other_branch)
        if self.other_rev_id is not None:
            self._cached_trees[self.other_rev_id] = self.other_tree
        self._maybe_fetch(self.other_branch, self.this_branch, self.other_basis)
    def set_other_revision(self, revision_id, other_branch):
        """Set 'other' based on a branch and revision id

        :param revision_id: The revision to use for a tree
        :param other_branch: The branch containing this tree
        """
        self.other_rev_id = revision_id
        self.other_branch = other_branch
        self._maybe_fetch(other_branch, self.this_branch, self.other_rev_id)
        self.other_tree = self.revision_tree(revision_id)
        self.other_basis = revision_id

    def set_base_revision(self, revision_id, branch):
        """Set 'base' based on a branch and revision id

        :param revision_id: The revision to use for a tree
        :param branch: The branch containing this tree
        """
        self.base_rev_id = revision_id
        self.base_branch = branch
        self._maybe_fetch(branch, self.this_branch, revision_id)
        self.base_tree = self.revision_tree(revision_id)

    def _maybe_fetch(self, source, target, revision_id):
        if not source.repository.has_same_location(target.repository):
            target.fetch(source, revision_id)
    def find_base(self):
        revisions = [ensure_null(self.this_basis),
                     ensure_null(self.other_basis)]
        if NULL_REVISION in revisions:
            self.base_rev_id = NULL_REVISION
        else:
            self.base_rev_id, steps = self.revision_graph.find_unique_lca(
                revisions[0], revisions[1], count_steps=True)
            if self.base_rev_id == NULL_REVISION:
                raise UnrelatedBranches()
            if steps > 1:
                warning('Warning: criss-cross merge encountered.  See bzr'
                        ' help criss-cross.')
        self.base_tree = self.revision_tree(self.base_rev_id)
        self.base_is_ancestor = True
        self.base_is_other_ancestor = True
    def set_base(self, base_revision):
        """Set the base revision to use for the merge.

        :param base_revision: A 2-list containing a path and revision number.
        """
        mutter("doing merge() with no base_revision specified")
        if base_revision == [None, None]:
            self.find_base()

    if other_revision[1] == -1:
        o_revno = None
    else:
        o_revno = other_revision[1]
    base_revno = this_branch.common_ancestor(other_branch,
                                             other_revno=o_revno)[0]
    if base_revno is None:
        raise UnrelatedBranches()
    base_revision = ['.', base_revno]
    base_branch, base_tree = get_tree(base_revision, tempdir, "base")
    merge_inner(this_branch, other_tree, base_tree, tempdir,
                ignore_zero=ignore_zero)
    shutil.rmtree(tempdir)
def generate_cset_optimized(tree_a, tree_b, inventory_a, inventory_b):
    """Generate a changeset, using the text_id to mark really-changed files.

    This permits blazing comparisons when text_ids are present.  It also
    disables metadata comparison for files with identical texts.
    """
    for file_id in tree_a.tree.inventory:
        if file_id not in tree_b.tree.inventory:
            continue
        entry_a = tree_a.tree.inventory[file_id]
        entry_b = tree_b.tree.inventory[file_id]
        if (entry_a.kind, entry_b.kind) != ("file", "file"):
            continue
        if None in (entry_a.text_id, entry_b.text_id):
            continue
        if entry_a.text_id != entry_b.text_id:
            continue
        inventory_a[abspath(tree_a.tree, file_id)].interesting = False
        inventory_b[abspath(tree_b.tree, file_id)].interesting = False
    cset = generate_changeset(tree_a, tree_b, inventory_a, inventory_b)
    for entry in cset.entries.itervalues():
        entry.metadata_change = None
    return cset
def merge_inner(this_branch, other_tree, base_tree, tempdir,
                ignore_zero=False):
    this_tree = get_tree((this_branch.base, None), tempdir, "this")[1]

    def get_inventory(tree):
        return tree.inventory

    inv_changes = merge_flex(this_tree, base_tree, other_tree,
                             generate_cset_optimized, get_inventory,
                             MergeConflictHandler(base_tree.root,
                                                  ignore_zero=ignore_zero))
    adjust_ids = []
    for id, path in inv_changes.iteritems():
        assert path.startswith('./')
        adjust_ids.append((path, id))
    this_branch.set_inventory(regen_inventory(this_branch, this_tree.root, adjust_ids))


def regen_inventory(this_branch, root, new_entries):
    old_entries = this_branch.read_working_inventory()
    new_inventory = {}
    by_path = {}
    for file_id in old_entries:
        entry = old_entries[file_id]
        path = old_entries.id2path(file_id)
        new_inventory[file_id] = (path, file_id, entry.parent_id, entry.kind)
        by_path[path] = file_id
        base_branch, self.base_tree = self._get_tree(base_revision)
        if base_revision[1] == -1:
            self.base_rev_id = base_branch.last_revision()
        elif base_revision[1] is None:
            self.base_rev_id = _mod_revision.NULL_REVISION
        else:
            self.base_rev_id = _mod_revision.ensure_null(
                base_branch.get_rev_id(base_revision[1]))
        self._maybe_fetch(base_branch, self.this_branch, self.base_rev_id)
    def make_merger(self):
        kwargs = {'working_tree': self.this_tree, 'this_tree': self.this_tree,
                  'other_tree': self.other_tree,
                  'interesting_ids': self.interesting_ids,
                  'interesting_files': self.interesting_files}
        if self.merge_type.requires_base:
            kwargs['base_tree'] = self.base_tree
        if self.merge_type.supports_reprocess:
            kwargs['reprocess'] = self.reprocess
        elif self.reprocess:
            raise BzrError("Conflict reduction is not supported for merge"
                           " type %s." % self.merge_type)
        if self.merge_type.supports_show_base:
            kwargs['show_base'] = self.show_base
        elif self.show_base:
            raise BzrError("Showing base is not supported for this"
                           " merge type. %s" % self.merge_type)
        if (not getattr(self.merge_type, 'supports_reverse_cherrypick', True)
            and not self.base_is_other_ancestor):
            raise errors.CannotReverseCherrypick()
        if self.merge_type.history_based:
            kwargs['cherrypick'] = (not self.base_is_ancestor or
                                    not self.base_is_other_ancestor)
        return self.merge_type(pb=self._pb,
                               change_reporter=self.change_reporter,
                               **kwargs)
    def do_merge(self):
        merge = self.make_merger()
        self.this_tree.lock_tree_write()
        if self.base_tree is not None:
            self.base_tree.lock_read()
        if self.other_tree is not None:
            self.other_tree.lock_read()
        try:
            if self.recurse == 'down':
                for path, file_id in self.this_tree.iter_references():
                    sub_tree = self.this_tree.get_nested_tree(file_id, path)
                    other_revision = self.other_tree.get_reference_revision(
                        file_id, path)
                    if other_revision == sub_tree.last_revision():
                        continue
                    sub_merge = Merger(sub_tree.branch, this_tree=sub_tree)
                    sub_merge.merge_type = self.merge_type
                    relpath = self.this_tree.relpath(path)
                    other_branch = self.other_branch.reference_parent(file_id, relpath)
                    sub_merge.set_other_revision(other_revision, other_branch)
                    base_revision = self.base_tree.get_reference_revision(file_id)
                    sub_merge.base_tree = \
                        sub_tree.branch.repository.revision_tree(base_revision)
        finally:
            if self.other_tree is not None:
                self.other_tree.unlock()
            if self.base_tree is not None:
                self.base_tree.unlock()
            self.this_tree.unlock()
        if len(merge.cooked_conflicts) == 0:
            if not self.ignore_zero and not is_quiet():
                note("All changes applied successfully.")
        else:
            note("%d conflicts encountered." % len(merge.cooked_conflicts))
        return len(merge.cooked_conflicts)
class Merge3Merger(object):
    """Three-way merger that uses the merge3 text merger"""
    requires_base = True
    supports_reprocess = True
    supports_show_base = True
    history_based = False
    supports_reverse_cherrypick = True
    winner_idx = {"this": 2, "other": 1, "conflict": 1}

    def __init__(self, working_tree, this_tree, base_tree, other_tree,
                 interesting_ids=None, reprocess=False, show_base=False,
                 pb=DummyProgress(), pp=None, change_reporter=None,
                 interesting_files=None, do_merge=True):
        """Initialize the merger object and perform the merge.

        :param working_tree: The working tree to apply the merge to
        :param this_tree: The local tree in the merge operation
        :param base_tree: The common tree in the merge operation
        :param other_tree: The other tree to merge changes from
        :param interesting_ids: The file_ids of files that should
            participate in the merge.  May not be combined with
            interesting_files.
        :param reprocess: If True, perform conflict-reduction processing.
        :param show_base: If True, show the base revision in text conflicts.
            (incompatible with reprocess)
        :param pb: A Progress bar
        :param pp: A ProgressPhase object
        :param change_reporter: An object that should report changes made
        :param interesting_files: The tree-relative paths of files that should
            participate in the merge.  If these paths refer to directories,
            the contents of those directories will also be included.  May not
            be combined with interesting_ids.  If neither interesting_files nor
            interesting_ids is specified, all files may participate in the
            merge.
        """
        object.__init__(self)
        if interesting_files is not None:
            assert interesting_ids is None
        self.interesting_ids = interesting_ids
        self.interesting_files = interesting_files
        self.this_tree = working_tree
        self.base_tree = base_tree
        self.other_tree = other_tree
        self._raw_conflicts = []
        self.cooked_conflicts = []
        self.reprocess = reprocess
        self.show_base = show_base
        self.pb = pb
        self.change_reporter = change_reporter
        self.pp = ProgressPhase("Merge phase", 3, self.pb)
    def do_merge(self):
        self.this_tree.lock_tree_write()
        self.base_tree.lock_read()
        self.other_tree.lock_read()
        self.tt = TreeTransform(self.this_tree, self.pb)
        try:
            self._compute_transform()
            results = self.tt.apply(no_conflicts=True)
            self.write_modified(results)
            try:
                self.this_tree.add_conflicts(self.cooked_conflicts)
            except UnsupportedOperation:
                pass
        finally:
            self.other_tree.unlock()
            self.base_tree.unlock()
            self.this_tree.unlock()

    def make_preview_transform(self):
        self.base_tree.lock_read()
        self.other_tree.lock_read()
        self.tt = TransformPreview(self.this_tree)
        try:
            self._compute_transform()
        finally:
            self.other_tree.unlock()
            self.base_tree.unlock()
        return self.tt
    def _compute_transform(self):
        entries = self._entries3()
        child_pb = ui.ui_factory.nested_progress_bar()
        try:
            for num, (file_id, changed, parents3, names3,
                      executable3) in enumerate(entries):
                child_pb.update('Preparing file merge', num, len(entries))
                self._merge_names(file_id, parents3, names3)
                if changed:
                    file_status = self.merge_contents(file_id)
                else:
                    file_status = 'unmodified'
                self._merge_executable(file_id,
                                       executable3, file_status)
        finally:
            child_pb.finished()
        child_pb = ui.ui_factory.nested_progress_bar()
        try:
            fs_conflicts = resolve_conflicts(self.tt, child_pb,
                lambda t, c: conflict_pass(t, c, self.other_tree))
        finally:
            child_pb.finished()
        if self.change_reporter is not None:
            from bzrlib import delta
            delta.report_changes(
                self.tt._iter_changes(), self.change_reporter)
        self.cook_conflicts(fs_conflicts)
        for conflict in self.cooked_conflicts:
            warning(conflict)
    def _entries3(self):
        """Gather data about files modified between three trees.

        Return a list of tuples of file_id, changed, parents3, names3,
        executable3.  changed is a boolean indicating whether the file contents
        or kind were changed.  parents3 is a tuple of parent ids for base,
        other and this.  names3 is a tuple of names for base, other and this.
        executable3 is a tuple of execute-bit values for base, other and this.
        """
        result = []
        iterator = self.other_tree._iter_changes(self.base_tree,
                include_unchanged=True, specific_files=self.interesting_files,
                extra_trees=[self.this_tree])
        for (file_id, paths, changed, versioned, parents, names, kind,
             executable) in iterator:
            if (self.interesting_ids is not None and
                file_id not in self.interesting_ids):
                continue
            if file_id in self.this_tree.inventory:
                entry = self.this_tree.inventory[file_id]
                this_name = entry.name
                this_parent = entry.parent_id
                this_executable = entry.executable
            else:
                this_name = None
                this_parent = None
                this_executable = None
            parents3 = parents + (this_parent,)
            names3 = names + (this_name,)
            executable3 = executable + (this_executable,)
            result.append((file_id, changed, parents3, names3, executable3))
        return result
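    # Illustrative sketch of an _entries3() tuple (hypothetical values): for
    # a file that OTHER renamed from "a.txt" to "b.txt" while THIS kept it,
    # the entry would look like
    #   (file_id, False,
    #    (root_id, root_id, root_id),    # parents3: base, other, this
    #    ('a.txt', 'b.txt', 'a.txt'),    # names3:   base, other, this
    #    (False, False, False))          # executable3
    # with changed=False because neither contents nor kind changed.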
    def fix_root(self):
        self.tt.final_kind(self.tt.root)
        self.tt.cancel_deletion(self.tt.root)
        if self.tt.final_file_id(self.tt.root) is None:
            self.tt.version_file(self.tt.tree_file_id(self.tt.root),
                                 self.tt.root)
        if self.other_tree.inventory.root is None:
            return
        other_root_file_id = self.other_tree.get_root_id()
        other_root = self.tt.trans_id_file_id(other_root_file_id)
        if other_root == self.tt.root:
            return
        self.tt.final_kind(other_root)
        self.reparent_children(self.other_tree.inventory.root, self.tt.root)
        self.tt.cancel_creation(other_root)
        self.tt.cancel_versioning(other_root)

    def reparent_children(self, ie, target):
        for thing, child in ie.children.iteritems():
            trans_id = self.tt.trans_id_file_id(child.file_id)
            self.tt.adjust_path(self.tt.final_name(trans_id), target, trans_id)

    def write_modified(self, results):
        modified_hashes = {}
        for path in results.modified_paths:
            file_id = self.this_tree.path2id(self.this_tree.relpath(path))
            if file_id is None:
                continue
            hash = self.this_tree.get_file_sha1(file_id)
            modified_hashes[file_id] = hash
        self.this_tree.set_merge_modified(modified_hashes)

    @staticmethod
    def parent(entry, file_id):
        """Determine the parent for a file_id (used as a key method)"""
        if entry is None:
            return None
        return entry.parent_id

    @staticmethod
    def name(entry, file_id):
        """Determine the name for a file_id (used as a key method)"""
        if entry is None:
            return None
        return entry.name
    new_path_list = []
    insertions = 0
    deletions = 0
    for path, file_id in new_entries:
        if path is None:
            del new_inventory[file_id]
            deletions += 1
        else:
            new_path_list.append((path, file_id))
            if file_id not in old_entries:
                insertions += 1
    # Ensure no file is added before its parent
    new_path_list.sort()
    for path, file_id in new_path_list:
        parent = by_path[os.path.dirname(path)]
        kind = bzrlib.osutils.file_kind(os.path.join(root, path))
        new_inventory[file_id] = (path, file_id, parent, kind)
        by_path[path] = file_id

    # Get a list in insertion order
    new_inventory_list = new_inventory.values()
    mutter("""Inventory regeneration:
old length: %i insertions: %i deletions: %i new_length: %i""" \
        % (len(old_entries), insertions, deletions, len(new_inventory_list)))
    assert len(new_inventory_list) == len(old_entries) + insertions - deletions
    new_inventory_list.sort()
    return new_inventory_list
    @staticmethod
    def contents_sha1(tree, file_id):
        """Determine the sha1 of the file contents (used as a key method)."""
        if file_id not in tree:
            return None
        return tree.get_file_sha1(file_id)

    @staticmethod
    def executable(tree, file_id):
        """Determine the executability of a file-id (used as a key method)."""
        if file_id not in tree:
            return None
        if tree.kind(file_id) != "file":
            return False
        return tree.is_executable(file_id)

    @staticmethod
    def kind(tree, file_id):
        """Determine the kind of a file-id (used as a key method)."""
        if file_id not in tree:
            return None
        return tree.kind(file_id)

    @staticmethod
    def _three_way(base, other, this):
        #if base == other, either they all agree, or only THIS has changed.
        if base == other:
            return "this"
        elif this not in (base, other):
            return "conflict"
        # "Ambiguous clean merge" -- both sides have made the same change.
        elif this == other:
            return "this"
        # this == base: only other has changed.
        else:
            return "other"
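    # Illustrative truth table for _three_way() (assumed examples):
    #   _three_way('x', 'x', 'y') -> 'this'      only THIS changed
    #   _three_way('x', 'y', 'x') -> 'other'     only OTHER changed
    #   _three_way('x', 'y', 'y') -> 'this'      both made the same change
    #   _three_way('x', 'y', 'z') -> 'conflict'  changed in different ways
    # winner_idx maps the verdict to an index into (base, other, this) tuples.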
    @staticmethod
    def scalar_three_way(this_tree, base_tree, other_tree, file_id, key):
        """Do a three-way test on a scalar.
        Return "this", "other" or "conflict", depending whether a value wins.
        """
        key_base = key(base_tree, file_id)
        key_other = key(other_tree, file_id)
        #if base == other, either they all agree, or only THIS has changed.
        if key_base == key_other:
            return "this"
        key_this = key(this_tree, file_id)
        if key_this not in (key_base, key_other):
            return "conflict"
        # "Ambiguous clean merge"
        elif key_this == key_other:
            return "this"
        else:
            assert key_this == key_base
            return "other"
    def merge_names(self, file_id):
        def get_entry(tree):
            if file_id in tree.inventory:
                return tree.inventory[file_id]
            else:
                return None
        this_entry = get_entry(self.this_tree)
        other_entry = get_entry(self.other_tree)
        base_entry = get_entry(self.base_tree)
        entries = (base_entry, other_entry, this_entry)
        names = []
        parents = []
        for entry in entries:
            if entry is None:
                names.append(None)
                parents.append(None)
            else:
                names.append(entry.name)
                parents.append(entry.parent_id)
        return self._merge_names(file_id, parents, names)
    def _merge_names(self, file_id, parents, names):
        """Perform a merge on file_id names and parents"""
        base_name, other_name, this_name = names
        base_parent, other_parent, this_parent = parents

        name_winner = self._three_way(*names)
        parent_id_winner = self._three_way(*parents)
        if this_name is None:
            if name_winner == "this":
                name_winner = "other"
            if parent_id_winner == "this":
                parent_id_winner = "other"
        if name_winner == "this" and parent_id_winner == "this":
            return
        if name_winner == "conflict":
            trans_id = self.tt.trans_id_file_id(file_id)
            self._raw_conflicts.append(('name conflict', trans_id,
                                        this_name, other_name))
        if parent_id_winner == "conflict":
            trans_id = self.tt.trans_id_file_id(file_id)
            self._raw_conflicts.append(('parent conflict', trans_id,
                                        this_parent, other_parent))
        if other_name is None:
            # it doesn't matter whether the result was 'other' or
            # 'conflict'-- if there's no 'other', we leave it alone.
            return
        # if we get here, name_winner and parent_winner are set to safe values.
        trans_id = self.tt.trans_id_file_id(file_id)
        parent_id = parents[self.winner_idx[parent_id_winner]]
        if parent_id is not None:
            parent_trans_id = self.tt.trans_id_file_id(parent_id)
            self.tt.adjust_path(names[self.winner_idx[name_winner]],
                                parent_trans_id, trans_id)
    def merge_contents(self, file_id):
        """Perform a merge on file_id contents."""
        def contents_pair(tree):
            if file_id not in tree:
                return (None, None)
            kind = tree.kind(file_id)
            if kind == "file":
                contents = tree.get_file_sha1(file_id)
            elif kind == "symlink":
                contents = tree.get_symlink_target(file_id)
            else:
                contents = None
            return kind, contents

        def contents_conflict():
            trans_id = self.tt.trans_id_file_id(file_id)
            name = self.tt.final_name(trans_id)
            parent_id = self.tt.final_parent(trans_id)
            if file_id in self.this_tree.inventory:
                self.tt.unversion_file(trans_id)
            if file_id in self.this_tree:
                self.tt.delete_contents(trans_id)
            file_group = self._dump_conflicts(name, parent_id, file_id,
                                              set_version=True)
            self._raw_conflicts.append(('contents conflict', file_group))

        # See SPOT run.  run, SPOT, run.
        # So we're not QUITE repeating ourselves; we do tricky things with
        base_pair = contents_pair(self.base_tree)
        other_pair = contents_pair(self.other_tree)
        if base_pair == other_pair:
            # OTHER introduced no changes
            return "unmodified"
        this_pair = contents_pair(self.this_tree)
        if this_pair == other_pair:
            # THIS and OTHER introduced the same changes
            return "unmodified"
        else:
            trans_id = self.tt.trans_id_file_id(file_id)
            if this_pair == base_pair:
                # only OTHER introduced changes
                if file_id in self.this_tree:
                    # Remove any existing contents
                    self.tt.delete_contents(trans_id)
                if file_id in self.other_tree:
                    # OTHER changed the file
                    create_by_entry(self.tt,
                                    self.other_tree.inventory[file_id],
                                    self.other_tree, trans_id)
                    if file_id not in self.this_tree.inventory:
                        self.tt.version_file(file_id, trans_id)
                    return "modified"
                elif file_id in self.this_tree.inventory:
                    # OTHER deleted the file
                    self.tt.unversion_file(trans_id)
                    return "deleted"
            #BOTH THIS and OTHER introduced changes; scalar conflict
            elif this_pair[0] == "file" and other_pair[0] == "file":
                # THIS and OTHER are both files, so text merge.  Either
                # BASE is a file, or both converted to files, so at least we
                # have agreement that output should be a file.
                try:
                    self.text_merge(file_id, trans_id)
                except errors.BinaryFile:
                    return contents_conflict()
                if file_id not in self.this_tree.inventory:
                    self.tt.version_file(file_id, trans_id)
                try:
                    self.tt.tree_kind(trans_id)
                    self.tt.delete_contents(trans_id)
                except errors.NoSuchFile:
                    pass
                return "modified"
            else:
                # Scalar conflict, can't text merge.  Dump conflicts
                return contents_conflict()
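    # Summary sketch of the branches above (status strings as consumed by
    # _merge_executable and _compute_transform):
    #   base == other                    -> "unmodified" (OTHER changed nothing)
    #   this == other                    -> "unmodified" (same change on both sides)
    #   this == base, other differs      -> take OTHER: "modified" or "deleted"
    #   both sides are files, all differ -> text_merge() the contents
    #   otherwise (e.g. kind mismatch)   -> contents_conflict()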
    def get_lines(self, tree, file_id):
        """Return the lines in a file, or an empty list."""
        if file_id in tree:
            return tree.get_file(file_id).readlines()
        else:
            return []

    def text_merge(self, file_id, trans_id):
        """Perform a three-way text merge on a file_id"""
        # it's possible that we got here with base as a different type.
        # if so, we just want two-way text conflicts.
        if file_id in self.base_tree and \
            self.base_tree.kind(file_id) == "file":
            base_lines = self.get_lines(self.base_tree, file_id)
        else:
            base_lines = []
        other_lines = self.get_lines(self.other_tree, file_id)
        this_lines = self.get_lines(self.this_tree, file_id)
        m3 = Merge3(base_lines, this_lines, other_lines)
        start_marker = "!START OF MERGE CONFLICT!" + "I HOPE THIS IS UNIQUE"
        if self.show_base is True:
            base_marker = '|' * 7
        else:
            base_marker = None

        def iter_merge3(retval):
            retval["text_conflicts"] = False
            for line in m3.merge_lines(name_a = "TREE",
                                       name_b = "MERGE-SOURCE",
                                       name_base = "BASE-REVISION",
                                       start_marker=start_marker,
                                       base_marker=base_marker,
                                       reprocess=self.reprocess):
                if line.startswith(start_marker):
                    retval["text_conflicts"] = True
                    yield line.replace(start_marker, '<' * 7)
                else:
                    yield line

        retval = {}
        merge3_iterator = iter_merge3(retval)
        self.tt.create_file(merge3_iterator, trans_id)
        if retval["text_conflicts"] is True:
            self._raw_conflicts.append(('text conflict', trans_id))
            name = self.tt.final_name(trans_id)
            parent_id = self.tt.final_parent(trans_id)
            file_group = self._dump_conflicts(name, parent_id, file_id,
                                              this_lines, base_lines,
                                              other_lines)
            file_group.append(trans_id)
    def _dump_conflicts(self, name, parent_id, file_id, this_lines=None,
                        base_lines=None, other_lines=None, set_version=False):
        """Emit conflict files.
        If this_lines, base_lines, or other_lines are omitted, they will be
        determined automatically.  If set_version is true, the .OTHER, .THIS
        or .BASE (in that order) will be created as versioned files.
        """
        data = [('OTHER', self.other_tree, other_lines),
                ('THIS', self.this_tree, this_lines)]
        data.append(('BASE', self.base_tree, base_lines))
        versioned = False
        file_group = []
        for suffix, tree, lines in data:
            if file_id in tree:
                trans_id = self._conflict_file(name, parent_id, tree, file_id,
                                               suffix, lines)
                file_group.append(trans_id)
                if set_version and not versioned:
                    self.tt.version_file(file_id, trans_id)
                    versioned = True
        return file_group

    def _conflict_file(self, name, parent_id, tree, file_id, suffix,
                       lines=None):
        """Emit a single conflict file."""
        name = name + '.' + suffix
        trans_id = self.tt.create_path(name, parent_id)
        entry = tree.inventory[file_id]
        create_by_entry(self.tt, entry, tree, trans_id, lines)
        return trans_id
    def merge_executable(self, file_id, file_status):
        """Perform a merge on the execute bit."""
        executable = [self.executable(t, file_id) for t in (self.base_tree,
                      self.other_tree, self.this_tree)]
        self._merge_executable(file_id, executable, file_status)

    def _merge_executable(self, file_id, executable, file_status):
        """Perform a merge on the execute bit."""
        base_executable, other_executable, this_executable = executable
        if file_status == "deleted":
            return
        trans_id = self.tt.trans_id_file_id(file_id)
        if self.tt.final_kind(trans_id) != "file":
            return
        winner = self._three_way(*executable)
        if winner == "conflict":
            # There must be a None in here, if we have a conflict, but we
            # need executability since file status was not deleted.
            if self.executable(self.other_tree, file_id) is None:
                winner = "this"
            else:
                winner = "other"
        if winner == "this":
            if file_status == "modified":
                executability = this_executable
                if executability is not None:
                    trans_id = self.tt.trans_id_file_id(file_id)
                    self.tt.set_executability(executability, trans_id)
        else:
            assert winner == "other"
            if file_id in self.other_tree:
                executability = other_executable
            elif file_id in self.this_tree:
                executability = this_executable
            elif file_id in self.base_tree:
                executability = base_executable
            if executability is not None:
                trans_id = self.tt.trans_id_file_id(file_id)
                self.tt.set_executability(executability, trans_id)
    def cook_conflicts(self, fs_conflicts):
        """Convert all conflicts into a form that doesn't depend on trans_id"""
        from conflicts import Conflict
        name_conflicts = {}
        self.cooked_conflicts.extend(cook_conflicts(fs_conflicts, self.tt))
        fp = FinalPaths(self.tt)
        for conflict in self._raw_conflicts:
            conflict_type = conflict[0]
            if conflict_type in ('name conflict', 'parent conflict'):
                trans_id = conflict[1]
                conflict_args = conflict[2:]
                if trans_id not in name_conflicts:
                    name_conflicts[trans_id] = {}
                unique_add(name_conflicts[trans_id], conflict_type,
                           conflict_args)
            if conflict_type == 'contents conflict':
                for trans_id in conflict[1]:
                    file_id = self.tt.final_file_id(trans_id)
                    if file_id is not None:
                        break
                path = fp.get_path(trans_id)
                for suffix in ('.BASE', '.THIS', '.OTHER'):
                    if path.endswith(suffix):
                        path = path[:-len(suffix)]
                        break
                c = Conflict.factory(conflict_type, path=path, file_id=file_id)
                self.cooked_conflicts.append(c)
            if conflict_type == 'text conflict':
                trans_id = conflict[1]
                path = fp.get_path(trans_id)
                file_id = self.tt.final_file_id(trans_id)
                c = Conflict.factory(conflict_type, path=path, file_id=file_id)
                self.cooked_conflicts.append(c)

        for trans_id, conflicts in name_conflicts.iteritems():
            try:
                this_parent, other_parent = conflicts['parent conflict']
                assert this_parent != other_parent
            except KeyError:
                this_parent = other_parent = \
                    self.tt.final_file_id(self.tt.final_parent(trans_id))
            try:
                this_name, other_name = conflicts['name conflict']
                assert this_name != other_name
            except KeyError:
                this_name = other_name = self.tt.final_name(trans_id)
            other_path = fp.get_path(trans_id)
            if this_parent is not None and this_name is not None:
                this_parent_path = \
                    fp.get_path(self.tt.trans_id_file_id(this_parent))
                this_path = pathjoin(this_parent_path, this_name)
            else:
                this_path = "<deleted>"
            file_id = self.tt.final_file_id(trans_id)
            c = Conflict.factory('path conflict', path=this_path,
                                 conflict_path=other_path, file_id=file_id)
            self.cooked_conflicts.append(c)
        self.cooked_conflicts.sort(key=Conflict.sort_key)
class WeaveMerger(Merge3Merger):
    """Three-way tree merger, text weave merger."""
    supports_reprocess = True
    supports_show_base = False
    supports_reverse_cherrypick = False
    history_based = True

    def __init__(self, working_tree, this_tree, base_tree, other_tree,
                 interesting_ids=None, pb=DummyProgress(), pp=None,
                 reprocess=False, change_reporter=None,
                 interesting_files=None, cherrypick=False, do_merge=True):
        self.cherrypick = cherrypick
        super(WeaveMerger, self).__init__(working_tree, this_tree,
                                          base_tree, other_tree,
                                          interesting_files=interesting_files,
                                          interesting_ids=interesting_ids,
                                          pb=pb, pp=pp, reprocess=reprocess,
                                          change_reporter=change_reporter,
                                          do_merge=do_merge)
    def _merged_lines(self, file_id):
        """Generate the merged lines.
        There is no distinction between lines that are meant to contain <<<<<<<
        and conflicts.
        """
        if self.cherrypick:
            base = self.base_tree
        else:
            base = None
        plan = self.this_tree.plan_file_merge(file_id, self.other_tree,
                                              base=base)
        if 'merge' in debug.debug_flags:
            trans_id = self.tt.trans_id_file_id(file_id)
            name = self.tt.final_name(trans_id) + '.plan'
            contents = ('%10s|%s' % l for l in plan)
            self.tt.new_file(name, self.tt.final_parent(trans_id), contents)
        textmerge = PlanWeaveMerge(plan, '<<<<<<< TREE\n',
                                   '>>>>>>> MERGE-SOURCE\n')
        return textmerge.merge_lines(self.reprocess)

    def text_merge(self, file_id, trans_id):
        """Perform a (weave) text merge for a given file and file-id.
        If conflicts are encountered, .THIS and .OTHER files will be emitted,
        and a conflict will be noted.
        """
        lines, conflicts = self._merged_lines(file_id)
        # Note we're checking whether the OUTPUT is binary in this case,
        # because we don't want to get into weave merge guts.
        check_text_lines(lines)
        self.tt.create_file(lines, trans_id)
        if conflicts:
            self._raw_conflicts.append(('text conflict', trans_id))
            name = self.tt.final_name(trans_id)
            parent_id = self.tt.final_parent(trans_id)
            file_group = self._dump_conflicts(name, parent_id, file_id)
            file_group.append(trans_id)
class LCAMerger(WeaveMerger):

    def _merged_lines(self, file_id):
        """Generate the merged lines.
        There is no distinction between lines that are meant to contain <<<<<<<
        and conflicts.
        """
        if self.cherrypick:
            base = self.base_tree
        else:
            base = None
        plan = self.this_tree.plan_file_lca_merge(file_id, self.other_tree,
                                                  base=base)
        if 'merge' in debug.debug_flags:
            trans_id = self.tt.trans_id_file_id(file_id)
            name = self.tt.final_name(trans_id) + '.plan'
            contents = ('%10s|%s' % l for l in plan)
            self.tt.new_file(name, self.tt.final_parent(trans_id), contents)
        textmerge = PlanWeaveMerge(plan, '<<<<<<< TREE\n',
                                   '>>>>>>> MERGE-SOURCE\n')
        return textmerge.merge_lines(self.reprocess)
class Diff3Merger(Merge3Merger):
    """Three-way merger using external diff3 for text merging"""

    def dump_file(self, temp_dir, name, tree, file_id):
        out_path = pathjoin(temp_dir, name)
        out_file = open(out_path, "wb")
        try:
            in_file = tree.get_file(file_id)
            for line in in_file:
                out_file.write(line)
        finally:
            out_file.close()
        return out_path

    def text_merge(self, file_id, trans_id):
        """Perform a diff3 merge using a specified file-id and trans-id.
        If conflicts are encountered, .BASE, .THIS and .OTHER conflict files
        will be dumped, and a conflict will be noted.
        """
        import bzrlib.patch
        temp_dir = osutils.mkdtemp(prefix="bzr-")
        try:
            new_file = pathjoin(temp_dir, "new")
            this = self.dump_file(temp_dir, "this", self.this_tree, file_id)
            base = self.dump_file(temp_dir, "base", self.base_tree, file_id)
            other = self.dump_file(temp_dir, "other", self.other_tree, file_id)
            status = bzrlib.patch.diff3(new_file, this, base, other)
            if status not in (0, 1):
                raise BzrError("Unhandled diff3 exit code")
            f = open(new_file, 'rb')
            try:
                self.tt.create_file(f, trans_id)
            finally:
                f.close()
            if status == 1:
                name = self.tt.final_name(trans_id)
                parent_id = self.tt.final_parent(trans_id)
                self._dump_conflicts(name, parent_id, file_id)
                self._raw_conflicts.append(('text conflict', trans_id))
        finally:
            osutils.rmtree(temp_dir)
def merge_inner(this_branch, other_tree, base_tree, ignore_zero=False,
                backup_files=False,
                merge_type=Merge3Merger,
                interesting_ids=None,
                show_base=False,
                reprocess=False,
                other_rev_id=None,
                interesting_files=None,
                this_tree=None,
                pb=DummyProgress(),
                change_reporter=None):
    """Primary interface for merging.

    typical use is probably
    'merge_inner(branch, branch.get_revision_tree(other_revision),
                 branch.get_revision_tree(base_revision))'
    """
    if this_tree is None:
        raise BzrError("bzrlib.merge.merge_inner requires a this_tree "
                       "parameter as of bzrlib version 0.8.")
    merger = Merger(this_branch, other_tree, base_tree, this_tree=this_tree,
                    pb=pb, change_reporter=change_reporter)
    merger.backup_files = backup_files
    merger.merge_type = merge_type
    merger.interesting_ids = interesting_ids
    merger.ignore_zero = ignore_zero
    if interesting_files:
        assert not interesting_ids, ('Only supply interesting_ids'
                                     ' or interesting_files')
        merger.interesting_files = interesting_files
    merger.show_base = show_base
    merger.reprocess = reprocess
    merger.other_rev_id = other_rev_id
    merger.other_basis = other_rev_id
    return merger.do_merge()
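# Illustrative sketch of calling merge_inner() (assumed usage; the working
# tree and revision ids are hypothetical placeholders):
#
#   conflicts = merge_inner(wt.branch,
#                           wt.branch.repository.revision_tree(other_rev_id),
#                           wt.branch.repository.revision_tree(base_rev_id),
#                           this_tree=wt,
#                           merge_type=WeaveMerger)
#
# merge_inner() copies the keyword options onto a Merger and returns
# merger.do_merge(), i.e. the number of conflicts produced.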
def get_merge_type_registry():
    """Merge type registry is in bzrlib.option to avoid circular imports.

    This method provides a sanctioned way to retrieve it.
    """
    from bzrlib import option
    return option._merge_type_registry
def _plan_annotate_merge(annotated_a, annotated_b, ancestors_a, ancestors_b):
    def status_a(revision, text):
        if revision in ancestors_b:
            return 'killed-b', text
        else:
            return 'new-a', text

    def status_b(revision, text):
        if revision in ancestors_a:
            return 'killed-a', text
        else:
            return 'new-b', text

    plain_a = [t for (a, t) in annotated_a]
    plain_b = [t for (a, t) in annotated_b]
    matcher = patiencediff.PatienceSequenceMatcher(None, plain_a, plain_b)
    blocks = matcher.get_matching_blocks()
    a_cur = 0
    b_cur = 0
    for ai, bi, l in blocks:
        # process all mismatched sections
        # (last mismatched section is handled because blocks always
        # includes a 0-length last block)
        for revision, text in annotated_a[a_cur:ai]:
            yield status_a(revision, text)
        for revision, text in annotated_b[b_cur:bi]:
            yield status_b(revision, text)
        # and now the matched section
        a_cur = ai + l
        b_cur = bi + l
        for text_a, text_b in zip(plain_a[ai:a_cur], plain_b[bi:b_cur]):
            assert text_a == text_b
            yield "unchanged", text_a
class _PlanMergeBase(object):

    def __init__(self, a_rev, b_rev, vf):
        """
        :param a_rev: Revision-id of one revision to merge
        :param b_rev: Revision-id of the other revision to merge
        :param vf: A versionedfile containing both revisions
        """
        self.a_rev = a_rev
        self.b_rev = b_rev
        self.vf = vf
        self.lines_a = vf.get_lines(a_rev)
        self.lines_b = vf.get_lines(b_rev)
        self._last_lines = None
        self._last_lines_revision_id = None
        self._cached_matching_blocks = {}

    def plan_merge(self):
        """Generate a 'plan' for merging the two revisions.

        This involves comparing their texts and determining the cause of
        differences.  If text A has a line and text B does not, then either the
        line was added to text A, or it was deleted from B.  Once the causes
        are combined, they are written out in the format described in
        VersionedFile.plan_merge
        """
        blocks = self._get_matching_blocks(self.a_rev, self.b_rev)
        unique_a, unique_b = self._unique_lines(blocks)
        new_a, killed_b = self._determine_status(self.a_rev, unique_a)
        new_b, killed_a = self._determine_status(self.b_rev, unique_b)
        return self._iter_plan(blocks, new_a, killed_b, new_b, killed_a)
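    # Illustrative sketch of a plan (hypothetical lines): merging revision A
    # = ["x\n", "a\n"] with revision B = ["x\n", "b\n"] yields tuples like
    #   ('unchanged', 'x\n')
    #   ('new-a', 'a\n')   # or 'killed-b' if the line replaced one from B
    #   ('new-b', 'b\n')   # or 'killed-a', symmetrically
    # which PlanWeaveMerge then folds into merged text or conflict regions.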
    def _iter_plan(self, blocks, new_a, killed_b, new_b, killed_a):
        last_i = 0
        last_j = 0
        for i, j, n in blocks:
            for a_index in range(last_i, i):
                if a_index in new_a:
                    if a_index in killed_b:
                        yield 'conflicted-a', self.lines_a[a_index]
                    else:
                        yield 'new-a', self.lines_a[a_index]
                else:
                    yield 'killed-b', self.lines_a[a_index]
            for b_index in range(last_j, j):
                if b_index in new_b:
                    if b_index in killed_a:
                        yield 'conflicted-b', self.lines_b[b_index]
                    else:
                        yield 'new-b', self.lines_b[b_index]
                else:
                    yield 'killed-a', self.lines_b[b_index]
            # handle common lines
            for a_index in range(i, i+n):
                yield 'unchanged', self.lines_a[a_index]
            last_i = i + n
            last_j = j + n
    def _get_matching_blocks(self, left_revision, right_revision):
        """Return a description of which sections of two revisions match.

        See SequenceMatcher.get_matching_blocks
        """
        cached = self._cached_matching_blocks.get((left_revision,
                                                   right_revision))
        if cached is not None:
            return cached
        if self._last_lines_revision_id == left_revision:
            left_lines = self._last_lines
        else:
            left_lines = self.vf.get_lines(left_revision)
        right_lines = self.vf.get_lines(right_revision)
        self._last_lines = right_lines
        self._last_lines_revision_id = right_revision
        matcher = patiencediff.PatienceSequenceMatcher(None, left_lines,
                                                       right_lines)
        return matcher.get_matching_blocks()

    def _unique_lines(self, matching_blocks):
        """Analyse matching_blocks to determine which lines are unique

        :return: a tuple of (unique_left, unique_right), where the values are
            sets of line numbers of unique lines.
        """
        last_i = 0
        last_j = 0
        unique_left = []
        unique_right = []
        for i, j, n in matching_blocks:
            unique_left.extend(range(last_i, i))
            unique_right.extend(range(last_j, j))
            last_i = i + n
            last_j = j + n
        return unique_left, unique_right
    @staticmethod
    def _subtract_plans(old_plan, new_plan):
        """Remove changes from new_plan that came from old_plan.

        It is assumed that the difference between the old_plan and new_plan
        is their choice of 'b' text.

        All lines from new_plan that differ from old_plan are emitted
        verbatim.  All lines from new_plan that match old_plan but are
        not about the 'b' revision are emitted verbatim.

        Lines that match and are about the 'b' revision are the lines we
        don't want, so we convert 'killed-b' -> 'unchanged', and 'new-b'
        is skipped entirely.
        """
        matcher = patiencediff.PatienceSequenceMatcher(None, old_plan,
                                                       new_plan)
        last_j = 0
        for i, j, n in matcher.get_matching_blocks():
            for jj in range(last_j, j):
                yield new_plan[jj]
            for jj in range(j, j+n):
                plan_line = new_plan[jj]
                if plan_line[0] == 'new-b':
                    pass
                elif plan_line[0] == 'killed-b':
                    yield 'unchanged', plan_line[1]
                else:
                    yield plan_line
            last_j = j + n
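    # Worked example for _subtract_plans() (hypothetical plans): an entry
    # ('killed-b', 'x\n') present in both plans is re-emitted as
    # ('unchanged', 'x\n'); a shared ('new-b', 'y\n') is dropped entirely;
    # and any entry unique to new_plan is yielded verbatim.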
class _PlanMerge(_PlanMergeBase):
    """Plan an annotate merge using on-the-fly annotation"""

    def __init__(self, a_rev, b_rev, vf):
        _PlanMergeBase.__init__(self, a_rev, b_rev, vf)
        a_ancestry = set(vf.get_ancestry(a_rev, topo_sorted=False))
        b_ancestry = set(vf.get_ancestry(b_rev, topo_sorted=False))
        self.uncommon = a_ancestry.symmetric_difference(b_ancestry)

    def _determine_status(self, revision_id, unique_line_numbers):
        """Determines the status of unique lines versus all LCAs.

        Basically, determines why the line is unique to this revision.

        A line may be determined new or killed, but not both.

        :param revision_id: The id of the revision in which the lines are
            unique
        :param unique_line_numbers: The line numbers of unique lines.
        :return: a tuple of (new_this, killed_other)
        """
        new = self._find_new(revision_id)
        killed = set(unique_line_numbers).difference(new)
        return new, killed
    def _find_new(self, version_id):
        """Determine which lines are new in the ancestry of this version.

        If a line is present in this version, and not present in any
        common ancestor, it is considered new.
        """
        if version_id not in self.uncommon:
            return set()
        parents = self.vf.get_parents(version_id)
        if len(parents) == 0:
            return set(range(len(self.vf.get_lines(version_id))))
        new = None
        for parent in parents:
            blocks = self._get_matching_blocks(version_id, parent)
            result, unused = self._unique_lines(blocks)
            parent_new = self._find_new(parent)
            for i, j, n in blocks:
                for ii, jj in [(i+r, j+r) for r in range(n)]:
                    if jj in parent_new:
                        result.append(ii)
            if new is None:
                new = set(result)
            else:
                new.intersection_update(result)
        return new
class _PlanLCAMerge(_PlanMergeBase):
    """
    This merge algorithm differs from _PlanMerge in that:
    1. comparisons are done against LCAs only
    2. cases where a contested line is new versus one LCA but old versus
       another are marked as conflicts, by emitting the line as conflicted-a
       or conflicted-b.

    This is faster, and hopefully produces more useful output.
    """

    def __init__(self, a_rev, b_rev, vf, graph):
        _PlanMergeBase.__init__(self, a_rev, b_rev, vf)
        self.lcas = graph.find_lca(a_rev, b_rev)
        for lca in self.lcas:
            lca_lines = self.vf.get_lines(lca)
            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_a,
                                                           lca_lines)
            blocks = list(matcher.get_matching_blocks())
            self._cached_matching_blocks[(a_rev, lca)] = blocks
            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_b,
                                                           lca_lines)
            blocks = list(matcher.get_matching_blocks())
            self._cached_matching_blocks[(b_rev, lca)] = blocks
    def _determine_status(self, revision_id, unique_line_numbers):
        """Determines the status of unique lines versus all LCAs.

        Basically, determines why the line is unique to this revision.

        A line may be determined new, killed, or both.

        If a line is determined new, that means it was not present in at least
        one LCA, and is not present in the other merge revision.

        If a line is determined killed, that means the line was present in
        at least one LCA.

        If a line is killed and new, this indicates that the two merge
        revisions contain differing conflict resolutions.

        :param revision_id: The id of the revision in which the lines are
            unique
        :param unique_line_numbers: The line numbers of unique lines.
        :return: a tuple of (new_this, killed_other)
        """
        new = set()
        killed = set()
        unique_line_numbers = set(unique_line_numbers)
        for lca in self.lcas:
            blocks = self._get_matching_blocks(revision_id, lca)
            unique_vs_lca, _ignored = self._unique_lines(blocks)
            new.update(unique_line_numbers.intersection(unique_vs_lca))
            killed.update(unique_line_numbers.difference(unique_vs_lca))
        return new, killed