1
# Copyright (C) 2005 Canonical Ltd
1
# Copyright (C) 2005, 2006, 2008 Canonical Ltd
3
3
# This program is free software; you can redistribute it and/or modify
4
4
# it under the terms of the GNU General Public License as published by
5
5
# the Free Software Foundation; either version 2 of the License, or
6
6
# (at your option) any later version.
8
8
# This program is distributed in the hope that it will be useful,
9
9
# but WITHOUT ANY WARRANTY; without even the implied warranty of
10
10
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11
11
# GNU General Public License for more details.
13
13
# You should have received a copy of the GNU General Public License
14
14
# along with this program; if not, write to the Free Software
15
15
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
17
# TODO: build_working_dir can be built on something simpler than merge()
19
from itertools import chain
23
from bzrlib._changeset import generate_changeset, ExceptionConflictHandler
24
from bzrlib._changeset import Inventory, Diff3Merge, ReplaceContents
25
from bzrlib._merge_core import WeaveMerge
26
from bzrlib._merge_core import merge_flex, ApplyMerge3, BackupBeforeChange
30
revision as _mod_revision,
27
33
from bzrlib.branch import Branch
28
from bzrlib.delta import compare_trees
34
from bzrlib.conflicts import ConflictList, Conflict
29
35
from bzrlib.errors import (BzrCommandError,
34
45
WorkingTreeNotRevision,
37
from bzrlib.fetch import greedy_fetch, fetch
48
from bzrlib.graph import Graph
49
from bzrlib.merge3 import Merge3
39
50
from bzrlib.osutils import rename, pathjoin
40
from bzrlib.revision import common_ancestor, MultipleRevisionSources
41
from bzrlib.revision import is_ancestor, NULL_REVISION
42
from bzrlib.trace import mutter, warning, note
51
from progress import DummyProgress, ProgressPhase
52
from bzrlib.revision import (NULL_REVISION, ensure_null)
53
from bzrlib.textfile import check_text_lines
54
from bzrlib.trace import mutter, warning, note, is_quiet
55
from bzrlib.transform import (TransformPreview, TreeTransform,
56
resolve_conflicts, cook_conflicts,
57
conflict_pass, FinalPaths, create_by_entry,
58
unique_add, ROOT_PARENT)
59
from bzrlib.versionedfile import PlanWeaveMerge
44
62
# TODO: Report back as changes are merged in
46
# comments from abentley on irc: merge happens in two stages, each
47
# of which generates a changeset object
49
# stage 1: generate OLD->OTHER,
50
# stage 2: use MINE and OLD->OTHER to generate MINE -> RESULT
52
class _MergeConflictHandler(ExceptionConflictHandler):
53
"""Handle conflicts encountered while merging.
55
This subclasses ExceptionConflictHandler, so that any types of
56
conflict that are not explicitly handled cause an exception and
59
def __init__(self, this_tree, base_tree, other_tree, ignore_zero=False):
60
ExceptionConflictHandler.__init__(self)
62
self.ignore_zero = ignore_zero
63
self.this_tree = this_tree
64
self.base_tree = base_tree
65
self.other_tree = other_tree
67
def copy(self, source, dest):
68
"""Copy the text and mode of a file
69
:param source: The path of the file to copy
70
:param dest: The distination file to create
72
s_file = file(source, "rb")
73
d_file = file(dest, "wb")
76
os.chmod(dest, 0777 & os.stat(source).st_mode)
78
def dump(self, lines, dest):
79
"""Copy the text and mode of a file
80
:param source: The path of the file to copy
81
:param dest: The distination file to create
83
d_file = file(dest, "wb")
87
def add_suffix(self, name, suffix, last_new_name=None, fix_inventory=True):
88
"""Rename a file to append a suffix. If the new name exists, the
89
suffix is added repeatedly until a non-existant name is found
91
:param name: The path of the file
92
:param suffix: The suffix to append
93
:param last_new_name: (used for recursive calls) the last name tried
95
if last_new_name is None:
97
new_name = last_new_name+suffix
99
rename(name, new_name)
100
if fix_inventory is True:
102
relpath = self.this_tree.relpath(name)
103
except NotBranchError:
105
if relpath is not None:
106
file_id = self.this_tree.path2id(relpath)
107
if file_id is not None:
108
new_path = self.this_tree.relpath(new_name)
109
rename(new_name, name)
110
self.this_tree.rename_one(relpath, new_path)
111
assert self.this_tree.id2path(file_id) == new_path
113
if e.errno != errno.EEXIST and e.errno != errno.ENOTEMPTY:
115
return self.add_suffix(name, suffix, last_new_name=new_name,
116
fix_inventory=fix_inventory)
119
def conflict(self, text):
124
def merge_conflict(self, new_file, this_path, base_lines, other_lines):
126
Handle diff3 conflicts by producing a .THIS, .BASE and .OTHER. The
127
main file will be a version with diff3 conflicts.
128
:param new_file: Path to the output file with diff3 markers
129
:param this_path: Path to the file text for the THIS tree
130
:param base_path: Path to the file text for the BASE tree
131
:param other_path: Path to the file text for the OTHER tree
133
self.add_suffix(this_path, ".THIS", fix_inventory=False)
134
self.dump(base_lines, this_path+".BASE")
135
self.dump(other_lines, this_path+".OTHER")
136
rename(new_file, this_path)
137
self.conflict("Diff3 conflict encountered in %s" % this_path)
139
def weave_merge_conflict(self, filename, weave, other_i, out_file):
141
Handle weave conflicts by producing a .THIS, and .OTHER. The
142
main file will be a version with diff3-style conflicts.
144
self.add_suffix(filename, ".THIS", fix_inventory=False)
146
self.dump(weave.get_iter(other_i), filename+".OTHER")
147
self.conflict("Text conflict encountered in %s" % filename)
149
def new_contents_conflict(self, filename, other_contents):
150
"""Conflicting contents for newly added file."""
151
other_contents(filename + ".OTHER", self, False)
152
self.conflict("Conflict in newly added file %s" % filename)
155
def target_exists(self, entry, target, old_path):
156
"""Handle the case when the target file or dir exists"""
157
moved_path = self.add_suffix(target, ".moved")
158
self.conflict("Moved existing %s to %s" % (target, moved_path))
160
def rmdir_non_empty(self, filename):
161
"""Handle the case where the dir to be removed still has contents"""
162
self.conflict("Directory %s not removed because it is not empty"\
166
def rem_contents_conflict(self, filename, this_contents, base_contents):
167
base_contents(filename+".BASE", self)
168
this_contents(filename+".THIS", self)
169
self.conflict("Other branch deleted locally modified file %s" %
171
return ReplaceContents(this_contents, None)
173
def abs_this_path(self, file_id):
174
"""Return the absolute path for a file_id in the this tree."""
175
return self.this_tree.id2abspath(file_id)
177
def add_missing_parents(self, file_id, tree):
178
"""If some of the parents for file_id are missing, add them."""
179
entry = tree.inventory[file_id]
180
if entry.parent_id not in self.this_tree:
181
return self.create_all_missing(entry.parent_id, tree)
183
return self.abs_this_path(entry.parent_id)
185
def create_all_missing(self, file_id, tree):
186
"""Add contents for a file_id and all its parents to a tree."""
187
entry = tree.inventory[file_id]
188
if entry.parent_id is not None and entry.parent_id not in self.this_tree:
189
abspath = self.create_all_missing(entry.parent_id, tree)
191
abspath = self.abs_this_path(entry.parent_id)
192
entry_path = pathjoin(abspath, entry.name)
193
if not os.path.isdir(entry_path):
194
self.create(file_id, entry_path, tree)
197
def create(self, file_id, path, tree):
198
"""Uses tree data to create a filesystem object for the file_id"""
199
from bzrlib._changeset import get_contents
200
get_contents(tree, file_id)(path, self)
202
def missing_for_merge(self, file_id, other_path):
203
"""The file_id doesn't exist in THIS, but does in OTHER and BASE"""
204
self.conflict("Other branch modified locally deleted file %s" %
206
parent_dir = self.add_missing_parents(file_id, self.other_tree)
207
stem = pathjoin(parent_dir, os.path.basename(other_path))
208
self.create(file_id, stem+".OTHER", self.other_tree)
209
self.create(file_id, stem+".BASE", self.base_tree)
211
def threeway_contents_conflict(filename, this_contents, base_contents,
213
self.conflict("Three-way conflict merging %s" % filename)
216
if self.conflicts == 0:
217
if not self.ignore_zero:
218
note("All changes applied successfully.")
220
note("%d conflicts encountered." % self.conflicts)
222
def _get_tree(treespec, local_branch=None):
223
location, revno = treespec
224
branch = Branch.open_containing(location)[0]
228
revision = branch.last_revision()
230
revision = branch.get_rev_id(revno)
232
revision = NULL_REVISION
233
return branch, _get_revid_tree(branch, revision, local_branch)
236
def _get_revid_tree(branch, revision, local_branch):
238
base_tree = branch.working_tree()
240
if local_branch is not None:
241
if local_branch.base != branch.base:
242
greedy_fetch(local_branch, branch, revision)
243
base_tree = local_branch.repository.revision_tree(revision)
245
base_tree = branch.repository.revision_tree(revision)
249
def build_working_dir(to_dir):
250
"""Build a working directory in an empty directory.
252
to_dir is a directory containing branch metadata but no working files,
253
typically constructed by cloning an existing branch.
255
This is split out as a special idiomatic case of merge. It could
256
eventually be done by just building the tree directly calling into
257
lower-level code (e.g. constructing a changeset).
259
# RBC 20051019 is this not just 'export' ?
260
# AB Well, export doesn't take care of inventory...
261
this_branch = Branch.open_containing(to_dir)[0]
262
transform_tree(this_branch.working_tree(), this_branch.basis_tree())
265
65
def transform_tree(from_tree, to_tree, interesting_ids=None):
266
66
merge_inner(from_tree.branch, to_tree, from_tree, ignore_zero=True,
267
interesting_ids=interesting_ids)
270
def merge_inner(this_branch, other_tree, base_tree, ignore_zero=False,
272
merge_type=ApplyMerge3,
273
interesting_ids=None,
277
interesting_files=None):
278
"""Primary interface for merging.
280
typical use is probably
281
'merge_inner(branch, branch.get_revision_tree(other_revision),
282
branch.get_revision_tree(base_revision))'
284
merger = Merger(this_branch, other_tree, base_tree)
285
merger.backup_files = backup_files
286
merger.merge_type = merge_type
287
merger.interesting_ids = interesting_ids
288
if interesting_files:
289
assert not interesting_ids, ('Only supply interesting_ids'
290
' or interesting_files')
291
merger._set_interesting_files(interesting_files)
292
merger.show_base = show_base
293
merger.reprocess = reprocess
294
merger.conflict_handler = _MergeConflictHandler(merger.this_tree,
295
base_tree, other_tree,
296
ignore_zero=ignore_zero)
297
merger.other_rev_id = other_rev_id
298
merger.other_basis = other_rev_id
299
return merger.do_merge()
67
interesting_ids=interesting_ids, this_tree=from_tree)
302
70
class Merger(object):
303
def __init__(self, this_branch, other_tree=None, base_tree=None):
71
def __init__(self, this_branch, other_tree=None, base_tree=None,
72
this_tree=None, pb=DummyProgress(), change_reporter=None,
73
recurse='down', revision_graph=None):
304
74
object.__init__(self)
305
75
self.this_branch = this_branch
306
self.this_basis = this_branch.last_revision()
76
self.this_basis = _mod_revision.ensure_null(
77
this_branch.last_revision())
307
78
self.this_rev_id = None
308
self.this_tree = this_branch.working_tree()
79
self.this_tree = this_tree
309
80
self.this_revision_tree = None
310
81
self.this_basis_tree = None
311
82
self.other_tree = other_tree
83
self.other_branch = None
312
84
self.base_tree = base_tree
313
85
self.ignore_zero = False
314
86
self.backup_files = False
315
87
self.interesting_ids = None
88
self.interesting_files = None
316
89
self.show_base = False
317
90
self.reprocess = False
318
self.conflict_handler = _MergeConflictHandler(self.this_tree,
319
base_tree, other_tree)
321
def revision_tree(self, revision_id):
322
return self.this_branch.repository.revision_tree(revision_id)
93
self.recurse = recurse
94
self.change_reporter = change_reporter
95
self._cached_trees = {}
96
self._revision_graph = revision_graph
97
self._base_is_ancestor = None
98
self._base_is_other_ancestor = None
101
def revision_graph(self):
102
if self._revision_graph is None:
103
self._revision_graph = self.this_branch.repository.get_graph()
104
return self._revision_graph
106
def _set_base_is_ancestor(self, value):
107
self._base_is_ancestor = value
109
def _get_base_is_ancestor(self):
110
if self._base_is_ancestor is None:
111
self._base_is_ancestor = self.revision_graph.is_ancestor(
112
self.base_rev_id, self.this_basis)
113
return self._base_is_ancestor
115
base_is_ancestor = property(_get_base_is_ancestor, _set_base_is_ancestor)
117
def _set_base_is_other_ancestor(self, value):
118
self._base_is_other_ancestor = value
120
def _get_base_is_other_ancestor(self):
121
if self._base_is_other_ancestor is None:
122
if self.other_basis is None:
124
self._base_is_other_ancestor = self.revision_graph.is_ancestor(
125
self.base_rev_id, self.other_basis)
126
return self._base_is_other_ancestor
128
base_is_other_ancestor = property(_get_base_is_other_ancestor,
129
_set_base_is_other_ancestor)
132
def from_uncommitted(tree, other_tree, pb):
133
"""Return a Merger for uncommitted changes in other_tree.
135
:param tree: The tree to merge into
136
:param other_tree: The tree to get uncommitted changes from
137
:param pb: A progress indicator
139
merger = Merger(tree.branch, other_tree, other_tree.basis_tree(), tree,
141
merger.base_rev_id = merger.base_tree.get_revision_id()
142
merger.other_rev_id = None
143
merger.other_basis = merger.base_rev_id
147
def from_mergeable(klass, tree, mergeable, pb):
148
"""Return a Merger for a bundle or merge directive.
150
:param tree: The tree to merge changes into
151
:param mergeable: A merge directive or bundle
152
:param pb: A progress indicator
154
mergeable.install_revisions(tree.branch.repository)
155
base_revision_id, other_revision_id, verified =\
156
mergeable.get_merge_request(tree.branch.repository)
157
revision_graph = tree.branch.repository.get_graph()
158
if base_revision_id is not None:
159
if (base_revision_id != _mod_revision.NULL_REVISION and
160
revision_graph.is_ancestor(
161
base_revision_id, tree.branch.last_revision())):
162
base_revision_id = None
164
warning('Performing cherrypick')
165
merger = klass.from_revision_ids(pb, tree, other_revision_id,
166
base_revision_id, revision_graph=
168
return merger, verified
171
def from_revision_ids(pb, tree, other, base=None, other_branch=None,
172
base_branch=None, revision_graph=None):
173
"""Return a Merger for revision-ids.
175
:param tree: The tree to merge changes into
176
:param other: The revision-id to use as OTHER
177
:param base: The revision-id to use as BASE. If not specified, will
179
:param other_branch: A branch containing the other revision-id. If
180
not supplied, tree.branch is used.
181
:param base_branch: A branch containing the base revision-id. If
182
not supplied, other_branch or tree.branch will be used.
183
:param revision_graph: If you have a revision_graph precomputed, pass
184
it in, otherwise it will be created for you.
185
:param pb: A progress indicator
187
merger = Merger(tree.branch, this_tree=tree, pb=pb,
188
revision_graph=revision_graph)
189
if other_branch is None:
190
other_branch = tree.branch
191
merger.set_other_revision(other, other_branch)
195
if base_branch is None:
196
base_branch = other_branch
197
merger.set_base_revision(base, base_branch)
200
def revision_tree(self, revision_id, branch=None):
201
if revision_id not in self._cached_trees:
203
branch = self.this_branch
205
tree = self.this_tree.revision_tree(revision_id)
206
except errors.NoSuchRevisionInTree:
207
tree = branch.repository.revision_tree(revision_id)
208
self._cached_trees[revision_id] = tree
209
return self._cached_trees[revision_id]
211
def _get_tree(self, treespec, possible_transports=None):
212
from bzrlib import workingtree
213
location, revno = treespec
215
tree = workingtree.WorkingTree.open_containing(location)[0]
216
return tree.branch, tree
217
branch = Branch.open_containing(location, possible_transports)[0]
219
revision_id = branch.last_revision()
221
revision_id = branch.get_rev_id(revno)
222
revision_id = ensure_null(revision_id)
223
return branch, self.revision_tree(revision_id, branch)
324
225
def ensure_revision_trees(self):
325
226
if self.this_revision_tree is None:
326
self.this_basis_tree = self.this_branch.repository.revision_tree(
227
self.this_basis_tree = self.revision_tree(self.this_basis)
328
228
if self.this_basis == self.this_rev_id:
329
229
self.this_revision_tree = self.this_basis_tree
331
231
if self.other_rev_id is None:
332
232
other_basis_tree = self.revision_tree(self.other_basis)
333
changes = compare_trees(self.other_tree, other_basis_tree)
233
changes = other_basis_tree.changes_from(self.other_tree)
334
234
if changes.has_changed():
335
235
raise WorkingTreeNotRevision(self.this_tree)
336
other_rev_id = other_basis
236
other_rev_id = self.other_basis
337
237
self.other_tree = other_basis_tree
339
239
def file_revisions(self, file_id):
340
240
self.ensure_revision_trees()
341
241
def get_id(tree, file_id):
342
242
revision_id = tree.inventory[file_id].revision
343
assert revision_id is not None
344
243
return revision_id
345
244
if self.this_rev_id is None:
346
245
if self.this_basis_tree.get_file_sha1(file_id) != \
350
249
trees = (self.this_basis_tree, self.other_tree)
351
250
return [get_id(tree, file_id) for tree in trees]
353
def merge_factory(self, file_id, base, other):
354
if self.merge_type.history_based:
355
if self.show_base is True:
356
raise BzrError("Cannot show base for hisory-based merges")
357
if self.reprocess is True:
358
raise BzrError("Cannot reprocess history-based merges")
360
t_revid, o_revid = self.file_revisions(file_id)
361
weave = self.this_basis_tree.get_weave(file_id)
362
contents_change = self.merge_type(weave, t_revid, o_revid)
364
if self.show_base is True or self.reprocess is True:
365
contents_change = self.merge_type(file_id, base, other,
366
show_base=self.show_base,
367
reprocess=self.reprocess)
369
contents_change = self.merge_type(file_id, base, other)
370
if self.backup_files:
371
contents_change = BackupBeforeChange(contents_change)
372
return contents_change
374
def check_basis(self, check_clean):
375
if self.this_basis is None:
376
raise BzrCommandError("This branch has no commits")
252
def check_basis(self, check_clean, require_commits=True):
253
if self.this_basis is None and require_commits is True:
254
raise BzrCommandError("This branch has no commits."
255
" (perhaps you would prefer 'bzr pull')")
378
257
self.compare_basis()
379
258
if self.this_basis != self.this_rev_id:
380
raise BzrCommandError("Working tree has uncommitted changes.")
259
raise errors.UncommittedChanges(self.this_tree)
382
261
def compare_basis(self):
383
changes = compare_trees(self.this_branch.working_tree(),
384
self.this_branch.basis_tree(), False)
263
basis_tree = self.revision_tree(self.this_tree.last_revision())
264
except errors.NoSuchRevision:
265
basis_tree = self.this_tree.basis_tree()
266
changes = self.this_tree.changes_from(basis_tree)
385
267
if not changes.has_changed():
386
268
self.this_rev_id = self.this_basis
388
270
def set_interesting_files(self, file_list):
390
self._set_interesting_files(file_list)
391
except NotVersionedError, e:
392
raise BzrCommandError("%s is not a source file in any"
395
def _set_interesting_files(self, file_list):
396
"""Set the list of interesting ids from a list of files."""
397
if file_list is None:
398
self.interesting_ids = None
401
interesting_ids = set()
402
for path in file_list:
404
for tree in (self.this_tree, self.base_tree, self.other_tree):
405
file_id = tree.inventory.path2id(path)
406
if file_id is not None:
407
interesting_ids.add(file_id)
410
raise NotVersionedError(path=path)
411
self.interesting_ids = interesting_ids
271
self.interesting_files = file_list
413
273
def set_pending(self):
414
if not self.base_is_ancestor:
416
if self.other_rev_id is None:
418
ancestry = self.this_branch.repository.get_ancestry(self.this_basis)
419
if self.other_rev_id in ancestry:
421
self.this_branch.working_tree().add_pending_merge(self.other_rev_id)
423
def set_other(self, other_revision):
424
other_branch, self.other_tree = _get_tree(other_revision,
274
if not self.base_is_ancestor or not self.base_is_other_ancestor or self.other_rev_id is None:
278
def _add_parent(self):
279
new_parents = self.this_tree.get_parent_ids() + [self.other_rev_id]
280
new_parent_trees = []
281
for revision_id in new_parents:
283
tree = self.revision_tree(revision_id)
284
except errors.NoSuchRevision:
288
new_parent_trees.append((revision_id, tree))
290
self.this_tree.set_parent_trees(new_parent_trees,
291
allow_leftmost_as_ghost=True)
293
for _revision_id, tree in new_parent_trees:
297
def set_other(self, other_revision, possible_transports=None):
298
"""Set the revision and tree to merge from.
300
This sets the other_tree, other_rev_id, other_basis attributes.
302
:param other_revision: The [path, revision] list to merge from.
304
self.other_branch, self.other_tree = self._get_tree(other_revision,
426
306
if other_revision[1] == -1:
427
self.other_rev_id = other_branch.last_revision()
428
if self.other_rev_id is None:
429
raise NoCommits(other_branch)
307
self.other_rev_id = _mod_revision.ensure_null(
308
self.other_branch.last_revision())
309
if _mod_revision.is_null(self.other_rev_id):
310
raise NoCommits(self.other_branch)
430
311
self.other_basis = self.other_rev_id
431
312
elif other_revision[1] is not None:
432
self.other_rev_id = other_branch.get_rev_id(other_revision[1])
313
self.other_rev_id = self.other_branch.get_rev_id(other_revision[1])
433
314
self.other_basis = self.other_rev_id
435
316
self.other_rev_id = None
436
self.other_basis = other_branch.last_revision()
317
self.other_basis = self.other_branch.last_revision()
437
318
if self.other_basis is None:
438
raise NoCommits(other_branch)
439
if other_branch.base != self.this_branch.base:
440
fetch(from_branch=other_branch, to_branch=self.this_branch,
441
last_revision=self.other_basis)
319
raise NoCommits(self.other_branch)
320
if self.other_rev_id is not None:
321
self._cached_trees[self.other_rev_id] = self.other_tree
322
self._maybe_fetch(self.other_branch,self.this_branch, self.other_basis)
324
def set_other_revision(self, revision_id, other_branch):
325
"""Set 'other' based on a branch and revision id
327
:param revision_id: The revision to use for a tree
328
:param other_branch: The branch containing this tree
330
self.other_rev_id = revision_id
331
self.other_branch = other_branch
332
self._maybe_fetch(other_branch, self.this_branch, self.other_rev_id)
333
self.other_tree = self.revision_tree(revision_id)
334
self.other_basis = revision_id
336
def set_base_revision(self, revision_id, branch):
337
"""Set 'base' based on a branch and revision id
339
:param revision_id: The revision to use for a tree
340
:param branch: The branch containing this tree
342
self.base_rev_id = revision_id
343
self.base_branch = branch
344
self._maybe_fetch(branch, self.this_branch, revision_id)
345
self.base_tree = self.revision_tree(revision_id)
347
def _maybe_fetch(self, source, target, revision_id):
348
if not source.repository.has_same_location(target.repository):
349
target.fetch(source, revision_id)
352
revisions = [ensure_null(self.this_basis),
353
ensure_null(self.other_basis)]
354
if NULL_REVISION in revisions:
355
self.base_rev_id = NULL_REVISION
357
self.base_rev_id, steps = self.revision_graph.find_unique_lca(
358
revisions[0], revisions[1], count_steps=True)
359
if self.base_rev_id == NULL_REVISION:
360
raise UnrelatedBranches()
362
warning('Warning: criss-cross merge encountered. See bzr'
363
' help criss-cross.')
364
self.base_tree = self.revision_tree(self.base_rev_id)
365
self.base_is_ancestor = True
366
self.base_is_other_ancestor = True
443
368
def set_base(self, base_revision):
369
"""Set the base revision to use for the merge.
371
:param base_revision: A 2-list containing a path and revision number.
444
373
mutter("doing merge() with no base_revision specified")
445
374
if base_revision == [None, None]:
447
self.base_rev_id = common_ancestor(self.this_basis,
449
self.this_branch.repository)
450
except NoCommonAncestor:
451
raise UnrelatedBranches()
452
self.base_tree = _get_revid_tree(self.this_branch, self.base_rev_id,
454
self.base_is_ancestor = True
456
base_branch, self.base_tree = _get_tree(base_revision)
377
base_branch, self.base_tree = self._get_tree(base_revision)
457
378
if base_revision[1] == -1:
458
379
self.base_rev_id = base_branch.last_revision()
459
380
elif base_revision[1] is None:
460
self.base_rev_id = None
462
self.base_rev_id = base_branch.get_rev_id(base_revision[1])
463
fetch(from_branch=base_branch, to_branch=self.this_branch)
464
self.base_is_ancestor = is_ancestor(self.this_basis,
469
def get_inventory(tree):
470
return tree.inventory
472
inv_changes = merge_flex(self.this_tree, self.base_tree,
474
generate_changeset, get_inventory,
475
self.conflict_handler,
476
merge_factory=self.merge_factory,
477
interesting_ids=self.interesting_ids)
480
for id, path in inv_changes.iteritems():
485
assert path.startswith('.' + '/') or path.startswith('.' + '\\'), "path is %s" % path
487
adjust_ids.append((path, id))
488
if len(adjust_ids) > 0:
489
self.this_branch.working_tree().set_inventory(self.regen_inventory(adjust_ids))
490
conflicts = self.conflict_handler.conflicts
491
self.conflict_handler.finalize()
494
def regen_inventory(self, new_entries):
495
old_entries = self.this_branch.working_tree().read_working_inventory()
499
for path, file_id in new_entries:
502
new_entries_map[file_id] = path
504
def id2path(file_id):
505
path = new_entries_map.get(file_id)
508
entry = old_entries[file_id]
509
if entry.parent_id is None:
511
return pathjoin(id2path(entry.parent_id), entry.name)
513
for file_id in old_entries:
514
entry = old_entries[file_id]
515
path = id2path(file_id)
516
new_inventory[file_id] = (path, file_id, entry.parent_id,
518
by_path[path] = file_id
523
for path, file_id in new_entries:
525
del new_inventory[file_id]
528
new_path_list.append((path, file_id))
529
if file_id not in old_entries:
531
# Ensure no file is added before its parent
533
for path, file_id in new_path_list:
537
parent = by_path[os.path.dirname(path)]
538
abspath = pathjoin(self.this_tree.basedir, path)
539
kind = bzrlib.osutils.file_kind(abspath)
540
new_inventory[file_id] = (path, file_id, parent, kind)
541
by_path[path] = file_id
543
# Get a list in insertion order
544
new_inventory_list = new_inventory.values()
545
mutter ("""Inventory regeneration:
546
old length: %i insertions: %i deletions: %i new_length: %i"""\
547
% (len(old_entries), insertions, deletions,
548
len(new_inventory_list)))
549
assert len(new_inventory_list) == len(old_entries) + insertions\
551
new_inventory_list.sort()
552
return new_inventory_list
555
merge_types = { "merge3": (ApplyMerge3, "Native diff3-style merge"),
556
"diff3": (Diff3Merge, "Merge using external diff3"),
557
'weave': (WeaveMerge, "Weave-based merge")
381
self.base_rev_id = _mod_revision.NULL_REVISION
383
self.base_rev_id = _mod_revision.ensure_null(
384
base_branch.get_rev_id(base_revision[1]))
385
self._maybe_fetch(base_branch, self.this_branch, self.base_rev_id)
387
def make_merger(self):
388
kwargs = {'working_tree':self.this_tree, 'this_tree': self.this_tree,
389
'other_tree': self.other_tree,
390
'interesting_ids': self.interesting_ids,
391
'interesting_files': self.interesting_files,
394
if self.merge_type.requires_base:
395
kwargs['base_tree'] = self.base_tree
396
if self.merge_type.supports_reprocess:
397
kwargs['reprocess'] = self.reprocess
399
raise BzrError("Conflict reduction is not supported for merge"
400
" type %s." % self.merge_type)
401
if self.merge_type.supports_show_base:
402
kwargs['show_base'] = self.show_base
404
raise BzrError("Showing base is not supported for this"
405
" merge type. %s" % self.merge_type)
406
if (not getattr(self.merge_type, 'supports_reverse_cherrypick', True)
407
and not self.base_is_other_ancestor):
408
raise errors.CannotReverseCherrypick()
409
if self.merge_type.supports_cherrypick:
410
kwargs['cherrypick'] = (not self.base_is_ancestor or
411
not self.base_is_other_ancestor)
412
return self.merge_type(pb=self._pb,
413
change_reporter=self.change_reporter,
417
self.this_tree.lock_tree_write()
418
if self.base_tree is not None:
419
self.base_tree.lock_read()
420
if self.other_tree is not None:
421
self.other_tree.lock_read()
423
merge = self.make_merger()
425
if self.recurse == 'down':
426
for relpath, file_id in self.this_tree.iter_references():
427
sub_tree = self.this_tree.get_nested_tree(file_id, relpath)
428
other_revision = self.other_tree.get_reference_revision(
430
if other_revision == sub_tree.last_revision():
432
sub_merge = Merger(sub_tree.branch, this_tree=sub_tree)
433
sub_merge.merge_type = self.merge_type
434
other_branch = self.other_branch.reference_parent(file_id, relpath)
435
sub_merge.set_other_revision(other_revision, other_branch)
436
base_revision = self.base_tree.get_reference_revision(file_id)
437
sub_merge.base_tree = \
438
sub_tree.branch.repository.revision_tree(base_revision)
439
sub_merge.base_rev_id = base_revision
443
if self.other_tree is not None:
444
self.other_tree.unlock()
445
if self.base_tree is not None:
446
self.base_tree.unlock()
447
self.this_tree.unlock()
448
if len(merge.cooked_conflicts) == 0:
449
if not self.ignore_zero and not is_quiet():
450
note("All changes applied successfully.")
452
note("%d conflicts encountered." % len(merge.cooked_conflicts))
454
return len(merge.cooked_conflicts)
457
class Merge3Merger(object):
458
"""Three-way merger that uses the merge3 text merger"""
460
supports_reprocess = True
461
supports_show_base = True
462
history_based = False
463
supports_cherrypick = True
464
supports_reverse_cherrypick = True
465
winner_idx = {"this": 2, "other": 1, "conflict": 1}
467
def __init__(self, working_tree, this_tree, base_tree, other_tree,
468
interesting_ids=None, reprocess=False, show_base=False,
469
pb=DummyProgress(), pp=None, change_reporter=None,
470
interesting_files=None, do_merge=True,
472
"""Initialize the merger object and perform the merge.
474
:param working_tree: The working tree to apply the merge to
475
:param this_tree: The local tree in the merge operation
476
:param base_tree: The common tree in the merge operation
477
:param other_tree: The other other tree to merge changes from
478
:param interesting_ids: The file_ids of files that should be
479
participate in the merge. May not be combined with
481
:param: reprocess If True, perform conflict-reduction processing.
482
:param show_base: If True, show the base revision in text conflicts.
483
(incompatible with reprocess)
484
:param pb: A Progress bar
485
:param pp: A ProgressPhase object
486
:param change_reporter: An object that should report changes made
487
:param interesting_files: The tree-relative paths of files that should
488
participate in the merge. If these paths refer to directories,
489
the contents of those directories will also be included. May not
490
be combined with interesting_ids. If neither interesting_files nor
491
interesting_ids is specified, all files may participate in the
494
object.__init__(self)
495
if interesting_files is not None and interesting_ids is not None:
497
'specify either interesting_ids or interesting_files')
498
self.interesting_ids = interesting_ids
499
self.interesting_files = interesting_files
500
self.this_tree = working_tree
501
self.base_tree = base_tree
502
self.other_tree = other_tree
503
self._raw_conflicts = []
504
self.cooked_conflicts = []
505
self.reprocess = reprocess
506
self.show_base = show_base
509
self.change_reporter = change_reporter
510
self.cherrypick = cherrypick
512
self.pp = ProgressPhase("Merge phase", 3, self.pb)
517
self.this_tree.lock_tree_write()
518
self.base_tree.lock_read()
519
self.other_tree.lock_read()
520
self.tt = TreeTransform(self.this_tree, self.pb)
523
self._compute_transform()
525
results = self.tt.apply(no_conflicts=True)
526
self.write_modified(results)
528
self.this_tree.add_conflicts(self.cooked_conflicts)
529
except UnsupportedOperation:
533
self.other_tree.unlock()
534
self.base_tree.unlock()
535
self.this_tree.unlock()
538
def make_preview_transform(self):
539
self.base_tree.lock_read()
540
self.other_tree.lock_read()
541
self.tt = TransformPreview(self.this_tree)
544
self._compute_transform()
547
self.other_tree.unlock()
548
self.base_tree.unlock()
552
def _compute_transform(self):
553
entries = self._entries3()
554
child_pb = ui.ui_factory.nested_progress_bar()
556
for num, (file_id, changed, parents3, names3,
557
executable3) in enumerate(entries):
558
child_pb.update('Preparing file merge', num, len(entries))
559
self._merge_names(file_id, parents3, names3)
561
file_status = self.merge_contents(file_id)
563
file_status = 'unmodified'
564
self._merge_executable(file_id,
565
executable3, file_status)
570
child_pb = ui.ui_factory.nested_progress_bar()
572
fs_conflicts = resolve_conflicts(self.tt, child_pb,
573
lambda t, c: conflict_pass(t, c, self.other_tree))
576
if self.change_reporter is not None:
577
from bzrlib import delta
578
delta.report_changes(
579
self.tt.iter_changes(), self.change_reporter)
580
self.cook_conflicts(fs_conflicts)
581
for conflict in self.cooked_conflicts:
585
"""Gather data about files modified between three trees.
587
Return a list of tuples of file_id, changed, parents3, names3,
588
executable3. changed is a boolean indicating whether the file contents
589
or kind were changed. parents3 is a tuple of parent ids for base,
590
other and this. names3 is a tuple of names for base, other and this.
591
executable3 is a tuple of execute-bit values for base, other and this.
594
iterator = self.other_tree.iter_changes(self.base_tree,
595
include_unchanged=True, specific_files=self.interesting_files,
596
extra_trees=[self.this_tree])
597
for (file_id, paths, changed, versioned, parents, names, kind,
598
executable) in iterator:
599
if (self.interesting_ids is not None and
600
file_id not in self.interesting_ids):
602
if file_id in self.this_tree.inventory:
603
entry = self.this_tree.inventory[file_id]
604
this_name = entry.name
605
this_parent = entry.parent_id
606
this_executable = entry.executable
610
this_executable = None
611
parents3 = parents + (this_parent,)
612
names3 = names + (this_name,)
613
executable3 = executable + (this_executable,)
614
result.append((file_id, changed, parents3, names3, executable3))
619
self.tt.final_kind(self.tt.root)
621
self.tt.cancel_deletion(self.tt.root)
622
if self.tt.final_file_id(self.tt.root) is None:
623
self.tt.version_file(self.tt.tree_file_id(self.tt.root),
625
if self.other_tree.inventory.root is None:
627
other_root_file_id = self.other_tree.get_root_id()
628
other_root = self.tt.trans_id_file_id(other_root_file_id)
629
if other_root == self.tt.root:
632
self.tt.final_kind(other_root)
635
if self.other_tree.inventory.root.file_id in self.this_tree.inventory:
636
# the other tree's root is a non-root in the current tree
638
self.reparent_children(self.other_tree.inventory.root, self.tt.root)
639
self.tt.cancel_creation(other_root)
640
self.tt.cancel_versioning(other_root)
642
def reparent_children(self, ie, target):
    """Reparent every child of inventory entry *ie* under *target*.

    Each child keeps its final name; only its parent transform-id changes.
    """
    for unused_name, child_entry in ie.children.iteritems():
        child_trans_id = self.tt.trans_id_file_id(child_entry.file_id)
        self.tt.adjust_path(self.tt.final_name(child_trans_id), target,
                            child_trans_id)
647
def write_modified(self, results):
649
for path in results.modified_paths:
650
file_id = self.this_tree.path2id(self.this_tree.relpath(path))
653
hash = self.this_tree.get_file_sha1(file_id)
656
modified_hashes[file_id] = hash
657
self.this_tree.set_merge_modified(modified_hashes)
660
def parent(entry, file_id):
661
"""Determine the parent for a file_id (used as a key method)"""
664
return entry.parent_id
667
def name(entry, file_id):
668
"""Determine the name for a file_id (used as a key method)"""
674
def contents_sha1(tree, file_id):
675
"""Determine the sha1 of the file contents (used as a key method)."""
676
if file_id not in tree:
678
return tree.get_file_sha1(file_id)
681
def executable(tree, file_id):
682
"""Determine the executability of a file-id (used as a key method)."""
683
if file_id not in tree:
685
if tree.kind(file_id) != "file":
687
return tree.is_executable(file_id)
690
def kind(tree, file_id):
691
"""Determine the kind of a file-id (used as a key method)."""
692
if file_id not in tree:
694
return tree.kind(file_id)
697
def _three_way(base, other, this):
698
#if base == other, either they all agree, or only THIS has changed.
701
elif this not in (base, other):
703
# "Ambiguous clean merge" -- both sides have made the same change.
706
# this == base: only other has changed.
711
def scalar_three_way(this_tree, base_tree, other_tree, file_id, key):
712
"""Do a three-way test on a scalar.
713
Return "this", "other" or "conflict", depending whether a value wins.
715
key_base = key(base_tree, file_id)
716
key_other = key(other_tree, file_id)
717
#if base == other, either they all agree, or only THIS has changed.
718
if key_base == key_other:
720
key_this = key(this_tree, file_id)
721
# "Ambiguous clean merge"
722
if key_this == key_other:
724
elif key_this == key_base:
729
def merge_names(self, file_id):
731
if file_id in tree.inventory:
732
return tree.inventory[file_id]
735
this_entry = get_entry(self.this_tree)
736
other_entry = get_entry(self.other_tree)
737
base_entry = get_entry(self.base_tree)
738
entries = (base_entry, other_entry, this_entry)
741
for entry in entries:
746
names.append(entry.name)
747
parents.append(entry.parent_id)
748
return self._merge_names(file_id, parents, names)
750
def _merge_names(self, file_id, parents, names):
751
"""Perform a merge on file_id names and parents"""
752
base_name, other_name, this_name = names
753
base_parent, other_parent, this_parent = parents
755
name_winner = self._three_way(*names)
757
parent_id_winner = self._three_way(*parents)
758
if this_name is None:
759
if name_winner == "this":
760
name_winner = "other"
761
if parent_id_winner == "this":
762
parent_id_winner = "other"
763
if name_winner == "this" and parent_id_winner == "this":
765
if name_winner == "conflict":
766
trans_id = self.tt.trans_id_file_id(file_id)
767
self._raw_conflicts.append(('name conflict', trans_id,
768
this_name, other_name))
769
if parent_id_winner == "conflict":
770
trans_id = self.tt.trans_id_file_id(file_id)
771
self._raw_conflicts.append(('parent conflict', trans_id,
772
this_parent, other_parent))
773
if other_name is None:
774
# it doesn't matter whether the result was 'other' or
775
# 'conflict'-- if there's no 'other', we leave it alone.
777
# if we get here, name_winner and parent_winner are set to safe values.
778
trans_id = self.tt.trans_id_file_id(file_id)
779
parent_id = parents[self.winner_idx[parent_id_winner]]
780
if parent_id is not None:
781
parent_trans_id = self.tt.trans_id_file_id(parent_id)
782
self.tt.adjust_path(names[self.winner_idx[name_winner]],
783
parent_trans_id, trans_id)
785
def merge_contents(self, file_id):
786
"""Performa a merge on file_id contents."""
787
def contents_pair(tree):
788
if file_id not in tree:
790
kind = tree.kind(file_id)
792
contents = tree.get_file_sha1(file_id)
793
elif kind == "symlink":
794
contents = tree.get_symlink_target(file_id)
797
return kind, contents
799
def contents_conflict():
800
trans_id = self.tt.trans_id_file_id(file_id)
801
name = self.tt.final_name(trans_id)
802
parent_id = self.tt.final_parent(trans_id)
803
if file_id in self.this_tree.inventory:
804
self.tt.unversion_file(trans_id)
805
if file_id in self.this_tree:
806
self.tt.delete_contents(trans_id)
807
file_group = self._dump_conflicts(name, parent_id, file_id,
809
self._raw_conflicts.append(('contents conflict', file_group))
811
# See SPOT run. run, SPOT, run.
812
# So we're not QUITE repeating ourselves; we do tricky things with
814
base_pair = contents_pair(self.base_tree)
815
other_pair = contents_pair(self.other_tree)
816
if base_pair == other_pair:
817
# OTHER introduced no changes
819
this_pair = contents_pair(self.this_tree)
820
if this_pair == other_pair:
821
# THIS and OTHER introduced the same changes
824
trans_id = self.tt.trans_id_file_id(file_id)
825
if this_pair == base_pair:
826
# only OTHER introduced changes
827
if file_id in self.this_tree:
828
# Remove any existing contents
829
self.tt.delete_contents(trans_id)
830
if file_id in self.other_tree:
831
# OTHER changed the file
832
create_by_entry(self.tt,
833
self.other_tree.inventory[file_id],
834
self.other_tree, trans_id)
835
if file_id not in self.this_tree.inventory:
836
self.tt.version_file(file_id, trans_id)
838
elif file_id in self.this_tree.inventory:
839
# OTHER deleted the file
840
self.tt.unversion_file(trans_id)
842
#BOTH THIS and OTHER introduced changes; scalar conflict
843
elif this_pair[0] == "file" and other_pair[0] == "file":
844
# THIS and OTHER are both files, so text merge. Either
845
# BASE is a file, or both converted to files, so at least we
846
# have agreement that output should be a file.
848
self.text_merge(file_id, trans_id)
850
return contents_conflict()
851
if file_id not in self.this_tree.inventory:
852
self.tt.version_file(file_id, trans_id)
854
self.tt.tree_kind(trans_id)
855
self.tt.delete_contents(trans_id)
860
# Scalar conflict, can't text merge. Dump conflicts
861
return contents_conflict()
863
def get_lines(self, tree, file_id):
864
"""Return the lines in a file, or an empty list."""
866
return tree.get_file(file_id).readlines()
870
def text_merge(self, file_id, trans_id):
871
"""Perform a three-way text merge on a file_id"""
872
# it's possible that we got here with base as a different type.
873
# if so, we just want two-way text conflicts.
874
if file_id in self.base_tree and \
875
self.base_tree.kind(file_id) == "file":
876
base_lines = self.get_lines(self.base_tree, file_id)
879
other_lines = self.get_lines(self.other_tree, file_id)
880
this_lines = self.get_lines(self.this_tree, file_id)
881
m3 = Merge3(base_lines, this_lines, other_lines,
882
is_cherrypick=self.cherrypick)
883
start_marker = "!START OF MERGE CONFLICT!" + "I HOPE THIS IS UNIQUE"
884
if self.show_base is True:
885
base_marker = '|' * 7
889
def iter_merge3(retval):
890
retval["text_conflicts"] = False
891
for line in m3.merge_lines(name_a = "TREE",
892
name_b = "MERGE-SOURCE",
893
name_base = "BASE-REVISION",
894
start_marker=start_marker,
895
base_marker=base_marker,
896
reprocess=self.reprocess):
897
if line.startswith(start_marker):
898
retval["text_conflicts"] = True
899
yield line.replace(start_marker, '<' * 7)
903
merge3_iterator = iter_merge3(retval)
904
self.tt.create_file(merge3_iterator, trans_id)
905
if retval["text_conflicts"] is True:
906
self._raw_conflicts.append(('text conflict', trans_id))
907
name = self.tt.final_name(trans_id)
908
parent_id = self.tt.final_parent(trans_id)
909
file_group = self._dump_conflicts(name, parent_id, file_id,
910
this_lines, base_lines,
912
file_group.append(trans_id)
914
def _dump_conflicts(self, name, parent_id, file_id, this_lines=None,
915
base_lines=None, other_lines=None, set_version=False,
917
"""Emit conflict files.
918
If this_lines, base_lines, or other_lines are omitted, they will be
919
determined automatically. If set_version is true, the .OTHER, .THIS
920
or .BASE (in that order) will be created as versioned files.
922
data = [('OTHER', self.other_tree, other_lines),
923
('THIS', self.this_tree, this_lines)]
925
data.append(('BASE', self.base_tree, base_lines))
928
for suffix, tree, lines in data:
930
trans_id = self._conflict_file(name, parent_id, tree, file_id,
932
file_group.append(trans_id)
933
if set_version and not versioned:
934
self.tt.version_file(file_id, trans_id)
938
def _conflict_file(self, name, parent_id, tree, file_id, suffix,
940
"""Emit a single conflict file."""
941
name = name + '.' + suffix
942
trans_id = self.tt.create_path(name, parent_id)
943
entry = tree.inventory[file_id]
944
create_by_entry(self.tt, entry, tree, trans_id, lines)
947
def merge_executable(self, file_id, file_status):
    """Perform a merge on the execute bit."""
    # Gather the execute bit from each of the three trees, in the
    # (base, other, this) order that _merge_executable expects.
    trees = (self.base_tree, self.other_tree, self.this_tree)
    executable = [self.executable(tree, file_id) for tree in trees]
    self._merge_executable(file_id, executable, file_status)
953
def _merge_executable(self, file_id, executable, file_status):
954
"""Perform a merge on the execute bit."""
955
base_executable, other_executable, this_executable = executable
956
if file_status == "deleted":
958
winner = self._three_way(*executable)
959
if winner == "conflict":
960
# There must be a None in here, if we have a conflict, but we
961
# need executability since file status was not deleted.
962
if self.executable(self.other_tree, file_id) is None:
966
if winner == 'this' and file_status != "modified":
968
trans_id = self.tt.trans_id_file_id(file_id)
970
if self.tt.final_kind(trans_id) != "file":
975
executability = this_executable
977
if file_id in self.other_tree:
978
executability = other_executable
979
elif file_id in self.this_tree:
980
executability = this_executable
981
elif file_id in self.base_tree:
982
executability = base_executable
983
if executability is not None:
984
trans_id = self.tt.trans_id_file_id(file_id)
985
self.tt.set_executability(executability, trans_id)
987
def cook_conflicts(self, fs_conflicts):
988
"""Convert all conflicts into a form that doesn't depend on trans_id"""
989
from conflicts import Conflict
991
self.cooked_conflicts.extend(cook_conflicts(fs_conflicts, self.tt))
992
fp = FinalPaths(self.tt)
993
for conflict in self._raw_conflicts:
994
conflict_type = conflict[0]
995
if conflict_type in ('name conflict', 'parent conflict'):
996
trans_id = conflict[1]
997
conflict_args = conflict[2:]
998
if trans_id not in name_conflicts:
999
name_conflicts[trans_id] = {}
1000
unique_add(name_conflicts[trans_id], conflict_type,
1002
if conflict_type == 'contents conflict':
1003
for trans_id in conflict[1]:
1004
file_id = self.tt.final_file_id(trans_id)
1005
if file_id is not None:
1007
path = fp.get_path(trans_id)
1008
for suffix in ('.BASE', '.THIS', '.OTHER'):
1009
if path.endswith(suffix):
1010
path = path[:-len(suffix)]
1012
c = Conflict.factory(conflict_type, path=path, file_id=file_id)
1013
self.cooked_conflicts.append(c)
1014
if conflict_type == 'text conflict':
1015
trans_id = conflict[1]
1016
path = fp.get_path(trans_id)
1017
file_id = self.tt.final_file_id(trans_id)
1018
c = Conflict.factory(conflict_type, path=path, file_id=file_id)
1019
self.cooked_conflicts.append(c)
1021
for trans_id, conflicts in name_conflicts.iteritems():
1023
this_parent, other_parent = conflicts['parent conflict']
1024
if this_parent == other_parent:
1025
raise AssertionError()
1027
this_parent = other_parent = \
1028
self.tt.final_file_id(self.tt.final_parent(trans_id))
1030
this_name, other_name = conflicts['name conflict']
1031
if this_name == other_name:
1032
raise AssertionError()
1034
this_name = other_name = self.tt.final_name(trans_id)
1035
other_path = fp.get_path(trans_id)
1036
if this_parent is not None and this_name is not None:
1037
this_parent_path = \
1038
fp.get_path(self.tt.trans_id_file_id(this_parent))
1039
this_path = pathjoin(this_parent_path, this_name)
1041
this_path = "<deleted>"
1042
file_id = self.tt.final_file_id(trans_id)
1043
c = Conflict.factory('path conflict', path=this_path,
1044
conflict_path=other_path, file_id=file_id)
1045
self.cooked_conflicts.append(c)
1046
self.cooked_conflicts.sort(key=Conflict.sort_key)
1049
class WeaveMerger(Merge3Merger):
1050
"""Three-way tree merger, text weave merger."""
1051
supports_reprocess = True
1052
supports_show_base = False
1053
supports_reverse_cherrypick = False
1054
history_based = True
1056
def _merged_lines(self, file_id):
1057
"""Generate the merged lines.
1058
There is no distinction between lines that are meant to contain <<<<<<<
1062
base = self.base_tree
1065
plan = self.this_tree.plan_file_merge(file_id, self.other_tree,
1067
if 'merge' in debug.debug_flags:
1069
trans_id = self.tt.trans_id_file_id(file_id)
1070
name = self.tt.final_name(trans_id) + '.plan'
1071
contents = ('%10s|%s' % l for l in plan)
1072
self.tt.new_file(name, self.tt.final_parent(trans_id), contents)
1073
textmerge = PlanWeaveMerge(plan, '<<<<<<< TREE\n',
1074
'>>>>>>> MERGE-SOURCE\n')
1075
return textmerge.merge_lines(self.reprocess)
1077
def text_merge(self, file_id, trans_id):
1078
"""Perform a (weave) text merge for a given file and file-id.
1079
If conflicts are encountered, .THIS and .OTHER files will be emitted,
1080
and a conflict will be noted.
1082
lines, conflicts = self._merged_lines(file_id)
1084
# Note we're checking whether the OUTPUT is binary in this case,
1085
# because we don't want to get into weave merge guts.
1086
check_text_lines(lines)
1087
self.tt.create_file(lines, trans_id)
1089
self._raw_conflicts.append(('text conflict', trans_id))
1090
name = self.tt.final_name(trans_id)
1091
parent_id = self.tt.final_parent(trans_id)
1092
file_group = self._dump_conflicts(name, parent_id, file_id,
1094
file_group.append(trans_id)
1097
class LCAMerger(WeaveMerger):
1099
def _merged_lines(self, file_id):
1100
"""Generate the merged lines.
1101
There is no distinction between lines that are meant to contain <<<<<<<
1105
base = self.base_tree
1108
plan = self.this_tree.plan_file_lca_merge(file_id, self.other_tree,
1110
if 'merge' in debug.debug_flags:
1112
trans_id = self.tt.trans_id_file_id(file_id)
1113
name = self.tt.final_name(trans_id) + '.plan'
1114
contents = ('%10s|%s' % l for l in plan)
1115
self.tt.new_file(name, self.tt.final_parent(trans_id), contents)
1116
textmerge = PlanWeaveMerge(plan, '<<<<<<< TREE\n',
1117
'>>>>>>> MERGE-SOURCE\n')
1118
return textmerge.merge_lines(self.reprocess)
1121
class Diff3Merger(Merge3Merger):
1122
"""Three-way merger using external diff3 for text merging"""
1124
def dump_file(self, temp_dir, name, tree, file_id):
1125
out_path = pathjoin(temp_dir, name)
1126
out_file = open(out_path, "wb")
1128
in_file = tree.get_file(file_id)
1129
for line in in_file:
1130
out_file.write(line)
1135
def text_merge(self, file_id, trans_id):
1136
"""Perform a diff3 merge using a specified file-id and trans-id.
1137
If conflicts are encountered, .BASE, .THIS. and .OTHER conflict files
1138
will be dumped, and a will be conflict noted.
1141
temp_dir = osutils.mkdtemp(prefix="bzr-")
1143
new_file = pathjoin(temp_dir, "new")
1144
this = self.dump_file(temp_dir, "this", self.this_tree, file_id)
1145
base = self.dump_file(temp_dir, "base", self.base_tree, file_id)
1146
other = self.dump_file(temp_dir, "other", self.other_tree, file_id)
1147
status = bzrlib.patch.diff3(new_file, this, base, other)
1148
if status not in (0, 1):
1149
raise BzrError("Unhandled diff3 exit code")
1150
f = open(new_file, 'rb')
1152
self.tt.create_file(f, trans_id)
1156
name = self.tt.final_name(trans_id)
1157
parent_id = self.tt.final_parent(trans_id)
1158
self._dump_conflicts(name, parent_id, file_id)
1159
self._raw_conflicts.append(('text conflict', trans_id))
1161
osutils.rmtree(temp_dir)
1164
def merge_inner(this_branch, other_tree, base_tree, ignore_zero=False,
1166
merge_type=Merge3Merger,
1167
interesting_ids=None,
1171
interesting_files=None,
1174
change_reporter=None):
1175
"""Primary interface for merging.
1177
typical use is probably
1178
'merge_inner(branch, branch.get_revision_tree(other_revision),
1179
branch.get_revision_tree(base_revision))'
1181
if this_tree is None:
1182
raise BzrError("bzrlib.merge.merge_inner requires a this_tree "
1183
"parameter as of bzrlib version 0.8.")
1184
merger = Merger(this_branch, other_tree, base_tree, this_tree=this_tree,
1185
pb=pb, change_reporter=change_reporter)
1186
merger.backup_files = backup_files
1187
merger.merge_type = merge_type
1188
merger.interesting_ids = interesting_ids
1189
merger.ignore_zero = ignore_zero
1190
if interesting_files:
1192
raise ValueError('Only supply interesting_ids'
1193
' or interesting_files')
1194
merger.interesting_files = interesting_files
1195
merger.show_base = show_base
1196
merger.reprocess = reprocess
1197
merger.other_rev_id = other_rev_id
1198
merger.other_basis = other_rev_id
1199
get_revision_id = getattr(base_tree, 'get_revision_id', None)
1200
if get_revision_id is None:
1201
get_revision_id = base_tree.last_revision
1202
merger.set_base_revision(get_revision_id(), this_branch)
1203
return merger.do_merge()
1205
def get_merge_type_registry():
    """Merge type registry is in bzrlib.option to avoid circular imports.

    This method provides a sanctioned way to retrieve it.
    """
    # Imported locally because bzrlib.option itself imports from this module.
    from bzrlib import option
    return option._merge_type_registry
1214
def _plan_annotate_merge(annotated_a, annotated_b, ancestors_a, ancestors_b):
1215
def status_a(revision, text):
1216
if revision in ancestors_b:
1217
return 'killed-b', text
1219
return 'new-a', text
1221
def status_b(revision, text):
1222
if revision in ancestors_a:
1223
return 'killed-a', text
1225
return 'new-b', text
1227
plain_a = [t for (a, t) in annotated_a]
1228
plain_b = [t for (a, t) in annotated_b]
1229
matcher = patiencediff.PatienceSequenceMatcher(None, plain_a, plain_b)
1230
blocks = matcher.get_matching_blocks()
1233
for ai, bi, l in blocks:
1234
# process all mismatched sections
1235
# (last mismatched section is handled because blocks always
1236
# includes a 0-length last block)
1237
for revision, text in annotated_a[a_cur:ai]:
1238
yield status_a(revision, text)
1239
for revision, text in annotated_b[b_cur:bi]:
1240
yield status_b(revision, text)
1241
# and now the matched section
1244
for text_a in plain_a[ai:a_cur]:
1245
yield "unchanged", text_a
1248
class _PlanMergeBase(object):
1250
def __init__(self, a_rev, b_rev, vf, key_prefix):
1253
:param a_rev: Revision-id of one revision to merge
1254
:param b_rev: Revision-id of the other revision to merge
1255
:param vf: A VersionedFiles containing both revisions
1256
:param key_prefix: A prefix for accessing keys in vf, typically
1262
self._last_lines = None
1263
self._last_lines_revision_id = None
1264
self._cached_matching_blocks = {}
1265
self._key_prefix = key_prefix
1266
self._precache_tip_lines()
1268
def _precache_tip_lines(self):
    """Fetch and cache the text lines of the two tip revisions."""
    tip_texts = self.get_lines([self.a_rev, self.b_rev])
    self.lines_a = tip_texts[self.a_rev]
    self.lines_b = tip_texts[self.b_rev]
1273
def get_lines(self, revisions):
1274
"""Get lines for revisions from the backing VersionedFiles.
1276
:raises RevisionNotPresent: on absent texts.
1278
keys = [(self._key_prefix + (rev,)) for rev in revisions]
1280
for record in self.vf.get_record_stream(keys, 'unordered', True):
1281
if record.storage_kind == 'absent':
1282
raise errors.RevisionNotPresent(record.key, self.vf)
1283
result[record.key[-1]] = osutils.split_lines(
1284
record.get_bytes_as('fulltext'))
1287
def plan_merge(self):
    """Generate a 'plan' for merging the two revisions.

    This involves comparing their texts and determining the cause of
    differences.  If text A has a line and text B does not, then either the
    line was added to text A, or it was deleted from B.  Once the causes
    are combined, they are written out in the format described in
    VersionedFile.plan_merge
    """
    match_blocks = self._get_matching_blocks(self.a_rev, self.b_rev)
    only_in_a, only_in_b = self._unique_lines(match_blocks)
    added_a, removed_b = self._determine_status(self.a_rev, only_in_a)
    added_b, removed_a = self._determine_status(self.b_rev, only_in_b)
    return self._iter_plan(match_blocks, added_a, removed_b,
                           added_b, removed_a)
1302
def _iter_plan(self, blocks, new_a, killed_b, new_b, killed_a):
1305
for i, j, n in blocks:
1306
for a_index in range(last_i, i):
1307
if a_index in new_a:
1308
if a_index in killed_b:
1309
yield 'conflicted-a', self.lines_a[a_index]
1311
yield 'new-a', self.lines_a[a_index]
1313
yield 'killed-b', self.lines_a[a_index]
1314
for b_index in range(last_j, j):
1315
if b_index in new_b:
1316
if b_index in killed_a:
1317
yield 'conflicted-b', self.lines_b[b_index]
1319
yield 'new-b', self.lines_b[b_index]
1321
yield 'killed-a', self.lines_b[b_index]
1322
# handle common lines
1323
for a_index in range(i, i+n):
1324
yield 'unchanged', self.lines_a[a_index]
1328
def _get_matching_blocks(self, left_revision, right_revision):
1329
"""Return a description of which sections of two revisions match.
1331
See SequenceMatcher.get_matching_blocks
1333
cached = self._cached_matching_blocks.get((left_revision,
1335
if cached is not None:
1337
if self._last_lines_revision_id == left_revision:
1338
left_lines = self._last_lines
1339
right_lines = self.get_lines([right_revision])[right_revision]
1341
lines = self.get_lines([left_revision, right_revision])
1342
left_lines = lines[left_revision]
1343
right_lines = lines[right_revision]
1344
self._last_lines = right_lines
1345
self._last_lines_revision_id = right_revision
1346
matcher = patiencediff.PatienceSequenceMatcher(None, left_lines,
1348
return matcher.get_matching_blocks()
1350
def _unique_lines(self, matching_blocks):
1351
"""Analyse matching_blocks to determine which lines are unique
1353
:return: a tuple of (unique_left, unique_right), where the values are
1354
sets of line numbers of unique lines.
1360
for i, j, n in matching_blocks:
1361
unique_left.extend(range(last_i, i))
1362
unique_right.extend(range(last_j, j))
1365
return unique_left, unique_right
1368
def _subtract_plans(old_plan, new_plan):
1369
"""Remove changes from new_plan that came from old_plan.
1371
It is assumed that the difference between the old_plan and new_plan
1372
is their choice of 'b' text.
1374
All lines from new_plan that differ from old_plan are emitted
1375
verbatim. All lines from new_plan that match old_plan but are
1376
not about the 'b' revision are emitted verbatim.
1378
Lines that match and are about the 'b' revision are the lines we
1379
don't want, so we convert 'killed-b' -> 'unchanged', and 'new-b'
1380
is skipped entirely.
1382
matcher = patiencediff.PatienceSequenceMatcher(None, old_plan,
1385
for i, j, n in matcher.get_matching_blocks():
1386
for jj in range(last_j, j):
1388
for jj in range(j, j+n):
1389
plan_line = new_plan[jj]
1390
if plan_line[0] == 'new-b':
1392
elif plan_line[0] == 'killed-b':
1393
yield 'unchanged', plan_line[1]
1399
class _PlanMerge(_PlanMergeBase):
1400
"""Plan an annotate merge using on-the-fly annotation"""
1402
def __init__(self, a_rev, b_rev, vf, key_prefix):
1403
super(_PlanMerge, self).__init__(a_rev, b_rev, vf, key_prefix)
1404
self.a_key = self._key_prefix + (self.a_rev,)
1405
self.b_key = self._key_prefix + (self.b_rev,)
1406
self.graph = Graph(self.vf)
1407
heads = self.graph.heads((self.a_key, self.b_key))
1409
# one side dominates, so we can just return its values, yay for
1411
# Ideally we would know that before we get this far
1412
self._head_key = heads.pop()
1413
if self._head_key == self.a_key:
1417
mutter('found dominating revision for %s\n%s > %s', self.vf,
1418
self._head_key[-1], other)
1421
self._head_key = None
1424
def _precache_tip_lines(self):
    # Turn this into a no-op, because we will do this later
    pass
1428
def _find_recursive_lcas(self):
1429
"""Find all the ancestors back to a unique lca"""
1430
cur_ancestors = (self.a_key, self.b_key)
1431
# graph.find_lca(uncommon, keys) now returns plain NULL_REVISION,
1432
# rather than a key tuple. We will just map that directly to no common
1436
next_lcas = self.graph.find_lca(*cur_ancestors)
1437
# Map a plain NULL_REVISION to a simple no-ancestors
1438
if next_lcas == set([NULL_REVISION]):
1440
# Order the lca's based on when they were merged into the tip
1441
# While the actual merge portion of weave merge uses a set() of
1442
# active revisions, the order of insertion *does* effect the
1443
# implicit ordering of the texts.
1444
for rev_key in cur_ancestors:
1445
ordered_parents = tuple(self.graph.find_merge_order(rev_key,
1447
parent_map[rev_key] = ordered_parents
1448
if len(next_lcas) == 0:
1450
elif len(next_lcas) == 1:
1451
parent_map[list(next_lcas)[0]] = ()
1453
elif len(next_lcas) > 2:
1454
# More than 2 lca's, fall back to grabbing all nodes between
1455
# this and the unique lca.
1456
mutter('More than 2 LCAs, falling back to all nodes for:'
1457
' %s, %s\n=> %s', self.a_key, self.b_key, cur_ancestors)
1458
cur_lcas = next_lcas
1459
while len(cur_lcas) > 1:
1460
cur_lcas = self.graph.find_lca(*cur_lcas)
1461
if len(cur_lcas) == 0:
1462
# No common base to find, use the full ancestry
1465
unique_lca = list(cur_lcas)[0]
1466
if unique_lca == NULL_REVISION:
1467
# find_lca will return a plain 'NULL_REVISION' rather
1468
# than a key tuple when there is no common ancestor, we
1469
# prefer to just use None, because it doesn't confuse
1470
# _get_interesting_texts()
1472
parent_map.update(self._find_unique_parents(next_lcas,
1475
cur_ancestors = next_lcas
1478
def _find_unique_parents(self, tip_keys, base_key):
1479
"""Find ancestors of tip that aren't ancestors of base.
1481
:param tip_keys: Nodes that are interesting
1482
:param base_key: Cull all ancestors of this node
1483
:return: The parent map for all revisions between tip_keys and
1484
base_key. base_key will be included. References to nodes outside of
1485
the ancestor set will also be removed.
1487
# TODO: this would be simpler if find_unique_ancestors took a list
1488
# instead of a single tip, internally it supports it, but it
1489
# isn't a "backwards compatible" api change.
1490
if base_key is None:
1491
parent_map = dict(self.graph.iter_ancestry(tip_keys))
1492
# We remove NULL_REVISION because it isn't a proper tuple key, and
1493
# thus confuses things like _get_interesting_texts, and our logic
1494
# to add the texts into the memory weave.
1495
if NULL_REVISION in parent_map:
1496
parent_map.pop(NULL_REVISION)
1499
for tip in tip_keys:
1501
self.graph.find_unique_ancestors(tip, [base_key]))
1502
parent_map = self.graph.get_parent_map(interesting)
1503
parent_map[base_key] = ()
1504
culled_parent_map, child_map, tails = self._remove_external_references(
1506
# Remove all the tails but base_key
1507
if base_key is not None:
1508
tails.remove(base_key)
1509
self._prune_tails(culled_parent_map, child_map, tails)
1510
# Now remove all the uninteresting 'linear' regions
1511
simple_map = _mod_graph.collapse_linear_regions(culled_parent_map)
1515
def _remove_external_references(parent_map):
1516
"""Remove references that go outside of the parent map.
1518
:param parent_map: Something returned from Graph.get_parent_map(keys)
1519
:return: (filtered_parent_map, child_map, tails)
1520
filtered_parent_map is parent_map without external references
1521
child_map is the {parent_key: [child_keys]} mapping
1522
tails is a list of nodes that do not have any parents in the map
1524
# TODO: The basic effect of this function seems more generic than
1525
# _PlanMerge. But the specific details of building a child_map,
1526
# and computing tails seems very specific to _PlanMerge.
1527
# Still, should this be in Graph land?
1528
filtered_parent_map = {}
1531
for key, parent_keys in parent_map.iteritems():
1532
culled_parent_keys = [p for p in parent_keys if p in parent_map]
1533
if not culled_parent_keys:
1535
for parent_key in culled_parent_keys:
1536
child_map.setdefault(parent_key, []).append(key)
1537
# TODO: Do we want to do this, it adds overhead for every node,
1538
# just to say that the node has no children
1539
child_map.setdefault(key, [])
1540
filtered_parent_map[key] = culled_parent_keys
1541
return filtered_parent_map, child_map, tails
1544
def _prune_tails(parent_map, child_map, tails_to_remove):
1545
"""Remove tails from the parent map.
1547
This will remove the supplied revisions until no more children have 0
1550
:param parent_map: A dict of {child: [parents]}, this dictionary will
1551
be modified in place.
1552
:param tails_to_remove: A list of tips that should be removed,
1553
this list will be consumed
1554
:param child_map: The reverse dict of parent_map ({parent: [children]})
1555
this dict will be modified
1556
:return: None, parent_map will be modified in place.
1558
while tails_to_remove:
1559
next = tails_to_remove.pop()
1560
parent_map.pop(next)
1561
children = child_map.pop(next)
1562
for child in children:
1563
child_parents = parent_map[child]
1564
child_parents.remove(next)
1565
if len(child_parents) == 0:
1566
tails_to_remove.append(child)
1568
def _get_interesting_texts(self, parent_map):
1569
"""Return a dict of texts we are interested in.
1571
Note that the input is in key tuples, but the output is in plain
1574
:param parent_map: The output from _find_recursive_lcas
1575
:return: A dict of {'revision_id':lines} as returned by
1576
_PlanMergeBase.get_lines()
1578
all_revision_keys = set(parent_map)
1579
all_revision_keys.add(self.a_key)
1580
all_revision_keys.add(self.b_key)
1582
# Everything else is in 'keys' but get_lines is in 'revision_ids'
1583
all_texts = self.get_lines([k[-1] for k in all_revision_keys])
1586
def _build_weave(self):
    """Build an in-memory weave of a_rev, b_rev and their recursive LCAs.

    The resulting weave is stored in self._weave, for later use by
    plan_merge().  Texts are inserted in merge_sort order so that
    left-hand parents always precede right-hand parents in the weave.
    """
    from bzrlib import weave
    self._weave = weave.Weave(weave_name='in_memory_weave',
                              allow_reserved=True)
    parent_map = self._find_recursive_lcas()

    all_texts = self._get_interesting_texts(parent_map)

    # Note: Unfortunately, the order given by topo_sort will effect the
    # ordering resolution in the output. Specifically, if you add A then B,
    # then in the output text A lines will show up before B lines. And, of
    # course, topo_sort doesn't guarantee any real ordering.
    # So we use merge_sort, and add a fake node on the tip.
    # This ensures that left-hand parents will always be inserted into the
    # weave before right-hand parents.
    tip_key = self._key_prefix + (_mod_revision.CURRENT_REVISION,)
    parent_map[tip_key] = (self.a_key, self.b_key)

    for seq_num, key, depth, eom in reversed(tsort.merge_sort(parent_map,
                                                              tip_key)):
        if key == tip_key:
            # The fake tip exists only to anchor the sort; don't add it.
            continue
    # for key in tsort.topo_sort(parent_map):
        parent_keys = parent_map[key]
        revision_id = key[-1]
        parent_ids = [k[-1] for k in parent_keys]
        self._weave.add_lines(revision_id, parent_ids,
                              all_texts[revision_id])
1615
def plan_merge(self):
    """Generate a 'plan' for merging the two revisions.

    This involves comparing their texts and determining the cause of
    differences.  If text A has a line and text B does not, then either the
    line was added to text A, or it was deleted from B.  Once the causes
    are combined, they are written out in the format described in
    VersionedFile.plan_merge

    :return: An iterable of (state, line) tuples.
    """
    if self._head_key is not None: # There was a single head
        if self._head_key == self.a_key:
            plan = 'new-a'
        else:
            if self._head_key != self.b_key:
                raise AssertionError('There was an invalid head: %s != %s'
                                     % (self.b_key, self._head_key))
            plan = 'new-b'
        # One side strictly supersedes the other: every line of the head
        # revision is emitted with the same plan state.
        head_rev = self._head_key[-1]
        lines = self.get_lines([head_rev])[head_rev]
        return ((plan, line) for line in lines)
    return self._weave.plan_merge(self.a_rev, self.b_rev)
1638
class _PlanLCAMerge(_PlanMergeBase):
1640
This merge algorithm differs from _PlanMerge in that:
1641
1. comparisons are done against LCAs only
1642
2. cases where a contested line is new versus one LCA but old versus
1643
another are marked as conflicts, by emitting the line as conflicted-a
1646
This is faster, and hopefully produces more useful output.
1649
def __init__(self, a_rev, b_rev, vf, key_prefix, graph):
    """Prepare an LCA-based merge plan between a_rev and b_rev.

    Finds the least-common-ancestors of the two revisions via graph, and
    pre-computes the matching blocks between each LCA's text and the
    texts of a_rev and b_rev, caching them for _determine_status.

    :param a_rev: Revision id of the first revision to merge.
    :param b_rev: Revision id of the second revision to merge.
    :param vf: A versioned file holding the revision texts.
    :param key_prefix: Prefix tuple prepended to revision ids to form keys.
    :param graph: A graph object used to find the LCAs of the two keys.
    """
    _PlanMergeBase.__init__(self, a_rev, b_rev, vf, key_prefix)
    self.lcas = set()
    lcas = graph.find_lca(key_prefix + (a_rev,), key_prefix + (b_rev,))
    for lca in lcas:
        if lca == NULL_REVISION:
            self.lcas.add(lca)
        else:
            # Keys are tuples; keep only the plain revision id portion.
            self.lcas.add(lca[-1])
    for lca in self.lcas:
        if _mod_revision.is_null(lca):
            lca_lines = []
        else:
            lca_lines = self.get_lines([lca])[lca]
        # Cache the matching blocks of each LCA text against both sides,
        # so later status determination does not have to re-diff.
        matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_a,
                                                       lca_lines)
        blocks = list(matcher.get_matching_blocks())
        self._cached_matching_blocks[(a_rev, lca)] = blocks
        matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_b,
                                                       lca_lines)
        blocks = list(matcher.get_matching_blocks())
        self._cached_matching_blocks[(b_rev, lca)] = blocks
1672
def _determine_status(self, revision_id, unique_line_numbers):
1673
"""Determines the status unique lines versus all lcas.
1675
Basically, determines why the line is unique to this revision.
1677
A line may be determined new, killed, or both.
1679
If a line is determined new, that means it was not present in at least
1680
one LCA, and is not present in the other merge revision.
1682
If a line is determined killed, that means the line was present in
1685
If a line is killed and new, this indicates that the two merge
1686
revisions contain differing conflict resolutions.
1687
:param revision_id: The id of the revision in which the lines are
1689
:param unique_line_numbers: The line numbers of unique lines.
1690
:return a tuple of (new_this, killed_other):
1694
unique_line_numbers = set(unique_line_numbers)
1695
for lca in self.lcas:
1696
blocks = self._get_matching_blocks(revision_id, lca)
1697
unique_vs_lca, _ignored = self._unique_lines(blocks)
1698
new.update(unique_line_numbers.intersection(unique_vs_lca))
1699
killed.update(unique_line_numbers.difference(unique_vs_lca))