# Copyright (C) 2005, 2006, 2008 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

# TODO: build_working_dir can be built on something simpler than merge()

import errno
import os
from itertools import chain

import bzrlib
import bzrlib.patch
from bzrlib import (
    debug,
    errors,
    osutils,
    revision as _mod_revision,
    tree as _mod_tree,
    ui,
    )
from bzrlib._changeset import generate_changeset, ExceptionConflictHandler
from bzrlib._changeset import Inventory, Diff3Merge, ReplaceContents
from bzrlib._merge_core import WeaveMerge
from bzrlib._merge_core import merge_flex, ApplyMerge3, BackupBeforeChange
from bzrlib.branch import Branch
from bzrlib.conflicts import ConflictList, Conflict
from bzrlib.delta import compare_trees
from bzrlib.errors import (BzrCommandError,
                           BzrError,
                           NoCommits,
                           NoCommonAncestor,
                           NotBranchError,
                           NotVersionedError,
                           UnrelatedBranches,
                           UnsupportedOperation,
                           WorkingTreeNotRevision,
                           )
from bzrlib.fetch import greedy_fetch, fetch
from bzrlib.graph import Graph
from bzrlib.merge3 import Merge3
from bzrlib.osutils import rename, pathjoin
from bzrlib.progress import DummyProgress, ProgressPhase
from bzrlib.revision import (NULL_REVISION, ensure_null, common_ancestor,
                             is_ancestor)
from bzrlib.textfile import check_text_lines
from bzrlib.trace import mutter, warning, note, is_quiet
from bzrlib.transform import (TransformPreview, TreeTransform,
                              resolve_conflicts, cook_conflicts,
                              conflict_pass, FinalPaths, create_from_tree,
                              unique_add, ROOT_PARENT)
from bzrlib.versionedfile import PlanWeaveMerge
from bzrlib.workingtree import WorkingTree

# TODO: Report back as changes are merged in

# comments from abentley on irc: merge happens in two stages, each
# of which generates a changeset object

# stage 1: generate OLD->OTHER,
# stage 2: use MINE and OLD->OTHER to generate MINE -> RESULT
54
class _MergeConflictHandler(ExceptionConflictHandler):
55
"""Handle conflicts encountered while merging.
57
This subclasses ExceptionConflictHandler, so that any types of
58
conflict that are not explicitly handled cause an exception and
61
def __init__(self, this_tree, base_tree, other_tree, ignore_zero=False):
62
ExceptionConflictHandler.__init__(self)
64
self.ignore_zero = ignore_zero
65
self.this_tree = this_tree
66
self.base_tree = base_tree
67
self.other_tree = other_tree
69
def copy(self, source, dest):
70
"""Copy the text and mode of a file
71
:param source: The path of the file to copy
72
:param dest: The destination file to create
74
s_file = file(source, "rb")
75
d_file = file(dest, "wb")
78
os.chmod(dest, 0777 & os.stat(source).st_mode)
80
def dump(self, lines, dest):
81
"""Copy the text and mode of a file
82
:param source: The path of the file to copy
83
:param dest: The distination file to create
85
d_file = file(dest, "wb")
89
def add_suffix(self, name, suffix, last_new_name=None, fix_inventory=True):
90
"""Rename a file to append a suffix. If the new name exists, the
91
suffix is added repeatedly until a non-existent name is found
93
:param name: The path of the file
94
:param suffix: The suffix to append
95
:param last_new_name: (used for recursive calls) the last name tried
97
if last_new_name is None:
99
new_name = last_new_name+suffix
101
rename(name, new_name)
102
if fix_inventory is True:
104
relpath = self.this_tree.relpath(name)
105
except NotBranchError:
107
if relpath is not None:
108
file_id = self.this_tree.path2id(relpath)
109
if file_id is not None:
110
new_path = self.this_tree.relpath(new_name)
111
rename(new_name, name)
112
self.this_tree.rename_one(relpath, new_path)
113
assert self.this_tree.id2path(file_id) == new_path
115
if e.errno != errno.EEXIST and e.errno != errno.ENOTEMPTY:
117
return self.add_suffix(name, suffix, last_new_name=new_name,
118
fix_inventory=fix_inventory)
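# Illustrative sketch (not part of the original code): add_suffix keeps
# appending the suffix until it finds an unused name, so a conflicting
# 'foo' could be renamed like this (assuming 'foo.moved' already exists):
#
#   handler.add_suffix('foo', '.moved')
#   # first try:  foo.moved        -> already exists, recurse
#   # second try: foo.moved.moved  -> free, so 'foo' is renamed to it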
121
def conflict(self, text):
126
def merge_conflict(self, new_file, this_path, base_lines, other_lines):
128
Handle diff3 conflicts by producing a .THIS, .BASE and .OTHER. The
129
main file will be a version with diff3 conflicts.
130
:param new_file: Path to the output file with diff3 markers
131
:param this_path: Path to the file text for the THIS tree
132
:param base_lines: The lines of text for the BASE tree
133
:param other_lines: The lines of text for the OTHER tree
135
self.add_suffix(this_path, ".THIS", fix_inventory=False)
136
self.dump(base_lines, this_path+".BASE")
137
self.dump(other_lines, this_path+".OTHER")
138
rename(new_file, this_path)
139
self.conflict("Diff3 conflict encountered in %s" % this_path)
141
def weave_merge_conflict(self, filename, weave, other_i, out_file):
143
Handle weave conflicts by producing a .THIS, and .OTHER. The
144
main file will be a version with diff3-style conflicts.
146
self.add_suffix(filename, ".THIS", fix_inventory=False)
148
self.dump(weave.get_iter(other_i), filename+".OTHER")
149
self.conflict("Text conflict encountered in %s" % filename)
151
def new_contents_conflict(self, filename, other_contents):
152
"""Conflicting contents for newly added file."""
153
other_contents(filename + ".OTHER", self, False)
154
self.conflict("Conflict in newly added file %s" % filename)
157
def target_exists(self, entry, target, old_path):
158
"""Handle the case when the target file or dir exists"""
159
moved_path = self.add_suffix(target, ".moved")
160
self.conflict("Moved existing %s to %s" % (target, moved_path))
162
def rmdir_non_empty(self, filename):
163
"""Handle the case where the dir to be removed still has contents"""
164
self.conflict("Directory %s not removed because it is not empty"\
168
def rem_contents_conflict(self, filename, this_contents, base_contents):
169
base_contents(filename+".BASE", self)
170
this_contents(filename+".THIS", self)
171
self.conflict("Other branch deleted locally modified file %s" %
173
return ReplaceContents(this_contents, None)
175
def abs_this_path(self, file_id):
176
"""Return the absolute path for a file_id in the this tree."""
177
return self.this_tree.id2abspath(file_id)
179
def add_missing_parents(self, file_id, tree):
180
"""If some of the parents for file_id are missing, add them."""
181
entry = tree.inventory[file_id]
182
if entry.parent_id not in self.this_tree:
183
return self.create_all_missing(entry.parent_id, tree)
185
return self.abs_this_path(entry.parent_id)
187
def create_all_missing(self, file_id, tree):
188
"""Add contents for a file_id and all its parents to a tree."""
189
entry = tree.inventory[file_id]
190
if entry.parent_id is not None and entry.parent_id not in self.this_tree:
191
abspath = self.create_all_missing(entry.parent_id, tree)
193
abspath = self.abs_this_path(entry.parent_id)
194
entry_path = pathjoin(abspath, entry.name)
195
if not os.path.isdir(entry_path):
196
self.create(file_id, entry_path, tree)
199
def create(self, file_id, path, tree):
200
"""Uses tree data to create a filesystem object for the file_id"""
201
from bzrlib._changeset import get_contents
202
get_contents(tree, file_id)(path, self)
204
def missing_for_merge(self, file_id, other_path):
205
"""The file_id doesn't exist in THIS, but does in OTHER and BASE"""
206
self.conflict("Other branch modified locally deleted file %s" %
208
parent_dir = self.add_missing_parents(file_id, self.other_tree)
209
stem = pathjoin(parent_dir, os.path.basename(other_path))
210
self.create(file_id, stem+".OTHER", self.other_tree)
211
self.create(file_id, stem+".BASE", self.base_tree)
213
def threeway_contents_conflict(filename, this_contents, base_contents,
215
self.conflict("Three-way conflict merging %s" % filename)
218
if self.conflicts == 0:
219
if not self.ignore_zero:
220
note("All changes applied successfully.")
222
note("%d conflicts encountered." % self.conflicts)
224
def _get_tree(treespec, local_branch=None):
225
location, revno = treespec
226
branch = Branch.open_containing(location)[0]
230
revision = branch.last_revision()
232
revision = branch.get_rev_id(revno)
234
revision = NULL_REVISION
235
return branch, _get_revid_tree(branch, revision, local_branch)
238
def _get_revid_tree(branch, revision, local_branch):
240
base_tree = branch.working_tree()
242
if local_branch is not None:
243
if local_branch.base != branch.base:
244
greedy_fetch(local_branch, branch, revision)
245
base_tree = local_branch.repository.revision_tree(revision)
247
base_tree = branch.repository.revision_tree(revision)
251
def build_working_dir(to_dir):
252
"""Build a working directory in an empty directory.
254
to_dir is a directory containing branch metadata but no working files,
255
typically constructed by cloning an existing branch.
257
This is split out as a special idiomatic case of merge. It could
258
eventually be done by just building the tree directly calling into
259
lower-level code (e.g. constructing a changeset).
261
# RBC 20051019 is this not just 'export' ?
262
# AB Well, export doesn't take care of inventory...
263
this_tree = WorkingTree.open_containing(to_dir)[0]
264
transform_tree(this_tree, this_tree.basis_tree())
66
267
def transform_tree(from_tree, to_tree, interesting_ids=None):
67
from_tree.lock_tree_write()
69
merge_inner(from_tree.branch, to_tree, from_tree, ignore_zero=True,
70
interesting_ids=interesting_ids, this_tree=from_tree)
268
merge_inner(from_tree.branch, to_tree, from_tree, ignore_zero=True,
269
interesting_ids=interesting_ids)
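# Usage sketch (grounded in build_working_dir above): transform_tree makes
# from_tree's contents match to_tree, e.g. to reset a working tree to its
# basis:
#
#   wt = WorkingTree.open_containing('.')[0]
#   transform_tree(wt, wt.basis_tree())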
272
def merge_inner(this_branch, other_tree, base_tree, ignore_zero=False,
274
merge_type=ApplyMerge3,
275
interesting_ids=None,
279
interesting_files=None,
281
"""Primary interface for merging.
283
typical use is probably
284
'merge_inner(branch, branch.get_revision_tree(other_revision),
285
branch.get_revision_tree(base_revision))'
287
if this_tree is None:
288
this_tree = this_branch.working_tree()
289
merger = Merger(this_branch, other_tree, base_tree, this_tree=this_tree)
290
merger.backup_files = backup_files
291
merger.merge_type = merge_type
292
merger.interesting_ids = interesting_ids
293
if interesting_files:
294
assert not interesting_ids, ('Only supply interesting_ids'
295
' or interesting_files')
296
merger._set_interesting_files(interesting_files)
297
merger.show_base = show_base
298
merger.reprocess = reprocess
299
merger.conflict_handler = _MergeConflictHandler(merger.this_tree,
300
base_tree, other_tree,
301
ignore_zero=ignore_zero)
302
merger.other_rev_id = other_rev_id
303
merger.other_basis = other_rev_id
304
return merger.do_merge()
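# Hedged usage sketch (other_rev_id and base_rev_id are placeholder names):
# merge_inner() applies the changes between base_tree and other_tree to
# this_branch's working tree, e.g.:
#
#   repo = this_branch.repository
#   merge_inner(this_branch,
#               repo.revision_tree(other_rev_id),
#               repo.revision_tree(base_rev_id),
#               this_tree=this_branch.working_tree())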
75
307
class Merger(object):
76
def __init__(self, this_branch, other_tree=None, base_tree=None,
77
this_tree=None, pb=None, change_reporter=None,
78
recurse='down', revision_graph=None):
308
def __init__(self, this_branch, other_tree=None, base_tree=None, this_tree=None):
79
309
object.__init__(self)
310
assert this_tree is not None, "this_tree is required"
80
311
self.this_branch = this_branch
81
self.this_basis = _mod_revision.ensure_null(
82
this_branch.last_revision())
312
self.this_basis = this_branch.last_revision()
83
313
self.this_rev_id = None
84
314
self.this_tree = this_tree
85
315
self.this_revision_tree = None
86
316
self.this_basis_tree = None
87
317
self.other_tree = other_tree
88
self.other_branch = None
89
318
self.base_tree = base_tree
90
319
self.ignore_zero = False
91
320
self.backup_files = False
92
321
self.interesting_ids = None
93
self.interesting_files = None
94
322
self.show_base = False
95
323
self.reprocess = False
100
self.recurse = recurse
101
self.change_reporter = change_reporter
102
self._cached_trees = {}
103
self._revision_graph = revision_graph
104
self._base_is_ancestor = None
105
self._base_is_other_ancestor = None
106
self._is_criss_cross = None
107
self._lca_trees = None
109
def cache_trees_with_revision_ids(self, trees):
110
"""Cache any tree in trees if it has a revision_id."""
111
for maybe_tree in trees:
112
if maybe_tree is None:
115
rev_id = maybe_tree.get_revision_id()
116
except AttributeError:
118
self._cached_trees[rev_id] = maybe_tree
121
def revision_graph(self):
122
if self._revision_graph is None:
123
self._revision_graph = self.this_branch.repository.get_graph()
124
return self._revision_graph
126
def _set_base_is_ancestor(self, value):
127
self._base_is_ancestor = value
129
def _get_base_is_ancestor(self):
130
if self._base_is_ancestor is None:
131
self._base_is_ancestor = self.revision_graph.is_ancestor(
132
self.base_rev_id, self.this_basis)
133
return self._base_is_ancestor
135
base_is_ancestor = property(_get_base_is_ancestor, _set_base_is_ancestor)
137
def _set_base_is_other_ancestor(self, value):
138
self._base_is_other_ancestor = value
140
def _get_base_is_other_ancestor(self):
141
if self._base_is_other_ancestor is None:
142
if self.other_basis is None:
144
self._base_is_other_ancestor = self.revision_graph.is_ancestor(
145
self.base_rev_id, self.other_basis)
146
return self._base_is_other_ancestor
148
base_is_other_ancestor = property(_get_base_is_other_ancestor,
149
_set_base_is_other_ancestor)
152
def from_uncommitted(tree, other_tree, pb=None, base_tree=None):
153
"""Return a Merger for uncommitted changes in other_tree.
155
:param tree: The tree to merge into
156
:param other_tree: The tree to get uncommitted changes from
157
:param pb: A progress indicator
158
:param base_tree: The basis to use for the merge. If unspecified,
159
other_tree.basis_tree() will be used.
161
if base_tree is None:
162
base_tree = other_tree.basis_tree()
163
merger = Merger(tree.branch, other_tree, base_tree, tree, pb)
164
merger.base_rev_id = merger.base_tree.get_revision_id()
165
merger.other_rev_id = None
166
merger.other_basis = merger.base_rev_id
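# Illustrative sketch (target_wt and source_wt are assumed working trees):
# from_uncommitted() builds a Merger that pulls only the uncommitted
# changes out of another working tree:
#
#   merger = Merger.from_uncommitted(target_wt, source_wt)
#   merger.merge_type = Merge3Merger
#   conflict_count = merger.do_merge()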
170
def from_mergeable(klass, tree, mergeable, pb):
171
"""Return a Merger for a bundle or merge directive.
173
:param tree: The tree to merge changes into
174
:param mergeable: A merge directive or bundle
175
:param pb: A progress indicator
177
mergeable.install_revisions(tree.branch.repository)
178
base_revision_id, other_revision_id, verified =\
179
mergeable.get_merge_request(tree.branch.repository)
180
revision_graph = tree.branch.repository.get_graph()
181
if base_revision_id is not None:
182
if (base_revision_id != _mod_revision.NULL_REVISION and
183
revision_graph.is_ancestor(
184
base_revision_id, tree.branch.last_revision())):
185
base_revision_id = None
187
warning('Performing cherrypick')
188
merger = klass.from_revision_ids(pb, tree, other_revision_id,
189
base_revision_id, revision_graph=
191
return merger, verified
194
def from_revision_ids(pb, tree, other, base=None, other_branch=None,
195
base_branch=None, revision_graph=None,
197
"""Return a Merger for revision-ids.
199
:param pb: A progress indicator
200
:param tree: The tree to merge changes into
201
:param other: The revision-id to use as OTHER
202
:param base: The revision-id to use as BASE. If not specified, will
204
:param other_branch: A branch containing the other revision-id. If
205
not supplied, tree.branch is used.
206
:param base_branch: A branch containing the base revision-id. If
207
not supplied, other_branch or tree.branch will be used.
208
:param revision_graph: If you have a revision_graph precomputed, pass
209
it in, otherwise it will be created for you.
210
:param tree_branch: The branch associated with tree. If not supplied,
211
tree.branch will be used.
213
if tree_branch is None:
214
tree_branch = tree.branch
215
merger = Merger(tree_branch, this_tree=tree, pb=pb,
216
revision_graph=revision_graph)
217
if other_branch is None:
218
other_branch = tree.branch
219
merger.set_other_revision(other, other_branch)
223
if base_branch is None:
224
base_branch = other_branch
225
merger.set_base_revision(base, base_branch)
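# Illustrative sketch (other_rev_id and source_branch are assumed names):
# from_revision_ids() is the usual entry point when OTHER (and optionally
# BASE) are given as revision ids:
#
#   merger = Merger.from_revision_ids(None, tree, other_rev_id,
#                                     other_branch=source_branch)
#   merger.merge_type = Merge3Merger
#   conflict_count = merger.do_merge()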
228
def revision_tree(self, revision_id, branch=None):
229
if revision_id not in self._cached_trees:
231
branch = self.this_branch
233
tree = self.this_tree.revision_tree(revision_id)
234
except errors.NoSuchRevisionInTree:
235
tree = branch.repository.revision_tree(revision_id)
236
self._cached_trees[revision_id] = tree
237
return self._cached_trees[revision_id]
239
def _get_tree(self, treespec, possible_transports=None):
240
from bzrlib import workingtree
241
location, revno = treespec
243
tree = workingtree.WorkingTree.open_containing(location)[0]
244
return tree.branch, tree
245
branch = Branch.open_containing(location, possible_transports)[0]
247
revision_id = branch.last_revision()
249
revision_id = branch.get_rev_id(revno)
250
revision_id = ensure_null(revision_id)
251
return branch, self.revision_tree(revision_id, branch)
324
self.conflict_handler = _MergeConflictHandler(self.this_tree,
325
base_tree, other_tree)
327
def revision_tree(self, revision_id):
328
return self.this_branch.repository.revision_tree(revision_id)
253
330
def ensure_revision_trees(self):
254
331
if self.this_revision_tree is None:
255
self.this_basis_tree = self.revision_tree(self.this_basis)
332
self.this_basis_tree = self.this_branch.repository.revision_tree(
256
334
if self.this_basis == self.this_rev_id:
257
335
self.this_revision_tree = self.this_basis_tree
259
337
if self.other_rev_id is None:
260
338
other_basis_tree = self.revision_tree(self.other_basis)
261
if other_basis_tree.has_changes(self.other_tree):
339
changes = compare_trees(self.other_tree, other_basis_tree)
340
if changes.has_changed():
262
341
raise WorkingTreeNotRevision(self.this_tree)
263
other_rev_id = self.other_basis
342
other_rev_id = other_basis
264
343
self.other_tree = other_basis_tree
266
345
def file_revisions(self, file_id):
267
346
self.ensure_revision_trees()
268
347
def get_id(tree, file_id):
269
348
revision_id = tree.inventory[file_id].revision
349
assert revision_id is not None
270
350
return revision_id
271
351
if self.this_rev_id is None:
272
352
if self.this_basis_tree.get_file_sha1(file_id) != \
276
356
trees = (self.this_basis_tree, self.other_tree)
277
357
return [get_id(tree, file_id) for tree in trees]
279
def check_basis(self, check_clean, require_commits=True):
280
if self.this_basis is None and require_commits is True:
281
raise BzrCommandError("This branch has no commits."
282
" (perhaps you would prefer 'bzr pull')")
359
def merge_factory(self, file_id, base, other):
360
if self.merge_type.history_based:
361
if self.show_base is True:
362
raise BzrError("Cannot show base for hisory-based merges")
363
if self.reprocess is True:
364
raise BzrError("Cannot reprocess history-based merges")
366
t_revid, o_revid = self.file_revisions(file_id)
367
weave = self.this_basis_tree.get_weave(file_id)
368
contents_change = self.merge_type(weave, t_revid, o_revid)
370
if self.show_base is True or self.reprocess is True:
371
contents_change = self.merge_type(file_id, base, other,
372
show_base=self.show_base,
373
reprocess=self.reprocess)
375
contents_change = self.merge_type(file_id, base, other)
376
if self.backup_files:
377
contents_change = BackupBeforeChange(contents_change)
378
return contents_change
380
def check_basis(self, check_clean):
381
if self.this_basis is None:
382
raise BzrCommandError("This branch has no commits")
284
384
self.compare_basis()
285
385
if self.this_basis != self.this_rev_id:
286
raise errors.UncommittedChanges(self.this_tree)
386
raise BzrCommandError("Working tree has uncommitted changes.")
288
388
def compare_basis(self):
290
basis_tree = self.revision_tree(self.this_tree.last_revision())
291
except errors.NoSuchRevision:
292
basis_tree = self.this_tree.basis_tree()
293
if not self.this_tree.has_changes(basis_tree):
389
changes = compare_trees(self.this_tree,
390
self.this_tree.basis_tree(), False)
391
if not changes.has_changed():
294
392
self.this_rev_id = self.this_basis
296
394
def set_interesting_files(self, file_list):
297
self.interesting_files = file_list
396
self._set_interesting_files(file_list)
397
except NotVersionedError, e:
398
raise BzrCommandError("%s is not a source file in any"
401
def _set_interesting_files(self, file_list):
402
"""Set the list of interesting ids from a list of files."""
403
if file_list is None:
404
self.interesting_ids = None
407
interesting_ids = set()
408
for path in file_list:
410
for tree in (self.this_tree, self.base_tree, self.other_tree):
411
file_id = tree.inventory.path2id(path)
412
if file_id is not None:
413
interesting_ids.add(file_id)
416
raise NotVersionedError(path=path)
417
self.interesting_ids = interesting_ids
299
419
def set_pending(self):
300
if not self.base_is_ancestor or not self.base_is_other_ancestor or self.other_rev_id is None:
304
def _add_parent(self):
305
new_parents = self.this_tree.get_parent_ids() + [self.other_rev_id]
306
new_parent_trees = []
307
for revision_id in new_parents:
309
tree = self.revision_tree(revision_id)
310
except errors.NoSuchRevision:
314
new_parent_trees.append((revision_id, tree))
316
self.this_tree.set_parent_trees(new_parent_trees,
317
allow_leftmost_as_ghost=True)
319
for _revision_id, tree in new_parent_trees:
323
def set_other(self, other_revision, possible_transports=None):
324
"""Set the revision and tree to merge from.
326
This sets the other_tree, other_rev_id, other_basis attributes.
328
:param other_revision: The [path, revision] list to merge from.
330
self.other_branch, self.other_tree = self._get_tree(other_revision,
420
if not self.base_is_ancestor:
422
if self.other_rev_id is None:
424
ancestry = self.this_branch.repository.get_ancestry(self.this_basis)
425
if self.other_rev_id in ancestry:
427
self.this_tree.add_pending_merge(self.other_rev_id)
429
def set_other(self, other_revision):
430
other_branch, self.other_tree = _get_tree(other_revision,
332
432
if other_revision[1] == -1:
333
self.other_rev_id = _mod_revision.ensure_null(
334
self.other_branch.last_revision())
335
if _mod_revision.is_null(self.other_rev_id):
336
raise NoCommits(self.other_branch)
433
self.other_rev_id = other_branch.last_revision()
434
if self.other_rev_id is None:
435
raise NoCommits(other_branch)
337
436
self.other_basis = self.other_rev_id
338
437
elif other_revision[1] is not None:
339
self.other_rev_id = self.other_branch.get_rev_id(other_revision[1])
438
self.other_rev_id = other_branch.get_rev_id(other_revision[1])
340
439
self.other_basis = self.other_rev_id
342
441
self.other_rev_id = None
343
self.other_basis = self.other_branch.last_revision()
442
self.other_basis = other_branch.last_revision()
344
443
if self.other_basis is None:
345
raise NoCommits(self.other_branch)
346
if self.other_rev_id is not None:
347
self._cached_trees[self.other_rev_id] = self.other_tree
348
self._maybe_fetch(self.other_branch, self.this_branch, self.other_basis)
350
def set_other_revision(self, revision_id, other_branch):
351
"""Set 'other' based on a branch and revision id
353
:param revision_id: The revision to use for a tree
354
:param other_branch: The branch containing this tree
356
self.other_rev_id = revision_id
357
self.other_branch = other_branch
358
self._maybe_fetch(other_branch, self.this_branch, self.other_rev_id)
359
self.other_tree = self.revision_tree(revision_id)
360
self.other_basis = revision_id
362
def set_base_revision(self, revision_id, branch):
363
"""Set 'base' based on a branch and revision id
365
:param revision_id: The revision to use for a tree
366
:param branch: The branch containing this tree
368
self.base_rev_id = revision_id
369
self.base_branch = branch
370
self._maybe_fetch(branch, self.this_branch, revision_id)
371
self.base_tree = self.revision_tree(revision_id)
373
def _maybe_fetch(self, source, target, revision_id):
374
if not source.repository.has_same_location(target.repository):
375
target.fetch(source, revision_id)
378
revisions = [ensure_null(self.this_basis),
379
ensure_null(self.other_basis)]
380
if NULL_REVISION in revisions:
381
self.base_rev_id = NULL_REVISION
382
self.base_tree = self.revision_tree(self.base_rev_id)
383
self._is_criss_cross = False
385
lcas = self.revision_graph.find_lca(revisions[0], revisions[1])
386
self._is_criss_cross = False
388
self.base_rev_id = NULL_REVISION
390
self.base_rev_id = list(lcas)[0]
391
else: # len(lcas) > 1
393
# find_unique_lca can only handle 2 nodes, so we have to
394
# start back at the beginning. It is a shame to traverse
395
# the graph again, but better than re-implementing
397
self.base_rev_id = self.revision_graph.find_unique_lca(
398
revisions[0], revisions[1])
400
self.base_rev_id = self.revision_graph.find_unique_lca(
402
self._is_criss_cross = True
403
if self.base_rev_id == NULL_REVISION:
404
raise UnrelatedBranches()
405
if self._is_criss_cross:
406
warning('Warning: criss-cross merge encountered. See bzr'
407
' help criss-cross.')
408
mutter('Criss-cross lcas: %r' % lcas)
409
interesting_revision_ids = [self.base_rev_id]
410
interesting_revision_ids.extend(lcas)
411
interesting_trees = dict((t.get_revision_id(), t)
412
for t in self.this_branch.repository.revision_trees(
413
interesting_revision_ids))
414
self._cached_trees.update(interesting_trees)
415
self.base_tree = interesting_trees.pop(self.base_rev_id)
416
sorted_lca_keys = self.revision_graph.find_merge_order(
418
self._lca_trees = [interesting_trees[key]
419
for key in sorted_lca_keys]
421
self.base_tree = self.revision_tree(self.base_rev_id)
422
self.base_is_ancestor = True
423
self.base_is_other_ancestor = True
424
mutter('Base revid: %r' % self.base_rev_id)
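# Worked example (illustrative): if THIS and OTHER have each merged the
# other once, find_lca() returns two revisions, e.g. set(['rev-2a',
# 'rev-2b']).  find_base() then falls back to find_unique_lca() for BASE,
# caches the LCA trees in self._lca_trees, and marks the merge as
# criss-cross so an LCA-aware merger (one with supports_lca_trees) can
# consult them.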
444
raise NoCommits(other_branch)
445
if other_branch.base != self.this_branch.base:
446
fetch(from_branch=other_branch, to_branch=self.this_branch,
447
last_revision=self.other_basis)
426
449
def set_base(self, base_revision):
427
"""Set the base revision to use for the merge.
429
:param base_revision: A 2-list containing a path and revision number.
431
450
mutter("doing merge() with no base_revision specified")
432
451
if base_revision == [None, None]:
453
self.base_rev_id = common_ancestor(self.this_basis,
455
self.this_branch.repository)
456
except NoCommonAncestor:
457
raise UnrelatedBranches()
458
self.base_tree = _get_revid_tree(self.this_branch, self.base_rev_id,
460
self.base_is_ancestor = True
435
base_branch, self.base_tree = self._get_tree(base_revision)
462
base_branch, self.base_tree = _get_tree(base_revision)
436
463
if base_revision[1] == -1:
437
464
self.base_rev_id = base_branch.last_revision()
438
465
elif base_revision[1] is None:
439
self.base_rev_id = _mod_revision.NULL_REVISION
441
self.base_rev_id = _mod_revision.ensure_null(
442
base_branch.get_rev_id(base_revision[1]))
443
self._maybe_fetch(base_branch, self.this_branch, self.base_rev_id)
445
def make_merger(self):
446
kwargs = {'working_tree':self.this_tree, 'this_tree': self.this_tree,
447
'other_tree': self.other_tree,
448
'interesting_ids': self.interesting_ids,
449
'interesting_files': self.interesting_files,
452
if self.merge_type.requires_base:
453
kwargs['base_tree'] = self.base_tree
454
if self.merge_type.supports_reprocess:
455
kwargs['reprocess'] = self.reprocess
457
raise BzrError("Conflict reduction is not supported for merge"
458
" type %s." % self.merge_type)
459
if self.merge_type.supports_show_base:
460
kwargs['show_base'] = self.show_base
462
raise BzrError("Showing base is not supported for this"
463
" merge type. %s" % self.merge_type)
464
if (not getattr(self.merge_type, 'supports_reverse_cherrypick', True)
465
and not self.base_is_other_ancestor):
466
raise errors.CannotReverseCherrypick()
467
if self.merge_type.supports_cherrypick:
468
kwargs['cherrypick'] = (not self.base_is_ancestor or
469
not self.base_is_other_ancestor)
470
if self._is_criss_cross and getattr(self.merge_type,
471
'supports_lca_trees', False):
472
kwargs['lca_trees'] = self._lca_trees
473
return self.merge_type(pb=self._pb,
474
change_reporter=self.change_reporter,
477
def _do_merge_to(self, merge):
478
if self.other_branch is not None:
479
self.other_branch.update_references(self.this_branch)
481
if self.recurse == 'down':
482
for relpath, file_id in self.this_tree.iter_references():
483
sub_tree = self.this_tree.get_nested_tree(file_id, relpath)
484
other_revision = self.other_tree.get_reference_revision(
486
if other_revision == sub_tree.last_revision():
488
sub_merge = Merger(sub_tree.branch, this_tree=sub_tree)
489
sub_merge.merge_type = self.merge_type
490
other_branch = self.other_branch.reference_parent(file_id, relpath)
491
sub_merge.set_other_revision(other_revision, other_branch)
492
base_revision = self.base_tree.get_reference_revision(file_id)
493
sub_merge.base_tree = \
494
sub_tree.branch.repository.revision_tree(base_revision)
495
sub_merge.base_rev_id = base_revision
499
self.this_tree.lock_tree_write()
501
if self.base_tree is not None:
502
self.base_tree.lock_read()
504
if self.other_tree is not None:
505
self.other_tree.lock_read()
507
merge = self.make_merger()
508
self._do_merge_to(merge)
510
if self.other_tree is not None:
511
self.other_tree.unlock()
513
if self.base_tree is not None:
514
self.base_tree.unlock()
516
self.this_tree.unlock()
517
if len(merge.cooked_conflicts) == 0:
518
if not self.ignore_zero and not is_quiet():
519
note("All changes applied successfully.")
521
note("%d conflicts encountered." % len(merge.cooked_conflicts))
523
return len(merge.cooked_conflicts)
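# Overall flow (illustrative summary): do_merge() locks THIS, BASE and
# OTHER, builds the per-file merger via make_merger(), applies it, and
# returns the number of cooked conflicts, e.g.:
#
#   conflict_count = merger.do_merge()
#   if conflict_count:
#       note("%d conflicts encountered." % conflict_count)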
526
class _InventoryNoneEntry(object):
527
"""This represents an inventory entry which *isn't there*.
529
It simplifies the merging logic if we always have an InventoryEntry, even
530
if it isn't actually present
537
symlink_target = None
540
_none_entry = _InventoryNoneEntry()
543
class Merge3Merger(object):
544
"""Three-way merger that uses the merge3 text merger"""
546
supports_reprocess = True
547
supports_show_base = True
548
history_based = False
549
supports_cherrypick = True
550
supports_reverse_cherrypick = True
551
winner_idx = {"this": 2, "other": 1, "conflict": 1}
552
supports_lca_trees = True
554
def __init__(self, working_tree, this_tree, base_tree, other_tree,
555
interesting_ids=None, reprocess=False, show_base=False,
556
pb=DummyProgress(), pp=None, change_reporter=None,
557
interesting_files=None, do_merge=True,
558
cherrypick=False, lca_trees=None):
559
"""Initialize the merger object and perform the merge.
561
:param working_tree: The working tree to apply the merge to
562
:param this_tree: The local tree in the merge operation
563
:param base_tree: The common tree in the merge operation
564
:param other_tree: The other tree to merge changes from
565
:param interesting_ids: The file_ids of files that should
566
participate in the merge. May not be combined with
568
:param reprocess: If True, perform conflict-reduction processing.
569
:param show_base: If True, show the base revision in text conflicts.
570
(incompatible with reprocess)
571
:param pb: A Progress bar
572
:param pp: A ProgressPhase object
573
:param change_reporter: An object that should report changes made
574
:param interesting_files: The tree-relative paths of files that should
575
participate in the merge. If these paths refer to directories,
576
the contents of those directories will also be included. May not
577
be combined with interesting_ids. If neither interesting_files nor
578
interesting_ids is specified, all files may participate in the
580
:param lca_trees: Can be set to a dictionary of {revision_id:rev_tree}
581
if the ancestry was found to include a criss-cross merge.
582
Otherwise should be None.
584
object.__init__(self)
585
if interesting_files is not None and interesting_ids is not None:
587
'specify either interesting_ids or interesting_files')
588
self.interesting_ids = interesting_ids
589
self.interesting_files = interesting_files
590
self.this_tree = working_tree
591
self.base_tree = base_tree
592
self.other_tree = other_tree
593
self._raw_conflicts = []
594
self.cooked_conflicts = []
595
self.reprocess = reprocess
596
self.show_base = show_base
597
self._lca_trees = lca_trees
598
# Uncommenting this will change the default algorithm to always use
599
# _entries_lca. This can be useful for running the test suite and
600
# making sure we haven't missed any corner cases.
601
# if lca_trees is None:
602
# self._lca_trees = [self.base_tree]
605
self.change_reporter = change_reporter
606
self.cherrypick = cherrypick
608
self.pp = ProgressPhase("Merge phase", 3, self.pb)
613
self.this_tree.lock_tree_write()
614
self.base_tree.lock_read()
615
self.other_tree.lock_read()
616
self.tt = TreeTransform(self.this_tree, self.pb)
619
self._compute_transform()
621
results = self.tt.apply(no_conflicts=True)
622
self.write_modified(results)
624
self.this_tree.add_conflicts(self.cooked_conflicts)
625
except UnsupportedOperation:
629
self.other_tree.unlock()
630
self.base_tree.unlock()
631
self.this_tree.unlock()
634
def make_preview_transform(self):
635
self.base_tree.lock_read()
636
self.other_tree.lock_read()
637
self.tt = TransformPreview(self.this_tree)
640
self._compute_transform()
643
self.other_tree.unlock()
644
self.base_tree.unlock()
648
def _compute_transform(self):
649
if self._lca_trees is None:
650
entries = self._entries3()
651
resolver = self._three_way
653
entries = self._entries_lca()
654
resolver = self._lca_multi_way
655
child_pb = ui.ui_factory.nested_progress_bar()
657
for num, (file_id, changed, parents3, names3,
658
executable3) in enumerate(entries):
659
child_pb.update('Preparing file merge', num, len(entries))
660
self._merge_names(file_id, parents3, names3, resolver=resolver)
662
file_status = self.merge_contents(file_id)
664
file_status = 'unmodified'
665
self._merge_executable(file_id,
666
executable3, file_status, resolver=resolver)
671
child_pb = ui.ui_factory.nested_progress_bar()
673
fs_conflicts = resolve_conflicts(self.tt, child_pb,
674
lambda t, c: conflict_pass(t, c, self.other_tree))
677
if self.change_reporter is not None:
678
from bzrlib import delta
679
delta.report_changes(
680
self.tt.iter_changes(), self.change_reporter)
681
self.cook_conflicts(fs_conflicts)
682
for conflict in self.cooked_conflicts:
686
"""Gather data about files modified between three trees.
688
Return a list of tuples of file_id, changed, parents3, names3,
689
executable3. changed is a boolean indicating whether the file contents
690
or kind were changed. parents3 is a tuple of parent ids for base,
691
other and this. names3 is a tuple of names for base, other and this.
692
executable3 is a tuple of execute-bit values for base, other and this.
695
iterator = self.other_tree.iter_changes(self.base_tree,
696
include_unchanged=True, specific_files=self.interesting_files,
697
extra_trees=[self.this_tree])
698
this_entries = dict((e.file_id, e) for p, e in
699
self.this_tree.iter_entries_by_dir(
700
self.interesting_ids))
701
for (file_id, paths, changed, versioned, parents, names, kind,
702
executable) in iterator:
703
if (self.interesting_ids is not None and
704
file_id not in self.interesting_ids):
706
entry = this_entries.get(file_id)
707
if entry is not None:
708
this_name = entry.name
709
this_parent = entry.parent_id
710
this_executable = entry.executable
714
this_executable = None
715
parents3 = parents + (this_parent,)
716
names3 = names + (this_name,)
717
executable3 = executable + (this_executable,)
718
result.append((file_id, changed, parents3, names3, executable3))
721
def _entries_lca(self):
722
"""Gather data about files modified between multiple trees.
724
This compares OTHER versus all LCA trees, and for interesting entries,
725
it then compares with THIS and BASE.
727
For the multi-valued entries, the format will be (BASE, [lca1, lca2])
728
:return: [(file_id, changed, parents, names, executable)]
729
file_id Simple file_id of the entry
730
changed Boolean, True if the kind or contents changed
732
parents ((base, [parent_id, in, lcas]), parent_id_other,
734
names ((base, [name, in, lcas]), name_in_other, name_in_this)
735
executable ((base, [exec, in, lcas]), exec_in_other, exec_in_this)
737
if self.interesting_files is not None:
738
lookup_trees = [self.this_tree, self.base_tree]
739
lookup_trees.extend(self._lca_trees)
740
# I think we should include the lca trees as well
741
interesting_ids = self.other_tree.paths2ids(self.interesting_files,
744
interesting_ids = self.interesting_ids
746
walker = _mod_tree.MultiWalker(self.other_tree, self._lca_trees)
748
base_inventory = self.base_tree.inventory
749
this_inventory = self.this_tree.inventory
750
for path, file_id, other_ie, lca_values in walker.iter_all():
751
# Is this modified at all from any of the other trees?
753
other_ie = _none_entry
754
if interesting_ids is not None and file_id not in interesting_ids:
757
# If other_revision is found in any of the lcas, that means this
758
# node is uninteresting. This is because when merging, if there are
759
# multiple heads(), we have to create a new node. So if we didn't,
760
# we know that the ancestry is linear, and that OTHER did not
762
# See doc/developers/lca_merge_resolution.txt for details
763
other_revision = other_ie.revision
764
if other_revision is not None:
765
# We can't use this shortcut when other_revision is None,
766
# because it may be None because things are WorkingTrees, and
767
# not because it is *actually* None.
768
is_unmodified = False
769
for lca_path, ie in lca_values:
770
if ie is not None and ie.revision == other_revision:
777
for lca_path, lca_ie in lca_values:
779
lca_entries.append(_none_entry)
781
lca_entries.append(lca_ie)
783
if file_id in base_inventory:
784
base_ie = base_inventory[file_id]
786
base_ie = _none_entry
788
if file_id in this_inventory:
789
this_ie = this_inventory[file_id]
791
this_ie = _none_entry
797
for lca_ie in lca_entries:
798
lca_kinds.append(lca_ie.kind)
799
lca_parent_ids.append(lca_ie.parent_id)
800
lca_names.append(lca_ie.name)
801
lca_executable.append(lca_ie.executable)
803
kind_winner = self._lca_multi_way(
804
(base_ie.kind, lca_kinds),
805
other_ie.kind, this_ie.kind)
806
parent_id_winner = self._lca_multi_way(
807
(base_ie.parent_id, lca_parent_ids),
808
other_ie.parent_id, this_ie.parent_id)
809
name_winner = self._lca_multi_way(
810
(base_ie.name, lca_names),
811
other_ie.name, this_ie.name)
813
content_changed = True
814
if kind_winner == 'this':
815
# No kind change in OTHER, see if there are *any* changes
816
if other_ie.kind == 'directory':
817
if parent_id_winner == 'this' and name_winner == 'this':
818
# No change for this directory in OTHER, skip
820
content_changed = False
821
elif other_ie.kind is None or other_ie.kind == 'file':
822
def get_sha1(ie, tree):
823
if ie.kind != 'file':
825
return tree.get_file_sha1(file_id)
826
base_sha1 = get_sha1(base_ie, self.base_tree)
827
lca_sha1s = [get_sha1(ie, tree) for ie, tree
828
in zip(lca_entries, self._lca_trees)]
829
this_sha1 = get_sha1(this_ie, self.this_tree)
830
other_sha1 = get_sha1(other_ie, self.other_tree)
831
sha1_winner = self._lca_multi_way(
832
(base_sha1, lca_sha1s), other_sha1, this_sha1,
833
allow_overriding_lca=False)
834
exec_winner = self._lca_multi_way(
835
(base_ie.executable, lca_executable),
836
other_ie.executable, this_ie.executable)
837
if (parent_id_winner == 'this' and name_winner == 'this'
838
and sha1_winner == 'this' and exec_winner == 'this'):
839
# No kind, parent, name, exec, or content change for
840
# OTHER, so this node is not considered interesting
842
if sha1_winner == 'this':
843
content_changed = False
844
elif other_ie.kind == 'symlink':
845
def get_target(ie, tree):
846
if ie.kind != 'symlink':
848
return tree.get_symlink_target(file_id)
849
base_target = get_target(base_ie, self.base_tree)
850
lca_targets = [get_target(ie, tree) for ie, tree
851
in zip(lca_entries, self._lca_trees)]
852
this_target = get_target(this_ie, self.this_tree)
853
other_target = get_target(other_ie, self.other_tree)
854
target_winner = self._lca_multi_way(
855
(base_target, lca_targets),
856
other_target, this_target)
857
if (parent_id_winner == 'this' and name_winner == 'this'
858
and target_winner == 'this'):
859
# No kind, parent, name, or symlink target change
862
if target_winner == 'this':
863
content_changed = False
864
elif other_ie.kind == 'tree-reference':
865
# The 'changed' information seems to be handled at a higher
866
# level. At least, _entries3 returns False for content
867
# changed, even when at a new revision_id.
868
content_changed = False
869
if (parent_id_winner == 'this' and name_winner == 'this'):
870
# Nothing interesting
873
raise AssertionError('unhandled kind: %s' % other_ie.kind)
874
# XXX: We need to handle kind == 'symlink'
876
# If we have gotten this far, that means something has changed
877
result.append((file_id, content_changed,
878
((base_ie.parent_id, lca_parent_ids),
879
other_ie.parent_id, this_ie.parent_id),
880
((base_ie.name, lca_names),
881
other_ie.name, this_ie.name),
882
((base_ie.executable, lca_executable),
883
other_ie.executable, this_ie.executable)
890
self.tt.final_kind(self.tt.root)
892
self.tt.cancel_deletion(self.tt.root)
893
if self.tt.final_file_id(self.tt.root) is None:
894
self.tt.version_file(self.tt.tree_file_id(self.tt.root),
896
other_root_file_id = self.other_tree.get_root_id()
897
if other_root_file_id is None:
899
other_root = self.tt.trans_id_file_id(other_root_file_id)
900
if other_root == self.tt.root:
903
self.tt.final_kind(other_root)
906
if self.other_tree.inventory.root.file_id in self.this_tree.inventory:
907
# the other tree's root is a non-root in the current tree
909
self.reparent_children(self.other_tree.inventory.root, self.tt.root)
910
self.tt.cancel_creation(other_root)
911
self.tt.cancel_versioning(other_root)
913
def reparent_children(self, ie, target):
914
for thing, child in ie.children.iteritems():
915
trans_id = self.tt.trans_id_file_id(child.file_id)
916
self.tt.adjust_path(self.tt.final_name(trans_id), target, trans_id)
918
def write_modified(self, results):
920
for path in results.modified_paths:
921
file_id = self.this_tree.path2id(self.this_tree.relpath(path))
924
hash = self.this_tree.get_file_sha1(file_id)
927
modified_hashes[file_id] = hash
928
self.this_tree.set_merge_modified(modified_hashes)
931
def parent(entry, file_id):
932
"""Determine the parent for a file_id (used as a key method)"""
935
return entry.parent_id
938
def name(entry, file_id):
939
"""Determine the name for a file_id (used as a key method)"""
945
def contents_sha1(tree, file_id):
946
"""Determine the sha1 of the file contents (used as a key method)."""
947
if file_id not in tree:
949
return tree.get_file_sha1(file_id)
952
def executable(tree, file_id):
953
"""Determine the executability of a file-id (used as a key method)."""
954
if file_id not in tree:
956
if tree.kind(file_id) != "file":
958
return tree.is_executable(file_id)
961
def kind(tree, file_id):
962
"""Determine the kind of a file-id (used as a key method)."""
963
if file_id not in tree:
965
return tree.kind(file_id)
968
def _three_way(base, other, this):
969
#if base == other, either they all agree, or only THIS has changed.
972
elif this not in (base, other):
974
# "Ambiguous clean merge" -- both sides have made the same change.
977
# this == base: only other has changed.
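# Decision table for _three_way (illustrative summary of the logic above):
#
#   base == other                -> 'this'     (only THIS can have changed)
#   this == other != base        -> 'this'     (both sides made the same change)
#   this == base != other        -> 'other'    (only OTHER changed)
#   base, other, this all differ -> 'conflict'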
982
def _lca_multi_way(bases, other, this, allow_overriding_lca=True):
983
"""Consider LCAs when determining whether a change has occurred.
985
If the LCAs are all identical, this is the same as a _three_way comparison.
987
:param bases: value in (BASE, [LCAS])
988
:param other: value in OTHER
989
:param this: value in THIS
990
:param allow_overriding_lca: If there is more than one unique lca
991
value, allow OTHER to override THIS if it has a new value, and
992
THIS only has an lca value, or vice versa. This is appropriate for
993
truly scalar values, not as much for non-scalars.
994
:return: 'this', 'other', or 'conflict' depending on whether an entry
997
# See doc/developers/lca_tree_merging.txt for details about this
1000
# Either Ambiguously clean, or nothing was actually changed. We
1003
base_val, lca_vals = bases
1004
# Remove 'base_val' from the lca_vals, because it is not interesting
1005
filtered_lca_vals = [lca_val for lca_val in lca_vals
1006
if lca_val != base_val]
1007
if len(filtered_lca_vals) == 0:
1008
return Merge3Merger._three_way(base_val, other, this)
1010
unique_lca_vals = set(filtered_lca_vals)
1011
if len(unique_lca_vals) == 1:
1012
return Merge3Merger._three_way(unique_lca_vals.pop(), other, this)
1014
if allow_overriding_lca:
1015
if other in unique_lca_vals:
1016
if this in unique_lca_vals:
1017
# Each side picked a different lca, conflict
1020
# This has a value which supersedes both lca values, and
1021
# other only has an lca value
1023
elif this in unique_lca_vals:
1024
# OTHER has a value which supersedes both lca values, and this
1025
# only has an lca value
1028
# At this point, the lcas disagree, and the tips disagree
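# Worked example (illustrative): with bases = ('a', ['b', 'c']),
# other = 'b', this = 'c', both tips merely kept different LCA values, so
# the result is 'conflict'.  With other = 'd', this = 'c' instead, OTHER
# introduced a genuinely new value while THIS still sits on an LCA value,
# so the result is 'other' when allow_overriding_lca is True.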
1032
def scalar_three_way(this_tree, base_tree, other_tree, file_id, key):
1033
"""Do a three-way test on a scalar.
1034
Return "this", "other" or "conflict", depending whether a value wins.
1036
key_base = key(base_tree, file_id)
1037
key_other = key(other_tree, file_id)
1038
#if base == other, either they all agree, or only THIS has changed.
1039
if key_base == key_other:
1041
key_this = key(this_tree, file_id)
1042
# "Ambiguous clean merge"
1043
if key_this == key_other:
1045
elif key_this == key_base:
1050
def merge_names(self, file_id):
1051
def get_entry(tree):
1052
if file_id in tree.inventory:
1053
return tree.inventory[file_id]
1056
this_entry = get_entry(self.this_tree)
1057
other_entry = get_entry(self.other_tree)
1058
base_entry = get_entry(self.base_tree)
1059
entries = (base_entry, other_entry, this_entry)
1062
for entry in entries:
1065
parents.append(None)
1067
names.append(entry.name)
1068
parents.append(entry.parent_id)
1069
return self._merge_names(file_id, parents, names,
1070
resolver=self._three_way)
1072
def _merge_names(self, file_id, parents, names, resolver):
1073
"""Perform a merge on file_id names and parents"""
1074
base_name, other_name, this_name = names
1075
base_parent, other_parent, this_parent = parents
1077
name_winner = resolver(*names)
1079
parent_id_winner = resolver(*parents)
1080
if this_name is None:
1081
if name_winner == "this":
1082
name_winner = "other"
1083
if parent_id_winner == "this":
1084
parent_id_winner = "other"
1085
if name_winner == "this" and parent_id_winner == "this":
1087
if name_winner == "conflict":
1088
trans_id = self.tt.trans_id_file_id(file_id)
1089
self._raw_conflicts.append(('name conflict', trans_id,
1090
this_name, other_name))
1091
if parent_id_winner == "conflict":
1092
trans_id = self.tt.trans_id_file_id(file_id)
1093
self._raw_conflicts.append(('parent conflict', trans_id,
1094
this_parent, other_parent))
1095
if other_name is None:
1096
# it doesn't matter whether the result was 'other' or
1097
# 'conflict'-- if there's no 'other', we leave it alone.
1099
# if we get here, name_winner and parent_winner are set to safe values.
1100
trans_id = self.tt.trans_id_file_id(file_id)
1101
parent_id = parents[self.winner_idx[parent_id_winner]]
1102
if parent_id is not None:
1103
parent_trans_id = self.tt.trans_id_file_id(parent_id)
1104
self.tt.adjust_path(names[self.winner_idx[name_winner]],
1105
parent_trans_id, trans_id)
1107
def merge_contents(self, file_id):
1108
"""Performs a merge on file_id contents."""
1109
def contents_pair(tree):
1110
if file_id not in tree:
1112
kind = tree.kind(file_id)
1114
contents = tree.get_file_sha1(file_id)
1115
elif kind == "symlink":
1116
contents = tree.get_symlink_target(file_id)
1119
return kind, contents
1121
def contents_conflict():
1122
trans_id = self.tt.trans_id_file_id(file_id)
1123
name = self.tt.final_name(trans_id)
1124
parent_id = self.tt.final_parent(trans_id)
1125
if file_id in self.this_tree.inventory:
1126
self.tt.unversion_file(trans_id)
1127
if file_id in self.this_tree:
1128
self.tt.delete_contents(trans_id)
1129
file_group = self._dump_conflicts(name, parent_id, file_id,
1131
self._raw_conflicts.append(('contents conflict', file_group))
1133
# See SPOT run. run, SPOT, run.
1134
# So we're not QUITE repeating ourselves; we do tricky things with
1136
base_pair = contents_pair(self.base_tree)
1137
other_pair = contents_pair(self.other_tree)
1139
this_pair = contents_pair(self.this_tree)
1140
lca_pairs = [contents_pair(tree) for tree in self._lca_trees]
1141
winner = self._lca_multi_way((base_pair, lca_pairs), other_pair,
1142
this_pair, allow_overriding_lca=False)
1144
if base_pair == other_pair:
1147
# We delayed evaluating this_pair as long as we can to avoid
1148
# unnecessary sha1 calculation
1149
this_pair = contents_pair(self.this_tree)
1150
winner = self._three_way(base_pair, other_pair, this_pair)
1151
if winner == 'this':
1152
# No interesting changes introduced by OTHER
1154
trans_id = self.tt.trans_id_file_id(file_id)
1155
if winner == 'other':
1156
# OTHER is a straight winner, so replace this contents with other
1157
file_in_this = file_id in self.this_tree
1159
# Remove any existing contents
1160
self.tt.delete_contents(trans_id)
1161
if file_id in self.other_tree:
1162
# OTHER changed the file
1163
create_from_tree(self.tt, trans_id,
1164
self.other_tree, file_id)
1165
if not file_in_this:
1166
self.tt.version_file(file_id, trans_id)
1169
# OTHER deleted the file
1170
self.tt.unversion_file(trans_id)
1173
# We have a hypothetical conflict, but if we have files, then we
1174
# can try to merge the content
1175
if this_pair[0] == 'file' and other_pair[0] == 'file':
1176
# THIS and OTHER are both files, so text merge. Either
1177
# BASE is a file, or both converted to files, so at least we
1178
# have agreement that output should be a file.
1180
self.text_merge(file_id, trans_id)
1182
return contents_conflict()
1183
if file_id not in self.this_tree:
1184
self.tt.version_file(file_id, trans_id)
1186
self.tt.tree_kind(trans_id)
1187
self.tt.delete_contents(trans_id)
1192
return contents_conflict()
1194
def get_lines(self, tree, file_id):
1195
"""Return the lines in a file, or an empty list."""
1197
return tree.get_file(file_id).readlines()
1201
def text_merge(self, file_id, trans_id):
1202
"""Perform a three-way text merge on a file_id"""
1203
# it's possible that we got here with base as a different type.
1204
# if so, we just want two-way text conflicts.
1205
if file_id in self.base_tree and \
1206
self.base_tree.kind(file_id) == "file":
1207
base_lines = self.get_lines(self.base_tree, file_id)
1210
other_lines = self.get_lines(self.other_tree, file_id)
1211
this_lines = self.get_lines(self.this_tree, file_id)
1212
m3 = Merge3(base_lines, this_lines, other_lines,
1213
is_cherrypick=self.cherrypick)
1214
start_marker = "!START OF MERGE CONFLICT!" + "I HOPE THIS IS UNIQUE"
1215
if self.show_base is True:
1216
base_marker = '|' * 7
1220
def iter_merge3(retval):
1221
retval["text_conflicts"] = False
1222
for line in m3.merge_lines(name_a = "TREE",
1223
name_b = "MERGE-SOURCE",
1224
name_base = "BASE-REVISION",
1225
start_marker=start_marker,
1226
base_marker=base_marker,
1227
reprocess=self.reprocess):
1228
if line.startswith(start_marker):
1229
retval["text_conflicts"] = True
1230
yield line.replace(start_marker, '<' * 7)
1234
merge3_iterator = iter_merge3(retval)
1235
self.tt.create_file(merge3_iterator, trans_id)
1236
if retval["text_conflicts"] is True:
1237
self._raw_conflicts.append(('text conflict', trans_id))
1238
name = self.tt.final_name(trans_id)
1239
parent_id = self.tt.final_parent(trans_id)
1240
file_group = self._dump_conflicts(name, parent_id, file_id,
1241
this_lines, base_lines,
1243
file_group.append(trans_id)
1245
def _dump_conflicts(self, name, parent_id, file_id, this_lines=None,
1246
base_lines=None, other_lines=None, set_version=False,
1248
"""Emit conflict files.
1249
If this_lines, base_lines, or other_lines are omitted, they will be
1250
determined automatically. If set_version is true, the .OTHER, .THIS
1251
or .BASE (in that order) will be created as versioned files.
1253
data = [('OTHER', self.other_tree, other_lines),
1254
('THIS', self.this_tree, this_lines)]
1256
data.append(('BASE', self.base_tree, base_lines))
1259
for suffix, tree, lines in data:
1261
trans_id = self._conflict_file(name, parent_id, tree, file_id,
1263
file_group.append(trans_id)
1264
if set_version and not versioned:
1265
self.tt.version_file(file_id, trans_id)
1269
def _conflict_file(self, name, parent_id, tree, file_id, suffix,
1271
"""Emit a single conflict file."""
1272
name = name + '.' + suffix
1273
trans_id = self.tt.create_path(name, parent_id)
1274
create_from_tree(self.tt, trans_id, tree, file_id, lines)
1277
def merge_executable(self, file_id, file_status):
1278
"""Perform a merge on the execute bit."""
1279
executable = [self.executable(t, file_id) for t in (self.base_tree,
1280
self.other_tree, self.this_tree)]
1281
self._merge_executable(file_id, executable, file_status,
1282
resolver=self._three_way)
1284
def _merge_executable(self, file_id, executable, file_status,
1286
"""Perform a merge on the execute bit."""
1287
base_executable, other_executable, this_executable = executable
1288
if file_status == "deleted":
1290
winner = resolver(*executable)
1291
if winner == "conflict":
1292
# There must be a None in here, if we have a conflict, but we
1293
# need executability since file status was not deleted.
1294
if self.executable(self.other_tree, file_id) is None:
1298
if winner == 'this' and file_status != "modified":
1300
trans_id = self.tt.trans_id_file_id(file_id)
1302
if self.tt.final_kind(trans_id) != "file":
1306
if winner == "this":
1307
executability = this_executable
1309
if file_id in self.other_tree:
1310
executability = other_executable
1311
elif file_id in self.this_tree:
1312
executability = this_executable
1313
elif file_id in self.base_tree:
1314
executability = base_executable
1315
if executability is not None:
1316
trans_id = self.tt.trans_id_file_id(file_id)
1317
self.tt.set_executability(executability, trans_id)
1319
def cook_conflicts(self, fs_conflicts):
1320
"""Convert all conflicts into a form that doesn't depend on trans_id"""
1321
from conflicts import Conflict
1323
self.cooked_conflicts.extend(cook_conflicts(fs_conflicts, self.tt))
1324
fp = FinalPaths(self.tt)
1325
for conflict in self._raw_conflicts:
1326
conflict_type = conflict[0]
1327
if conflict_type in ('name conflict', 'parent conflict'):
1328
trans_id = conflict[1]
1329
conflict_args = conflict[2:]
1330
if trans_id not in name_conflicts:
1331
name_conflicts[trans_id] = {}
1332
unique_add(name_conflicts[trans_id], conflict_type,
1334
if conflict_type == 'contents conflict':
1335
for trans_id in conflict[1]:
1336
file_id = self.tt.final_file_id(trans_id)
1337
if file_id is not None:
1339
path = fp.get_path(trans_id)
1340
for suffix in ('.BASE', '.THIS', '.OTHER'):
1341
if path.endswith(suffix):
1342
path = path[:-len(suffix)]
1344
c = Conflict.factory(conflict_type, path=path, file_id=file_id)
1345
self.cooked_conflicts.append(c)
1346
if conflict_type == 'text conflict':
1347
trans_id = conflict[1]
1348
path = fp.get_path(trans_id)
1349
file_id = self.tt.final_file_id(trans_id)
1350
c = Conflict.factory(conflict_type, path=path, file_id=file_id)
1351
self.cooked_conflicts.append(c)
1353
for trans_id, conflicts in name_conflicts.iteritems():
1355
this_parent, other_parent = conflicts['parent conflict']
1356
if this_parent == other_parent:
1357
raise AssertionError()
1359
this_parent = other_parent = \
1360
self.tt.final_file_id(self.tt.final_parent(trans_id))
1362
this_name, other_name = conflicts['name conflict']
1363
if this_name == other_name:
1364
raise AssertionError()
1366
this_name = other_name = self.tt.final_name(trans_id)
1367
other_path = fp.get_path(trans_id)
1368
if this_parent is not None and this_name is not None:
1369
this_parent_path = \
1370
fp.get_path(self.tt.trans_id_file_id(this_parent))
1371
this_path = pathjoin(this_parent_path, this_name)
1373
this_path = "<deleted>"
1374
file_id = self.tt.final_file_id(trans_id)
1375
c = Conflict.factory('path conflict', path=this_path,
1376
conflict_path=other_path, file_id=file_id)
1377
self.cooked_conflicts.append(c)
1378
self.cooked_conflicts.sort(key=Conflict.sort_key)


class WeaveMerger(Merge3Merger):
    """Three-way tree merger, text weave merger."""
    supports_reprocess = True
    supports_show_base = False
    supports_reverse_cherrypick = False
    history_based = True

    def _merged_lines(self, file_id):
        """Generate the merged lines.

        There is no distinction between lines that are meant to contain
        <<<<<<< and conflicts.
        """
        base = self.base_tree
        plan = self.this_tree.plan_file_merge(file_id, self.other_tree,
                                              base=base)
        if 'merge' in debug.debug_flags:
            plan = list(plan)
            trans_id = self.tt.trans_id_file_id(file_id)
            name = self.tt.final_name(trans_id) + '.plan'
            contents = ('%10s|%s' % l for l in plan)
            self.tt.new_file(name, self.tt.final_parent(trans_id), contents)
        textmerge = PlanWeaveMerge(plan, '<<<<<<< TREE\n',
                                   '>>>>>>> MERGE-SOURCE\n')
        return textmerge.merge_lines(self.reprocess)
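
    # Illustrative example (not from the original source): given a plan such as
    #     [('unchanged', 'a\n'), ('killed-b', 'b\n'), ('unchanged', 'c\n')]
    # the merged text keeps 'a\n' and 'c\n' but drops 'b\n', because killed-b
    # records that the other side deleted an otherwise unchanged line.  Only
    # regions where competing new-a and new-b lines meet are wrapped in the
    # '<<<<<<< TREE' / '>>>>>>> MERGE-SOURCE' markers used above.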

    def text_merge(self, file_id, trans_id):
        """Perform a (weave) text merge for a given file and file-id.

        If conflicts are encountered, .THIS and .OTHER files will be emitted,
        and a conflict will be noted.
        """
        lines, conflicts = self._merged_lines(file_id)
        lines = list(lines)
        # Note we're checking whether the OUTPUT is binary in this case,
        # because we don't want to get into weave merge guts.
        check_text_lines(lines)
        self.tt.create_file(lines, trans_id)
        if conflicts:
            self._raw_conflicts.append(('text conflict', trans_id))
            name = self.tt.final_name(trans_id)
            parent_id = self.tt.final_parent(trans_id)
            file_group = self._dump_conflicts(name, parent_id, file_id,
                                              no_base=True)
            file_group.append(trans_id)


class LCAMerger(WeaveMerger):

    def _merged_lines(self, file_id):
        """Generate the merged lines.

        There is no distinction between lines that are meant to contain
        <<<<<<< and conflicts.
        """
        base = self.base_tree
        plan = self.this_tree.plan_file_lca_merge(file_id, self.other_tree,
                                                  base=base)
        if 'merge' in debug.debug_flags:
            plan = list(plan)
            trans_id = self.tt.trans_id_file_id(file_id)
            name = self.tt.final_name(trans_id) + '.plan'
            contents = ('%10s|%s' % l for l in plan)
            self.tt.new_file(name, self.tt.final_parent(trans_id), contents)
        textmerge = PlanWeaveMerge(plan, '<<<<<<< TREE\n',
                                   '>>>>>>> MERGE-SOURCE\n')
        return textmerge.merge_lines(self.reprocess)


class Diff3Merger(Merge3Merger):
    """Three-way merger using external diff3 for text merging"""

    def dump_file(self, temp_dir, name, tree, file_id):
        out_path = pathjoin(temp_dir, name)
        out_file = open(out_path, "wb")
        in_file = tree.get_file(file_id)
        for line in in_file:
            out_file.write(line)
        out_file.close()
        return out_path

    def text_merge(self, file_id, trans_id):
        """Perform a diff3 merge using a specified file-id and trans-id.

        If conflicts are encountered, .BASE, .THIS and .OTHER conflict files
        will be dumped, and a conflict will be noted.
        """
        import bzrlib.patch
        temp_dir = osutils.mkdtemp(prefix="bzr-")
        try:
            new_file = pathjoin(temp_dir, "new")
            this = self.dump_file(temp_dir, "this", self.this_tree, file_id)
            base = self.dump_file(temp_dir, "base", self.base_tree, file_id)
            other = self.dump_file(temp_dir, "other", self.other_tree, file_id)
            status = bzrlib.patch.diff3(new_file, this, base, other)
            if status not in (0, 1):
                raise BzrError("Unhandled diff3 exit code")
            f = open(new_file, 'rb')
            self.tt.create_file(f, trans_id)
            if status == 1:
                name = self.tt.final_name(trans_id)
                parent_id = self.tt.final_parent(trans_id)
                self._dump_conflicts(name, parent_id, file_id)
                self._raw_conflicts.append(('text conflict', trans_id))
        finally:
            osutils.rmtree(temp_dir)


def merge_inner(this_branch, other_tree, base_tree, ignore_zero=False,
                backup_files=False,
                merge_type=Merge3Merger,
                interesting_ids=None,
                show_base=False,
                reprocess=False,
                other_rev_id=None,
                interesting_files=None,
                this_tree=None,
                pb=DummyProgress(),
                change_reporter=None):
    """Primary interface for merging.

    Typical use is probably
    'merge_inner(branch, branch.get_revision_tree(other_revision),
                 branch.get_revision_tree(base_revision))'
    """
    if this_tree is None:
        raise BzrError("bzrlib.merge.merge_inner requires a this_tree "
                       "parameter as of bzrlib version 0.8.")
    merger = Merger(this_branch, other_tree, base_tree, this_tree=this_tree,
                    pb=pb, change_reporter=change_reporter)
    merger.backup_files = backup_files
    merger.merge_type = merge_type
    merger.interesting_ids = interesting_ids
    merger.ignore_zero = ignore_zero
    if interesting_files:
        if interesting_ids:
            raise ValueError('Only supply interesting_ids'
                             ' or interesting_files')
        merger.interesting_files = interesting_files
    merger.show_base = show_base
    merger.reprocess = reprocess
    merger.other_rev_id = other_rev_id
    merger.other_basis = other_rev_id
    get_revision_id = getattr(base_tree, 'get_revision_id', None)
    if get_revision_id is None:
        get_revision_id = base_tree.last_revision
    merger.cache_trees_with_revision_ids([other_tree, base_tree, this_tree])
    merger.set_base_revision(get_revision_id(), this_branch)
    return merger.do_merge()
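
# A minimal usage sketch (not part of the original module): one way a caller
# holding a WorkingTree `wt` might drive merge_inner(), assuming the two
# revision ids are already known.  The helper name and its arguments are
# hypothetical.
def _example_merge_inner(wt, other_rev_id, base_rev_id):
    repository = wt.branch.repository
    return merge_inner(wt.branch,
                       repository.revision_tree(other_rev_id),
                       repository.revision_tree(base_rev_id),
                       this_tree=wt)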


def get_merge_type_registry():
    """Merge type registry is in bzrlib.option to avoid circular imports.

    This method provides a sanctioned way to retrieve it.
    """
    from bzrlib import option
    return option._merge_type_registry


def _plan_annotate_merge(annotated_a, annotated_b, ancestors_a, ancestors_b):
    def status_a(revision, text):
        if revision in ancestors_b:
            return 'killed-b', text
        return 'new-a', text

    def status_b(revision, text):
        if revision in ancestors_a:
            return 'killed-a', text
        return 'new-b', text

    plain_a = [t for (a, t) in annotated_a]
    plain_b = [t for (a, t) in annotated_b]
    matcher = patiencediff.PatienceSequenceMatcher(None, plain_a, plain_b)
    blocks = matcher.get_matching_blocks()
    a_cur = 0
    b_cur = 0
    for ai, bi, l in blocks:
        # process all mismatched sections
        # (last mismatched section is handled because blocks always
        # includes a 0-length last block)
        for revision, text in annotated_a[a_cur:ai]:
            yield status_a(revision, text)
        for revision, text in annotated_b[b_cur:bi]:
            yield status_b(revision, text)
        # and now the matched section
        a_cur = ai + l
        b_cur = bi + l
        for text_a in plain_a[ai:a_cur]:
            yield "unchanged", text_a


class _PlanMergeBase(object):

    def __init__(self, a_rev, b_rev, vf, key_prefix):
        """Constructor.

        :param a_rev: Revision-id of one revision to merge
        :param b_rev: Revision-id of the other revision to merge
        :param vf: A VersionedFiles containing both revisions
        :param key_prefix: A prefix for accessing keys in vf, typically
            (file_id,).
        """
        self.a_rev = a_rev
        self.b_rev = b_rev
        self.vf = vf
        self._last_lines = None
        self._last_lines_revision_id = None
        self._cached_matching_blocks = {}
        self._key_prefix = key_prefix
        self._precache_tip_lines()

    def _precache_tip_lines(self):
        lines = self.get_lines([self.a_rev, self.b_rev])
        self.lines_a = lines[self.a_rev]
        self.lines_b = lines[self.b_rev]

    def get_lines(self, revisions):
        """Get lines for revisions from the backing VersionedFiles.

        :raises RevisionNotPresent: on absent texts.
        """
        keys = [(self._key_prefix + (rev,)) for rev in revisions]
        result = {}
        for record in self.vf.get_record_stream(keys, 'unordered', True):
            if record.storage_kind == 'absent':
                raise errors.RevisionNotPresent(record.key, self.vf)
            result[record.key[-1]] = osutils.chunks_to_lines(
                record.get_bytes_as('chunked'))
        return result

    def plan_merge(self):
        """Generate a 'plan' for merging the two revisions.

        This involves comparing their texts and determining the cause of
        differences.  If text A has a line and text B does not, then either the
        line was added to text A, or it was deleted from B.  Once the causes
        are combined, they are written out in the format described in
        VersionedFile.plan_merge
        """
        blocks = self._get_matching_blocks(self.a_rev, self.b_rev)
        unique_a, unique_b = self._unique_lines(blocks)
        new_a, killed_b = self._determine_status(self.a_rev, unique_a)
        new_b, killed_a = self._determine_status(self.b_rev, unique_b)
        return self._iter_plan(blocks, new_a, killed_b, new_b, killed_a)
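
    # Illustrative example (not from the original source): for texts
    #     A = ['x\n', 'y\n']        B = ['x\n', 'z\n']
    # where 'y\n' was introduced in A's history and 'z\n' in B's, the plan is
    #     [('unchanged', 'x\n'), ('new-a', 'y\n'), ('new-b', 'z\n')]
    # whereas a line that B's history deleted would appear as 'killed-b'.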

    def _iter_plan(self, blocks, new_a, killed_b, new_b, killed_a):
        last_i = 0
        last_j = 0
        for i, j, n in blocks:
            for a_index in range(last_i, i):
                if a_index in new_a:
                    if a_index in killed_b:
                        yield 'conflicted-a', self.lines_a[a_index]
                    else:
                        yield 'new-a', self.lines_a[a_index]
                else:
                    yield 'killed-b', self.lines_a[a_index]
            for b_index in range(last_j, j):
                if b_index in new_b:
                    if b_index in killed_a:
                        yield 'conflicted-b', self.lines_b[b_index]
                    else:
                        yield 'new-b', self.lines_b[b_index]
                else:
                    yield 'killed-a', self.lines_b[b_index]
            # handle common lines
            for a_index in range(i, i+n):
                yield 'unchanged', self.lines_a[a_index]
            last_i = i + n
            last_j = j + n

    def _get_matching_blocks(self, left_revision, right_revision):
        """Return a description of which sections of two revisions match.

        See SequenceMatcher.get_matching_blocks
        """
        cached = self._cached_matching_blocks.get((left_revision,
                                                   right_revision))
        if cached is not None:
            return cached
        if self._last_lines_revision_id == left_revision:
            left_lines = self._last_lines
            right_lines = self.get_lines([right_revision])[right_revision]
        else:
            lines = self.get_lines([left_revision, right_revision])
            left_lines = lines[left_revision]
            right_lines = lines[right_revision]
        self._last_lines = right_lines
        self._last_lines_revision_id = right_revision
        matcher = patiencediff.PatienceSequenceMatcher(None, left_lines,
                                                       right_lines)
        return matcher.get_matching_blocks()

    def _unique_lines(self, matching_blocks):
        """Analyse matching_blocks to determine which lines are unique.

        :return: a tuple of (unique_left, unique_right), where the values are
            sets of line numbers of unique lines.
        """
        last_i = 0
        last_j = 0
        unique_left = []
        unique_right = []
        for i, j, n in matching_blocks:
            unique_left.extend(range(last_i, i))
            unique_right.extend(range(last_j, j))
            last_i = i + n
            last_j = j + n
        return unique_left, unique_right

    @staticmethod
    def _subtract_plans(old_plan, new_plan):
        """Remove changes from new_plan that came from old_plan.

        It is assumed that the difference between the old_plan and new_plan
        is their choice of 'b' text.

        All lines from new_plan that differ from old_plan are emitted
        verbatim.  All lines from new_plan that match old_plan but are
        not about the 'b' revision are emitted verbatim.

        Lines that match and are about the 'b' revision are the lines we
        don't want, so we convert 'killed-b' -> 'unchanged', and 'new-b'
        is skipped entirely.
        """
        matcher = patiencediff.PatienceSequenceMatcher(None, old_plan,
                                                       new_plan)
        last_j = 0
        for i, j, n in matcher.get_matching_blocks():
            for jj in range(last_j, j):
                yield new_plan[jj]
            for jj in range(j, j+n):
                plan_line = new_plan[jj]
                if plan_line[0] == 'new-b':
                    pass
                elif plan_line[0] == 'killed-b':
                    yield 'unchanged', plan_line[1]
                else:
                    yield plan_line
            last_j = j + n
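
# A minimal, self-contained illustration (not part of the original module) of
# _subtract_plans(): lines the old plan already attributed to the 'b' side are
# neutralised, while genuinely new material is kept.  The plans are made up.
def _example_subtract_plans():
    old_plan = [('unchanged', 'a\n'), ('new-b', 'b\n')]
    new_plan = [('unchanged', 'a\n'), ('new-b', 'b\n'), ('new-b', 'c\n')]
    remainder = list(_PlanMergeBase._subtract_plans(old_plan, new_plan))
    # remainder == [('unchanged', 'a\n'), ('new-b', 'c\n')]
    return remainder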


class _PlanMerge(_PlanMergeBase):
    """Plan an annotate merge using on-the-fly annotation"""

    def __init__(self, a_rev, b_rev, vf, key_prefix):
        super(_PlanMerge, self).__init__(a_rev, b_rev, vf, key_prefix)
        self.a_key = self._key_prefix + (self.a_rev,)
        self.b_key = self._key_prefix + (self.b_rev,)
        self.graph = Graph(self.vf)
        heads = self.graph.heads((self.a_key, self.b_key))
        if len(heads) == 1:
            # one side dominates, so we can just return its values, yay for
            # per-file graphs
            # Ideally we would know that before we get this far
            self._head_key = heads.pop()
            if self._head_key == self.a_key:
                other = b_rev
            else:
                other = a_rev
            mutter('found dominating revision for %s\n%s > %s', self.vf,
                   self._head_key[-1], other)
            self._weave = None
        else:
            self._head_key = None
            self._build_weave()

    def _precache_tip_lines(self):
        # Turn this into a no-op, because we will do this later
        pass

    def _find_recursive_lcas(self):
        """Find all the ancestors back to a unique lca"""
        cur_ancestors = (self.a_key, self.b_key)
        # graph.find_lca(uncommon, keys) now returns plain NULL_REVISION,
        # rather than a key tuple. We will just map that directly to no common
        # ancestors.
        parent_map = {}
        while True:
            next_lcas = self.graph.find_lca(*cur_ancestors)
            # Map a plain NULL_REVISION to a simple no-ancestors
            if next_lcas == set([NULL_REVISION]):
                next_lcas = ()
            # Order the lca's based on when they were merged into the tip
            # While the actual merge portion of weave merge uses a set() of
            # active revisions, the order of insertion *does* affect the
            # implicit ordering of the texts.
            for rev_key in cur_ancestors:
                ordered_parents = tuple(self.graph.find_merge_order(rev_key,
                                                                    next_lcas))
                parent_map[rev_key] = ordered_parents
            if len(next_lcas) == 0:
                break
            elif len(next_lcas) == 1:
                parent_map[list(next_lcas)[0]] = ()
                break
            elif len(next_lcas) > 2:
                # More than 2 lca's, fall back to grabbing all nodes between
                # this and the unique lca.
                mutter('More than 2 LCAs, falling back to all nodes for:'
                       ' %s, %s\n=> %s', self.a_key, self.b_key, cur_ancestors)
                cur_lcas = next_lcas
                while len(cur_lcas) > 1:
                    cur_lcas = self.graph.find_lca(*cur_lcas)
                if len(cur_lcas) == 0:
                    # No common base to find, use the full ancestry
                    unique_lca = None
                else:
                    unique_lca = list(cur_lcas)[0]
                    if unique_lca == NULL_REVISION:
                        # find_lca will return a plain 'NULL_REVISION' rather
                        # than a key tuple when there is no common ancestor, we
                        # prefer to just use None, because it doesn't confuse
                        # _get_interesting_texts()
                        unique_lca = None
                parent_map.update(self._find_unique_parents(next_lcas,
                                                            unique_lca))
                break
            cur_ancestors = next_lcas
        return parent_map
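
    # Illustrative example (not from the original source): for a history where
    # BASE is the unique lca of A and B, the returned parent map is roughly
    #     {a_key: (base_key,), b_key: (base_key,), base_key: ()}
    # i.e. just enough structure to rebuild a small weave of the relevant
    # texts.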

    def _find_unique_parents(self, tip_keys, base_key):
        """Find ancestors of tip that aren't ancestors of base.

        :param tip_keys: Nodes that are interesting
        :param base_key: Cull all ancestors of this node
        :return: The parent map for all revisions between tip_keys and
            base_key. base_key will be included. References to nodes outside
            of the ancestor set will also be removed.
        """
        # TODO: this would be simpler if find_unique_ancestors took a list
        #       instead of a single tip, internally it supports it, but it
        #       isn't a "backwards compatible" api change.
        if base_key is None:
            parent_map = dict(self.graph.iter_ancestry(tip_keys))
            # We remove NULL_REVISION because it isn't a proper tuple key, and
            # thus confuses things like _get_interesting_texts, and our logic
            # to add the texts into the memory weave.
            if NULL_REVISION in parent_map:
                parent_map.pop(NULL_REVISION)
        else:
            interesting = set()
            for tip in tip_keys:
                interesting.update(
                    self.graph.find_unique_ancestors(tip, [base_key]))
            parent_map = self.graph.get_parent_map(interesting)
            parent_map[base_key] = ()
        culled_parent_map, child_map, tails = self._remove_external_references(
            parent_map)
        # Remove all the tails but base_key
        if base_key is not None:
            tails.remove(base_key)
            self._prune_tails(culled_parent_map, child_map, tails)
        # Now remove all the uninteresting 'linear' regions
        simple_map = _mod_graph.collapse_linear_regions(culled_parent_map)
        return simple_map

    @staticmethod
    def _remove_external_references(parent_map):
        """Remove references that go outside of the parent map.

        :param parent_map: Something returned from Graph.get_parent_map(keys)
        :return: (filtered_parent_map, child_map, tails)
            filtered_parent_map is parent_map without external references
            child_map is the {parent_key: [child_keys]} mapping
            tails is a list of nodes that do not have any parents in the map
        """
        # TODO: The basic effect of this function seems more generic than
        #       _PlanMerge. But the specific details of building a child_map,
        #       and computing tails seems very specific to _PlanMerge.
        #       Still, should this be in Graph land?
        filtered_parent_map = {}
        child_map = {}
        tails = []
        for key, parent_keys in parent_map.iteritems():
            culled_parent_keys = [p for p in parent_keys if p in parent_map]
            if not culled_parent_keys:
                tails.append(key)
            for parent_key in culled_parent_keys:
                child_map.setdefault(parent_key, []).append(key)
            # TODO: Do we want to do this, it adds overhead for every node,
            #       just to say that the node has no children
            child_map.setdefault(key, [])
            filtered_parent_map[key] = culled_parent_keys
        return filtered_parent_map, child_map, tails
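
    # Illustrative example (not from the original source): given
    #     parent_map = {'A': [], 'B': ['A'], 'C': ['B', 'X']}
    # where 'X' is outside the map, the result is
    #     filtered_parent_map == {'A': [], 'B': ['A'], 'C': ['B']}
    #     child_map == {'A': ['B'], 'B': ['C'], 'C': []}
    #     tails == ['A']
    # (real keys are tuples such as (file_id, revision_id); plain strings are
    # used here only to keep the example short).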

    @staticmethod
    def _prune_tails(parent_map, child_map, tails_to_remove):
        """Remove tails from the parent map.

        This will remove the supplied revisions until no more children have 0
        parents.

        :param parent_map: A dict of {child: [parents]}, this dictionary will
            be modified in place.
        :param tails_to_remove: A list of tips that should be removed,
            this list will be consumed
        :param child_map: The reverse dict of parent_map ({parent: [children]})
            this dict will be modified
        :return: None, parent_map will be modified in place.
        """
        while tails_to_remove:
            next = tails_to_remove.pop()
            parent_map.pop(next)
            children = child_map.pop(next)
            for child in children:
                child_parents = parent_map[child]
                child_parents.remove(next)
                if len(child_parents) == 0:
                    tails_to_remove.append(child)
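
    # Illustrative example (not from the original source): starting from
    #     parent_map = {'A': [], 'B': ['A'], 'C': ['B']}
    #     child_map  = {'A': ['B'], 'B': ['C'], 'C': []}
    #     tails_to_remove = ['A']
    # removing 'A' leaves 'B' with no parents, so 'B' is removed next, and
    # then 'C', leaving both maps empty.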

    def _get_interesting_texts(self, parent_map):
        """Return a dict of texts we are interested in.

        Note that the input is in key tuples, but the output is in plain
        revision ids.

        :param parent_map: The output from _find_recursive_lcas
        :return: A dict of {'revision_id':lines} as returned by
            _PlanMergeBase.get_lines()
        """
        all_revision_keys = set(parent_map)
        all_revision_keys.add(self.a_key)
        all_revision_keys.add(self.b_key)

        # Everything else is in 'keys' but get_lines is in 'revision_ids'
        all_texts = self.get_lines([k[-1] for k in all_revision_keys])
        return all_texts

    def _build_weave(self):
        from bzrlib import weave
        self._weave = weave.Weave(weave_name='in_memory_weave',
                                  allow_reserved=True)
        parent_map = self._find_recursive_lcas()

        all_texts = self._get_interesting_texts(parent_map)

        # Note: Unfortunately, the order given by topo_sort will affect the
        # ordering resolution in the output. Specifically, if you add A then B,
        # then in the output text A lines will show up before B lines. And, of
        # course, topo_sort doesn't guarantee any real ordering.
        # So we use merge_sort, and add a fake node on the tip.
        # This ensures that left-hand parents will always be inserted into the
        # weave before right-hand parents.
        tip_key = self._key_prefix + (_mod_revision.CURRENT_REVISION,)
        parent_map[tip_key] = (self.a_key, self.b_key)

        for seq_num, key, depth, eom in reversed(tsort.merge_sort(parent_map,
                                                                  tip_key)):
            if key == tip_key:
                continue
            # for key in tsort.topo_sort(parent_map):
            parent_keys = parent_map[key]
            revision_id = key[-1]
            parent_ids = [k[-1] for k in parent_keys]
            self._weave.add_lines(revision_id, parent_ids,
                                  all_texts[revision_id])

    def plan_merge(self):
        """Generate a 'plan' for merging the two revisions.

        This involves comparing their texts and determining the cause of
        differences.  If text A has a line and text B does not, then either the
        line was added to text A, or it was deleted from B.  Once the causes
        are combined, they are written out in the format described in
        VersionedFile.plan_merge
        """
        if self._head_key is not None: # There was a single head
            if self._head_key == self.a_key:
                plan = 'new-a'
            else:
                if self._head_key != self.b_key:
                    raise AssertionError('There was an invalid head: %s != %s'
                                         % (self.b_key, self._head_key))
                plan = 'new-b'
            head_rev = self._head_key[-1]
            lines = self.get_lines([head_rev])[head_rev]
            return ((plan, line) for line in lines)
        else:
            return self._weave.plan_merge(self.a_rev, self.b_rev)


class _PlanLCAMerge(_PlanMergeBase):
    """
    This merge algorithm differs from _PlanMerge in that:

    1. comparisons are done against LCAs only
    2. cases where a contested line is new versus one LCA but old versus
       another are marked as conflicts, by emitting the line as conflicted-a
       or conflicted-b.

    This is faster, and hopefully produces more useful output.
    """

    def __init__(self, a_rev, b_rev, vf, key_prefix, graph):
        _PlanMergeBase.__init__(self, a_rev, b_rev, vf, key_prefix)
        lcas = graph.find_lca(key_prefix + (a_rev,), key_prefix + (b_rev,))
        self.lcas = set()
        for lca in lcas:
            if lca == NULL_REVISION:
                self.lcas.add(lca)
            else:
                self.lcas.add(lca[-1])
        for lca in self.lcas:
            if _mod_revision.is_null(lca):
                lca_lines = []
            else:
                lca_lines = self.get_lines([lca])[lca]
            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_a,
                                                           lca_lines)
            blocks = list(matcher.get_matching_blocks())
            self._cached_matching_blocks[(a_rev, lca)] = blocks
            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_b,
                                                           lca_lines)
            blocks = list(matcher.get_matching_blocks())
            self._cached_matching_blocks[(b_rev, lca)] = blocks

    def _determine_status(self, revision_id, unique_line_numbers):
        """Determines the status of unique lines versus all lcas.

        Basically, determines why the line is unique to this revision.

        A line may be determined new, killed, or both.

        If a line is determined new, that means it was not present in at least
        one LCA, and is not present in the other merge revision.

        If a line is determined killed, that means the line was present in
        at least one LCA.

        If a line is killed and new, this indicates that the two merge
        revisions contain differing conflict resolutions.

        :param revision_id: The id of the revision in which the lines are
            unique
        :param unique_line_numbers: The line numbers of unique lines.
        :return: a tuple of (new_this, killed_other)
        """
        new = set()
        killed = set()
        unique_line_numbers = set(unique_line_numbers)
        for lca in self.lcas:
            blocks = self._get_matching_blocks(revision_id, lca)
            unique_vs_lca, _ignored = self._unique_lines(blocks)
            new.update(unique_line_numbers.intersection(unique_vs_lca))
            killed.update(unique_line_numbers.difference(unique_vs_lca))
        return new, killed
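
    # Illustrative note (not from the original source): with two LCAs, a line
    # unique to THIS that is absent from one LCA but present in the other ends
    # up in both sets, i.e. "new and killed", which plan_merge reports as
    # conflicted-a/conflicted-b.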

        self.base_rev_id = None
        self.base_rev_id = base_branch.get_rev_id(base_revision[1])
        fetch(from_branch=base_branch, to_branch=self.this_branch)
        self.base_is_ancestor = is_ancestor(self.this_basis,
                                            self.base_rev_id,
                                            self.this_branch)

        def get_inventory(tree):
            return tree.inventory

        inv_changes = merge_flex(self.this_tree, self.base_tree,
                                 self.other_tree,
                                 generate_changeset, get_inventory,
                                 self.conflict_handler,
                                 merge_factory=self.merge_factory,
                                 interesting_ids=self.interesting_ids)
        adjust_ids = []
        for id, path in inv_changes.iteritems():
            assert path.startswith('.' + '/') or path.startswith('.' + '\\'), \
                "path is %s" % path
            adjust_ids.append((path, id))
        if len(adjust_ids) > 0:
            self.this_tree.set_inventory(self.regen_inventory(adjust_ids))
        conflicts = self.conflict_handler.conflicts
        self.conflict_handler.finalize()

    def regen_inventory(self, new_entries):
        old_entries = self.this_tree.read_working_inventory()
        new_inventory = {}
        by_path = {}
        new_entries_map = {}
        for path, file_id in new_entries:
            new_entries_map[file_id] = path

        def id2path(file_id):
            path = new_entries_map.get(file_id)
            if path is not None:
                return path
            entry = old_entries[file_id]
            if entry.parent_id is None:
                return entry.name
            return pathjoin(id2path(entry.parent_id), entry.name)

        for file_id in old_entries:
            entry = old_entries[file_id]
            path = id2path(file_id)
            new_inventory[file_id] = (path, file_id, entry.parent_id,
                                      entry.kind)
            by_path[path] = file_id

        deletions = 0
        insertions = 0
        new_path_list = []
        for path, file_id in new_entries:
            if path is None:
                deletions += 1
                del new_inventory[file_id]
            else:
                new_path_list.append((path, file_id))
                if file_id not in old_entries:
                    insertions += 1
        # Ensure no file is added before its parent
        new_path_list.sort()
        for path, file_id in new_path_list:
            parent = by_path[os.path.dirname(path)]
            abspath = pathjoin(self.this_tree.basedir, path)
            kind = bzrlib.osutils.file_kind(abspath)
            new_inventory[file_id] = (path, file_id, parent, kind)
            by_path[path] = file_id

        # Get a list in insertion order
        new_inventory_list = new_inventory.values()
        mutter("""Inventory regeneration:
    old length: %i insertions: %i deletions: %i new_length: %i"""
               % (len(old_entries), insertions, deletions,
                  len(new_inventory_list)))
        assert len(new_inventory_list) == len(old_entries) + insertions \
            - deletions
        new_inventory_list.sort()
        return new_inventory_list


merge_types = {"merge3": (ApplyMerge3, "Native diff3-style merge"),
               "diff3": (Diff3Merge, "Merge using external diff3"),
               'weave': (WeaveMerge, "Weave-based merge"),
               }