# Copyright (C) 2005-2011 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """
branch as _mod_branch,
conflicts as _mod_conflicts,
revision as _mod_revision,
from bzrlib.symbol_versioning import (
import bzrlib.revision
from bzrlib.merge_core import merge_flex, ApplyMerge3, BackupBeforeChange
from bzrlib.merge_core import WeaveMerge
from bzrlib.changeset import generate_changeset, ExceptionConflictHandler
from bzrlib.changeset import Inventory, Diff3Merge, ReplaceContents
from bzrlib.branch import Branch
from bzrlib.errors import (BzrCommandError,
WorkingTreeNotRevision,
from bzrlib.delta import compare_trees
from bzrlib.trace import mutter, warning, note
from bzrlib.fetch import greedy_fetch, fetch
from bzrlib.revision import is_ancestor, NULL_REVISION
from bzrlib.osutils import rename
from bzrlib.revision import common_ancestor, MultipleRevisionSources
from bzrlib.errors import NoSuchRevision
# TODO: Report back as changes are merged in

# TODO: build_working_dir can be built on something simpler than merge()

# FIXME: merge() parameters seem oriented towards the command line

# NOTABUG: merge is a helper for commandline functions. merge_inner is the
# core functionality.

# comments from abentley on irc: merge happens in two stages, each
# of which generates a changeset object

# stage 1: generate OLD->OTHER,
# stage 2: use MINE and OLD->OTHER to generate MINE -> RESULT
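# (Illustrative sketch, not part of the original comments: in terms of the
# helpers imported above, the two stages are roughly
#     changes = generate_changeset(old_tree, other_tree)    # stage 1
#     merge_flex(mine_tree, old_tree, changes, ...)         # stage 2
# -- the exact argument lists shown here are assumptions, not the real
# signatures; see bzrlib.changeset and bzrlib.merge_core for the actual API.)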
60
class MergeConflictHandler(ExceptionConflictHandler):
61
"""Handle conflicts encountered while merging.
63
This subclasses ExceptionConflictHandler, so that any types of
64
conflict that are not explicitly handled cause an exception and
67
def __init__(self, this_tree, base_tree, other_tree, ignore_zero=False):
68
ExceptionConflictHandler.__init__(self)
70
self.ignore_zero = ignore_zero
71
self.this_tree = this_tree
72
self.base_tree = base_tree
73
self.other_tree = other_tree
75
def copy(self, source, dest):
76
"""Copy the text and mode of a file
77
:param source: The path of the file to copy
78
:param dest: The destination file to create
80
s_file = file(source, "rb")
81
d_file = file(dest, "wb")
84
os.chmod(dest, 0777 & os.stat(source).st_mode)
86
def dump(self, lines, dest):
87
"""Copy the text and mode of a file
88
:param source: The path of the file to copy
89
:param dest: The distination file to create
91
d_file = file(dest, "wb")
95
def add_suffix(self, name, suffix, last_new_name=None, fix_inventory=True):
96
"""Rename a file to append a suffix. If the new name exists, the
97
suffix is added repeatedly until a non-existent name is found
99
:param name: The path of the file
100
:param suffix: The suffix to append
101
:param last_new_name: (used for recursive calls) the last name tried
103
if last_new_name is None:
105
new_name = last_new_name+suffix
107
rename(name, new_name)
108
if fix_inventory is True:
110
relpath = self.this_tree.relpath(name)
111
except NotBranchError:
113
if relpath is not None:
114
file_id = self.this_tree.path2id(relpath)
115
if file_id is not None:
116
new_path = self.this_tree.relpath(new_name)
117
rename(new_name, name)
118
self.this_tree.rename_one(relpath, new_path)
119
assert self.this_tree.id2path(file_id) == new_path
121
if e.errno != errno.EEXIST and e.errno != errno.ENOTEMPTY:
123
return self.add_suffix(name, suffix, last_new_name=new_name,
124
fix_inventory=fix_inventory)
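# Example of the behaviour described above (illustrative comment only):
# add_suffix('foo', '.THIS') renames foo to foo.THIS; if foo.THIS already
# exists it retries with foo.THIS.THIS, and so on, until an unused name is
# found (callers such as target_exists below rely on the chosen name being
# returned).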
127
def conflict(self, text):
132
def merge_conflict(self, new_file, this_path, base_lines, other_lines):
134
Handle diff3 conflicts by producing a .THIS, .BASE and .OTHER. The
135
main file will be a version with diff3 conflicts.
136
:param new_file: Path to the output file with diff3 markers
137
:param this_path: Path to the file text for the THIS tree
138
:param base_lines: The lines of the file text for the BASE tree
:param other_lines: The lines of the file text for the OTHER tree
141
self.add_suffix(this_path, ".THIS", fix_inventory=False)
142
self.dump(base_lines, this_path+".BASE")
143
self.dump(other_lines, this_path+".OTHER")
144
rename(new_file, this_path)
145
self.conflict("Diff3 conflict encountered in %s" % this_path)
147
def weave_merge_conflict(self, filename, weave, other_i, out_file):
149
Handle weave conflicts by producing .THIS and .OTHER files. The
150
main file will be a version with diff3-style conflicts.
152
self.add_suffix(filename, ".THIS", fix_inventory=False)
154
self.dump(weave.get_iter(other_i), filename+".OTHER")
155
self.conflict("Text conflict encountered in %s" % filename)
157
def new_contents_conflict(self, filename, other_contents):
158
"""Conflicting contents for newly added file."""
159
other_contents(filename + ".OTHER", self, False)
160
self.conflict("Conflict in newly added file %s" % filename)
163
def target_exists(self, entry, target, old_path):
164
"""Handle the case when the target file or dir exists"""
165
moved_path = self.add_suffix(target, ".moved")
166
self.conflict("Moved existing %s to %s" % (target, moved_path))
168
def rmdir_non_empty(self, filename):
169
"""Handle the case where the dir to be removed still has contents"""
170
self.conflict("Directory %s not removed because it is not empty"\
174
def rem_contents_conflict(self, filename, this_contents, base_contents):
175
base_contents(filename+".BASE", self, False)
176
this_contents(filename+".THIS", self, False)
177
return ReplaceContents(this_contents, None)
179
def rem_contents_conflict(self, filename, this_contents, base_contents):
180
base_contents(filename+".BASE", self, False)
181
this_contents(filename+".THIS", self, False)
182
self.conflict("Other branch deleted locally modified file %s" %
184
return ReplaceContents(this_contents, None)
186
def abs_this_path(self, file_id):
187
"""Return the absolute path for a file_id in the this tree."""
188
return self.this_tree.id2abspath(file_id)
190
def add_missing_parents(self, file_id, tree):
191
"""If some of the parents for file_id are missing, add them."""
192
entry = tree.inventory[file_id]
193
if entry.parent_id not in self.this_tree:
194
return self.create_all_missing(entry.parent_id, tree)
196
return self.abs_this_path(entry.parent_id)
198
def create_all_missing(self, file_id, tree):
199
"""Add contents for a file_id and all its parents to a tree."""
200
entry = tree.inventory[file_id]
201
if entry.parent_id is not None and entry.parent_id not in self.this_tree:
202
abspath = self.create_all_missing(entry.parent_id, tree)
204
abspath = self.abs_this_path(entry.parent_id)
205
entry_path = os.path.join(abspath, entry.name)
206
if not os.path.isdir(entry_path):
207
self.create(file_id, entry_path, tree)
210
def create(self, file_id, path, tree, reverse=False):
211
"""Uses tree data to create a filesystem object for the file_id"""
212
from changeset import get_contents
213
get_contents(tree, file_id)(path, self, reverse)
215
def missing_for_merge(self, file_id, other_path):
216
"""The file_id doesn't exist in THIS, but does in OTHER and BASE"""
217
self.conflict("Other branch modified locally deleted file %s" %
219
parent_dir = self.add_missing_parents(file_id, self.other_tree)
220
stem = os.path.join(parent_dir, os.path.basename(other_path))
221
self.create(file_id, stem+".OTHER", self.other_tree)
222
self.create(file_id, stem+".BASE", self.base_tree)
224
def threeway_contents_conflict(self, filename, this_contents, base_contents,
226
self.conflict("Three-way conflict merging %s" % filename)
229
if not self.ignore_zero:
230
note("%d conflicts encountered." % self.conflicts)
232
def get_tree(treespec, local_branch=None):
233
location, revno = treespec
234
branch = Branch.open_containing(location)[0]
238
revision = branch.last_revision()
240
revision = branch.get_rev_id(revno)
242
revision = NULL_REVISION
243
return branch, get_revid_tree(branch, revision, local_branch)
245
def get_revid_tree(branch, revision, local_branch):
247
base_tree = branch.working_tree()
249
if local_branch is not None:
250
greedy_fetch(local_branch, branch, revision)
251
base_tree = local_branch.revision_tree(revision)
253
base_tree = branch.revision_tree(revision)
257
def file_exists(tree, file_id):
258
return tree.has_filename(tree.id2path(file_id))
261
def build_working_dir(to_dir):
262
"""Build a working directory in an empty directory.
264
to_dir is a directory containing branch metadata but no working files,
265
typically constructed by cloning an existing branch.
267
This is split out as a special idiomatic case of merge. It could
268
eventually be done by just building the tree directly calling into
269
lower-level code (e.g. constructing a changeset).
271
# RBC 20051019 is this not just 'export' ?
272
# AB Well, export doesn't take care of inventory...
273
this_branch = Branch.open_containing(to_dir)[0]
274
transform_tree(this_branch.working_tree(), this_branch.basis_tree())
54
277
def transform_tree(from_tree, to_tree, interesting_ids=None):
55
from_tree.lock_tree_write()
56
operation = cleanup.OperationWithCleanups(merge_inner)
57
operation.add_cleanup(from_tree.unlock)
58
operation.run_simple(from_tree.branch, to_tree, from_tree,
59
ignore_zero=True, interesting_ids=interesting_ids, this_tree=from_tree)
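# (Descriptive note, not in the original: transform_tree makes from_tree's
# contents match to_tree by running merge_inner with from_tree as both the
# working tree and the BASE, so every difference resolves in to_tree's
# favour. build_working_dir above is the typical caller.)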
62
class MergeHooks(hooks.Hooks):
65
hooks.Hooks.__init__(self, "bzrlib.merge", "Merger.hooks")
66
self.add_hook('merge_file_content',
67
"Called with a bzrlib.merge.Merger object to create a per file "
68
"merge object when starting a merge. "
69
"Should return either None or a subclass of "
70
"``bzrlib.merge.AbstractPerFileMerger``. "
71
"Such objects will then be called per file "
72
"that needs to be merged (including when one "
73
"side has deleted the file and the other has changed it). "
74
"See the AbstractPerFileMerger API docs for details on how it is "
79
class AbstractPerFileMerger(object):
80
"""PerFileMerger objects are used by plugins extending merge for bzrlib.
82
See ``bzrlib.plugins.news_merge.news_merge`` for an example concrete class.
84
:ivar merger: The Merge3Merger performing the merge.
87
def __init__(self, merger):
88
"""Create a PerFileMerger for use with merger."""
91
def merge_contents(self, merge_params):
92
"""Attempt to merge the contents of a single file.
94
:param merge_params: A bzrlib.merge.MergeHookParams
95
:return: A tuple of (status, chunks), where status is one of
96
'not_applicable', 'success', 'conflicted', or 'delete'. If status
97
is 'success' or 'conflicted', then chunks should be an iterable of
98
strings for the new file contents.
100
return ('not_applicable', None)
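# Example (hedged sketch, not shipped code): a minimal concrete per-file
# merger could look something like
#     class AlwaysTakeOther(AbstractPerFileMerger):
#         def merge_contents(self, merge_params):
#             if not merge_params.is_file_merge():
#                 return 'not_applicable', None
#             return 'success', merge_params.other_lines
# and would be registered with
#     Merger.hooks.install_named_hook('merge_file_content',
#         lambda merger: AlwaysTakeOther(merger), 'demo')
# (the class and hook name 'demo' are made up for illustration).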
103
class PerFileMerger(AbstractPerFileMerger):
104
"""Merge individual files when self.file_matches returns True.
106
This class is intended to be subclassed. The file_matches and
107
merge_matching methods should be overridden with concrete implementations.
110
def file_matches(self, params):
111
"""Return True if merge_matching should be called on this file.
113
Only called with merges of plain files with no clear winner.
115
Subclasses must override this.
117
raise NotImplementedError(self.file_matches)
119
def get_filename(self, params, tree):
120
"""Lookup the filename (i.e. basename, not path), given a Tree (e.g.
121
self.merger.this_tree) and a MergeHookParams.
123
return osutils.basename(tree.id2path(params.file_id))
125
def get_filepath(self, params, tree):
126
"""Calculate the path to the file in a tree.
128
:param params: A MergeHookParams describing the file to merge
129
:param tree: a Tree, e.g. self.merger.this_tree.
131
return tree.id2path(params.file_id)
133
def merge_contents(self, params):
134
"""Merge the contents of a single file."""
135
# Check whether this custom merge logic should be used.
137
# OTHER is a straight winner, rely on default merge.
138
params.winner == 'other' or
139
# THIS and OTHER aren't both files.
140
not params.is_file_merge() or
141
# The filename doesn't match *.xml
142
not self.file_matches(params)):
143
return 'not_applicable', None
144
return self.merge_matching(params)
146
def merge_matching(self, params):
147
"""Merge the contents of a single file that has matched the criteria
148
in PerFileMerger.merge_contents (is a conflict, is a file,
149
self.file_matches is True).
151
Subclasses must override this.
153
raise NotImplementedError(self.merge_matching)
156
class ConfigurableFileMerger(PerFileMerger):
157
"""Merge individual files when configured via a .conf file.
159
This is a base class for concrete custom file merging logic. Concrete
160
classes should implement ``merge_text``.
162
See ``bzrlib.plugins.news_merge.news_merge`` for an example concrete class.
164
:ivar affected_files: The configured file paths to merge.
166
:cvar name_prefix: The prefix to use when looking up configuration
167
details. <name_prefix>_merge_files describes the files targeted by the
170
:cvar default_files: The default file paths to merge when no configuration
177
def __init__(self, merger):
178
super(ConfigurableFileMerger, self).__init__(merger)
179
self.affected_files = None
180
self.default_files = self.__class__.default_files or []
181
self.name_prefix = self.__class__.name_prefix
182
if self.name_prefix is None:
183
raise ValueError("name_prefix must be set.")
185
def file_matches(self, params):
186
"""Check whether the file should call the merge hook.
188
<name_prefix>_merge_files configuration variable is a list of files
189
that should use the hook.
191
affected_files = self.affected_files
192
if affected_files is None:
193
config = self.merger.this_branch.get_config()
194
# Until bzr provides a better policy for caching the config, we
195
# just add the part we're interested in to the params to avoid
196
# reading the config files repeatedly (bazaar.conf, location.conf,
198
config_key = self.name_prefix + '_merge_files'
199
affected_files = config.get_user_option_as_list(config_key)
200
if affected_files is None:
201
# If nothing was specified in the config, use the default.
202
affected_files = self.default_files
203
self.affected_files = affected_files
205
filepath = self.get_filepath(params, self.merger.this_tree)
206
if filepath in affected_files:
210
def merge_matching(self, params):
211
return self.merge_text(params)
213
def merge_text(self, params):
214
"""Merge the byte contents of a single file.
216
This is called after checking that the merge should be performed in
217
merge_contents, and it should behave as per
218
``bzrlib.merge.AbstractPerFileMerger.merge_contents``.
220
raise NotImplementedError(self.merge_text)
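# Example (illustrative sketch): a plugin built on this class might look like
#     class NewsMerger(ConfigurableFileMerger):
#         name_prefix = "news"
#         default_files = ["NEWS"]
#         def merge_text(self, params):
#             ...
# with users able to widen the file list through branch configuration, e.g.
# a line such as  news_merge_files = NEWS, doc/NEWS  -- the key follows the
# <name_prefix>_merge_files pattern described in file_matches above.
# (bzrlib.plugins.news_merge is the real example referenced earlier; this
# snippet is only a sketch of it.)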
223
class MergeHookParams(object):
224
"""Object holding parameters passed to merge_file_content hooks.
226
There are some fields hooks can access:
228
:ivar file_id: the file ID of the file being merged
229
:ivar trans_id: the transform ID for the merge of this file
230
:ivar this_kind: kind of file_id in 'this' tree
231
:ivar other_kind: kind of file_id in 'other' tree
232
:ivar winner: one of 'this', 'other', 'conflict'
235
def __init__(self, merger, file_id, trans_id, this_kind, other_kind,
237
self._merger = merger
238
self.file_id = file_id
239
self.trans_id = trans_id
240
self.this_kind = this_kind
241
self.other_kind = other_kind
244
def is_file_merge(self):
245
"""True if this_kind and other_kind are both 'file'."""
246
return self.this_kind == 'file' and self.other_kind == 'file'
248
@decorators.cachedproperty
249
def base_lines(self):
250
"""The lines of the 'base' version of the file."""
251
return self._merger.get_lines(self._merger.base_tree, self.file_id)
253
@decorators.cachedproperty
254
def this_lines(self):
255
"""The lines of the 'this' version of the file."""
256
return self._merger.get_lines(self._merger.this_tree, self.file_id)
258
@decorators.cachedproperty
259
def other_lines(self):
260
"""The lines of the 'other' version of the file."""
261
return self._merger.get_lines(self._merger.other_tree, self.file_id)
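# Example (illustrative comment): a hook that only fires for real text
# conflicts would typically start with checks like
#     if params.winner == 'other' or not params.is_file_merge():
#         return 'not_applicable', None
# and then work on params.this_lines / params.other_lines / params.base_lines,
# which are loaded lazily via the cached properties above.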
278
merge_inner(from_tree.branch, to_tree, from_tree, ignore_zero=True,
279
interesting_ids=interesting_ids)
282
def merge(other_revision, base_revision,
283
check_clean=True, ignore_zero=False,
284
this_dir=None, backup_files=False, merge_type=ApplyMerge3,
285
file_list=None, show_base=False, reprocess=False):
286
"""Merge changes into a tree.
289
list(path, revno) Base for three-way merge.
290
If [None, None] then a base will be automatically determined.
292
list(path, revno) Other revision for three-way merge.
294
Directory to merge changes into; '.' by default.
296
If true, this_dir must have no uncommitted changes before the
298
ignore_zero - If true, suppress the "zero conflicts" message when
299
there are no conflicts; should be set when doing something we expect
300
to complete perfectly.
301
file_list - If supplied, merge only changes to selected files.
303
All available ancestors of other_revision and base_revision are
304
automatically pulled into the branch.
306
The revno may be -1 to indicate the last revision on the branch, which is
309
This function is intended for use from the command line; programmatic
310
clients might prefer to call merge_inner(), which has less magic behavior.
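# Example (hedged sketch of the command-line oriented API described above):
#     conflicts = merge(['../other-branch', -1], [None, None],
#                       this_dir='.', check_clean=True)
# where ['../other-branch', -1] means "tip of the branch at ../other-branch"
# and [None, None] lets merge() pick the base revision automatically.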
314
this_branch = Branch.open_containing(this_dir)[0]
315
if show_base and not merge_type is ApplyMerge3:
316
raise BzrCommandError("Show-base is not supported for this merge"
317
" type. %s" % merge_type)
318
if reprocess and not merge_type is ApplyMerge3:
319
raise BzrCommandError("Reprocess is not supported for this merge"
320
" type. %s" % merge_type)
321
if reprocess and show_base:
322
raise BzrCommandError("Cannot reprocess and show base.")
323
merger = Merger(this_branch)
324
merger.check_basis(check_clean)
325
merger.set_other(other_revision)
326
merger.set_base(base_revision)
327
if merger.base_rev_id == merger.other_rev_id:
328
note('Nothing to do.')
330
merger.backup_files = backup_files
331
merger.merge_type = merge_type
332
merger.set_interesting_files(file_list)
333
merger.show_base = show_base
334
merger.reprocess = reprocess
335
merger.conflict_handler = MergeConflictHandler(merger.this_tree,
338
ignore_zero=ignore_zero)
339
conflicts = merger.do_merge()
343
def merge_inner(this_branch, other_tree, base_tree, ignore_zero=False,
345
merge_type=ApplyMerge3,
346
interesting_ids=None,
350
interesting_files=None):
351
"""Primary interface for merging.
353
typical use is probably
354
'merge_inner(branch, branch.get_revision_tree(other_revision),
355
branch.get_revision_tree(base_revision))'
357
merger = Merger(this_branch, other_tree, base_tree)
358
merger.backup_files = backup_files
359
merger.merge_type = merge_type
360
merger.interesting_ids = interesting_ids
361
if interesting_files:
362
assert not interesting_ids, ('Only supply interesting_ids'
363
' or interesting_files')
364
merger._set_interesting_files(interesting_files)
365
merger.show_base = show_base
366
merger.reprocess = reprocess
367
merger.conflict_handler = MergeConflictHandler(merger.this_tree, base_tree,
369
ignore_zero=ignore_zero)
370
merger.other_rev_id = other_rev_id
371
merger.other_basis = other_rev_id
372
return merger.do_merge()
264
375
class Merger(object):
268
def __init__(self, this_branch, other_tree=None, base_tree=None,
269
this_tree=None, pb=None, change_reporter=None,
270
recurse='down', revision_graph=None):
376
def __init__(self, this_branch, other_tree=None, base_tree=None):
271
377
object.__init__(self)
272
378
self.this_branch = this_branch
273
self.this_basis = _mod_revision.ensure_null(
274
this_branch.last_revision())
379
self.this_basis = this_branch.last_revision()
275
380
self.this_rev_id = None
276
self.this_tree = this_tree
381
self.this_tree = this_branch.working_tree()
277
382
self.this_revision_tree = None
278
383
self.this_basis_tree = None
279
384
self.other_tree = other_tree
280
self.other_branch = None
281
385
self.base_tree = base_tree
282
386
self.ignore_zero = False
283
387
self.backup_files = False
284
388
self.interesting_ids = None
285
self.interesting_files = None
286
389
self.show_base = False
287
390
self.reprocess = False
289
warnings.warn("pb parameter to Merger() is deprecated and ignored")
291
self.recurse = recurse
292
self.change_reporter = change_reporter
293
self._cached_trees = {}
294
self._revision_graph = revision_graph
295
self._base_is_ancestor = None
296
self._base_is_other_ancestor = None
297
self._is_criss_cross = None
298
self._lca_trees = None
300
def cache_trees_with_revision_ids(self, trees):
301
"""Cache any tree in trees if it has a revision_id."""
302
for maybe_tree in trees:
303
if maybe_tree is None:
306
rev_id = maybe_tree.get_revision_id()
307
except AttributeError:
309
self._cached_trees[rev_id] = maybe_tree
312
def revision_graph(self):
313
if self._revision_graph is None:
314
self._revision_graph = self.this_branch.repository.get_graph()
315
return self._revision_graph
317
def _set_base_is_ancestor(self, value):
318
self._base_is_ancestor = value
320
def _get_base_is_ancestor(self):
321
if self._base_is_ancestor is None:
322
self._base_is_ancestor = self.revision_graph.is_ancestor(
323
self.base_rev_id, self.this_basis)
324
return self._base_is_ancestor
326
base_is_ancestor = property(_get_base_is_ancestor, _set_base_is_ancestor)
328
def _set_base_is_other_ancestor(self, value):
329
self._base_is_other_ancestor = value
331
def _get_base_is_other_ancestor(self):
332
if self._base_is_other_ancestor is None:
333
if self.other_basis is None:
335
self._base_is_other_ancestor = self.revision_graph.is_ancestor(
336
self.base_rev_id, self.other_basis)
337
return self._base_is_other_ancestor
339
base_is_other_ancestor = property(_get_base_is_other_ancestor,
340
_set_base_is_other_ancestor)
343
def from_uncommitted(tree, other_tree, pb=None, base_tree=None):
344
"""Return a Merger for uncommitted changes in other_tree.
346
:param tree: The tree to merge into
347
:param other_tree: The tree to get uncommitted changes from
348
:param pb: A progress indicator
349
:param base_tree: The basis to use for the merge. If unspecified,
350
other_tree.basis_tree() will be used.
352
if base_tree is None:
353
base_tree = other_tree.basis_tree()
354
merger = Merger(tree.branch, other_tree, base_tree, tree, pb)
355
merger.base_rev_id = merger.base_tree.get_revision_id()
356
merger.other_rev_id = None
357
merger.other_basis = merger.base_rev_id
361
def from_mergeable(klass, tree, mergeable, pb):
362
"""Return a Merger for a bundle or merge directive.
364
:param tree: The tree to merge changes into
365
:param mergeable: A merge directive or bundle
366
:param pb: A progress indicator
368
mergeable.install_revisions(tree.branch.repository)
369
base_revision_id, other_revision_id, verified =\
370
mergeable.get_merge_request(tree.branch.repository)
371
revision_graph = tree.branch.repository.get_graph()
372
if base_revision_id is not None:
373
if (base_revision_id != _mod_revision.NULL_REVISION and
374
revision_graph.is_ancestor(
375
base_revision_id, tree.branch.last_revision())):
376
base_revision_id = None
378
trace.warning('Performing cherrypick')
379
merger = klass.from_revision_ids(pb, tree, other_revision_id,
380
base_revision_id, revision_graph=
382
return merger, verified
385
def from_revision_ids(pb, tree, other, base=None, other_branch=None,
386
base_branch=None, revision_graph=None,
388
"""Return a Merger for revision-ids.
390
:param pb: A progress indicator
391
:param tree: The tree to merge changes into
392
:param other: The revision-id to use as OTHER
393
:param base: The revision-id to use as BASE. If not specified, will
395
:param other_branch: A branch containing the other revision-id. If
396
not supplied, tree.branch is used.
397
:param base_branch: A branch containing the base revision-id. If
398
not supplied, other_branch or tree.branch will be used.
399
:param revision_graph: If you have a revision_graph precomputed, pass
400
it in, otherwise it will be created for you.
401
:param tree_branch: The branch associated with tree. If not supplied,
402
tree.branch will be used.
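# Example (hedged sketch): a typical caller does something like
#     merger = Merger.from_revision_ids(None, tree, other_rev_id)
#     merger.merge_type = Merge3Merger
#     conflicts = merger.do_merge()
# passing None for pb, since progress-bar arguments are deprecated and
# ignored in this version; other_rev_id stands for whatever revision is
# being merged in.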
404
if tree_branch is None:
405
tree_branch = tree.branch
406
merger = Merger(tree_branch, this_tree=tree, pb=pb,
407
revision_graph=revision_graph)
408
if other_branch is None:
409
other_branch = tree.branch
410
merger.set_other_revision(other, other_branch)
414
if base_branch is None:
415
base_branch = other_branch
416
merger.set_base_revision(base, base_branch)
419
def revision_tree(self, revision_id, branch=None):
420
if revision_id not in self._cached_trees:
422
branch = self.this_branch
424
tree = self.this_tree.revision_tree(revision_id)
425
except errors.NoSuchRevisionInTree:
426
tree = branch.repository.revision_tree(revision_id)
427
self._cached_trees[revision_id] = tree
428
return self._cached_trees[revision_id]
430
def _get_tree(self, treespec, possible_transports=None):
431
location, revno = treespec
433
tree = workingtree.WorkingTree.open_containing(location)[0]
434
return tree.branch, tree
435
branch = _mod_branch.Branch.open_containing(
436
location, possible_transports)[0]
438
revision_id = branch.last_revision()
440
revision_id = branch.get_rev_id(revno)
441
revision_id = _mod_revision.ensure_null(revision_id)
442
return branch, self.revision_tree(revision_id, branch)
444
@deprecated_method(deprecated_in((2, 1, 0)))
391
self.conflict_handler = MergeConflictHandler(self.this_tree, base_tree,
394
def revision_tree(self, revision_id):
395
return self.this_branch.revision_tree(revision_id)
445
397
def ensure_revision_trees(self):
446
398
if self.this_revision_tree is None:
447
self.this_basis_tree = self.revision_tree(self.this_basis)
399
self.this_basis_tree = self.this_branch.revision_tree(
448
401
if self.this_basis == self.this_rev_id:
449
402
self.this_revision_tree = self.this_basis_tree
451
405
if self.other_rev_id is None:
452
406
other_basis_tree = self.revision_tree(self.other_basis)
453
if other_basis_tree.has_changes(self.other_tree):
454
raise errors.WorkingTreeNotRevision(self.this_tree)
455
other_rev_id = self.other_basis
407
changes = compare_trees(self.other_tree, other_basis_tree)
408
if changes.has_changed():
409
raise WorkingTreeNotRevision(self.this_tree)
410
other_rev_id = other_basis
456
411
self.other_tree = other_basis_tree
458
@deprecated_method(deprecated_in((2, 1, 0)))
459
414
def file_revisions(self, file_id):
460
415
self.ensure_revision_trees()
416
def get_id(tree, file_id):
417
revision_id = tree.inventory[file_id].revision
418
assert revision_id is not None
461
420
if self.this_rev_id is None:
462
421
if self.this_basis_tree.get_file_sha1(file_id) != \
463
422
self.this_tree.get_file_sha1(file_id):
464
raise errors.WorkingTreeNotRevision(self.this_tree)
423
raise WorkingTreeNotRevision(self.this_tree)
466
425
trees = (self.this_basis_tree, self.other_tree)
467
return [tree.get_file_revision(file_id) for tree in trees]
469
@deprecated_method(deprecated_in((2, 1, 0)))
470
def check_basis(self, check_clean, require_commits=True):
471
if self.this_basis is None and require_commits is True:
472
raise errors.BzrCommandError(
473
"This branch has no commits."
474
" (perhaps you would prefer 'bzr pull')")
426
return [get_id(tree, file_id) for tree in trees]
429
def merge_factory(self, file_id, base, other):
430
if self.merge_type.history_based:
431
if self.show_base is True:
432
raise BzrError("Cannot show base for hisory-based merges")
433
if self.reprocess is True:
434
raise BzrError("Cannot reprocess history-based merges")
436
t_revid, o_revid = self.file_revisions(file_id)
437
weave = self.this_basis_tree.get_weave(file_id)
438
contents_change = self.merge_type(weave, t_revid, o_revid)
440
if self.show_base is True or self.reprocess is True:
441
contents_change = self.merge_type(file_id, base, other,
442
show_base=self.show_base,
443
reprocess=self.reprocess)
445
contents_change = self.merge_type(file_id, base, other)
446
if self.backup_files:
447
contents_change = BackupBeforeChange(contents_change)
448
return contents_change
450
def check_basis(self, check_clean):
451
if self.this_basis is None:
452
raise BzrCommandError("This branch has no commits")
476
454
self.compare_basis()
477
455
if self.this_basis != self.this_rev_id:
478
raise errors.UncommittedChanges(self.this_tree)
456
raise BzrCommandError("Working tree has uncommitted changes.")
480
@deprecated_method(deprecated_in((2, 1, 0)))
481
458
def compare_basis(self):
483
basis_tree = self.revision_tree(self.this_tree.last_revision())
484
except errors.NoSuchRevision:
485
basis_tree = self.this_tree.basis_tree()
486
if not self.this_tree.has_changes(basis_tree):
459
changes = compare_trees(self.this_branch.working_tree(),
460
self.this_branch.basis_tree(), False)
461
if not changes.has_changed():
487
462
self.this_rev_id = self.this_basis
489
464
def set_interesting_files(self, file_list):
490
self.interesting_files = file_list
466
self._set_interesting_files(file_list)
467
except NotVersionedError, e:
468
raise BzrCommandError("%s is not a source file in any"
471
def _set_interesting_files(self, file_list):
472
"""Set the list of interesting ids from a list of files."""
473
if file_list is None:
474
self.interesting_ids = None
477
interesting_ids = set()
478
for fname in file_list:
479
path = self.this_tree.relpath(fname)
481
for tree in (self.this_tree, self.base_tree, self.other_tree):
482
file_id = tree.inventory.path2id(path)
483
if file_id is not None:
484
interesting_ids.add(file_id)
487
raise NotVersionedError(path=fname)
488
self.interesting_ids = interesting_ids
492
490
def set_pending(self):
493
if (not self.base_is_ancestor or not self.base_is_other_ancestor
494
or self.other_rev_id is None):
498
def _add_parent(self):
499
new_parents = self.this_tree.get_parent_ids() + [self.other_rev_id]
500
new_parent_trees = []
501
operation = cleanup.OperationWithCleanups(
502
self.this_tree.set_parent_trees)
503
for revision_id in new_parents:
505
tree = self.revision_tree(revision_id)
506
except errors.NoSuchRevision:
510
operation.add_cleanup(tree.unlock)
511
new_parent_trees.append((revision_id, tree))
512
operation.run_simple(new_parent_trees, allow_leftmost_as_ghost=True)
514
def set_other(self, other_revision, possible_transports=None):
515
"""Set the revision and tree to merge from.
517
This sets the other_tree, other_rev_id, other_basis attributes.
519
:param other_revision: The [path, revision] list to merge from.
521
self.other_branch, self.other_tree = self._get_tree(other_revision,
491
if not self.base_is_ancestor:
493
if self.other_rev_id is None:
495
if self.other_rev_id in self.this_branch.get_ancestry(self.this_basis):
497
self.this_branch.working_tree().add_pending_merge(self.other_rev_id)
499
def set_other(self, other_revision):
500
other_branch, self.other_tree = get_tree(other_revision,
523
502
if other_revision[1] == -1:
524
self.other_rev_id = _mod_revision.ensure_null(
525
self.other_branch.last_revision())
526
if _mod_revision.is_null(self.other_rev_id):
527
raise errors.NoCommits(self.other_branch)
503
self.other_rev_id = other_branch.last_revision()
504
if self.other_rev_id is None:
505
raise NoCommits(other_branch)
528
506
self.other_basis = self.other_rev_id
529
507
elif other_revision[1] is not None:
530
self.other_rev_id = self.other_branch.get_rev_id(other_revision[1])
508
self.other_rev_id = other_branch.get_rev_id(other_revision[1])
531
509
self.other_basis = self.other_rev_id
533
511
self.other_rev_id = None
534
self.other_basis = self.other_branch.last_revision()
512
self.other_basis = other_branch.last_revision()
535
513
if self.other_basis is None:
536
raise errors.NoCommits(self.other_branch)
537
if self.other_rev_id is not None:
538
self._cached_trees[self.other_rev_id] = self.other_tree
539
self._maybe_fetch(self.other_branch, self.this_branch, self.other_basis)
541
def set_other_revision(self, revision_id, other_branch):
542
"""Set 'other' based on a branch and revision id
544
:param revision_id: The revision to use for a tree
545
:param other_branch: The branch containing this tree
547
self.other_rev_id = revision_id
548
self.other_branch = other_branch
549
self._maybe_fetch(other_branch, self.this_branch, self.other_rev_id)
550
self.other_tree = self.revision_tree(revision_id)
551
self.other_basis = revision_id
553
def set_base_revision(self, revision_id, branch):
554
"""Set 'base' based on a branch and revision id
556
:param revision_id: The revision to use for a tree
557
:param branch: The branch containing this tree
559
self.base_rev_id = revision_id
560
self.base_branch = branch
561
self._maybe_fetch(branch, self.this_branch, revision_id)
562
self.base_tree = self.revision_tree(revision_id)
564
def _maybe_fetch(self, source, target, revision_id):
565
if not source.repository.has_same_location(target.repository):
566
target.fetch(source, revision_id)
569
revisions = [_mod_revision.ensure_null(self.this_basis),
570
_mod_revision.ensure_null(self.other_basis)]
571
if _mod_revision.NULL_REVISION in revisions:
572
self.base_rev_id = _mod_revision.NULL_REVISION
573
self.base_tree = self.revision_tree(self.base_rev_id)
574
self._is_criss_cross = False
576
lcas = self.revision_graph.find_lca(revisions[0], revisions[1])
577
self._is_criss_cross = False
579
self.base_rev_id = _mod_revision.NULL_REVISION
581
self.base_rev_id = list(lcas)[0]
582
else: # len(lcas) > 1
583
self._is_criss_cross = True
585
# find_unique_lca can only handle 2 nodes, so we have to
586
# start back at the beginning. It is a shame to traverse
587
# the graph again, but better than re-implementing
589
self.base_rev_id = self.revision_graph.find_unique_lca(
590
revisions[0], revisions[1])
592
self.base_rev_id = self.revision_graph.find_unique_lca(
594
sorted_lca_keys = self.revision_graph.find_merge_order(
596
if self.base_rev_id == _mod_revision.NULL_REVISION:
597
self.base_rev_id = sorted_lca_keys[0]
599
if self.base_rev_id == _mod_revision.NULL_REVISION:
600
raise errors.UnrelatedBranches()
601
if self._is_criss_cross:
602
trace.warning('Warning: criss-cross merge encountered. See bzr'
603
' help criss-cross.')
604
trace.mutter('Criss-cross lcas: %r' % lcas)
605
if self.base_rev_id in lcas:
606
trace.mutter('Unable to find unique lca. '
607
'Fallback %r as best option.'
609
interesting_revision_ids = set(lcas)
610
interesting_revision_ids.add(self.base_rev_id)
611
interesting_trees = dict((t.get_revision_id(), t)
612
for t in self.this_branch.repository.revision_trees(
613
interesting_revision_ids))
614
self._cached_trees.update(interesting_trees)
615
if self.base_rev_id in lcas:
616
self.base_tree = interesting_trees[self.base_rev_id]
618
self.base_tree = interesting_trees.pop(self.base_rev_id)
619
self._lca_trees = [interesting_trees[key]
620
for key in sorted_lca_keys]
622
self.base_tree = self.revision_tree(self.base_rev_id)
623
self.base_is_ancestor = True
624
self.base_is_other_ancestor = True
625
trace.mutter('Base revid: %r' % self.base_rev_id)
514
raise NoCommits(other_branch)
515
fetch(from_branch=other_branch, to_branch=self.this_branch,
516
last_revision=self.other_basis)
627
518
def set_base(self, base_revision):
628
"""Set the base revision to use for the merge.
630
:param base_revision: A 2-list containing a path and revision number.
632
trace.mutter("doing merge() with no base_revision specified")
519
mutter("doing merge() with no base_revision specified")
633
520
if base_revision == [None, None]:
522
self.base_rev_id = common_ancestor(self.this_basis,
525
except NoCommonAncestor:
526
raise UnrelatedBranches()
527
self.base_tree = get_revid_tree(self.this_branch, self.base_rev_id,
529
self.base_is_ancestor = True
636
base_branch, self.base_tree = self._get_tree(base_revision)
531
base_branch, self.base_tree = get_tree(base_revision)
637
532
if base_revision[1] == -1:
638
533
self.base_rev_id = base_branch.last_revision()
639
534
elif base_revision[1] is None:
640
self.base_rev_id = _mod_revision.NULL_REVISION
642
self.base_rev_id = _mod_revision.ensure_null(
643
base_branch.get_rev_id(base_revision[1]))
644
self._maybe_fetch(base_branch, self.this_branch, self.base_rev_id)
646
def make_merger(self):
647
kwargs = {'working_tree':self.this_tree, 'this_tree': self.this_tree,
648
'other_tree': self.other_tree,
649
'interesting_ids': self.interesting_ids,
650
'interesting_files': self.interesting_files,
651
'this_branch': self.this_branch,
653
if self.merge_type.requires_base:
654
kwargs['base_tree'] = self.base_tree
655
if self.merge_type.supports_reprocess:
656
kwargs['reprocess'] = self.reprocess
658
raise errors.BzrError(
659
"Conflict reduction is not supported for merge"
660
" type %s." % self.merge_type)
661
if self.merge_type.supports_show_base:
662
kwargs['show_base'] = self.show_base
664
raise errors.BzrError("Showing base is not supported for this"
665
" merge type. %s" % self.merge_type)
666
if (not getattr(self.merge_type, 'supports_reverse_cherrypick', True)
667
and not self.base_is_other_ancestor):
668
raise errors.CannotReverseCherrypick()
669
if self.merge_type.supports_cherrypick:
670
kwargs['cherrypick'] = (not self.base_is_ancestor or
671
not self.base_is_other_ancestor)
672
if self._is_criss_cross and getattr(self.merge_type,
673
'supports_lca_trees', False):
674
kwargs['lca_trees'] = self._lca_trees
675
return self.merge_type(pb=None,
676
change_reporter=self.change_reporter,
679
def _do_merge_to(self):
680
merge = self.make_merger()
681
if self.other_branch is not None:
682
self.other_branch.update_references(self.this_branch)
684
if self.recurse == 'down':
685
for relpath, file_id in self.this_tree.iter_references():
686
sub_tree = self.this_tree.get_nested_tree(file_id, relpath)
687
other_revision = self.other_tree.get_reference_revision(
689
if other_revision == sub_tree.last_revision():
691
sub_merge = Merger(sub_tree.branch, this_tree=sub_tree)
692
sub_merge.merge_type = self.merge_type
693
other_branch = self.other_branch.reference_parent(file_id,
695
sub_merge.set_other_revision(other_revision, other_branch)
696
base_revision = self.base_tree.get_reference_revision(file_id)
697
sub_merge.base_tree = \
698
sub_tree.branch.repository.revision_tree(base_revision)
699
sub_merge.base_rev_id = base_revision
704
operation = cleanup.OperationWithCleanups(self._do_merge_to)
705
self.this_tree.lock_tree_write()
706
operation.add_cleanup(self.this_tree.unlock)
707
if self.base_tree is not None:
708
self.base_tree.lock_read()
709
operation.add_cleanup(self.base_tree.unlock)
710
if self.other_tree is not None:
711
self.other_tree.lock_read()
712
operation.add_cleanup(self.other_tree.unlock)
713
merge = operation.run_simple()
714
if len(merge.cooked_conflicts) == 0:
715
if not self.ignore_zero and not trace.is_quiet():
716
trace.note("All changes applied successfully.")
718
trace.note("%d conflicts encountered."
719
% len(merge.cooked_conflicts))
721
return len(merge.cooked_conflicts)
724
class _InventoryNoneEntry(object):
725
"""This represents an inventory entry which *isn't there*.
727
It simplifies the merging logic if we always have an InventoryEntry, even
728
if it isn't actually present
735
symlink_target = None
738
_none_entry = _InventoryNoneEntry()
741
class Merge3Merger(object):
742
"""Three-way merger that uses the merge3 text merger"""
744
supports_reprocess = True
745
supports_show_base = True
746
history_based = False
747
supports_cherrypick = True
748
supports_reverse_cherrypick = True
749
winner_idx = {"this": 2, "other": 1, "conflict": 1}
750
supports_lca_trees = True
752
def __init__(self, working_tree, this_tree, base_tree, other_tree,
753
interesting_ids=None, reprocess=False, show_base=False,
754
pb=None, pp=None, change_reporter=None,
755
interesting_files=None, do_merge=True,
756
cherrypick=False, lca_trees=None, this_branch=None):
757
"""Initialize the merger object and perform the merge.
759
:param working_tree: The working tree to apply the merge to
760
:param this_tree: The local tree in the merge operation
761
:param base_tree: The common tree in the merge operation
762
:param other_tree: The other tree to merge changes from
763
:param this_branch: The branch associated with this_tree. Defaults to
764
this_tree.branch if not supplied.
765
:param interesting_ids: The file_ids of files that should
participate in the merge. May not be combined with
768
:param reprocess: If True, perform conflict-reduction processing.
769
:param show_base: If True, show the base revision in text conflicts.
770
(incompatible with reprocess)
772
:param pp: A ProgressPhase object
773
:param change_reporter: An object that should report changes made
774
:param interesting_files: The tree-relative paths of files that should
775
participate in the merge. If these paths refer to directories,
776
the contents of those directories will also be included. May not
777
be combined with interesting_ids. If neither interesting_files nor
778
interesting_ids is specified, all files may participate in the
780
:param lca_trees: Can be set to a dictionary of {revision_id:rev_tree}
781
if the ancestry was found to include a criss-cross merge.
782
Otherwise should be None.
784
object.__init__(self)
785
if interesting_files is not None and interesting_ids is not None:
787
'specify either interesting_ids or interesting_files')
788
if this_branch is None:
789
this_branch = this_tree.branch
790
self.interesting_ids = interesting_ids
791
self.interesting_files = interesting_files
792
self.this_tree = working_tree
793
self.base_tree = base_tree
794
self.other_tree = other_tree
795
self.this_branch = this_branch
796
self._raw_conflicts = []
797
self.cooked_conflicts = []
798
self.reprocess = reprocess
799
self.show_base = show_base
800
self._lca_trees = lca_trees
801
# Uncommenting this will change the default algorithm to always use
802
# _entries_lca. This can be useful for running the test suite and
803
# making sure we haven't missed any corner cases.
804
# if lca_trees is None:
805
# self._lca_trees = [self.base_tree]
806
self.change_reporter = change_reporter
807
self.cherrypick = cherrypick
811
warnings.warn("pp argument to Merge3Merger is deprecated")
813
warnings.warn("pb argument to Merge3Merger is deprecated")
816
operation = cleanup.OperationWithCleanups(self._do_merge)
817
self.this_tree.lock_tree_write()
818
operation.add_cleanup(self.this_tree.unlock)
819
self.base_tree.lock_read()
820
operation.add_cleanup(self.base_tree.unlock)
821
self.other_tree.lock_read()
822
operation.add_cleanup(self.other_tree.unlock)
825
def _do_merge(self, operation):
826
self.tt = transform.TreeTransform(self.this_tree, None)
827
operation.add_cleanup(self.tt.finalize)
828
self._compute_transform()
829
results = self.tt.apply(no_conflicts=True)
830
self.write_modified(results)
832
self.this_tree.add_conflicts(self.cooked_conflicts)
833
except errors.UnsupportedOperation:
836
def make_preview_transform(self):
837
operation = cleanup.OperationWithCleanups(self._make_preview_transform)
838
self.base_tree.lock_read()
839
operation.add_cleanup(self.base_tree.unlock)
840
self.other_tree.lock_read()
841
operation.add_cleanup(self.other_tree.unlock)
842
return operation.run_simple()
844
def _make_preview_transform(self):
845
self.tt = transform.TransformPreview(self.this_tree)
846
self._compute_transform()
849
def _compute_transform(self):
850
if self._lca_trees is None:
851
entries = self._entries3()
852
resolver = self._three_way
854
entries = self._entries_lca()
855
resolver = self._lca_multi_way
856
child_pb = ui.ui_factory.nested_progress_bar()
858
factories = Merger.hooks['merge_file_content']
859
hooks = [factory(self) for factory in factories] + [self]
860
self.active_hooks = [hook for hook in hooks if hook is not None]
861
for num, (file_id, changed, parents3, names3,
862
executable3) in enumerate(entries):
863
child_pb.update('Preparing file merge', num, len(entries))
864
self._merge_names(file_id, parents3, names3, resolver=resolver)
866
file_status = self._do_merge_contents(file_id)
868
file_status = 'unmodified'
869
self._merge_executable(file_id,
870
executable3, file_status, resolver=resolver)
873
self.tt.fixup_new_roots()
874
self._finish_computing_transform()
876
def _finish_computing_transform(self):
877
"""Finalize the transform and report the changes.
879
This is the second half of _compute_transform.
881
child_pb = ui.ui_factory.nested_progress_bar()
883
fs_conflicts = transform.resolve_conflicts(self.tt, child_pb,
884
lambda t, c: transform.conflict_pass(t, c, self.other_tree))
887
if self.change_reporter is not None:
888
from bzrlib import delta
889
delta.report_changes(
890
self.tt.iter_changes(), self.change_reporter)
891
self.cook_conflicts(fs_conflicts)
892
for conflict in self.cooked_conflicts:
893
trace.warning(unicode(conflict))
896
"""Gather data about files modified between three trees.
898
Return a list of tuples of file_id, changed, parents3, names3,
899
executable3. changed is a boolean indicating whether the file contents
900
or kind were changed. parents3 is a tuple of parent ids for base,
901
other and this. names3 is a tuple of names for base, other and this.
902
executable3 is a tuple of execute-bit values for base, other and this.
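# For illustration (invented data, not from the original source), one element
# of the returned list might be:
#     ('file-id-1', True,
#      ('base-parent-id', 'other-parent-id', 'this-parent-id'),
#      ('old-name.txt', 'new-name.txt', 'old-name.txt'),
#      (False, False, False))
# i.e. the content changed and OTHER renamed the file, while THIS kept the
# BASE name.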
905
iterator = self.other_tree.iter_changes(self.base_tree,
906
specific_files=self.interesting_files,
907
extra_trees=[self.this_tree])
908
this_entries = dict((e.file_id, e) for p, e in
909
self.this_tree.iter_entries_by_dir(
910
self.interesting_ids))
911
for (file_id, paths, changed, versioned, parents, names, kind,
912
executable) in iterator:
913
if (self.interesting_ids is not None and
914
file_id not in self.interesting_ids):
916
entry = this_entries.get(file_id)
917
if entry is not None:
918
this_name = entry.name
919
this_parent = entry.parent_id
920
this_executable = entry.executable
924
this_executable = None
925
parents3 = parents + (this_parent,)
926
names3 = names + (this_name,)
927
executable3 = executable + (this_executable,)
928
result.append((file_id, changed, parents3, names3, executable3))
931
def _entries_lca(self):
932
"""Gather data about files modified between multiple trees.
934
This compares OTHER versus all LCA trees, and for interesting entries,
935
it then compares with THIS and BASE.
937
For the multi-valued entries, the format will be (BASE, [lca1, lca2])
939
:return: [(file_id, changed, parents, names, executable)], where:
941
* file_id: Simple file_id of the entry
942
* changed: Boolean, True if the kind or contents changed else False
943
* parents: ((base, [parent_id, in, lcas]), parent_id_other,
945
* names: ((base, [name, in, lcas]), name_in_other, name_in_this)
946
* executable: ((base, [exec, in, lcas]), exec_in_other,
949
if self.interesting_files is not None:
950
lookup_trees = [self.this_tree, self.base_tree]
951
lookup_trees.extend(self._lca_trees)
952
# I think we should include the lca trees as well
953
interesting_ids = self.other_tree.paths2ids(self.interesting_files,
956
interesting_ids = self.interesting_ids
958
walker = _mod_tree.MultiWalker(self.other_tree, self._lca_trees)
960
base_inventory = self.base_tree.inventory
961
this_inventory = self.this_tree.inventory
962
for path, file_id, other_ie, lca_values in walker.iter_all():
963
# Is this modified at all from any of the other trees?
965
other_ie = _none_entry
966
if interesting_ids is not None and file_id not in interesting_ids:
969
# If other_revision is found in any of the lcas, that means this
970
# node is uninteresting. This is because when merging, if there are
971
# multiple heads(), we have to create a new node. So if we didn't,
972
# we know that the ancestry is linear, and that OTHER did not
974
# See doc/developers/lca_merge_resolution.txt for details
975
other_revision = other_ie.revision
976
if other_revision is not None:
977
# We can't use this shortcut when other_revision is None,
978
# because it may be None because things are WorkingTrees, and
979
# not because it is *actually* None.
980
is_unmodified = False
981
for lca_path, ie in lca_values:
982
if ie is not None and ie.revision == other_revision:
989
for lca_path, lca_ie in lca_values:
991
lca_entries.append(_none_entry)
993
lca_entries.append(lca_ie)
995
if base_inventory.has_id(file_id):
996
base_ie = base_inventory[file_id]
998
base_ie = _none_entry
1000
if this_inventory.has_id(file_id):
1001
this_ie = this_inventory[file_id]
1003
this_ie = _none_entry
1009
for lca_ie in lca_entries:
1010
lca_kinds.append(lca_ie.kind)
1011
lca_parent_ids.append(lca_ie.parent_id)
1012
lca_names.append(lca_ie.name)
1013
lca_executable.append(lca_ie.executable)
1015
kind_winner = self._lca_multi_way(
1016
(base_ie.kind, lca_kinds),
1017
other_ie.kind, this_ie.kind)
1018
parent_id_winner = self._lca_multi_way(
1019
(base_ie.parent_id, lca_parent_ids),
1020
other_ie.parent_id, this_ie.parent_id)
1021
name_winner = self._lca_multi_way(
1022
(base_ie.name, lca_names),
1023
other_ie.name, this_ie.name)
1025
content_changed = True
1026
if kind_winner == 'this':
1027
# No kind change in OTHER, see if there are *any* changes
1028
if other_ie.kind == 'directory':
1029
if parent_id_winner == 'this' and name_winner == 'this':
1030
# No change for this directory in OTHER, skip
1032
content_changed = False
1033
elif other_ie.kind is None or other_ie.kind == 'file':
1034
def get_sha1(ie, tree):
1035
if ie.kind != 'file':
1037
return tree.get_file_sha1(file_id)
1038
base_sha1 = get_sha1(base_ie, self.base_tree)
1039
lca_sha1s = [get_sha1(ie, tree) for ie, tree
1040
in zip(lca_entries, self._lca_trees)]
1041
this_sha1 = get_sha1(this_ie, self.this_tree)
1042
other_sha1 = get_sha1(other_ie, self.other_tree)
1043
sha1_winner = self._lca_multi_way(
1044
(base_sha1, lca_sha1s), other_sha1, this_sha1,
1045
allow_overriding_lca=False)
1046
exec_winner = self._lca_multi_way(
1047
(base_ie.executable, lca_executable),
1048
other_ie.executable, this_ie.executable)
1049
if (parent_id_winner == 'this' and name_winner == 'this'
1050
and sha1_winner == 'this' and exec_winner == 'this'):
1051
# No kind, parent, name, exec, or content change for
1052
# OTHER, so this node is not considered interesting
1054
if sha1_winner == 'this':
1055
content_changed = False
1056
elif other_ie.kind == 'symlink':
1057
def get_target(ie, tree):
1058
if ie.kind != 'symlink':
1060
return tree.get_symlink_target(file_id)
1061
base_target = get_target(base_ie, self.base_tree)
1062
lca_targets = [get_target(ie, tree) for ie, tree
1063
in zip(lca_entries, self._lca_trees)]
1064
this_target = get_target(this_ie, self.this_tree)
1065
other_target = get_target(other_ie, self.other_tree)
1066
target_winner = self._lca_multi_way(
1067
(base_target, lca_targets),
1068
other_target, this_target)
1069
if (parent_id_winner == 'this' and name_winner == 'this'
1070
and target_winner == 'this'):
1071
# No kind, parent, name, or symlink target change
1074
if target_winner == 'this':
1075
content_changed = False
1076
elif other_ie.kind == 'tree-reference':
1077
# The 'changed' information seems to be handled at a higher
1078
# level. At least, _entries3 returns False for content
1079
# changed, even when at a new revision_id.
1080
content_changed = False
1081
if (parent_id_winner == 'this' and name_winner == 'this'):
1082
# Nothing interesting
1085
raise AssertionError('unhandled kind: %s' % other_ie.kind)
1087
# If we have gotten this far, that means something has changed
1088
result.append((file_id, content_changed,
1089
((base_ie.parent_id, lca_parent_ids),
1090
other_ie.parent_id, this_ie.parent_id),
1091
((base_ie.name, lca_names),
1092
other_ie.name, this_ie.name),
1093
((base_ie.executable, lca_executable),
1094
other_ie.executable, this_ie.executable)
1098
@deprecated_method(deprecated_in((2, 4, 0)))
1100
if self.tt.final_kind(self.tt.root) is None:
1101
self.tt.cancel_deletion(self.tt.root)
1102
if self.tt.final_file_id(self.tt.root) is None:
1103
self.tt.version_file(self.tt.tree_file_id(self.tt.root),
1105
other_root_file_id = self.other_tree.get_root_id()
1106
if other_root_file_id is None:
1108
other_root = self.tt.trans_id_file_id(other_root_file_id)
1109
if other_root == self.tt.root:
1111
if self.this_tree.inventory.has_id(
1112
self.other_tree.inventory.root.file_id):
1113
# the other tree's root is a non-root in the current tree (as
1114
# when a previously unrelated branch is merged into another)
1116
if self.tt.final_kind(other_root) is not None:
1117
other_root_is_present = True
1119
# other_root doesn't have a physical representation. We still need
1120
# to move any references to the actual root of the tree.
1121
other_root_is_present = False
1122
# 'other_tree.inventory.root' is not present in this tree. We are
1123
# calling adjust_path for children which *want* to be present with a
1124
# correct place to go.
1125
for _, child in self.other_tree.inventory.root.children.iteritems():
1126
trans_id = self.tt.trans_id_file_id(child.file_id)
1127
if not other_root_is_present:
1128
if self.tt.final_kind(trans_id) is not None:
1129
# The item exist in the final tree and has a defined place
1132
# Move the item into the root
1134
final_name = self.tt.final_name(trans_id)
1135
except errors.NoFinalPath:
1136
# This file is not present anymore, ignore it.
1138
self.tt.adjust_path(final_name, self.tt.root, trans_id)
1139
if other_root_is_present:
1140
self.tt.cancel_creation(other_root)
1141
self.tt.cancel_versioning(other_root)
1143
def write_modified(self, results):
1144
modified_hashes = {}
1145
for path in results.modified_paths:
1146
file_id = self.this_tree.path2id(self.this_tree.relpath(path))
1149
hash = self.this_tree.get_file_sha1(file_id)
1152
modified_hashes[file_id] = hash
1153
self.this_tree.set_merge_modified(modified_hashes)
1156
def parent(entry, file_id):
1157
"""Determine the parent for a file_id (used as a key method)"""
1160
return entry.parent_id
1163
def name(entry, file_id):
1164
"""Determine the name for a file_id (used as a key method)"""
1170
def contents_sha1(tree, file_id):
1171
"""Determine the sha1 of the file contents (used as a key method)."""
1172
if not tree.has_id(file_id):
1174
return tree.get_file_sha1(file_id)
1177
def executable(tree, file_id):
1178
"""Determine the executability of a file-id (used as a key method)."""
1179
if not tree.has_id(file_id):
1181
if tree.kind(file_id) != "file":
1183
return tree.is_executable(file_id)
1186
def kind(tree, file_id):
1187
"""Determine the kind of a file-id (used as a key method)."""
1188
if not tree.has_id(file_id):
1190
return tree.kind(file_id)
1193
def _three_way(base, other, this):
1195
# if 'base == other', either they all agree, or only 'this' has
1198
elif this not in (base, other):
1199
# 'this' is neither 'base' nor 'other', so both sides changed
1202
# "Ambiguous clean merge" -- both sides have made the same change.
1205
# this == base: only other has changed.
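# Illustrative examples of the rule above (comments only, not original code):
#     _three_way('a', 'a', 'b') -> 'this'      (only THIS changed)
#     _three_way('a', 'b', 'a') -> 'other'     (only OTHER changed)
#     _three_way('a', 'b', 'c') -> 'conflict'  (both changed, differently)
#     _three_way('a', 'b', 'b') -> 'this'      (both made the same change)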
1209
def _lca_multi_way(bases, other, this, allow_overriding_lca=True):
1210
"""Consider LCAs when determining whether a change has occurred.
1212
If LCAS are all identical, this is the same as a _three_way comparison.
1214
:param bases: value in (BASE, [LCAS])
1215
:param other: value in OTHER
1216
:param this: value in THIS
1217
:param allow_overriding_lca: If there is more than one unique lca
1218
value, allow OTHER to override THIS if it has a new value, and
1219
THIS only has an lca value, or vice versa. This is appropriate for
1220
truly scalar values, not as much for non-scalars.
1221
:return: 'this', 'other', or 'conflict' depending on whether an entry
1224
# See doc/developers/lca_tree_merging.txt for details about this
1227
# Either Ambiguously clean, or nothing was actually changed. We
1230
base_val, lca_vals = bases
1231
# Remove 'base_val' from the lca_vals, because it is not interesting
1232
filtered_lca_vals = [lca_val for lca_val in lca_vals
1233
if lca_val != base_val]
1234
if len(filtered_lca_vals) == 0:
1235
return Merge3Merger._three_way(base_val, other, this)
1237
unique_lca_vals = set(filtered_lca_vals)
1238
if len(unique_lca_vals) == 1:
1239
return Merge3Merger._three_way(unique_lca_vals.pop(), other, this)
1241
if allow_overriding_lca:
1242
if other in unique_lca_vals:
1243
if this in unique_lca_vals:
1244
# Each side picked a different lca, conflict
1247
# This has a value which supersedes both lca values, and
1248
# other only has an lca value
1250
elif this in unique_lca_vals:
1251
# OTHER has a value which supersedes both lca values, and this
1252
# only has an lca value
1255
# At this point, the lcas disagree, and the tips disagree as well.
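# Illustrative sketch (not part of the original module): how LCA values feed
# the resolution above, assuming the behaviour documented in the docstring:
#
#   _lca_multi_way(('a', ['a', 'a']), 'b', 'a') -> 'other'
#       (all LCAs equal BASE, so this degrades to _three_way('a', 'b', 'a'))
#   _lca_multi_way(('a', ['b', 'c']), 'd', 'b') -> 'other'
#       (OTHER supersedes both LCA values, THIS only has an LCA value)
#   _lca_multi_way(('a', ['b', 'c']), 'b', 'c') -> 'conflict'
#       (each side picked a different LCA value)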
1259
@deprecated_method(deprecated_in((2, 2, 0)))
1260
def scalar_three_way(this_tree, base_tree, other_tree, file_id, key):
1261
"""Do a three-way test on a scalar.
1262
Return "this", "other" or "conflict", depending whether a value wins.
1264
key_base = key(base_tree, file_id)
1265
key_other = key(other_tree, file_id)
1266
# if base == other, either they all agree, or only THIS has changed.
1267
if key_base == key_other:
1269
key_this = key(this_tree, file_id)
1270
# "Ambiguous clean merge"
1271
if key_this == key_other:
1273
elif key_this == key_base:
1278
def merge_names(self, file_id):
1279
def get_entry(tree):
1280
if tree.has_id(file_id):
1281
return tree.inventory[file_id]
1284
this_entry = get_entry(self.this_tree)
1285
other_entry = get_entry(self.other_tree)
1286
base_entry = get_entry(self.base_tree)
1287
entries = (base_entry, other_entry, this_entry)
1290
for entry in entries:
1293
parents.append(None)
1295
names.append(entry.name)
1296
parents.append(entry.parent_id)
1297
return self._merge_names(file_id, parents, names,
1298
resolver=self._three_way)
1300
def _merge_names(self, file_id, parents, names, resolver):
1301
"""Perform a merge on file_id names and parents"""
1302
base_name, other_name, this_name = names
1303
base_parent, other_parent, this_parent = parents
1305
name_winner = resolver(*names)
1307
parent_id_winner = resolver(*parents)
1308
if this_name is None:
1309
if name_winner == "this":
1310
name_winner = "other"
1311
if parent_id_winner == "this":
1312
parent_id_winner = "other"
1313
if name_winner == "this" and parent_id_winner == "this":
1315
if name_winner == 'conflict' or parent_id_winner == 'conflict':
1316
# Creating helpers (.OTHER or .THIS) here causes problems down the
1317
# road if a ContentConflict needs to be created so we should not do
1319
trans_id = self.tt.trans_id_file_id(file_id)
1320
self._raw_conflicts.append(('path conflict', trans_id, file_id,
1321
this_parent, this_name,
1322
other_parent, other_name))
1323
if not self.other_tree.has_id(file_id):
1324
# it doesn't matter whether the result was 'other' or
1325
# 'conflict'-- if it has no file id, we leave it alone.
1327
parent_id = parents[self.winner_idx[parent_id_winner]]
1328
name = names[self.winner_idx[name_winner]]
1329
if parent_id is not None or name is not None:
1330
# if we get here, name_winner and parent_winner are set to safe
1332
if parent_id is None and name is not None:
1333
# if parent_id is None and name is non-None, current file is
1335
if names[self.winner_idx[parent_id_winner]] != '':
1336
raise AssertionError(
1337
'File looks like a root, but named %s' %
1338
names[self.winner_idx[parent_id_winner]])
1339
parent_trans_id = transform.ROOT_PARENT
1341
parent_trans_id = self.tt.trans_id_file_id(parent_id)
1342
self.tt.adjust_path(name, parent_trans_id,
1343
self.tt.trans_id_file_id(file_id))
1345
def _do_merge_contents(self, file_id):
1346
"""Performs a merge on file_id contents."""
1347
def contents_pair(tree):
1348
if not tree.has_id(file_id):
1350
kind = tree.kind(file_id)
1352
contents = tree.get_file_sha1(file_id)
1353
elif kind == "symlink":
1354
contents = tree.get_symlink_target(file_id)
1357
return kind, contents
1359
# See SPOT run. run, SPOT, run.
1360
# So we're not QUITE repeating ourselves; we do tricky things with
1362
base_pair = contents_pair(self.base_tree)
1363
other_pair = contents_pair(self.other_tree)
1365
this_pair = contents_pair(self.this_tree)
1366
lca_pairs = [contents_pair(tree) for tree in self._lca_trees]
1367
winner = self._lca_multi_way((base_pair, lca_pairs), other_pair,
1368
this_pair, allow_overriding_lca=False)
1370
if base_pair == other_pair:
1373
# We delayed evaluating this_pair as long as we can to avoid
1374
# unnecessary sha1 calculation
1375
this_pair = contents_pair(self.this_tree)
1376
winner = self._three_way(base_pair, other_pair, this_pair)
1377
if winner == 'this':
1378
# No interesting changes introduced by OTHER
1380
# We have a hypothetical conflict, but if we have files, then we
1381
# can try to merge the content
1382
trans_id = self.tt.trans_id_file_id(file_id)
1383
params = MergeHookParams(self, file_id, trans_id, this_pair[0],
1384
other_pair[0], winner)
1385
hooks = self.active_hooks
1386
hook_status = 'not_applicable'
1388
hook_status, lines = hook.merge_contents(params)
1389
if hook_status != 'not_applicable':
1390
# Don't try any more hooks, this one applies.
1392
# If the merge ends up replacing the content of the file, we get rid of
1393
# it at the end of this method (this variable is used to track the
1394
# exceptions to this rule).
1397
if hook_status == 'not_applicable':
1398
# No merge hook was able to resolve the situation. Two cases exist:
1399
# a content conflict or a duplicate one.
1401
name = self.tt.final_name(trans_id)
1402
parent_id = self.tt.final_parent(trans_id)
1404
inhibit_content_conflict = False
1405
if params.this_kind is None: # file_id is not in THIS
1406
# Is the name used for a different file_id ?
1407
dupe_path = self.other_tree.id2path(file_id)
1408
this_id = self.this_tree.path2id(dupe_path)
1409
if this_id is not None:
1410
# Two entries for the same path
1412
# versioning the merged file will trigger a duplicate
1414
self.tt.version_file(file_id, trans_id)
1415
transform.create_from_tree(
1416
self.tt, trans_id, self.other_tree, file_id,
1417
filter_tree_path=self._get_filter_tree_path(file_id))
1418
inhibit_content_conflict = True
1419
elif params.other_kind is None: # file_id is not in OTHER
1420
# Is the name used for a different file_id ?
1421
dupe_path = self.this_tree.id2path(file_id)
1422
other_id = self.other_tree.path2id(dupe_path)
1423
if other_id is not None:
1424
# Two entries for the same path again, but here, the other
1425
# entry will also be merged. We simply inhibit the
1426
# 'content' conflict creation because we know OTHER will
1427
# create (or has already created depending on ordering) an
1428
# entry at the same path. This will trigger a 'duplicate'
1431
inhibit_content_conflict = True
1432
if not inhibit_content_conflict:
1433
if params.this_kind is not None:
1434
self.tt.unversion_file(trans_id)
1435
# This is a contents conflict, because none of the available
1436
# functions could merge it.
1437
file_group = self._dump_conflicts(name, parent_id, file_id,
1439
self._raw_conflicts.append(('contents conflict', file_group))
1440
elif hook_status == 'success':
1441
self.tt.create_file(lines, trans_id)
1442
elif hook_status == 'conflicted':
1443
# XXX: perhaps the hook should be able to provide
1444
# the BASE/THIS/OTHER files?
1445
self.tt.create_file(lines, trans_id)
1446
self._raw_conflicts.append(('text conflict', trans_id))
1447
name = self.tt.final_name(trans_id)
1448
parent_id = self.tt.final_parent(trans_id)
1449
self._dump_conflicts(name, parent_id, file_id)
1450
elif hook_status == 'delete':
1451
self.tt.unversion_file(trans_id)
1453
elif hook_status == 'done':
1454
# The hook function did whatever it needs to do directly, no
1455
# further action needed here.
1458
raise AssertionError('unknown hook_status: %r' % (hook_status,))
1459
if not self.this_tree.has_id(file_id) and result == "modified":
1460
self.tt.version_file(file_id, trans_id)
1462
# The merge has been performed and produced a new content, so the
1463
# old contents should not be retained.
1464
self.tt.delete_contents(trans_id)
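# Illustrative summary (not part of the original module) of the hook protocol
# handled above: a per-file merge hook's merge_contents(params) returns a
# (status, lines) pair, where status is one of:
#   'not_applicable' - the hook declines; the next hook (or the fallback) runs
#   'success'        - lines hold the cleanly merged content
#   'conflicted'     - lines hold content with conflict markers; a text
#                      conflict is recorded and helper files are dumped
#   'delete'         - the file should be removed
#   'done'           - the hook already updated the transform itself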
1467
def _default_other_winner_merge(self, merge_hook_params):
1468
"""Replace this contents with other."""
1469
file_id = merge_hook_params.file_id
1470
trans_id = merge_hook_params.trans_id
1471
if self.other_tree.has_id(file_id):
1472
# OTHER changed the file
1473
transform.create_from_tree(
1474
self.tt, trans_id, self.other_tree, file_id,
1475
filter_tree_path=self._get_filter_tree_path(file_id))
1477
elif self.this_tree.has_id(file_id):
1478
# OTHER deleted the file
1479
return 'delete', None
1481
raise AssertionError(
1482
'winner is OTHER, but file_id %r not in THIS or OTHER tree'
1485
def merge_contents(self, merge_hook_params):
1486
"""Fallback merge logic after user installed hooks."""
1487
# This function is used in merge hooks as the fallback instance.
1488
# Perhaps making this function and the functions it calls be a
1489
# separate class would be better.
1490
if merge_hook_params.winner == 'other':
1491
# OTHER is a straight winner, so replace this contents with other
1492
return self._default_other_winner_merge(merge_hook_params)
1493
elif merge_hook_params.is_file_merge():
1494
# THIS and OTHER are both files, so text merge. Either
1495
# BASE is a file, or both converted to files, so at least we
1496
# have agreement that output should be a file.
1498
self.text_merge(merge_hook_params.file_id,
1499
merge_hook_params.trans_id)
1500
except errors.BinaryFile:
1501
return 'not_applicable', None
1504
return 'not_applicable', None
1506
def get_lines(self, tree, file_id):
1507
"""Return the lines in a file, or an empty list."""
1508
if tree.has_id(file_id):
1509
return tree.get_file_lines(file_id)
1513
def text_merge(self, file_id, trans_id):
1514
"""Perform a three-way text merge on a file_id"""
1515
# it's possible that we got here with base as a different type.
1516
# if so, we just want two-way text conflicts.
1517
if self.base_tree.has_id(file_id) and \
1518
self.base_tree.kind(file_id) == "file":
1519
base_lines = self.get_lines(self.base_tree, file_id)
1522
other_lines = self.get_lines(self.other_tree, file_id)
1523
this_lines = self.get_lines(self.this_tree, file_id)
1524
m3 = merge3.Merge3(base_lines, this_lines, other_lines,
1525
is_cherrypick=self.cherrypick)
1526
start_marker = "!START OF MERGE CONFLICT!" + "I HOPE THIS IS UNIQUE"
1527
if self.show_base is True:
1528
base_marker = '|' * 7
1532
def iter_merge3(retval):
1533
retval["text_conflicts"] = False
1534
for line in m3.merge_lines(name_a = "TREE",
1535
name_b = "MERGE-SOURCE",
1536
name_base = "BASE-REVISION",
1537
start_marker=start_marker,
1538
base_marker=base_marker,
1539
reprocess=self.reprocess):
1540
if line.startswith(start_marker):
1541
retval["text_conflicts"] = True
1542
yield line.replace(start_marker, '<' * 7)
1546
merge3_iterator = iter_merge3(retval)
1547
self.tt.create_file(merge3_iterator, trans_id)
1548
if retval["text_conflicts"] is True:
1549
self._raw_conflicts.append(('text conflict', trans_id))
1550
name = self.tt.final_name(trans_id)
1551
parent_id = self.tt.final_parent(trans_id)
1552
file_group = self._dump_conflicts(name, parent_id, file_id,
1553
this_lines, base_lines,
1555
file_group.append(trans_id)
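# Illustrative sketch (not part of the original module): when iter_merge3
# reports a conflict, the file content written above looks roughly like:
#
#   <<<<<<< TREE
#   the THIS version of the region
#   =======
#   the OTHER version of the region
#   >>>>>>> MERGE-SOURCE
#
# with an additional '||||||| BASE-REVISION' section when show_base is set.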
1558
def _get_filter_tree_path(self, file_id):
1559
if self.this_tree.supports_content_filtering():
1560
# We get the path from the working tree if it exists.
1561
# That fails though when OTHER is adding a file, so
1562
# we fall back to the other tree to find the path if
1563
# it doesn't exist locally.
1565
return self.this_tree.id2path(file_id)
1566
except errors.NoSuchId:
1567
return self.other_tree.id2path(file_id)
1568
# Skip the id2path lookup for older formats
1571
def _dump_conflicts(self, name, parent_id, file_id, this_lines=None,
1572
base_lines=None, other_lines=None, set_version=False,
1574
"""Emit conflict files.
1575
If this_lines, base_lines, or other_lines are omitted, they will be
1576
determined automatically. If set_version is true, the .OTHER, .THIS
1577
or .BASE (in that order) will be created as versioned files.
1579
data = [('OTHER', self.other_tree, other_lines),
1580
('THIS', self.this_tree, this_lines)]
1582
data.append(('BASE', self.base_tree, base_lines))
1584
# We need to use the actual path in the working tree of the file here,
1585
# ignoring the conflict suffixes
1587
if wt.supports_content_filtering():
1589
filter_tree_path = wt.id2path(file_id)
1590
except errors.NoSuchId:
1591
# file has been deleted
1592
filter_tree_path = None
1594
# Skip the id2path lookup for older formats
1595
filter_tree_path = None
1599
for suffix, tree, lines in data:
1600
if tree.has_id(file_id):
1601
trans_id = self._conflict_file(name, parent_id, tree, file_id,
1602
suffix, lines, filter_tree_path)
1603
file_group.append(trans_id)
1604
if set_version and not versioned:
1605
self.tt.version_file(file_id, trans_id)
1609
def _conflict_file(self, name, parent_id, tree, file_id, suffix,
1610
lines=None, filter_tree_path=None):
1611
"""Emit a single conflict file."""
1612
name = name + '.' + suffix
1613
trans_id = self.tt.create_path(name, parent_id)
1614
transform.create_from_tree(self.tt, trans_id, tree, file_id, lines,
1618
def merge_executable(self, file_id, file_status):
1619
"""Perform a merge on the execute bit."""
1620
executable = [self.executable(t, file_id) for t in (self.base_tree,
1621
self.other_tree, self.this_tree)]
1622
self._merge_executable(file_id, executable, file_status,
1623
resolver=self._three_way)
1625
def _merge_executable(self, file_id, executable, file_status,
1627
"""Perform a merge on the execute bit."""
1628
base_executable, other_executable, this_executable = executable
1629
if file_status == "deleted":
1631
winner = resolver(*executable)
1632
if winner == "conflict":
1633
# There must be a None in here, if we have a conflict, but we
1634
# need executability since file status was not deleted.
1635
if self.executable(self.other_tree, file_id) is None:
1639
if winner == 'this' and file_status != "modified":
1641
trans_id = self.tt.trans_id_file_id(file_id)
1642
if self.tt.final_kind(trans_id) != "file":
1644
if winner == "this":
1645
executability = this_executable
1647
if self.other_tree.has_id(file_id):
1648
executability = other_executable
1649
elif self.this_tree.has_id(file_id):
1650
executability = this_executable
1651
elif self.base_tree.has_id(file_id):
1652
executability = base_executable
1653
if executability is not None:
1654
trans_id = self.tt.trans_id_file_id(file_id)
1655
self.tt.set_executability(executability, trans_id)
1657
def cook_conflicts(self, fs_conflicts):
1658
"""Convert all conflicts into a form that doesn't depend on trans_id"""
1659
content_conflict_file_ids = set()
1660
cooked_conflicts = transform.cook_conflicts(fs_conflicts, self.tt)
1661
fp = transform.FinalPaths(self.tt)
1662
for conflict in self._raw_conflicts:
1663
conflict_type = conflict[0]
1664
if conflict_type == 'path conflict':
    (trans_id, file_id,
     this_parent, this_name,
     other_parent, other_name) = conflict[1:]
1668
if this_parent is None or this_name is None:
1669
this_path = '<deleted>'
1671
parent_path = fp.get_path(
1672
self.tt.trans_id_file_id(this_parent))
1673
this_path = osutils.pathjoin(parent_path, this_name)
1674
if other_parent is None or other_name is None:
1675
other_path = '<deleted>'
1677
if other_parent == self.other_tree.get_root_id():
1678
# The tree transform doesn't know about the other root,
1679
# so we special case here to avoid a NoFinalPath
1683
parent_path = fp.get_path(
1684
self.tt.trans_id_file_id(other_parent))
1685
other_path = osutils.pathjoin(parent_path, other_name)
1686
c = _mod_conflicts.Conflict.factory(
1687
'path conflict', path=this_path,
1688
conflict_path=other_path,
1690
elif conflict_type == 'contents conflict':
1691
for trans_id in conflict[1]:
1692
file_id = self.tt.final_file_id(trans_id)
1693
if file_id is not None:
1694
# Ok we found the relevant file-id
1696
path = fp.get_path(trans_id)
1697
for suffix in ('.BASE', '.THIS', '.OTHER'):
1698
if path.endswith(suffix):
1699
# Here is the raw path
1700
path = path[:-len(suffix)]
1702
c = _mod_conflicts.Conflict.factory(conflict_type,
1703
path=path, file_id=file_id)
1704
content_conflict_file_ids.add(file_id)
1705
elif conflict_type == 'text conflict':
1706
trans_id = conflict[1]
1707
path = fp.get_path(trans_id)
1708
file_id = self.tt.final_file_id(trans_id)
1709
c = _mod_conflicts.Conflict.factory(conflict_type,
1710
path=path, file_id=file_id)
1712
raise AssertionError('bad conflict type: %r' % (conflict,))
1713
cooked_conflicts.append(c)
1715
self.cooked_conflicts = []
1716
# We want to get rid of path conflicts when a corresponding contents
1717
# conflict exists. This can occur when one branch deletes a file while
1718
# the other renames *and* modifies it. In this case, the content
1719
# conflict is enough.
1720
for c in cooked_conflicts:
1721
if (c.typestring == 'path conflict'
1722
and c.file_id in content_conflict_file_ids):
1724
self.cooked_conflicts.append(c)
1725
self.cooked_conflicts.sort(key=_mod_conflicts.Conflict.sort_key)
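# Illustrative sketch (not part of the original module): after cooking, the
# conflicts no longer reference trans_ids, only paths and file ids, e.g.
#
#   _mod_conflicts.Conflict.factory('text conflict',
#       path='README', file_id='readme-id')
#   _mod_conflicts.Conflict.factory('path conflict',
#       path='a/x', conflict_path='b/x', file_id='x-id')
#
# (the paths and file ids shown here are hypothetical)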
1728
class WeaveMerger(Merge3Merger):
1729
"""Three-way tree merger, text weave merger."""
1730
supports_reprocess = True
1731
supports_show_base = False
1732
supports_reverse_cherrypick = False
1733
history_based = True
1735
def _generate_merge_plan(self, file_id, base):
1736
return self.this_tree.plan_file_merge(file_id, self.other_tree,
1739
def _merged_lines(self, file_id):
1740
"""Generate the merged lines.
1741
There is no distinction between lines that are meant to contain <<<<<<<
1745
base = self.base_tree
1748
plan = self._generate_merge_plan(file_id, base)
1749
if 'merge' in debug.debug_flags:
1751
trans_id = self.tt.trans_id_file_id(file_id)
1752
name = self.tt.final_name(trans_id) + '.plan'
1753
contents = ('%11s|%s' % l for l in plan)
1754
self.tt.new_file(name, self.tt.final_parent(trans_id), contents)
1755
textmerge = versionedfile.PlanWeaveMerge(plan, '<<<<<<< TREE\n',
1756
'>>>>>>> MERGE-SOURCE\n')
1757
lines, conflicts = textmerge.merge_lines(self.reprocess)
1759
base_lines = textmerge.base_from_plan()
1762
return lines, base_lines
1764
def text_merge(self, file_id, trans_id):
1765
"""Perform a (weave) text merge for a given file and file-id.
1766
If conflicts are encountered, .THIS and .OTHER files will be emitted,
1767
and a conflict will be noted.
1769
lines, base_lines = self._merged_lines(file_id)
1771
# Note we're checking whether the OUTPUT is binary in this case,
1772
# because we don't want to get into weave merge guts.
1773
textfile.check_text_lines(lines)
1774
self.tt.create_file(lines, trans_id)
1775
if base_lines is not None:
1777
self._raw_conflicts.append(('text conflict', trans_id))
1778
name = self.tt.final_name(trans_id)
1779
parent_id = self.tt.final_parent(trans_id)
1780
file_group = self._dump_conflicts(name, parent_id, file_id,
1782
base_lines=base_lines)
1783
file_group.append(trans_id)
1786
class LCAMerger(WeaveMerger):
1788
def _generate_merge_plan(self, file_id, base):
1789
return self.this_tree.plan_file_lca_merge(file_id, self.other_tree,
1792
class Diff3Merger(Merge3Merger):
1793
"""Three-way merger using external diff3 for text merging"""
1795
def dump_file(self, temp_dir, name, tree, file_id):
1796
out_path = osutils.pathjoin(temp_dir, name)
1797
out_file = open(out_path, "wb")
1799
in_file = tree.get_file(file_id)
1800
for line in in_file:
1801
out_file.write(line)
1806
def text_merge(self, file_id, trans_id):
1807
"""Perform a diff3 merge using a specified file-id and trans-id.
1808
If conflicts are encountered, .BASE, .THIS, and .OTHER conflict files
1809
will be dumped, and a conflict will be noted.
1812
temp_dir = osutils.mkdtemp(prefix="bzr-")
1814
new_file = osutils.pathjoin(temp_dir, "new")
1815
this = self.dump_file(temp_dir, "this", self.this_tree, file_id)
1816
base = self.dump_file(temp_dir, "base", self.base_tree, file_id)
1817
other = self.dump_file(temp_dir, "other", self.other_tree, file_id)
1818
status = bzrlib.patch.diff3(new_file, this, base, other)
1819
if status not in (0, 1):
1820
raise errors.BzrError("Unhandled diff3 exit code")
1821
f = open(new_file, 'rb')
1823
self.tt.create_file(f, trans_id)
1827
name = self.tt.final_name(trans_id)
1828
parent_id = self.tt.final_parent(trans_id)
1829
self._dump_conflicts(name, parent_id, file_id)
1830
self._raw_conflicts.append(('text conflict', trans_id))
1832
osutils.rmtree(temp_dir)
1835
class PathNotInTree(errors.BzrError):
1837
_fmt = """Merge-into failed because %(tree)s does not contain %(path)s."""
1839
def __init__(self, path, tree):
1840
errors.BzrError.__init__(self, path=path, tree=tree)
1843
class MergeIntoMerger(Merger):
1844
"""Merger that understands other_tree will be merged into a subdir.
1846
This also changes the Merger api so that it uses real Branch, revision_id,
1847
and RevisionTree objects, rather than using revision specs.
1850
def __init__(self, this_tree, other_branch, other_tree, target_subdir,
1851
source_subpath, other_rev_id=None):
1852
"""Create a new MergeIntoMerger object.
1854
source_subpath in other_tree will be effectively copied to
1855
target_subdir in this_tree.
1857
:param this_tree: The tree that we will be merging into.
1858
:param other_branch: The Branch we will be merging from.
1859
:param other_tree: The RevisionTree object we want to merge.
1860
:param target_subdir: The relative path where we want to merge
1861
other_tree into this_tree
1862
:param source_subpath: The relative path specifying the subtree of
1863
other_tree to merge into this_tree.
1865
# It is assumed that we are merging a tree that is not in our current
1866
# ancestry, which means we are using the "EmptyTree" as our basis.
1867
null_ancestor_tree = this_tree.branch.repository.revision_tree(
1868
_mod_revision.NULL_REVISION)
1869
super(MergeIntoMerger, self).__init__(
1870
this_branch=this_tree.branch,
1871
this_tree=this_tree,
1872
other_tree=other_tree,
1873
base_tree=null_ancestor_tree,
1875
self._target_subdir = target_subdir
1876
self._source_subpath = source_subpath
1877
self.other_branch = other_branch
1878
if other_rev_id is None:
1879
other_rev_id = other_tree.get_revision_id()
1880
self.other_rev_id = self.other_basis = other_rev_id
1881
self.base_is_ancestor = True
1882
self.backup_files = True
1883
self.merge_type = Merge3Merger
1884
self.show_base = False
1885
self.reprocess = False
1886
self.interesting_ids = None
1887
self.merge_type = _MergeTypeParameterizer(MergeIntoMergeType,
1888
target_subdir=self._target_subdir,
1889
source_subpath=self._source_subpath)
1890
if self._source_subpath != '':
1891
# If this isn't a partial merge make sure the revisions will be
1893
self._maybe_fetch(self.other_branch, self.this_branch,
1896
def set_pending(self):
1897
if self._source_subpath != '':
1899
Merger.set_pending(self)
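# Illustrative sketch (not part of the original module): merging another
# branch's tree into a subdirectory of a working tree. The tree, branch and
# paths used here are hypothetical.
#
#   merger = MergeIntoMerger(this_tree=wt, other_branch=other_branch,
#       other_tree=other_branch.basis_tree(), target_subdir='vendor/lib',
#       source_subpath='')
#   merger.do_merge()
#   merger.set_pending()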
1902
class _MergeTypeParameterizer(object):
1903
"""Wrap a merge-type class to provide extra parameters.
1905
This is a hack used by MergeIntoMerger to pass some extra parameters to its
1906
merge_type. Merger.do_merge() sets up its own set of parameters to pass to
1907
the 'merge_type' member. It is difficult to override do_merge without
1908
re-writing the whole thing, so instead we create a wrapper which will pass
1909
the extra parameters.
1912
def __init__(self, merge_type, **kwargs):
1913
self._extra_kwargs = kwargs
1914
self._merge_type = merge_type
1916
def __call__(self, *args, **kwargs):
1917
kwargs.update(self._extra_kwargs)
1918
return self._merge_type(*args, **kwargs)
1920
def __getattr__(self, name):
1921
return getattr(self._merge_type, name)
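# Illustrative sketch (not part of the original module): how MergeIntoMerger
# uses this wrapper (see its __init__ above). The target/source values are
# hypothetical.
#
#   merge_type = _MergeTypeParameterizer(MergeIntoMergeType,
#       target_subdir='vendor/lib', source_subpath='')
#   # Merger.do_merge() later instantiates merge_type with its usual
#   # arguments, and the wrapper silently adds the two extra keywords.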
1924
class MergeIntoMergeType(Merge3Merger):
1925
"""Merger that incorporates a tree (or part of a tree) into another."""
1927
def __init__(self, *args, **kwargs):
1928
"""Initialize the merger object.
1930
:param args: See Merge3Merger.__init__'s args.
1931
:param kwargs: See Merge3Merger.__init__'s keyword args, except for
1932
source_subpath and target_subdir.
1933
:keyword source_subpath: The relative path specifying the subtree of
1934
other_tree to merge into this_tree.
1935
:keyword target_subdir: The relative path where we want to merge
1936
other_tree into this_tree
1938
# All of the interesting work happens during Merge3Merger.__init__(),
1939
# so we have to hack in to get our extra parameters set.
1940
self._source_subpath = kwargs.pop('source_subpath')
1941
self._target_subdir = kwargs.pop('target_subdir')
1942
super(MergeIntoMergeType, self).__init__(*args, **kwargs)
1944
def _compute_transform(self):
1945
child_pb = ui.ui_factory.nested_progress_bar()
1947
entries = self._entries_to_incorporate()
1948
entries = list(entries)
1949
for num, (entry, parent_id) in enumerate(entries):
1950
child_pb.update('Preparing file merge', num, len(entries))
1951
parent_trans_id = self.tt.trans_id_file_id(parent_id)
1952
trans_id = transform.new_by_entry(self.tt, entry,
1953
parent_trans_id, self.other_tree)
1956
self._finish_computing_transform()
1958
def _entries_to_incorporate(self):
1959
"""Yields pairs of (inventory_entry, new_parent)."""
1960
other_inv = self.other_tree.inventory
1961
subdir_id = other_inv.path2id(self._source_subpath)
1962
if subdir_id is None:
1963
# XXX: The error would be clearer if it gave the URL of the source
1964
# branch, but we don't have a reference to that here.
1965
raise PathNotInTree(self._source_subpath, "Source tree")
1966
subdir = other_inv[subdir_id]
1967
parent_in_target = osutils.dirname(self._target_subdir)
1968
target_id = self.this_tree.inventory.path2id(parent_in_target)
1969
if target_id is None:
1970
raise PathNotInTree(self._target_subdir, "Target tree")
1971
name_in_target = osutils.basename(self._target_subdir)
1972
merge_into_root = subdir.copy()
1973
merge_into_root.name = name_in_target
1974
if self.this_tree.inventory.has_id(merge_into_root.file_id):
1975
# Give the root a new file-id.
1976
# This can happen fairly easily if the directory we are
1977
# incorporating is the root, and both trees have 'TREE_ROOT' as
1978
# their root_id. Users will expect this to Just Work, so we
1979
# change the file-id here.
1980
# Non-root file-ids could potentially conflict too. That's really
1981
# an edge case, so we don't do anything special for those. We let
1982
# them cause conflicts.
1983
merge_into_root.file_id = generate_ids.gen_file_id(name_in_target)
1984
yield (merge_into_root, target_id)
1985
if subdir.kind != 'directory':
1986
# No children, so we are done.
1988
for ignored_path, entry in other_inv.iter_entries_by_dir(subdir_id):
1989
parent_id = entry.parent_id
1990
if parent_id == subdir.file_id:
1991
# The root's parent ID has changed, so make sure children of
1992
# the root refer to the new ID.
1993
parent_id = merge_into_root.file_id
1994
yield (entry, parent_id)
1997
def merge_inner(this_branch, other_tree, base_tree, ignore_zero=False,
1999
merge_type=Merge3Merger,
2000
interesting_ids=None,
2004
interesting_files=None,
2007
change_reporter=None):
2008
"""Primary interface for merging.
2010
Typical use is probably::
2012
merge_inner(branch, branch.get_revision_tree(other_revision),
2013
branch.get_revision_tree(base_revision))
2015
if this_tree is None:
2016
raise errors.BzrError("bzrlib.merge.merge_inner requires a this_tree "
2018
merger = Merger(this_branch, other_tree, base_tree, this_tree=this_tree,
2019
pb=pb, change_reporter=change_reporter)
2020
merger.backup_files = backup_files
2021
merger.merge_type = merge_type
2022
merger.interesting_ids = interesting_ids
2023
merger.ignore_zero = ignore_zero
2024
if interesting_files:
2026
raise ValueError('Only supply interesting_ids'
2027
' or interesting_files')
2028
merger.interesting_files = interesting_files
2029
merger.show_base = show_base
2030
merger.reprocess = reprocess
2031
merger.other_rev_id = other_rev_id
2032
merger.other_basis = other_rev_id
2033
get_revision_id = getattr(base_tree, 'get_revision_id', None)
2034
if get_revision_id is None:
2035
get_revision_id = base_tree.last_revision
2036
merger.cache_trees_with_revision_ids([other_tree, base_tree, this_tree])
2037
merger.set_base_revision(get_revision_id(), this_branch)
2038
return merger.do_merge()
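# Illustrative sketch (not part of the original module): resolving the trees
# from revision ids before calling merge_inner, along the lines of the
# docstring above. The working tree and revision ids are hypothetical.
#
#   wt = workingtree.WorkingTree.open('.')
#   repo = wt.branch.repository
#   conflicts = merge_inner(wt.branch,
#                           repo.revision_tree(other_rev_id),
#                           repo.revision_tree(base_rev_id),
#                           this_tree=wt)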
2040
def get_merge_type_registry():
2041
"""Merge type registry is in bzrlib.option to avoid circular imports.
2043
This method provides a sanctioned way to retrieve it.
2045
from bzrlib import option
2046
return option._merge_type_registry
2049
def _plan_annotate_merge(annotated_a, annotated_b, ancestors_a, ancestors_b):
2050
def status_a(revision, text):
2051
if revision in ancestors_b:
2052
return 'killed-b', text
2054
return 'new-a', text
2056
def status_b(revision, text):
2057
if revision in ancestors_a:
2058
return 'killed-a', text
2060
return 'new-b', text
2062
plain_a = [t for (a, t) in annotated_a]
2063
plain_b = [t for (a, t) in annotated_b]
2064
matcher = patiencediff.PatienceSequenceMatcher(None, plain_a, plain_b)
2065
blocks = matcher.get_matching_blocks()
2068
for ai, bi, l in blocks:
2069
# process all mismatched sections
2070
# (last mismatched section is handled because blocks always
2071
# includes a 0-length last block)
2072
for revision, text in annotated_a[a_cur:ai]:
2073
yield status_a(revision, text)
2074
for revision, text in annotated_b[b_cur:bi]:
2075
yield status_b(revision, text)
2076
# and now the matched section
2079
for text_a in plain_a[ai:a_cur]:
2080
yield "unchanged", text_a
2083
class _PlanMergeBase(object):
2085
def __init__(self, a_rev, b_rev, vf, key_prefix):
2088
:param a_rev: Revision-id of one revision to merge
2089
:param b_rev: Revision-id of the other revision to merge
2090
:param vf: A VersionedFiles containing both revisions
2091
:param key_prefix: A prefix for accessing keys in vf, typically
2097
self._last_lines = None
2098
self._last_lines_revision_id = None
2099
self._cached_matching_blocks = {}
2100
self._key_prefix = key_prefix
2101
self._precache_tip_lines()
2103
def _precache_tip_lines(self):
2104
lines = self.get_lines([self.a_rev, self.b_rev])
2105
self.lines_a = lines[self.a_rev]
2106
self.lines_b = lines[self.b_rev]
2108
def get_lines(self, revisions):
2109
"""Get lines for revisions from the backing VersionedFiles.
2111
:raises RevisionNotPresent: on absent texts.
2113
keys = [(self._key_prefix + (rev,)) for rev in revisions]
2115
for record in self.vf.get_record_stream(keys, 'unordered', True):
2116
if record.storage_kind == 'absent':
2117
raise errors.RevisionNotPresent(record.key, self.vf)
2118
result[record.key[-1]] = osutils.chunks_to_lines(
2119
record.get_bytes_as('chunked'))
2122
def plan_merge(self):
2123
"""Generate a 'plan' for merging the two revisions.
2125
This involves comparing their texts and determining the cause of
2126
differences. If text A has a line and text B does not, then either the
2127
line was added to text A, or it was deleted from B. Once the causes
2128
are combined, they are written out in the format described in
2129
VersionedFile.plan_merge
2131
blocks = self._get_matching_blocks(self.a_rev, self.b_rev)
2132
unique_a, unique_b = self._unique_lines(blocks)
2133
new_a, killed_b = self._determine_status(self.a_rev, unique_a)
2134
new_b, killed_a = self._determine_status(self.b_rev, unique_b)
2135
return self._iter_plan(blocks, new_a, killed_b, new_b, killed_a)
2137
def _iter_plan(self, blocks, new_a, killed_b, new_b, killed_a):
2140
for i, j, n in blocks:
2141
for a_index in range(last_i, i):
2142
if a_index in new_a:
2143
if a_index in killed_b:
2144
yield 'conflicted-a', self.lines_a[a_index]
2146
yield 'new-a', self.lines_a[a_index]
2148
yield 'killed-b', self.lines_a[a_index]
2149
for b_index in range(last_j, j):
2150
if b_index in new_b:
2151
if b_index in killed_a:
2152
yield 'conflicted-b', self.lines_b[b_index]
2154
yield 'new-b', self.lines_b[b_index]
2156
yield 'killed-a', self.lines_b[b_index]
2157
# handle common lines
2158
for a_index in range(i, i+n):
2159
yield 'unchanged', self.lines_a[a_index]
2163
def _get_matching_blocks(self, left_revision, right_revision):
2164
"""Return a description of which sections of two revisions match.
2166
See SequenceMatcher.get_matching_blocks
2168
cached = self._cached_matching_blocks.get((left_revision,
2170
if cached is not None:
2172
if self._last_lines_revision_id == left_revision:
2173
left_lines = self._last_lines
2174
right_lines = self.get_lines([right_revision])[right_revision]
2176
lines = self.get_lines([left_revision, right_revision])
2177
left_lines = lines[left_revision]
2178
right_lines = lines[right_revision]
2179
self._last_lines = right_lines
2180
self._last_lines_revision_id = right_revision
2181
matcher = patiencediff.PatienceSequenceMatcher(None, left_lines,
2183
return matcher.get_matching_blocks()
2185
def _unique_lines(self, matching_blocks):
2186
"""Analyse matching_blocks to determine which lines are unique
2188
:return: a tuple of (unique_left, unique_right), where the values are
2189
sets of line numbers of unique lines.
2195
for i, j, n in matching_blocks:
2196
unique_left.extend(range(last_i, i))
2197
unique_right.extend(range(last_j, j))
2200
return unique_left, unique_right
2203
def _subtract_plans(old_plan, new_plan):
2204
"""Remove changes from new_plan that came from old_plan.
2206
It is assumed that the difference between the old_plan and new_plan
2207
is their choice of 'b' text.
2209
All lines from new_plan that differ from old_plan are emitted
2210
verbatim. All lines from new_plan that match old_plan but are
2211
not about the 'b' revision are emitted verbatim.
2213
Lines that match and are about the 'b' revision are the lines we
2214
don't want, so we convert 'killed-b' -> 'unchanged', and 'new-b'
2215
is skipped entirely.
2217
matcher = patiencediff.PatienceSequenceMatcher(None, old_plan,
2220
for i, j, n in matcher.get_matching_blocks():
2221
for jj in range(last_j, j):
2223
for jj in range(j, j+n):
2224
plan_line = new_plan[jj]
2225
if plan_line[0] == 'new-b':
2227
elif plan_line[0] == 'killed-b':
2228
yield 'unchanged', plan_line[1]
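# Illustrative sketch (not part of the original module), assuming the
# documented behaviour: for matching entries, 'killed-b' becomes 'unchanged'
# and 'new-b' is dropped, while non-'b' entries pass through verbatim.
#
#   old = [('unchanged', 'x\n'), ('new-b', 'b1\n'), ('killed-b', 'y\n')]
#   new = [('unchanged', 'x\n'), ('new-b', 'b1\n'), ('killed-b', 'y\n')]
#   list(_PlanMergeBase._subtract_plans(old, new))
#   # -> [('unchanged', 'x\n'), ('unchanged', 'y\n')]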
2234
class _PlanMerge(_PlanMergeBase):
2235
"""Plan an annotate merge using on-the-fly annotation"""
2237
def __init__(self, a_rev, b_rev, vf, key_prefix):
2238
super(_PlanMerge, self).__init__(a_rev, b_rev, vf, key_prefix)
2239
self.a_key = self._key_prefix + (self.a_rev,)
2240
self.b_key = self._key_prefix + (self.b_rev,)
2241
self.graph = _mod_graph.Graph(self.vf)
2242
heads = self.graph.heads((self.a_key, self.b_key))
2244
# one side dominates, so we can just return its values, yay for
2246
# Ideally we would know that before we get this far
2247
self._head_key = heads.pop()
2248
if self._head_key == self.a_key:
2252
trace.mutter('found dominating revision for %s\n%s > %s', self.vf,
2253
self._head_key[-1], other)
2256
self._head_key = None
2259
def _precache_tip_lines(self):
2260
# Turn this into a no-op, because we will do this later
2263
def _find_recursive_lcas(self):
2264
"""Find all the ancestors back to a unique lca"""
2265
cur_ancestors = (self.a_key, self.b_key)
2266
# graph.find_lca(uncommon, keys) now returns plain NULL_REVISION,
2267
# rather than a key tuple. We will just map that directly to no common
2271
next_lcas = self.graph.find_lca(*cur_ancestors)
2272
# Map a plain NULL_REVISION to a simple no-ancestors
2273
if next_lcas == set([_mod_revision.NULL_REVISION]):
2275
# Order the lca's based on when they were merged into the tip
2276
# While the actual merge portion of weave merge uses a set() of
2277
# active revisions, the order of insertion *does* affect the
2278
# implicit ordering of the texts.
2279
for rev_key in cur_ancestors:
2280
ordered_parents = tuple(self.graph.find_merge_order(rev_key,
2282
parent_map[rev_key] = ordered_parents
2283
if len(next_lcas) == 0:
2285
elif len(next_lcas) == 1:
2286
parent_map[list(next_lcas)[0]] = ()
2288
elif len(next_lcas) > 2:
2289
# More than 2 lca's, fall back to grabbing all nodes between
2290
# this and the unique lca.
2291
trace.mutter('More than 2 LCAs, falling back to all nodes for:'
2293
self.a_key, self.b_key, cur_ancestors)
2294
cur_lcas = next_lcas
2295
while len(cur_lcas) > 1:
2296
cur_lcas = self.graph.find_lca(*cur_lcas)
2297
if len(cur_lcas) == 0:
2298
# No common base to find, use the full ancestry
2301
unique_lca = list(cur_lcas)[0]
2302
if unique_lca == _mod_revision.NULL_REVISION:
2303
# find_lca will return a plain 'NULL_REVISION' rather
2304
# than a key tuple when there is no common ancestor, we
2305
# prefer to just use None, because it doesn't confuse
2306
# _get_interesting_texts()
2308
parent_map.update(self._find_unique_parents(next_lcas,
2311
cur_ancestors = next_lcas
2314
def _find_unique_parents(self, tip_keys, base_key):
2315
"""Find ancestors of tip that aren't ancestors of base.
2317
:param tip_keys: Nodes that are interesting
2318
:param base_key: Cull all ancestors of this node
2319
:return: The parent map for all revisions between tip_keys and
2320
base_key. base_key will be included. References to nodes outside of
2321
the ancestor set will also be removed.
2323
# TODO: this would be simpler if find_unique_ancestors took a list
2324
# instead of a single tip, internally it supports it, but it
2325
# isn't a "backwards compatible" api change.
2326
if base_key is None:
2327
parent_map = dict(self.graph.iter_ancestry(tip_keys))
2328
# We remove NULL_REVISION because it isn't a proper tuple key, and
2329
# thus confuses things like _get_interesting_texts, and our logic
2330
# to add the texts into the memory weave.
2331
if _mod_revision.NULL_REVISION in parent_map:
2332
parent_map.pop(_mod_revision.NULL_REVISION)
2335
for tip in tip_keys:
2337
self.graph.find_unique_ancestors(tip, [base_key]))
2338
parent_map = self.graph.get_parent_map(interesting)
2339
parent_map[base_key] = ()
2340
culled_parent_map, child_map, tails = self._remove_external_references(
2342
# Remove all the tails but base_key
2343
if base_key is not None:
2344
tails.remove(base_key)
2345
self._prune_tails(culled_parent_map, child_map, tails)
2346
# Now remove all the uninteresting 'linear' regions
2347
simple_map = _mod_graph.collapse_linear_regions(culled_parent_map)
2351
def _remove_external_references(parent_map):
2352
"""Remove references that go outside of the parent map.
2354
:param parent_map: Something returned from Graph.get_parent_map(keys)
2355
:return: (filtered_parent_map, child_map, tails)
2356
filtered_parent_map is parent_map without external references
2357
child_map is the {parent_key: [child_keys]} mapping
2358
tails is a list of nodes that do not have any parents in the map
2360
# TODO: The basic effect of this function seems more generic than
2361
# _PlanMerge. But the specific details of building a child_map,
2362
# and computing tails seems very specific to _PlanMerge.
2363
# Still, should this be in Graph land?
2364
filtered_parent_map = {}
2367
for key, parent_keys in parent_map.iteritems():
2368
culled_parent_keys = [p for p in parent_keys if p in parent_map]
2369
if not culled_parent_keys:
2371
for parent_key in culled_parent_keys:
2372
child_map.setdefault(parent_key, []).append(key)
2373
# TODO: Do we want to do this, it adds overhead for every node,
2374
# just to say that the node has no children
2375
child_map.setdefault(key, [])
2376
filtered_parent_map[key] = culled_parent_keys
2377
return filtered_parent_map, child_map, tails
2380
def _prune_tails(parent_map, child_map, tails_to_remove):
2381
"""Remove tails from the parent map.
2383
This will remove the supplied revisions until no more children have 0
2386
:param parent_map: A dict of {child: [parents]}, this dictionary will
2387
be modified in place.
2388
:param tails_to_remove: A list of tips that should be removed,
2389
this list will be consumed
2390
:param child_map: The reverse dict of parent_map ({parent: [children]})
2391
this dict will be modified
2392
:return: None, parent_map will be modified in place.
2394
while tails_to_remove:
2395
next = tails_to_remove.pop()
2396
parent_map.pop(next)
2397
children = child_map.pop(next)
2398
for child in children:
2399
child_parents = parent_map[child]
2400
child_parents.remove(next)
2401
if len(child_parents) == 0:
2402
tails_to_remove.append(child)
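# Illustrative sketch (not part of the original module): pruning tail 'c'
# cascades to any child left without parents.
#
#   parent_map = {'a': [], 'b': ['a'], 'c': [], 'd': ['c']}
#   child_map  = {'a': ['b'], 'b': [], 'c': ['d'], 'd': []}
#   _PlanMerge._prune_tails(parent_map, child_map, ['c'])
#   # parent_map is now {'a': [], 'b': ['a']}; 'd' was removed too, because
#   # removing 'c' left it with no remaining parents.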
2404
def _get_interesting_texts(self, parent_map):
2405
"""Return a dict of texts we are interested in.
2407
Note that the input is in key tuples, but the output is in plain
2410
:param parent_map: The output from _find_recursive_lcas
2411
:return: A dict of {'revision_id':lines} as returned by
2412
_PlanMergeBase.get_lines()
2414
all_revision_keys = set(parent_map)
2415
all_revision_keys.add(self.a_key)
2416
all_revision_keys.add(self.b_key)
2418
# Everything else is in 'keys' but get_lines is in 'revision_ids'
2419
all_texts = self.get_lines([k[-1] for k in all_revision_keys])
2422
def _build_weave(self):
2423
from bzrlib import weave
2424
self._weave = weave.Weave(weave_name='in_memory_weave',
2425
allow_reserved=True)
2426
parent_map = self._find_recursive_lcas()
2428
all_texts = self._get_interesting_texts(parent_map)
2430
# Note: Unfortunately, the order given by topo_sort will affect the
2431
# ordering resolution in the output. Specifically, if you add A then B,
2432
# then in the output text A lines will show up before B lines. And, of
2433
# course, topo_sort doesn't guarantee any real ordering.
2434
# So we use merge_sort, and add a fake node on the tip.
2435
# This ensures that left-hand parents will always be inserted into the
2436
# weave before right-hand parents.
2437
tip_key = self._key_prefix + (_mod_revision.CURRENT_REVISION,)
2438
parent_map[tip_key] = (self.a_key, self.b_key)
2440
for seq_num, key, depth, eom in reversed(tsort.merge_sort(parent_map,
2444
# for key in tsort.topo_sort(parent_map):
2445
parent_keys = parent_map[key]
2446
revision_id = key[-1]
2447
parent_ids = [k[-1] for k in parent_keys]
2448
self._weave.add_lines(revision_id, parent_ids,
2449
all_texts[revision_id])
2451
def plan_merge(self):
2452
"""Generate a 'plan' for merging the two revisions.
2454
This involves comparing their texts and determining the cause of
2455
differences. If text A has a line and text B does not, then either the
2456
line was added to text A, or it was deleted from B. Once the causes
2457
are combined, they are written out in the format described in
2458
VersionedFile.plan_merge
2460
if self._head_key is not None: # There was a single head
2461
if self._head_key == self.a_key:
2464
if self._head_key != self.b_key:
2465
raise AssertionError('There was an invalid head: %s != %s'
2466
% (self.b_key, self._head_key))
2468
head_rev = self._head_key[-1]
2469
lines = self.get_lines([head_rev])[head_rev]
2470
return ((plan, line) for line in lines)
2471
return self._weave.plan_merge(self.a_rev, self.b_rev)
2474
class _PlanLCAMerge(_PlanMergeBase):
2476
This merge algorithm differs from _PlanMerge in that:
2478
1. comparisons are done against LCAs only
2479
2. cases where a contested line is new versus one LCA but old versus
2480
another are marked as conflicts, by emitting the line as conflicted-a
2483
This is faster, and hopefully produces more useful output.
2486
def __init__(self, a_rev, b_rev, vf, key_prefix, graph):
2487
_PlanMergeBase.__init__(self, a_rev, b_rev, vf, key_prefix)
2488
lcas = graph.find_lca(key_prefix + (a_rev,), key_prefix + (b_rev,))
2491
if lca == _mod_revision.NULL_REVISION:
2494
self.lcas.add(lca[-1])
2495
for lca in self.lcas:
2496
if _mod_revision.is_null(lca):
2499
lca_lines = self.get_lines([lca])[lca]
2500
matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_a,
2502
blocks = list(matcher.get_matching_blocks())
2503
self._cached_matching_blocks[(a_rev, lca)] = blocks
2504
matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_b,
2506
blocks = list(matcher.get_matching_blocks())
2507
self._cached_matching_blocks[(b_rev, lca)] = blocks
2509
def _determine_status(self, revision_id, unique_line_numbers):
2510
"""Determines the status unique lines versus all lcas.
2512
Basically, determines why the line is unique to this revision.
2514
A line may be determined new, killed, or both.
2516
If a line is determined new, that means it was not present in at least
2517
one LCA, and is not present in the other merge revision.
2519
If a line is determined killed, that means the line was present in
2522
If a line is killed and new, this indicates that the two merge
2523
revisions contain differing conflict resolutions.
2525
:param revision_id: The id of the revision in which the lines are
2527
:param unique_line_numbers: The line numbers of unique lines.
2528
:return: a tuple of (new_this, killed_other)
2532
unique_line_numbers = set(unique_line_numbers)
2533
for lca in self.lcas:
2534
blocks = self._get_matching_blocks(revision_id, lca)
2535
unique_vs_lca, _ignored = self._unique_lines(blocks)
2536
new.update(unique_line_numbers.intersection(unique_vs_lca))
2537
killed.update(unique_line_numbers.difference(unique_vs_lca))
535
self.base_rev_id = None
537
self.base_rev_id = base_branch.get_rev_id(base_revision[1])
538
fetch(from_branch=base_branch, to_branch=self.this_branch)
539
self.base_is_ancestor = is_ancestor(self.this_basis,
544
def get_inventory(tree):
545
return tree.inventory
547
inv_changes = merge_flex(self.this_tree, self.base_tree,
549
generate_changeset, get_inventory,
550
self.conflict_handler,
551
merge_factory=self.merge_factory,
552
interesting_ids=self.interesting_ids)
555
for id, path in inv_changes.iteritems():
560
assert path.startswith('.' + '/') or path.startswith('.' + '\\'), "path is %s" % path
562
adjust_ids.append((path, id))
563
if len(adjust_ids) > 0:
564
self.this_branch.working_tree().set_inventory(self.regen_inventory(adjust_ids))
565
conflicts = self.conflict_handler.conflicts
566
self.conflict_handler.finalize()
569
def regen_inventory(self, new_entries):
570
old_entries = self.this_branch.working_tree().read_working_inventory()
574
for path, file_id in new_entries:
577
new_entries_map[file_id] = path
579
def id2path(file_id):
580
path = new_entries_map.get(file_id)
583
entry = old_entries[file_id]
584
if entry.parent_id is None:
586
return os.path.join(id2path(entry.parent_id), entry.name)
588
for file_id in old_entries:
589
entry = old_entries[file_id]
590
path = id2path(file_id)
591
new_inventory[file_id] = (path, file_id, entry.parent_id,
593
by_path[path] = file_id
598
for path, file_id in new_entries:
600
del new_inventory[file_id]
603
new_path_list.append((path, file_id))
604
if file_id not in old_entries:
606
# Ensure no file is added before its parent
608
for path, file_id in new_path_list:
612
parent = by_path[os.path.dirname(path)]
613
abspath = os.path.join(self.this_tree.basedir, path)
614
kind = bzrlib.osutils.file_kind(abspath)
615
new_inventory[file_id] = (path, file_id, parent, kind)
616
by_path[path] = file_id
618
# Get a list in insertion order
619
new_inventory_list = new_inventory.values()
620
mutter ("""Inventory regeneration:
621
old length: %i insertions: %i deletions: %i new_length: %i"""\
622
% (len(old_entries), insertions, deletions,
623
len(new_inventory_list)))
624
assert len(new_inventory_list) == len(old_entries) + insertions\
626
new_inventory_list.sort()
627
return new_inventory_list
629
merge_types = { "merge3": (ApplyMerge3, "Native diff3-style merge"),
630
"diff3": (Diff3Merge, "Merge using external diff3"),
631
'weave': (WeaveMerge, "Weave-based merge")