# Copyright (C) 2005, 2006, 2008 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
import errno
import os
from itertools import chain

from bzrlib import (
    debug,
    errors,
    osutils,
    patiencediff,
    revision as _mod_revision,
    ui,
    )
import bzrlib.patch
import bzrlib.revision
from bzrlib.branch import Branch
from bzrlib.changeset import generate_changeset, ExceptionConflictHandler
from bzrlib.changeset import Inventory, Diff3Merge, ReplaceContents
from bzrlib.conflicts import ConflictList, Conflict
from bzrlib.delta import compare_trees
from bzrlib.errors import (BzrCommandError,
                           BzrError,
                           NoCommits,
                           NoCommonAncestor,
                           NoSuchRevision,
                           NotBranchError,
                           NotVersionedError,
                           UnrelatedBranches,
                           UnsupportedOperation,
                           WorkingTreeNotRevision,
                           )
from bzrlib.fetch import greedy_fetch, fetch
from bzrlib.graph import Graph
from bzrlib.merge3 import Merge3
from bzrlib.merge_core import merge_flex, ApplyMerge3, BackupBeforeChange
from bzrlib.merge_core import WeaveMerge
from bzrlib.osutils import rename, pathjoin
from bzrlib.progress import DummyProgress, ProgressPhase
from bzrlib.revision import (NULL_REVISION, ensure_null, is_ancestor,
                             common_ancestor, MultipleRevisionSources)
from bzrlib.textfile import check_text_lines
from bzrlib.trace import mutter, warning, note, is_quiet
from bzrlib.transform import (TransformPreview, TreeTransform,
                              resolve_conflicts, cook_conflicts,
                              conflict_pass, FinalPaths, create_by_entry,
                              unique_add, ROOT_PARENT)
from bzrlib.versionedfile import PlanWeaveMerge

# TODO: Report back as changes are merged in

# TODO: build_working_dir can be built on something simpler than merge()

# FIXME: merge() parameters seem oriented towards the command line
# NOTABUG: merge is a helper for commandline functions.  merge_inner is the
#          core functionality.

# comments from abentley on irc: merge happens in two stages, each
# of which generates a changeset object

# stage 1: generate OLD->OTHER,
# stage 2: use MINE and OLD->OTHER to generate MINE -> RESULT

class MergeConflictHandler(ExceptionConflictHandler):
    """Handle conflicts encountered while merging.

    This subclasses ExceptionConflictHandler, so that any types of
    conflict that are not explicitly handled cause an exception and
    terminate the merge.
    """

    def __init__(self, this_tree, base_tree, other_tree, ignore_zero=False):
        ExceptionConflictHandler.__init__(self)
        self.conflicts = 0
        self.ignore_zero = ignore_zero
        self.this_tree = this_tree
        self.base_tree = base_tree
        self.other_tree = other_tree

    def copy(self, source, dest):
        """Copy the text and mode of a file

        :param source: The path of the file to copy
        :param dest: The destination file to create
        """
        s_file = file(source, "rb")
        d_file = file(dest, "wb")
        d_file.write(s_file.read())
        os.chmod(dest, 0777 & os.stat(source).st_mode)

    def dump(self, lines, dest):
        """Write an iterable of lines to a file

        :param lines: The lines to write
        :param dest: The destination file to create
        """
        d_file = file(dest, "wb")
        for line in lines:
            d_file.write(line)

    def add_suffix(self, name, suffix, last_new_name=None, fix_inventory=True):
        """Rename a file to append a suffix.  If the new name exists, the
        suffix is added repeatedly until a non-existent name is found

        :param name: The path of the file
        :param suffix: The suffix to append
        :param last_new_name: (used for recursive calls) the last name tried
        """
        if last_new_name is None:
            last_new_name = name
        new_name = last_new_name+suffix
        try:
            rename(name, new_name)
            if fix_inventory is True:
                try:
                    relpath = self.this_tree.relpath(name)
                except NotBranchError:
                    relpath = None
                if relpath is not None:
                    file_id = self.this_tree.path2id(relpath)
                    if file_id is not None:
                        new_path = self.this_tree.relpath(new_name)
                        rename(new_name, name)
                        self.this_tree.branch.rename_one(relpath, new_path)
                        assert self.this_tree.id2path(file_id) == relpath
                        self.this_tree._inventory = self.this_tree.read_working_inventory()
                        assert self.this_tree.id2path(file_id) == new_path
        except OSError, e:
            if e.errno != errno.EEXIST and e.errno != errno.ENOTEMPTY:
                raise
            return self.add_suffix(name, suffix, last_new_name=new_name,
                                   fix_inventory=fix_inventory)
        return new_name

    def conflict(self, text):
        warning(text)
        self.conflicts += 1

    def merge_conflict(self, new_file, this_path, base_lines, other_lines):
        """
        Handle diff3 conflicts by producing a .THIS, .BASE and .OTHER.  The
        main file will be a version with diff3 conflicts.

        :param new_file: Path to the output file with diff3 markers
        :param this_path: Path to the file text for the THIS tree
        :param base_lines: The lines of the BASE version of the file
        :param other_lines: The lines of the OTHER version of the file
        """
        self.add_suffix(this_path, ".THIS", fix_inventory=False)
        self.dump(base_lines, this_path+".BASE")
        self.dump(other_lines, this_path+".OTHER")
        rename(new_file, this_path)
        self.conflict("Diff3 conflict encountered in %s" % this_path)

    def weave_merge_conflict(self, filename, weave, other_i, out_file):
        """
        Handle weave conflicts by producing a .THIS, and .OTHER.  The
        main file will be a version with diff3-style conflicts.
        """
        self.add_suffix(filename, ".THIS", fix_inventory=False)
        self.dump(weave.get_iter(other_i), filename+".OTHER")
        self.conflict("Text conflict encountered in %s" % filename)
159
def new_contents_conflict(self, filename, other_contents):
160
"""Conflicting contents for newly added file."""
161
other_contents(filename + ".OTHER", self, False)
162
self.conflict("Conflict in newly added file %s" % filename)
165
def target_exists(self, entry, target, old_path):
166
"""Handle the case when the target file or dir exists"""
167
moved_path = self.add_suffix(target, ".moved")
168
self.conflict("Moved existing %s to %s" % (target, moved_path))

    def rmdir_non_empty(self, filename):
        """Handle the case where the dir to be removed still has contents"""
        self.conflict("Directory %s not removed because it is not empty"\
            % filename)

    def rem_contents_conflict(self, filename, this_contents, base_contents):
        base_contents(filename+".BASE", self, False)
        this_contents(filename+".THIS", self, False)
        self.conflict("Other branch deleted locally modified file %s" %
                      filename)
        return ReplaceContents(this_contents, None)
188
def abs_this_path(self, file_id):
189
"""Return the absolute path for a file_id in the this tree."""
190
return self.this_tree.id2abspath(file_id)
192
def add_missing_parents(self, file_id, tree):
193
"""If some of the parents for file_id are missing, add them."""
194
entry = tree.inventory[file_id]
195
if entry.parent_id not in self.this_tree:
196
return self.create_all_missing(entry.parent_id, tree)
198
return self.abs_this_path(entry.parent_id)
200
def create_all_missing(self, file_id, tree):
201
"""Add contents for a file_id and all its parents to a tree."""
202
entry = tree.inventory[file_id]
203
if entry.parent_id is not None and entry.parent_id not in self.this_tree:
204
abspath = self.create_all_missing(entry.parent_id, tree)
206
abspath = self.abs_this_path(entry.parent_id)
207
entry_path = os.path.join(abspath, entry.name)
208
if not os.path.isdir(entry_path):
209
self.create(file_id, entry_path, tree)
212
def create(self, file_id, path, tree, reverse=False):
213
"""Uses tree data to create a filesystem object for the file_id"""
214
from changeset import get_contents
215
get_contents(tree, file_id)(path, self, reverse)
217
def missing_for_merge(self, file_id, other_path):
218
"""The file_id doesn't exist in THIS, but does in OTHER and BASE"""
219
self.conflict("Other branch modified locally deleted file %s" %
221
parent_dir = self.add_missing_parents(file_id, self.other_tree)
222
stem = os.path.join(parent_dir, os.path.basename(other_path))
223
self.create(file_id, stem+".OTHER", self.other_tree)
224
self.create(file_id, stem+".BASE", self.base_tree)

    def threeway_contents_conflict(self, filename, this_contents,
                                   base_contents, other_contents):
        self.conflict("Three-way conflict merging %s" % filename)
231
if not self.ignore_zero:
232
note("%d conflicts encountered.\n", self.conflicts)
234
def get_tree(treespec, local_branch=None):
235
location, revno = treespec
236
branch = Branch.open_containing(location)[0]
240
revision = branch.last_revision()
242
revision = branch.get_rev_id(revno)
244
revision = NULL_REVISION
245
return branch, get_revid_tree(branch, revision, local_branch)
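

# A minimal usage sketch for get_tree() above (not part of the original
# module).  The path '../other' is an assumption; revno -1 selects the
# branch's last revision.
def _example_get_tree():
    other_branch, other_tree = get_tree(('../other', -1))
    return other_branch, other_tree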
247
def get_revid_tree(branch, revision, local_branch):
249
base_tree = branch.working_tree()
251
if local_branch is not None:
252
greedy_fetch(local_branch, branch, revision)
253
base_tree = local_branch.revision_tree(revision)
255
base_tree = branch.revision_tree(revision)
259
def file_exists(tree, file_id):
260
return tree.has_filename(tree.id2path(file_id))
263
def build_working_dir(to_dir):
264
"""Build a working directory in an empty directory.
266
to_dir is a directory containing branch metadata but no working files,
267
typically constructed by cloning an existing branch.
269
This is split out as a special idiomatic case of merge. It could
270
eventually be done by just building the tree directly calling into
271
lower-level code (e.g. constructing a changeset).
273
# RBC 20051019 is this not just 'export' ?
274
# AB Well, export doesn't take care of inventory...
275
this_branch = Branch.open_containing(to_dir)[0]
276
transform_tree(this_branch.working_tree(), this_branch.basis_tree())
65
279
def transform_tree(from_tree, to_tree, interesting_ids=None):
66
280
merge_inner(from_tree.branch, to_tree, from_tree, ignore_zero=True,
67
interesting_ids=interesting_ids, this_tree=from_tree)
281
interesting_ids=interesting_ids)
284
def merge(other_revision, base_revision,
285
check_clean=True, ignore_zero=False,
286
this_dir=None, backup_files=False, merge_type=ApplyMerge3,
287
file_list=None, show_base=False, reprocess=False):
    """Merge changes into a tree.

    base_revision
        list(path, revno) Base for three-way merge.
        If [None, None] then a base will be automatically determined.
    other_revision
        list(path, revno) Other revision for three-way merge.
    this_dir
        Directory to merge changes into; '.' by default.
    check_clean
        If true, this_dir must have no uncommitted changes before the
        merge begins.
    ignore_zero - If true, suppress the "zero conflicts" message when
        there are no conflicts; should be set when doing something we expect
        to complete perfectly.
    file_list - If supplied, merge only changes to selected files.

    All available ancestors of other_revision and base_revision are
    automatically pulled into the branch.

    The revno may be -1 to indicate the last revision on the branch, which is
    the typical case.

    This function is intended for use from the command line; programmatic
    clients might prefer to call merge_inner(), which has less magic behavior.
    """
this_branch = Branch.open_containing(this_dir)[0]
317
if show_base and not merge_type is ApplyMerge3:
318
raise BzrCommandError("Show-base is not supported for this merge"
319
" type. %s" % merge_type)
320
if reprocess and not merge_type is ApplyMerge3:
321
raise BzrCommandError("Reprocess is not supported for this merge"
322
" type. %s" % merge_type)
323
if reprocess and show_base:
324
raise BzrCommandError("Cannot reprocess and show base.")
325
merger = Merger(this_branch)
326
merger.check_basis(check_clean)
327
merger.set_other(other_revision)
328
merger.set_base(base_revision)
329
if merger.base_rev_id == merger.other_rev_id:
330
note('Nothing to do.')
332
merger.backup_files = backup_files
333
merger.merge_type = merge_type
334
merger.set_interesting_files(file_list)
335
merger.show_base = show_base
336
merger.reprocess = reprocess
337
merger.conflict_handler = MergeConflictHandler(merger.this_tree,
340
ignore_zero=ignore_zero)
341
conflicts = merger.do_merge()
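

# A usage sketch for merge() above (not part of the original module): merge
# the tip of a sibling branch into the working tree in this_dir.  The branch
# path '../other' is an assumption.
def _example_merge_from_sibling():
    merge(('../other', -1), (None, None), this_dir='.', check_clean=True)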
345
def merge_inner(this_branch, other_tree, base_tree, ignore_zero=False,
347
merge_type=ApplyMerge3,
348
interesting_ids=None,
352
interesting_files=None):
353
"""Primary interface for merging.
355
typical use is probably
356
'merge_inner(branch, branch.get_revision_tree(other_revision),
357
branch.get_revision_tree(base_revision))'
359
merger = Merger(this_branch, other_tree, base_tree)
360
merger.backup_files = backup_files
361
merger.merge_type = merge_type
362
merger.interesting_ids = interesting_ids
363
if interesting_files:
364
assert not interesting_ids, ('Only supply interesting_ids'
365
' or interesting_files')
366
merger._set_interesting_files(interesting_files)
367
merger.show_base = show_base
368
merger.reprocess = reprocess
369
merger.conflict_handler = MergeConflictHandler(merger.this_tree, base_tree,
371
ignore_zero=ignore_zero)
372
merger.other_rev_id = other_rev_id
373
merger.other_basis = other_rev_id
374
return merger.do_merge()
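

# A sketch of the "typical use" described in the docstring above (not part of
# the original module); it assumes the Branch API named there, where revision
# trees are available from the branch object.
def _example_merge_inner(branch, other_revision, base_revision):
    return merge_inner(branch,
                       branch.get_revision_tree(other_revision),
                       branch.get_revision_tree(base_revision))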
70
377
class Merger(object):
71
def __init__(self, this_branch, other_tree=None, base_tree=None,
72
this_tree=None, pb=DummyProgress(), change_reporter=None,
73
recurse='down', revision_graph=None):
378
def __init__(self, this_branch, other_tree=None, base_tree=None):
74
379
object.__init__(self)
75
380
self.this_branch = this_branch
76
self.this_basis = _mod_revision.ensure_null(
77
this_branch.last_revision())
381
self.this_basis = this_branch.last_revision()
78
382
self.this_rev_id = None
79
self.this_tree = this_tree
383
self.this_tree = this_branch.working_tree()
80
384
self.this_revision_tree = None
81
385
self.this_basis_tree = None
82
386
self.other_tree = other_tree
83
self.other_branch = None
84
387
self.base_tree = base_tree
85
388
self.ignore_zero = False
86
389
self.backup_files = False
87
390
self.interesting_ids = None
88
self.interesting_files = None
89
391
self.show_base = False
90
392
self.reprocess = False
93
self.recurse = recurse
94
self.change_reporter = change_reporter
95
self._cached_trees = {}
96
self._revision_graph = revision_graph
97
self._base_is_ancestor = None
98
self._base_is_other_ancestor = None
101
def revision_graph(self):
102
if self._revision_graph is None:
103
self._revision_graph = self.this_branch.repository.get_graph()
104
return self._revision_graph
106
def _set_base_is_ancestor(self, value):
107
self._base_is_ancestor = value
109
def _get_base_is_ancestor(self):
110
if self._base_is_ancestor is None:
111
self._base_is_ancestor = self.revision_graph.is_ancestor(
112
self.base_rev_id, self.this_basis)
113
return self._base_is_ancestor
115
base_is_ancestor = property(_get_base_is_ancestor, _set_base_is_ancestor)
117
def _set_base_is_other_ancestor(self, value):
118
self._base_is_other_ancestor = value
120
def _get_base_is_other_ancestor(self):
121
if self._base_is_other_ancestor is None:
122
if self.other_basis is None:
124
self._base_is_other_ancestor = self.revision_graph.is_ancestor(
125
self.base_rev_id, self.other_basis)
126
return self._base_is_other_ancestor
128
base_is_other_ancestor = property(_get_base_is_other_ancestor,
129
_set_base_is_other_ancestor)
132
def from_uncommitted(tree, other_tree, pb):
133
"""Return a Merger for uncommitted changes in other_tree.
135
:param tree: The tree to merge into
136
:param other_tree: The tree to get uncommitted changes from
137
:param pb: A progress indicator
139
merger = Merger(tree.branch, other_tree, other_tree.basis_tree(), tree,
141
merger.base_rev_id = merger.base_tree.get_revision_id()
142
merger.other_rev_id = None
143
merger.other_basis = merger.base_rev_id
147
def from_mergeable(klass, tree, mergeable, pb):
148
"""Return a Merger for a bundle or merge directive.
150
:param tree: The tree to merge changes into
151
:param mergeable: A merge directive or bundle
152
:param pb: A progress indicator
154
mergeable.install_revisions(tree.branch.repository)
155
base_revision_id, other_revision_id, verified =\
156
mergeable.get_merge_request(tree.branch.repository)
157
revision_graph = tree.branch.repository.get_graph()
158
if base_revision_id is not None:
159
if (base_revision_id != _mod_revision.NULL_REVISION and
160
revision_graph.is_ancestor(
161
base_revision_id, tree.branch.last_revision())):
162
base_revision_id = None
164
warning('Performing cherrypick')
165
merger = klass.from_revision_ids(pb, tree, other_revision_id,
166
base_revision_id, revision_graph=
168
return merger, verified
171
def from_revision_ids(pb, tree, other, base=None, other_branch=None,
172
base_branch=None, revision_graph=None):
173
"""Return a Merger for revision-ids.
175
:param tree: The tree to merge changes into
176
:param other: The revision-id to use as OTHER
177
:param base: The revision-id to use as BASE. If not specified, will
179
:param other_branch: A branch containing the other revision-id. If
180
not supplied, tree.branch is used.
181
:param base_branch: A branch containing the base revision-id. If
182
not supplied, other_branch or tree.branch will be used.
183
:param revision_graph: If you have a revision_graph precomputed, pass
184
it in, otherwise it will be created for you.
185
:param pb: A progress indicator
187
merger = Merger(tree.branch, this_tree=tree, pb=pb,
188
revision_graph=revision_graph)
189
if other_branch is None:
190
other_branch = tree.branch
191
merger.set_other_revision(other, other_branch)
195
if base_branch is None:
196
base_branch = other_branch
197
merger.set_base_revision(base, base_branch)
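
    # Example use of from_revision_ids (a sketch, not part of the original
    # module; 'wt' and the revision id are assumptions for a WorkingTree and
    # a revision present in its repository):
    #
    #   merger = Merger.from_revision_ids(DummyProgress(), wt, 'other-rev-id')
    #   merger.merge_type = Merge3Merger
    #   conflict_count = merger.do_merge()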
200
def revision_tree(self, revision_id, branch=None):
201
if revision_id not in self._cached_trees:
203
branch = self.this_branch
205
tree = self.this_tree.revision_tree(revision_id)
206
except errors.NoSuchRevisionInTree:
207
tree = branch.repository.revision_tree(revision_id)
208
self._cached_trees[revision_id] = tree
209
return self._cached_trees[revision_id]
211
def _get_tree(self, treespec, possible_transports=None):
212
from bzrlib import workingtree
213
location, revno = treespec
215
tree = workingtree.WorkingTree.open_containing(location)[0]
216
return tree.branch, tree
217
branch = Branch.open_containing(location, possible_transports)[0]
219
revision_id = branch.last_revision()
221
revision_id = branch.get_rev_id(revno)
222
revision_id = ensure_null(revision_id)
223
return branch, self.revision_tree(revision_id, branch)
393
self.conflict_handler = MergeConflictHandler(self.this_tree, base_tree,
396
def revision_tree(self, revision_id):
397
return self.this_branch.revision_tree(revision_id)
225
399
def ensure_revision_trees(self):
226
400
if self.this_revision_tree is None:
227
self.this_basis_tree = self.revision_tree(self.this_basis)
401
self.this_basis_tree = self.this_branch.revision_tree(
228
403
if self.this_basis == self.this_rev_id:
229
404
self.this_revision_tree = self.this_basis_tree
231
407
if self.other_rev_id is None:
232
408
other_basis_tree = self.revision_tree(self.other_basis)
233
changes = other_basis_tree.changes_from(self.other_tree)
409
changes = compare_trees(self.other_tree, other_basis_tree)
234
410
if changes.has_changed():
235
411
raise WorkingTreeNotRevision(self.this_tree)
236
other_rev_id = self.other_basis
412
other_rev_id = other_basis
237
413
self.other_tree = other_basis_tree
239
416
def file_revisions(self, file_id):
240
417
self.ensure_revision_trees()
241
418
def get_id(tree, file_id):
242
419
revision_id = tree.inventory[file_id].revision
420
assert revision_id is not None
243
421
return revision_id
244
422
if self.this_rev_id is None:
245
423
if self.this_basis_tree.get_file_sha1(file_id) != \
249
427
trees = (self.this_basis_tree, self.other_tree)
250
428
return [get_id(tree, file_id) for tree in trees]
252
def check_basis(self, check_clean, require_commits=True):
253
if self.this_basis is None and require_commits is True:
254
raise BzrCommandError("This branch has no commits."
255
" (perhaps you would prefer 'bzr pull')")
431
def merge_factory(self, file_id, base, other):
432
if self.merge_type.history_based:
433
if self.show_base is True:
434
raise BzrError("Cannot show base for hisory-based merges")
435
if self.reprocess is True:
436
raise BzrError("Cannot reprocess history-based merges")
438
t_revid, o_revid = self.file_revisions(file_id)
439
weave = self.this_basis_tree.get_weave(file_id)
440
contents_change = self.merge_type(weave, t_revid, o_revid)
442
if self.show_base is True or self.reprocess is True:
443
contents_change = self.merge_type(file_id, base, other,
444
show_base=self.show_base,
445
reprocess=self.reprocess)
447
contents_change = self.merge_type(file_id, base, other)
448
if self.backup_files:
449
contents_change = BackupBeforeChange(contents_change)
450
return contents_change
452
def check_basis(self, check_clean):
453
if self.this_basis is None:
454
raise BzrCommandError("This branch has no commits")
257
456
self.compare_basis()
258
457
if self.this_basis != self.this_rev_id:
259
raise errors.UncommittedChanges(self.this_tree)
458
raise BzrCommandError("Working tree has uncommitted changes.")
261
460
def compare_basis(self):
263
basis_tree = self.revision_tree(self.this_tree.last_revision())
264
except errors.NoSuchRevision:
265
basis_tree = self.this_tree.basis_tree()
266
changes = self.this_tree.changes_from(basis_tree)
461
changes = compare_trees(self.this_branch.working_tree(),
462
self.this_branch.basis_tree(), False)
267
463
if not changes.has_changed():
268
464
self.this_rev_id = self.this_basis
270
466
def set_interesting_files(self, file_list):
271
self.interesting_files = file_list
468
self._set_interesting_files(file_list)
469
except NotVersionedError, e:
470
raise BzrCommandError("%s is not a source file in any"
473
def _set_interesting_files(self, file_list):
474
"""Set the list of interesting ids from a list of files."""
475
if file_list is None:
476
self.interesting_ids = None
479
interesting_ids = set()
480
for fname in file_list:
481
path = self.this_tree.relpath(fname)
483
for tree in (self.this_tree, self.base_tree, self.other_tree):
484
file_id = tree.inventory.path2id(path)
485
if file_id is not None:
486
interesting_ids.add(file_id)
489
raise NotVersionedError(path=fname)
490
self.interesting_ids = interesting_ids
273
492
def set_pending(self):
274
if not self.base_is_ancestor or not self.base_is_other_ancestor or self.other_rev_id is None:
278
def _add_parent(self):
279
new_parents = self.this_tree.get_parent_ids() + [self.other_rev_id]
280
new_parent_trees = []
281
for revision_id in new_parents:
283
tree = self.revision_tree(revision_id)
284
except errors.NoSuchRevision:
288
new_parent_trees.append((revision_id, tree))
290
self.this_tree.set_parent_trees(new_parent_trees,
291
allow_leftmost_as_ghost=True)
293
for _revision_id, tree in new_parent_trees:
297
def set_other(self, other_revision, possible_transports=None):
298
"""Set the revision and tree to merge from.
300
This sets the other_tree, other_rev_id, other_basis attributes.
302
:param other_revision: The [path, revision] list to merge from.
304
self.other_branch, self.other_tree = self._get_tree(other_revision,
493
if not self.base_is_ancestor:
495
if self.other_rev_id is None:
497
if self.other_rev_id in self.this_branch.get_ancestry(self.this_basis):
499
self.this_branch.working_tree().add_pending_merge(self.other_rev_id)
501
def set_other(self, other_revision):
502
other_branch, self.other_tree = get_tree(other_revision,
306
504
if other_revision[1] == -1:
307
self.other_rev_id = _mod_revision.ensure_null(
308
self.other_branch.last_revision())
309
if _mod_revision.is_null(self.other_rev_id):
310
raise NoCommits(self.other_branch)
505
self.other_rev_id = other_branch.last_revision()
506
if self.other_rev_id is None:
507
raise NoCommits(other_branch)
311
508
self.other_basis = self.other_rev_id
312
509
elif other_revision[1] is not None:
313
self.other_rev_id = self.other_branch.get_rev_id(other_revision[1])
510
self.other_rev_id = other_branch.get_rev_id(other_revision[1])
314
511
self.other_basis = self.other_rev_id
316
513
self.other_rev_id = None
317
self.other_basis = self.other_branch.last_revision()
514
self.other_basis = other_branch.last_revision()
318
515
if self.other_basis is None:
319
raise NoCommits(self.other_branch)
320
if self.other_rev_id is not None:
321
self._cached_trees[self.other_rev_id] = self.other_tree
322
self._maybe_fetch(self.other_branch,self.this_branch, self.other_basis)
324
def set_other_revision(self, revision_id, other_branch):
325
"""Set 'other' based on a branch and revision id
327
:param revision_id: The revision to use for a tree
328
:param other_branch: The branch containing this tree
330
self.other_rev_id = revision_id
331
self.other_branch = other_branch
332
self._maybe_fetch(other_branch, self.this_branch, self.other_rev_id)
333
self.other_tree = self.revision_tree(revision_id)
334
self.other_basis = revision_id
336
def set_base_revision(self, revision_id, branch):
337
"""Set 'base' based on a branch and revision id
339
:param revision_id: The revision to use for a tree
340
:param branch: The branch containing this tree
342
self.base_rev_id = revision_id
343
self.base_branch = branch
344
self._maybe_fetch(branch, self.this_branch, revision_id)
345
self.base_tree = self.revision_tree(revision_id)
347
def _maybe_fetch(self, source, target, revision_id):
348
if not source.repository.has_same_location(target.repository):
349
target.fetch(source, revision_id)
352
revisions = [ensure_null(self.this_basis),
353
ensure_null(self.other_basis)]
354
if NULL_REVISION in revisions:
355
self.base_rev_id = NULL_REVISION
357
self.base_rev_id, steps = self.revision_graph.find_unique_lca(
358
revisions[0], revisions[1], count_steps=True)
359
if self.base_rev_id == NULL_REVISION:
360
raise UnrelatedBranches()
362
warning('Warning: criss-cross merge encountered. See bzr'
363
' help criss-cross.')
364
self.base_tree = self.revision_tree(self.base_rev_id)
365
self.base_is_ancestor = True
366
self.base_is_other_ancestor = True
516
raise NoCommits(other_branch)
517
fetch(from_branch=other_branch, to_branch=self.this_branch,
518
last_revision=self.other_basis)
368
520
def set_base(self, base_revision):
369
"""Set the base revision to use for the merge.
371
:param base_revision: A 2-list containing a path and revision number.
373
521
mutter("doing merge() with no base_revision specified")
374
522
if base_revision == [None, None]:
524
self.base_rev_id = common_ancestor(self.this_basis,
527
except NoCommonAncestor:
528
raise UnrelatedBranches()
529
self.base_tree = get_revid_tree(self.this_branch, self.base_rev_id,
531
self.base_is_ancestor = True
377
base_branch, self.base_tree = self._get_tree(base_revision)
533
base_branch, self.base_tree = get_tree(base_revision)
378
534
if base_revision[1] == -1:
379
535
self.base_rev_id = base_branch.last_revision()
380
536
elif base_revision[1] is None:
381
self.base_rev_id = _mod_revision.NULL_REVISION
383
self.base_rev_id = _mod_revision.ensure_null(
384
base_branch.get_rev_id(base_revision[1]))
385
self._maybe_fetch(base_branch, self.this_branch, self.base_rev_id)
387
def make_merger(self):
388
kwargs = {'working_tree':self.this_tree, 'this_tree': self.this_tree,
389
'other_tree': self.other_tree,
390
'interesting_ids': self.interesting_ids,
391
'interesting_files': self.interesting_files,
394
if self.merge_type.requires_base:
395
kwargs['base_tree'] = self.base_tree
396
if self.merge_type.supports_reprocess:
397
kwargs['reprocess'] = self.reprocess
399
raise BzrError("Conflict reduction is not supported for merge"
400
" type %s." % self.merge_type)
401
if self.merge_type.supports_show_base:
402
kwargs['show_base'] = self.show_base
404
raise BzrError("Showing base is not supported for this"
405
" merge type. %s" % self.merge_type)
406
if (not getattr(self.merge_type, 'supports_reverse_cherrypick', True)
407
and not self.base_is_other_ancestor):
408
raise errors.CannotReverseCherrypick()
409
if self.merge_type.supports_cherrypick:
410
kwargs['cherrypick'] = (not self.base_is_ancestor or
411
not self.base_is_other_ancestor)
412
return self.merge_type(pb=self._pb,
413
change_reporter=self.change_reporter,
417
self.this_tree.lock_tree_write()
418
if self.base_tree is not None:
419
self.base_tree.lock_read()
420
if self.other_tree is not None:
421
self.other_tree.lock_read()
423
merge = self.make_merger()
425
if self.recurse == 'down':
426
for relpath, file_id in self.this_tree.iter_references():
427
sub_tree = self.this_tree.get_nested_tree(file_id, relpath)
428
other_revision = self.other_tree.get_reference_revision(
430
if other_revision == sub_tree.last_revision():
432
sub_merge = Merger(sub_tree.branch, this_tree=sub_tree)
433
sub_merge.merge_type = self.merge_type
434
other_branch = self.other_branch.reference_parent(file_id, relpath)
435
sub_merge.set_other_revision(other_revision, other_branch)
436
base_revision = self.base_tree.get_reference_revision(file_id)
437
sub_merge.base_tree = \
438
sub_tree.branch.repository.revision_tree(base_revision)
439
sub_merge.base_rev_id = base_revision
443
if self.other_tree is not None:
444
self.other_tree.unlock()
445
if self.base_tree is not None:
446
self.base_tree.unlock()
447
self.this_tree.unlock()
448
if len(merge.cooked_conflicts) == 0:
449
if not self.ignore_zero and not is_quiet():
450
note("All changes applied successfully.")
452
note("%d conflicts encountered." % len(merge.cooked_conflicts))
454
return len(merge.cooked_conflicts)
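
    # Driving a Merger programmatically (a sketch, not part of the original
    # module; 'wt' and 'other_branch' are assumptions for a WorkingTree and a
    # Branch to merge from):
    #
    #   merger = Merger(wt.branch, this_tree=wt)
    #   merger.set_other_revision(other_branch.last_revision(), other_branch)
    #   merger.set_base([None, None])
    #   merger.merge_type = Merge3Merger
    #   conflict_count = merger.do_merge()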
457
class Merge3Merger(object):
458
"""Three-way merger that uses the merge3 text merger"""
460
supports_reprocess = True
461
supports_show_base = True
462
history_based = False
463
supports_cherrypick = True
464
supports_reverse_cherrypick = True
465
winner_idx = {"this": 2, "other": 1, "conflict": 1}
467
def __init__(self, working_tree, this_tree, base_tree, other_tree,
468
interesting_ids=None, reprocess=False, show_base=False,
469
pb=DummyProgress(), pp=None, change_reporter=None,
470
interesting_files=None, do_merge=True,
472
"""Initialize the merger object and perform the merge.
474
:param working_tree: The working tree to apply the merge to
475
:param this_tree: The local tree in the merge operation
476
:param base_tree: The common tree in the merge operation
477
        :param other_tree: The other tree to merge changes from
478
        :param interesting_ids: The file_ids of files that should
            participate in the merge.  May not be combined with
            interesting_files.
        :param reprocess: If True, perform conflict-reduction processing.
482
:param show_base: If True, show the base revision in text conflicts.
483
(incompatible with reprocess)
484
:param pb: A Progress bar
485
:param pp: A ProgressPhase object
486
:param change_reporter: An object that should report changes made
487
:param interesting_files: The tree-relative paths of files that should
488
participate in the merge. If these paths refer to directories,
489
the contents of those directories will also be included. May not
490
be combined with interesting_ids. If neither interesting_files nor
491
interesting_ids is specified, all files may participate in the
494
object.__init__(self)
495
if interesting_files is not None and interesting_ids is not None:
497
'specify either interesting_ids or interesting_files')
498
self.interesting_ids = interesting_ids
499
self.interesting_files = interesting_files
500
self.this_tree = working_tree
501
self.base_tree = base_tree
502
self.other_tree = other_tree
503
self._raw_conflicts = []
504
self.cooked_conflicts = []
505
self.reprocess = reprocess
506
self.show_base = show_base
509
self.change_reporter = change_reporter
510
self.cherrypick = cherrypick
512
self.pp = ProgressPhase("Merge phase", 3, self.pb)
517
self.this_tree.lock_tree_write()
518
self.base_tree.lock_read()
519
self.other_tree.lock_read()
520
self.tt = TreeTransform(self.this_tree, self.pb)
523
self._compute_transform()
525
results = self.tt.apply(no_conflicts=True)
526
self.write_modified(results)
528
self.this_tree.add_conflicts(self.cooked_conflicts)
529
except UnsupportedOperation:
533
self.other_tree.unlock()
534
self.base_tree.unlock()
535
self.this_tree.unlock()
538
def make_preview_transform(self):
539
self.base_tree.lock_read()
540
self.other_tree.lock_read()
541
self.tt = TransformPreview(self.this_tree)
544
self._compute_transform()
547
self.other_tree.unlock()
548
self.base_tree.unlock()
552
def _compute_transform(self):
553
entries = self._entries3()
554
child_pb = ui.ui_factory.nested_progress_bar()
556
for num, (file_id, changed, parents3, names3,
557
executable3) in enumerate(entries):
558
child_pb.update('Preparing file merge', num, len(entries))
559
self._merge_names(file_id, parents3, names3)
561
file_status = self.merge_contents(file_id)
563
file_status = 'unmodified'
564
self._merge_executable(file_id,
565
executable3, file_status)
570
child_pb = ui.ui_factory.nested_progress_bar()
572
fs_conflicts = resolve_conflicts(self.tt, child_pb,
573
lambda t, c: conflict_pass(t, c, self.other_tree))
576
if self.change_reporter is not None:
577
from bzrlib import delta
578
delta.report_changes(
579
self.tt.iter_changes(), self.change_reporter)
580
self.cook_conflicts(fs_conflicts)
581
for conflict in self.cooked_conflicts:
585
"""Gather data about files modified between three trees.
587
Return a list of tuples of file_id, changed, parents3, names3,
588
executable3. changed is a boolean indicating whether the file contents
589
or kind were changed. parents3 is a tuple of parent ids for base,
590
other and this. names3 is a tuple of names for base, other and this.
591
executable3 is a tuple of execute-bit values for base, other and this.
594
iterator = self.other_tree.iter_changes(self.base_tree,
595
include_unchanged=True, specific_files=self.interesting_files,
596
extra_trees=[self.this_tree])
597
for (file_id, paths, changed, versioned, parents, names, kind,
598
executable) in iterator:
599
if (self.interesting_ids is not None and
600
file_id not in self.interesting_ids):
602
if file_id in self.this_tree.inventory:
603
entry = self.this_tree.inventory[file_id]
604
this_name = entry.name
605
this_parent = entry.parent_id
606
this_executable = entry.executable
610
this_executable = None
611
parents3 = parents + (this_parent,)
612
names3 = names + (this_name,)
613
executable3 = executable + (this_executable,)
614
result.append((file_id, changed, parents3, names3, executable3))
619
self.tt.final_kind(self.tt.root)
621
self.tt.cancel_deletion(self.tt.root)
622
if self.tt.final_file_id(self.tt.root) is None:
623
self.tt.version_file(self.tt.tree_file_id(self.tt.root),
625
if self.other_tree.inventory.root is None:
627
other_root_file_id = self.other_tree.get_root_id()
628
other_root = self.tt.trans_id_file_id(other_root_file_id)
629
if other_root == self.tt.root:
632
self.tt.final_kind(other_root)
635
self.reparent_children(self.other_tree.inventory.root, self.tt.root)
636
self.tt.cancel_creation(other_root)
637
self.tt.cancel_versioning(other_root)
639
def reparent_children(self, ie, target):
640
for thing, child in ie.children.iteritems():
641
trans_id = self.tt.trans_id_file_id(child.file_id)
642
self.tt.adjust_path(self.tt.final_name(trans_id), target, trans_id)
644
def write_modified(self, results):
646
for path in results.modified_paths:
647
file_id = self.this_tree.path2id(self.this_tree.relpath(path))
650
hash = self.this_tree.get_file_sha1(file_id)
653
modified_hashes[file_id] = hash
654
self.this_tree.set_merge_modified(modified_hashes)
657
def parent(entry, file_id):
658
"""Determine the parent for a file_id (used as a key method)"""
661
return entry.parent_id
664
def name(entry, file_id):
665
"""Determine the name for a file_id (used as a key method)"""
671
def contents_sha1(tree, file_id):
672
"""Determine the sha1 of the file contents (used as a key method)."""
673
if file_id not in tree:
675
return tree.get_file_sha1(file_id)
678
def executable(tree, file_id):
679
"""Determine the executability of a file-id (used as a key method)."""
680
if file_id not in tree:
682
if tree.kind(file_id) != "file":
684
return tree.is_executable(file_id)
687
def kind(tree, file_id):
688
"""Determine the kind of a file-id (used as a key method)."""
689
if file_id not in tree:
691
return tree.kind(file_id)
694
def _three_way(base, other, this):
695
#if base == other, either they all agree, or only THIS has changed.
698
elif this not in (base, other):
700
# "Ambiguous clean merge" -- both sides have made the same change.
703
# this == base: only other has changed.
708
def scalar_three_way(this_tree, base_tree, other_tree, file_id, key):
709
"""Do a three-way test on a scalar.
710
Return "this", "other" or "conflict", depending whether a value wins.
712
key_base = key(base_tree, file_id)
713
key_other = key(other_tree, file_id)
714
#if base == other, either they all agree, or only THIS has changed.
715
if key_base == key_other:
717
key_this = key(this_tree, file_id)
718
# "Ambiguous clean merge"
719
if key_this == key_other:
721
elif key_this == key_base:
726
def merge_names(self, file_id):
728
if file_id in tree.inventory:
729
return tree.inventory[file_id]
732
this_entry = get_entry(self.this_tree)
733
other_entry = get_entry(self.other_tree)
734
base_entry = get_entry(self.base_tree)
735
entries = (base_entry, other_entry, this_entry)
738
for entry in entries:
743
names.append(entry.name)
744
parents.append(entry.parent_id)
745
return self._merge_names(file_id, parents, names)
747
def _merge_names(self, file_id, parents, names):
748
"""Perform a merge on file_id names and parents"""
749
base_name, other_name, this_name = names
750
base_parent, other_parent, this_parent = parents
752
name_winner = self._three_way(*names)
754
parent_id_winner = self._three_way(*parents)
755
if this_name is None:
756
if name_winner == "this":
757
name_winner = "other"
758
if parent_id_winner == "this":
759
parent_id_winner = "other"
760
if name_winner == "this" and parent_id_winner == "this":
762
if name_winner == "conflict":
763
trans_id = self.tt.trans_id_file_id(file_id)
764
self._raw_conflicts.append(('name conflict', trans_id,
765
this_name, other_name))
766
if parent_id_winner == "conflict":
767
trans_id = self.tt.trans_id_file_id(file_id)
768
self._raw_conflicts.append(('parent conflict', trans_id,
769
this_parent, other_parent))
770
if other_name is None:
771
# it doesn't matter whether the result was 'other' or
772
# 'conflict'-- if there's no 'other', we leave it alone.
774
# if we get here, name_winner and parent_winner are set to safe values.
775
trans_id = self.tt.trans_id_file_id(file_id)
776
parent_id = parents[self.winner_idx[parent_id_winner]]
777
if parent_id is not None:
778
parent_trans_id = self.tt.trans_id_file_id(parent_id)
779
self.tt.adjust_path(names[self.winner_idx[name_winner]],
780
parent_trans_id, trans_id)
782
def merge_contents(self, file_id):
783
"""Performa a merge on file_id contents."""
784
def contents_pair(tree):
785
if file_id not in tree:
787
kind = tree.kind(file_id)
789
contents = tree.get_file_sha1(file_id)
790
elif kind == "symlink":
791
contents = tree.get_symlink_target(file_id)
794
return kind, contents
796
def contents_conflict():
797
trans_id = self.tt.trans_id_file_id(file_id)
798
name = self.tt.final_name(trans_id)
799
parent_id = self.tt.final_parent(trans_id)
800
if file_id in self.this_tree.inventory:
801
self.tt.unversion_file(trans_id)
802
if file_id in self.this_tree:
803
self.tt.delete_contents(trans_id)
804
file_group = self._dump_conflicts(name, parent_id, file_id,
806
self._raw_conflicts.append(('contents conflict', file_group))
808
# See SPOT run. run, SPOT, run.
809
# So we're not QUITE repeating ourselves; we do tricky things with
811
base_pair = contents_pair(self.base_tree)
812
other_pair = contents_pair(self.other_tree)
813
if base_pair == other_pair:
814
# OTHER introduced no changes
816
this_pair = contents_pair(self.this_tree)
817
if this_pair == other_pair:
818
# THIS and OTHER introduced the same changes
821
trans_id = self.tt.trans_id_file_id(file_id)
822
if this_pair == base_pair:
823
# only OTHER introduced changes
824
if file_id in self.this_tree:
825
# Remove any existing contents
826
self.tt.delete_contents(trans_id)
827
if file_id in self.other_tree:
828
# OTHER changed the file
829
create_by_entry(self.tt,
830
self.other_tree.inventory[file_id],
831
self.other_tree, trans_id)
832
if file_id not in self.this_tree.inventory:
833
self.tt.version_file(file_id, trans_id)
835
elif file_id in self.this_tree.inventory:
836
# OTHER deleted the file
837
self.tt.unversion_file(trans_id)
839
#BOTH THIS and OTHER introduced changes; scalar conflict
840
elif this_pair[0] == "file" and other_pair[0] == "file":
841
# THIS and OTHER are both files, so text merge. Either
842
# BASE is a file, or both converted to files, so at least we
843
# have agreement that output should be a file.
845
self.text_merge(file_id, trans_id)
847
return contents_conflict()
848
if file_id not in self.this_tree.inventory:
849
self.tt.version_file(file_id, trans_id)
851
self.tt.tree_kind(trans_id)
852
self.tt.delete_contents(trans_id)
857
# Scalar conflict, can't text merge. Dump conflicts
858
return contents_conflict()
860
def get_lines(self, tree, file_id):
861
"""Return the lines in a file, or an empty list."""
863
return tree.get_file(file_id).readlines()
867
def text_merge(self, file_id, trans_id):
868
"""Perform a three-way text merge on a file_id"""
869
# it's possible that we got here with base as a different type.
870
# if so, we just want two-way text conflicts.
871
if file_id in self.base_tree and \
872
self.base_tree.kind(file_id) == "file":
873
base_lines = self.get_lines(self.base_tree, file_id)
876
other_lines = self.get_lines(self.other_tree, file_id)
877
this_lines = self.get_lines(self.this_tree, file_id)
878
m3 = Merge3(base_lines, this_lines, other_lines,
879
is_cherrypick=self.cherrypick)
880
start_marker = "!START OF MERGE CONFLICT!" + "I HOPE THIS IS UNIQUE"
881
if self.show_base is True:
882
base_marker = '|' * 7
886
def iter_merge3(retval):
887
retval["text_conflicts"] = False
888
for line in m3.merge_lines(name_a = "TREE",
889
name_b = "MERGE-SOURCE",
890
name_base = "BASE-REVISION",
891
start_marker=start_marker,
892
base_marker=base_marker,
893
reprocess=self.reprocess):
894
if line.startswith(start_marker):
895
retval["text_conflicts"] = True
896
yield line.replace(start_marker, '<' * 7)
900
merge3_iterator = iter_merge3(retval)
901
self.tt.create_file(merge3_iterator, trans_id)
902
if retval["text_conflicts"] is True:
903
self._raw_conflicts.append(('text conflict', trans_id))
904
name = self.tt.final_name(trans_id)
905
parent_id = self.tt.final_parent(trans_id)
906
file_group = self._dump_conflicts(name, parent_id, file_id,
907
this_lines, base_lines,
909
file_group.append(trans_id)
911
def _dump_conflicts(self, name, parent_id, file_id, this_lines=None,
912
base_lines=None, other_lines=None, set_version=False,
914
"""Emit conflict files.
915
If this_lines, base_lines, or other_lines are omitted, they will be
916
determined automatically. If set_version is true, the .OTHER, .THIS
917
or .BASE (in that order) will be created as versioned files.
919
data = [('OTHER', self.other_tree, other_lines),
920
('THIS', self.this_tree, this_lines)]
922
data.append(('BASE', self.base_tree, base_lines))
925
for suffix, tree, lines in data:
927
trans_id = self._conflict_file(name, parent_id, tree, file_id,
929
file_group.append(trans_id)
930
if set_version and not versioned:
931
self.tt.version_file(file_id, trans_id)
935
def _conflict_file(self, name, parent_id, tree, file_id, suffix,
937
"""Emit a single conflict file."""
938
name = name + '.' + suffix
939
trans_id = self.tt.create_path(name, parent_id)
940
entry = tree.inventory[file_id]
941
create_by_entry(self.tt, entry, tree, trans_id, lines)
944
def merge_executable(self, file_id, file_status):
945
"""Perform a merge on the execute bit."""
946
executable = [self.executable(t, file_id) for t in (self.base_tree,
947
self.other_tree, self.this_tree)]
948
self._merge_executable(file_id, executable, file_status)
950
def _merge_executable(self, file_id, executable, file_status):
951
"""Perform a merge on the execute bit."""
952
base_executable, other_executable, this_executable = executable
953
if file_status == "deleted":
955
winner = self._three_way(*executable)
956
if winner == "conflict":
957
# There must be a None in here, if we have a conflict, but we
958
# need executability since file status was not deleted.
959
if self.executable(self.other_tree, file_id) is None:
963
if winner == 'this' and file_status != "modified":
965
trans_id = self.tt.trans_id_file_id(file_id)
967
if self.tt.final_kind(trans_id) != "file":
972
executability = this_executable
974
if file_id in self.other_tree:
975
executability = other_executable
976
elif file_id in self.this_tree:
977
executability = this_executable
978
elif file_id in self.base_tree:
979
executability = base_executable
980
if executability is not None:
981
trans_id = self.tt.trans_id_file_id(file_id)
982
self.tt.set_executability(executability, trans_id)
984
def cook_conflicts(self, fs_conflicts):
985
"""Convert all conflicts into a form that doesn't depend on trans_id"""
986
from conflicts import Conflict
988
self.cooked_conflicts.extend(cook_conflicts(fs_conflicts, self.tt))
989
fp = FinalPaths(self.tt)
990
for conflict in self._raw_conflicts:
991
conflict_type = conflict[0]
992
if conflict_type in ('name conflict', 'parent conflict'):
993
trans_id = conflict[1]
994
conflict_args = conflict[2:]
995
if trans_id not in name_conflicts:
996
name_conflicts[trans_id] = {}
997
unique_add(name_conflicts[trans_id], conflict_type,
999
if conflict_type == 'contents conflict':
1000
for trans_id in conflict[1]:
1001
file_id = self.tt.final_file_id(trans_id)
1002
if file_id is not None:
1004
path = fp.get_path(trans_id)
1005
for suffix in ('.BASE', '.THIS', '.OTHER'):
1006
if path.endswith(suffix):
1007
path = path[:-len(suffix)]
1009
c = Conflict.factory(conflict_type, path=path, file_id=file_id)
1010
self.cooked_conflicts.append(c)
1011
if conflict_type == 'text conflict':
1012
trans_id = conflict[1]
1013
path = fp.get_path(trans_id)
1014
file_id = self.tt.final_file_id(trans_id)
1015
c = Conflict.factory(conflict_type, path=path, file_id=file_id)
1016
self.cooked_conflicts.append(c)
1018
for trans_id, conflicts in name_conflicts.iteritems():
1020
this_parent, other_parent = conflicts['parent conflict']
1021
if this_parent == other_parent:
1022
raise AssertionError()
1024
this_parent = other_parent = \
1025
self.tt.final_file_id(self.tt.final_parent(trans_id))
1027
this_name, other_name = conflicts['name conflict']
1028
if this_name == other_name:
1029
raise AssertionError()
1031
this_name = other_name = self.tt.final_name(trans_id)
1032
other_path = fp.get_path(trans_id)
1033
if this_parent is not None and this_name is not None:
1034
this_parent_path = \
1035
fp.get_path(self.tt.trans_id_file_id(this_parent))
1036
this_path = pathjoin(this_parent_path, this_name)
1038
this_path = "<deleted>"
1039
file_id = self.tt.final_file_id(trans_id)
1040
c = Conflict.factory('path conflict', path=this_path,
1041
conflict_path=other_path, file_id=file_id)
1042
self.cooked_conflicts.append(c)
1043
self.cooked_conflicts.sort(key=Conflict.sort_key)
1046
class WeaveMerger(Merge3Merger):
1047
"""Three-way tree merger, text weave merger."""
1048
supports_reprocess = True
1049
supports_show_base = False
1050
supports_reverse_cherrypick = False
1051
history_based = True
1053
def _merged_lines(self, file_id):
1054
"""Generate the merged lines.
1055
There is no distinction between lines that are meant to contain <<<<<<<
1059
base = self.base_tree
1062
plan = self.this_tree.plan_file_merge(file_id, self.other_tree,
1064
if 'merge' in debug.debug_flags:
1066
trans_id = self.tt.trans_id_file_id(file_id)
1067
name = self.tt.final_name(trans_id) + '.plan'
1068
contents = ('%10s|%s' % l for l in plan)
1069
self.tt.new_file(name, self.tt.final_parent(trans_id), contents)
1070
textmerge = PlanWeaveMerge(plan, '<<<<<<< TREE\n',
1071
'>>>>>>> MERGE-SOURCE\n')
1072
return textmerge.merge_lines(self.reprocess)
1074
def text_merge(self, file_id, trans_id):
1075
"""Perform a (weave) text merge for a given file and file-id.
1076
If conflicts are encountered, .THIS and .OTHER files will be emitted,
1077
and a conflict will be noted.
1079
lines, conflicts = self._merged_lines(file_id)
1081
# Note we're checking whether the OUTPUT is binary in this case,
1082
# because we don't want to get into weave merge guts.
1083
check_text_lines(lines)
1084
self.tt.create_file(lines, trans_id)
1086
self._raw_conflicts.append(('text conflict', trans_id))
1087
name = self.tt.final_name(trans_id)
1088
parent_id = self.tt.final_parent(trans_id)
1089
file_group = self._dump_conflicts(name, parent_id, file_id,
1091
file_group.append(trans_id)
1094
class LCAMerger(WeaveMerger):
1096
def _merged_lines(self, file_id):
1097
"""Generate the merged lines.
1098
There is no distinction between lines that are meant to contain <<<<<<<
1102
base = self.base_tree
1105
plan = self.this_tree.plan_file_lca_merge(file_id, self.other_tree,
1107
if 'merge' in debug.debug_flags:
1109
trans_id = self.tt.trans_id_file_id(file_id)
1110
name = self.tt.final_name(trans_id) + '.plan'
1111
contents = ('%10s|%s' % l for l in plan)
1112
self.tt.new_file(name, self.tt.final_parent(trans_id), contents)
1113
textmerge = PlanWeaveMerge(plan, '<<<<<<< TREE\n',
1114
'>>>>>>> MERGE-SOURCE\n')
1115
return textmerge.merge_lines(self.reprocess)
1118
class Diff3Merger(Merge3Merger):
1119
"""Three-way merger using external diff3 for text merging"""
1121
def dump_file(self, temp_dir, name, tree, file_id):
1122
out_path = pathjoin(temp_dir, name)
1123
out_file = open(out_path, "wb")
1125
in_file = tree.get_file(file_id)
1126
for line in in_file:
1127
out_file.write(line)
1132
def text_merge(self, file_id, trans_id):
1133
"""Perform a diff3 merge using a specified file-id and trans-id.
1134
If conflicts are encountered, .BASE, .THIS. and .OTHER conflict files
1135
        will be dumped, and a conflict will be noted.
1138
temp_dir = osutils.mkdtemp(prefix="bzr-")
1140
new_file = pathjoin(temp_dir, "new")
1141
this = self.dump_file(temp_dir, "this", self.this_tree, file_id)
1142
base = self.dump_file(temp_dir, "base", self.base_tree, file_id)
1143
other = self.dump_file(temp_dir, "other", self.other_tree, file_id)
1144
status = bzrlib.patch.diff3(new_file, this, base, other)
1145
if status not in (0, 1):
1146
raise BzrError("Unhandled diff3 exit code")
1147
f = open(new_file, 'rb')
1149
self.tt.create_file(f, trans_id)
1153
name = self.tt.final_name(trans_id)
1154
parent_id = self.tt.final_parent(trans_id)
1155
self._dump_conflicts(name, parent_id, file_id)
1156
self._raw_conflicts.append(('text conflict', trans_id))
1158
osutils.rmtree(temp_dir)
1161
def merge_inner(this_branch, other_tree, base_tree, ignore_zero=False,
1163
merge_type=Merge3Merger,
1164
interesting_ids=None,
1168
interesting_files=None,
1171
change_reporter=None):
1172
"""Primary interface for merging.
1174
typical use is probably
1175
'merge_inner(branch, branch.get_revision_tree(other_revision),
1176
branch.get_revision_tree(base_revision))'
1178
if this_tree is None:
1179
raise BzrError("bzrlib.merge.merge_inner requires a this_tree "
1180
"parameter as of bzrlib version 0.8.")
1181
merger = Merger(this_branch, other_tree, base_tree, this_tree=this_tree,
1182
pb=pb, change_reporter=change_reporter)
1183
merger.backup_files = backup_files
1184
merger.merge_type = merge_type
1185
merger.interesting_ids = interesting_ids
1186
merger.ignore_zero = ignore_zero
1187
if interesting_files:
1189
raise ValueError('Only supply interesting_ids'
1190
' or interesting_files')
1191
merger.interesting_files = interesting_files
1192
merger.show_base = show_base
1193
merger.reprocess = reprocess
1194
merger.other_rev_id = other_rev_id
1195
merger.other_basis = other_rev_id
1196
get_revision_id = getattr(base_tree, 'get_revision_id', None)
1197
if get_revision_id is None:
1198
get_revision_id = base_tree.last_revision
1199
merger.set_base_revision(get_revision_id(), this_branch)
1200
return merger.do_merge()
1202
def get_merge_type_registry():
1203
"""Merge type registry is in bzrlib.option to avoid circular imports.
1205
This method provides a sanctioned way to retrieve it.
1207
from bzrlib import option
1208
return option._merge_type_registry
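

# A usage sketch for get_merge_type_registry() (not part of the original
# module).  The registry key 'merge3' is an assumption about what the
# registry contains.
def _example_lookup_merge_type(name='merge3'):
    merge_types = get_merge_type_registry()
    return merge_types.get(name)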
1211
def _plan_annotate_merge(annotated_a, annotated_b, ancestors_a, ancestors_b):
1212
def status_a(revision, text):
1213
if revision in ancestors_b:
1214
return 'killed-b', text
1216
return 'new-a', text
1218
def status_b(revision, text):
1219
if revision in ancestors_a:
1220
return 'killed-a', text
1222
return 'new-b', text
1224
plain_a = [t for (a, t) in annotated_a]
1225
plain_b = [t for (a, t) in annotated_b]
1226
matcher = patiencediff.PatienceSequenceMatcher(None, plain_a, plain_b)
1227
blocks = matcher.get_matching_blocks()
1230
for ai, bi, l in blocks:
1231
# process all mismatched sections
1232
# (last mismatched section is handled because blocks always
1233
# includes a 0-length last block)
1234
for revision, text in annotated_a[a_cur:ai]:
1235
yield status_a(revision, text)
1236
for revision, text in annotated_b[b_cur:bi]:
1237
yield status_b(revision, text)
1238
# and now the matched section
1241
for text_a in plain_a[ai:a_cur]:
1242
yield "unchanged", text_a
1245
class _PlanMergeBase(object):
1247
def __init__(self, a_rev, b_rev, vf, key_prefix):
1250
:param a_rev: Revision-id of one revision to merge
1251
:param b_rev: Revision-id of the other revision to merge
1252
:param vf: A VersionedFiles containing both revisions
1253
:param key_prefix: A prefix for accessing keys in vf, typically
1259
self._last_lines = None
1260
self._last_lines_revision_id = None
1261
self._cached_matching_blocks = {}
1262
self._key_prefix = key_prefix
1263
self._precache_tip_lines()
1265
def _precache_tip_lines(self):
1266
lines = self.get_lines([self.a_rev, self.b_rev])
1267
self.lines_a = lines[self.a_rev]
1268
self.lines_b = lines[self.b_rev]
1270
def get_lines(self, revisions):
1271
"""Get lines for revisions from the backing VersionedFiles.
1273
:raises RevisionNotPresent: on absent texts.
1275
keys = [(self._key_prefix + (rev,)) for rev in revisions]
1277
for record in self.vf.get_record_stream(keys, 'unordered', True):
1278
if record.storage_kind == 'absent':
1279
raise errors.RevisionNotPresent(record.key, self.vf)
1280
result[record.key[-1]] = osutils.split_lines(
1281
record.get_bytes_as('fulltext'))
1284
def plan_merge(self):
1285
"""Generate a 'plan' for merging the two revisions.
1287
This involves comparing their texts and determining the cause of
1288
differences. If text A has a line and text B does not, then either the
1289
line was added to text A, or it was deleted from B. Once the causes
1290
are combined, they are written out in the format described in
1291
VersionedFile.plan_merge
1293
blocks = self._get_matching_blocks(self.a_rev, self.b_rev)
1294
unique_a, unique_b = self._unique_lines(blocks)
1295
new_a, killed_b = self._determine_status(self.a_rev, unique_a)
1296
new_b, killed_a = self._determine_status(self.b_rev, unique_b)
1297
return self._iter_plan(blocks, new_a, killed_b, new_b, killed_a)
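
    # The value returned above is an iterator of (state, line) pairs; a
    # sketch of what one might look like (illustrative, not from a real run):
    #
    #   ('unchanged', 'shared line\n')
    #   ('killed-b',  'line deleted on the b side\n')
    #   ('new-a',     'line added on the a side\n')
    #   ('new-b',     'line added on the b side\n')
    #
    # PlanWeaveMerge consumes a plan in this form to produce merged text with
    # conflict regions where needed.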
1299
def _iter_plan(self, blocks, new_a, killed_b, new_b, killed_a):
1302
for i, j, n in blocks:
1303
for a_index in range(last_i, i):
1304
if a_index in new_a:
1305
if a_index in killed_b:
1306
yield 'conflicted-a', self.lines_a[a_index]
1308
yield 'new-a', self.lines_a[a_index]
1310
yield 'killed-b', self.lines_a[a_index]
1311
for b_index in range(last_j, j):
1312
if b_index in new_b:
1313
if b_index in killed_a:
1314
yield 'conflicted-b', self.lines_b[b_index]
1316
yield 'new-b', self.lines_b[b_index]
1318
yield 'killed-a', self.lines_b[b_index]
1319
# handle common lines
1320
for a_index in range(i, i+n):
1321
yield 'unchanged', self.lines_a[a_index]

    def _get_matching_blocks(self, left_revision, right_revision):
        """Return a description of which sections of two revisions match.

        See SequenceMatcher.get_matching_blocks
        """
        cached = self._cached_matching_blocks.get((left_revision,
                                                   right_revision))
        if cached is not None:
            return cached
        if self._last_lines_revision_id == left_revision:
            left_lines = self._last_lines
            right_lines = self.get_lines([right_revision])[right_revision]
        else:
            lines = self.get_lines([left_revision, right_revision])
            left_lines = lines[left_revision]
            right_lines = lines[right_revision]
        self._last_lines = right_lines
        self._last_lines_revision_id = right_revision
        matcher = patiencediff.PatienceSequenceMatcher(None, left_lines,
                                                       right_lines)
        return matcher.get_matching_blocks()
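
    # For reference, get_matching_blocks() returns (i, j, n) triples meaning
    # left_lines[i:i+n] == right_lines[j:j+n], always terminated by a 0-length
    # block.  A small invented example:
    #     left_lines  = ['a\n', 'b\n', 'c\n']
    #     right_lines = ['a\n', 'c\n']
    #     blocks      = [(0, 0, 1), (2, 1, 1), (3, 2, 0)]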

    def _unique_lines(self, matching_blocks):
        """Analyse matching_blocks to determine which lines are unique

        :return: a tuple of (unique_left, unique_right), where the values are
            lists of line numbers of unique lines.
        """
        last_i = 0
        last_j = 0
        unique_left = []
        unique_right = []
        for i, j, n in matching_blocks:
            unique_left.extend(range(last_i, i))
            unique_right.extend(range(last_j, j))
            last_i = i + n
            last_j = j + n
        return unique_left, unique_right
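
    # Continuing the invented example above: matching blocks of
    #     [(0, 0, 1), (2, 1, 1), (3, 2, 0)]
    # give unique_left == [1] (the 'b\n' line has no counterpart) and
    # unique_right == [] -- the results are line indices, not text.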

    @staticmethod
    def _subtract_plans(old_plan, new_plan):
        """Remove changes from new_plan that came from old_plan.

        It is assumed that the difference between the old_plan and new_plan
        is their choice of 'b' text.

        All lines from new_plan that differ from old_plan are emitted
        verbatim.  All lines from new_plan that match old_plan but are
        not about the 'b' revision are emitted verbatim.

        Lines that match and are about the 'b' revision are the lines we
        don't want, so we convert 'killed-b' -> 'unchanged', and 'new-b'
        is skipped entirely.
        """
        matcher = patiencediff.PatienceSequenceMatcher(None, old_plan,
                                                       new_plan)
        last_j = 0
        for i, j, n in matcher.get_matching_blocks():
            for jj in range(last_j, j):
                yield new_plan[jj]
            for jj in range(j, j+n):
                plan_line = new_plan[jj]
                if plan_line[0] == 'new-b':
                    pass
                elif plan_line[0] == 'killed-b':
                    yield 'unchanged', plan_line[1]
                else:
                    yield plan_line
            last_j = j + n
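
    # An invented before/after for _subtract_plans (plans are sequences of
    # (status, line) tuples; the texts here are made up):
    #     old_plan = [('unchanged', 'x\n'), ('killed-b', 'y\n'), ('new-b', 'z\n')]
    #     new_plan = [('unchanged', 'x\n'), ('killed-b', 'y\n'), ('new-b', 'z\n')]
    # Since the plans match, the 'b'-derived entries are neutralised and the
    # generator yields
    #     ('unchanged', 'x\n'), ('unchanged', 'y\n')
    # Entries found only in new_plan would instead be passed through verbatim.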


class _PlanMerge(_PlanMergeBase):
    """Plan an annotate merge using on-the-fly annotation"""

    def __init__(self, a_rev, b_rev, vf, key_prefix):
        super(_PlanMerge, self).__init__(a_rev, b_rev, vf, key_prefix)
        self.a_key = self._key_prefix + (self.a_rev,)
        self.b_key = self._key_prefix + (self.b_rev,)
        self.graph = Graph(self.vf)
        heads = self.graph.heads((self.a_key, self.b_key))
        if len(heads) == 1:
            # one side dominates, so we can just return its values, yay for
            # confluence :)
            # Ideally we would know that before we get this far
            self._head_key = heads.pop()
            if self._head_key == self.a_key:
                other = b_rev
            else:
                other = a_rev
            mutter('found dominating revision for %s\n%s > %s', self.vf,
                   self._head_key[-1], other)
            self._weave = None
        else:
            self._head_key = None
            self._build_weave()

    def _precache_tip_lines(self):
        # Turn this into a no-op, because we will do this later
        pass

    def _find_recursive_lcas(self):
        """Find all the ancestors back to a unique lca"""
        cur_ancestors = (self.a_key, self.b_key)
        # graph.find_lca(uncommon, keys) now returns plain NULL_REVISION,
        # rather than a key tuple. We will just map that directly to no common
        # ancestors.
        parent_map = {}
        while True:
            next_lcas = self.graph.find_lca(*cur_ancestors)
            # Map a plain NULL_REVISION to a simple no-ancestors
            if next_lcas == set([NULL_REVISION]):
                next_lcas = ()
            # Order the lca's based on when they were merged into the tip
            # While the actual merge portion of weave merge uses a set() of
            # active revisions, the order of insertion *does* affect the
            # implicit ordering of the texts.
            for rev_key in cur_ancestors:
                ordered_parents = tuple(self.graph.find_merge_order(rev_key,
                                                                    next_lcas))
                parent_map[rev_key] = ordered_parents
            if len(next_lcas) == 0:
                break
            elif len(next_lcas) == 1:
                parent_map[list(next_lcas)[0]] = ()
                break
            elif len(next_lcas) > 2:
                # More than 2 lca's, fall back to grabbing all nodes between
                # this and the unique lca.
                mutter('More than 2 LCAs, falling back to all nodes for:'
                       ' %s, %s\n=> %s', self.a_key, self.b_key, cur_ancestors)
                cur_lcas = next_lcas
                while len(cur_lcas) > 1:
                    cur_lcas = self.graph.find_lca(*cur_lcas)
                if len(cur_lcas) == 0:
                    # No common base to find, use the full ancestry
                    unique_lca = None
                else:
                    unique_lca = list(cur_lcas)[0]
                    if unique_lca == NULL_REVISION:
                        # find_lca will return a plain 'NULL_REVISION' rather
                        # than a key tuple when there is no common ancestor, we
                        # prefer to just use None, because it doesn't confuse
                        # _get_interesting_texts()
                        unique_lca = None
                parent_map.update(self._find_unique_parents(next_lcas,
                                                            unique_lca))
                break
            cur_ancestors = next_lcas
        return parent_map
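
    # The returned parent_map is keyed by key tuples (self._key_prefix plus a
    # revision id) and maps each tip or LCA to its ordered LCA "parents".  An
    # invented shape, only to show the structure:
    #     {a_key: (lca1_key, lca2_key),
    #      b_key: (lca2_key, lca1_key),
    #      lca1_key: (unique_lca_key,),
    #      lca2_key: (unique_lca_key,),
    #      unique_lca_key: ()}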

    def _find_unique_parents(self, tip_keys, base_key):
        """Find ancestors of tip that aren't ancestors of base.

        :param tip_keys: Nodes that are interesting
        :param base_key: Cull all ancestors of this node
        :return: The parent map for all revisions between tip_keys and
            base_key. base_key will be included. References to nodes outside
            of the ancestor set will also be removed.
        """
        # TODO: this would be simpler if find_unique_ancestors took a list
        #       instead of a single tip, internally it supports it, but it
        #       isn't a "backwards compatible" api change.
        if base_key is None:
            parent_map = dict(self.graph.iter_ancestry(tip_keys))
            # We remove NULL_REVISION because it isn't a proper tuple key, and
            # thus confuses things like _get_interesting_texts, and our logic
            # to add the texts into the memory weave.
            if NULL_REVISION in parent_map:
                parent_map.pop(NULL_REVISION)
        else:
            interesting = set()
            for tip in tip_keys:
                interesting.update(
                    self.graph.find_unique_ancestors(tip, [base_key]))
            parent_map = self.graph.get_parent_map(interesting)
            parent_map[base_key] = ()
        culled_parent_map, child_map, tails = self._remove_external_references(
            parent_map)
        # Remove all the tails but base_key
        if base_key is not None:
            tails.remove(base_key)
            self._prune_tails(culled_parent_map, child_map, tails)
        # Now remove all the uninteresting 'linear' regions
        simple_map = _mod_graph.collapse_linear_regions(culled_parent_map)
        return simple_map

    @staticmethod
    def _remove_external_references(parent_map):
        """Remove references that go outside of the parent map.

        :param parent_map: Something returned from Graph.get_parent_map(keys)
        :return: (filtered_parent_map, child_map, tails)
            filtered_parent_map is parent_map without external references
            child_map is the {parent_key: [child_keys]} mapping
            tails is a list of nodes that do not have any parents in the map
        """
        # TODO: The basic effect of this function seems more generic than
        #       _PlanMerge. But the specific details of building a child_map,
        #       and computing tails seems very specific to _PlanMerge.
        #       Still, should this be in Graph land?
        filtered_parent_map = {}
        child_map = {}
        tails = []
        for key, parent_keys in parent_map.iteritems():
            culled_parent_keys = [p for p in parent_keys if p in parent_map]
            if not culled_parent_keys:
                tails.append(key)
            for parent_key in culled_parent_keys:
                child_map.setdefault(parent_key, []).append(key)
            # TODO: Do we want to do this, it adds overhead for every node,
            #       just to say that the node has no children
            child_map.setdefault(key, [])
            filtered_parent_map[key] = culled_parent_keys
        return filtered_parent_map, child_map, tails
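
    # An invented example of the three return values: with
    #     parent_map = {('B',): (('A',),), ('C',): (('B',), ('X',))}
    # where ('A',) and ('X',) are outside the map, the result would be
    #     filtered_parent_map == {('B',): [], ('C',): [('B',)]}
    #     child_map == {('B',): [('C',)], ('C',): []}
    #     tails == [('B',)]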

    @staticmethod
    def _prune_tails(parent_map, child_map, tails_to_remove):
        """Remove tails from the parent map.

        This will remove the supplied revisions until no more children have 0
        parents.

        :param parent_map: A dict of {child: [parents]}, this dictionary will
            be modified in place.
        :param tails_to_remove: A list of tips that should be removed,
            this list will be consumed
        :param child_map: The reverse dict of parent_map ({parent: [children]})
            this dict will be modified
        :return: None, parent_map will be modified in place.
        """
        while tails_to_remove:
            next = tails_to_remove.pop()
            parent_map.pop(next)
            children = child_map.pop(next)
            for child in children:
                child_parents = parent_map[child]
                child_parents.remove(next)
                if len(child_parents) == 0:
                    tails_to_remove.append(child)

    def _get_interesting_texts(self, parent_map):
        """Return a dict of texts we are interested in.

        Note that the input is in key tuples, but the output is in plain
        revision ids.

        :param parent_map: The output from _find_recursive_lcas
        :return: A dict of {'revision_id':lines} as returned by
            _PlanMergeBase.get_lines()
        """
        all_revision_keys = set(parent_map)
        all_revision_keys.add(self.a_key)
        all_revision_keys.add(self.b_key)

        # Everything else is in 'keys' but get_lines is in 'revision_ids'
        all_texts = self.get_lines([k[-1] for k in all_revision_keys])
        return all_texts

    def _build_weave(self):
        from bzrlib import weave
        self._weave = weave.Weave(weave_name='in_memory_weave',
                                  allow_reserved=True)
        parent_map = self._find_recursive_lcas()

        all_texts = self._get_interesting_texts(parent_map)

        # Note: Unfortunately, the order given by topo_sort will affect the
        # ordering resolution in the output. Specifically, if you add A then B,
        # then in the output text A lines will show up before B lines. And, of
        # course, topo_sort doesn't guarantee any real ordering.
        # So we use merge_sort, and add a fake node on the tip.
        # This ensures that left-hand parents will always be inserted into the
        # weave before right-hand parents.
        tip_key = self._key_prefix + (_mod_revision.CURRENT_REVISION,)
        parent_map[tip_key] = (self.a_key, self.b_key)

        for seq_num, key, depth, eom in reversed(tsort.merge_sort(parent_map,
                                                                  tip_key)):
            if key == tip_key:
                continue
            # for key in tsort.topo_sort(parent_map):
            parent_keys = parent_map[key]
            revision_id = key[-1]
            parent_ids = [k[-1] for k in parent_keys]
            self._weave.add_lines(revision_id, parent_ids,
                                  all_texts[revision_id])

    def plan_merge(self):
        """Generate a 'plan' for merging the two revisions.

        This involves comparing their texts and determining the cause of
        differences.  If text A has a line and text B does not, then either
        the line was added to text A, or it was deleted from B.  Once the
        causes are combined, they are written out in the format described in
        VersionedFile.plan_merge.
        """
        if self._head_key is not None: # There was a single head
            if self._head_key == self.a_key:
                plan = 'new-a'
            else:
                if self._head_key != self.b_key:
                    raise AssertionError('There was an invalid head: %s != %s'
                                         % (self.b_key, self._head_key))
                plan = 'new-b'
            head_rev = self._head_key[-1]
            lines = self.get_lines([head_rev])[head_rev]
            return ((plan, line) for line in lines)
        return self._weave.plan_merge(self.a_rev, self.b_rev)


class _PlanLCAMerge(_PlanMergeBase):
    """Plan an LCA-based annotate merge.

    This merge algorithm differs from _PlanMerge in that:
    1. comparisons are done against LCAs only
    2. cases where a contested line is new versus one LCA but old versus
       another are marked as conflicts, by emitting the line as conflicted-a
       or conflicted-b.

    This is faster, and hopefully produces more useful output.
    """

    def __init__(self, a_rev, b_rev, vf, key_prefix, graph):
        _PlanMergeBase.__init__(self, a_rev, b_rev, vf, key_prefix)
        lcas = graph.find_lca(key_prefix + (a_rev,), key_prefix + (b_rev,))
        self.lcas = set()
        for lca in lcas:
            if lca == NULL_REVISION:
                self.lcas.add(lca)
            else:
                self.lcas.add(lca[-1])
        for lca in self.lcas:
            if _mod_revision.is_null(lca):
                lca_lines = []
            else:
                lca_lines = self.get_lines([lca])[lca]
            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_a,
                                                           lca_lines)
            blocks = list(matcher.get_matching_blocks())
            self._cached_matching_blocks[(a_rev, lca)] = blocks
            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_b,
                                                           lca_lines)
            blocks = list(matcher.get_matching_blocks())
            self._cached_matching_blocks[(b_rev, lca)] = blocks

    def _determine_status(self, revision_id, unique_line_numbers):
        """Determines the status of unique lines versus all LCAs.

        Basically, determines why the line is unique to this revision.

        A line may be determined new, killed, or both.

        If a line is determined new, that means it was not present in at least
        one LCA, and is not present in the other merge revision.

        If a line is determined killed, that means the line was present in
        at least one LCA.

        If a line is killed and new, this indicates that the two merge
        revisions contain differing conflict resolutions.

        :param revision_id: The id of the revision in which the lines are
            unique
        :param unique_line_numbers: The line numbers of unique lines.
        :return: a tuple of (new_this, killed_other)
        """
        new = set()
        killed = set()
        unique_line_numbers = set(unique_line_numbers)
        for lca in self.lcas:
            blocks = self._get_matching_blocks(revision_id, lca)
            unique_vs_lca, _ignored = self._unique_lines(blocks)
            new.update(unique_line_numbers.intersection(unique_vs_lca))
            killed.update(unique_line_numbers.difference(unique_vs_lca))
        return new, killed
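
    # A small invented illustration: with self.lcas == set(['lca1']) and
    # unique_line_numbers == set([0, 1]), where line 0 also differs from
    # 'lca1' but line 1 matches it, the result is
    #     new == set([0])      (genuinely added in this revision)
    #     killed == set([1])   (present in the LCA, so removed on the other side)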
                self.base_rev_id = None
            else:
                self.base_rev_id = base_branch.get_rev_id(base_revision[1])
            fetch(from_branch=base_branch, to_branch=self.this_branch)
            self.base_is_ancestor = is_ancestor(self.this_basis,
                                                self.base_rev_id,
                                                self.this_branch)

    def do_merge(self):
        def get_inventory(tree):
            return tree.inventory

        inv_changes = merge_flex(self.this_tree, self.base_tree,
                                 self.other_tree,
                                 generate_changeset, get_inventory,
                                 self.conflict_handler,
                                 merge_factory=self.merge_factory,
                                 interesting_ids=self.interesting_ids)

        adjust_ids = []
        for id, path in inv_changes.iteritems():
            if path is not None:
                if path == u'.':
                    path = u''
                else:
                    assert path.startswith('.' + '/') or path.startswith('.' + '\\'), "path is %s" % path
                    path = path[2:]
            adjust_ids.append((path, id))
        if len(adjust_ids) > 0:
            self.this_branch.working_tree().set_inventory(self.regen_inventory(adjust_ids))
        conflicts = self.conflict_handler.conflicts
        self.conflict_handler.finalize()
        return conflicts

    def regen_inventory(self, new_entries):
        old_entries = self.this_branch.working_tree().read_working_inventory()
        new_inventory = {}
        by_path = {}
        new_entries_map = {}
        for path, file_id in new_entries:
            if path is None:
                continue
            new_entries_map[file_id] = path

        def id2path(file_id):
            path = new_entries_map.get(file_id)
            if path is not None:
                return path
            entry = old_entries[file_id]
            if entry.parent_id is None:
                return entry.name
            return os.path.join(id2path(entry.parent_id), entry.name)

        for file_id in old_entries:
            entry = old_entries[file_id]
            path = id2path(file_id)
            new_inventory[file_id] = (path, file_id, entry.parent_id,
                                      entry.kind)
            by_path[path] = file_id

        deletions = 0
        insertions = 0
        new_path_list = []
        for path, file_id in new_entries:
            if path is None:
                del new_inventory[file_id]
                deletions += 1
            else:
                new_path_list.append((path, file_id))
                if file_id not in old_entries:
                    insertions += 1
        # Ensure no file is added before its parent
        new_path_list.sort()
        for path, file_id in new_path_list:
            if path == '':
                parent = None
            else:
                parent = by_path[os.path.dirname(path)]
            abspath = os.path.join(self.this_tree.basedir, path)
            kind = bzrlib.osutils.file_kind(abspath)
            new_inventory[file_id] = (path, file_id, parent, kind)
            by_path[path] = file_id

        # Get a list in insertion order
        new_inventory_list = new_inventory.values()
        mutter("""Inventory regeneration:
    old length: %i insertions: %i deletions: %i new_length: %i"""
               % (len(old_entries), insertions, deletions,
                  len(new_inventory_list)))
        assert len(new_inventory_list) == len(old_entries) + insertions \
            - deletions
        new_inventory_list.sort()
        return new_inventory_list


merge_types = {"merge3": (ApplyMerge3, "Native diff3-style merge"),
               "diff3": (Diff3Merge, "Merge using external diff3"),
               'weave': (WeaveMerge, "Weave-based merge"),
               }
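
# An invented usage sketch (not taken from bzrlib itself): command-line code
# can look up a merge implementation by name from this registry, e.g.
#     factory, description = merge_types['merge3']
# and then pass `factory` on as the merge_type/merge_factory argument to the
# merge machinery above.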