# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

import warnings

from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """
from bzrlib import (
    branch as _mod_branch,
    cleanup,
    conflicts as _mod_conflicts,
    debug,
    osutils,
    revision as _mod_revision,
    textfile,
    trace,
    transform,
    tree as _mod_tree,
    ui,
    versionedfile,
    )
from bzrlib.i18n import gettext
""")
from bzrlib import (
    decorators,
    errors,
    hooks,
    )
from bzrlib.symbol_versioning import (
    deprecated_in,
    deprecated_method,
    )
# TODO: Report back as changes are merged in

def transform_tree(from_tree, to_tree, interesting_ids=None):
    from_tree.lock_tree_write()
    operation = cleanup.OperationWithCleanups(merge_inner)
    operation.add_cleanup(from_tree.unlock)
    operation.run_simple(from_tree.branch, to_tree, from_tree,
        ignore_zero=True, interesting_ids=interesting_ids, this_tree=from_tree)

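# Illustrative usage sketch (not part of the original source; the working tree
# object `wt` is assumed to come from the caller): transform_tree rewrites
# from_tree's contents to match to_tree, so passing the basis tree effectively
# reverts uncommitted changes.
#
#   transform_tree(wt, wt.basis_tree())
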
class MergeHooks(hooks.Hooks):

    def __init__(self):
        hooks.Hooks.__init__(self, "bzrlib.merge", "Merger.hooks")
        self.add_hook('merge_file_content',
            "Called with a bzrlib.merge.Merger object to create a per file "
            "merge object when starting a merge. "
            "Should return either None or a subclass of "
            "``bzrlib.merge.AbstractPerFileMerger``. "
            "Such objects will then be called per file "
            "that needs to be merged (including when one "
            "side has deleted the file and the other has changed it). "
            "See the AbstractPerFileMerger API docs for details on how it is "
            "used by merge.",
            (2, 1))

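# Illustrative sketch (not part of the original source): a plugin typically
# registers a factory on the 'merge_file_content' hook point declared above.
# The factory receives the Merger and returns an AbstractPerFileMerger
# subclass instance (or None).  The names below are hypothetical.
#
#   def my_merger_factory(merger):
#       return MyPerFileMerger(merger)
#
#   Merger.hooks.install_named_hook(
#       'merge_file_content', my_merger_factory, 'my plugin file merger')
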
class AbstractPerFileMerger(object):
    """PerFileMerger objects are used by plugins extending merge for bzrlib.

    See ``bzrlib.plugins.news_merge.news_merge`` for an example concrete class.

    :ivar merger: The Merge3Merger performing the merge.
    """

    def __init__(self, merger):
        """Create a PerFileMerger for use with merger."""
        self.merger = merger

    def merge_contents(self, merge_params):
        """Attempt to merge the contents of a single file.

        :param merge_params: A bzrlib.merge.MergeHookParams
        :return: A tuple of (status, chunks), where status is one of
            'not_applicable', 'success', 'conflicted', or 'delete'.  If status
            is 'success' or 'conflicted', then chunks should be an iterable of
            strings for the new file contents.
        """
        return ('not applicable', None)

class PerFileMerger(AbstractPerFileMerger):
    """Merge individual files when self.file_matches returns True.

    This class is intended to be subclassed.  The file_matches and
    merge_matching methods should be overridden with concrete implementations.
    """

    def file_matches(self, params):
        """Return True if merge_matching should be called on this file.

        Only called with merges of plain files with no clear winner.

        Subclasses must override this.
        """
        raise NotImplementedError(self.file_matches)

    def get_filename(self, params, tree):
        """Lookup the filename (i.e. basename, not path), given a Tree (e.g.
        self.merger.this_tree) and a MergeHookParams.
        """
        return osutils.basename(tree.id2path(params.file_id))

    def get_filepath(self, params, tree):
        """Calculate the path to the file in a tree.

        :param params: A MergeHookParams describing the file to merge
        :param tree: a Tree, e.g. self.merger.this_tree.
        """
        return tree.id2path(params.file_id)

    def merge_contents(self, params):
        """Merge the contents of a single file."""
        # Check whether this custom merge logic should be used.
        if (
            # OTHER is a straight winner, rely on default merge.
            params.winner == 'other' or
            # THIS and OTHER aren't both files.
            not params.is_file_merge() or
            # The filename doesn't match *.xml
            not self.file_matches(params)):
            return 'not_applicable', None
        return self.merge_matching(params)

    def merge_matching(self, params):
        """Merge the contents of a single file that has matched the criteria
        in PerFileMerger.merge_contents (is a conflict, is a file,
        self.file_matches is True).

        Subclasses must override this.
        """
        raise NotImplementedError(self.merge_matching)

class ConfigurableFileMerger(PerFileMerger):
    """Merge individual files when configured via a .conf file.

    This is a base class for concrete custom file merging logic. Concrete
    classes should implement ``merge_text``.

    See ``bzrlib.plugins.news_merge.news_merge`` for an example concrete class.

    :ivar affected_files: The configured file paths to merge.

    :cvar name_prefix: The prefix to use when looking up configuration
        details. <name_prefix>_merge_files describes the files targeted by the
        hook for example.

    :cvar default_files: The default file paths to merge when no configuration
        is present.
    """

    name_prefix = None
    default_files = None

    def __init__(self, merger):
        super(ConfigurableFileMerger, self).__init__(merger)
        self.affected_files = None
        self.default_files = self.__class__.default_files or []
        self.name_prefix = self.__class__.name_prefix
        if self.name_prefix is None:
            raise ValueError("name_prefix must be set.")

    def file_matches(self, params):
        """Check whether the file should call the merge hook.

        <name_prefix>_merge_files configuration variable is a list of files
        that should use the hook.
        """
        affected_files = self.affected_files
        if affected_files is None:
            config = self.merger.this_branch.get_config()
            # Until bzr provides a better policy for caching the config, we
            # just add the part we're interested in to the params to avoid
            # reading the config files repeatedly (bazaar.conf, location.conf,
            # branch.conf).
            config_key = self.name_prefix + '_merge_files'
            affected_files = config.get_user_option_as_list(config_key)
            if affected_files is None:
                # If nothing was specified in the config, use the default.
                affected_files = self.default_files
            self.affected_files = affected_files
        if affected_files:
            filepath = self.get_filepath(params, self.merger.this_tree)
            if filepath in affected_files:
                return True
        return False

    def merge_matching(self, params):
        return self.merge_text(params)

    def merge_text(self, params):
        """Merge the byte contents of a single file.

        This is called after checking that the merge should be performed in
        merge_contents, and it should behave as per
        ``bzrlib.merge.AbstractPerFileMerger.merge_contents``.
        """
        raise NotImplementedError(self.merge_text)

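# Illustrative sketch (not part of the original source): a concrete merger in
# the style of bzrlib.plugins.news_merge.  The class and file names here are
# hypothetical; files listed in the 'changelog_merge_files' branch option (or
# the default below) are routed through merge_text.
#
#   class ChangeLogMerger(ConfigurableFileMerger):
#       name_prefix = 'changelog'
#       default_files = ['ChangeLog']
#
#       def merge_text(self, params):
#           # Defer to the default merge algorithm unless we can do better.
#           return 'not_applicable', None
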
class MergeHookParams(object):
    """Object holding parameters passed to merge_file_content hooks.

    There are some fields hooks can access:

    :ivar file_id: the file ID of the file being merged
    :ivar trans_id: the transform ID for the merge of this file
    :ivar this_kind: kind of file_id in 'this' tree
    :ivar other_kind: kind of file_id in 'other' tree
    :ivar winner: one of 'this', 'other', 'conflict'
    """

    def __init__(self, merger, file_id, trans_id, this_kind, other_kind,
            winner):
        self._merger = merger
        self.file_id = file_id
        self.trans_id = trans_id
        self.this_kind = this_kind
        self.other_kind = other_kind
        self.winner = winner

    def is_file_merge(self):
        """True if this_kind and other_kind are both 'file'."""
        return self.this_kind == 'file' and self.other_kind == 'file'

    @decorators.cachedproperty
    def base_lines(self):
        """The lines of the 'base' version of the file."""
        return self._merger.get_lines(self._merger.base_tree, self.file_id)

    @decorators.cachedproperty
    def this_lines(self):
        """The lines of the 'this' version of the file."""
        return self._merger.get_lines(self._merger.this_tree, self.file_id)

    @decorators.cachedproperty
    def other_lines(self):
        """The lines of the 'other' version of the file."""
        return self._merger.get_lines(self._merger.other_tree, self.file_id)

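# Illustrative sketch (not part of the original source): inside a per-file
# merger, the hook parameters expose the three versions of the file through
# the cached properties above, e.g.
#
#   def merge_contents(self, params):
#       if params.this_lines == params.other_lines:
#           # Both sides made the same change; accept it cleanly.
#           return 'success', params.this_lines
#       return 'not_applicable', None
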
class Merger(object):

    hooks = MergeHooks()

    def __init__(self, this_branch, other_tree=None, base_tree=None,
                 this_tree=None, pb=None, change_reporter=None,
                 recurse='down', revision_graph=None):
        object.__init__(self)
        self.this_branch = this_branch
        self.this_basis = _mod_revision.ensure_null(
            this_branch.last_revision())
        if self.other_rev_id is None:
            other_basis_tree = self.revision_tree(self.other_basis)
            if other_basis_tree.has_changes(self.other_tree):
                raise errors.WorkingTreeNotRevision(self.this_tree)
            other_rev_id = self.other_basis
            self.other_tree = other_basis_tree
    @deprecated_method(deprecated_in((2, 1, 0)))
    def file_revisions(self, file_id):
        self.ensure_revision_trees()
        if self.this_rev_id is None:
            if self.this_basis_tree.get_file_sha1(file_id) != \
                self.this_tree.get_file_sha1(file_id):
                raise errors.WorkingTreeNotRevision(self.this_tree)

        trees = (self.this_basis_tree, self.other_tree)
        return [tree.get_file_revision(file_id) for tree in trees]
    @deprecated_method(deprecated_in((2, 1, 0)))
    def check_basis(self, check_clean, require_commits=True):
        if self.this_basis is None and require_commits is True:
            raise errors.BzrCommandError(
                "This branch has no commits."
                " (perhaps you would prefer 'bzr pull')")
        if check_clean:
            self.compare_basis()
            if self.this_basis != self.this_rev_id:
                raise errors.UncommittedChanges(self.this_tree)
    @deprecated_method(deprecated_in((2, 1, 0)))
    def compare_basis(self):
        try:
            basis_tree = self.revision_tree(self.this_tree.last_revision())
        except errors.NoSuchRevision:
            basis_tree = self.this_tree.basis_tree()
        if not self.this_tree.has_changes(basis_tree):
            self.this_rev_id = self.this_basis
    def set_interesting_files(self, file_list):
        self.interesting_files = file_list

    def set_pending(self):
        if (not self.base_is_ancestor or not self.base_is_other_ancestor
            or self.other_rev_id is None):
            return
        self._add_parent()
    def _add_parent(self):
        new_parents = self.this_tree.get_parent_ids() + [self.other_rev_id]
        new_parent_trees = []
        operation = cleanup.OperationWithCleanups(
            self.this_tree.set_parent_trees)
        for revision_id in new_parents:
            try:
                tree = self.revision_tree(revision_id)
            except errors.NoSuchRevision:
                tree = None
            else:
                tree.lock_read()
                operation.add_cleanup(tree.unlock)
            new_parent_trees.append((revision_id, tree))
        operation.run_simple(new_parent_trees, allow_leftmost_as_ghost=True)
    def set_other(self, other_revision, possible_transports=None):
        """Set the revision and tree to merge from."""

    def _maybe_fetch(self, source, target, revision_id):
        if not source.repository.has_same_location(target.repository):
            target.fetch(source, revision_id)
    def find_base(self):
        revisions = [_mod_revision.ensure_null(self.this_basis),
                     _mod_revision.ensure_null(self.other_basis)]
        if _mod_revision.NULL_REVISION in revisions:
            self.base_rev_id = _mod_revision.NULL_REVISION
            self.base_tree = self.revision_tree(self.base_rev_id)
            self._is_criss_cross = False
        else:
            lcas = self.revision_graph.find_lca(revisions[0], revisions[1])
            self._is_criss_cross = False
            if len(lcas) == 0:
                self.base_rev_id = _mod_revision.NULL_REVISION
            elif len(lcas) == 1:
                self.base_rev_id = list(lcas)[0]
            else: # len(lcas) > 1
                self._is_criss_cross = True
                if len(lcas) > 2:
                    # find_unique_lca can only handle 2 nodes, so we have to
                    # start back at the beginning. It is a shame to traverse
                    # the graph again, but better than re-implementing
                    # find_unique_lca.
                    self.base_rev_id = self.revision_graph.find_unique_lca(
                                            revisions[0], revisions[1])
                else:
                    self.base_rev_id = self.revision_graph.find_unique_lca(
                                            *lcas)
                sorted_lca_keys = self.revision_graph.find_merge_order(
                    revisions[0], lcas)
                if self.base_rev_id == _mod_revision.NULL_REVISION:
                    self.base_rev_id = sorted_lca_keys[0]

            if self.base_rev_id == _mod_revision.NULL_REVISION:
                raise errors.UnrelatedBranches()
            if self._is_criss_cross:
                trace.warning('Warning: criss-cross merge encountered. See bzr'
                              ' help criss-cross.')
                trace.mutter('Criss-cross lcas: %r' % lcas)
                if self.base_rev_id in lcas:
                    trace.mutter('Unable to find unique lca. '
                                 'Fallback %r as best option.'
                                 % self.base_rev_id)
                interesting_revision_ids = set(lcas)
                interesting_revision_ids.add(self.base_rev_id)
                interesting_trees = dict((t.get_revision_id(), t)
                    for t in self.this_branch.repository.revision_trees(
                        interesting_revision_ids))
                self._cached_trees.update(interesting_trees)
                if self.base_rev_id in lcas:
                    self.base_tree = interesting_trees[self.base_rev_id]
                else:
                    self.base_tree = interesting_trees.pop(self.base_rev_id)
                self._lca_trees = [interesting_trees[key]
                                   for key in sorted_lca_keys]
            else:
                self.base_tree = self.revision_tree(self.base_rev_id)
        self.base_is_ancestor = True
        self.base_is_other_ancestor = True
        trace.mutter('Base revid: %r' % self.base_rev_id)
    def set_base(self, base_revision):
        """Set the base revision to use for the merge.

        :param base_revision: A 2-list containing a path and revision number.
        """
        trace.mutter("doing merge() with no base_revision specified")
        if base_revision == [None, None]:
            self.find_base()
    def make_merger(self):
        kwargs = {'working_tree': self.this_tree, 'this_tree': self.this_tree,
                  'other_tree': self.other_tree,
                  'interesting_ids': self.interesting_ids,
                  'interesting_files': self.interesting_files,
                  'this_branch': self.this_branch,
                  'do_merge': False}
        if self.merge_type.requires_base:
            kwargs['base_tree'] = self.base_tree
        if self.merge_type.supports_reprocess:
            kwargs['reprocess'] = self.reprocess
        elif self.reprocess:
            raise errors.BzrError(
                "Conflict reduction is not supported for merge"
                " type %s." % self.merge_type)
        if self.merge_type.supports_show_base:
            kwargs['show_base'] = self.show_base
        elif self.show_base:
            raise errors.BzrError("Showing base is not supported for this"
                                  " merge type. %s" % self.merge_type)
        if (not getattr(self.merge_type, 'supports_reverse_cherrypick', True)
            and not self.base_is_other_ancestor):
            raise errors.CannotReverseCherrypick()
        if self.merge_type.supports_cherrypick:
            kwargs['cherrypick'] = (not self.base_is_ancestor or
                not self.base_is_other_ancestor)
        if self._is_criss_cross and getattr(self.merge_type,
                                            'supports_lca_trees', False):
            kwargs['lca_trees'] = self._lca_trees
        return self.merge_type(pb=None,
                               change_reporter=self.change_reporter,
                               **kwargs)
    def _do_merge_to(self):
        merge = self.make_merger()
        if self.other_branch is not None:
            self.other_branch.update_references(self.this_branch)
        merge.do_merge()
        if self.recurse == 'down':
            for relpath, file_id in self.this_tree.iter_references():
                sub_tree = self.this_tree.get_nested_tree(file_id, relpath)
                other_revision = self.other_tree.get_reference_revision(
                    file_id, relpath)
                if other_revision == sub_tree.last_revision():
                    continue
                sub_merge = Merger(sub_tree.branch, this_tree=sub_tree)
                sub_merge.merge_type = self.merge_type
                other_branch = self.other_branch.reference_parent(file_id, relpath)
                sub_merge.set_other_revision(other_revision, other_branch)
                base_revision = self.base_tree.get_reference_revision(file_id)
                sub_merge.base_tree = \
                    sub_tree.branch.repository.revision_tree(base_revision)
                sub_merge.base_rev_id = base_revision
                sub_merge.do_merge()
        return merge
    def do_merge(self):
        operation = cleanup.OperationWithCleanups(self._do_merge_to)
        self.this_tree.lock_tree_write()
        operation.add_cleanup(self.this_tree.unlock)
        if self.base_tree is not None:
            self.base_tree.lock_read()
            operation.add_cleanup(self.base_tree.unlock)
        if self.other_tree is not None:
            self.other_tree.lock_read()
            operation.add_cleanup(self.other_tree.unlock)
        merge = operation.run_simple()
        if len(merge.cooked_conflicts) == 0:
            if not self.ignore_zero and not trace.is_quiet():
                trace.note(gettext("All changes applied successfully."))
        else:
            trace.note(gettext("%d conflicts encountered.")
                       % len(merge.cooked_conflicts))

        return len(merge.cooked_conflicts)

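# Illustrative sketch (not part of the original source): a caller would wire a
# Merger up roughly like this.  The branch/tree objects and the location
# string are assumed to come from the caller, and set_other()'s argument shape
# (a [path, revision_number] 2-list) mirrors the set_base docstring above.
#
#   merger = Merger(this_tree.branch, this_tree=this_tree)
#   merger.set_other(['../other-branch', -1])
#   merger.find_base()
#   merger.merge_type = Merge3Merger
#   num_conflicts = merger.do_merge()
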
class _InventoryNoneEntry(object):
    """This represents an inventory entry which *isn't there*.

    It simplifies the merging logic if we always have an InventoryEntry, even
    if it isn't actually present
    """
    executable = None
    kind = None
    name = None
    parent_id = None
    revision = None
    symlink_target = None
    text_sha1 = None

_none_entry = _InventoryNoneEntry()

class Merge3Merger(object):
    """Three-way merger that uses the merge3 text merger"""
    requires_base = True
    supports_reprocess = True
    supports_show_base = True
    history_based = False
    supports_cherrypick = True
    supports_reverse_cherrypick = True
    winner_idx = {"this": 2, "other": 1, "conflict": 1}
    supports_lca_trees = True

    def __init__(self, working_tree, this_tree, base_tree, other_tree,
                 interesting_ids=None, reprocess=False, show_base=False,
                 pb=None, pp=None, change_reporter=None,
                 interesting_files=None, do_merge=True,
                 cherrypick=False, lca_trees=None, this_branch=None):
        """Initialize the merger object and perform the merge.

        :param interesting_files: The tree-relative paths of files that should
            participate in the merge.  May not be combined with
            interesting_ids.  If neither interesting_files nor
            interesting_ids is specified, all files may participate in the
            merge.
        :param lca_trees: Can be set to a dictionary of {revision_id:rev_tree}
            if the ancestry was found to include a criss-cross merge.
            Otherwise should be None.
        """
        object.__init__(self)
        if interesting_files is not None and interesting_ids is not None:
            raise ValueError(
                'specify either interesting_ids or interesting_files')
        if this_branch is None:
            this_branch = this_tree.branch
        self.interesting_ids = interesting_ids
        self.interesting_files = interesting_files
        self.this_tree = working_tree
        self.base_tree = base_tree
        self.other_tree = other_tree
        self.this_branch = this_branch
        self._raw_conflicts = []
        self.cooked_conflicts = []
        self.reprocess = reprocess
        self.show_base = show_base
        self._lca_trees = lca_trees
        # Uncommenting this will change the default algorithm to always use
        # _entries_lca. This can be useful for running the test suite and
        # making sure we haven't missed any corner cases.
        # if lca_trees is None:
        #     self._lca_trees = [self.base_tree]
        self.change_reporter = change_reporter
        self.cherrypick = cherrypick
        if do_merge:
            self.do_merge()
        if pp is not None:
            warnings.warn("pp argument to Merge3Merger is deprecated")
        if pb is not None:
            warnings.warn("pb argument to Merge3Merger is deprecated")
    def do_merge(self):
        operation = cleanup.OperationWithCleanups(self._do_merge)
        self.this_tree.lock_tree_write()
        operation.add_cleanup(self.this_tree.unlock)
        self.base_tree.lock_read()
        operation.add_cleanup(self.base_tree.unlock)
        self.other_tree.lock_read()
        operation.add_cleanup(self.other_tree.unlock)
        operation.run()

    def _do_merge(self, operation):
        self.tt = transform.TreeTransform(self.this_tree, None)
        operation.add_cleanup(self.tt.finalize)
        self._compute_transform()
        results = self.tt.apply(no_conflicts=True)
        self.write_modified(results)
        try:
            self.this_tree.add_conflicts(self.cooked_conflicts)
        except errors.UnsupportedOperation:
            pass
    def make_preview_transform(self):
        operation = cleanup.OperationWithCleanups(self._make_preview_transform)
        self.base_tree.lock_read()
        operation.add_cleanup(self.base_tree.unlock)
        self.other_tree.lock_read()
        operation.add_cleanup(self.other_tree.unlock)
        return operation.run_simple()

    def _make_preview_transform(self):
        self.tt = transform.TransformPreview(self.this_tree)
        self._compute_transform()
        return self.tt
    def _compute_transform(self):
        if self._lca_trees is None:
            entries = self._entries3()
            resolver = self._three_way
        else:
            entries = self._entries_lca()
            resolver = self._lca_multi_way
        child_pb = ui.ui_factory.nested_progress_bar()
        try:
            factories = Merger.hooks['merge_file_content']
            hooks = [factory(self) for factory in factories] + [self]
            self.active_hooks = [hook for hook in hooks if hook is not None]
            for num, (file_id, changed, parents3, names3,
                      executable3) in enumerate(entries):
                child_pb.update(gettext('Preparing file merge'), num, len(entries))
                self._merge_names(file_id, parents3, names3, resolver=resolver)
                if changed:
                    file_status = self._do_merge_contents(file_id)
                else:
                    file_status = 'unmodified'
                self._merge_executable(file_id,
                    executable3, file_status, resolver=resolver)
        finally:
            child_pb.finished()
        self.tt.fixup_new_roots()
        self._finish_computing_transform()

    def _finish_computing_transform(self):
        """Finalize the transform and report the changes.

        This is the second half of _compute_transform.
        """
        child_pb = ui.ui_factory.nested_progress_bar()
        try:
            fs_conflicts = transform.resolve_conflicts(self.tt, child_pb,
                lambda t, c: transform.conflict_pass(t, c, self.other_tree))
        finally:
            child_pb.finished()
        if self.change_reporter is not None:
            from bzrlib import delta
            delta.report_changes(
                self.tt.iter_changes(), self.change_reporter)
            result.append((file_id, changed, parents3, names3, executable3))
    def _entries_lca(self):
        """Gather data about files modified between multiple trees.

        This compares OTHER versus all LCA trees, and for interesting entries,
        it then compares with THIS and BASE.

        For the multi-valued entries, the format will be (BASE, [lca1, lca2])

        :return: [(file_id, changed, parents, names, executable)], where:

            * file_id: Simple file_id of the entry
            * changed: Boolean, True if the kind or contents changed else False
            * parents: ((base, [parent_id, in, lcas]), parent_id_other,
                        parent_id_this)
            * names:   ((base, [name, in, lcas]), name_in_other, name_in_this)
            * executable: ((base, [exec, in, lcas]), exec_in_other,
                        exec_in_this)
        """
        if self.interesting_files is not None:
            lookup_trees = [self.this_tree, self.base_tree]
            lookup_trees.extend(self._lca_trees)
            # I think we should include the lca trees as well
            interesting_ids = self.other_tree.paths2ids(self.interesting_files,
                                                        lookup_trees)
        else:
            interesting_ids = self.interesting_ids
        result = []
        walker = _mod_tree.MultiWalker(self.other_tree, self._lca_trees)

        base_inventory = self.base_tree.inventory
        this_inventory = self.this_tree.inventory
        for path, file_id, other_ie, lca_values in walker.iter_all():
            # Is this modified at all from any of the other trees?
            if other_ie is None:
                other_ie = _none_entry
            if interesting_ids is not None and file_id not in interesting_ids:
                continue

            # If other_revision is found in any of the lcas, that means this
            # node is uninteresting. This is because when merging, if there are
            # multiple heads(), we have to create a new node. So if we didn't,
            # we know that the ancestry is linear, and that OTHER did not
            # modify anything
            # See doc/developers/lca_merge_resolution.txt for details
            other_revision = other_ie.revision
            if other_revision is not None:
                # We can't use this shortcut when other_revision is None,
                # because it may be None because things are WorkingTrees, and
                # not because it is *actually* None.
                is_unmodified = False
                for lca_path, ie in lca_values:
                    if ie is not None and ie.revision == other_revision:
                        is_unmodified = True
                        break
                if is_unmodified:
                    continue

            lca_entries = []
            for lca_path, lca_ie in lca_values:
                if lca_ie is None:
                    lca_entries.append(_none_entry)
                else:
                    lca_entries.append(lca_ie)

            if base_inventory.has_id(file_id):
                base_ie = base_inventory[file_id]
            else:
                base_ie = _none_entry

            if this_inventory.has_id(file_id):
                this_ie = this_inventory[file_id]
            else:
                this_ie = _none_entry

            lca_kinds = []
            lca_parent_ids = []
            lca_names = []
            lca_executable = []
            for lca_ie in lca_entries:
                lca_kinds.append(lca_ie.kind)
                lca_parent_ids.append(lca_ie.parent_id)
                lca_names.append(lca_ie.name)
                lca_executable.append(lca_ie.executable)

            kind_winner = self._lca_multi_way(
                (base_ie.kind, lca_kinds),
                other_ie.kind, this_ie.kind)
            parent_id_winner = self._lca_multi_way(
                (base_ie.parent_id, lca_parent_ids),
                other_ie.parent_id, this_ie.parent_id)
            name_winner = self._lca_multi_way(
                (base_ie.name, lca_names),
                other_ie.name, this_ie.name)

            content_changed = True
            if kind_winner == 'this':
                # No kind change in OTHER, see if there are *any* changes
                if other_ie.kind == 'directory':
                    if parent_id_winner == 'this' and name_winner == 'this':
                        # No change for this directory in OTHER, skip
                        continue
                    content_changed = False
                elif other_ie.kind is None or other_ie.kind == 'file':
                    def get_sha1(ie, tree):
                        if ie.kind != 'file':
                            return None
                        return tree.get_file_sha1(file_id)
                    base_sha1 = get_sha1(base_ie, self.base_tree)
                    lca_sha1s = [get_sha1(ie, tree) for ie, tree
                                 in zip(lca_entries, self._lca_trees)]
                    this_sha1 = get_sha1(this_ie, self.this_tree)
                    other_sha1 = get_sha1(other_ie, self.other_tree)
                    sha1_winner = self._lca_multi_way(
                        (base_sha1, lca_sha1s), other_sha1, this_sha1,
                        allow_overriding_lca=False)
                    exec_winner = self._lca_multi_way(
                        (base_ie.executable, lca_executable),
                        other_ie.executable, this_ie.executable)
                    if (parent_id_winner == 'this' and name_winner == 'this'
                        and sha1_winner == 'this' and exec_winner == 'this'):
                        # No kind, parent, name, exec, or content change for
                        # OTHER, so this node is not considered interesting
                        continue
                    if sha1_winner == 'this':
                        content_changed = False
                elif other_ie.kind == 'symlink':
                    def get_target(ie, tree):
                        if ie.kind != 'symlink':
                            return None
                        return tree.get_symlink_target(file_id)
                    base_target = get_target(base_ie, self.base_tree)
                    lca_targets = [get_target(ie, tree) for ie, tree
                                   in zip(lca_entries, self._lca_trees)]
                    this_target = get_target(this_ie, self.this_tree)
                    other_target = get_target(other_ie, self.other_tree)
                    target_winner = self._lca_multi_way(
                        (base_target, lca_targets),
                        other_target, this_target)
                    if (parent_id_winner == 'this' and name_winner == 'this'
                        and target_winner == 'this'):
                        # No kind, parent, name, or symlink target change
                        # not interesting
                        continue
                    if target_winner == 'this':
                        content_changed = False
                elif other_ie.kind == 'tree-reference':
                    # The 'changed' information seems to be handled at a higher
                    # level. At least, _entries3 returns False for content
                    # changed, even when at a new revision_id.
                    content_changed = False
                    if (parent_id_winner == 'this' and name_winner == 'this'):
                        # Nothing interesting
                        continue
                else:
                    raise AssertionError('unhandled kind: %s' % other_ie.kind)

            # If we have gotten this far, that means something has changed
            result.append((file_id, content_changed,
                           ((base_ie.parent_id, lca_parent_ids),
                            other_ie.parent_id, this_ie.parent_id),
                           ((base_ie.name, lca_names),
                            other_ie.name, this_ie.name),
                           ((base_ie.executable, lca_executable),
                            other_ie.executable, this_ie.executable)
                          ))
        return result
    @deprecated_method(deprecated_in((2, 4, 0)))
    def fix_root(self):
        if self.tt.final_kind(self.tt.root) is None:
            self.tt.cancel_deletion(self.tt.root)
        if self.tt.final_file_id(self.tt.root) is None:
            self.tt.version_file(self.tt.tree_file_id(self.tt.root),
                                 self.tt.root)
        other_root_file_id = self.other_tree.get_root_id()
        if other_root_file_id is None:
            return
        other_root = self.tt.trans_id_file_id(other_root_file_id)
        if other_root == self.tt.root:
            return
        if self.this_tree.inventory.has_id(
            self.other_tree.inventory.root.file_id):
            # the other tree's root is a non-root in the current tree (as
            # when a previously unrelated branch is merged into another)
            return
        if self.tt.final_kind(other_root) is not None:
            other_root_is_present = True
        else:
            # other_root doesn't have a physical representation. We still need
            # to move any references to the actual root of the tree.
            other_root_is_present = False
        # 'other_tree.inventory.root' is not present in this tree. We are
        # calling adjust_path for children which *want* to be present with a
        # correct place to go.
        for _, child in self.other_tree.inventory.root.children.iteritems():
            trans_id = self.tt.trans_id_file_id(child.file_id)
            if not other_root_is_present:
                if self.tt.final_kind(trans_id) is not None:
                    # The item exist in the final tree and has a defined place
                    # to go already.
                    continue
                # Move the item into the root
                try:
                    final_name = self.tt.final_name(trans_id)
                except errors.NoFinalPath:
                    # This file is not present anymore, ignore it.
                    continue
                self.tt.adjust_path(final_name, self.tt.root, trans_id)
        if other_root_is_present:
            self.tt.cancel_creation(other_root)
            self.tt.cancel_versioning(other_root)
    def write_modified(self, results):
        modified_hashes = {}

    @staticmethod
    def kind(tree, file_id):
        """Determine the kind of a file-id (used as a key method)."""
        if not tree.has_id(file_id):
            return None
        return tree.kind(file_id)
    @staticmethod
    def _three_way(base, other, this):
        if base == other:
            # if 'base == other', either they all agree, or only 'this' has
            # changed.
            return 'this'
        elif this not in (base, other):
            # 'this' is neither 'base' nor 'other', so both sides changed
            return 'conflict'
        elif this == other:
            # "Ambiguous clean merge" -- both sides have made the same change.
            return "this"
        else:
            # this == base: only other has changed.
            return "other"
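    # Worked example (illustrative, not part of the original source): with
    # base='a', other='b', this='a' only OTHER changed, so _three_way returns
    # 'other'; with base='a', other='b', this='c' both sides changed and the
    # result is 'conflict'; identical edits (this == other == 'b') are an
    # ambiguous clean merge and resolve to 'this'.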
    @staticmethod
    def _lca_multi_way(bases, other, this, allow_overriding_lca=True):
        """Consider LCAs when determining whether a change has occurred.

        If LCAS are all identical, this is the same as a _three_way comparison.

        :param bases: value in (BASE, [LCAS])
        :param other: value in OTHER
        :param this: value in THIS
        :param allow_overriding_lca: If there is more than one unique lca
            value, allow OTHER to override THIS if it has a new value, and
            THIS only has an lca value, or vice versa. This is appropriate for
            truly scalar values, not as much for non-scalars.
        :return: 'this', 'other', or 'conflict' depending on whether an entry
            changed or not.
        """
        # See doc/developers/lca_tree_merging.txt for details about this
        # algorithm.
        if other == this:
            # Either Ambiguously clean, or nothing was actually changed. We
            # don't need to update anything
            return 'this'
        base_val, lca_vals = bases
        # Remove 'base_val' from the lca_vals, because it is not interesting
        filtered_lca_vals = [lca_val for lca_val in lca_vals
                             if lca_val != base_val]
        if len(filtered_lca_vals) == 0:
            return Merge3Merger._three_way(base_val, other, this)

        unique_lca_vals = set(filtered_lca_vals)
        if len(unique_lca_vals) == 1:
            return Merge3Merger._three_way(unique_lca_vals.pop(), other, this)

        if allow_overriding_lca:
            if other in unique_lca_vals:
                if this in unique_lca_vals:
                    # Each side picked a different lca, conflict
                    return 'conflict'
                else:
                    # This has a value which supersedes both lca values, and
                    # other only has an lca value
                    return 'this'
            elif this in unique_lca_vals:
                # OTHER has a value which supersedes both lca values, and this
                # only has an lca value
                return 'other'

        # At this point, the lcas disagree, and the tip disagree
        return 'conflict'
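    # Worked example (illustrative, not part of the original source): in a
    # criss-cross merge with bases=('a', ['b', 'c']), other='b', this='c',
    # each tip kept a different LCA value, so the result is 'conflict'.  With
    # bases=('a', ['b', 'b']), other='b', this='d', the single unique LCA
    # value is superseded by THIS, so 'this' wins.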
    @deprecated_method(deprecated_in((2, 2, 0)))
    def scalar_three_way(this_tree, base_tree, other_tree, file_id, key):
        """Do a three-way test on a scalar.
        Return "this", "other" or "conflict", depending whether a value wins.
        """
                parent_id_winner = "other"
        if name_winner == "this" and parent_id_winner == "this":
            return
        if name_winner == 'conflict' or parent_id_winner == 'conflict':
            # Creating helpers (.OTHER or .THIS) here cause problems down the
            # road if a ContentConflict needs to be created so we should not do
            # that
            trans_id = self.tt.trans_id_file_id(file_id)
            self._raw_conflicts.append(('path conflict', trans_id, file_id,
                                        this_parent, this_name,
                                        other_parent, other_name))
        if not self.other_tree.has_id(file_id):
            # it doesn't matter whether the result was 'other' or
            # 'conflict'-- if it has no file id, we leave it alone.
            return
        parent_id = parents[self.winner_idx[parent_id_winner]]
        name = names[self.winner_idx[name_winner]]
        if parent_id is not None or name is not None:
            # if we get here, name_winner and parent_winner are set to safe
            # values.
            if parent_id is None and name is not None:
                # if parent_id is None and name is non-None, current file is
                # the tree root.
                if names[self.winner_idx[parent_id_winner]] != '':
                    raise AssertionError(
                        'File looks like a root, but named %s' %
                        names[self.winner_idx[parent_id_winner]])
                parent_trans_id = transform.ROOT_PARENT
            else:
                parent_trans_id = self.tt.trans_id_file_id(parent_id)
            self.tt.adjust_path(name, parent_trans_id,
                                self.tt.trans_id_file_id(file_id))
    def _do_merge_contents(self, file_id):
        """Performs a merge on file_id contents."""
        def contents_pair(tree):
            if not tree.has_id(file_id):
                return (None, None)
            kind = tree.kind(file_id)
            if kind == "file":
                contents = tree.get_file_sha1(file_id)
            elif kind == "symlink":
                contents = tree.get_symlink_target(file_id)
            else:
                contents = None
            return kind, contents

        # See SPOT run. run, SPOT, run.
        # So we're not QUITE repeating ourselves; we do tricky things with
        # file kind...
        base_pair = contents_pair(self.base_tree)
        other_pair = contents_pair(self.other_tree)
        if self._lca_trees:
            this_pair = contents_pair(self.this_tree)
            lca_pairs = [contents_pair(tree) for tree in self._lca_trees]
            winner = self._lca_multi_way((base_pair, lca_pairs), other_pair,
                                         this_pair, allow_overriding_lca=False)
        else:
            if base_pair == other_pair:
                winner = 'this'
            else:
                # We delayed evaluating this_pair as long as we can to avoid
                # unnecessary sha1 calculation
                this_pair = contents_pair(self.this_tree)
                winner = self._three_way(base_pair, other_pair, this_pair)
        if winner == 'this':
            # No interesting changes introduced by OTHER
            return "unmodified"
        # We have a hypothetical conflict, but if we have files, then we
        # can try to merge the content
        trans_id = self.tt.trans_id_file_id(file_id)
        params = MergeHookParams(self, file_id, trans_id, this_pair[0],
            other_pair[0], winner)
        hooks = self.active_hooks
        hook_status = 'not_applicable'
        for hook in hooks:
            hook_status, lines = hook.merge_contents(params)
            if hook_status != 'not_applicable':
                # Don't try any more hooks, this one applies.
                break
        result = "modified"
        if hook_status == 'not_applicable':
            # This is a contents conflict, because none of the available
            # functions could merge it.
            result = None
            name = self.tt.final_name(trans_id)
            parent_id = self.tt.final_parent(trans_id)
            if self.this_tree.has_id(file_id):
                self.tt.unversion_file(trans_id)
            file_group = self._dump_conflicts(name, parent_id, file_id,
                                              set_version=True)
            self._raw_conflicts.append(('contents conflict', file_group))
        elif hook_status == 'success':
            self.tt.create_file(lines, trans_id)
        elif hook_status == 'conflicted':
            # XXX: perhaps the hook should be able to provide
            # the BASE/THIS/OTHER files?
            self.tt.create_file(lines, trans_id)
            self._raw_conflicts.append(('text conflict', trans_id))
            name = self.tt.final_name(trans_id)
            parent_id = self.tt.final_parent(trans_id)
            self._dump_conflicts(name, parent_id, file_id)
        elif hook_status == 'delete':
            self.tt.unversion_file(trans_id)
            result = "deleted"
        elif hook_status == 'done':
            # The hook function did whatever it needs to do directly, no
            # further action needed here.
            pass
        else:
            raise AssertionError('unknown hook_status: %r' % (hook_status,))
        if not self.this_tree.has_id(file_id) and result == "modified":
            self.tt.version_file(file_id, trans_id)
        # The merge has been performed, so the old contents should not be
        # retained.
        self.tt.delete_contents(trans_id)
        return result
    def _default_other_winner_merge(self, merge_hook_params):
        """Replace this contents with other."""
        file_id = merge_hook_params.file_id
        trans_id = merge_hook_params.trans_id
        file_in_this = self.this_tree.has_id(file_id)
        if self.other_tree.has_id(file_id):
            # OTHER changed the file
            wt = self.this_tree
            if wt.supports_content_filtering():
                # We get the path from the working tree if it exists.
                # That fails though when OTHER is adding a file, so
                # we fall back to the other tree to find the path if
                # it doesn't exist locally.
                try:
                    filter_tree_path = wt.id2path(file_id)
                except errors.NoSuchId:
                    filter_tree_path = self.other_tree.id2path(file_id)
            else:
                # Skip the id2path lookup for older formats
                filter_tree_path = None
            transform.create_from_tree(self.tt, trans_id,
                             self.other_tree, file_id,
                             filter_tree_path=filter_tree_path)
            return 'done', None
        elif file_in_this:
            # OTHER deleted the file
            return 'delete', None
        else:
            raise AssertionError(
                'winner is OTHER, but file_id %r not in THIS or OTHER tree'
                % (file_id,))

    def merge_contents(self, merge_hook_params):
        """Fallback merge logic after user installed hooks."""
        # This function is used in merge hooks as the fallback instance.
        # Perhaps making this function and the functions it calls be a
        # a separate class would be better.
        if merge_hook_params.winner == 'other':
            # OTHER is a straight winner, so replace this contents with other
            return self._default_other_winner_merge(merge_hook_params)
        elif merge_hook_params.is_file_merge():
            # THIS and OTHER are both files, so text merge.  Either
            # BASE is a file, or both converted to files, so at least we
            # have agreement that output should be a file.
            try:
                self.text_merge(merge_hook_params.file_id,
                    merge_hook_params.trans_id)
            except errors.BinaryFile:
                return 'not_applicable', None
            return 'done', None
        else:
            return 'not_applicable', None
    def get_lines(self, tree, file_id):
        """Return the lines in a file, or an empty list."""
        if tree.has_id(file_id):
            return tree.get_file_lines(file_id)
        else:
            return []
    def _dump_conflicts(self, name, parent_id, file_id, this_lines=None,
                        base_lines=None, other_lines=None, set_version=False,
                        no_base=False):
        """Emit conflict files.
        If this_lines, base_lines, or other_lines are omitted, they will be
        determined automatically. If set_version is true, the .OTHER, .THIS
        or .BASE (in that order) will be created as versioned files.
        """
        data = [('OTHER', self.other_tree, other_lines),
                ('THIS', self.this_tree, this_lines)]
        if not no_base:
            data.append(('BASE', self.base_tree, base_lines))

        # We need to use the actual path in the working tree of the file here,
        # ignoring the conflict suffixes
        wt = self.this_tree
        if wt.supports_content_filtering():
            try:
                filter_tree_path = wt.id2path(file_id)
            except errors.NoSuchId:
                # file has been deleted
                filter_tree_path = None
        else:
            # Skip the id2path lookup for older formats
            filter_tree_path = None

        versioned = False
        file_group = []
        for suffix, tree, lines in data:
            if tree.has_id(file_id):
                trans_id = self._conflict_file(name, parent_id, tree, file_id,
                                               suffix, lines, filter_tree_path)
                file_group.append(trans_id)
                if set_version and not versioned:
                    self.tt.version_file(file_id, trans_id)
                    versioned = True
        return file_group
    def _conflict_file(self, name, parent_id, tree, file_id, suffix,
                       lines=None, filter_tree_path=None):
        """Emit a single conflict file."""
        name = name + '.' + suffix
        trans_id = self.tt.create_path(name, parent_id)
        transform.create_from_tree(self.tt, trans_id, tree, file_id, lines,
            filter_tree_path)
        return trans_id
    def merge_executable(self, file_id, file_status):
        """Perform a merge on the execute bit."""
        executable = [self.executable(t, file_id) for t in (self.base_tree,
                      self.other_tree, self.this_tree)]
        self._merge_executable(file_id, executable, file_status,
                               resolver=self._three_way)

    def _merge_executable(self, file_id, executable, file_status,
                          resolver):
        """Perform a merge on the execute bit."""
        base_executable, other_executable, this_executable = executable
        if file_status == "deleted":
            return
        winner = resolver(*executable)
        if winner == "conflict":
            # There must be a None in here, if we have a conflict, but we
            # need executability since file status was not deleted.
            if self.executable(self.other_tree, file_id) is None:
                winner = "this"
            else:
                winner = "other"
        if winner == 'this' and file_status != "modified":
            return
        trans_id = self.tt.trans_id_file_id(file_id)
        if self.tt.final_kind(trans_id) != "file":
            return
        if winner == "this":
            executability = this_executable
        else:
            if self.other_tree.has_id(file_id):
                executability = other_executable
            elif self.this_tree.has_id(file_id):
                executability = this_executable
            elif self.base_tree.has_id(file_id):
                executability = base_executable
        if executability is not None:
            trans_id = self.tt.trans_id_file_id(file_id)
            self.tt.set_executability(executability, trans_id)
    def cook_conflicts(self, fs_conflicts):
        """Convert all conflicts into a form that doesn't depend on trans_id"""
        content_conflict_file_ids = set()
        cooked_conflicts = transform.cook_conflicts(fs_conflicts, self.tt)
        fp = transform.FinalPaths(self.tt)
        for conflict in self._raw_conflicts:
            conflict_type = conflict[0]
            if conflict_type == 'path conflict':
                (trans_id, file_id,
                 this_parent, this_name,
                 other_parent, other_name) = conflict[1:]
                if this_parent is None or this_name is None:
                    this_path = '<deleted>'
                else:
                    parent_path = fp.get_path(
                        self.tt.trans_id_file_id(this_parent))
                    this_path = osutils.pathjoin(parent_path, this_name)
                if other_parent is None or other_name is None:
                    other_path = '<deleted>'
                else:
                    if other_parent == self.other_tree.get_root_id():
                        # The tree transform doesn't know about the other root,
                        # so we special case here to avoid a NoFinalPath
                        # exception
                        parent_path = ''
                    else:
                        parent_path = fp.get_path(
                            self.tt.trans_id_file_id(other_parent))
                    other_path = osutils.pathjoin(parent_path, other_name)
                c = _mod_conflicts.Conflict.factory(
                    'path conflict', path=this_path,
                    conflict_path=other_path,
                    file_id=file_id)
            elif conflict_type == 'contents conflict':
                for trans_id in conflict[1]:
                    file_id = self.tt.final_file_id(trans_id)
                    if file_id is not None:
                        break
                path = fp.get_path(trans_id)
                for suffix in ('.BASE', '.THIS', '.OTHER'):
                    if path.endswith(suffix):
                        path = path[:-len(suffix)]
                        break
                c = _mod_conflicts.Conflict.factory(conflict_type,
                                                    path=path, file_id=file_id)
                content_conflict_file_ids.add(file_id)
            elif conflict_type == 'text conflict':
                trans_id = conflict[1]
                path = fp.get_path(trans_id)
                file_id = self.tt.final_file_id(trans_id)
                c = _mod_conflicts.Conflict.factory(conflict_type,
                                                    path=path, file_id=file_id)
            else:
                raise AssertionError('bad conflict type: %r' % (conflict,))
            cooked_conflicts.append(c)

        self.cooked_conflicts = []
        # We want to get rid of path conflicts when a corresponding contents
        # conflict exists. This can occur when one branch deletes a file while
        # the other renames *and* modifies it. In this case, the content
        # conflict is enough.
        for c in cooked_conflicts:
            if (c.typestring == 'path conflict'
                and c.file_id in content_conflict_file_ids):
                continue
            self.cooked_conflicts.append(c)
        self.cooked_conflicts.sort(key=_mod_conflicts.Conflict.sort_key)

class WeaveMerger(Merge3Merger):
    """Three-way tree merger, text weave merger."""
    supports_reprocess = True
    supports_show_base = False
    supports_reverse_cherrypick = False
    history_based = True

    def _generate_merge_plan(self, file_id, base):
        return self.this_tree.plan_file_merge(file_id, self.other_tree,
                                              base=base)

    def _merged_lines(self, file_id):
        """Generate the merged lines.
        There is no distinction between lines that are meant to contain <<<<<<<
        and conflicts.
        """
        if self.cherrypick:
            base = self.base_tree
        else:
            base = None
        plan = self._generate_merge_plan(file_id, base)
        if 'merge' in debug.debug_flags:
            plan = list(plan)
            trans_id = self.tt.trans_id_file_id(file_id)
            name = self.tt.final_name(trans_id) + '.plan'
            contents = ('%11s|%s' % l for l in plan)
            self.tt.new_file(name, self.tt.final_parent(trans_id), contents)
        textmerge = versionedfile.PlanWeaveMerge(plan, '<<<<<<< TREE\n',
                                                 '>>>>>>> MERGE-SOURCE\n')
        lines, conflicts = textmerge.merge_lines(self.reprocess)
        if conflicts:
            base_lines = textmerge.base_from_plan()
        else:
            base_lines = None
        return lines, base_lines

    def text_merge(self, file_id, trans_id):
        """Perform a (weave) text merge for a given file and file-id.
        If conflicts are encountered, .THIS and .OTHER files will be emitted,
        and a conflict will be noted.
        """
        lines, base_lines = self._merged_lines(file_id)
        lines = list(lines)
        # Note we're checking whether the OUTPUT is binary in this case,
        # because we don't want to get into weave merge guts.
        textfile.check_text_lines(lines)
        self.tt.create_file(lines, trans_id)
        if base_lines is not None:
            # Conflict
            self._raw_conflicts.append(('text conflict', trans_id))
            name = self.tt.final_name(trans_id)
            parent_id = self.tt.final_parent(trans_id)
            file_group = self._dump_conflicts(name, parent_id, file_id,
                                              base_lines=base_lines)
            file_group.append(trans_id)

class LCAMerger(WeaveMerger):

    def _generate_merge_plan(self, file_id, base):
        return self.this_tree.plan_file_lca_merge(file_id, self.other_tree,
                                                  base=base)

class Diff3Merger(Merge3Merger):
    """Three-way merger using external diff3 for text merging"""

    def dump_file(self, temp_dir, name, tree, file_id):
        out_path = osutils.pathjoin(temp_dir, name)
        out_file = open(out_path, "wb")
        try:
            in_file = tree.get_file(file_id)
            for line in in_file:
                out_file.write(line)
        finally:
            out_file.close()
        return out_path

            osutils.rmtree(temp_dir)

class PathNotInTree(errors.BzrError):

    _fmt = """Merge-into failed because %(tree)s does not contain %(path)s."""

    def __init__(self, path, tree):
        errors.BzrError.__init__(self, path=path, tree=tree)

1804
"""Merger that understands other_tree will be merged into a subdir.
1806
This also changes the Merger api so that it uses real Branch, revision_id,
1807
and RevisonTree objects, rather than using revision specs.
1810
def __init__(self, this_tree, other_branch, other_tree, target_subdir,
1811
source_subpath, other_rev_id=None):
1812
"""Create a new MergeIntoMerger object.
1814
source_subpath in other_tree will be effectively copied to
1815
target_subdir in this_tree.
1817
:param this_tree: The tree that we will be merging into.
1818
:param other_branch: The Branch we will be merging from.
1819
:param other_tree: The RevisionTree object we want to merge.
1820
:param target_subdir: The relative path where we want to merge
1821
other_tree into this_tree
1822
:param source_subpath: The relative path specifying the subtree of
1823
other_tree to merge into this_tree.
1825
# It is assumed that we are merging a tree that is not in our current
1826
# ancestry, which means we are using the "EmptyTree" as our basis.
1827
null_ancestor_tree = this_tree.branch.repository.revision_tree(
1828
_mod_revision.NULL_REVISION)
1829
super(MergeIntoMerger, self).__init__(
1830
this_branch=this_tree.branch,
1831
this_tree=this_tree,
1832
other_tree=other_tree,
1833
base_tree=null_ancestor_tree,
1835
self._target_subdir = target_subdir
1836
self._source_subpath = source_subpath
1837
self.other_branch = other_branch
1838
if other_rev_id is None:
1839
other_rev_id = other_tree.get_revision_id()
1840
self.other_rev_id = self.other_basis = other_rev_id
1841
self.base_is_ancestor = True
1842
self.backup_files = True
1843
self.merge_type = Merge3Merger
1844
self.show_base = False
1845
self.reprocess = False
1846
self.interesting_ids = None
1847
self.merge_type = _MergeTypeParameterizer(MergeIntoMergeType,
1848
target_subdir=self._target_subdir,
1849
source_subpath=self._source_subpath)
1850
if self._source_subpath != '':
1851
# If this isn't a partial merge make sure the revisions will be
1853
self._maybe_fetch(self.other_branch, self.this_branch,
1856
def set_pending(self):
1857
if self._source_subpath != '':
1859
Merger.set_pending(self)
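# Editorial sketch (not part of either revision): roughly how the class above
# is driven for a "merge into a subdirectory".  The working tree `wt`, the
# `other_branch`/`other_tree` objects and the paths are hypothetical.
#
#   merger = MergeIntoMerger(this_tree=wt, other_branch=other_branch,
#                            other_tree=other_tree,
#                            target_subdir='vendor/lib',
#                            source_subpath='')
#   conflicts = merger.do_merge()
#   merger.set_pending()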
 
 
+class _MergeTypeParameterizer(object):
+    """Wrap a merge-type class to provide extra parameters.
+
+    This is a hack used by MergeIntoMerger to pass some extra parameters to
+    its merge_type.  Merger.do_merge() sets up its own set of parameters to
+    pass to the 'merge_type' member.  It is difficult to override do_merge
+    without re-writing the whole thing, so instead we create a wrapper which
+    will pass the extra parameters.
+    """
+
+    def __init__(self, merge_type, **kwargs):
+        self._extra_kwargs = kwargs
+        self._merge_type = merge_type
+
+    def __call__(self, *args, **kwargs):
+        kwargs.update(self._extra_kwargs)
+        return self._merge_type(*args, **kwargs)
+
+    def __getattr__(self, name):
+        return getattr(self._merge_type, name)
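# Editorial sketch (not part of either revision): the wrapper above simply
# curries extra keyword arguments into the merge_type constructor, e.g.
#
#   merge_type = _MergeTypeParameterizer(MergeIntoMergeType,
#                                        target_subdir='vendor/lib',
#                                        source_subpath='')
#   # Merger.do_merge() later calls merge_type(...) with its usual arguments;
#   # __call__ injects target_subdir/source_subpath, while attribute lookups
#   # such as merge_type.supports_reprocess fall through via __getattr__.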
 
 
+class MergeIntoMergeType(Merge3Merger):
+    """Merger that incorporates a tree (or part of a tree) into another."""
+
+    def __init__(self, *args, **kwargs):
+        """Initialize the merger object.
+
+        :param args: See Merge3Merger.__init__'s args.
+        :param kwargs: See Merge3Merger.__init__'s keyword args, except for
+            source_subpath and target_subdir.
+        :keyword source_subpath: The relative path specifying the subtree of
+            other_tree to merge into this_tree.
+        :keyword target_subdir: The relative path where we want to merge
+            other_tree into this_tree
+        """
+        # All of the interesting work happens during Merge3Merger.__init__(),
+        # so we have to hack in to get our extra parameters set.
+        self._source_subpath = kwargs.pop('source_subpath')
+        self._target_subdir = kwargs.pop('target_subdir')
+        super(MergeIntoMergeType, self).__init__(*args, **kwargs)
 
+    def _compute_transform(self):
+        child_pb = ui.ui_factory.nested_progress_bar()
+        try:
+            entries = self._entries_to_incorporate()
+            entries = list(entries)
+            for num, (entry, parent_id) in enumerate(entries):
+                child_pb.update(gettext('Preparing file merge'), num, len(entries))
+                parent_trans_id = self.tt.trans_id_file_id(parent_id)
+                trans_id = transform.new_by_entry(self.tt, entry,
+                    parent_trans_id, self.other_tree)
+        finally:
+            child_pb.finished()
+        self._finish_computing_transform()
 
+    def _entries_to_incorporate(self):
+        """Yields pairs of (inventory_entry, new_parent)."""
+        other_inv = self.other_tree.inventory
+        subdir_id = other_inv.path2id(self._source_subpath)
+        if subdir_id is None:
+            # XXX: The error would be clearer if it gave the URL of the source
+            # branch, but we don't have a reference to that here.
+            raise PathNotInTree(self._source_subpath, "Source tree")
+        subdir = other_inv[subdir_id]
+        parent_in_target = osutils.dirname(self._target_subdir)
+        target_id = self.this_tree.inventory.path2id(parent_in_target)
+        if target_id is None:
+            raise PathNotInTree(self._target_subdir, "Target tree")
+        name_in_target = osutils.basename(self._target_subdir)
+        merge_into_root = subdir.copy()
+        merge_into_root.name = name_in_target
+        if self.this_tree.inventory.has_id(merge_into_root.file_id):
+            # Give the root a new file-id.
+            # This can happen fairly easily if the directory we are
+            # incorporating is the root, and both trees have 'TREE_ROOT' as
+            # their root_id.  Users will expect this to Just Work, so we
+            # change the file-id here.
+            # Non-root file-ids could potentially conflict too.  That's really
+            # an edge case, so we don't do anything special for those.  We let
+            # them cause conflicts.
+            merge_into_root.file_id = generate_ids.gen_file_id(name_in_target)
+        yield (merge_into_root, target_id)
+        if subdir.kind != 'directory':
+            # No children, so we are done.
+            return
+        for ignored_path, entry in other_inv.iter_entries_by_dir(subdir_id):
+            parent_id = entry.parent_id
+            if parent_id == subdir.file_id:
+                # The root's parent ID has changed, so make sure children of
+                # the root refer to the new ID.
+                parent_id = merge_into_root.file_id
+            yield (entry, parent_id)
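# Editorial sketch (not part of either revision): what the generator above
# yields for a hypothetical other_tree containing 'src/' and 'src/a.c', when
# target_subdir='vendor/src':
#
#   (copy of the 'src' directory entry, renamed to 'src',
#    parent = file-id of 'vendor' in this_tree)
#   (entry for 'a.c',
#    parent = file-id of that, possibly re-identified, root entry)
#
# i.e. the first pair re-parents the subtree root under the target directory,
# and every child of the old root is re-parented onto the new root id.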
 
 
 def merge_inner(this_branch, other_tree, base_tree, ignore_zero=False,
                 backup_files=False,
                 merge_type=Merge3Merger,
 
             yield status_a(revision, text)
         for revision, text in annotated_b[b_cur:bi]:
             yield status_b(revision, text)
         # and now the matched section
-        for text_a, text_b in zip(plain_a[ai:a_cur], plain_b[bi:b_cur]):
-            assert text_a == text_b
+        for text_a in plain_a[ai:a_cur]:
             yield "unchanged", text_a
 
 
 class _PlanMergeBase(object):
 
-    def __init__(self, a_rev, b_rev, vf):
+    def __init__(self, a_rev, b_rev, vf, key_prefix):
         """Constructor.
 
         :param a_rev: Revision-id of one revision to merge
         :param b_rev: Revision-id of the other revision to merge
-        :param vf: A versionedfile containing both revisions
+        :param vf: A VersionedFiles containing both revisions
+        :param key_prefix: A prefix for accessing keys in vf, typically
+            (file_id,).
         """
         self.a_rev = a_rev
         self.b_rev = b_rev
-        self.lines_a = vf.get_lines(a_rev)
-        self.lines_b = vf.get_lines(b_rev)
         self.vf = vf
         self._last_lines = None
         self._last_lines_revision_id = None
         self._cached_matching_blocks = {}
+        self._key_prefix = key_prefix
+        self._precache_tip_lines()
+
+    def _precache_tip_lines(self):
+        lines = self.get_lines([self.a_rev, self.b_rev])
+        self.lines_a = lines[self.a_rev]
+        self.lines_b = lines[self.b_rev]
+
+    def get_lines(self, revisions):
+        """Get lines for revisions from the backing VersionedFiles.
+
+        :raises RevisionNotPresent: on absent texts.
+        """
+        keys = [(self._key_prefix + (rev,)) for rev in revisions]
+        result = {}
+        for record in self.vf.get_record_stream(keys, 'unordered', True):
+            if record.storage_kind == 'absent':
+                raise errors.RevisionNotPresent(record.key, self.vf)
+            result[record.key[-1]] = osutils.chunks_to_lines(
+                record.get_bytes_as('chunked'))
+        return result
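# Editorial sketch (not part of either revision): with key_prefix=(file_id,)
# the keys looked up above are (file_id, revision_id) tuples, which is how
# per-file texts are addressed in a VersionedFiles store.  The names below
# are hypothetical:
#
#   planner = _PlanMerge('rev-a', 'rev-b', repo.texts, ('file-id-1',))
#   texts = planner.get_lines(['rev-a', 'rev-b'])
#   # => {'rev-a': [...lines...], 'rev-b': [...lines...]}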
 
     def plan_merge(self):
         """Generate a 'plan' for merging the two revisions.
 
 
 class _PlanMerge(_PlanMergeBase):
     """Plan an annotate merge using on-the-fly annotation"""
 
-    def __init__(self, a_rev, b_rev, vf):
-        _PlanMergeBase.__init__(self, a_rev, b_rev, vf)
-        a_ancestry = set(vf.get_ancestry(a_rev, topo_sorted=False))
-        b_ancestry = set(vf.get_ancestry(b_rev, topo_sorted=False))
-        self.uncommon = a_ancestry.symmetric_difference(b_ancestry)
-
-    def _determine_status(self, revision_id, unique_line_numbers):
-        """Determines the status of unique lines versus all LCAs.
-
-        Basically, determines why the line is unique to this revision.
-
-        A line may be determined new or killed, but not both.
-
-        :param revision_id: The id of the revision in which the lines are
-            unique
-        :param unique_line_numbers: The line numbers of unique lines.
-        :return: a tuple of (new_this, killed_other)
-        """
-        new = self._find_new(revision_id)
-        killed = set(unique_line_numbers).difference(new)
-        return new, killed
-
-    def _find_new(self, version_id):
-        """Determine which lines are new in the ancestry of this version.
-
-        If a line is present in this version, and not present in any
-        common ancestor, it is considered new.
-        """
-        if version_id not in self.uncommon:
-            return set()
-        parents = self.vf.get_parents(version_id)
-        if len(parents) == 0:
-            return set(range(len(self.vf.get_lines(version_id))))
-        new = None
-        for parent in parents:
-            blocks = self._get_matching_blocks(version_id, parent)
-            result, unused = self._unique_lines(blocks)
-            parent_new = self._find_new(parent)
-            for i, j, n in blocks:
-                for ii, jj in [(i+r, j+r) for r in range(n)]:
-                    if jj in parent_new:
-                        result.append(ii)
-            if new is None:
-                new = set(result)
-            else:
-                new.intersection_update(result)
-        return new
+    def __init__(self, a_rev, b_rev, vf, key_prefix):
+        super(_PlanMerge, self).__init__(a_rev, b_rev, vf, key_prefix)
+        self.a_key = self._key_prefix + (self.a_rev,)
+        self.b_key = self._key_prefix + (self.b_rev,)
+        self.graph = _mod_graph.Graph(self.vf)
+        heads = self.graph.heads((self.a_key, self.b_key))
+        if len(heads) == 1:
+            # one side dominates, so we can just return its values, yay for
+            # confluence :)
+            # Ideally we would know that before we get this far
+            self._head_key = heads.pop()
+            if self._head_key == self.a_key:
+                other = b_rev
+            else:
+                other = a_rev
+            trace.mutter('found dominating revision for %s\n%s > %s', self.vf,
+                         self._head_key[-1], other)
+            self._weave = None
+        else:
+            self._head_key = None
+            self._build_weave()
 
+    def _precache_tip_lines(self):
+        # Turn this into a no-op, because we will do this later
+        pass
 
+    def _find_recursive_lcas(self):
+        """Find all the ancestors back to a unique lca"""
+        cur_ancestors = (self.a_key, self.b_key)
+        # graph.find_lca(uncommon, keys) now returns plain NULL_REVISION,
+        # rather than a key tuple. We will just map that directly to no common
+        # ancestors.
+        parent_map = {}
+        while True:
+            next_lcas = self.graph.find_lca(*cur_ancestors)
+            # Map a plain NULL_REVISION to a simple no-ancestors
+            if next_lcas == set([_mod_revision.NULL_REVISION]):
+                next_lcas = ()
+            # Order the lca's based on when they were merged into the tip
+            # While the actual merge portion of weave merge uses a set() of
+            # active revisions, the order of insertion *does* affect the
+            # implicit ordering of the texts.
+            for rev_key in cur_ancestors:
+                ordered_parents = tuple(self.graph.find_merge_order(rev_key,
+                                                                    next_lcas))
+                parent_map[rev_key] = ordered_parents
+            if len(next_lcas) == 0:
+                break
+            elif len(next_lcas) == 1:
+                parent_map[list(next_lcas)[0]] = ()
+                break
+            elif len(next_lcas) > 2:
+                # More than 2 lca's, fall back to grabbing all nodes between
+                # this and the unique lca.
+                trace.mutter('More than 2 LCAs, falling back to all nodes for:'
+                             ' %s, %s\n=> %s',
+                             self.a_key, self.b_key, cur_ancestors)
+                cur_lcas = next_lcas
+                while len(cur_lcas) > 1:
+                    cur_lcas = self.graph.find_lca(*cur_lcas)
+                if len(cur_lcas) == 0:
+                    # No common base to find, use the full ancestry
+                    unique_lca = None
+                else:
+                    unique_lca = list(cur_lcas)[0]
+                    if unique_lca == _mod_revision.NULL_REVISION:
+                        # find_lca will return a plain 'NULL_REVISION' rather
+                        # than a key tuple when there is no common ancestor, we
+                        # prefer to just use None, because it doesn't confuse
+                        # _get_interesting_texts()
+                        unique_lca = None
+                parent_map.update(self._find_unique_parents(next_lcas,
+                                                            unique_lca))
+                break
+            cur_ancestors = next_lcas
+        return parent_map
 
+    def _find_unique_parents(self, tip_keys, base_key):
+        """Find ancestors of tip that aren't ancestors of base.
+
+        :param tip_keys: Nodes that are interesting
+        :param base_key: Cull all ancestors of this node
+        :return: The parent map for all revisions between tip_keys and
+            base_key. base_key will be included. References to nodes outside of
+            the ancestor set will also be removed.
+        """
+        # TODO: this would be simpler if find_unique_ancestors took a list
+        #       instead of a single tip, internally it supports it, but it
+        #       isn't a "backwards compatible" api change.
+        if base_key is None:
+            parent_map = dict(self.graph.iter_ancestry(tip_keys))
+            # We remove NULL_REVISION because it isn't a proper tuple key, and
+            # thus confuses things like _get_interesting_texts, and our logic
+            # to add the texts into the memory weave.
+            if _mod_revision.NULL_REVISION in parent_map:
+                parent_map.pop(_mod_revision.NULL_REVISION)
+        else:
+            interesting = set()
+            for tip in tip_keys:
+                interesting.update(
+                    self.graph.find_unique_ancestors(tip, [base_key]))
+            parent_map = self.graph.get_parent_map(interesting)
+            parent_map[base_key] = ()
+        culled_parent_map, child_map, tails = self._remove_external_references(
+            parent_map)
+        # Remove all the tails but base_key
+        if base_key is not None:
+            tails.remove(base_key)
+            self._prune_tails(culled_parent_map, child_map, tails)
+        # Now remove all the uninteresting 'linear' regions
+        simple_map = _mod_graph.collapse_linear_regions(culled_parent_map)
+        return simple_map
 
+    @staticmethod
+    def _remove_external_references(parent_map):
+        """Remove references that go outside of the parent map.
+
+        :param parent_map: Something returned from Graph.get_parent_map(keys)
+        :return: (filtered_parent_map, child_map, tails)
+            filtered_parent_map is parent_map without external references
+            child_map is the {parent_key: [child_keys]} mapping
+            tails is a list of nodes that do not have any parents in the map
+        """
+        # TODO: The basic effect of this function seems more generic than
+        #       _PlanMerge. But the specific details of building a child_map,
+        #       and computing tails seems very specific to _PlanMerge.
+        #       Still, should this be in Graph land?
+        filtered_parent_map = {}
+        child_map = {}
+        tails = []
+        for key, parent_keys in parent_map.iteritems():
+            culled_parent_keys = [p for p in parent_keys if p in parent_map]
+            if not culled_parent_keys:
+                tails.append(key)
+            for parent_key in culled_parent_keys:
+                child_map.setdefault(parent_key, []).append(key)
+            # TODO: Do we want to do this, it adds overhead for every node,
+            #       just to say that the node has no children
+            child_map.setdefault(key, [])
+            filtered_parent_map[key] = culled_parent_keys
+        return filtered_parent_map, child_map, tails
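# Editorial worked example (not part of either revision): given a parent_map
# whose ('C',) node references an ('X',) key that is absent from the map,
#
#   parent_map = {('A',): (), ('B',): (('A',),), ('C',): (('B',), ('X',))}
#
# the helper above returns
#
#   filtered_parent_map = {('A',): [], ('B',): [('A',)], ('C',): [('B',)]}
#   child_map           = {('A',): [('B',)], ('B',): [('C',)], ('C',): []}
#   tails               = [('A',)]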
 
+    @staticmethod
+    def _prune_tails(parent_map, child_map, tails_to_remove):
+        """Remove tails from the parent map.
+
+        This will remove the supplied revisions until no more children have 0
+        parents.
+
+        :param parent_map: A dict of {child: [parents]}, this dictionary will
+            be modified in place.
+        :param tails_to_remove: A list of tips that should be removed, this
+            list will be consumed
+        :param child_map: The reverse dict of parent_map ({parent: [children]})
+            this dict will be modified
+        :return: None, parent_map will be modified in place.
+        """
+        while tails_to_remove:
+            next = tails_to_remove.pop()
+            parent_map.pop(next)
+            children = child_map.pop(next)
+            for child in children:
+                child_parents = parent_map[child]
+                child_parents.remove(next)
+                if len(child_parents) == 0:
+                    tails_to_remove.append(child)
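# Editorial worked example (not part of either revision): pruning tail ('A',)
# from the maps of the previous example removes ('A',), then ('B',), then
# ('C',), because each becomes parentless in turn.
#
#   parent_map = {('A',): [], ('B',): [('A',)], ('C',): [('B',)]}
#   child_map  = {('A',): [('B',)], ('B',): [('C',)], ('C',): []}
#   _PlanMerge._prune_tails(parent_map, child_map, [('A',)])
#   # parent_map is now {}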
 
+    def _get_interesting_texts(self, parent_map):
+        """Return a dict of texts we are interested in.
+
+        Note that the input is in key tuples, but the output is in plain
+        revision ids.
+
+        :param parent_map: The output from _find_recursive_lcas
+        :return: A dict of {'revision_id':lines} as returned by
+            _PlanMergeBase.get_lines()
+        """
+        all_revision_keys = set(parent_map)
+        all_revision_keys.add(self.a_key)
+        all_revision_keys.add(self.b_key)
+
+        # Everything else is in 'keys' but get_lines is in 'revision_ids'
+        all_texts = self.get_lines([k[-1] for k in all_revision_keys])
+        return all_texts
 
+    def _build_weave(self):
+        from bzrlib import weave
+        self._weave = weave.Weave(weave_name='in_memory_weave',
+                                  allow_reserved=True)
+        parent_map = self._find_recursive_lcas()
+
+        all_texts = self._get_interesting_texts(parent_map)
+
+        # Note: Unfortunately, the order given by topo_sort will affect the
+        # ordering resolution in the output. Specifically, if you add A then B,
+        # then in the output text A lines will show up before B lines. And, of
+        # course, topo_sort doesn't guarantee any real ordering.
+        # So we use merge_sort, and add a fake node on the tip.
+        # This ensures that left-hand parents will always be inserted into the
+        # weave before right-hand parents.
+        tip_key = self._key_prefix + (_mod_revision.CURRENT_REVISION,)
+        parent_map[tip_key] = (self.a_key, self.b_key)
+
+        for seq_num, key, depth, eom in reversed(tsort.merge_sort(parent_map,
+                                                                  tip_key)):
+            if key == tip_key:
+                continue
+            # for key in tsort.topo_sort(parent_map):
+            parent_keys = parent_map[key]
+            revision_id = key[-1]
+            parent_ids = [k[-1] for k in parent_keys]
+            self._weave.add_lines(revision_id, parent_ids,
+                                  all_texts[revision_id])
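# Editorial sketch (not part of either revision): the in-memory Weave built
# above records each interesting text against its parent texts, so that
# self._weave.plan_merge(a_rev, b_rev) in plan_merge() below can attribute
# every line to the revision that introduced or removed it.  A reduced,
# stand-alone sketch with hypothetical revision ids:
#
#   from bzrlib import weave
#   w = weave.Weave(weave_name='in_memory_weave', allow_reserved=True)
#   w.add_lines('base', [], ['one\n', 'two\n'])
#   w.add_lines('rev-a', ['base'], ['one\n', 'two\n', 'three\n'])
#   w.add_lines('rev-b', ['base'], ['zero\n', 'one\n', 'two\n'])
#   plan = list(w.plan_merge('rev-a', 'rev-b'))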
 
+    def plan_merge(self):
+        """Generate a 'plan' for merging the two revisions.
+
+        This involves comparing their texts and determining the cause of
+        differences. If text A has a line and text B does not, then either the
+        line was added to text A, or it was deleted from B. Once the causes
+        are combined, they are written out in the format described in
+        VersionedFile.plan_merge
+        """
+        if self._head_key is not None: # There was a single head
+            if self._head_key == self.a_key:
+                plan = 'new-a'
+            else:
+                if self._head_key != self.b_key:
+                    raise AssertionError('There was an invalid head: %s != %s'
+                                         % (self.b_key, self._head_key))
+                plan = 'new-b'
+            head_rev = self._head_key[-1]
+            lines = self.get_lines([head_rev])[head_rev]
+            return ((plan, line) for line in lines)
+        return self._weave.plan_merge(self.a_rev, self.b_rev)
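# Editorial sketch (not part of either revision): the plan returned above is
# an iterable of (state, line) pairs, for example
#
#   [('unchanged', 'a\n'),
#    ('killed-b',  'b\n'),
#    ('new-a',     'c\n'),
#    ('new-b',     'd\n')]
#
# PlanWeaveMerge (used by WeaveMerger._merged_lines earlier in this file)
# consumes exactly this structure to produce the merged lines and, when the
# sides disagree, the '<<<<<<< TREE' / '>>>>>>> MERGE-SOURCE' regions.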
 
 
 class _PlanLCAMerge(_PlanMergeBase):
     """
     This merge algorithm differs from _PlanMerge in that:
 
     1. comparisons are done against LCAs only
     2. cases where a contested line is new versus one LCA but old versus
        another are marked as conflicts, by emitting the line as conflicted-a
        or conflicted-b.