114
86
self.ignore_zero = False
115
87
self.backup_files = False
116
88
self.interesting_ids = None
89
self.interesting_files = None
117
90
self.show_base = False
118
91
self.reprocess = False
121
96
self.recurse = recurse
122
97
self.change_reporter = change_reporter
124
def revision_tree(self, revision_id):
125
return self.this_branch.repository.revision_tree(revision_id)
98
self._cached_trees = {}
99
self._revision_graph = revision_graph
100
self._base_is_ancestor = None
101
self._base_is_other_ancestor = None
102
self._is_criss_cross = None
103
self._lca_trees = None
106
def revision_graph(self):
107
if self._revision_graph is None:
108
self._revision_graph = self.this_branch.repository.get_graph()
109
return self._revision_graph
111
def _set_base_is_ancestor(self, value):
112
self._base_is_ancestor = value
114
def _get_base_is_ancestor(self):
115
if self._base_is_ancestor is None:
116
self._base_is_ancestor = self.revision_graph.is_ancestor(
117
self.base_rev_id, self.this_basis)
118
return self._base_is_ancestor
120
base_is_ancestor = property(_get_base_is_ancestor, _set_base_is_ancestor)
122
def _set_base_is_other_ancestor(self, value):
123
self._base_is_other_ancestor = value
125
def _get_base_is_other_ancestor(self):
126
if self._base_is_other_ancestor is None:
127
if self.other_basis is None:
129
self._base_is_other_ancestor = self.revision_graph.is_ancestor(
130
self.base_rev_id, self.other_basis)
131
return self._base_is_other_ancestor
133
base_is_other_ancestor = property(_get_base_is_other_ancestor,
134
_set_base_is_other_ancestor)
137
def from_uncommitted(tree, other_tree, pb=None, base_tree=None):
138
"""Return a Merger for uncommitted changes in other_tree.
140
:param tree: The tree to merge into
141
:param other_tree: The tree to get uncommitted changes from
142
:param pb: A progress indicator
143
:param base_tree: The basis to use for the merge. If unspecified,
144
other_tree.basis_tree() will be used.
146
if base_tree is None:
147
base_tree = other_tree.basis_tree()
148
merger = Merger(tree.branch, other_tree, base_tree, tree, pb)
149
merger.base_rev_id = merger.base_tree.get_revision_id()
150
merger.other_rev_id = None
151
merger.other_basis = merger.base_rev_id
155
def from_mergeable(klass, tree, mergeable, pb):
156
"""Return a Merger for a bundle or merge directive.
158
:param tree: The tree to merge changes into
159
:param mergeable: A merge directive or bundle
160
:param pb: A progress indicator
162
mergeable.install_revisions(tree.branch.repository)
163
base_revision_id, other_revision_id, verified =\
164
mergeable.get_merge_request(tree.branch.repository)
165
revision_graph = tree.branch.repository.get_graph()
166
if base_revision_id is not None:
167
if (base_revision_id != _mod_revision.NULL_REVISION and
168
revision_graph.is_ancestor(
169
base_revision_id, tree.branch.last_revision())):
170
base_revision_id = None
172
warning('Performing cherrypick')
173
merger = klass.from_revision_ids(pb, tree, other_revision_id,
174
base_revision_id, revision_graph=
176
return merger, verified
179
def from_revision_ids(pb, tree, other, base=None, other_branch=None,
180
base_branch=None, revision_graph=None,
182
"""Return a Merger for revision-ids.
184
:param pb: A progress indicator
185
:param tree: The tree to merge changes into
186
:param other: The revision-id to use as OTHER
187
:param base: The revision-id to use as BASE. If not specified, will
189
:param other_branch: A branch containing the other revision-id. If
190
not supplied, tree.branch is used.
191
:param base_branch: A branch containing the base revision-id. If
192
not supplied, other_branch or tree.branch will be used.
193
:param revision_graph: If you have a revision_graph precomputed, pass
194
it in, otherwise it will be created for you.
195
:param tree_branch: The branch associated with tree. If not supplied,
196
tree.branch will be used.
198
if tree_branch is None:
199
tree_branch = tree.branch
200
merger = Merger(tree_branch, this_tree=tree, pb=pb,
201
revision_graph=revision_graph)
202
if other_branch is None:
203
other_branch = tree.branch
204
merger.set_other_revision(other, other_branch)
208
if base_branch is None:
209
base_branch = other_branch
210
merger.set_base_revision(base, base_branch)
213
def revision_tree(self, revision_id, branch=None):
214
if revision_id not in self._cached_trees:
216
branch = self.this_branch
218
tree = self.this_tree.revision_tree(revision_id)
219
except errors.NoSuchRevisionInTree:
220
tree = branch.repository.revision_tree(revision_id)
221
self._cached_trees[revision_id] = tree
222
return self._cached_trees[revision_id]
224
def _get_tree(self, treespec, possible_transports=None):
225
from bzrlib import workingtree
226
location, revno = treespec
228
tree = workingtree.WorkingTree.open_containing(location)[0]
229
return tree.branch, tree
230
branch = Branch.open_containing(location, possible_transports)[0]
232
revision_id = branch.last_revision()
234
revision_id = branch.get_rev_id(revno)
235
revision_id = ensure_null(revision_id)
236
return branch, self.revision_tree(revision_id, branch)
127
238
def ensure_revision_trees(self):
128
239
if self.this_revision_tree is None:
129
self.this_basis_tree = self.this_branch.repository.revision_tree(
240
self.this_basis_tree = self.revision_tree(self.this_basis)
131
241
if self.this_basis == self.this_rev_id:
132
242
self.this_revision_tree = self.this_basis_tree
299
447
kwargs['show_base'] = self.show_base
300
448
elif self.show_base:
301
449
raise BzrError("Showing base is not supported for this"
302
" merge type. %s" % self.merge_type)
450
" merge type. %s" % self.merge_type)
451
if (not getattr(self.merge_type, 'supports_reverse_cherrypick', True)
452
and not self.base_is_other_ancestor):
453
raise errors.CannotReverseCherrypick()
454
if self.merge_type.supports_cherrypick:
455
kwargs['cherrypick'] = (not self.base_is_ancestor or
456
not self.base_is_other_ancestor)
457
if self._is_criss_cross and getattr(self.merge_type,
458
'supports_lca_trees', False):
459
kwargs['lca_trees'] = self._lca_trees
460
return self.merge_type(pb=self._pb,
461
change_reporter=self.change_reporter,
464
def _do_merge_to(self, merge):
466
if self.recurse == 'down':
467
for relpath, file_id in self.this_tree.iter_references():
468
sub_tree = self.this_tree.get_nested_tree(file_id, relpath)
469
other_revision = self.other_tree.get_reference_revision(
471
if other_revision == sub_tree.last_revision():
473
sub_merge = Merger(sub_tree.branch, this_tree=sub_tree)
474
sub_merge.merge_type = self.merge_type
475
other_branch = self.other_branch.reference_parent(file_id, relpath)
476
sub_merge.set_other_revision(other_revision, other_branch)
477
base_revision = self.base_tree.get_reference_revision(file_id)
478
sub_merge.base_tree = \
479
sub_tree.branch.repository.revision_tree(base_revision)
480
sub_merge.base_rev_id = base_revision
303
484
self.this_tree.lock_tree_write()
304
if self.base_tree is not None:
305
self.base_tree.lock_read()
306
if self.other_tree is not None:
307
self.other_tree.lock_read()
309
merge = self.merge_type(pb=self._pb,
310
change_reporter=self.change_reporter,
312
if self.recurse == 'down':
313
for path, file_id in self.this_tree.iter_references():
314
sub_tree = self.this_tree.get_nested_tree(file_id, path)
315
other_revision = self.other_tree.get_reference_revision(
317
if other_revision == sub_tree.last_revision():
319
sub_merge = Merger(sub_tree.branch, this_tree=sub_tree)
320
sub_merge.merge_type = self.merge_type
321
relpath = self.this_tree.relpath(path)
322
other_branch = self.other_branch.reference_parent(file_id, relpath)
323
sub_merge.set_other_revision(other_revision, other_branch)
324
base_revision = self.base_tree.get_reference_revision(file_id)
325
sub_merge.base_tree = \
326
sub_tree.branch.repository.revision_tree(base_revision)
330
if self.other_tree is not None:
331
self.other_tree.unlock()
332
486
if self.base_tree is not None:
333
self.base_tree.unlock()
487
self.base_tree.lock_read()
489
if self.other_tree is not None:
490
self.other_tree.lock_read()
492
merge = self.make_merger()
493
self._do_merge_to(merge)
495
if self.other_tree is not None:
496
self.other_tree.unlock()
498
if self.base_tree is not None:
499
self.base_tree.unlock()
334
501
self.this_tree.unlock()
335
502
if len(merge.cooked_conflicts) == 0:
336
if not self.ignore_zero:
503
if not self.ignore_zero and not is_quiet():
337
504
note("All changes applied successfully.")
339
506
note("%d conflicts encountered." % len(merge.cooked_conflicts))
341
508
return len(merge.cooked_conflicts)
343
def regen_inventory(self, new_entries):
344
old_entries = self.this_tree.read_working_inventory()
348
for path, file_id in new_entries:
351
new_entries_map[file_id] = path
353
def id2path(file_id):
354
path = new_entries_map.get(file_id)
357
entry = old_entries[file_id]
358
if entry.parent_id is None:
360
return pathjoin(id2path(entry.parent_id), entry.name)
362
for file_id in old_entries:
363
entry = old_entries[file_id]
364
path = id2path(file_id)
365
if file_id in self.base_tree.inventory:
366
executable = getattr(self.base_tree.inventory[file_id], 'executable', False)
368
executable = getattr(entry, 'executable', False)
369
new_inventory[file_id] = (path, file_id, entry.parent_id,
370
entry.kind, executable)
372
by_path[path] = file_id
377
for path, file_id in new_entries:
379
del new_inventory[file_id]
382
new_path_list.append((path, file_id))
383
if file_id not in old_entries:
385
# Ensure no file is added before its parent
387
for path, file_id in new_path_list:
391
parent = by_path[os.path.dirname(path)]
392
abspath = pathjoin(self.this_tree.basedir, path)
393
kind = osutils.file_kind(abspath)
394
if file_id in self.base_tree.inventory:
395
executable = getattr(self.base_tree.inventory[file_id], 'executable', False)
398
new_inventory[file_id] = (path, file_id, parent, kind, executable)
399
by_path[path] = file_id
401
# Get a list in insertion order
402
new_inventory_list = new_inventory.values()
403
mutter ("""Inventory regeneration:
404
old length: %i insertions: %i deletions: %i new_length: %i"""\
405
% (len(old_entries), insertions, deletions,
406
len(new_inventory_list)))
407
assert len(new_inventory_list) == len(old_entries) + insertions\
409
new_inventory_list.sort()
410
return new_inventory_list
511
class _InventoryNoneEntry(object):
512
"""This represents an inventory entry which *isn't there*.
514
It simplifies the merging logic if we always have an InventoryEntry, even
515
if it isn't actually present
522
symlink_target = None
525
_none_entry = _InventoryNoneEntry()
413
528
class Merge3Merger(object):
416
531
supports_reprocess = True
417
532
supports_show_base = True
418
533
history_based = False
534
supports_cherrypick = True
535
supports_reverse_cherrypick = True
536
winner_idx = {"this": 2, "other": 1, "conflict": 1}
537
supports_lca_trees = True
420
def __init__(self, working_tree, this_tree, base_tree, other_tree,
539
def __init__(self, working_tree, this_tree, base_tree, other_tree,
421
540
interesting_ids=None, reprocess=False, show_base=False,
422
pb=DummyProgress(), pp=None, change_reporter=None):
423
"""Initialize the merger object and perform the merge."""
541
pb=DummyProgress(), pp=None, change_reporter=None,
542
interesting_files=None, do_merge=True,
543
cherrypick=False, lca_trees=None):
544
"""Initialize the merger object and perform the merge.
546
:param working_tree: The working tree to apply the merge to
547
:param this_tree: The local tree in the merge operation
548
:param base_tree: The common tree in the merge operation
549
:param other_tree: The other tree to merge changes from
550
:param interesting_ids: The file_ids of files that should be
551
participate in the merge. May not be combined with
553
:param: reprocess If True, perform conflict-reduction processing.
554
:param show_base: If True, show the base revision in text conflicts.
555
(incompatible with reprocess)
556
:param pb: A Progress bar
557
:param pp: A ProgressPhase object
558
:param change_reporter: An object that should report changes made
559
:param interesting_files: The tree-relative paths of files that should
560
participate in the merge. If these paths refer to directories,
561
the contents of those directories will also be included. May not
562
be combined with interesting_ids. If neither interesting_files nor
563
interesting_ids is specified, all files may participate in the
565
:param lca_trees: Can be set to a dictionary of {revision_id:rev_tree}
566
if the ancestry was found to include a criss-cross merge.
567
Otherwise should be None.
424
569
object.__init__(self)
570
if interesting_files is not None and interesting_ids is not None:
572
'specify either interesting_ids or interesting_files')
573
self.interesting_ids = interesting_ids
574
self.interesting_files = interesting_files
425
575
self.this_tree = working_tree
426
self.this_tree.lock_tree_write()
427
576
self.base_tree = base_tree
428
self.base_tree.lock_read()
429
577
self.other_tree = other_tree
430
self.other_tree.lock_read()
431
578
self._raw_conflicts = []
432
579
self.cooked_conflicts = []
433
580
self.reprocess = reprocess
434
581
self.show_base = show_base
582
self._lca_trees = lca_trees
583
# Uncommenting this will change the default algorithm to always use
584
# _entries_lca. This can be useful for running the test suite and
585
# making sure we haven't missed any corner cases.
586
# if lca_trees is None:
587
# self._lca_trees = [self.base_tree]
437
590
self.change_reporter = change_reporter
591
self.cherrypick = cherrypick
438
592
if self.pp is None:
439
593
self.pp = ProgressPhase("Merge phase", 3, self.pb)
441
if interesting_ids is not None:
442
all_ids = interesting_ids
444
all_ids = set(base_tree)
445
all_ids.update(other_tree)
446
self.tt = TreeTransform(working_tree, self.pb)
598
self.this_tree.lock_tree_write()
599
self.base_tree.lock_read()
600
self.other_tree.lock_read()
601
self.tt = TreeTransform(self.this_tree, self.pb)
448
603
self.pp.next_phase()
449
child_pb = ui.ui_factory.nested_progress_bar()
451
for num, file_id in enumerate(all_ids):
452
child_pb.update('Preparing file merge', num, len(all_ids))
453
self.merge_names(file_id)
454
file_status = self.merge_contents(file_id)
455
self.merge_executable(file_id, file_status)
460
child_pb = ui.ui_factory.nested_progress_bar()
462
fs_conflicts = resolve_conflicts(self.tt, child_pb)
465
if change_reporter is not None:
466
from bzrlib import delta
467
delta.report_changes(self.tt._iter_changes(), change_reporter)
468
self.cook_conflicts(fs_conflicts)
469
for conflict in self.cooked_conflicts:
472
results = self.tt.apply()
604
self._compute_transform()
606
results = self.tt.apply(no_conflicts=True)
473
607
self.write_modified(results)
475
working_tree.add_conflicts(self.cooked_conflicts)
609
self.this_tree.add_conflicts(self.cooked_conflicts)
476
610
except UnsupportedOperation:
482
616
self.this_tree.unlock()
619
def make_preview_transform(self):
620
self.base_tree.lock_read()
621
self.other_tree.lock_read()
622
self.tt = TransformPreview(self.this_tree)
625
self._compute_transform()
628
self.other_tree.unlock()
629
self.base_tree.unlock()
633
def _compute_transform(self):
634
if self._lca_trees is None:
635
entries = self._entries3()
636
resolver = self._three_way
638
entries = self._entries_lca()
639
resolver = self._lca_multi_way
640
child_pb = ui.ui_factory.nested_progress_bar()
642
for num, (file_id, changed, parents3, names3,
643
executable3) in enumerate(entries):
644
child_pb.update('Preparing file merge', num, len(entries))
645
self._merge_names(file_id, parents3, names3, resolver=resolver)
647
file_status = self.merge_contents(file_id)
649
file_status = 'unmodified'
650
self._merge_executable(file_id,
651
executable3, file_status, resolver=resolver)
656
child_pb = ui.ui_factory.nested_progress_bar()
658
fs_conflicts = resolve_conflicts(self.tt, child_pb,
659
lambda t, c: conflict_pass(t, c, self.other_tree))
662
if self.change_reporter is not None:
663
from bzrlib import delta
664
delta.report_changes(
665
self.tt.iter_changes(), self.change_reporter)
666
self.cook_conflicts(fs_conflicts)
667
for conflict in self.cooked_conflicts:
671
"""Gather data about files modified between three trees.
673
Return a list of tuples of file_id, changed, parents3, names3,
674
executable3. changed is a boolean indicating whether the file contents
675
or kind were changed. parents3 is a tuple of parent ids for base,
676
other and this. names3 is a tuple of names for base, other and this.
677
executable3 is a tuple of execute-bit values for base, other and this.
680
iterator = self.other_tree.iter_changes(self.base_tree,
681
include_unchanged=True, specific_files=self.interesting_files,
682
extra_trees=[self.this_tree])
683
this_entries = dict((e.file_id, e) for p, e in
684
self.this_tree.iter_entries_by_dir(
685
self.interesting_ids))
686
for (file_id, paths, changed, versioned, parents, names, kind,
687
executable) in iterator:
688
if (self.interesting_ids is not None and
689
file_id not in self.interesting_ids):
691
entry = this_entries.get(file_id)
692
if entry is not None:
693
this_name = entry.name
694
this_parent = entry.parent_id
695
this_executable = entry.executable
699
this_executable = None
700
parents3 = parents + (this_parent,)
701
names3 = names + (this_name,)
702
executable3 = executable + (this_executable,)
703
result.append((file_id, changed, parents3, names3, executable3))
706
def _entries_lca(self):
707
"""Gather data about files modified between multiple trees.
709
This compares OTHER versus all LCA trees, and for interesting entries,
710
it then compares with THIS and BASE.
712
For the multi-valued entries, the format will be (BASE, [lca1, lca2])
713
:return: [(file_id, changed, parents, names, executable)]
714
file_id Simple file_id of the entry
715
changed Boolean, True if the kind or contents changed
717
parents ((base, [parent_id, in, lcas]), parent_id_other,
719
names ((base, [name, in, lcas]), name_in_other, name_in_this)
720
executable ((base, [exec, in, lcas]), exec_in_other, exec_in_this)
722
if self.interesting_files is not None:
723
lookup_trees = [self.this_tree, self.base_tree]
724
lookup_trees.extend(self._lca_trees)
725
# I think we should include the lca trees as well
726
interesting_ids = self.other_tree.paths2ids(self.interesting_files,
729
interesting_ids = self.interesting_ids
731
walker = _mod_tree.MultiWalker(self.other_tree, self._lca_trees)
733
base_inventory = self.base_tree.inventory
734
this_inventory = self.this_tree.inventory
735
for path, file_id, other_ie, lca_values in walker.iter_all():
736
# Is this modified at all from any of the other trees?
738
other_ie = _none_entry
739
if interesting_ids is not None and file_id not in interesting_ids:
742
# If other_revision is found in any of the lcas, that means this
743
# node is uninteresting. This is because when merging, if there are
744
# multiple heads(), we have to create a new node. So if we didn't,
745
# we know that the ancestry is linear, and that OTHER did not
747
# See doc/developers/lca_merge_resolution.txt for details
748
other_revision = other_ie.revision
749
if other_revision is not None:
750
# We can't use this shortcut when other_revision is None,
751
# because it may be None because things are WorkingTrees, and
752
# not because it is *actually* None.
753
is_unmodified = False
754
for lca_path, ie in lca_values:
755
if ie is not None and ie.revision == other_revision:
762
for lca_path, lca_ie in lca_values:
764
lca_entries.append(_none_entry)
766
lca_entries.append(lca_ie)
768
if file_id in base_inventory:
769
base_ie = base_inventory[file_id]
771
base_ie = _none_entry
773
if file_id in this_inventory:
774
this_ie = this_inventory[file_id]
776
this_ie = _none_entry
782
for lca_ie in lca_entries:
783
lca_kinds.append(lca_ie.kind)
784
lca_parent_ids.append(lca_ie.parent_id)
785
lca_names.append(lca_ie.name)
786
lca_executable.append(lca_ie.executable)
788
kind_winner = self._lca_multi_way(
789
(base_ie.kind, lca_kinds),
790
other_ie.kind, this_ie.kind)
791
parent_id_winner = self._lca_multi_way(
792
(base_ie.parent_id, lca_parent_ids),
793
other_ie.parent_id, this_ie.parent_id)
794
name_winner = self._lca_multi_way(
795
(base_ie.name, lca_names),
796
other_ie.name, this_ie.name)
798
content_changed = True
799
if kind_winner == 'this':
800
# No kind change in OTHER, see if there are *any* changes
801
if other_ie.kind == 'directory':
802
if parent_id_winner == 'this' and name_winner == 'this':
803
# No change for this directory in OTHER, skip
805
content_changed = False
806
elif other_ie.kind is None or other_ie.kind == 'file':
807
def get_sha1(ie, tree):
808
if ie.kind != 'file':
810
return tree.get_file_sha1(file_id)
811
base_sha1 = get_sha1(base_ie, self.base_tree)
812
lca_sha1s = [get_sha1(ie, tree) for ie, tree
813
in zip(lca_entries, self._lca_trees)]
814
this_sha1 = get_sha1(this_ie, self.this_tree)
815
other_sha1 = get_sha1(other_ie, self.other_tree)
816
sha1_winner = self._lca_multi_way(
817
(base_sha1, lca_sha1s), other_sha1, this_sha1,
818
allow_overriding_lca=False)
819
exec_winner = self._lca_multi_way(
820
(base_ie.executable, lca_executable),
821
other_ie.executable, this_ie.executable)
822
if (parent_id_winner == 'this' and name_winner == 'this'
823
and sha1_winner == 'this' and exec_winner == 'this'):
824
# No kind, parent, name, exec, or content change for
825
# OTHER, so this node is not considered interesting
827
if sha1_winner == 'this':
828
content_changed = False
829
elif other_ie.kind == 'symlink':
830
def get_target(ie, tree):
831
if ie.kind != 'symlink':
833
return tree.get_symlink_target(file_id)
834
base_target = get_target(base_ie, self.base_tree)
835
lca_targets = [get_target(ie, tree) for ie, tree
836
in zip(lca_entries, self._lca_trees)]
837
this_target = get_target(this_ie, self.this_tree)
838
other_target = get_target(other_ie, self.other_tree)
839
target_winner = self._lca_multi_way(
840
(base_target, lca_targets),
841
other_target, this_target)
842
if (parent_id_winner == 'this' and name_winner == 'this'
843
and target_winner == 'this'):
844
# No kind, parent, name, or symlink target change
847
if target_winner == 'this':
848
content_changed = False
849
elif other_ie.kind == 'tree-reference':
850
# The 'changed' information seems to be handled at a higher
851
# level. At least, _entries3 returns False for content
852
# changed, even when at a new revision_id.
853
content_changed = False
854
if (parent_id_winner == 'this' and name_winner == 'this'):
855
# Nothing interesting
858
raise AssertionError('unhandled kind: %s' % other_ie.kind)
859
# XXX: We need to handle kind == 'symlink'
861
# If we have gotten this far, that means something has changed
862
result.append((file_id, content_changed,
863
((base_ie.parent_id, lca_parent_ids),
864
other_ie.parent_id, this_ie.parent_id),
865
((base_ie.name, lca_names),
866
other_ie.name, this_ie.name),
867
((base_ie.executable, lca_executable),
868
other_ie.executable, this_ie.executable)
485
873
def fix_root(self):
487
875
self.tt.final_kind(self.tt.root)
488
876
except NoSuchFile:
489
877
self.tt.cancel_deletion(self.tt.root)
490
878
if self.tt.final_file_id(self.tt.root) is None:
491
self.tt.version_file(self.tt.tree_file_id(self.tt.root),
879
self.tt.version_file(self.tt.tree_file_id(self.tt.root),
493
if self.other_tree.inventory.root is None:
881
other_root_file_id = self.other_tree.get_root_id()
882
if other_root_file_id is None:
495
other_root_file_id = self.other_tree.inventory.root.file_id
496
884
other_root = self.tt.trans_id_file_id(other_root_file_id)
497
885
if other_root == self.tt.root:
887
1367
"""Three-way tree merger, text weave merger."""
888
1368
supports_reprocess = True
889
1369
supports_show_base = False
891
def __init__(self, working_tree, this_tree, base_tree, other_tree,
892
interesting_ids=None, pb=DummyProgress(), pp=None,
893
reprocess=False, change_reporter=None):
894
self.this_revision_tree = self._get_revision_tree(this_tree)
895
self.other_revision_tree = self._get_revision_tree(other_tree)
896
super(WeaveMerger, self).__init__(working_tree, this_tree,
897
base_tree, other_tree,
898
interesting_ids=interesting_ids,
899
pb=pb, pp=pp, reprocess=reprocess,
900
change_reporter=change_reporter)
902
def _get_revision_tree(self, tree):
903
"""Return a revision tree related to this tree.
904
If the tree is a WorkingTree, the basis will be returned.
906
if getattr(tree, 'get_weave', False) is False:
907
# If we have a WorkingTree, try using the basis
908
return tree.branch.basis_tree()
912
def _check_file(self, file_id):
913
"""Check that the revision tree's version of the file matches."""
914
for tree, rt in ((self.this_tree, self.this_revision_tree),
915
(self.other_tree, self.other_revision_tree)):
918
if tree.get_file_sha1(file_id) != rt.get_file_sha1(file_id):
919
raise WorkingTreeNotRevision(self.this_tree)
1370
supports_reverse_cherrypick = False
1371
history_based = True
921
1373
def _merged_lines(self, file_id):
922
1374
"""Generate the merged lines.
923
1375
There is no distinction between lines that are meant to contain <<<<<<<
926
weave = self.this_revision_tree.get_weave(file_id)
927
this_revision_id = self.this_revision_tree.inventory[file_id].revision
928
other_revision_id = \
929
self.other_revision_tree.inventory[file_id].revision
930
wm = WeaveMerge(weave, this_revision_id, other_revision_id,
931
'<<<<<<< TREE\n', '>>>>>>> MERGE-SOURCE\n')
932
return wm.merge_lines(self.reprocess)
1379
base = self.base_tree
1382
plan = self.this_tree.plan_file_merge(file_id, self.other_tree,
1384
if 'merge' in debug.debug_flags:
1386
trans_id = self.tt.trans_id_file_id(file_id)
1387
name = self.tt.final_name(trans_id) + '.plan'
1388
contents = ('%10s|%s' % l for l in plan)
1389
self.tt.new_file(name, self.tt.final_parent(trans_id), contents)
1390
textmerge = PlanWeaveMerge(plan, '<<<<<<< TREE\n',
1391
'>>>>>>> MERGE-SOURCE\n')
1392
return textmerge.merge_lines(self.reprocess)
934
1394
def text_merge(self, file_id, trans_id):
935
1395
"""Perform a (weave) text merge for a given file and file-id.
936
1396
If conflicts are encountered, .THIS and .OTHER files will be emitted,
937
1397
and a conflict will be noted.
939
self._check_file(file_id)
940
1399
lines, conflicts = self._merged_lines(file_id)
941
1400
lines = list(lines)
942
# Note we're checking whether the OUTPUT is binary in this case,
1401
# Note we're checking whether the OUTPUT is binary in this case,
943
1402
# because we don't want to get into weave merge guts.
944
1403
check_text_lines(lines)
945
1404
self.tt.create_file(lines, trans_id)
1039
1527
from bzrlib import option
1040
1528
return option._merge_type_registry
1531
def _plan_annotate_merge(annotated_a, annotated_b, ancestors_a, ancestors_b):
1532
def status_a(revision, text):
1533
if revision in ancestors_b:
1534
return 'killed-b', text
1536
return 'new-a', text
1538
def status_b(revision, text):
1539
if revision in ancestors_a:
1540
return 'killed-a', text
1542
return 'new-b', text
1544
plain_a = [t for (a, t) in annotated_a]
1545
plain_b = [t for (a, t) in annotated_b]
1546
matcher = patiencediff.PatienceSequenceMatcher(None, plain_a, plain_b)
1547
blocks = matcher.get_matching_blocks()
1550
for ai, bi, l in blocks:
1551
# process all mismatched sections
1552
# (last mismatched section is handled because blocks always
1553
# includes a 0-length last block)
1554
for revision, text in annotated_a[a_cur:ai]:
1555
yield status_a(revision, text)
1556
for revision, text in annotated_b[b_cur:bi]:
1557
yield status_b(revision, text)
1558
# and now the matched section
1561
for text_a in plain_a[ai:a_cur]:
1562
yield "unchanged", text_a
1565
class _PlanMergeBase(object):
1567
def __init__(self, a_rev, b_rev, vf, key_prefix):
1570
:param a_rev: Revision-id of one revision to merge
1571
:param b_rev: Revision-id of the other revision to merge
1572
:param vf: A VersionedFiles containing both revisions
1573
:param key_prefix: A prefix for accessing keys in vf, typically
1579
self._last_lines = None
1580
self._last_lines_revision_id = None
1581
self._cached_matching_blocks = {}
1582
self._key_prefix = key_prefix
1583
self._precache_tip_lines()
1585
def _precache_tip_lines(self):
1586
lines = self.get_lines([self.a_rev, self.b_rev])
1587
self.lines_a = lines[self.a_rev]
1588
self.lines_b = lines[self.b_rev]
1590
def get_lines(self, revisions):
1591
"""Get lines for revisions from the backing VersionedFiles.
1593
:raises RevisionNotPresent: on absent texts.
1595
keys = [(self._key_prefix + (rev,)) for rev in revisions]
1597
for record in self.vf.get_record_stream(keys, 'unordered', True):
1598
if record.storage_kind == 'absent':
1599
raise errors.RevisionNotPresent(record.key, self.vf)
1600
result[record.key[-1]] = osutils.chunks_to_lines(
1601
record.get_bytes_as('chunked'))
1604
def plan_merge(self):
1605
"""Generate a 'plan' for merging the two revisions.
1607
This involves comparing their texts and determining the cause of
1608
differences. If text A has a line and text B does not, then either the
1609
line was added to text A, or it was deleted from B. Once the causes
1610
are combined, they are written out in the format described in
1611
VersionedFile.plan_merge
1613
blocks = self._get_matching_blocks(self.a_rev, self.b_rev)
1614
unique_a, unique_b = self._unique_lines(blocks)
1615
new_a, killed_b = self._determine_status(self.a_rev, unique_a)
1616
new_b, killed_a = self._determine_status(self.b_rev, unique_b)
1617
return self._iter_plan(blocks, new_a, killed_b, new_b, killed_a)
1619
def _iter_plan(self, blocks, new_a, killed_b, new_b, killed_a):
1622
for i, j, n in blocks:
1623
for a_index in range(last_i, i):
1624
if a_index in new_a:
1625
if a_index in killed_b:
1626
yield 'conflicted-a', self.lines_a[a_index]
1628
yield 'new-a', self.lines_a[a_index]
1630
yield 'killed-b', self.lines_a[a_index]
1631
for b_index in range(last_j, j):
1632
if b_index in new_b:
1633
if b_index in killed_a:
1634
yield 'conflicted-b', self.lines_b[b_index]
1636
yield 'new-b', self.lines_b[b_index]
1638
yield 'killed-a', self.lines_b[b_index]
1639
# handle common lines
1640
for a_index in range(i, i+n):
1641
yield 'unchanged', self.lines_a[a_index]
1645
def _get_matching_blocks(self, left_revision, right_revision):
1646
"""Return a description of which sections of two revisions match.
1648
See SequenceMatcher.get_matching_blocks
1650
cached = self._cached_matching_blocks.get((left_revision,
1652
if cached is not None:
1654
if self._last_lines_revision_id == left_revision:
1655
left_lines = self._last_lines
1656
right_lines = self.get_lines([right_revision])[right_revision]
1658
lines = self.get_lines([left_revision, right_revision])
1659
left_lines = lines[left_revision]
1660
right_lines = lines[right_revision]
1661
self._last_lines = right_lines
1662
self._last_lines_revision_id = right_revision
1663
matcher = patiencediff.PatienceSequenceMatcher(None, left_lines,
1665
return matcher.get_matching_blocks()
1667
def _unique_lines(self, matching_blocks):
1668
"""Analyse matching_blocks to determine which lines are unique
1670
:return: a tuple of (unique_left, unique_right), where the values are
1671
sets of line numbers of unique lines.
1677
for i, j, n in matching_blocks:
1678
unique_left.extend(range(last_i, i))
1679
unique_right.extend(range(last_j, j))
1682
return unique_left, unique_right
1685
def _subtract_plans(old_plan, new_plan):
    """Remove changes from new_plan that came from old_plan.

    It is assumed that the difference between the old_plan and new_plan
    is their choice of 'b' text.

    All lines from new_plan that differ from old_plan are emitted
    verbatim.  All lines from new_plan that match old_plan but are
    not about the 'b' revision are emitted verbatim.

    Lines that match and are about the 'b' revision are the lines we
    don't want, so we convert 'killed-b' -> 'unchanged', and 'new-b'
    is skipped entirely.
    """
    matcher = patiencediff.PatienceSequenceMatcher(None, old_plan,
                                                   new_plan)
    last_j = 0
    for i, j, n in matcher.get_matching_blocks():
        # Entries of new_plan not matched by old_plan pass through.
        for jj in range(last_j, j):
            yield new_plan[jj]
        for jj in range(j, j + n):
            plan_line = new_plan[jj]
            if plan_line[0] == 'new-b':
                # Matched 'new-b' came from old_plan: drop it entirely.
                pass
            elif plan_line[0] == 'killed-b':
                # Matched removal becomes plain context.
                yield 'unchanged', plan_line[1]
            else:
                yield plan_line
        last_j = j + n
class _PlanMerge(_PlanMergeBase):
1717
"""Plan an annotate merge using on-the-fly annotation"""
1719
def __init__(self, a_rev, b_rev, vf, key_prefix):
    """Prepare an on-the-fly annotation merge of a_rev and b_rev in vf."""
    super(_PlanMerge, self).__init__(a_rev, b_rev, vf, key_prefix)
    self.a_key = self._key_prefix + (self.a_rev,)
    self.b_key = self._key_prefix + (self.b_rev,)
    self.graph = Graph(self.vf)
    heads = self.graph.heads((self.a_key, self.b_key))
    if len(heads) == 1:
        # one side dominates, so we can just return its values, yay for
        # simplicity.
        # Ideally we would know that before we get this far
        self._head_key = heads.pop()
        if self._head_key == self.a_key:
            other = b_rev
        else:
            other = a_rev
        mutter('found dominating revision for %s\n%s > %s', self.vf,
               self._head_key[-1], other)
        self._weave = None
    else:
        self._head_key = None
        self._build_weave()
def _precache_tip_lines(self):
    """Deliberately a no-op: tip texts are loaded later, in _build_weave."""
    # Turn this into a no-op, because we will do this later
    pass
def _find_recursive_lcas(self):
    """Find all the ancestors back to a unique lca"""
    cur_ancestors = (self.a_key, self.b_key)
    # graph.find_lca(uncommon, keys) now returns plain NULL_REVISION,
    # rather than a key tuple. We will just map that directly to no common
    # ancestors.
    parent_map = {}
    while True:
        next_lcas = self.graph.find_lca(*cur_ancestors)
        # Map a plain NULL_REVISION to a simple no-ancestors
        if next_lcas == set([NULL_REVISION]):
            next_lcas = ()
        # Order the lca's based on when they were merged into the tip
        # While the actual merge portion of weave merge uses a set() of
        # active revisions, the order of insertion *does* effect the
        # implicit ordering of the texts.
        for rev_key in cur_ancestors:
            ordered_parents = tuple(self.graph.find_merge_order(rev_key,
                                                                next_lcas))
            parent_map[rev_key] = ordered_parents
        if len(next_lcas) == 0:
            break
        elif len(next_lcas) == 1:
            parent_map[list(next_lcas)[0]] = ()
            break
        elif len(next_lcas) > 2:
            # More than 2 lca's, fall back to grabbing all nodes between
            # this and the unique lca.
            mutter('More than 2 LCAs, falling back to all nodes for:'
                   ' %s, %s\n=> %s', self.a_key, self.b_key, cur_ancestors)
            cur_lcas = next_lcas
            while len(cur_lcas) > 1:
                cur_lcas = self.graph.find_lca(*cur_lcas)
            if len(cur_lcas) == 0:
                # No common base to find, use the full ancestry
                unique_lca = None
            else:
                unique_lca = list(cur_lcas)[0]
                if unique_lca == NULL_REVISION:
                    # find_lca will return a plain 'NULL_REVISION' rather
                    # than a key tuple when there is no common ancestor, we
                    # prefer to just use None, because it doesn't confuse
                    # _get_interesting_texts()
                    unique_lca = None
            parent_map.update(self._find_unique_parents(next_lcas,
                                                        unique_lca))
            break
        cur_ancestors = next_lcas
    return parent_map
def _find_unique_parents(self, tip_keys, base_key):
    """Find ancestors of tip that aren't ancestors of base.

    :param tip_keys: Nodes that are interesting
    :param base_key: Cull all ancestors of this node
    :return: The parent map for all revisions between tip_keys and
        base_key. base_key will be included. References to nodes outside of
        the ancestor set will also be removed.
    """
    # TODO: this would be simpler if find_unique_ancestors took a list
    #       of tips; internally it supports it, but it isn't a
    #       "backwards compatible" api change.
    if base_key is None:
        parent_map = dict(self.graph.iter_ancestry(tip_keys))
        # We remove NULL_REVISION because it isn't a proper tuple key, and
        # thus confuses things like _get_interesting_texts, and our logic
        # to add the texts into the memory weave.
        if NULL_REVISION in parent_map:
            parent_map.pop(NULL_REVISION)
    else:
        interesting = set()
        for tip in tip_keys:
            interesting.update(
                self.graph.find_unique_ancestors(tip, [base_key]))
        parent_map = self.graph.get_parent_map(interesting)
        parent_map[base_key] = ()
    culled_parent_map, child_map, tails = self._remove_external_references(
        parent_map)
    # Remove all the tails but base_key
    if base_key is not None:
        tails.remove(base_key)
        self._prune_tails(culled_parent_map, child_map, tails)
    # Now remove all the uninteresting 'linear' regions
    simple_map = _mod_graph.collapse_linear_regions(culled_parent_map)
    return simple_map
def _remove_external_references(parent_map):
    """Remove references that go outside of the parent map.

    :param parent_map: Something returned from Graph.get_parent_map(keys)
    :return: (filtered_parent_map, child_map, tails)
        filtered_parent_map is parent_map without external references
        child_map is the {parent_key: [child_keys]} mapping
        tails is a list of nodes that do not have any parents in the map
    """
    # TODO: The basic effect of this function seems more generic than
    #       _PlanMerge. But the specific details of building a child_map,
    #       and computing tails seems very specific to _PlanMerge.
    #       Still, should this be in Graph land?
    filtered_parent_map = {}
    child_map = {}
    tails = []
    # Use items() rather than the Python-2-only iteritems(): identical
    # behaviour on Python 2, and keeps the loop working on Python 3.
    for key, parent_keys in parent_map.items():
        # Keep only parents that are themselves in the map.
        culled_parent_keys = [p for p in parent_keys if p in parent_map]
        if not culled_parent_keys:
            # No in-map parents: this node is a tail.
            tails.append(key)
        for parent_key in culled_parent_keys:
            child_map.setdefault(parent_key, []).append(key)
        # TODO: Do we want to do this, it adds overhead for every node,
        #       just to say that the node has no children
        child_map.setdefault(key, [])
        filtered_parent_map[key] = culled_parent_keys
    return filtered_parent_map, child_map, tails
def _prune_tails(parent_map, child_map, tails_to_remove):
    """Remove tails from the parent map.

    This will remove the supplied revisions until no more children have 0
    parents.

    :param parent_map: A dict of {child: [parents]}, this dictionary will
        be modified in place.
    :param child_map: The reverse dict of parent_map ({parent: [children]})
        this dict will be modified
    :param tails_to_remove: A list of tips that should be removed,
        this list will be consumed
    :return: None, parent_map will be modified in place.
    """
    while tails_to_remove:
        # 'tail' rather than 'next', which would shadow the builtin.
        tail = tails_to_remove.pop()
        parent_map.pop(tail)
        for child in child_map.pop(tail):
            remaining_parents = parent_map[child]
            remaining_parents.remove(tail)
            if not remaining_parents:
                # The child just became a tail itself: queue it too.
                tails_to_remove.append(child)
def _get_interesting_texts(self, parent_map):
    """Return a dict of texts we are interested in.

    Note that the input is in key tuples, but the output is in plain
    revision ids.

    :param parent_map: The output from _find_recursive_lcas
    :return: A dict of {'revision_id':lines} as returned by
        _PlanMergeBase.get_lines()
    """
    all_revision_keys = set(parent_map)
    all_revision_keys.update([self.a_key, self.b_key])
    # Everything else is in 'keys' but get_lines is in 'revision_ids'
    return self.get_lines([key[-1] for key in all_revision_keys])
def _build_weave(self):
    """Build an in-memory weave holding every interesting text version."""
    from bzrlib import weave
    self._weave = weave.Weave(weave_name='in_memory_weave',
                              allow_reserved=True)
    parent_map = self._find_recursive_lcas()

    all_texts = self._get_interesting_texts(parent_map)

    # Note: Unfortunately, the order given by topo_sort will effect the
    # ordering resolution in the output. Specifically, if you add A then B,
    # then in the output text A lines will show up before B lines. And, of
    # course, topo_sort doesn't guarantee any real ordering.
    # So we use merge_sort, and add a fake node on the tip.
    # This ensures that left-hand parents will always be inserted into the
    # weave before right-hand parents.
    tip_key = self._key_prefix + (_mod_revision.CURRENT_REVISION,)
    parent_map[tip_key] = (self.a_key, self.b_key)

    for seq_num, key, depth, eom in reversed(tsort.merge_sort(parent_map,
                                                              tip_key)):
        if key == tip_key:
            # The fake tip only exists to fix the insertion order.
            continue
    # for key in tsort.topo_sort(parent_map):
        parent_keys = parent_map[key]
        revision_id = key[-1]
        parent_ids = [k[-1] for k in parent_keys]
        self._weave.add_lines(revision_id, parent_ids,
                              all_texts[revision_id])
def plan_merge(self):
    """Generate a 'plan' for merging the two revisions.

    This involves comparing their texts and determining the cause of
    differences.  If text A has a line and text B does not, then either the
    line was added to text A, or it was deleted from B.  Once the causes
    are combined, they are written out in the format described in
    VersionedFile.plan_merge
    """
    if self._head_key is None:
        # Neither side dominates: fall back to the in-memory weave.
        return self._weave.plan_merge(self.a_rev, self.b_rev)
    # There was a single head: every line of it is 'new' relative to the
    # dominated side.
    if self._head_key == self.a_key:
        plan = 'new-a'
    elif self._head_key == self.b_key:
        plan = 'new-b'
    else:
        raise AssertionError('There was an invalid head: %s != %s'
                             % (self.b_key, self._head_key))
    head_rev = self._head_key[-1]
    lines = self.get_lines([head_rev])[head_rev]
    return ((plan, line) for line in lines)
class _PlanLCAMerge(_PlanMergeBase):
1957
This merge algorithm differs from _PlanMerge in that:
1958
1. comparisons are done against LCAs only
1959
2. cases where a contested line is new versus one LCA but old versus
1960
another are marked as conflicts, by emitting the line as conflicted-a
1963
This is faster, and hopefully produces more useful output.
1966
def __init__(self, a_rev, b_rev, vf, key_prefix, graph):
    """Prepare an LCA-based merge, pre-computing diffs against each LCA."""
    _PlanMergeBase.__init__(self, a_rev, b_rev, vf, key_prefix)
    self.lcas = set()
    lcas = graph.find_lca(key_prefix + (a_rev,), key_prefix + (b_rev,))
    for lca in lcas:
        if lca == NULL_REVISION:
            # find_lca returns a bare NULL_REVISION, not a key tuple.
            self.lcas.add(lca)
        else:
            self.lcas.add(lca[-1])
    for lca in self.lcas:
        if _mod_revision.is_null(lca):
            lca_lines = []
        else:
            lca_lines = self.get_lines([lca])[lca]
        # Cache the matching blocks of each side against this LCA so
        # _get_matching_blocks can answer from the cache.
        matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_a,
                                                       lca_lines)
        blocks = list(matcher.get_matching_blocks())
        self._cached_matching_blocks[(a_rev, lca)] = blocks
        matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_b,
                                                       lca_lines)
        blocks = list(matcher.get_matching_blocks())
        self._cached_matching_blocks[(b_rev, lca)] = blocks
def _determine_status(self, revision_id, unique_line_numbers):
1990
"""Determines the status unique lines versus all lcas.
1992
Basically, determines why the line is unique to this revision.
1994
A line may be determined new, killed, or both.
1996
If a line is determined new, that means it was not present in at least
1997
one LCA, and is not present in the other merge revision.
1999
If a line is determined killed, that means the line was present in
2002
If a line is killed and new, this indicates that the two merge
2003
revisions contain differing conflict resolutions.
2004
:param revision_id: The id of the revision in which the lines are
2006
:param unique_line_numbers: The line numbers of unique lines.
2007
:return a tuple of (new_this, killed_other):
2011
unique_line_numbers = set(unique_line_numbers)
2012
for lca in self.lcas:
2013
blocks = self._get_matching_blocks(revision_id, lca)
2014
unique_vs_lca, _ignored = self._unique_lines(blocks)
2015
new.update(unique_line_numbers.intersection(unique_vs_lca))
2016
killed.update(unique_line_numbers.difference(unique_vs_lca))