            kwargs['show_base'] = self.show_base
        elif self.show_base:
            raise BzrError("Showing base is not supported for this"
                           " merge type. %s" % self.merge_type)
        if (not getattr(self.merge_type, 'supports_reverse_cherrypick', True)
            and not self.base_is_other_ancestor):
            raise errors.CannotReverseCherrypick()
        if self.merge_type.supports_cherrypick:
            kwargs['cherrypick'] = (not self.base_is_ancestor or
                not self.base_is_other_ancestor)
        if self._is_criss_cross and getattr(self.merge_type,
                                            'supports_lca_trees', False):
            kwargs['lca_trees'] = self._lca_trees
        return self.merge_type(pb=self._pb,
                               change_reporter=self.change_reporter,
                               **kwargs)
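
# Illustrative sketch (not part of bzrlib): the capability-probing pattern the
# code above uses when building merge_type kwargs, shown with a hypothetical
# merge-type class.  Optional features are advertised as class attributes and
# probed with getattr(), so a merge type that never declared a flag simply
# does not receive the corresponding keyword argument.
class FakeMergeType(object):
    supports_show_base = False
    supports_cherrypick = True
    # note: no 'supports_lca_trees' attribute at all

def build_kwargs(merge_type, show_base=False, cherrypick=True):
    kwargs = {}
    if merge_type.supports_show_base:
        kwargs['show_base'] = show_base
    if merge_type.supports_cherrypick:
        kwargs['cherrypick'] = cherrypick
    if getattr(merge_type, 'supports_lca_trees', False):
        kwargs['lca_trees'] = []
    return kwargs

assert build_kwargs(FakeMergeType) == {'cherrypick': True}
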
    def _do_merge_to(self, merge):
        if self.other_branch is not None:
            self.other_branch.update_references(self.this_branch)
        merge.do_merge()
        if self.recurse == 'down':
            for relpath, file_id in self.this_tree.iter_references():
                sub_tree = self.this_tree.get_nested_tree(file_id, relpath)
                other_revision = self.other_tree.get_reference_revision(
                    file_id, relpath)
                if other_revision == sub_tree.last_revision():
                    continue
                sub_merge = Merger(sub_tree.branch, this_tree=sub_tree)
                sub_merge.merge_type = self.merge_type
                other_branch = self.other_branch.reference_parent(file_id, relpath)
                sub_merge.set_other_revision(other_revision, other_branch)
                base_revision = self.base_tree.get_reference_revision(file_id)
                sub_merge.base_tree = \
                    sub_tree.branch.repository.revision_tree(base_revision)
                sub_merge.base_rev_id = base_revision
                sub_merge.do_merge()
    def do_merge(self):
        self.this_tree.lock_tree_write()
        try:
            if self.base_tree is not None:
                self.base_tree.lock_read()
            try:
                if self.other_tree is not None:
                    self.other_tree.lock_read()
                try:
                    merge = self.make_merger()
                    self._do_merge_to(merge)
                finally:
                    if self.other_tree is not None:
                        self.other_tree.unlock()
            finally:
                if self.base_tree is not None:
                    self.base_tree.unlock()
        finally:
            self.this_tree.unlock()
        if len(merge.cooked_conflicts) == 0:
            if not self.ignore_zero and not is_quiet():
                note("All changes applied successfully.")
        else:
            note("%d conflicts encountered." % len(merge.cooked_conflicts))
            result.append((file_id, changed, parents3, names3, executable3))
    def _entries_lca(self):
        """Gather data about files modified between multiple trees.

        This compares OTHER versus all LCA trees, and for interesting entries,
        it then compares with THIS and BASE.

        For the multi-valued entries, the format will be (BASE, [lca1, lca2])
        :return: [(file_id, changed, parents, names, executable)]
            file_id     Simple file_id of the entry
            changed     Boolean, True if the kind or contents changed
            parents     ((base, [parent_id, in, lcas]), parent_id_other,
                         parent_id_this)
            names       ((base, [name, in, lcas]), name_in_other, name_in_this)
            executable  ((base, [exec, in, lcas]), exec_in_other, exec_in_this)
        """
        if self.interesting_files is not None:
            lookup_trees = [self.this_tree, self.base_tree]
            lookup_trees.extend(self._lca_trees)
            # I think we should include the lca trees as well
            interesting_ids = self.other_tree.paths2ids(self.interesting_files,
                                                        lookup_trees)
        else:
            interesting_ids = self.interesting_ids
        result = []
        walker = _mod_tree.MultiWalker(self.other_tree, self._lca_trees)

        base_inventory = self.base_tree.inventory
        this_inventory = self.this_tree.inventory
        for path, file_id, other_ie, lca_values in walker.iter_all():
            # Is this modified at all from any of the other trees?
            if other_ie is None:
                other_ie = _none_entry
            if interesting_ids is not None and file_id not in interesting_ids:
                continue

            # If other_revision is found in any of the lcas, that means this
            # node is uninteresting. This is because when merging, if there are
            # multiple heads(), we have to create a new node. So if we didn't,
            # we know that the ancestry is linear, and that OTHER did not
            # modify anything
            # See doc/developers/lca_merge_resolution.txt for details
            other_revision = other_ie.revision
            if other_revision is not None:
                # We can't use this shortcut when other_revision is None,
                # because it may be None because things are WorkingTrees, and
                # not because it is *actually* None.
                is_unmodified = False
                for lca_path, ie in lca_values:
                    if ie is not None and ie.revision == other_revision:
                        is_unmodified = True
                        break
                if is_unmodified:
                    continue

            lca_entries = []
            for lca_path, lca_ie in lca_values:
                if lca_ie is None:
                    lca_entries.append(_none_entry)
                else:
                    lca_entries.append(lca_ie)

            if file_id in base_inventory:
                base_ie = base_inventory[file_id]
            else:
                base_ie = _none_entry

            if file_id in this_inventory:
                this_ie = this_inventory[file_id]
            else:
                this_ie = _none_entry

            lca_kinds = []
            lca_parent_ids = []
            lca_names = []
            lca_executable = []
            for lca_ie in lca_entries:
                lca_kinds.append(lca_ie.kind)
                lca_parent_ids.append(lca_ie.parent_id)
                lca_names.append(lca_ie.name)
                lca_executable.append(lca_ie.executable)

            kind_winner = self._lca_multi_way(
                (base_ie.kind, lca_kinds),
                other_ie.kind, this_ie.kind)
            parent_id_winner = self._lca_multi_way(
                (base_ie.parent_id, lca_parent_ids),
                other_ie.parent_id, this_ie.parent_id)
            name_winner = self._lca_multi_way(
                (base_ie.name, lca_names),
                other_ie.name, this_ie.name)

            content_changed = True
            if kind_winner == 'this':
                # No kind change in OTHER, see if there are *any* changes
                if other_ie.kind == 'directory':
                    if parent_id_winner == 'this' and name_winner == 'this':
                        # No change for this directory in OTHER, skip
                        continue
                    content_changed = False
                elif other_ie.kind is None or other_ie.kind == 'file':
                    def get_sha1(ie, tree):
                        if ie.kind != 'file':
                            return None
                        return tree.get_file_sha1(file_id)
                    base_sha1 = get_sha1(base_ie, self.base_tree)
                    lca_sha1s = [get_sha1(ie, tree) for ie, tree
                                 in zip(lca_entries, self._lca_trees)]
                    this_sha1 = get_sha1(this_ie, self.this_tree)
                    other_sha1 = get_sha1(other_ie, self.other_tree)
                    sha1_winner = self._lca_multi_way(
                        (base_sha1, lca_sha1s), other_sha1, this_sha1,
                        allow_overriding_lca=False)
                    exec_winner = self._lca_multi_way(
                        (base_ie.executable, lca_executable),
                        other_ie.executable, this_ie.executable)
                    if (parent_id_winner == 'this' and name_winner == 'this'
                        and sha1_winner == 'this' and exec_winner == 'this'):
                        # No kind, parent, name, exec, or content change for
                        # OTHER, so this node is not considered interesting
                        continue
                    if sha1_winner == 'this':
                        content_changed = False
                elif other_ie.kind == 'symlink':
                    def get_target(ie, tree):
                        if ie.kind != 'symlink':
                            return None
                        return tree.get_symlink_target(file_id)
                    base_target = get_target(base_ie, self.base_tree)
                    lca_targets = [get_target(ie, tree) for ie, tree
                                   in zip(lca_entries, self._lca_trees)]
                    this_target = get_target(this_ie, self.this_tree)
                    other_target = get_target(other_ie, self.other_tree)
                    target_winner = self._lca_multi_way(
                        (base_target, lca_targets),
                        other_target, this_target)
                    if (parent_id_winner == 'this' and name_winner == 'this'
                        and target_winner == 'this'):
                        # No kind, parent, name, or symlink target change
                        continue
                    if target_winner == 'this':
                        content_changed = False
                elif other_ie.kind == 'tree-reference':
                    # The 'changed' information seems to be handled at a higher
                    # level. At least, _entries3 returns False for content
                    # changed, even when at a new revision_id.
                    content_changed = False
                    if (parent_id_winner == 'this' and name_winner == 'this'):
                        # Nothing interesting
                        continue
                else:
                    raise AssertionError('unhandled kind: %s' % other_ie.kind)
                # XXX: We need to handle kind == 'symlink'

            # If we have gotten this far, that means something has changed
            result.append((file_id, content_changed,
                           ((base_ie.parent_id, lca_parent_ids),
                            other_ie.parent_id, this_ie.parent_id),
                           ((base_ie.name, lca_names),
                            other_ie.name, this_ie.name),
                           ((base_ie.executable, lca_executable),
                            other_ie.executable, this_ie.executable)
                          ))
        return result
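
# Illustrative sketch (not the exact bzrlib rule): a simplified version of the
# kind of per-attribute decision _lca_multi_way makes on the
# ((base, [lca values]), other, this) triples gathered above.  If OTHER agrees
# with THIS nothing changed; if one side still matches BASE and every LCA,
# only the other side changed; otherwise the attribute is contested.
def simple_multi_way(bases, other, this):
    base_val, lca_vals = bases
    if other == this:
        return 'this'
    if other == base_val and all(v == other for v in lca_vals):
        return 'this'
    if this == base_val and all(v == this for v in lca_vals):
        return 'other'
    return 'conflict'

# OTHER renamed the file, THIS left it alone: OTHER's name wins.
assert simple_multi_way(('a.txt', ['a.txt', 'a.txt']), 'b.txt', 'a.txt') == 'other'
# Both sides renamed it differently: conflict.
assert simple_multi_way(('a.txt', ['a.txt', 'a.txt']), 'b.txt', 'c.txt') == 'conflict'
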
    def fix_root(self):
        try:
            self.tt.final_kind(self.tt.root)
        except NoSuchFile:
            self.tt.cancel_deletion(self.tt.root)
        if self.tt.final_file_id(self.tt.root) is None:
            self.tt.version_file(self.tt.tree_file_id(self.tt.root),
                                 self.tt.root)
        other_root_file_id = self.other_tree.get_root_id()
        if other_root_file_id is None:
            return
        other_root = self.tt.trans_id_file_id(other_root_file_id)
        if other_root == self.tt.root:
            return
            yield status_a(revision, text)
        for revision, text in annotated_b[b_cur:bi]:
            yield status_b(revision, text)
        # and now the matched section
        a_cur = ai + l
        b_cur = bi + l
        for text_a in plain_a[ai:a_cur]:
            yield "unchanged", text_a
class _PlanMergeBase(object):

    def __init__(self, a_rev, b_rev, vf, key_prefix):
        """Constructor.

        :param a_rev: Revision-id of one revision to merge
        :param b_rev: Revision-id of the other revision to merge
        :param vf: A VersionedFiles containing both revisions
        :param key_prefix: A prefix for accessing keys in vf, typically
            (file_id,).
        """
        self.a_rev = a_rev
        self.b_rev = b_rev
        self.vf = vf
        self._last_lines = None
        self._last_lines_revision_id = None
        self._cached_matching_blocks = {}
        self._key_prefix = key_prefix
        self._precache_tip_lines()

    def _precache_tip_lines(self):
        lines = self.get_lines([self.a_rev, self.b_rev])
        self.lines_a = lines[self.a_rev]
        self.lines_b = lines[self.b_rev]

    def get_lines(self, revisions):
        """Get lines for revisions from the backing VersionedFiles.

        :raises RevisionNotPresent: on absent texts.
        """
        keys = [(self._key_prefix + (rev,)) for rev in revisions]
        result = {}
        for record in self.vf.get_record_stream(keys, 'unordered', True):
            if record.storage_kind == 'absent':
                raise errors.RevisionNotPresent(record.key, self.vf)
            result[record.key[-1]] = osutils.chunks_to_lines(
                record.get_bytes_as('chunked'))
        return result
    def plan_merge(self):
        """Generate a 'plan' for merging the two revisions.

        This involves comparing their texts and determining the cause of
        differences. If text A has a line and text B does not, then either the
        line was added to text A, or it was deleted from B. Once the causes
        are combined, they are written out in the format described in
        VersionedFile.plan_merge
        """
        blocks = self._get_matching_blocks(self.a_rev, self.b_rev)
        unique_a, unique_b = self._unique_lines(blocks)
        new_a, killed_b = self._determine_status(self.a_rev, unique_a)
        new_b, killed_a = self._determine_status(self.b_rev, unique_b)
        return self._iter_plan(blocks, new_a, killed_b, new_b, killed_a)
    def _iter_plan(self, blocks, new_a, killed_b, new_b, killed_a):
        last_i = 0
        last_j = 0
        for i, j, n in blocks:
            for a_index in range(last_i, i):
                if a_index in new_a:
                    if a_index in killed_b:
                        yield 'conflicted-a', self.lines_a[a_index]
                    else:
                        yield 'new-a', self.lines_a[a_index]
                else:
                    yield 'killed-b', self.lines_a[a_index]
            for b_index in range(last_j, j):
                if b_index in new_b:
                    if b_index in killed_a:
                        yield 'conflicted-b', self.lines_b[b_index]
                    else:
                        yield 'new-b', self.lines_b[b_index]
                else:
                    yield 'killed-a', self.lines_b[b_index]
            # handle common lines
            for a_index in range(i, i+n):
                yield 'unchanged', self.lines_a[a_index]
            last_i = i + n
            last_j = j + n
    def _get_matching_blocks(self, left_revision, right_revision):
        """Return a description of which sections of two revisions match.

        See SequenceMatcher.get_matching_blocks
        """
        cached = self._cached_matching_blocks.get((left_revision,
                                                   right_revision))
        if cached is not None:
            return cached
        if self._last_lines_revision_id == left_revision:
            left_lines = self._last_lines
            right_lines = self.get_lines([right_revision])[right_revision]
        else:
            lines = self.get_lines([left_revision, right_revision])
            left_lines = lines[left_revision]
            right_lines = lines[right_revision]
        self._last_lines = right_lines
        self._last_lines_revision_id = right_revision
        matcher = patiencediff.PatienceSequenceMatcher(None, left_lines,
                                                       right_lines)
        return matcher.get_matching_blocks()
    def _unique_lines(self, matching_blocks):
        """Analyse matching_blocks to determine which lines are unique

        :return: a tuple of (unique_left, unique_right), where the values are
            sets of line numbers of unique lines.
        """
        last_i = 0
        last_j = 0
        unique_left = []
        unique_right = []
        for i, j, n in matching_blocks:
            unique_left.extend(range(last_i, i))
            unique_right.extend(range(last_j, j))
            last_i = i + n
            last_j = j + n
        return unique_left, unique_right
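
# Illustrative sketch (not from bzrlib): matching blocks and unique-line
# extraction as used by plan_merge above, with difflib.SequenceMatcher
# standing in for patiencediff.PatienceSequenceMatcher.  Lines unique to A
# become candidate 'new-a'/'killed-b' entries, lines unique to B become
# candidate 'new-b'/'killed-a' entries, and matched lines are 'unchanged'.
import difflib

lines_a = ['x\n', 'common\n', 'y\n']
lines_b = ['common\n', 'z\n']
blocks = difflib.SequenceMatcher(None, lines_a, lines_b).get_matching_blocks()

unique_a, unique_b = [], []
last_i = last_j = 0
for i, j, n in blocks:
    unique_a.extend(range(last_i, i))
    unique_b.extend(range(last_j, j))
    last_i, last_j = i + n, j + n

assert unique_a == [0, 2]   # 'x\n' and 'y\n' have no match in B
assert unique_b == [1]      # 'z\n' has no match in A
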
    @staticmethod
    def _subtract_plans(old_plan, new_plan):
        """Remove changes from new_plan that came from old_plan.

        It is assumed that the difference between the old_plan and new_plan
        is their choice of 'b' text.

        All lines from new_plan that differ from old_plan are emitted
        verbatim. All lines from new_plan that match old_plan but are
        not about the 'b' revision are emitted verbatim.

        Lines that match and are about the 'b' revision are the lines we
        don't want, so we convert 'killed-b' -> 'unchanged', and 'new-b'
        is skipped entirely.
        """
        matcher = patiencediff.PatienceSequenceMatcher(None, old_plan,
                                                       new_plan)
        last_j = 0
        for i, j, n in matcher.get_matching_blocks():
            for jj in range(last_j, j):
                yield new_plan[jj]
            for jj in range(j, j+n):
                plan_line = new_plan[jj]
                if plan_line[0] == 'new-b':
                    pass
                elif plan_line[0] == 'killed-b':
                    yield 'unchanged', plan_line[1]
                else:
                    yield plan_line
            last_j = j + n
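
# Illustrative sketch (not from bzrlib): the effect of the subtraction rule
# above on two small plans that differ only in their 'b' text.  This is a
# simplified, position-by-position walk; the real method aligns the two plans
# with a sequence matcher rather than zip().  Where the plans agree, 'new-b'
# entries are dropped and 'killed-b' entries become 'unchanged'; entries
# unique to new_plan pass through verbatim.
old_plan = [('unchanged', 'a\n'), ('killed-b', 'b\n'), ('new-b', 'c\n')]
new_plan = [('unchanged', 'a\n'), ('killed-b', 'b\n'), ('new-b', 'd\n')]

subtracted = []
for old_line, new_line in zip(old_plan, new_plan):
    if new_line != old_line:
        subtracted.append(new_line)          # differs: emit verbatim
    elif new_line[0] == 'new-b':
        pass                                 # matched 'new-b': skip
    elif new_line[0] == 'killed-b':
        subtracted.append(('unchanged', new_line[1]))
    else:
        subtracted.append(new_line)          # matched, not about 'b': keep

assert subtracted == [('unchanged', 'a\n'),
                      ('unchanged', 'b\n'),
                      ('new-b', 'd\n')]
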
class _PlanMerge(_PlanMergeBase):
    """Plan an annotate merge using on-the-fly annotation"""

    def __init__(self, a_rev, b_rev, vf, key_prefix):
        super(_PlanMerge, self).__init__(a_rev, b_rev, vf, key_prefix)
        self.a_key = self._key_prefix + (self.a_rev,)
        self.b_key = self._key_prefix + (self.b_rev,)
        self.graph = Graph(self.vf)
        heads = self.graph.heads((self.a_key, self.b_key))
        if len(heads) == 1:
            # one side dominates, so we can just return its values, yay for
            # simplicity.
            # Ideally we would know that before we get this far
            self._head_key = heads.pop()
            if self._head_key == self.a_key:
                other = b_rev
            else:
                other = a_rev
            mutter('found dominating revision for %s\n%s > %s', self.vf,
                   self._head_key[-1], other)
            self._weave = None
        else:
            self._head_key = None
            self._build_weave()
    def _precache_tip_lines(self):
        # Turn this into a no-op, because we will do this later
        pass
    def _find_recursive_lcas(self):
        """Find all the ancestors back to a unique lca"""
        cur_ancestors = (self.a_key, self.b_key)
        # graph.find_lca(uncommon, keys) now returns plain NULL_REVISION,
        # rather than a key tuple. We will just map that directly to no common
        # ancestors.
        parent_map = {}
        while True:
            next_lcas = self.graph.find_lca(*cur_ancestors)
            # Map a plain NULL_REVISION to a simple no-ancestors
            if next_lcas == set([NULL_REVISION]):
                next_lcas = ()
            # Order the lca's based on when they were merged into the tip
            # While the actual merge portion of weave merge uses a set() of
            # active revisions, the order of insertion *does* effect the
            # implicit ordering of the texts.
            for rev_key in cur_ancestors:
                ordered_parents = tuple(self.graph.find_merge_order(rev_key,
                                                                    next_lcas))
                parent_map[rev_key] = ordered_parents
            if len(next_lcas) == 0:
                break
            elif len(next_lcas) == 1:
                parent_map[list(next_lcas)[0]] = ()
                break
            elif len(next_lcas) > 2:
                # More than 2 lca's, fall back to grabbing all nodes between
                # this and the unique lca.
                mutter('More than 2 LCAs, falling back to all nodes for:'
                       ' %s, %s\n=> %s', self.a_key, self.b_key, cur_ancestors)
                cur_lcas = next_lcas
                while len(cur_lcas) > 1:
                    cur_lcas = self.graph.find_lca(*cur_lcas)
                if len(cur_lcas) == 0:
                    # No common base to find, use the full ancestry
                    unique_lca = None
                else:
                    unique_lca = list(cur_lcas)[0]
                    if unique_lca == NULL_REVISION:
                        # find_lca will return a plain 'NULL_REVISION' rather
                        # than a key tuple when there is no common ancestor, we
                        # prefer to just use None, because it doesn't confuse
                        # _get_interesting_texts()
                        unique_lca = None
                parent_map.update(self._find_unique_parents(next_lcas,
                                                            unique_lca))
                break
            cur_ancestors = next_lcas
        return parent_map
1800
"""Find ancestors of tip that aren't ancestors of base.
1802
:param tip_keys: Nodes that are interesting
1803
:param base_key: Cull all ancestors of this node
1804
:return: The parent map for all revisions between tip_keys and
1805
base_key. base_key will be included. References to nodes outside of
1806
the ancestor set will also be removed.
1808
# TODO: this would be simpler if find_unique_ancestors took a list
1809
# instead of a single tip, internally it supports it, but it
1810
# isn't a "backwards compatible" api change.
1811
if base_key is None:
1812
parent_map = dict(self.graph.iter_ancestry(tip_keys))
1813
# We remove NULL_REVISION because it isn't a proper tuple key, and
1814
# thus confuses things like _get_interesting_texts, and our logic
1815
# to add the texts into the memory weave.
1816
if NULL_REVISION in parent_map:
1817
parent_map.pop(NULL_REVISION)
1820
for tip in tip_keys:
1822
self.graph.find_unique_ancestors(tip, [base_key]))
1823
parent_map = self.graph.get_parent_map(interesting)
1824
parent_map[base_key] = ()
1825
culled_parent_map, child_map, tails = self._remove_external_references(
1827
# Remove all the tails but base_key
1828
if base_key is not None:
1829
tails.remove(base_key)
1830
self._prune_tails(culled_parent_map, child_map, tails)
1831
# Now remove all the uninteresting 'linear' regions
1832
simple_map = _mod_graph.collapse_linear_regions(culled_parent_map)
    @staticmethod
    def _remove_external_references(parent_map):
        """Remove references that go outside of the parent map.

        :param parent_map: Something returned from Graph.get_parent_map(keys)
        :return: (filtered_parent_map, child_map, tails)
            filtered_parent_map is parent_map without external references
            child_map is the {parent_key: [child_keys]} mapping
            tails is a list of nodes that do not have any parents in the map
        """
        # TODO: The basic effect of this function seems more generic than
        #       _PlanMerge. But the specific details of building a child_map,
        #       and computing tails seems very specific to _PlanMerge.
        #       Still, should this be in Graph land?
        filtered_parent_map = {}
        child_map = {}
        tails = []
        for key, parent_keys in parent_map.iteritems():
            culled_parent_keys = [p for p in parent_keys if p in parent_map]
            if not culled_parent_keys:
                tails.append(key)
            for parent_key in culled_parent_keys:
                child_map.setdefault(parent_key, []).append(key)
            # TODO: Do we want to do this, it adds overhead for every node,
            #       just to say that the node has no children
            child_map.setdefault(key, [])
            filtered_parent_map[key] = culled_parent_keys
        return filtered_parent_map, child_map, tails
    @staticmethod
    def _prune_tails(parent_map, child_map, tails_to_remove):
        """Remove tails from the parent map.

        This will remove the supplied revisions until no more children have 0
        parents.

        :param parent_map: A dict of {child: [parents]}, this dictionary will
            be modified in place.
        :param tails_to_remove: A list of tips that should be removed,
            this list will be consumed
        :param child_map: The reverse dict of parent_map ({parent: [children]})
            this dict will be modified
        :return: None, parent_map will be modified in place.
        """
        while tails_to_remove:
            next = tails_to_remove.pop()
            parent_map.pop(next)
            children = child_map.pop(next)
            for child in children:
                child_parents = parent_map[child]
                child_parents.remove(next)
                if len(child_parents) == 0:
                    tails_to_remove.append(child)
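
# Illustrative sketch (not from bzrlib; sample data is made up): the same
# pruning loop applied to a tiny graph.  Removing the unwanted tail 'base'
# leaves 'mid' with no remaining parents, so 'mid' is pruned on the next
# pass, while 'tip' survives because its other parent 'keep' is still
# present.
parent_map = {'tip': ['mid', 'keep'], 'mid': ['base'], 'keep': [], 'base': []}
child_map = {'base': ['mid'], 'mid': ['tip'], 'keep': ['tip'], 'tip': []}
tails_to_remove = ['base']

while tails_to_remove:
    next_key = tails_to_remove.pop()
    parent_map.pop(next_key)
    for child in child_map.pop(next_key):
        child_parents = parent_map[child]
        child_parents.remove(next_key)
        if len(child_parents) == 0:
            tails_to_remove.append(child)

assert parent_map == {'tip': ['keep'], 'keep': []}
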
    def _get_interesting_texts(self, parent_map):
        """Return a dict of texts we are interested in.

        Note that the input is in key tuples, but the output is in plain
        revision ids.

        :param parent_map: The output from _find_recursive_lcas
        :return: A dict of {'revision_id':lines} as returned by
            _PlanMergeBase.get_lines()
        """
        all_revision_keys = set(parent_map)
        all_revision_keys.add(self.a_key)
        all_revision_keys.add(self.b_key)

        # Everything else is in 'keys' but get_lines is in 'revision_ids'
        all_texts = self.get_lines([k[-1] for k in all_revision_keys])
        return all_texts
    def _build_weave(self):
        from bzrlib import weave
        self._weave = weave.Weave(weave_name='in_memory_weave',
                                  allow_reserved=True)
        parent_map = self._find_recursive_lcas()

        all_texts = self._get_interesting_texts(parent_map)

        # Note: Unfortunately, the order given by topo_sort will effect the
        # ordering resolution in the output. Specifically, if you add A then B,
        # then in the output text A lines will show up before B lines. And, of
        # course, topo_sort doesn't guarantee any real ordering.
        # So we use merge_sort, and add a fake node on the tip.
        # This ensures that left-hand parents will always be inserted into the
        # weave before right-hand parents.
        tip_key = self._key_prefix + (_mod_revision.CURRENT_REVISION,)
        parent_map[tip_key] = (self.a_key, self.b_key)

        for seq_num, key, depth, eom in reversed(tsort.merge_sort(parent_map,
                                                                  tip_key)):
            if key == tip_key:
                continue
        # for key in tsort.topo_sort(parent_map):
            parent_keys = parent_map[key]
            revision_id = key[-1]
            parent_ids = [k[-1] for k in parent_keys]
            self._weave.add_lines(revision_id, parent_ids,
                                  all_texts[revision_id])
    def plan_merge(self):
        """Generate a 'plan' for merging the two revisions.

        This involves comparing their texts and determining the cause of
        differences. If text A has a line and text B does not, then either the
        line was added to text A, or it was deleted from B. Once the causes
        are combined, they are written out in the format described in
        VersionedFile.plan_merge
        """
        if self._head_key is not None: # There was a single head
            if self._head_key == self.a_key:
                plan = 'new-a'
            else:
                if self._head_key != self.b_key:
                    raise AssertionError('There was an invalid head: %s != %s'
                                         % (self.b_key, self._head_key))
                plan = 'new-b'
            head_rev = self._head_key[-1]
            lines = self.get_lines([head_rev])[head_rev]
            return ((plan, line) for line in lines)
        return self._weave.plan_merge(self.a_rev, self.b_rev)
class _PlanLCAMerge(_PlanMergeBase):
    """
    This merge algorithm differs from _PlanMerge in that:
    1. comparisons are done against LCAs only
    2. cases where a contested line is new versus one LCA but old versus
       another are marked as conflicts, by emitting the line as conflicted-a
       or conflicted-b.

    This is faster, and hopefully produces more useful output.
    """

    def __init__(self, a_rev, b_rev, vf, key_prefix, graph):
        _PlanMergeBase.__init__(self, a_rev, b_rev, vf, key_prefix)
        lcas = graph.find_lca(key_prefix + (a_rev,), key_prefix + (b_rev,))
        self.lcas = set()
        for lca in lcas:
            if lca == NULL_REVISION:
                self.lcas.add(lca)
            else:
                self.lcas.add(lca[-1])
        for lca in self.lcas:
            if _mod_revision.is_null(lca):
                lca_lines = []
            else:
                lca_lines = self.get_lines([lca])[lca]
            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_a,
                                                           lca_lines)
            blocks = list(matcher.get_matching_blocks())
            self._cached_matching_blocks[(a_rev, lca)] = blocks
            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_b,
                                                           lca_lines)
            blocks = list(matcher.get_matching_blocks())
            self._cached_matching_blocks[(b_rev, lca)] = blocks
    def _determine_status(self, revision_id, unique_line_numbers):
        """Determines the status of unique lines versus all lcas.

        Basically, determines why the line is unique to this revision.

        A line may be determined new, killed, or both.

        If a line is determined new, that means it was not present in at least
        one LCA, and is not present in the other merge revision.

        If a line is determined killed, that means the line was present in
        at least one LCA.

        If a line is killed and new, this indicates that the two merge
        revisions contain differing conflict resolutions.

        :param revision_id: The id of the revision in which the lines are
            unique
        :param unique_line_numbers: The line numbers of unique lines.
        :return: a tuple of (new_this, killed_other)
        """
        new = set()
        killed = set()
        unique_line_numbers = set(unique_line_numbers)
        for lca in self.lcas:
            blocks = self._get_matching_blocks(revision_id, lca)
            unique_vs_lca, _ignored = self._unique_lines(blocks)
            new.update(unique_line_numbers.intersection(unique_vs_lca))
            killed.update(unique_line_numbers.difference(unique_vs_lca))
        return new, killed
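
# Illustrative sketch (not from bzrlib; sample data is made up): the new/killed
# split described in the docstring above, on tiny texts, with
# difflib.SequenceMatcher standing in for patiencediff.  Line 1 of THIS
# ('added\n') is absent from both LCAs, so it is classified "new"; it would
# only be "killed" if some LCA already contained it.
import difflib

def unique_to_left(left, right):
    """Return indices of lines in 'left' that have no match in 'right'."""
    matcher = difflib.SequenceMatcher(None, left, right)
    matched = set()
    for i, _, n in matcher.get_matching_blocks():
        matched.update(range(i, i + n))
    return set(range(len(left))) - matched

this_lines = ['a\n', 'added\n', 'b\n']
lca1 = ['a\n', 'b\n']
lca2 = ['a\n', 'old\n', 'b\n']

unique_vs_other = {1}              # assume line 1 is unique versus OTHER
new, killed = set(), set()
for lca_lines in (lca1, lca2):
    unique_vs_lca = unique_to_left(this_lines, lca_lines)
    new.update(unique_vs_other & unique_vs_lca)
    killed.update(unique_vs_other - unique_vs_lca)
assert new == {1} and killed == set()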