            kwargs['show_base'] = self.show_base
        elif self.show_base:
            raise BzrError("Showing base is not supported for this"
                           " merge type. %s" % self.merge_type)
        if (not getattr(self.merge_type, 'supports_reverse_cherrypick', True)
            and not self.base_is_other_ancestor):
            raise errors.CannotReverseCherrypick()
        if self.merge_type.supports_cherrypick:
            kwargs['cherrypick'] = (not self.base_is_ancestor or
                                    not self.base_is_other_ancestor)
        if self._is_criss_cross and getattr(self.merge_type,
                                            'supports_lca_trees', False):
            kwargs['lca_trees'] = self._lca_trees
        return self.merge_type(pb=self._pb,
                               change_reporter=self.change_reporter,
                               **kwargs)
    def _do_merge_to(self, merge):
        if self.other_branch is not None:
            self.other_branch.update_references(self.this_branch)
        merge.do_merge()
        if self.recurse == 'down':
            for relpath, file_id in self.this_tree.iter_references():
                sub_tree = self.this_tree.get_nested_tree(file_id, relpath)
                other_revision = self.other_tree.get_reference_revision(
                    file_id, relpath)
                if other_revision == sub_tree.last_revision():
                    continue
                sub_merge = Merger(sub_tree.branch, this_tree=sub_tree)
                sub_merge.merge_type = self.merge_type
                other_branch = self.other_branch.reference_parent(file_id, relpath)
                sub_merge.set_other_revision(other_revision, other_branch)
                base_revision = self.base_tree.get_reference_revision(file_id)
                sub_merge.base_tree = \
                    sub_tree.branch.repository.revision_tree(base_revision)
                sub_merge.base_rev_id = base_revision
                sub_merge.do_merge()
    def do_merge(self):
        self.this_tree.lock_tree_write()
        try:
            if self.base_tree is not None:
                self.base_tree.lock_read()
            try:
                if self.other_tree is not None:
                    self.other_tree.lock_read()
                try:
                    merge = self.make_merger()
                    self._do_merge_to(merge)
                finally:
                    if self.other_tree is not None:
                        self.other_tree.unlock()
            finally:
                if self.base_tree is not None:
                    self.base_tree.unlock()
        finally:
            self.this_tree.unlock()
        if len(merge.cooked_conflicts) == 0:
            if not self.ignore_zero and not is_quiet():
                note("All changes applied successfully.")
        else:
            note("%d conflicts encountered." % len(merge.cooked_conflicts))
        return len(merge.cooked_conflicts)
            result.append((file_id, changed, parents3, names3, executable3))
        return result
    def _entries_lca(self):
        """Gather data about files modified between multiple trees.

        This compares OTHER versus all LCA trees, and for interesting entries,
        it then compares with THIS and BASE.

        For the multi-valued entries, the format will be (BASE, [lca1, lca2])

        :return: [(file_id, changed, parents, names, executable)]
            file_id     Simple file_id of the entry
            changed     Boolean, True if the kind or contents changed
                        else False
            parents     ((base, [parent_id, in, lcas]), parent_id_other,
                         parent_id_this)
            names       ((base, [name, in, lcas]), name_in_other, name_in_this)
            executable  ((base, [exec, in, lcas]), exec_in_other, exec_in_this)
        """
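        # Illustrative example (not from the bzrlib sources): with two LCAs,
        # a single entry in the returned list might look like
        #   ('file-id', True,
        #    (('dir-id', ['dir-id', 'dir-id']), 'dir-id', 'dir-id'),
        #    (('foo', ['foo', 'foo.old']), 'foo.new', 'foo'),
        #    ((False, [False, False]), False, False))
        # i.e. each multi-valued field pairs the BASE value with the per-LCA
        # values, followed by the OTHER and THIS values.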
        if self.interesting_files is not None:
            lookup_trees = [self.this_tree, self.base_tree]
            lookup_trees.extend(self._lca_trees)
            # I think we should include the lca trees as well
            interesting_ids = self.other_tree.paths2ids(self.interesting_files,
                                                        lookup_trees)
        else:
            interesting_ids = self.interesting_ids
        result = []
        walker = _mod_tree.MultiWalker(self.other_tree, self._lca_trees)

        base_inventory = self.base_tree.inventory
        this_inventory = self.this_tree.inventory
        for path, file_id, other_ie, lca_values in walker.iter_all():
            # Is this modified at all from any of the other trees?
            if other_ie is None:
                other_ie = _none_entry
            if interesting_ids is not None and file_id not in interesting_ids:
                continue

            # If other_revision is found in any of the lcas, that means this
            # node is uninteresting. This is because when merging, if there are
            # multiple heads(), we have to create a new node. So if we didn't,
            # we know that the ancestry is linear, and that OTHER did not
            # modify anything.
            # See doc/developers/lca_merge_resolution.txt for details
            other_revision = other_ie.revision
            if other_revision is not None:
                # We can't use this shortcut when other_revision is None,
                # because it may be None because things are WorkingTrees, and
                # not because it is *actually* None.
                is_unmodified = False
                for lca_path, ie in lca_values:
                    if ie is not None and ie.revision == other_revision:
                        is_unmodified = True
                        break
                if is_unmodified:
                    continue

            lca_entries = []
            for lca_path, lca_ie in lca_values:
                if lca_ie is None:
                    lca_entries.append(_none_entry)
                else:
                    lca_entries.append(lca_ie)

            if file_id in base_inventory:
                base_ie = base_inventory[file_id]
            else:
                base_ie = _none_entry

            if file_id in this_inventory:
                this_ie = this_inventory[file_id]
            else:
                this_ie = _none_entry

            lca_kinds = []
            lca_parent_ids = []
            lca_names = []
            lca_executable = []
            for lca_ie in lca_entries:
                lca_kinds.append(lca_ie.kind)
                lca_parent_ids.append(lca_ie.parent_id)
                lca_names.append(lca_ie.name)
                lca_executable.append(lca_ie.executable)

            kind_winner = self._lca_multi_way(
                (base_ie.kind, lca_kinds),
                other_ie.kind, this_ie.kind)
            parent_id_winner = self._lca_multi_way(
                (base_ie.parent_id, lca_parent_ids),
                other_ie.parent_id, this_ie.parent_id)
            name_winner = self._lca_multi_way(
                (base_ie.name, lca_names),
                other_ie.name, this_ie.name)

            content_changed = True
            if kind_winner == 'this':
                # No kind change in OTHER, see if there are *any* changes
                if other_ie.kind == 'directory':
                    if parent_id_winner == 'this' and name_winner == 'this':
                        # No change for this directory in OTHER, skip
                        continue
                    content_changed = False
                elif other_ie.kind is None or other_ie.kind == 'file':
                    def get_sha1(ie, tree):
                        if ie.kind != 'file':
                            return None
                        return tree.get_file_sha1(file_id)
                    base_sha1 = get_sha1(base_ie, self.base_tree)
                    lca_sha1s = [get_sha1(ie, tree) for ie, tree
                                 in zip(lca_entries, self._lca_trees)]
                    this_sha1 = get_sha1(this_ie, self.this_tree)
                    other_sha1 = get_sha1(other_ie, self.other_tree)
                    sha1_winner = self._lca_multi_way(
                        (base_sha1, lca_sha1s), other_sha1, this_sha1,
                        allow_overriding_lca=False)
                    exec_winner = self._lca_multi_way(
                        (base_ie.executable, lca_executable),
                        other_ie.executable, this_ie.executable)
                    if (parent_id_winner == 'this' and name_winner == 'this'
                        and sha1_winner == 'this' and exec_winner == 'this'):
                        # No kind, parent, name, exec, or content change for
                        # OTHER, so this node is not considered interesting
                        continue
                    if sha1_winner == 'this':
                        content_changed = False
                elif other_ie.kind == 'symlink':
                    def get_target(ie, tree):
                        if ie.kind != 'symlink':
                            return None
                        return tree.get_symlink_target(file_id)
                    base_target = get_target(base_ie, self.base_tree)
                    lca_targets = [get_target(ie, tree) for ie, tree
                                   in zip(lca_entries, self._lca_trees)]
                    this_target = get_target(this_ie, self.this_tree)
                    other_target = get_target(other_ie, self.other_tree)
                    target_winner = self._lca_multi_way(
                        (base_target, lca_targets),
                        other_target, this_target)
                    if (parent_id_winner == 'this' and name_winner == 'this'
                        and target_winner == 'this'):
                        # No kind, parent, name, or symlink target change
                        continue
                    if target_winner == 'this':
                        content_changed = False
                elif other_ie.kind == 'tree-reference':
                    # The 'changed' information seems to be handled at a higher
                    # level. At least, _entries3 returns False for content
                    # changed, even when at a new revision_id.
                    content_changed = False
                    if (parent_id_winner == 'this' and name_winner == 'this'):
                        # Nothing interesting
                        continue
                else:
                    raise AssertionError('unhandled kind: %s' % other_ie.kind)
                # XXX: We need to handle kind == 'symlink'

            # If we have gotten this far, that means something has changed
            result.append((file_id, content_changed,
                           ((base_ie.parent_id, lca_parent_ids),
                            other_ie.parent_id, this_ie.parent_id),
                           ((base_ie.name, lca_names),
                            other_ie.name, this_ie.name),
                           ((base_ie.executable, lca_executable),
                            other_ie.executable, this_ie.executable)
                          ))
        return result
    def fix_root(self):
        try:
            self.tt.final_kind(self.tt.root)
        except NoSuchFile:
            self.tt.cancel_deletion(self.tt.root)
        if self.tt.final_file_id(self.tt.root) is None:
            self.tt.version_file(self.tt.tree_file_id(self.tt.root),
                                 self.tt.root)
        other_root_file_id = self.other_tree.get_root_id()
        if other_root_file_id is None:
            return
        other_root = self.tt.trans_id_file_id(other_root_file_id)
        if other_root == self.tt.root:
            return
            yield status_a(revision, text)
        for revision, text in annotated_b[b_cur:bi]:
            yield status_b(revision, text)
        # and now the matched section
        for text_a in plain_a[ai:a_cur]:
            yield "unchanged", text_a
class _PlanMergeBase(object):

    def __init__(self, a_rev, b_rev, vf, key_prefix):
        """Constructor.

        :param a_rev: Revision-id of one revision to merge
        :param b_rev: Revision-id of the other revision to merge
        :param vf: A VersionedFiles containing both revisions
        :param key_prefix: A prefix for accessing keys in vf, typically
            (file_id,).
        """
        self.a_rev = a_rev
        self.b_rev = b_rev
        self.vf = vf
        self._last_lines = None
        self._last_lines_revision_id = None
        self._cached_matching_blocks = {}
        self._key_prefix = key_prefix
        self._precache_tip_lines()

    def _precache_tip_lines(self):
        lines = self.get_lines([self.a_rev, self.b_rev])
        self.lines_a = lines[self.a_rev]
        self.lines_b = lines[self.b_rev]
    def get_lines(self, revisions):
        """Get lines for revisions from the backing VersionedFiles.

        :raises RevisionNotPresent: on absent texts.
        """
        keys = [(self._key_prefix + (rev,)) for rev in revisions]
        result = {}
        for record in self.vf.get_record_stream(keys, 'unordered', True):
            if record.storage_kind == 'absent':
                raise errors.RevisionNotPresent(record.key, self.vf)
            result[record.key[-1]] = osutils.chunks_to_lines(
                record.get_bytes_as('chunked'))
        return result
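    # Illustrative note (not from the bzrlib sources): with a key_prefix of
    # ('file-id',), get_lines(['rev-1']) looks up the key ('file-id', 'rev-1')
    # in vf and returns {'rev-1': [...the text's lines...]}.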
    def plan_merge(self):
        """Generate a 'plan' for merging the two revisions.

        This involves comparing their texts and determining the cause of
        differences. If text A has a line and text B does not, then either the
        line was added to text A, or it was deleted from B. Once the causes
        are combined, they are written out in the format described in
        VersionedFile.plan_merge
        """
        blocks = self._get_matching_blocks(self.a_rev, self.b_rev)
        unique_a, unique_b = self._unique_lines(blocks)
        new_a, killed_b = self._determine_status(self.a_rev, unique_a)
        new_b, killed_a = self._determine_status(self.b_rev, unique_b)
        return self._iter_plan(blocks, new_a, killed_b, new_b, killed_a)
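    # Illustrative note (not from the bzrlib sources): the plan is an iterator
    # of (state, line) pairs using the states emitted by _iter_plan below,
    # e.g. ('unchanged', 'a\n'), ('new-a', 'x\n'), ('killed-b', 'y\n').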
    def _iter_plan(self, blocks, new_a, killed_b, new_b, killed_a):
        last_i = 0
        last_j = 0
        for i, j, n in blocks:
            for a_index in range(last_i, i):
                if a_index in new_a:
                    if a_index in killed_b:
                        yield 'conflicted-a', self.lines_a[a_index]
                    else:
                        yield 'new-a', self.lines_a[a_index]
                else:
                    yield 'killed-b', self.lines_a[a_index]
            for b_index in range(last_j, j):
                if b_index in new_b:
                    if b_index in killed_a:
                        yield 'conflicted-b', self.lines_b[b_index]
                    else:
                        yield 'new-b', self.lines_b[b_index]
                else:
                    yield 'killed-a', self.lines_b[b_index]
            # handle common lines
            for a_index in range(i, i+n):
                yield 'unchanged', self.lines_a[a_index]
            last_i = i + n
            last_j = j + n
    def _get_matching_blocks(self, left_revision, right_revision):
        """Return a description of which sections of two revisions match.

        See SequenceMatcher.get_matching_blocks
        """
        cached = self._cached_matching_blocks.get((left_revision,
                                                   right_revision))
        if cached is not None:
            return cached
        if self._last_lines_revision_id == left_revision:
            left_lines = self._last_lines
            right_lines = self.get_lines([right_revision])[right_revision]
        else:
            lines = self.get_lines([left_revision, right_revision])
            left_lines = lines[left_revision]
            right_lines = lines[right_revision]
        self._last_lines = right_lines
        self._last_lines_revision_id = right_revision
        matcher = patiencediff.PatienceSequenceMatcher(None, left_lines,
                                                       right_lines)
        return matcher.get_matching_blocks()
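    # Illustrative example (not from the bzrlib sources): PatienceSequenceMatcher
    # follows difflib's SequenceMatcher interface, so for ['a\n', 'b\n', 'c\n']
    # versus ['a\n', 'x\n', 'c\n'] get_matching_blocks() gives blocks equivalent
    # to [(0, 0, 1), (2, 2, 1), (3, 3, 0)] -- (left_index, right_index, length),
    # always ending with a zero-length sentinel block.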
    def _unique_lines(self, matching_blocks):
        """Analyse matching_blocks to determine which lines are unique

        :return: a tuple of (unique_left, unique_right), where the values are
            sets of line numbers of unique lines.
        """
        last_i = 0
        last_j = 0
        unique_left = []
        unique_right = []
        for i, j, n in matching_blocks:
            unique_left.extend(range(last_i, i))
            unique_right.extend(range(last_j, j))
            last_i = i + n
            last_j = j + n
        return unique_left, unique_right
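    # Worked example (illustrative, not from the bzrlib sources): for
    # matching_blocks [(0, 0, 1), (2, 1, 2), (4, 3, 0)] the left text's line 1
    # is the only unmatched line, so the result is ([1], []).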
    @staticmethod
    def _subtract_plans(old_plan, new_plan):
        """Remove changes from new_plan that came from old_plan.

        It is assumed that the difference between the old_plan and new_plan
        is their choice of 'b' text.

        All lines from new_plan that differ from old_plan are emitted
        verbatim. All lines from new_plan that match old_plan but are
        not about the 'b' revision are emitted verbatim.

        Lines that match and are about the 'b' revision are the lines we
        don't want, so we convert 'killed-b' -> 'unchanged', and 'new-b'
        is skipped entirely.
        """
        matcher = patiencediff.PatienceSequenceMatcher(None, old_plan,
                                                       new_plan)
        last_j = 0
        for i, j, n in matcher.get_matching_blocks():
            for jj in range(last_j, j):
                yield new_plan[jj]
            for jj in range(j, j+n):
                plan_line = new_plan[jj]
                if plan_line[0] == 'new-b':
                    pass
                elif plan_line[0] == 'killed-b':
                    yield 'unchanged', plan_line[1]
                else:
                    yield plan_line
            last_j = j + n
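    # Worked example (illustrative, not from the bzrlib sources): if old_plan
    # and new_plan are both
    #   [('unchanged', 'a\n'), ('new-b', 'x\n'), ('killed-b', 'b\n')]
    # then every line matches, so the result is
    #   [('unchanged', 'a\n'), ('unchanged', 'b\n')]
    # -- the matching 'new-b' line is dropped and the matching 'killed-b' line
    # is downgraded to 'unchanged'.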
class _PlanMerge(_PlanMergeBase):
    """Plan an annotate merge using on-the-fly annotation"""

    def __init__(self, a_rev, b_rev, vf, key_prefix):
        super(_PlanMerge, self).__init__(a_rev, b_rev, vf, key_prefix)
        self.a_key = self._key_prefix + (self.a_rev,)
        self.b_key = self._key_prefix + (self.b_rev,)
        self.graph = Graph(self.vf)
        heads = self.graph.heads((self.a_key, self.b_key))
        if len(heads) == 1:
            # one side dominates, so we can just return its values, yay for
            # simplicity.
            # Ideally we would know that before we get this far
            self._head_key = heads.pop()
            if self._head_key == self.a_key:
                other = b_rev
            else:
                other = a_rev
            mutter('found dominating revision for %s\n%s > %s', self.vf,
                   self._head_key[-1], other)
            self._weave = None
        else:
            self._head_key = None
            self._build_weave()
    def _precache_tip_lines(self):
        # Turn this into a no-op, because we will do this later
        pass

    def _find_recursive_lcas(self):
        """Find all the ancestors back to a unique lca"""
        cur_ancestors = (self.a_key, self.b_key)
        # graph.find_lca(uncommon, keys) now returns plain NULL_REVISION,
        # rather than a key tuple. We will just map that directly to no common
        # ancestors.
        parent_map = {}
        while True:
            next_lcas = self.graph.find_lca(*cur_ancestors)
            # Map a plain NULL_REVISION to a simple no-ancestors
            if next_lcas == set([NULL_REVISION]):
                next_lcas = ()
            # Order the lca's based on when they were merged into the tip
            # While the actual merge portion of weave merge uses a set() of
            # active revisions, the order of insertion *does* affect the
            # implicit ordering of the texts.
            for rev_key in cur_ancestors:
                ordered_parents = tuple(self.graph.find_merge_order(rev_key,
                                                                    next_lcas))
                parent_map[rev_key] = ordered_parents
            if len(next_lcas) == 0:
                break
            elif len(next_lcas) == 1:
                parent_map[list(next_lcas)[0]] = ()
                break
            elif len(next_lcas) > 2:
                # More than 2 lca's, fall back to grabbing all nodes between
                # this and the unique lca.
                mutter('More than 2 LCAs, falling back to all nodes for:'
                       ' %s, %s\n=> %s', self.a_key, self.b_key, cur_ancestors)
                cur_lcas = next_lcas
                while len(cur_lcas) > 1:
                    cur_lcas = self.graph.find_lca(*cur_lcas)
                if len(cur_lcas) == 0:
                    # No common base to find, use the full ancestry
                    unique_lca = None
                else:
                    unique_lca = list(cur_lcas)[0]
                    if unique_lca == NULL_REVISION:
                        # find_lca will return a plain 'NULL_REVISION' rather
                        # than a key tuple when there is no common ancestor, we
                        # prefer to just use None, because it doesn't confuse
                        # _get_interesting_texts()
                        unique_lca = None
                parent_map.update(self._find_unique_parents(next_lcas,
                                                            unique_lca))
                break
            cur_ancestors = next_lcas
        return parent_map
    def _find_unique_parents(self, tip_keys, base_key):
        """Find ancestors of tip that aren't ancestors of base.

        :param tip_keys: Nodes that are interesting
        :param base_key: Cull all ancestors of this node
        :return: The parent map for all revisions between tip_keys and
            base_key. base_key will be included. References to nodes outside of
            the ancestor set will also be removed.
        """
        # TODO: this would be simpler if find_unique_ancestors took a list
        #       instead of a single tip, internally it supports it, but it
        #       isn't a "backwards compatible" api change.
        if base_key is None:
            parent_map = dict(self.graph.iter_ancestry(tip_keys))
            # We remove NULL_REVISION because it isn't a proper tuple key, and
            # thus confuses things like _get_interesting_texts, and our logic
            # to add the texts into the memory weave.
            if NULL_REVISION in parent_map:
                parent_map.pop(NULL_REVISION)
        else:
            interesting = set()
            for tip in tip_keys:
                interesting.update(
                    self.graph.find_unique_ancestors(tip, [base_key]))
            parent_map = self.graph.get_parent_map(interesting)
            parent_map[base_key] = ()
        culled_parent_map, child_map, tails = self._remove_external_references(
            parent_map)
        # Remove all the tails but base_key
        if base_key is not None:
            tails.remove(base_key)
            self._prune_tails(culled_parent_map, child_map, tails)
        # Now remove all the uninteresting 'linear' regions
        simple_map = _mod_graph.collapse_linear_regions(culled_parent_map)
        return simple_map
    @staticmethod
    def _remove_external_references(parent_map):
        """Remove references that go outside of the parent map.

        :param parent_map: Something returned from Graph.get_parent_map(keys)
        :return: (filtered_parent_map, child_map, tails)
            filtered_parent_map is parent_map without external references
            child_map is the {parent_key: [child_keys]} mapping
            tails is a list of nodes that do not have any parents in the map
        """
        # TODO: The basic effect of this function seems more generic than
        #       _PlanMerge. But the specific details of building a child_map,
        #       and computing tails seems very specific to _PlanMerge.
        #       Still, should this be in Graph land?
        filtered_parent_map = {}
        child_map = {}
        tails = []
        for key, parent_keys in parent_map.iteritems():
            culled_parent_keys = [p for p in parent_keys if p in parent_map]
            if not culled_parent_keys:
                tails.append(key)
            for parent_key in culled_parent_keys:
                child_map.setdefault(parent_key, []).append(key)
            # TODO: Do we want to do this, it adds overhead for every node,
            #       just to say that the node has no children
            child_map.setdefault(key, [])
            filtered_parent_map[key] = culled_parent_keys
        return filtered_parent_map, child_map, tails
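    # Worked example (illustrative, not from the bzrlib sources): for
    #   parent_map = {('A',): (), ('B',): (('A',),), ('C',): (('B',), ('X',))}
    # where ('X',) is not a key of the map, the result is
    #   filtered_parent_map = {('A',): [], ('B',): [('A',)], ('C',): [('B',)]}
    #   child_map = {('A',): [('B',)], ('B',): [('C',)], ('C',): []}
    #   tails = [('A',)]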
    @staticmethod
    def _prune_tails(parent_map, child_map, tails_to_remove):
        """Remove tails from the parent map.

        This will remove the supplied revisions until no more children have 0
        parents.

        :param parent_map: A dict of {child: [parents]}, this dictionary will
            be modified in place.
        :param tails_to_remove: A list of tips that should be removed,
            this list will be consumed
        :param child_map: The reverse dict of parent_map ({parent: [children]})
            this dict will be modified
        :return: None, parent_map will be modified in place.
        """
        while tails_to_remove:
            next = tails_to_remove.pop()
            parent_map.pop(next)
            children = child_map.pop(next)
            for child in children:
                child_parents = parent_map[child]
                child_parents.remove(next)
                if len(child_parents) == 0:
                    tails_to_remove.append(child)
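    # Worked example (illustrative, not from the bzrlib sources): pruning the
    # tail ('M',) from
    #   parent_map = {('base',): [], ('M',): [], ('X',): [('M',), ('base',)]}
    #   child_map  = {('base',): [('X',)], ('M',): [('X',)], ('X',): []}
    # removes ('M',) and its reference from ('X',), leaving
    #   parent_map = {('base',): [], ('X',): [('base',)]}
    # ('X',) is not queued for removal because it still has ('base',) as a
    # parent.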
    def _get_interesting_texts(self, parent_map):
        """Return a dict of texts we are interested in.

        Note that the input is in key tuples, but the output is in plain
        revision ids.

        :param parent_map: The output from _find_recursive_lcas
        :return: A dict of {'revision_id':lines} as returned by
            _PlanMergeBase.get_lines()
        """
        all_revision_keys = set(parent_map)
        all_revision_keys.add(self.a_key)
        all_revision_keys.add(self.b_key)

        # Everything else is in 'keys' but get_lines is in 'revision_ids'
        all_texts = self.get_lines([k[-1] for k in all_revision_keys])
        return all_texts
    def _build_weave(self):
        from bzrlib import weave
        self._weave = weave.Weave(weave_name='in_memory_weave',
                                  allow_reserved=True)
        parent_map = self._find_recursive_lcas()

        all_texts = self._get_interesting_texts(parent_map)

        # Note: Unfortunately, the order given by topo_sort will affect the
        # ordering resolution in the output. Specifically, if you add A then B,
        # then in the output text A lines will show up before B lines. And, of
        # course, topo_sort doesn't guarantee any real ordering.
        # So we use merge_sort, and add a fake node on the tip.
        # This ensures that left-hand parents will always be inserted into the
        # weave before right-hand parents.
        tip_key = self._key_prefix + (_mod_revision.CURRENT_REVISION,)
        parent_map[tip_key] = (self.a_key, self.b_key)

        for seq_num, key, depth, eom in reversed(tsort.merge_sort(parent_map,
                                                                  tip_key)):
            if key == tip_key:
                continue
        # for key in tsort.topo_sort(parent_map):
            parent_keys = parent_map[key]
            revision_id = key[-1]
            parent_ids = [k[-1] for k in parent_keys]
            self._weave.add_lines(revision_id, parent_ids,
                                  all_texts[revision_id])
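    # Illustrative note (not from the bzrlib sources): because a fake tip node
    # with parents (a_key, b_key) is handed to merge_sort, revisions reachable
    # via a_key (the left-hand parent) are inserted into the weave before those
    # only reachable via b_key, so text A's lines sort ahead of text B's lines
    # in the merged output, as the comment above describes.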
    def plan_merge(self):
        """Generate a 'plan' for merging the two revisions.

        This involves comparing their texts and determining the cause of
        differences. If text A has a line and text B does not, then either the
        line was added to text A, or it was deleted from B. Once the causes
        are combined, they are written out in the format described in
        VersionedFile.plan_merge
        """
        if self._head_key is not None: # There was a single head
            if self._head_key == self.a_key:
                plan = 'new-a'
            else:
                if self._head_key != self.b_key:
                    raise AssertionError('There was an invalid head: %s != %s'
                                         % (self.b_key, self._head_key))
                plan = 'new-b'
            head_rev = self._head_key[-1]
            lines = self.get_lines([head_rev])[head_rev]
            return ((plan, line) for line in lines)
        return self._weave.plan_merge(self.a_rev, self.b_rev)
class _PlanLCAMerge(_PlanMergeBase):
    """
    This merge algorithm differs from _PlanMerge in that:
    1. comparisons are done against LCAs only
    2. cases where a contested line is new versus one LCA but old versus
       another are marked as conflicts, by emitting the line as conflicted-a
       or conflicted-b.

    This is faster, and hopefully produces more useful output.
    """

    def __init__(self, a_rev, b_rev, vf, key_prefix, graph):
        _PlanMergeBase.__init__(self, a_rev, b_rev, vf, key_prefix)
        lcas = graph.find_lca(key_prefix + (a_rev,), key_prefix + (b_rev,))
        self.lcas = set()
        for lca in lcas:
            if lca == NULL_REVISION:
                self.lcas.add(lca)
            else:
                self.lcas.add(lca[-1])
        for lca in self.lcas:
            if _mod_revision.is_null(lca):
                lca_lines = []
            else:
                lca_lines = self.get_lines([lca])[lca]
            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_a,
                                                           lca_lines)
            blocks = list(matcher.get_matching_blocks())
            self._cached_matching_blocks[(a_rev, lca)] = blocks
            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_b,
                                                           lca_lines)
            blocks = list(matcher.get_matching_blocks())
            self._cached_matching_blocks[(b_rev, lca)] = blocks
    def _determine_status(self, revision_id, unique_line_numbers):
        """Determines the status of unique lines versus all LCAs.

        Basically, determines why the line is unique to this revision.

        A line may be determined new, killed, or both.

        If a line is determined new, that means it was not present in at least
        one LCA, and is not present in the other merge revision.

        If a line is determined killed, that means the line was present in
        at least one LCA.

        If a line is killed and new, this indicates that the two merge
        revisions contain differing conflict resolutions.

        :param revision_id: The id of the revision in which the lines are
            unique
        :param unique_line_numbers: The line numbers of unique lines.
        :return: a tuple of (new_this, killed_other)
        """
        new = set()
        killed = set()
        unique_line_numbers = set(unique_line_numbers)
        for lca in self.lcas:
            blocks = self._get_matching_blocks(revision_id, lca)
            unique_vs_lca, _ignored = self._unique_lines(blocks)
            new.update(unique_line_numbers.intersection(unique_vs_lca))
            killed.update(unique_line_numbers.difference(unique_vs_lca))
        return new, killed
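    # Illustrative note (not from the bzrlib sources): with a single LCA, a
    # line of THIS that is absent from OTHER is reported as 'new' if it is
    # also absent from the LCA (it was added in THIS), and as 'killed' if it
    # is present in the LCA (OTHER deleted it). A line reported as both means
    # the two sides resolved an earlier conflict differently.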