        if self.merge_type.supports_show_base:
            kwargs['show_base'] = self.show_base
        elif self.show_base:
            raise BzrError("Showing base is not supported for this"
                           " merge type. %s" % self.merge_type)
        if (not getattr(self.merge_type, 'supports_reverse_cherrypick', True)
            and not self.base_is_other_ancestor):
            raise errors.CannotReverseCherrypick()
        if self.merge_type.supports_cherrypick:
            kwargs['cherrypick'] = (not self.base_is_ancestor or
                not self.base_is_other_ancestor)
        if self._is_criss_cross and getattr(self.merge_type,
                                            'supports_lca_trees', False):
            kwargs['lca_trees'] = self._lca_trees
        return self.merge_type(pb=self._pb,
                               change_reporter=self.change_reporter,
                               **kwargs)
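
    # Illustrative note, not part of the original code: the kwargs assembled
    # above are driven entirely by capability flags looked up on the merge
    # type class.  A hypothetical merge type advertising all of them might
    # look like this (the flag names follow the attribute checks above; the
    # class itself is made up):
    #
    #   class ExampleMerger(Merge3Merger):
    #       supports_show_base = True
    #       supports_reverse_cherrypick = False
    #       supports_cherrypick = True
    #       supports_lca_trees = True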

    def _do_merge_to(self, merge):
        if self.other_branch is not None:
            self.other_branch.update_references(self.this_branch)
        merge.do_merge()
        if self.recurse == 'down':
            for relpath, file_id in self.this_tree.iter_references():
                sub_tree = self.this_tree.get_nested_tree(file_id, relpath)
                other_revision = self.other_tree.get_reference_revision(
                    file_id, relpath)
                if other_revision == sub_tree.last_revision():
                    continue
                sub_merge = Merger(sub_tree.branch, this_tree=sub_tree)
                sub_merge.merge_type = self.merge_type
                other_branch = self.other_branch.reference_parent(file_id, relpath)
                sub_merge.set_other_revision(other_revision, other_branch)
                base_revision = self.base_tree.get_reference_revision(file_id)
                sub_merge.base_tree = \
                    sub_tree.branch.repository.revision_tree(base_revision)
                sub_merge.base_rev_id = base_revision
                sub_merge.do_merge()

    def do_merge(self):
        self.this_tree.lock_tree_write()
        try:
            if self.base_tree is not None:
                self.base_tree.lock_read()
            try:
                if self.other_tree is not None:
                    self.other_tree.lock_read()
                try:
                    merge = self.make_merger()
                    self._do_merge_to(merge)
                finally:
                    if self.other_tree is not None:
                        self.other_tree.unlock()
            finally:
                if self.base_tree is not None:
                    self.base_tree.unlock()
        finally:
            self.this_tree.unlock()
        if len(merge.cooked_conflicts) == 0:
            if not self.ignore_zero and not is_quiet():
                note("All changes applied successfully.")
        else:
            note("%d conflicts encountered." % len(merge.cooked_conflicts))
            result.append((file_id, changed, parents3, names3, executable3))
        return result

    def _entries_lca(self):
        """Gather data about files modified between multiple trees.

        This compares OTHER versus all LCA trees, and for interesting entries,
        it then compares with THIS and BASE.

        For the multi-valued entries, the format will be (BASE, [lca1, lca2])
        :return: [(file_id, changed, parents, names, executable)]
            file_id     Simple file_id of the entry
            changed     Boolean, True if the kind or contents changed
            parents     ((base, [parent_id, in, lcas]), parent_id_other,
                         parent_id_this)
            names       ((base, [name, in, lcas]), name_in_other, name_in_this)
            executable  ((base, [exec, in, lcas]), exec_in_other, exec_in_this)
        """
        if self.interesting_files is not None:
            lookup_trees = [self.this_tree, self.base_tree]
            lookup_trees.extend(self._lca_trees)
            # I think we should include the lca trees as well
            interesting_ids = self.other_tree.paths2ids(self.interesting_files,
                                                        lookup_trees)
        else:
            interesting_ids = self.interesting_ids
        result = []
        walker = _mod_tree.MultiWalker(self.other_tree, self._lca_trees)

        base_inventory = self.base_tree.inventory
        this_inventory = self.this_tree.inventory
        for path, file_id, other_ie, lca_values in walker.iter_all():
            # Is this modified at all from any of the other trees?
            if other_ie is None:
                other_ie = _none_entry
            if interesting_ids is not None and file_id not in interesting_ids:
                continue

            # If other_revision is found in any of the lcas, that means this
            # node is uninteresting. This is because when merging, if there are
            # multiple heads(), we have to create a new node. So if we didn't,
            # we know that the ancestry is linear, and that OTHER did not
            # modify anything
            # See doc/developers/lca_merge_resolution.txt for details
            other_revision = other_ie.revision
            if other_revision is not None:
                # We can't use this shortcut when other_revision is None,
                # because it may be None because things are WorkingTrees, and
                # not because it is *actually* None.
                is_unmodified = False
                for lca_path, ie in lca_values:
                    if ie is not None and ie.revision == other_revision:
                        is_unmodified = True
                        break
                if is_unmodified:
                    continue

            lca_entries = []
            for lca_path, lca_ie in lca_values:
                if lca_ie is None:
                    lca_entries.append(_none_entry)
                else:
                    lca_entries.append(lca_ie)

            if file_id in base_inventory:
                base_ie = base_inventory[file_id]
            else:
                base_ie = _none_entry

            if file_id in this_inventory:
                this_ie = this_inventory[file_id]
            else:
                this_ie = _none_entry

            lca_kinds = []
            lca_parent_ids = []
            lca_names = []
            lca_executable = []
            for lca_ie in lca_entries:
                lca_kinds.append(lca_ie.kind)
                lca_parent_ids.append(lca_ie.parent_id)
                lca_names.append(lca_ie.name)
                lca_executable.append(lca_ie.executable)

            kind_winner = self._lca_multi_way(
                (base_ie.kind, lca_kinds),
                other_ie.kind, this_ie.kind)
            parent_id_winner = self._lca_multi_way(
                (base_ie.parent_id, lca_parent_ids),
                other_ie.parent_id, this_ie.parent_id)
            name_winner = self._lca_multi_way(
                (base_ie.name, lca_names),
                other_ie.name, this_ie.name)

            content_changed = True
            if kind_winner == 'this':
                # No kind change in OTHER, see if there are *any* changes
                if other_ie.kind == 'directory':
                    if parent_id_winner == 'this' and name_winner == 'this':
                        # No change for this directory in OTHER, skip
                        continue
                    content_changed = False
                elif other_ie.kind is None or other_ie.kind == 'file':
                    def get_sha1(ie, tree):
                        if ie.kind != 'file':
                            return None
                        return tree.get_file_sha1(file_id)
                    base_sha1 = get_sha1(base_ie, self.base_tree)
                    lca_sha1s = [get_sha1(ie, tree) for ie, tree
                                 in zip(lca_entries, self._lca_trees)]
                    this_sha1 = get_sha1(this_ie, self.this_tree)
                    other_sha1 = get_sha1(other_ie, self.other_tree)
                    sha1_winner = self._lca_multi_way(
                        (base_sha1, lca_sha1s), other_sha1, this_sha1,
                        allow_overriding_lca=False)
                    exec_winner = self._lca_multi_way(
                        (base_ie.executable, lca_executable),
                        other_ie.executable, this_ie.executable)
                    if (parent_id_winner == 'this' and name_winner == 'this'
                        and sha1_winner == 'this' and exec_winner == 'this'):
                        # No kind, parent, name, exec, or content change for
                        # OTHER, so this node is not considered interesting
                        continue
                    if sha1_winner == 'this':
                        content_changed = False
                elif other_ie.kind == 'symlink':
                    def get_target(ie, tree):
                        if ie.kind != 'symlink':
                            return None
                        return tree.get_symlink_target(file_id)
                    base_target = get_target(base_ie, self.base_tree)
                    lca_targets = [get_target(ie, tree) for ie, tree
                                   in zip(lca_entries, self._lca_trees)]
                    this_target = get_target(this_ie, self.this_tree)
                    other_target = get_target(other_ie, self.other_tree)
                    target_winner = self._lca_multi_way(
                        (base_target, lca_targets),
                        other_target, this_target)
                    if (parent_id_winner == 'this' and name_winner == 'this'
                        and target_winner == 'this'):
                        # No kind, parent, name, or symlink target change
                        continue
                    if target_winner == 'this':
                        content_changed = False
                elif other_ie.kind == 'tree-reference':
                    # The 'changed' information seems to be handled at a higher
                    # level. At least, _entries3 returns False for content
                    # changed, even when at a new revision_id.
                    content_changed = False
                    if (parent_id_winner == 'this' and name_winner == 'this'):
                        # Nothing interesting
                        continue
                else:
                    raise AssertionError('unhandled kind: %s' % other_ie.kind)
                # XXX: We need to handle kind == 'symlink'

            # If we have gotten this far, that means something has changed
            result.append((file_id, content_changed,
                           ((base_ie.parent_id, lca_parent_ids),
                            other_ie.parent_id, this_ie.parent_id),
                           ((base_ie.name, lca_names),
                            other_ie.name, this_ie.name),
                           ((base_ie.executable, lca_executable),
                            other_ie.executable, this_ie.executable)
                          ))
        return result
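
    # Illustrative example, not part of the original code: with two LCAs,
    # a file that OTHER renamed (content untouched) while THIS and both LCAs
    # kept the old name would contribute a result entry shaped like:
    #
    #   ('file-id', False,
    #    (('root-id', ['root-id', 'root-id']), 'root-id', 'root-id'),
    #    (('old.txt', ['old.txt', 'old.txt']), 'new.txt', 'old.txt'),
    #    ((False, [False, False]), False, False))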

    def fix_root(self):
        try:
            self.tt.final_kind(self.tt.root)
        except NoSuchFile:
            self.tt.cancel_deletion(self.tt.root)
        if self.tt.final_file_id(self.tt.root) is None:
            self.tt.version_file(self.tt.tree_file_id(self.tt.root),
                                 self.tt.root)
        other_root_file_id = self.other_tree.get_root_id()
        if other_root_file_id is None:
            return
        other_root = self.tt.trans_id_file_id(other_root_file_id)
        if other_root == self.tt.root:
            return
            yield status_a(revision, text)
        for revision, text in annotated_b[b_cur:bi]:
            yield status_b(revision, text)
        # and now the matched section
        a_cur = ai + n
        b_cur = bi + n
        for text_a in plain_a[ai:a_cur]:
            yield "unchanged", text_a


class _PlanMergeBase(object):

    def __init__(self, a_rev, b_rev, vf, key_prefix):
        """Constructor.

        :param a_rev: Revision-id of one revision to merge
        :param b_rev: Revision-id of the other revision to merge
        :param vf: A VersionedFiles containing both revisions
        :param key_prefix: A prefix for accessing keys in vf, typically
            (file_id,).
        """
        self.a_rev = a_rev
        self.b_rev = b_rev
        self.vf = vf
        self._last_lines = None
        self._last_lines_revision_id = None
        self._cached_matching_blocks = {}
        self._key_prefix = key_prefix
        self._precache_tip_lines()

    def _precache_tip_lines(self):
        lines = self.get_lines([self.a_rev, self.b_rev])
        self.lines_a = lines[self.a_rev]
        self.lines_b = lines[self.b_rev]

    def get_lines(self, revisions):
        """Get lines for revisions from the backing VersionedFiles.

        :raises RevisionNotPresent: on absent texts.
        """
        keys = [(self._key_prefix + (rev,)) for rev in revisions]
        result = {}
        for record in self.vf.get_record_stream(keys, 'unordered', True):
            if record.storage_kind == 'absent':
                raise errors.RevisionNotPresent(record.key, self.vf)
            result[record.key[-1]] = osutils.chunks_to_lines(
                record.get_bytes_as('chunked'))
        return result

    def plan_merge(self):
        """Generate a 'plan' for merging the two revisions.

        This involves comparing their texts and determining the cause of
        differences. If text A has a line and text B does not, then either the
        line was added to text A, or it was deleted from B. Once the causes
        are combined, they are written out in the format described in
        VersionedFile.plan_merge
        """
        blocks = self._get_matching_blocks(self.a_rev, self.b_rev)
        unique_a, unique_b = self._unique_lines(blocks)
        new_a, killed_b = self._determine_status(self.a_rev, unique_a)
        new_b, killed_a = self._determine_status(self.b_rev, unique_b)
        return self._iter_plan(blocks, new_a, killed_b, new_b, killed_a)
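
    # Illustrative example, not part of the original code: a plan is a
    # sequence of (state, line) pairs using the states produced by _iter_plan
    # below, e.g.
    #
    #   [('unchanged',    'line both sides kept\n'),
    #    ('new-a',        'line introduced in A\n'),
    #    ('killed-b',     'line A kept but B deleted\n'),
    #    ('new-b',        'line introduced in B\n'),
    #    ('conflicted-a', 'line _determine_status marked both new and killed\n')]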

    def _iter_plan(self, blocks, new_a, killed_b, new_b, killed_a):
        last_i = 0
        last_j = 0
        for i, j, n in blocks:
            for a_index in range(last_i, i):
                if a_index in new_a:
                    if a_index in killed_b:
                        yield 'conflicted-a', self.lines_a[a_index]
                    else:
                        yield 'new-a', self.lines_a[a_index]
                else:
                    yield 'killed-b', self.lines_a[a_index]
            for b_index in range(last_j, j):
                if b_index in new_b:
                    if b_index in killed_a:
                        yield 'conflicted-b', self.lines_b[b_index]
                    else:
                        yield 'new-b', self.lines_b[b_index]
                else:
                    yield 'killed-a', self.lines_b[b_index]
            # handle common lines
            for a_index in range(i, i+n):
                yield 'unchanged', self.lines_a[a_index]
            last_i = i + n
            last_j = j + n

    def _get_matching_blocks(self, left_revision, right_revision):
        """Return a description of which sections of two revisions match.

        See SequenceMatcher.get_matching_blocks
        """
        cached = self._cached_matching_blocks.get((left_revision,
                                                   right_revision))
        if cached is not None:
            return cached
        if self._last_lines_revision_id == left_revision:
            left_lines = self._last_lines
            right_lines = self.get_lines([right_revision])[right_revision]
        else:
            lines = self.get_lines([left_revision, right_revision])
            left_lines = lines[left_revision]
            right_lines = lines[right_revision]
        self._last_lines = right_lines
        self._last_lines_revision_id = right_revision
        matcher = patiencediff.PatienceSequenceMatcher(None, left_lines,
                                                       right_lines)
        return matcher.get_matching_blocks()

    def _unique_lines(self, matching_blocks):
        """Analyse matching_blocks to determine which lines are unique

        :return: a tuple of (unique_left, unique_right), where the values are
            sets of line numbers of unique lines.
        """
        last_i = 0
        last_j = 0
        unique_left = []
        unique_right = []
        for i, j, n in matching_blocks:
            unique_left.extend(range(last_i, i))
            unique_right.extend(range(last_j, j))
            last_i = i + n
            last_j = j + n
        return unique_left, unique_right
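
    # Illustrative example, not part of the original code: with
    # matching_blocks = [(0, 0, 2), (3, 4, 1), (4, 5, 0)] (the last block is
    # the zero-length sentinel from get_matching_blocks), line 2 is unique on
    # the left and lines 2 and 3 are unique on the right, so the method
    # returns ([2], [2, 3]).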

    @staticmethod
    def _subtract_plans(old_plan, new_plan):
        """Remove changes from new_plan that came from old_plan.

        It is assumed that the difference between the old_plan and new_plan
        is their choice of 'b' text.

        All lines from new_plan that differ from old_plan are emitted
        verbatim. All lines from new_plan that match old_plan but are
        not about the 'b' revision are emitted verbatim.

        Lines that match and are about the 'b' revision are the lines we
        don't want, so we convert 'killed-b' -> 'unchanged', and 'new-b'
        is skipped entirely.
        """
        matcher = patiencediff.PatienceSequenceMatcher(None, old_plan,
                                                       new_plan)
        last_j = 0
        for i, j, n in matcher.get_matching_blocks():
            for jj in range(last_j, j):
                yield new_plan[jj]
            for jj in range(j, j+n):
                plan_line = new_plan[jj]
                if plan_line[0] == 'new-b':
                    pass
                elif plan_line[0] == 'killed-b':
                    yield 'unchanged', plan_line[1]
                else:
                    yield plan_line
            last_j = j + n
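
    # Illustrative example, not part of the original code: if old_plan and
    # new_plan both contain
    #   [('unchanged', 'a\n'), ('new-b', 'x\n'), ('killed-b', 'y\n')]
    # the whole region matches, so the 'new-b' entry is skipped, the
    # 'killed-b' entry is demoted to ('unchanged', 'y\n'), and the rest is
    # passed through, giving [('unchanged', 'a\n'), ('unchanged', 'y\n')].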


class _PlanMerge(_PlanMergeBase):
    """Plan an annotate merge using on-the-fly annotation"""

    def __init__(self, a_rev, b_rev, vf, key_prefix):
        super(_PlanMerge, self).__init__(a_rev, b_rev, vf, key_prefix)
        self.a_key = self._key_prefix + (self.a_rev,)
        self.b_key = self._key_prefix + (self.b_rev,)
        self.graph = Graph(self.vf)
        heads = self.graph.heads((self.a_key, self.b_key))
        if len(heads) == 1:
            # one side dominates, so we can just return its values, yay for
            # fast-forward.
            # Ideally we would know that before we get this far
            self._head_key = heads.pop()
            if self._head_key == self.a_key:
                other = b_rev
            else:
                other = a_rev
            mutter('found dominating revision for %s\n%s > %s', self.vf,
                   self._head_key[-1], other)
            self._weave = None
        else:
            self._head_key = None
            self._build_weave()

    def _precache_tip_lines(self):
        # Turn this into a no-op, because we will do this later
        pass

    def _find_recursive_lcas(self):
        """Find all the ancestors back to a unique lca"""
        cur_ancestors = (self.a_key, self.b_key)
        # graph.find_lca(uncommon, keys) now returns plain NULL_REVISION,
        # rather than a key tuple. We will just map that directly to no common
        # ancestors.
        parent_map = {}
        while True:
            next_lcas = self.graph.find_lca(*cur_ancestors)
            # Map a plain NULL_REVISION to a simple no-ancestors
            if next_lcas == set([NULL_REVISION]):
                next_lcas = ()
            # Order the lca's based on when they were merged into the tip
            # While the actual merge portion of weave merge uses a set() of
            # active revisions, the order of insertion *does* effect the
            # implicit ordering of the texts.
            for rev_key in cur_ancestors:
                ordered_parents = tuple(self.graph.find_merge_order(rev_key,
                                                                    next_lcas))
                parent_map[rev_key] = ordered_parents
            if len(next_lcas) == 0:
                break
            elif len(next_lcas) == 1:
                parent_map[list(next_lcas)[0]] = ()
                break
            elif len(next_lcas) > 2:
                # More than 2 lca's, fall back to grabbing all nodes between
                # this and the unique lca.
                mutter('More than 2 LCAs, falling back to all nodes for:'
                       ' %s, %s\n=> %s', self.a_key, self.b_key, cur_ancestors)
                cur_lcas = next_lcas
                while len(cur_lcas) > 1:
                    cur_lcas = self.graph.find_lca(*cur_lcas)
                if len(cur_lcas) == 0:
                    # No common base to find, use the full ancestry
                    unique_lca = None
                else:
                    unique_lca = list(cur_lcas)[0]
                    if unique_lca == NULL_REVISION:
                        # find_lca will return a plain 'NULL_REVISION' rather
                        # than a key tuple when there is no common ancestor, we
                        # prefer to just use None, because it doesn't confuse
                        # _get_interesting_texts()
                        unique_lca = None
                parent_map.update(self._find_unique_parents(next_lcas,
                                                            unique_lca))
                break
            cur_ancestors = next_lcas
        return parent_map

    def _find_unique_parents(self, tip_keys, base_key):
        """Find ancestors of tip that aren't ancestors of base.

        :param tip_keys: Nodes that are interesting
        :param base_key: Cull all ancestors of this node
        :return: The parent map for all revisions between tip_keys and
            base_key. base_key will be included. References to nodes outside of
            the ancestor set will also be removed.
        """
        # TODO: this would be simpler if find_unique_ancestors took a list
        #       instead of a single tip, internally it supports it, but it
        #       isn't a "backwards compatible" api change.
        if base_key is None:
            parent_map = dict(self.graph.iter_ancestry(tip_keys))
            # We remove NULL_REVISION because it isn't a proper tuple key, and
            # thus confuses things like _get_interesting_texts, and our logic
            # to add the texts into the memory weave.
            if NULL_REVISION in parent_map:
                parent_map.pop(NULL_REVISION)
        else:
            interesting = set()
            for tip in tip_keys:
                interesting.update(
                    self.graph.find_unique_ancestors(tip, [base_key]))
            parent_map = self.graph.get_parent_map(interesting)
            parent_map[base_key] = ()
        culled_parent_map, child_map, tails = self._remove_external_references(
            parent_map)
        # Remove all the tails but base_key
        if base_key is not None:
            tails.remove(base_key)
            self._prune_tails(culled_parent_map, child_map, tails)
        # Now remove all the uninteresting 'linear' regions
        simple_map = _mod_graph.collapse_linear_regions(culled_parent_map)
        return simple_map

    @staticmethod
    def _remove_external_references(parent_map):
        """Remove references that go outside of the parent map.

        :param parent_map: Something returned from Graph.get_parent_map(keys)
        :return: (filtered_parent_map, child_map, tails)
            filtered_parent_map is parent_map without external references
            child_map is the {parent_key: [child_keys]} mapping
            tails is a list of nodes that do not have any parents in the map
        """
        # TODO: The basic effect of this function seems more generic than
        #       _PlanMerge. But the specific details of building a child_map,
        #       and computing tails seems very specific to _PlanMerge.
        #       Still, should this be in Graph land?
        filtered_parent_map = {}
        child_map = {}
        tails = []
        for key, parent_keys in parent_map.iteritems():
            culled_parent_keys = [p for p in parent_keys if p in parent_map]
            if not culled_parent_keys:
                tails.append(key)
            for parent_key in culled_parent_keys:
                child_map.setdefault(parent_key, []).append(key)
            # TODO: Do we want to do this, it adds overhead for every node,
            #       just to say that the node has no children
            child_map.setdefault(key, [])
            filtered_parent_map[key] = culled_parent_keys
        return filtered_parent_map, child_map, tails

    @staticmethod
    def _prune_tails(parent_map, child_map, tails_to_remove):
        """Remove tails from the parent map.

        This will remove the supplied revisions until no more children have 0
        parents.

        :param parent_map: A dict of {child: [parents]}, this dictionary will
            be modified in place.
        :param tails_to_remove: A list of tips that should be removed,
            this list will be consumed
        :param child_map: The reverse dict of parent_map ({parent: [children]})
            this dict will be modified
        :return: None, parent_map will be modified in place.
        """
        while tails_to_remove:
            next = tails_to_remove.pop()
            parent_map.pop(next)
            children = child_map.pop(next)
            for child in children:
                child_parents = parent_map[child]
                child_parents.remove(next)
                if len(child_parents) == 0:
                    tails_to_remove.append(child)
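
    # Illustrative example, not part of the original code: pruning the tail
    # 'rev-x' from
    #   parent_map = {'rev-x': [], 'rev-y': ['rev-x'], 'rev-z': ['rev-y']}
    #   child_map  = {'rev-x': ['rev-y'], 'rev-y': ['rev-z'], 'rev-z': []}
    # removes 'rev-x'; 'rev-y' is then left with no parents and is pruned in
    # turn, followed by 'rev-z', leaving parent_map empty.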

    def _get_interesting_texts(self, parent_map):
        """Return a dict of texts we are interested in.

        Note that the input is in key tuples, but the output is in plain
        revision ids.

        :param parent_map: The output from _find_recursive_lcas
        :return: A dict of {'revision_id':lines} as returned by
            _PlanMergeBase.get_lines()
        """
        all_revision_keys = set(parent_map)
        all_revision_keys.add(self.a_key)
        all_revision_keys.add(self.b_key)

        # Everything else is in 'keys' but get_lines is in 'revision_ids'
        all_texts = self.get_lines([k[-1] for k in all_revision_keys])
        return all_texts

    def _build_weave(self):
        from bzrlib import weave
        self._weave = weave.Weave(weave_name='in_memory_weave',
                                  allow_reserved=True)
        parent_map = self._find_recursive_lcas()

        all_texts = self._get_interesting_texts(parent_map)

        # Note: Unfortunately, the order given by topo_sort will effect the
        # ordering resolution in the output. Specifically, if you add A then B,
        # then in the output text A lines will show up before B lines. And, of
        # course, topo_sort doesn't guarantee any real ordering.
        # So we use merge_sort, and add a fake node on the tip.
        # This ensures that left-hand parents will always be inserted into the
        # weave before right-hand parents.
        tip_key = self._key_prefix + (_mod_revision.CURRENT_REVISION,)
        parent_map[tip_key] = (self.a_key, self.b_key)

        for seq_num, key, depth, eom in reversed(tsort.merge_sort(parent_map,
                                                                  tip_key)):
            if key == tip_key:
                continue
        # for key in tsort.topo_sort(parent_map):
            parent_keys = parent_map[key]
            revision_id = key[-1]
            parent_ids = [k[-1] for k in parent_keys]
            self._weave.add_lines(revision_id, parent_ids,
                                  all_texts[revision_id])

    def plan_merge(self):
        """Generate a 'plan' for merging the two revisions.

        This involves comparing their texts and determining the cause of
        differences. If text A has a line and text B does not, then either the
        line was added to text A, or it was deleted from B. Once the causes
        are combined, they are written out in the format described in
        VersionedFile.plan_merge
        """
        if self._head_key is not None: # There was a single head
            if self._head_key == self.a_key:
                plan = 'new-a'
            else:
                if self._head_key != self.b_key:
                    raise AssertionError('There was an invalid head: %s != %s'
                                         % (self.b_key, self._head_key))
                plan = 'new-b'
            head_rev = self._head_key[-1]
            lines = self.get_lines([head_rev])[head_rev]
            return ((plan, line) for line in lines)
        return self._weave.plan_merge(self.a_rev, self.b_rev)
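
    # Illustrative note, not part of the original code: when one revision is
    # an ancestor of the other, __init__ records the dominating key in
    # self._head_key and no weave is built; the plan is then simply every
    # line of the surviving text tagged 'new-a' or 'new-b', e.g.
    #   [('new-a', 'line 1\n'), ('new-a', 'line 2\n'), ...]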


class _PlanLCAMerge(_PlanMergeBase):
    """
    This merge algorithm differs from _PlanMerge in that:
    1. comparisons are done against LCAs only
    2. cases where a contested line is new versus one LCA but old versus
       another are marked as conflicts, by emitting the line as conflicted-a
       or conflicted-b.

    This is faster, and hopefully produces more useful output.
    """

    def __init__(self, a_rev, b_rev, vf, key_prefix, graph):
        _PlanMergeBase.__init__(self, a_rev, b_rev, vf, key_prefix)
        lcas = graph.find_lca(key_prefix + (a_rev,), key_prefix + (b_rev,))
        self.lcas = set()
        for lca in lcas:
            if lca == NULL_REVISION:
                self.lcas.add(lca)
            else:
                self.lcas.add(lca[-1])
        for lca in self.lcas:
            if _mod_revision.is_null(lca):
                lca_lines = []
            else:
                lca_lines = self.get_lines([lca])[lca]
            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_a,
                                                           lca_lines)
            blocks = list(matcher.get_matching_blocks())
            self._cached_matching_blocks[(a_rev, lca)] = blocks
            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_b,
                                                           lca_lines)
            blocks = list(matcher.get_matching_blocks())
            self._cached_matching_blocks[(b_rev, lca)] = blocks

    def _determine_status(self, revision_id, unique_line_numbers):
        """Determines the status unique lines versus all lcas.

        Basically, determines why the line is unique to this revision.

        A line may be determined new, killed, or both.

        If a line is determined new, that means it was not present in at least
        one LCA, and is not present in the other merge revision.

        If a line is determined killed, that means the line was present in
        at least one LCA.

        If a line is killed and new, this indicates that the two merge
        revisions contain differing conflict resolutions.

        :param revision_id: The id of the revision in which the lines are
            unique
        :param unique_line_numbers: The line numbers of unique lines.
        :return: a tuple of (new_this, killed_other)
        """
        new = set()
        killed = set()
        unique_line_numbers = set(unique_line_numbers)
        for lca in self.lcas:
            blocks = self._get_matching_blocks(revision_id, lca)
            unique_vs_lca, _ignored = self._unique_lines(blocks)
            new.update(unique_line_numbers.intersection(unique_vs_lca))
            killed.update(unique_line_numbers.difference(unique_vs_lca))
        return new, killed
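
    # Illustrative note, not part of the original code: for a line of text A
    # that is unique versus text B, each LCA comparison adds its index to
    # `new` when the LCA also lacks the line (A introduced it) or to `killed`
    # when the LCA has it (B deleted it).  With several LCAs a line can land
    # in both sets, which _iter_plan later reports as 'conflicted-a' or
    # 'conflicted-b'.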