            kwargs['show_base'] = self.show_base
        elif self.show_base:
            raise BzrError("Showing base is not supported for this"
                           " merge type. %s" % self.merge_type)
        if (not getattr(self.merge_type, 'supports_reverse_cherrypick', True)
            and not self.base_is_other_ancestor):
            raise errors.CannotReverseCherrypick()
        if self.merge_type.supports_cherrypick:
            kwargs['cherrypick'] = (not self.base_is_ancestor or
                not self.base_is_other_ancestor)
        if self._is_criss_cross and getattr(self.merge_type,
                                            'supports_lca_trees', False):
            kwargs['lca_trees'] = self._lca_trees
        return self.merge_type(pb=self._pb,
                               change_reporter=self.change_reporter,
                               **kwargs)

    def _do_merge_to(self, merge):
        if self.other_branch is not None:
            self.other_branch.update_references(self.this_branch)
        merge.do_merge()
        if self.recurse == 'down':
            for relpath, file_id in self.this_tree.iter_references():
                sub_tree = self.this_tree.get_nested_tree(file_id, relpath)
                other_revision = self.other_tree.get_reference_revision(
                    file_id, relpath)
                if other_revision == sub_tree.last_revision():
                    continue
                sub_merge = Merger(sub_tree.branch, this_tree=sub_tree)
                sub_merge.merge_type = self.merge_type
                other_branch = self.other_branch.reference_parent(file_id, relpath)
                sub_merge.set_other_revision(other_revision, other_branch)
                base_revision = self.base_tree.get_reference_revision(file_id)
                sub_merge.base_tree = \
                    sub_tree.branch.repository.revision_tree(base_revision)
                sub_merge.base_rev_id = base_revision
                sub_merge.do_merge()

    def do_merge(self):
        self.this_tree.lock_tree_write()
        if self.base_tree is not None:
            self.base_tree.lock_read()
        if self.other_tree is not None:
            self.other_tree.lock_read()
        try:
            merge = self.make_merger()
            self._do_merge_to(merge)
        finally:
            if self.other_tree is not None:
                self.other_tree.unlock()
            if self.base_tree is not None:
                self.base_tree.unlock()
            self.this_tree.unlock()
        if len(merge.cooked_conflicts) == 0:
            if not self.ignore_zero and not is_quiet():
                note("All changes applied successfully.")
        else:
            note("%d conflicts encountered." % len(merge.cooked_conflicts))
        return len(merge.cooked_conflicts)
            result.append((file_id, changed, parents3, names3, executable3))
        return result

    def _entries_lca(self):
        """Gather data about files modified between multiple trees.

        This compares OTHER versus all LCA trees, and for interesting entries,
        it then compares with THIS and BASE.

        For the multi-valued entries, the format will be (BASE, [lca1, lca2])
        :return: [(file_id, changed, parents, names, executable)]
            file_id     Simple file_id of the entry
            changed     Boolean, True if the kind or contents changed
                        else False
            parents     ((base, [parent_id, in, lcas]), parent_id_other,
                         parent_id_this)
            names       ((base, [name, in, lcas]), name_in_other, name_in_this)
            executable  ((base, [exec, in, lcas]), exec_in_other, exec_in_this)
        """
        if self.interesting_files is not None:
            lookup_trees = [self.this_tree, self.base_tree]
            lookup_trees.extend(self._lca_trees)
            # I think we should include the lca trees as well
            interesting_ids = self.other_tree.paths2ids(self.interesting_files,
                                                        lookup_trees)
        else:
            interesting_ids = self.interesting_ids
        result = []
        walker = _mod_tree.MultiWalker(self.other_tree, self._lca_trees)

        base_inventory = self.base_tree.inventory
        this_inventory = self.this_tree.inventory
        for path, file_id, other_ie, lca_values in walker.iter_all():
            # Is this modified at all from any of the other trees?
            if other_ie is None:
                other_ie = _none_entry
            if interesting_ids is not None and file_id not in interesting_ids:
                continue

            # If other_revision is found in any of the lcas, that means this
            # node is uninteresting. This is because when merging, if there are
            # multiple heads(), we have to create a new node. So if we didn't,
            # we know that the ancestry is linear, and that OTHER did not
            # modify anything.
            # See doc/developers/lca_merge_resolution.txt for details
            other_revision = other_ie.revision
            if other_revision is not None:
                # We can't use this shortcut when other_revision is None,
                # because it may be None because things are WorkingTrees, and
                # not because it is *actually* None.
                is_unmodified = False
                for lca_path, ie in lca_values:
                    if ie is not None and ie.revision == other_revision:
                        is_unmodified = True
                        break
                if is_unmodified:
                    continue

            lca_entries = []
            for lca_path, lca_ie in lca_values:
                if lca_ie is None:
                    lca_entries.append(_none_entry)
                else:
                    lca_entries.append(lca_ie)

            if file_id in base_inventory:
                base_ie = base_inventory[file_id]
            else:
                base_ie = _none_entry

            if file_id in this_inventory:
                this_ie = this_inventory[file_id]
            else:
                this_ie = _none_entry

            lca_kinds = []
            lca_parent_ids = []
            lca_names = []
            lca_executable = []
            for lca_ie in lca_entries:
                lca_kinds.append(lca_ie.kind)
                lca_parent_ids.append(lca_ie.parent_id)
                lca_names.append(lca_ie.name)
                lca_executable.append(lca_ie.executable)

            kind_winner = self._lca_multi_way(
                (base_ie.kind, lca_kinds),
                other_ie.kind, this_ie.kind)
            parent_id_winner = self._lca_multi_way(
                (base_ie.parent_id, lca_parent_ids),
                other_ie.parent_id, this_ie.parent_id)
            name_winner = self._lca_multi_way(
                (base_ie.name, lca_names),
                other_ie.name, this_ie.name)

            content_changed = True
            if kind_winner == 'this':
                # No kind change in OTHER, see if there are *any* changes
                if other_ie.kind == 'directory':
                    if parent_id_winner == 'this' and name_winner == 'this':
                        # No change for this directory in OTHER, skip
                        continue
                    content_changed = False
                elif other_ie.kind is None or other_ie.kind == 'file':
                    def get_sha1(ie, tree):
                        if ie.kind != 'file':
                            return None
                        return tree.get_file_sha1(file_id)
                    base_sha1 = get_sha1(base_ie, self.base_tree)
                    lca_sha1s = [get_sha1(ie, tree) for ie, tree
                                 in zip(lca_entries, self._lca_trees)]
                    this_sha1 = get_sha1(this_ie, self.this_tree)
                    other_sha1 = get_sha1(other_ie, self.other_tree)
                    sha1_winner = self._lca_multi_way(
                        (base_sha1, lca_sha1s), other_sha1, this_sha1,
                        allow_overriding_lca=False)
                    exec_winner = self._lca_multi_way(
                        (base_ie.executable, lca_executable),
                        other_ie.executable, this_ie.executable)
                    if (parent_id_winner == 'this' and name_winner == 'this'
                        and sha1_winner == 'this' and exec_winner == 'this'):
                        # No kind, parent, name, exec, or content change for
                        # OTHER, so this node is not considered interesting
                        continue
                    if sha1_winner == 'this':
                        content_changed = False
                elif other_ie.kind == 'symlink':
                    def get_target(ie, tree):
                        if ie.kind != 'symlink':
                            return None
                        return tree.get_symlink_target(file_id)
                    base_target = get_target(base_ie, self.base_tree)
                    lca_targets = [get_target(ie, tree) for ie, tree
                                   in zip(lca_entries, self._lca_trees)]
                    this_target = get_target(this_ie, self.this_tree)
                    other_target = get_target(other_ie, self.other_tree)
                    target_winner = self._lca_multi_way(
                        (base_target, lca_targets),
                        other_target, this_target)
                    if (parent_id_winner == 'this' and name_winner == 'this'
                        and target_winner == 'this'):
                        # No kind, parent, name, or symlink target change for
                        # OTHER, so this node is not considered interesting
                        continue
                    if target_winner == 'this':
                        content_changed = False
                elif other_ie.kind == 'tree-reference':
                    # The 'changed' information seems to be handled at a higher
                    # level. At least, _entries3 returns False for content
                    # changed, even when at a new revision_id.
                    content_changed = False
                    if (parent_id_winner == 'this' and name_winner == 'this'):
                        # Nothing interesting
                        continue
                else:
                    raise AssertionError('unhandled kind: %s' % other_ie.kind)
                # XXX: We need to handle kind == 'symlink'

            # If we have gotten this far, that means something has changed
            result.append((file_id, content_changed,
                           ((base_ie.parent_id, lca_parent_ids),
                            other_ie.parent_id, this_ie.parent_id),
                           ((base_ie.name, lca_names),
                            other_ie.name, this_ie.name),
                           ((base_ie.executable, lca_executable),
                            other_ie.executable, this_ie.executable)
                          ))
        return result
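
    # Illustrative sketch, not from bzrlib: with two LCA trees, a single row
    # in the list returned by _entries_lca() has this shape (all values here
    # are invented):
    #
    #   ('some-file-id',                                    # file_id
    #    True,                                              # changed
    #    (('root-id', ['root-id', 'root-id']),              # parents: (BASE, [lca1, lca2]),
    #     'root-id', 'root-id'),                            #          then OTHER, THIS
    #    (('foo', ['foo', 'foo.txt']), 'foo.txt', 'foo'),   # names
    #    ((False, [False, False]), False, False))           # executable
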
    def fix_root(self):
        try:
            self.tt.final_kind(self.tt.root)
        except NoSuchFile:
            self.tt.cancel_deletion(self.tt.root)
        if self.tt.final_file_id(self.tt.root) is None:
            self.tt.version_file(self.tt.tree_file_id(self.tt.root),
                                 self.tt.root)
        other_root_file_id = self.other_tree.get_root_id()
        if other_root_file_id is None:
            return
        other_root = self.tt.trans_id_file_id(other_root_file_id)
        if other_root == self.tt.root:
            return

            yield status_a(revision, text)
        for revision, text in annotated_b[b_cur:bi]:
            yield status_b(revision, text)

        # and now the matched section
        a_cur = ai + l
        b_cur = bi + l
        for text_a in plain_a[ai:a_cur]:
            yield "unchanged", text_a

class _PlanMergeBase(object):

    def __init__(self, a_rev, b_rev, vf, key_prefix):
        """Constructor.

        :param a_rev: Revision-id of one revision to merge
        :param b_rev: Revision-id of the other revision to merge
        :param vf: A VersionedFiles containing both revisions
        :param key_prefix: A prefix for accessing keys in vf, typically
            (file_id,).
        """
        self.a_rev = a_rev
        self.b_rev = b_rev
        self.vf = vf
        self._last_lines = None
        self._last_lines_revision_id = None
        self._cached_matching_blocks = {}
        self._key_prefix = key_prefix
        self._precache_tip_lines()

    def _precache_tip_lines(self):
        lines = self.get_lines([self.a_rev, self.b_rev])
        self.lines_a = lines[self.a_rev]
        self.lines_b = lines[self.b_rev]

    def get_lines(self, revisions):
        """Get lines for revisions from the backing VersionedFiles.

        :raises RevisionNotPresent: on absent texts.
        """
        keys = [(self._key_prefix + (rev,)) for rev in revisions]
        result = {}
        for record in self.vf.get_record_stream(keys, 'unordered', True):
            if record.storage_kind == 'absent':
                raise errors.RevisionNotPresent(record.key, self.vf)
            result[record.key[-1]] = osutils.chunks_to_lines(
                record.get_bytes_as('chunked'))
        return result

    def plan_merge(self):
        """Generate a 'plan' for merging the two revisions.

        This involves comparing their texts and determining the cause of
        differences.  If text A has a line and text B does not, then either the
        line was added to text A, or it was deleted from B.  Once the causes
        are combined, they are written out in the format described in
        VersionedFile.plan_merge
        """
        blocks = self._get_matching_blocks(self.a_rev, self.b_rev)
        unique_a, unique_b = self._unique_lines(blocks)
        new_a, killed_b = self._determine_status(self.a_rev, unique_a)
        new_b, killed_a = self._determine_status(self.b_rev, unique_b)
        return self._iter_plan(blocks, new_a, killed_b, new_b, killed_a)
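
    # Illustrative sketch, not from bzrlib: the plan returned above is an
    # iterator of (state, line) pairs, for example (lines invented):
    #
    #   [('unchanged',    'a\n'),
    #    ('new-a',        'added in A\n'),
    #    ('killed-b',     'removed by B\n'),
    #    ('new-b',        'added in B\n'),
    #    ('conflicted-a', 'changed differently on both sides\n')]
    #
    # A consumer such as versionedfile.PlanWeaveMerge turns a plan like this
    # into merged text with conflict markers.
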
    def _iter_plan(self, blocks, new_a, killed_b, new_b, killed_a):
        last_i = 0
        last_j = 0
        for i, j, n in blocks:
            for a_index in range(last_i, i):
                if a_index in new_a:
                    if a_index in killed_b:
                        yield 'conflicted-a', self.lines_a[a_index]
                    else:
                        yield 'new-a', self.lines_a[a_index]
                else:
                    yield 'killed-b', self.lines_a[a_index]
            for b_index in range(last_j, j):
                if b_index in new_b:
                    if b_index in killed_a:
                        yield 'conflicted-b', self.lines_b[b_index]
                    else:
                        yield 'new-b', self.lines_b[b_index]
                else:
                    yield 'killed-a', self.lines_b[b_index]
            # handle common lines
            for a_index in range(i, i+n):
                yield 'unchanged', self.lines_a[a_index]
            last_i = i + n
            last_j = j + n

    def _get_matching_blocks(self, left_revision, right_revision):
        """Return a description of which sections of two revisions match.

        See SequenceMatcher.get_matching_blocks
        """
        cached = self._cached_matching_blocks.get((left_revision,
                                                   right_revision))
        if cached is not None:
            return cached
        if self._last_lines_revision_id == left_revision:
            left_lines = self._last_lines
            right_lines = self.get_lines([right_revision])[right_revision]
        else:
            lines = self.get_lines([left_revision, right_revision])
            left_lines = lines[left_revision]
            right_lines = lines[right_revision]
        self._last_lines = right_lines
        self._last_lines_revision_id = right_revision
        matcher = patiencediff.PatienceSequenceMatcher(None, left_lines,
                                                       right_lines)
        return matcher.get_matching_blocks()

    def _unique_lines(self, matching_blocks):
        """Analyse matching_blocks to determine which lines are unique

        :return: a tuple of (unique_left, unique_right), where the values are
            sets of line numbers of unique lines.
        """
        last_i = 0
        last_j = 0
        unique_left = []
        unique_right = []
        for i, j, n in matching_blocks:
            unique_left.extend(range(last_i, i))
            unique_right.extend(range(last_j, j))
            last_i = i + n
            last_j = j + n
        return unique_left, unique_right
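
    # Illustrative sketch, not from bzrlib: for two three-line texts that
    # differ only in the middle line,
    #
    #   a = ['x\n', 'old\n', 'z\n']
    #   b = ['x\n', 'new\n', 'z\n']
    #
    # the matcher returns blocks [(0, 0, 1), (2, 2, 1), (3, 3, 0)], and
    # _unique_lines(blocks) gives ([1], [1]): line 1 on each side is unique.
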
    @staticmethod
    def _subtract_plans(old_plan, new_plan):
        """Remove changes from new_plan that came from old_plan.

        It is assumed that the difference between the old_plan and new_plan
        is their choice of 'b' text.

        All lines from new_plan that differ from old_plan are emitted
        verbatim.  All lines from new_plan that match old_plan but are
        not about the 'b' revision are emitted verbatim.

        Lines that match and are about the 'b' revision are the lines we
        don't want, so we convert 'killed-b' -> 'unchanged', and 'new-b'
        is skipped entirely.
        """
        matcher = patiencediff.PatienceSequenceMatcher(None, old_plan,
                                                       new_plan)
        last_j = 0
        for i, j, n in matcher.get_matching_blocks():
            for jj in range(last_j, j):
                yield new_plan[jj]
            for jj in range(j, j+n):
                plan_line = new_plan[jj]
                if plan_line[0] == 'new-b':
                    pass
                elif plan_line[0] == 'killed-b':
                    yield 'unchanged', plan_line[1]
                else:
                    yield plan_line
            last_j = j + n
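
    # Illustrative sketch, not from bzrlib (plans invented): given
    #
    #   old_plan = [('unchanged', 'a\n'), ('new-b', 'b\n'), ('killed-b', 'd\n')]
    #   new_plan = [('unchanged', 'a\n'), ('new-b', 'b\n'), ('killed-b', 'd\n'),
    #               ('new-b', 'c\n')]
    #
    # list(_subtract_plans(old_plan, new_plan)) produces
    #
    #   [('unchanged', 'a\n'), ('unchanged', 'd\n'), ('new-b', 'c\n')]
    #
    # The 'new-b' line shared with old_plan is dropped, the shared 'killed-b'
    # becomes 'unchanged', and the 'new-b' line unique to new_plan passes
    # through verbatim.
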
class _PlanMerge(_PlanMergeBase):
    """Plan an annotate merge using on-the-fly annotation"""

    def __init__(self, a_rev, b_rev, vf, key_prefix):
        super(_PlanMerge, self).__init__(a_rev, b_rev, vf, key_prefix)
        self.a_key = self._key_prefix + (self.a_rev,)
        self.b_key = self._key_prefix + (self.b_rev,)
        self.graph = Graph(self.vf)
        heads = self.graph.heads((self.a_key, self.b_key))
        if len(heads) == 1:
            # one side dominates, so we can just return its values, yay for
            # simplicity.
            # Ideally we would know that before we get this far
            self._head_key = heads.pop()
            if self._head_key == self.a_key:
                other = b_rev
            else:
                other = a_rev
            mutter('found dominating revision for %s\n%s > %s', self.vf,
                   self._head_key[-1], other)
            self._weave = None
        else:
            self._head_key = None
            self._build_weave()

    def _precache_tip_lines(self):
        # Turn this into a no-op, because we will do this later
        pass

    def _find_recursive_lcas(self):
        """Find all the ancestors back to a unique lca"""
        cur_ancestors = (self.a_key, self.b_key)
        # graph.find_lca(uncommon, keys) now returns plain NULL_REVISION,
        # rather than a key tuple. We will just map that directly to no common
        # ancestors.
        parent_map = {}
        while True:
            next_lcas = self.graph.find_lca(*cur_ancestors)
            # Map a plain NULL_REVISION to a simple no-ancestors
            if next_lcas == set([NULL_REVISION]):
                next_lcas = ()
            # Order the lca's based on when they were merged into the tip
            # While the actual merge portion of weave merge uses a set() of
            # active revisions, the order of insertion *does* effect the
            # implicit ordering of the texts.
            for rev_key in cur_ancestors:
                ordered_parents = tuple(self.graph.find_merge_order(rev_key,
                                                                    next_lcas))
                parent_map[rev_key] = ordered_parents
            if len(next_lcas) == 0:
                break
            elif len(next_lcas) == 1:
                parent_map[list(next_lcas)[0]] = ()
                break
            elif len(next_lcas) > 2:
                # More than 2 lca's, fall back to grabbing all nodes between
                # this and the unique lca.
                mutter('More than 2 LCAs, falling back to all nodes for:'
                       ' %s, %s\n=> %s', self.a_key, self.b_key, cur_ancestors)
                cur_lcas = next_lcas
                while len(cur_lcas) > 1:
                    cur_lcas = self.graph.find_lca(*cur_lcas)
                if len(cur_lcas) == 0:
                    # No common base to find, use the full ancestry
                    unique_lca = None
                else:
                    unique_lca = list(cur_lcas)[0]
                    if unique_lca == NULL_REVISION:
                        # find_lca will return a plain 'NULL_REVISION' rather
                        # than a key tuple when there is no common ancestor, we
                        # prefer to just use None, because it doesn't confuse
                        # _get_interesting_texts()
                        unique_lca = None
                parent_map.update(self._find_unique_parents(next_lcas,
                                                            unique_lca))
                break
            cur_ancestors = next_lcas
        return parent_map
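
    # Illustrative sketch, not from bzrlib (graph invented): for a simple
    # criss-cross history where A and B both merged L1 and L2, and L1 and L2
    # both descend from BASE,
    #
    #   BASE -> L1 -> A, B
    #   BASE -> L2 -> A, B
    #
    # the first pass records A and B with ordered parents (L1, L2); the next
    # pass finds the single LCA of L1 and L2 and stops, giving roughly
    #
    #   {A: (L1, L2), B: (L1, L2), L1: (BASE,), L2: (BASE,), BASE: ()}
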
    def _find_unique_parents(self, tip_keys, base_key):
        """Find ancestors of tip that aren't ancestors of base.

        :param tip_keys: Nodes that are interesting
        :param base_key: Cull all ancestors of this node
        :return: The parent map for all revisions between tip_keys and
            base_key. base_key will be included. References to nodes outside of
            the ancestor set will also be removed.
        """
        # TODO: this would be simpler if find_unique_ancestors took a list
        #       instead of a single tip, internally it supports it, but it
        #       isn't a "backwards compatible" api change.
        if base_key is None:
            parent_map = dict(self.graph.iter_ancestry(tip_keys))
            # We remove NULL_REVISION because it isn't a proper tuple key, and
            # thus confuses things like _get_interesting_texts, and our logic
            # to add the texts into the memory weave.
            if NULL_REVISION in parent_map:
                parent_map.pop(NULL_REVISION)
        else:
            interesting = set()
            for tip in tip_keys:
                interesting.update(
                    self.graph.find_unique_ancestors(tip, [base_key]))
            parent_map = self.graph.get_parent_map(interesting)
            parent_map[base_key] = ()
        culled_parent_map, child_map, tails = self._remove_external_references(
            parent_map)
        # Remove all the tails but base_key
        if base_key is not None:
            tails.remove(base_key)
            self._prune_tails(culled_parent_map, child_map, tails)
        # Now remove all the uninteresting 'linear' regions
        simple_map = _mod_graph.collapse_linear_regions(culled_parent_map)
        return simple_map

    @staticmethod
    def _remove_external_references(parent_map):
        """Remove references that go outside of the parent map.

        :param parent_map: Something returned from Graph.get_parent_map(keys)
        :return: (filtered_parent_map, child_map, tails)
            filtered_parent_map is parent_map without external references
            child_map is the {parent_key: [child_keys]} mapping
            tails is a list of nodes that do not have any parents in the map
        """
        # TODO: The basic effect of this function seems more generic than
        #       _PlanMerge. But the specific details of building a child_map,
        #       and computing tails seems very specific to _PlanMerge.
        #       Still, should this be in Graph land?
        filtered_parent_map = {}
        child_map = {}
        tails = []
        for key, parent_keys in parent_map.iteritems():
            culled_parent_keys = [p for p in parent_keys if p in parent_map]
            if not culled_parent_keys:
                tails.append(key)
            for parent_key in culled_parent_keys:
                child_map.setdefault(parent_key, []).append(key)
            # TODO: Do we want to do this, it adds overhead for every node,
            #       just to say that the node has no children
            child_map.setdefault(key, [])
            filtered_parent_map[key] = culled_parent_keys
        return filtered_parent_map, child_map, tails
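
    # Illustrative sketch, not from bzrlib (keys invented): if node 'b'
    # references an ancestor 'x' that is outside the supplied map,
    #
    #   parent_map = {'a': ('b',), 'b': ('x',)}
    #
    # then _remove_external_references(parent_map) returns
    #
    #   ({'a': ['b'], 'b': []},   # filtered_parent_map, 'x' culled
    #    {'b': ['a'], 'a': []},   # child_map
    #    ['b'])                   # tails: 'b' has no parents left in the map
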
    @staticmethod
    def _prune_tails(parent_map, child_map, tails_to_remove):
        """Remove tails from the parent map.

        This will remove the supplied revisions until no more children have 0
        parents.

        :param parent_map: A dict of {child: [parents]}, this dictionary will
            be modified in place.
        :param tails_to_remove: A list of tips that should be removed,
            this list will be consumed
        :param child_map: The reverse dict of parent_map ({parent: [children]})
            this dict will be modified
        :return: None, parent_map will be modified in place.
        """
        while tails_to_remove:
            next = tails_to_remove.pop()
            parent_map.pop(next)
            children = child_map.pop(next)
            for child in children:
                child_parents = parent_map[child]
                child_parents.remove(next)
                if len(child_parents) == 0:
                    tails_to_remove.append(child)
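
    # Illustrative sketch, not from bzrlib (keys invented): pruning the tail
    # 'c' from
    #
    #   parent_map = {'a': ['b'], 'b': ['c'], 'c': []}
    #   child_map  = {'c': ['b'], 'b': ['a'], 'a': []}
    #   tails_to_remove = ['c']
    #
    # removes 'c', which leaves 'b' without parents, so 'b' is pruned next,
    # and then 'a' in turn; both maps end up empty.
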
    def _get_interesting_texts(self, parent_map):
        """Return a dict of texts we are interested in.

        Note that the input is in key tuples, but the output is in plain
        revision ids.

        :param parent_map: The output from _find_recursive_lcas
        :return: A dict of {'revision_id':lines} as returned by
            _PlanMergeBase.get_lines()
        """
        all_revision_keys = set(parent_map)
        all_revision_keys.add(self.a_key)
        all_revision_keys.add(self.b_key)

        # Everything else is in 'keys' but get_lines is in 'revision_ids'
        all_texts = self.get_lines([k[-1] for k in all_revision_keys])
        return all_texts

    def _build_weave(self):
        from bzrlib import weave
        self._weave = weave.Weave(weave_name='in_memory_weave',
                                  allow_reserved=True)
        parent_map = self._find_recursive_lcas()

        all_texts = self._get_interesting_texts(parent_map)

        # Note: Unfortunately, the order given by topo_sort will effect the
        # ordering resolution in the output. Specifically, if you add A then B,
        # then in the output text A lines will show up before B lines. And, of
        # course, topo_sort doesn't guarantee any real ordering.
        # So we use merge_sort, and add a fake node on the tip.
        # This ensures that left-hand parents will always be inserted into the
        # weave before right-hand parents.
        tip_key = self._key_prefix + (_mod_revision.CURRENT_REVISION,)
        parent_map[tip_key] = (self.a_key, self.b_key)

        for seq_num, key, depth, eom in reversed(tsort.merge_sort(parent_map,
                                                                  tip_key)):
            if key == tip_key:
                continue
            # for key in tsort.topo_sort(parent_map):
            parent_keys = parent_map[key]
            revision_id = key[-1]
            parent_ids = [k[-1] for k in parent_keys]
            self._weave.add_lines(revision_id, parent_ids,
                                  all_texts[revision_id])

    def plan_merge(self):
        """Generate a 'plan' for merging the two revisions.

        This involves comparing their texts and determining the cause of
        differences.  If text A has a line and text B does not, then either the
        line was added to text A, or it was deleted from B.  Once the causes
        are combined, they are written out in the format described in
        VersionedFile.plan_merge
        """
        if self._head_key is not None: # There was a single head
            if self._head_key == self.a_key:
                plan = 'new-a'
            else:
                if self._head_key != self.b_key:
                    raise AssertionError('There was an invalid head: %s != %s'
                                         % (self.b_key, self._head_key))
                plan = 'new-b'
            head_rev = self._head_key[-1]
            lines = self.get_lines([head_rev])[head_rev]
            return ((plan, line) for line in lines)
        return self._weave.plan_merge(self.a_rev, self.b_rev)

class _PlanLCAMerge(_PlanMergeBase):
    """
    This merge algorithm differs from _PlanMerge in that:
    1. comparisons are done against LCAs only
    2. cases where a contested line is new versus one LCA but old versus
       another are marked as conflicts, by emitting the line as conflicted-a
       or conflicted-b.

    This is faster, and hopefully produces more useful output.
    """

    def __init__(self, a_rev, b_rev, vf, key_prefix, graph):
        _PlanMergeBase.__init__(self, a_rev, b_rev, vf, key_prefix)
        lcas = graph.find_lca(key_prefix + (a_rev,), key_prefix + (b_rev,))
        self.lcas = set()
        for lca in lcas:
            if lca == NULL_REVISION:
                self.lcas.add(lca)
            else:
                self.lcas.add(lca[-1])
        for lca in self.lcas:
            if _mod_revision.is_null(lca):
                lca_lines = []
            else:
                lca_lines = self.get_lines([lca])[lca]
            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_a,
                                                           lca_lines)
            blocks = list(matcher.get_matching_blocks())
            self._cached_matching_blocks[(a_rev, lca)] = blocks
            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_b,
                                                           lca_lines)
            blocks = list(matcher.get_matching_blocks())
            self._cached_matching_blocks[(b_rev, lca)] = blocks

    def _determine_status(self, revision_id, unique_line_numbers):
        """Determines the status unique lines versus all lcas.

        Basically, determines why the line is unique to this revision.

        A line may be determined new, killed, or both.

        If a line is determined new, that means it was not present in at least
        one LCA, and is not present in the other merge revision.

        If a line is determined killed, that means the line was present in
        at least one LCA.

        If a line is killed and new, this indicates that the two merge
        revisions contain differing conflict resolutions.

        :param revision_id: The id of the revision in which the lines are
            unique
        :param unique_line_numbers: The line numbers of unique lines.
        :return: a tuple of (new_this, killed_other)
        """
        new = set()
        killed = set()
        unique_line_numbers = set(unique_line_numbers)
        for lca in self.lcas:
            blocks = self._get_matching_blocks(revision_id, lca)
            unique_vs_lca, _ignored = self._unique_lines(blocks)
            new.update(unique_line_numbers.intersection(unique_vs_lca))
            killed.update(unique_line_numbers.difference(unique_vs_lca))
        return new, killed
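
    # Illustrative sketch, not from bzrlib (values invented): if line 3 of
    # revision A is unique versus B, then
    #
    #   _determine_status('A-rev-id', [3])
    #
    # returns roughly ({3}, set()) when line 3 is also unique versus every
    # LCA (purely new in A), (set(), {3}) when every LCA already contains it
    # (B killed it), and ({3}, {3}) when the LCAs disagree, which plan_merge
    # later reports as a conflicted line.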