~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/merge.py

  • Committer: Robert Collins
  • Date: 2008-02-13 03:30:01 UTC
  • mfrom: (3221 +trunk)
  • mto: This revision was merged to the branch mainline in revision 3224.
  • Revision ID: robertc@robertcollins.net-20080213033001-rw70ul0zb02ph856
Merge to fix conflicts.

=== modified file 'bzrlib/merge.py'
@@ -20,6 +20,7 @@
 import warnings
 
 from bzrlib import (
+    debug,
     errors,
     osutils,
     patiencediff,
@@ -44,10 +45,11 @@
 from bzrlib.merge3 import Merge3
 from bzrlib.osutils import rename, pathjoin
 from progress import DummyProgress, ProgressPhase
-from bzrlib.revision import (is_ancestor, NULL_REVISION, ensure_null)
+from bzrlib.revision import (NULL_REVISION, ensure_null)
 from bzrlib.textfile import check_text_lines
-from bzrlib.trace import mutter, warning, note
-from bzrlib.transform import (TreeTransform, resolve_conflicts, cook_conflicts,
+from bzrlib.trace import mutter, warning, note, is_quiet
+from bzrlib.transform import (TransformPreview, TreeTransform,
+                              resolve_conflicts, cook_conflicts,
                               conflict_pass, FinalPaths, create_by_entry,
                               unique_add, ROOT_PARENT)
 from bzrlib.versionedfile import PlanWeaveMerge
@@ -64,7 +66,7 @@
 class Merger(object):
     def __init__(self, this_branch, other_tree=None, base_tree=None,
                  this_tree=None, pb=DummyProgress(), change_reporter=None,
-                 recurse='down'):
+                 recurse='down', revision_graph=None):
         object.__init__(self)
         assert this_tree is not None, "this_tree is required"
         self.this_branch = this_branch
@@ -88,6 +90,38 @@
         self.recurse = recurse
         self.change_reporter = change_reporter
         self._cached_trees = {}
+        self._revision_graph = revision_graph
+        self._base_is_ancestor = None
+        self._base_is_other_ancestor = None
+
+    @property
+    def revision_graph(self):
+        if self._revision_graph is None:
+            self._revision_graph = self.this_branch.repository.get_graph()
+        return self._revision_graph
+
+    def _set_base_is_ancestor(self, value):
+        self._base_is_ancestor = value
+
+    def _get_base_is_ancestor(self):
+        if self._base_is_ancestor is None:
+            self._base_is_ancestor = self.revision_graph.is_ancestor(
+                self.base_rev_id, self.this_basis)
+        return self._base_is_ancestor
+
+    base_is_ancestor = property(_get_base_is_ancestor, _set_base_is_ancestor)
+
+    def _set_base_is_other_ancestor(self, value):
+        self._base_is_other_ancestor = value
+
+    def _get_base_is_other_ancestor(self):
+        if self._base_is_other_ancestor is None:
+            self.base_is_other_ancestor = self.revision_graph.is_ancestor(
+                self.base_rev_id, self.other_basis)
+        return self._base_is_other_ancestor
+
+    base_is_other_ancestor = property(_get_base_is_other_ancestor,
+                                      _set_base_is_other_ancestor)
 
     @staticmethod
     def from_uncommitted(tree, other_tree, pb):
@@ -101,6 +135,7 @@
                         pb)
         merger.base_rev_id = merger.base_tree.get_revision_id()
         merger.other_rev_id = None
+        merger.other_basis = merger.base_rev_id
         return merger
 
     @classmethod
@@ -114,17 +149,21 @@
         mergeable.install_revisions(tree.branch.repository)
         base_revision_id, other_revision_id, verified =\
             mergeable.get_merge_request(tree.branch.repository)
+        revision_graph = tree.branch.repository.get_graph()
         if (base_revision_id != _mod_revision.NULL_REVISION and
-            tree.branch.repository.get_graph().is_ancestor(
+            revision_graph.is_ancestor(
             base_revision_id, tree.branch.last_revision())):
             base_revision_id = None
+        else:
+            warning('Performing cherrypick')
         merger = klass.from_revision_ids(pb, tree, other_revision_id,
-                                         base_revision_id)
+                                         base_revision_id, revision_graph=
+                                         revision_graph)
         return merger, verified
 
     @staticmethod
     def from_revision_ids(pb, this, other, base=None, other_branch=None,
-                          base_branch=None):
+                          base_branch=None, revision_graph=None):
         """Return a Merger for revision-ids.
 
         :param tree: The tree to merge changes into
@@ -137,7 +176,8 @@
             not supplied, other_branch or this.branch will be used.
         :param pb: A progress indicator
         """
-        merger = Merger(this.branch, this_tree=this, pb=pb)
+        merger = Merger(this.branch, this_tree=this, pb=pb,
+                        revision_graph=revision_graph)
         if other_branch is None:
             other_branch = this.branch
         merger.set_other_revision(other, other_branch)
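
The revision_graph threaded through the constructors above is an optimization: the same Graph object is reused for the repeated is_ancestor() checks instead of being rebuilt each time. An illustrative sketch only (not part of this diff) of how a caller might pass it in, assuming `tree` is a bzrlib WorkingTree, `pb` a progress indicator, and 'other-revision-id' a placeholder revision id:

    graph = tree.branch.repository.get_graph()
    merger = Merger.from_revision_ids(pb, tree, 'other-revision-id',
                                      revision_graph=graph)
    # base_is_ancestor / base_is_other_ancestor are now computed lazily
    # from this shared graph via the properties added above.
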
@@ -296,28 +336,24 @@
         self.base_branch = branch
         self._maybe_fetch(branch, self.this_branch, revision_id)
         self.base_tree = self.revision_tree(revision_id)
-        self.base_is_ancestor = is_ancestor(self.this_basis,
-                                            self.base_rev_id,
-                                            self.this_branch)
-        self.base_is_other_ancestor = is_ancestor(self.other_basis,
-                                                  self.base_rev_id,
-                                                  self.this_branch)
 
     def _maybe_fetch(self, source, target, revision_id):
         if not source.repository.has_same_location(target.repository):
             target.fetch(source, revision_id)
 
     def find_base(self):
-        this_repo = self.this_branch.repository
-        graph = this_repo.get_graph()
         revisions = [ensure_null(self.this_basis),
                      ensure_null(self.other_basis)]
         if NULL_REVISION in revisions:
             self.base_rev_id = NULL_REVISION
         else:
-            self.base_rev_id = graph.find_unique_lca(*revisions)
+            self.base_rev_id, steps = self.revision_graph.find_unique_lca(
+                revisions[0], revisions[1], count_steps=True)
             if self.base_rev_id == NULL_REVISION:
                 raise UnrelatedBranches()
+            if steps > 1:
+                warning('Warning: criss-cross merge encountered.  See bzr'
+                        ' help criss-cross.')
         self.base_tree = self.revision_tree(self.base_rev_id)
         self.base_is_ancestor = True
         self.base_is_other_ancestor = True
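
For reference, the new warning fires on the ancestry shape below. An illustrative sketch only (not part of this diff), with hypothetical revision ids and a parent map in the shape bzrlib's Graph works from, of a criss-cross history where find_unique_lca() needs more than one step:

    # Both B and C are lowest common ancestors of D and E, so merging D and E
    # has no single best base and the criss-cross warning above is emitted.
    parent_map = {
        'A': (),
        'B': ('A',), 'C': ('A',),
        'D': ('B', 'C'),   # tip of THIS
        'E': ('C', 'B'),   # tip of OTHER
    }
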
@@ -340,19 +376,14 @@
                 self.base_rev_id = _mod_revision.ensure_null(
                     base_branch.get_rev_id(base_revision[1]))
             self._maybe_fetch(base_branch, self.this_branch, self.base_rev_id)
-            self.base_is_ancestor = is_ancestor(self.this_basis,
-                                                self.base_rev_id,
-                                                self.this_branch)
-            self.base_is_other_ancestor = is_ancestor(self.other_basis,
-                                                      self.base_rev_id,
-                                                      self.this_branch)
 
-    def do_merge(self):
+    def make_merger(self):
         kwargs = {'working_tree':self.this_tree, 'this_tree': self.this_tree,
                   'other_tree': self.other_tree,
                   'interesting_ids': self.interesting_ids,
                   'interesting_files': self.interesting_files,
-                  'pp': self.pp}
+                  'pp': self.pp,
+                  'do_merge': False}
         if self.merge_type.requires_base:
             kwargs['base_tree'] = self.base_tree
         if self.merge_type.supports_reprocess:
@@ -364,16 +395,26 @@
             kwargs['show_base'] = self.show_base
         elif self.show_base:
             raise BzrError("Showing base is not supported for this"
-                                  " merge type. %s" % self.merge_type)
+                           " merge type. %s" % self.merge_type)
+        if (not getattr(self.merge_type, 'supports_reverse_cherrypick', True)
+            and not self.base_is_other_ancestor):
+            raise errors.CannotReverseCherrypick()
+        if self.merge_type.history_based:
+            kwargs['cherrypick'] = (not self.base_is_ancestor or
+                                    not self.base_is_other_ancestor)
+        return self.merge_type(pb=self._pb,
+                               change_reporter=self.change_reporter,
+                               **kwargs)
+
+    def do_merge(self):
+        merge = self.make_merger()
         self.this_tree.lock_tree_write()
         if self.base_tree is not None:
             self.base_tree.lock_read()
         if self.other_tree is not None:
             self.other_tree.lock_read()
         try:
-            merge = self.merge_type(pb=self._pb,
-                                    change_reporter=self.change_reporter,
-                                    **kwargs)
+            merge.do_merge()
             if self.recurse == 'down':
                 for path, file_id in self.this_tree.iter_references():
                     sub_tree = self.this_tree.get_nested_tree(file_id, path)
@@ -398,7 +439,7 @@
                 self.base_tree.unlock()
             self.this_tree.unlock()
         if len(merge.cooked_conflicts) == 0:
-            if not self.ignore_zero:
+            if not self.ignore_zero and not is_quiet():
                 note("All changes applied successfully.")
         else:
             note("%d conflicts encountered." % len(merge.cooked_conflicts))
@@ -412,12 +453,13 @@
     supports_reprocess = True
     supports_show_base = True
     history_based = False
+    supports_reverse_cherrypick = True
     winner_idx = {"this": 2, "other": 1, "conflict": 1}
 
     def __init__(self, working_tree, this_tree, base_tree, other_tree,
                  interesting_ids=None, reprocess=False, show_base=False,
                  pb=DummyProgress(), pp=None, change_reporter=None,
-                 interesting_files=None):
+                 interesting_files=None, do_merge=True):
         """Initialize the merger object and perform the merge.
 
         :param working_tree: The working tree to apply the merge to
@@ -446,11 +488,8 @@
         self.interesting_ids = interesting_ids
         self.interesting_files = interesting_files
         self.this_tree = working_tree
-        self.this_tree.lock_tree_write()
         self.base_tree = base_tree
-        self.base_tree.lock_read()
         self.other_tree = other_tree
-        self.other_tree.lock_read()
         self._raw_conflicts = []
         self.cooked_conflicts = []
         self.reprocess = reprocess
@@ -460,44 +499,22 @@
         self.change_reporter = change_reporter
         if self.pp is None:
             self.pp = ProgressPhase("Merge phase", 3, self.pb)
+        if do_merge:
+            self.do_merge()
 
-        self.tt = TreeTransform(working_tree, self.pb)
+    def do_merge(self):
+        self.this_tree.lock_tree_write()
+        self.base_tree.lock_read()
+        self.other_tree.lock_read()
+        self.tt = TreeTransform(self.this_tree, self.pb)
         try:
             self.pp.next_phase()
-            entries = self._entries3()
-            child_pb = ui.ui_factory.nested_progress_bar()
-            try:
-                for num, (file_id, changed, parents3, names3,
-                          executable3) in enumerate(entries):
-                    child_pb.update('Preparing file merge', num, len(entries))
-                    self._merge_names(file_id, parents3, names3)
-                    if changed:
-                        file_status = self.merge_contents(file_id)
-                    else:
-                        file_status = 'unmodified'
-                    self._merge_executable(file_id,
-                        executable3, file_status)
-            finally:
-                child_pb.finished()
-            self.fix_root()
-            self.pp.next_phase()
-            child_pb = ui.ui_factory.nested_progress_bar()
-            try:
-                fs_conflicts = resolve_conflicts(self.tt, child_pb,
-                    lambda t, c: conflict_pass(t, c, self.other_tree))
-            finally:
-                child_pb.finished()
-            if change_reporter is not None:
-                from bzrlib import delta
-                delta.report_changes(self.tt._iter_changes(), change_reporter)
-            self.cook_conflicts(fs_conflicts)
-            for conflict in self.cooked_conflicts:
-                warning(conflict)
+            self._compute_transform()
             self.pp.next_phase()
             results = self.tt.apply(no_conflicts=True)
             self.write_modified(results)
             try:
-                working_tree.add_conflicts(self.cooked_conflicts)
+                self.this_tree.add_conflicts(self.cooked_conflicts)
             except UnsupportedOperation:
                 pass
         finally:
@@ -507,6 +524,52 @@
             self.this_tree.unlock()
             self.pb.clear()
 
+    def make_preview_transform(self):
+        self.base_tree.lock_read()
+        self.other_tree.lock_read()
+        self.tt = TransformPreview(self.this_tree)
+        try:
+            self.pp.next_phase()
+            self._compute_transform()
+            self.pp.next_phase()
+        finally:
+            self.other_tree.unlock()
+            self.base_tree.unlock()
+            self.pb.clear()
+        return self.tt
+
+    def _compute_transform(self):
+        entries = self._entries3()
+        child_pb = ui.ui_factory.nested_progress_bar()
+        try:
+            for num, (file_id, changed, parents3, names3,
+                      executable3) in enumerate(entries):
+                child_pb.update('Preparing file merge', num, len(entries))
+                self._merge_names(file_id, parents3, names3)
+                if changed:
+                    file_status = self.merge_contents(file_id)
+                else:
+                    file_status = 'unmodified'
+                self._merge_executable(file_id,
+                    executable3, file_status)
+        finally:
+            child_pb.finished()
+        self.fix_root()
+        self.pp.next_phase()
+        child_pb = ui.ui_factory.nested_progress_bar()
+        try:
+            fs_conflicts = resolve_conflicts(self.tt, child_pb,
+                lambda t, c: conflict_pass(t, c, self.other_tree))
+        finally:
+            child_pb.finished()
+        if self.change_reporter is not None:
+            from bzrlib import delta
+            delta.report_changes(
+                self.tt._iter_changes(), self.change_reporter)
+        self.cook_conflicts(fs_conflicts)
+        for conflict in self.cooked_conflicts:
+            warning(conflict)
+
     def _entries3(self):
         """Gather data about files modified between three trees.
 
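
The do_merge=False plumbing added above is what separates planning a merge from applying it: construction no longer touches the working tree, so the result can be rendered into a TransformPreview first. An illustrative sketch only (not part of this diff), assuming `merger` is a bzrlib Merger already configured with this/base/other trees as in the earlier hunks:

    merger.merge_type = Merge3Merger
    tree_merger = merger.make_merger()         # instantiated with do_merge=False
    tt = tree_merger.make_preview_transform()  # TransformPreview; nothing applied
    try:
        pass  # inspect the planned changes on tt before deciding to apply them
    finally:
        tt.finalize()  # finalize() assumed from the TreeTransform interface
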
@@ -974,23 +1037,38 @@
     """Three-way tree merger, text weave merger."""
     supports_reprocess = True
     supports_show_base = False
+    supports_reverse_cherrypick = False
+    history_based = True
 
     def __init__(self, working_tree, this_tree, base_tree, other_tree,
                  interesting_ids=None, pb=DummyProgress(), pp=None,
                  reprocess=False, change_reporter=None,
-                 interesting_files=None):
+                 interesting_files=None, cherrypick=False, do_merge=True):
+        self.cherrypick = cherrypick
         super(WeaveMerger, self).__init__(working_tree, this_tree,
                                           base_tree, other_tree,
                                           interesting_ids=interesting_ids,
                                           pb=pb, pp=pp, reprocess=reprocess,
-                                          change_reporter=change_reporter)
+                                          change_reporter=change_reporter,
+                                          do_merge=do_merge)
 
     def _merged_lines(self, file_id):
         """Generate the merged lines.
         There is no distinction between lines that are meant to contain <<<<<<<
         and conflicts.
         """
-        plan = self.this_tree.plan_file_merge(file_id, self.other_tree)
+        if self.cherrypick:
+            base = self.base_tree
+        else:
+            base = None
+        plan = self.this_tree.plan_file_merge(file_id, self.other_tree,
+                                              base=base)
+        if 'merge' in debug.debug_flags:
+            plan = list(plan)
+            trans_id = self.tt.trans_id_file_id(file_id)
+            name = self.tt.final_name(trans_id) + '.plan'
+            contents = ('%10s|%s' % l for l in plan)
+            self.tt.new_file(name, self.tt.final_parent(trans_id), contents)
         textmerge = PlanWeaveMerge(plan, '<<<<<<< TREE\n',
             '>>>>>>> MERGE-SOURCE\n')
         return textmerge.merge_lines(self.reprocess)
@@ -1015,6 +1093,30 @@
             file_group.append(trans_id)
 
 
+class LCAMerger(WeaveMerger):
+
+    def _merged_lines(self, file_id):
+        """Generate the merged lines.
+        There is no distinction between lines that are meant to contain <<<<<<<
+        and conflicts.
+        """
+        if self.cherrypick:
+            base = self.base_tree
+        else:
+            base = None
+        plan = self.this_tree.plan_file_lca_merge(file_id, self.other_tree,
+                                                  base=base)
+        if 'merge' in debug.debug_flags:
+            plan = list(plan)
+            trans_id = self.tt.trans_id_file_id(file_id)
+            name = self.tt.final_name(trans_id) + '.plan'
+            contents = ('%10s|%s' % l for l in plan)
+            self.tt.new_file(name, self.tt.final_parent(trans_id), contents)
+        textmerge = PlanWeaveMerge(plan, '<<<<<<< TREE\n',
+            '>>>>>>> MERGE-SOURCE\n')
+        return textmerge.merge_lines(self.reprocess)
+
+
 class Diff3Merger(Merge3Merger):
     """Three-way merger using external diff3 for text merging"""
 
@@ -1137,3 +1239,239 @@
         for text_a, text_b in zip(plain_a[ai:a_cur], plain_b[bi:b_cur]):
             assert text_a == text_b
             yield "unchanged", text_a
+
+
+class _PlanMergeBase(object):
+
+    def __init__(self, a_rev, b_rev, vf):
+        """Contructor.
+
+        :param a_rev: Revision-id of one revision to merge
+        :param b_rev: Revision-id of the other revision to merge
+        :param vf: A versionedfile containing both revisions
+        """
+        self.a_rev = a_rev
+        self.b_rev = b_rev
+        self.lines_a = vf.get_lines(a_rev)
+        self.lines_b = vf.get_lines(b_rev)
+        self.vf = vf
+        self._last_lines = None
+        self._last_lines_revision_id = None
+        self._cached_matching_blocks = {}
+
+    def plan_merge(self):
+        """Generate a 'plan' for merging the two revisions.
+
+        This involves comparing their texts and determining the cause of
+        differences.  If text A has a line and text B does not, then either the
+        line was added to text A, or it was deleted from B.  Once the causes
+        are combined, they are written out in the format described in
+        VersionedFile.plan_merge
+        """
+        blocks = self._get_matching_blocks(self.a_rev, self.b_rev)
+        unique_a, unique_b = self._unique_lines(blocks)
+        new_a, killed_b = self._determine_status(self.a_rev, unique_a)
+        new_b, killed_a = self._determine_status(self.b_rev, unique_b)
+        return self._iter_plan(blocks, new_a, killed_b, new_b, killed_a)
+
+    def _iter_plan(self, blocks, new_a, killed_b, new_b, killed_a):
+        last_i = 0
+        last_j = 0
+        for i, j, n in blocks:
+            for a_index in range(last_i, i):
+                if a_index in new_a:
+                    if a_index in killed_b:
+                        yield 'conflicted-a', self.lines_a[a_index]
+                    else:
+                        yield 'new-a', self.lines_a[a_index]
+                else:
+                    yield 'killed-b', self.lines_a[a_index]
+            for b_index in range(last_j, j):
+                if b_index in new_b:
+                    if b_index in killed_a:
+                        yield 'conflicted-b', self.lines_b[a_index]
+                    else:
+                        yield 'new-b', self.lines_b[b_index]
+                else:
+                    yield 'killed-a', self.lines_b[b_index]
+            # handle common lines
+            for a_index in range(i, i+n):
+                yield 'unchanged', self.lines_a[a_index]
+            last_i = i+n
+            last_j = j+n
+
+    def _get_matching_blocks(self, left_revision, right_revision):
+        """Return a description of which sections of two revisions match.
+
+        See SequenceMatcher.get_matching_blocks
+        """
+        cached = self._cached_matching_blocks.get((left_revision,
+                                                   right_revision))
+        if cached is not None:
+            return cached
+        if self._last_lines_revision_id == left_revision:
+            left_lines = self._last_lines
+        else:
+            left_lines = self.vf.get_lines(left_revision)
+        right_lines = self.vf.get_lines(right_revision)
+        self._last_lines = right_lines
+        self._last_lines_revision_id = right_revision
+        matcher = patiencediff.PatienceSequenceMatcher(None, left_lines,
+                                                       right_lines)
+        return matcher.get_matching_blocks()
+
+    def _unique_lines(self, matching_blocks):
+        """Analyse matching_blocks to determine which lines are unique
+
+        :return: a tuple of (unique_left, unique_right), where the values are
+            sets of line numbers of unique lines.
+        """
+        last_i = 0
+        last_j = 0
+        unique_left = []
+        unique_right = []
+        for i, j, n in matching_blocks:
+            unique_left.extend(range(last_i, i))
+            unique_right.extend(range(last_j, j))
+            last_i = i + n
+            last_j = j + n
+        return unique_left, unique_right
+
+    @staticmethod
+    def _subtract_plans(old_plan, new_plan):
+        """Remove changes from new_plan that came from old_plan.
+
+        It is assumed that the difference between the old_plan and new_plan
+        is their choice of 'b' text.
+
+        All lines from new_plan that differ from old_plan are emitted
+        verbatim.  All lines from new_plan that match old_plan but are
+        not about the 'b' revision are emitted verbatim.
+
+        Lines that match and are about the 'b' revision are the lines we
+        don't want, so we convert 'killed-b' -> 'unchanged', and 'new-b'
+        is skipped entirely.
+        """
+        matcher = patiencediff.PatienceSequenceMatcher(None, old_plan,
+                                                       new_plan)
+        last_j = 0
+        for i, j, n in matcher.get_matching_blocks():
+            for jj in range(last_j, j):
+                yield new_plan[jj]
+            for jj in range(j, j+n):
+                plan_line = new_plan[jj]
+                if plan_line[0] == 'new-b':
+                    pass
+                elif plan_line[0] == 'killed-b':
+                    yield 'unchanged', plan_line[1]
+                else:
+                    yield plan_line
+            last_j = j + n
+
+
+class _PlanMerge(_PlanMergeBase):
+    """Plan an annotate merge using on-the-fly annotation"""
+
+    def __init__(self, a_rev, b_rev, vf):
+       _PlanMergeBase.__init__(self, a_rev, b_rev, vf)
+       a_ancestry = set(vf.get_ancestry(a_rev, topo_sorted=False))
+       b_ancestry = set(vf.get_ancestry(b_rev, topo_sorted=False))
+       self.uncommon = a_ancestry.symmetric_difference(b_ancestry)
+
+    def _determine_status(self, revision_id, unique_line_numbers):
+        """Determines the status unique lines versus all lcas.
+
+        Basically, determines why the line is unique to this revision.
+
+        A line may be determined new or killed, but not both.
+
+        :param revision_id: The id of the revision in which the lines are
+            unique
+        :param unique_line_numbers: The line numbers of unique lines.
+        :return a tuple of (new_this, killed_other):
+        """
+        new = self._find_new(revision_id)
+        killed = set(unique_line_numbers).difference(new)
+        return new, killed
+
+    def _find_new(self, version_id):
+        """Determine which lines are new in the ancestry of this version.
+
+        If a lines is present in this version, and not present in any
+        common ancestor, it is considered new.
+        """
+        if version_id not in self.uncommon:
+            return set()
+        parents = self.vf.get_parents(version_id)
+        if len(parents) == 0:
+            return set(range(len(self.vf.get_lines(version_id))))
+        new = None
+        for parent in parents:
+            blocks = self._get_matching_blocks(version_id, parent)
+            result, unused = self._unique_lines(blocks)
+            parent_new = self._find_new(parent)
+            for i, j, n in blocks:
+                for ii, jj in [(i+r, j+r) for r in range(n)]:
+                    if jj in parent_new:
+                        result.append(ii)
+            if new is None:
+                new = set(result)
+            else:
+                new.intersection_update(result)
+        return new
+
+
+class _PlanLCAMerge(_PlanMergeBase):
+    """
+    This merge algorithm differs from _PlanMerge in that:
+    1. comparisons are done against LCAs only
+    2. cases where a contested line is new versus one LCA but old versus
+       another are marked as conflicts, by emitting the line as conflicted-a
+       or conflicted-b.
+
+    This is faster, and hopefully produces more useful output.
+    """
+
+    def __init__(self, a_rev, b_rev, vf, graph):
+        _PlanMergeBase.__init__(self, a_rev, b_rev, vf)
+        self.lcas = graph.find_lca(a_rev, b_rev)
+        for lca in self.lcas:
+            lca_lines = self.vf.get_lines(lca)
+            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_a,
+                                                           lca_lines)
+            blocks = list(matcher.get_matching_blocks())
+            self._cached_matching_blocks[(a_rev, lca)] = blocks
+            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_b,
+                                                           lca_lines)
+            blocks = list(matcher.get_matching_blocks())
+            self._cached_matching_blocks[(b_rev, lca)] = blocks
+
+    def _determine_status(self, revision_id, unique_line_numbers):
+        """Determines the status unique lines versus all lcas.
+
+        Basically, determines why the line is unique to this revision.
+
+        A line may be determined new, killed, or both.
+
+        If a line is determined new, that means it was not present in at least
+        one LCA, and is not present in the other merge revision.
+
+        If a line is determined killed, that means the line was present in
+        at least one LCA.
+
+        If a line is killed and new, this indicates that the two merge
+        revisions contain differing conflict resolutions.
+        :param revision_id: The id of the revision in which the lines are
+            unique
+        :param unique_line_numbers: The line numbers of unique lines.
+        :return a tuple of (new_this, killed_other):
+        """
+        new = set()
+        killed = set()
+        unique_line_numbers = set(unique_line_numbers)
+        for lca in self.lcas:
+            blocks = self._get_matching_blocks(revision_id, lca)
+            unique_vs_lca, _ignored = self._unique_lines(blocks)
+            new.update(unique_line_numbers.intersection(unique_vs_lca))
+            killed.update(unique_line_numbers.difference(unique_vs_lca))
+        return new, killed
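
The two _Plan*Merge classes above emit the same intermediate representation: a sequence of (state, line) pairs that PlanWeaveMerge later turns into merged text or conflict regions. An illustrative sketch only (not part of this diff) of what such a plan looks like; the states come from _iter_plan and _determine_status above, while the lines themselves are made up:

    example_plan = [
        ('unchanged',    'def foo():\n'),         # common to both revisions
        ('new-a',        '    added_in_a()\n'),   # unique to revision A's ancestry
        ('killed-b',     '    removed_by_b()\n'), # present in A, deleted on B's side
        ('new-b',        '    added_in_b()\n'),   # unique to revision B's ancestry
        ('conflicted-a', '    disputed()\n'),     # _PlanLCAMerge: new vs. one LCA, old vs. another
    ]
    # _PlanMergeBase._subtract_plans() post-processes such plans when merging
    # against an already-merged revision: matching 'new-b' entries are dropped
    # and matching 'killed-b' entries become 'unchanged'.
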