~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/merge.py

  • Committer: Aaron Bentley
  • Date: 2008-02-24 16:42:13 UTC
  • mfrom: (3234 +trunk)
  • mto: This revision was merged to the branch mainline in revision 3235.
  • Revision ID: aaron@aaronbentley.com-20080224164213-eza1lzru5bwuwmmj
Merge with bzr.dev

=== modified file 'bzrlib/merge.py'
@@ -47,8 +47,9 @@
 from progress import DummyProgress, ProgressPhase
 from bzrlib.revision import (NULL_REVISION, ensure_null)
 from bzrlib.textfile import check_text_lines
-from bzrlib.trace import mutter, warning, note
-from bzrlib.transform import (TreeTransform, resolve_conflicts, cook_conflicts,
+from bzrlib.trace import mutter, warning, note, is_quiet
+from bzrlib.transform import (TransformPreview, TreeTransform,
+                              resolve_conflicts, cook_conflicts,
                               conflict_pass, FinalPaths, create_by_entry,
                               unique_add, ROOT_PARENT)
 from bzrlib.versionedfile import PlanWeaveMerge
@@ -65,7 +66,7 @@
 class Merger(object):
     def __init__(self, this_branch, other_tree=None, base_tree=None,
                  this_tree=None, pb=DummyProgress(), change_reporter=None,
-                 recurse='down'):
+                 recurse='down', revision_graph=None):
         object.__init__(self)
         assert this_tree is not None, "this_tree is required"
         self.this_branch = this_branch
@@ -89,6 +90,38 @@
         self.recurse = recurse
         self.change_reporter = change_reporter
         self._cached_trees = {}
+        self._revision_graph = revision_graph
+        self._base_is_ancestor = None
+        self._base_is_other_ancestor = None
+
+    @property
+    def revision_graph(self):
+        if self._revision_graph is None:
+            self._revision_graph = self.this_branch.repository.get_graph()
+        return self._revision_graph
+
+    def _set_base_is_ancestor(self, value):
+        self._base_is_ancestor = value
+
+    def _get_base_is_ancestor(self):
+        if self._base_is_ancestor is None:
+            self._base_is_ancestor = self.revision_graph.is_ancestor(
+                self.base_rev_id, self.this_basis)
+        return self._base_is_ancestor
+
+    base_is_ancestor = property(_get_base_is_ancestor, _set_base_is_ancestor)
+
+    def _set_base_is_other_ancestor(self, value):
+        self._base_is_other_ancestor = value
+
+    def _get_base_is_other_ancestor(self):
+        if self._base_is_other_ancestor is None:
+            self.base_is_other_ancestor = self.revision_graph.is_ancestor(
+                self.base_rev_id, self.other_basis)
+        return self._base_is_other_ancestor
+
+    base_is_other_ancestor = property(_get_base_is_other_ancestor,
+                                      _set_base_is_other_ancestor)
 
     @staticmethod
     def from_uncommitted(tree, other_tree, pb):
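
The hunk above replaces the eagerly computed base_is_ancestor / base_is_other_ancestor attributes with lazily evaluated properties backed by a revision graph that callers can now inject through the new revision_graph argument. A minimal standalone sketch of that lazy-caching pattern, with illustrative names rather than bzrlib's API:

class _LazyAncestryExample(object):
    """Sketch only: the lazy revision-graph pattern Merger adopts above."""

    def __init__(self, repository, revision_graph=None):
        self._repository = repository          # assumed to offer get_graph()
        self._revision_graph = revision_graph  # may be shared by the caller
        self._base_is_ancestor = None

    @property
    def revision_graph(self):
        # Build the graph only on first use, then keep it around.
        if self._revision_graph is None:
            self._revision_graph = self._repository.get_graph()
        return self._revision_graph

    def base_is_ancestor(self, base_rev_id, this_basis):
        # Run the ancestry check once and cache the boolean answer.
        if self._base_is_ancestor is None:
            self._base_is_ancestor = self.revision_graph.is_ancestor(
                base_rev_id, this_basis)
        return self._base_is_ancestor

The payoff shows up in the hunks that follow, where a single graph is built per operation and threaded through from_revision_ids() into the Merger instead of every ancestry check asking the repository for a fresh graph.
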
@@ -102,6 +135,7 @@
                         pb)
         merger.base_rev_id = merger.base_tree.get_revision_id()
         merger.other_rev_id = None
+        merger.other_basis = merger.base_rev_id
         return merger
 
     @classmethod
@@ -115,19 +149,21 @@
         mergeable.install_revisions(tree.branch.repository)
         base_revision_id, other_revision_id, verified =\
             mergeable.get_merge_request(tree.branch.repository)
+        revision_graph = tree.branch.repository.get_graph()
         if (base_revision_id != _mod_revision.NULL_REVISION and
-            tree.branch.repository.get_graph().is_ancestor(
+            revision_graph.is_ancestor(
             base_revision_id, tree.branch.last_revision())):
             base_revision_id = None
         else:
             warning('Performing cherrypick')
         merger = klass.from_revision_ids(pb, tree, other_revision_id,
-                                         base_revision_id)
+                                         base_revision_id, revision_graph=
+                                         revision_graph)
         return merger, verified
 
     @staticmethod
     def from_revision_ids(pb, this, other, base=None, other_branch=None,
-                          base_branch=None):
+                          base_branch=None, revision_graph=None):
         """Return a Merger for revision-ids.
 
         :param tree: The tree to merge changes into
@@ -140,7 +176,8 @@
             not supplied, other_branch or this.branch will be used.
         :param pb: A progress indicator
         """
-        merger = Merger(this.branch, this_tree=this, pb=pb)
+        merger = Merger(this.branch, this_tree=this, pb=pb,
+                        revision_graph=revision_graph)
         if other_branch is None:
             other_branch = this.branch
         merger.set_other_revision(other, other_branch)
@@ -299,26 +336,19 @@
         self.base_branch = branch
         self._maybe_fetch(branch, self.this_branch, revision_id)
         self.base_tree = self.revision_tree(revision_id)
-        graph = self.this_branch.repository.get_graph()
-        self.base_is_ancestor = graph.is_ancestor(self.base_rev_id,
-                                                  self.this_basis)
-        self.base_is_other_ancestor = graph.is_ancestor(self.base_rev_id,
-                                                        self.other_basis)
 
     def _maybe_fetch(self, source, target, revision_id):
         if not source.repository.has_same_location(target.repository):
             target.fetch(source, revision_id)
 
     def find_base(self):
-        this_repo = self.this_branch.repository
-        graph = this_repo.get_graph()
         revisions = [ensure_null(self.this_basis),
                      ensure_null(self.other_basis)]
         if NULL_REVISION in revisions:
             self.base_rev_id = NULL_REVISION
         else:
-            self.base_rev_id, steps = graph.find_unique_lca(revisions[0],
-                revisions[1], count_steps=True)
+            self.base_rev_id, steps = self.revision_graph.find_unique_lca(
+                revisions[0], revisions[1], count_steps=True)
             if self.base_rev_id == NULL_REVISION:
                 raise UnrelatedBranches()
             if steps > 1:
@@ -346,18 +376,14 @@
                 self.base_rev_id = _mod_revision.ensure_null(
                     base_branch.get_rev_id(base_revision[1]))
             self._maybe_fetch(base_branch, self.this_branch, self.base_rev_id)
-            graph = self.this_branch.repository.get_graph()
-            self.base_is_ancestor = graph.is_ancestor(self.base_rev_id,
-                                                      self.this_basis)
-            self.base_is_other_ancestor = graph.is_ancestor(self.base_rev_id,
-                                                            self.other_basis)
 
-    def do_merge(self):
+    def make_merger(self):
         kwargs = {'working_tree':self.this_tree, 'this_tree': self.this_tree,
                   'other_tree': self.other_tree,
                   'interesting_ids': self.interesting_ids,
                   'interesting_files': self.interesting_files,
-                  'pp': self.pp}
+                  'pp': self.pp,
+                  'do_merge': False}
         if self.merge_type.requires_base:
             kwargs['base_tree'] = self.base_tree
         if self.merge_type.supports_reprocess:
@@ -369,22 +395,26 @@
             kwargs['show_base'] = self.show_base
         elif self.show_base:
             raise BzrError("Showing base is not supported for this"
-                                  " merge type. %s" % self.merge_type)
+                           " merge type. %s" % self.merge_type)
         if (not getattr(self.merge_type, 'supports_reverse_cherrypick', True)
             and not self.base_is_other_ancestor):
             raise errors.CannotReverseCherrypick()
         if self.merge_type.history_based:
             kwargs['cherrypick'] = (not self.base_is_ancestor or
                                     not self.base_is_other_ancestor)
+        return self.merge_type(pb=self._pb,
+                               change_reporter=self.change_reporter,
+                               **kwargs)
+
+    def do_merge(self):
+        merge = self.make_merger()
         self.this_tree.lock_tree_write()
         if self.base_tree is not None:
             self.base_tree.lock_read()
         if self.other_tree is not None:
             self.other_tree.lock_read()
         try:
-            merge = self.merge_type(pb=self._pb,
-                                    change_reporter=self.change_reporter,
-                                    **kwargs)
+            merge.do_merge()
             if self.recurse == 'down':
                 for path, file_id in self.this_tree.iter_references():
                     sub_tree = self.this_tree.get_nested_tree(file_id, path)
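
The hunks above split the old do_merge() in two: make_merger() now only assembles and configures the merge_type instance (note the new 'do_merge': False entry in kwargs), while do_merge() takes the tree locks and runs it. The Merge3Merger hunks further down make the matching change on the other side, adding a do_merge flag to __init__ and factoring the computation into _compute_transform() so it can also feed the new make_preview_transform(). A standalone toy of that build, then run-or-preview shape, with illustrative names rather than bzrlib's API:

class _ToyMergeOperation(object):
    """Stand-in for a merge_type such as Merge3Merger (sketch only)."""

    def __init__(self, do_merge=True, **kwargs):
        self.kwargs = kwargs
        self.changes = None
        if do_merge:               # old behaviour: run inside the constructor
            self.do_merge()

    def _compute_changes(self):
        # Stand-in for _compute_transform(): work out what the merge would do.
        return [('create', 'NEWS', 'merged text\n')]

    def do_merge(self):
        self.changes = self._compute_changes()
        # A real implementation would now apply self.changes under tree locks.

    def make_preview(self):
        # Same computation, but nothing is applied anywhere.
        return self._compute_changes()


class _ToyMerger(object):
    """Stand-in for Merger: it configures, the operation executes."""

    def make_merger(self):
        # Assemble the operation without running it (do_merge=False).
        return _ToyMergeOperation(do_merge=False, reprocess=True)

    def do_merge(self):
        merge = self.make_merger()
        merge.do_merge()           # the real code wraps this call in tree locks
        return merge.changes
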
@@ -409,7 +439,7 @@
                 self.base_tree.unlock()
             self.this_tree.unlock()
         if len(merge.cooked_conflicts) == 0:
-            if not self.ignore_zero:
+            if not self.ignore_zero and not is_quiet():
                 note("All changes applied successfully.")
         else:
             note("%d conflicts encountered." % len(merge.cooked_conflicts))
@@ -429,7 +459,7 @@
     def __init__(self, working_tree, this_tree, base_tree, other_tree,
                  interesting_ids=None, reprocess=False, show_base=False,
                  pb=DummyProgress(), pp=None, change_reporter=None,
-                 interesting_files=None):
+                 interesting_files=None, do_merge=True):
         """Initialize the merger object and perform the merge.
 
         :param working_tree: The working tree to apply the merge to
@@ -458,11 +488,8 @@
         self.interesting_ids = interesting_ids
         self.interesting_files = interesting_files
         self.this_tree = working_tree
-        self.this_tree.lock_tree_write()
         self.base_tree = base_tree
-        self.base_tree.lock_read()
         self.other_tree = other_tree
-        self.other_tree.lock_read()
         self._raw_conflicts = []
         self.cooked_conflicts = []
         self.reprocess = reprocess
@@ -472,44 +499,22 @@
         self.change_reporter = change_reporter
         if self.pp is None:
             self.pp = ProgressPhase("Merge phase", 3, self.pb)
+        if do_merge:
+            self.do_merge()
 
-        self.tt = TreeTransform(working_tree, self.pb)
+    def do_merge(self):
+        self.this_tree.lock_tree_write()
+        self.base_tree.lock_read()
+        self.other_tree.lock_read()
+        self.tt = TreeTransform(self.this_tree, self.pb)
         try:
             self.pp.next_phase()
-            entries = self._entries3()
-            child_pb = ui.ui_factory.nested_progress_bar()
-            try:
-                for num, (file_id, changed, parents3, names3,
-                          executable3) in enumerate(entries):
-                    child_pb.update('Preparing file merge', num, len(entries))
-                    self._merge_names(file_id, parents3, names3)
-                    if changed:
-                        file_status = self.merge_contents(file_id)
-                    else:
-                        file_status = 'unmodified'
-                    self._merge_executable(file_id,
-                        executable3, file_status)
-            finally:
-                child_pb.finished()
-            self.fix_root()
-            self.pp.next_phase()
-            child_pb = ui.ui_factory.nested_progress_bar()
-            try:
-                fs_conflicts = resolve_conflicts(self.tt, child_pb,
-                    lambda t, c: conflict_pass(t, c, self.other_tree))
-            finally:
-                child_pb.finished()
-            if change_reporter is not None:
-                from bzrlib import delta
-                delta.report_changes(self.tt._iter_changes(), change_reporter)
-            self.cook_conflicts(fs_conflicts)
-            for conflict in self.cooked_conflicts:
-                warning(conflict)
+            self._compute_transform()
             self.pp.next_phase()
             results = self.tt.apply(no_conflicts=True)
             self.write_modified(results)
             try:
-                working_tree.add_conflicts(self.cooked_conflicts)
+                self.this_tree.add_conflicts(self.cooked_conflicts)
             except UnsupportedOperation:
                 pass
         finally:
@@ -519,6 +524,52 @@
             self.this_tree.unlock()
             self.pb.clear()
 
+    def make_preview_transform(self):
+        self.base_tree.lock_read()
+        self.other_tree.lock_read()
+        self.tt = TransformPreview(self.this_tree)
+        try:
+            self.pp.next_phase()
+            self._compute_transform()
+            self.pp.next_phase()
+        finally:
+            self.other_tree.unlock()
+            self.base_tree.unlock()
+            self.pb.clear()
+        return self.tt
+
+    def _compute_transform(self):
+        entries = self._entries3()
+        child_pb = ui.ui_factory.nested_progress_bar()
+        try:
+            for num, (file_id, changed, parents3, names3,
+                      executable3) in enumerate(entries):
+                child_pb.update('Preparing file merge', num, len(entries))
+                self._merge_names(file_id, parents3, names3)
+                if changed:
+                    file_status = self.merge_contents(file_id)
+                else:
+                    file_status = 'unmodified'
+                self._merge_executable(file_id,
+                    executable3, file_status)
+        finally:
+            child_pb.finished()
+        self.fix_root()
+        self.pp.next_phase()
+        child_pb = ui.ui_factory.nested_progress_bar()
+        try:
+            fs_conflicts = resolve_conflicts(self.tt, child_pb,
+                lambda t, c: conflict_pass(t, c, self.other_tree))
+        finally:
+            child_pb.finished()
+        if self.change_reporter is not None:
+            from bzrlib import delta
+            delta.report_changes(
+                self.tt._iter_changes(), self.change_reporter)
+        self.cook_conflicts(fs_conflicts)
+        for conflict in self.cooked_conflicts:
+            warning(conflict)
+
     def _entries3(self):
         """Gather data about files modified between three trees.
 
@@ -992,13 +1043,14 @@
     def __init__(self, working_tree, this_tree, base_tree, other_tree,
                  interesting_ids=None, pb=DummyProgress(), pp=None,
                  reprocess=False, change_reporter=None,
-                 interesting_files=None, cherrypick=False):
+                 interesting_files=None, cherrypick=False, do_merge=True):
         self.cherrypick = cherrypick
         super(WeaveMerger, self).__init__(working_tree, this_tree,
                                           base_tree, other_tree,
                                           interesting_ids=interesting_ids,
                                           pb=pb, pp=pp, reprocess=reprocess,
-                                          change_reporter=change_reporter)
+                                          change_reporter=change_reporter,
+                                          do_merge=do_merge)
 
     def _merged_lines(self, file_id):
         """Generate the merged lines.
@@ -1041,6 +1093,30 @@
             file_group.append(trans_id)
 
 
+class LCAMerger(WeaveMerger):
+
+    def _merged_lines(self, file_id):
+        """Generate the merged lines.
+        There is no distinction between lines that are meant to contain <<<<<<<
+        and conflicts.
+        """
+        if self.cherrypick:
+            base = self.base_tree
+        else:
+            base = None
+        plan = self.this_tree.plan_file_lca_merge(file_id, self.other_tree,
+                                                  base=base)
+        if 'merge' in debug.debug_flags:
+            plan = list(plan)
+            trans_id = self.tt.trans_id_file_id(file_id)
+            name = self.tt.final_name(trans_id) + '.plan'
+            contents = ('%10s|%s' % l for l in plan)
+            self.tt.new_file(name, self.tt.final_parent(trans_id), contents)
+        textmerge = PlanWeaveMerge(plan, '<<<<<<< TREE\n',
+            '>>>>>>> MERGE-SOURCE\n')
+        return textmerge.merge_lines(self.reprocess)
+
+
 class Diff3Merger(Merge3Merger):
     """Three-way merger using external diff3 for text merging"""
 
@@ -1165,8 +1241,7 @@
             yield "unchanged", text_a
 
 
-class _PlanMerge(object):
-    """Plan an annotate merge using on-the-fly annotation"""
+class _PlanMergeBase(object):
 
     def __init__(self, a_rev, b_rev, vf):
         """Contructor.
@@ -1180,11 +1255,9 @@
         self.lines_a = vf.get_lines(a_rev)
         self.lines_b = vf.get_lines(b_rev)
         self.vf = vf
-        a_ancestry = set(vf.get_ancestry(a_rev, topo_sorted=False))
-        b_ancestry = set(vf.get_ancestry(b_rev, topo_sorted=False))
-        self.uncommon = a_ancestry.symmetric_difference(b_ancestry)
         self._last_lines = None
         self._last_lines_revision_id = None
+        self._cached_matching_blocks = {}
 
     def plan_merge(self):
         """Generate a 'plan' for merging the two revisions.
@@ -1196,29 +1269,34 @@
         VersionedFile.plan_merge
         """
         blocks = self._get_matching_blocks(self.a_rev, self.b_rev)
-        new_a = self._find_new(self.a_rev)
-        new_b = self._find_new(self.b_rev)
+        unique_a, unique_b = self._unique_lines(blocks)
+        new_a, killed_b = self._determine_status(self.a_rev, unique_a)
+        new_b, killed_a = self._determine_status(self.b_rev, unique_b)
+        return self._iter_plan(blocks, new_a, killed_b, new_b, killed_a)
+
+    def _iter_plan(self, blocks, new_a, killed_b, new_b, killed_a):
         last_i = 0
         last_j = 0
-        a_lines = self.vf.get_lines(self.a_rev)
-        b_lines = self.vf.get_lines(self.b_rev)
         for i, j, n in blocks:
-            # determine why lines aren't common
             for a_index in range(last_i, i):
                 if a_index in new_a:
-                    cause = 'new-a'
+                    if a_index in killed_b:
+                        yield 'conflicted-a', self.lines_a[a_index]
+                    else:
+                        yield 'new-a', self.lines_a[a_index]
                 else:
-                    cause = 'killed-b'
-                yield cause, a_lines[a_index]
+                    yield 'killed-b', self.lines_a[a_index]
            for b_index in range(last_j, j):
                 if b_index in new_b:
-                    cause = 'new-b'
+                    if b_index in killed_a:
+                        yield 'conflicted-b', self.lines_b[a_index]
+                    else:
+                        yield 'new-b', self.lines_b[b_index]
                 else:
-                    cause = 'killed-a'
-                yield cause, b_lines[b_index]
+                    yield 'killed-a', self.lines_b[b_index]
             # handle common lines
             for a_index in range(i, i+n):
-                yield 'unchanged', a_lines[a_index]
+                yield 'unchanged', self.lines_a[a_index]
             last_i = i+n
             last_j = j+n
 
@@ -1227,6 +1305,10 @@
 
         See SequenceMatcher.get_matching_blocks
         """
+        cached = self._cached_matching_blocks.get((left_revision,
+                                                   right_revision))
+        if cached is not None:
+            return cached
         if self._last_lines_revision_id == left_revision:
             left_lines = self._last_lines
         else:
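
With the plan_merge() rewrite above, every line of both texts gets a label: lines unique to one side come out as new-*, killed-* or conflicted-* entries, shared lines as 'unchanged', and _get_matching_blocks() now consults the _cached_matching_blocks filled in elsewhere. The toy below (standalone, using difflib rather than bzrlib's patiencediff, and with no ancestry data, so unique lines are simply reported as new) shows what such a plan stream looks like:

from difflib import SequenceMatcher

def toy_plan(lines_a, lines_b):
    """Yield (state, line) pairs in the spirit of _iter_plan above."""
    matcher = SequenceMatcher(None, lines_a, lines_b)
    last_i = last_j = 0
    for i, j, n in matcher.get_matching_blocks():
        for a_index in range(last_i, i):
            yield 'new-a', lines_a[a_index]      # present only on the 'a' side
        for b_index in range(last_j, j):
            yield 'new-b', lines_b[b_index]      # present only on the 'b' side
        for a_index in range(i, i + n):
            yield 'unchanged', lines_a[a_index]  # common to both sides
        last_i = i + n
        last_j = j + n

a = ['apple\n', 'banana\n', 'cherry\n']
b = ['apple\n', 'blueberry\n', 'cherry\n']
for state, line in toy_plan(a, b):
    print('%10s|%s' % (state, line.rstrip()))
# prints: unchanged|apple, new-a|banana, new-b|blueberry, unchanged|cherry

In the real classes, _determine_status() additionally splits the unique lines into new versus killed (and conflicted-* when both), which is what the _PlanLCAMerge hunk below refines.
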
@@ -1255,6 +1337,63 @@
             last_j = j + n
         return unique_left, unique_right
 
+    @staticmethod
+    def _subtract_plans(old_plan, new_plan):
+        """Remove changes from new_plan that came from old_plan.
+
+        It is assumed that the difference between the old_plan and new_plan
+        is their choice of 'b' text.
+
+        All lines from new_plan that differ from old_plan are emitted
+        verbatim.  All lines from new_plan that match old_plan but are
+        not about the 'b' revision are emitted verbatim.
+
+        Lines that match and are about the 'b' revision are the lines we
+        don't want, so we convert 'killed-b' -> 'unchanged', and 'new-b'
+        is skipped entirely.
+        """
+        matcher = patiencediff.PatienceSequenceMatcher(None, old_plan,
+                                                       new_plan)
+        last_j = 0
+        for i, j, n in matcher.get_matching_blocks():
+            for jj in range(last_j, j):
+                yield new_plan[jj]
+            for jj in range(j, j+n):
+                plan_line = new_plan[jj]
+                if plan_line[0] == 'new-b':
+                    pass
+                elif plan_line[0] == 'killed-b':
+                    yield 'unchanged', plan_line[1]
+                else:
+                    yield plan_line
+            last_j = j + n
+
+
+class _PlanMerge(_PlanMergeBase):
+    """Plan an annotate merge using on-the-fly annotation"""
+
+    def __init__(self, a_rev, b_rev, vf):
+       _PlanMergeBase.__init__(self, a_rev, b_rev, vf)
+       a_ancestry = set(vf.get_ancestry(a_rev, topo_sorted=False))
+       b_ancestry = set(vf.get_ancestry(b_rev, topo_sorted=False))
+       self.uncommon = a_ancestry.symmetric_difference(b_ancestry)
+
+    def _determine_status(self, revision_id, unique_line_numbers):
+        """Determines the status unique lines versus all lcas.
+
+        Basically, determines why the line is unique to this revision.
+
+        A line may be determined new or killed, but not both.
+
+        :param revision_id: The id of the revision in which the lines are
+            unique
+        :param unique_line_numbers: The line numbers of unique lines.
+        :return a tuple of (new_this, killed_other):
+        """
+        new = self._find_new(revision_id)
+        killed = set(unique_line_numbers).difference(new)
+        return new, killed
+
     def _find_new(self, version_id):
         """Determine which lines are new in the ancestry of this version.
 
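
_subtract_plans(), which the hunk above moves onto _PlanMergeBase as a staticmethod, strips 'b'-side decisions that an earlier plan already made. A standalone rendition of the same loop (with difflib standing in for patiencediff) run on a toy pair of plans makes the docstring concrete:

from difflib import SequenceMatcher

def subtract_plans(old_plan, new_plan):
    """Same loop as _subtract_plans above, with difflib doing the matching."""
    matcher = SequenceMatcher(None, old_plan, new_plan)
    last_j = 0
    for i, j, n in matcher.get_matching_blocks():
        for jj in range(last_j, j):
            yield new_plan[jj]                   # differs from old_plan: keep
        for jj in range(j, j + n):
            plan_line = new_plan[jj]
            if plan_line[0] == 'new-b':
                pass                             # 'b' addition old_plan saw: drop
            elif plan_line[0] == 'killed-b':
                yield 'unchanged', plan_line[1]  # 'b' deletion old_plan saw: undo
            else:
                yield plan_line
        last_j = j + n

old = [('unchanged', 'a\n'), ('new-b', 'x\n'), ('killed-b', 'b\n')]
new = [('unchanged', 'a\n'), ('new-b', 'x\n'), ('killed-b', 'b\n'),
       ('new-b', 'y\n')]
print(list(subtract_plans(old, new)))
# [('unchanged', 'a\n'), ('unchanged', 'b\n'), ('new-b', 'y\n')]
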
@@ -1281,20 +1420,58 @@
                 new.intersection_update(result)
         return new
 
-    @staticmethod
-    def _subtract_plans(old_plan, new_plan):
-        matcher = patiencediff.PatienceSequenceMatcher(None, old_plan,
-                                                       new_plan)
-        last_j = 0
-        for i, j, n in matcher.get_matching_blocks():
-            for jj in range(last_j, j):
-                yield new_plan[jj]
-            for jj in range(j, j+n):
-                plan_line = new_plan[jj]
-                if plan_line[0] == 'new-b':
-                    pass
-                elif plan_line[0] == 'killed-b':
-                    yield 'unchanged', plan_line[1]
-                else:
-                    yield plan_line
-            last_j = j + n
+
+class _PlanLCAMerge(_PlanMergeBase):
+    """
+    This merge algorithm differs from _PlanMerge in that:
+    1. comparisons are done against LCAs only
+    2. cases where a contested line is new versus one LCA but old versus
+       another are marked as conflicts, by emitting the line as conflicted-a
+       or conflicted-b.
+
+    This is faster, and hopefully produces more useful output.
+    """
+
+    def __init__(self, a_rev, b_rev, vf, graph):
+        _PlanMergeBase.__init__(self, a_rev, b_rev, vf)
+        self.lcas = graph.find_lca(a_rev, b_rev)
+        for lca in self.lcas:
+            lca_lines = self.vf.get_lines(lca)
+            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_a,
+                                                           lca_lines)
+            blocks = list(matcher.get_matching_blocks())
+            self._cached_matching_blocks[(a_rev, lca)] = blocks
+            matcher = patiencediff.PatienceSequenceMatcher(None, self.lines_b,
+                                                           lca_lines)
+            blocks = list(matcher.get_matching_blocks())
+            self._cached_matching_blocks[(b_rev, lca)] = blocks
+
+    def _determine_status(self, revision_id, unique_line_numbers):
+        """Determines the status unique lines versus all lcas.
+
+        Basically, determines why the line is unique to this revision.
+
+        A line may be determined new, killed, or both.
+
+        If a line is determined new, that means it was not present in at least
+        one LCA, and is not present in the other merge revision.
+
+        If a line is determined killed, that means the line was present in
+        at least one LCA.
+
+        If a line is killed and new, this indicates that the two merge
+        revisions contain differing conflict resolutions.
+        :param revision_id: The id of the revision in which the lines are
+            unique
+        :param unique_line_numbers: The line numbers of unique lines.
+        :return a tuple of (new_this, killed_other):
+        """
+        new = set()
+        killed = set()
+        unique_line_numbers = set(unique_line_numbers)
+        for lca in self.lcas:
+            blocks = self._get_matching_blocks(revision_id, lca)
+            unique_vs_lca, _ignored = self._unique_lines(blocks)
+            new.update(unique_line_numbers.intersection(unique_vs_lca))
+            killed.update(unique_line_numbers.difference(unique_vs_lca))
+        return new, killed
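
In _PlanLCAMerge._determine_status() above, each line that is unique to one merge revision is classified against every LCA: still unique against an LCA means the line is new relative to that LCA, matching an LCA means the other revision killed it, and a line that lands in both sets signals differing conflict resolutions. A tiny standalone rerun of that set arithmetic on made-up line numbers:

def determine_status(unique_line_numbers, unique_vs_each_lca):
    """Same set arithmetic as _determine_status above, on plain numbers."""
    new, killed = set(), set()
    unique_line_numbers = set(unique_line_numbers)
    for unique_vs_lca in unique_vs_each_lca:
        # Still unique against this LCA: the line is new relative to it.
        new.update(unique_line_numbers.intersection(unique_vs_lca))
        # Matches this LCA: the other merge revision must have killed it.
        killed.update(unique_line_numbers.difference(unique_vs_lca))
    return new, killed

# Lines 3 and 7 are unique to this side.  Against LCA 1 only line 3 is unique,
# against LCA 2 only line 7 is, so both lines come out new *and* killed:
# the "differing conflict resolutions" case described in the docstring.
print(determine_status([3, 7], [{3}, {7}]))   # both result sets are {3, 7}
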