~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/graph.py

  • Committer: John Arbash Meinel
  • Date: 2008-05-07 22:09:32 UTC
  • mto: This revision was merged to the branch mainline in revision 3443.
  • Revision ID: john@arbash-meinel.com-20080507220932-1ofutrrd6eu1dtdy
Restore the previous code, but bring in a couple of changes, including an update to have lsprof show where the time is spent.
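
The lsprof change the message refers to shows up in the diff below as named inner functions (step_searchers, intersect_searchers, filter_searchers): hoisting each phase of the main loop into its own function makes a code-object-based profiler report time per phase instead of charging everything to the enclosing method. Here is a minimal standalone sketch of that trick using the stdlib cProfile (built on the same _lsprof engine that bzrlib's profiler wraps); the function names and workloads are illustrative, not from bzrlib:

    import cProfile
    import pstats

    def find_unique_ancestors_like():
        # Each named inner function gets its own row in the profiler
        # output, so the expensive phase is immediately visible.
        def step_phase():
            return sum(xrange(10 ** 6))
        def intersect_phase():
            return set(xrange(10 ** 5)) & set(xrange(10 ** 4))
        step_phase()
        intersect_phase()

    profiler = cProfile.Profile()
    profiler.runcall(find_unique_ancestors_like)
    pstats.Stats(profiler).sort_stats('time').print_stats(10)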

=== modified file 'bzrlib/graph.py'
--- bzrlib/graph.py
+++ bzrlib/graph.py
@@ -272,87 +272,136 @@
         unique_tips = self._remove_simple_descendants(unique_nodes,
                         self.get_parent_map(unique_nodes))
 
-        total_tips = 0
-        unique_searchers = []
-        for tip in unique_tips:
-            revs_to_search = unique_searcher.find_seen_ancestors([tip])
-            revs_to_search.update(
-                common_searcher.find_seen_ancestors(revs_to_search))
-            searcher = self._make_breadth_first_searcher(revs_to_search)
-            # We don't care about the starting nodes.
-            searcher._label = tip
-            unique_searchers.append(searcher)
-            total_tips += len(revs_to_search)
+        if len(unique_tips) == 1:
+            unique_searchers = []
+            ancestor_all_unique = unique_searcher.find_seen_ancestors(unique_tips)
+        else:
+            unique_searchers = []
+            for tip in unique_tips:
+                revs_to_search = unique_searcher.find_seen_ancestors([tip])
+                revs_to_search.update(
+                    common_searcher.find_seen_ancestors(revs_to_search))
+                searcher = self._make_breadth_first_searcher(revs_to_search)
+                # We don't care about the starting nodes.
+                searcher._label = tip
+                searcher.step()
+                unique_searchers.append(searcher)
 
+            ancestor_all_unique = None
+            for searcher in unique_searchers:
+                if ancestor_all_unique is None:
+                    ancestor_all_unique = set(searcher.seen)
+                else:
+                    ancestor_all_unique = ancestor_all_unique.intersection(
+                                                searcher.seen)
         # Collapse all the common nodes into a single searcher
-        _mutter('Created %s unique searchers, searching %s revisions'
-                ' (%s/%s common)',
-                len(unique_searchers), total_tips,
-                len(common_searcher._next_query),
-                len(common_searcher.seen))
+        all_unique_searcher = self._make_breadth_first_searcher(ancestor_all_unique)
+        if ancestor_all_unique:
+            all_unique_searcher.step()
+
+            # Stop any search tips that are already known as ancestors of the
+            # unique nodes
+            stopped_common = common_searcher.stop_searching_any(
+                common_searcher.find_seen_ancestors(ancestor_all_unique))
+
+            total_stopped = 0
+            for searcher in unique_searchers:
+                total_stopped += len(searcher.stop_searching_any(
+                    searcher.find_seen_ancestors(ancestor_all_unique)))
+            _mutter('For %s unique nodes, created %s + 1 unique searchers'
+                    ' (%s stopped search tips, %s common ancestors'
+                    ' (%s stopped common)',
+                    len(unique_nodes), len(unique_searchers), total_stopped,
+                    len(ancestor_all_unique), len(stopped_common))
+            del ancestor_all_unique, stopped_common
 
         # While we still have common nodes to search
         while common_searcher._next_query:
             newly_seen_common = set(common_searcher.step())
             newly_seen_unique = set()
-            for searcher in unique_searchers:
-                # newly_seen_unique.update(searcher.step())
-                next = set(searcher.step())
-                next.update(unique_searcher.find_seen_ancestors(next))
-                next.update(common_searcher.find_seen_ancestors(next))
-                for alt_searcher in unique_searchers:
-                    if alt_searcher is searcher:
-                        continue
-                    next.update(alt_searcher.find_seen_ancestors(next))
-                searcher.start_searching(next)
-                newly_seen_unique.update(next)
-
+            def step_searchers():
+                for searcher in unique_searchers:
+                    next = set(searcher.step())
+                    next.update(unique_searcher.find_seen_ancestors(next))
+                    next.update(common_searcher.find_seen_ancestors(next))
+                    for alt_searcher in unique_searchers:
+                        if alt_searcher is searcher:
+                            continue
+                        next.update(alt_searcher.find_seen_ancestors(next))
+                    searcher.start_searching(next)
+                    newly_seen_unique.update(next)
+            step_searchers()
             # These nodes are common ancestors of all unique nodes
             unique_are_common_nodes = newly_seen_unique.copy()
-            for searcher in unique_searchers:
-                unique_are_common_nodes = unique_are_common_nodes.intersection(
-                                            searcher.seen)
-            unique_are_common_nodes.update(
-                common_searcher.find_seen_ancestors(unique_are_common_nodes))
-            common_searcher.stop_searching_any(unique_are_common_nodes)
-
-            # For all searchers, see if there is overlap with another searcher
-            # If there is, stop searching that node, and create another
-            # searcher which is searching from the intersection.
-            unique_search_tips = {}
-            next_unique_searchers = unique_searchers[:]
-            i = 0
-            while i < len(next_unique_searchers):
-                searcher = next_unique_searchers[i]
-                next = set(searcher._next_query)
-                j = 0
-                while j < len(next_unique_searchers):
-                    if i == j:
-                        j += 1
-                        continue
-                    alt_searcher = next_unique_searchers[j]
-                    alt_next = set(alt_searcher._next_query)
-                    common_tips = alt_next.intersection(next)
-                    if common_tips == alt_next:
-                        # This searcher is a superset of alt_searcher, so stop
-                        # alt_searcher
-                        # import pdb; pdb.set_trace()
-                        next_unique_searchers.remove(alt_searcher)
-                        ancestor_intersection = searcher.seen.intersection(alt_searcher.seen)
-                        searcher.seen = ancestor_intersection
-                        _mutter('Combining searchers into a single searcher'
+            def intersect_searchers():
+                for searcher in unique_searchers:
+                    unique_are_common_nodes.intersection_update(searcher.seen)
+                unique_are_common_nodes.intersection_update(
+                                            all_unique_searcher.seen)
+                # TODO: lsprof reports this as the most expensive portion of
+                #       this function. Taking 4.4s out of 6.2s spent in
+                #       find_unique_ancestors
+                unique_are_common_nodes.update(all_unique_searcher.step())
+            intersect_searchers()
+
+            if newly_seen_common:
+                # If a 'common' node is an ancestor of all unique searchers, we
+                # can stop searching it.
+                common_searcher.stop_searching_any(
+                    all_unique_searcher.seen.intersection(newly_seen_common))
+            if unique_are_common_nodes:
+                unique_are_common_nodes.update(
+                    common_searcher.find_seen_ancestors(unique_are_common_nodes))
+                # The all_unique searcher can start searching the common nodes
+                # but everyone else can stop.
+                all_unique_searcher.start_searching(unique_are_common_nodes)
+                common_searcher.stop_searching_any(unique_are_common_nodes)
+
+            def filter_searchers():
+                # Filter out searchers that don't actually search different
+                # nodes. We already have the ancestry intersection for them
+                unique_search_tips = {}
+                for searcher in unique_searchers:
+                    stopped = searcher.stop_searching_any(unique_are_common_nodes)
+                    will_search_set = frozenset(searcher._next_query)
+                    if not will_search_set:
+                        _mutter('Unique searcher %s was stopped.'
+                                ' (%s iterations) %d nodes stopped',
+                                searcher._label,
+                                searcher._iterations,
+                                len(stopped))
+                    elif will_search_set not in unique_search_tips:
+                        # This searcher is searching a unique set of nodes, let it
+                        unique_search_tips[will_search_set] = [searcher]
+                    else:
+                        unique_search_tips[will_search_set].append(searcher)
+                next_unique_searchers = []
+                for searchers in unique_search_tips.itervalues():
+                    if len(searchers) == 1:
+                        # Searching unique tips, go for it
+                        next_unique_searchers.append(searchers[0])
+                    else:
+                        # These searchers have started searching the same tips, we
+                        # don't need them to cover the same ground. The
+                        # intersection of their ancestry won't change, so create a
+                        # new searcher, combining their histories.
+                        next_searcher = searchers[0]
+                        ancestor_intersection = next_searcher.seen
+                        for searcher in searchers[1:]:
+                            ancestor_intersection = ancestor_intersection.intersection(searcher.seen)
+                        next_searcher.seen = ancestor_intersection
+                        _mutter('Combining %s searchers into a single searcher'
                                 ' searching %s nodes with %s ancestry',
-                                len(searcher._next_query),
-                                len(searcher.seen))
-                        if j < i:
-                            i -= 1
-                        j -= 1
-                    j += 1
-                i += 1
-
+                                len(searchers), len(next_searcher._next_query),
+                                len(next_searcher.seen))
+                        next_unique_searchers.append(next_searcher)
+                return next_unique_searchers
+            next_unique_searchers = filter_searchers()
             if len(unique_searchers) != len(next_unique_searchers):
-                _mutter('Collapsed %s unique searchers => %s',
-                        len(unique_searchers), len(next_unique_searchers))
+                _mutter('Collapsed %s unique searchers => %s'
+                        ' at %s iterations',
+                        len(unique_searchers), len(next_unique_searchers),
+                        all_unique_searcher._iterations)
             unique_searchers = next_unique_searchers
         return unique_nodes.difference(common_searcher.seen)
 
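The new filter_searchers hook deduplicates searchers whose pending tips are identical by keying a dict on a frozenset of those tips, then keeping one searcher per group with the intersection of the group's seen sets. A minimal sketch of that pattern in isolation (this Searcher class is a stand-in for illustration, not bzrlib's breadth-first searcher):

    class Searcher(object):
        """Stand-in for bzrlib's breadth-first searcher."""
        def __init__(self, next_query, seen):
            self._next_query = set(next_query)
            self.seen = set(seen)

    def collapse_duplicate_searchers(searchers):
        # frozenset is hashable, so the exact set of pending tips can
        # key the grouping dict.
        by_tips = {}
        for searcher in searchers:
            tips = frozenset(searcher._next_query)
            by_tips.setdefault(tips, []).append(searcher)
        collapsed = []
        for group in by_tips.itervalues():
            survivor = group[0]
            # Searchers with identical tips will walk identical ancestry
            # from here on; keep one, carrying the intersection of what
            # the group has already seen.
            for other in group[1:]:
                survivor.seen = survivor.seen.intersection(other.seen)
            collapsed.append(survivor)
        return collapsed

    a = Searcher(['rev-10'], ['rev-10', 'rev-9', 'rev-8'])
    b = Searcher(['rev-10'], ['rev-10', 'rev-9', 'rev-7'])
    c = Searcher(['rev-20'], ['rev-20', 'rev-19'])
    print len(collapse_duplicate_searchers([a, b, c]))  # -> 2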