~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/groupcompress.py

  • Committer: John Arbash Meinel
  • Date: 2009-05-29 15:06:16 UTC
  • mfrom: (4392 +trunk)
  • mto: This revision was merged to the branch mainline in revision 4460.
  • Revision ID: john@arbash-meinel.com-20090529150616-m29oaesf6ekxr489
Merge bzr.dev, bringing in the gc stacking fixes.

Show diffs side-by-side

added

removed

Lines of Context:
31
31
    diff,
32
32
    errors,
33
33
    graph as _mod_graph,
 
34
    knit,
34
35
    osutils,
35
36
    pack,
36
37
    patiencediff,
37
38
    trace,
38
39
    )
39
40
from bzrlib.graph import Graph
40
 
from bzrlib.knit import _DirectPackAccess
41
41
from bzrlib.btree_index import BTreeBuilder
42
42
from bzrlib.lru_cache import LRUSizeCache
43
43
from bzrlib.tsort import topo_sort
911
911
        writer.begin()
912
912
        index = _GCGraphIndex(graph_index, lambda:True, parents=parents,
913
913
            add_callback=graph_index.add_nodes)
914
 
        access = _DirectPackAccess({})
 
914
        access = knit._DirectPackAccess({})
915
915
        access.set_writer(writer, graph_index, (transport, 'newpack'))
916
916
        result = GroupCompressVersionedFiles(index, access, delta)
917
917
        result.stream = stream
1018
1018
        else:
1019
1019
            keys = [key]
1020
1020
            parent_map = {key:()}
 
1021
        # So we used Graph(self) to load the parent_map, but now that we have
 
1022
        # it, we can just query the parent map directly, so create a new Graph
 
1023
        # object
 
1024
        graph = _mod_graph.Graph(_mod_graph.DictParentsProvider(parent_map))
1021
1025
        head_cache = _mod_graph.FrozenHeadsCache(graph)
1022
1026
        parent_cache = {}
1023
1027
        reannotate = annotate.reannotate
1024
1028
        for record in self.get_record_stream(keys, 'topological', True):
1025
1029
            key = record.key
1026
 
            chunks = osutils.chunks_to_lines(record.get_bytes_as('chunked'))
 
1030
            lines = osutils.chunks_to_lines(record.get_bytes_as('chunked'))
1027
1031
            parent_lines = [parent_cache[parent] for parent in parent_map[key]]
1028
1032
            parent_cache[key] = list(
1029
 
                reannotate(parent_lines, chunks, key, None, head_cache))
 
1033
                reannotate(parent_lines, lines, key, None, head_cache))
1030
1034
        return parent_cache[key]
1031
1035
 
1032
1036
    def check(self, progress_bar=None):
1543
1547
    """Mapper from GroupCompressVersionedFiles needs into GraphIndex storage."""
1544
1548
 
1545
1549
    def __init__(self, graph_index, is_locked, parents=True,
1546
 
        add_callback=None):
 
1550
        add_callback=None, track_external_parent_refs=False):
1547
1551
        """Construct a _GCGraphIndex on a graph_index.
1548
1552
 
1549
1553
        :param graph_index: An implementation of bzrlib.index.GraphIndex.
1554
1558
        :param add_callback: If not None, allow additions to the index and call
1555
1559
            this callback with a list of added GraphIndex nodes:
1556
1560
            [(node, value, node_refs), ...]
 
1561
        :param track_external_parent_refs: As keys are added, keep track of the
 
1562
            keys they reference, so that we can query get_missing_parents(),
 
1563
            etc.
1557
1564
        """
1558
1565
        self._add_callback = add_callback
1559
1566
        self._graph_index = graph_index
1560
1567
        self._parents = parents
1561
1568
        self.has_graph = parents
1562
1569
        self._is_locked = is_locked
 
1570
        if track_external_parent_refs:
 
1571
            self._key_dependencies = knit._KeyRefs()
 
1572
        else:
 
1573
            self._key_dependencies = None
1563
1574
 
1564
1575
    def add_records(self, records, random_id=False):
1565
1576
        """Add multiple records to the index.
1610
1621
                for key, (value, node_refs) in keys.iteritems():
1611
1622
                    result.append((key, value))
1612
1623
            records = result
 
1624
        key_dependencies = self._key_dependencies
 
1625
        if key_dependencies is not None and self._parents:
 
1626
            for key, value, refs in records:
 
1627
                parents = refs[0]
 
1628
                key_dependencies.add_references(key, parents)
1613
1629
        self._add_callback(records)
1614
1630
 
1615
1631
    def _check_read(self):
1664
1680
                result[node[1]] = None
1665
1681
        return result
1666
1682
 
 
1683
    def get_missing_parents(self):
 
1684
        """Return the keys of missing parents."""
 
1685
        # Copied from _KnitGraphIndex.get_missing_parents
 
1686
        # We may have false positives, so filter those out.
 
1687
        self._key_dependencies.add_keys(
 
1688
            self.get_parent_map(self._key_dependencies.get_unsatisfied_refs()))
 
1689
        return frozenset(self._key_dependencies.get_unsatisfied_refs())
 
1690
 
1667
1691
    def get_build_details(self, keys):
1668
1692
        """Get the various build details for keys.
1669
1693
 
1715
1739
        delta_end = int(bits[3])
1716
1740
        return node[0], start, stop, basis_end, delta_end
1717
1741
 
 
1742
    def scan_unvalidated_index(self, graph_index):
 
1743
        """Inform this _GCGraphIndex that there is an unvalidated index.
 
1744
 
 
1745
        This allows this _GCGraphIndex to keep track of any missing
 
1746
        compression parents we may want to have filled in to make those
 
1747
        indices valid.
 
1748
 
 
1749
        :param graph_index: A GraphIndex
 
1750
        """
 
1751
        if self._key_dependencies is not None:
 
1752
            # Add parent refs from graph_index (and discard parent refs that
 
1753
            # the graph_index has).
 
1754
            add_refs = self._key_dependencies.add_references
 
1755
            for node in graph_index.iter_all_entries():
 
1756
                add_refs(node[1], node[3][0])
 
1757
 
 
1758
 
1718
1759
 
1719
1760
from bzrlib._groupcompress_py import (
1720
1761
    apply_delta,