~bzr-pqm/bzr/bzr.dev

« back to all changes in this revision

Viewing changes to bzrlib/repofmt/groupcompress_repo.py

  • Committer: Joe Julian
  • Date: 2010-01-10 02:25:31 UTC
  • mto: (4634.119.7 2.0)
  • mto: This revision was merged to the branch mainline in revision 4959.
  • Revision ID: joe@julianfamily.org-20100110022531-wqk61rsagz8xsiga
Added MANIFEST.in to allow bdist_rpm to have all the required include files and tools. bdist_rpm will still fail to build correctly on some distributions due to a distutils bug http://bugs.python.org/issue644744

Show diffs side-by-side

added added

removed removed

Lines of Context:
1
 
# Copyright (C) 2008, 2009, 2010 Canonical Ltd
 
1
# Copyright (C) 2008, 2009 Canonical Ltd
2
2
#
3
3
# This program is free software; you can redistribute it and/or modify
4
4
# it under the terms of the GNU General Public License as published by
29
29
    knit,
30
30
    osutils,
31
31
    pack,
 
32
    remote,
32
33
    revision as _mod_revision,
33
34
    trace,
34
35
    ui,
52
53
    ResumedPack,
53
54
    Packer,
54
55
    )
55
 
from bzrlib.static_tuple import StaticTuple
56
56
 
57
57
 
58
58
class GCPack(NewPack):
704
704
                self._remove_pack_from_memory(pack)
705
705
        # record the newly available packs and stop advertising the old
706
706
        # packs
707
 
        to_be_obsoleted = []
708
 
        for _, packs in pack_operations:
709
 
            to_be_obsoleted.extend(packs)
710
 
        result = self._save_pack_names(clear_obsolete_packs=True,
711
 
                                       obsolete_packs=to_be_obsoleted)
 
707
        result = self._save_pack_names(clear_obsolete_packs=True)
 
708
        # Move the old packs out of the way now they are no longer referenced.
 
709
        for revision_count, packs in pack_operations:
 
710
            self._obsolete_packs(packs)
712
711
        return result
713
712
 
714
713
 
816
815
                                 ' no new_path %r' % (file_id,))
817
816
            if new_path == '':
818
817
                new_inv.root_id = file_id
819
 
                parent_id_basename_key = StaticTuple('', '').intern()
 
818
                parent_id_basename_key = ('', '')
820
819
            else:
821
820
                utf8_entry_name = entry.name.encode('utf-8')
822
 
                parent_id_basename_key = StaticTuple(entry.parent_id,
823
 
                                                     utf8_entry_name).intern()
 
821
                parent_id_basename_key = (entry.parent_id, utf8_entry_name)
824
822
            new_value = entry_to_bytes(entry)
825
823
            # Populate Caches?
826
824
            # new_inv._path_to_fileid_cache[new_path] = file_id
827
 
            key = StaticTuple(file_id).intern()
828
 
            id_to_entry_dict[key] = new_value
 
825
            id_to_entry_dict[(file_id,)] = new_value
829
826
            parent_id_basename_dict[parent_id_basename_key] = file_id
830
827
 
831
828
        new_inv._populate_from_dicts(self.chk_bytes, id_to_entry_dict,
881
878
            if basis_tree is not None:
882
879
                basis_tree.unlock()
883
880
 
884
 
    def _deserialise_inventory(self, revision_id, bytes):
 
881
    def deserialise_inventory(self, revision_id, bytes):
885
882
        return inventory.CHKInventory.deserialise(self.chk_bytes, bytes,
886
883
            (revision_id,))
887
884
 
903
900
    def _iter_inventory_xmls(self, revision_ids, ordering):
904
901
        # Without a native 'xml' inventory, this method doesn't make sense.
905
902
        # However older working trees, and older bundles want it - so we supply
906
 
        # it allowing _get_inventory_xml to work. Bundles currently use the
 
903
        # it allowing get_inventory_xml to work. Bundles currently use the
907
904
        # serializer directly; this also isn't ideal, but there isn't an xml
908
905
        # iteration interface offered at all for repositories. We could make
909
906
        # _iter_inventory_xmls be part of the contract, even if kept private.
953
950
                        pb=pb):
954
951
                for name, bytes in items:
955
952
                    (name_utf8, file_id, revision_id) = bytes_to_info(bytes)
956
 
                    # TODO: consider interning file_id, revision_id here, or
957
 
                    #       pushing that intern() into bytes_to_info()
958
 
                    # TODO: rich_root should always be True here, for all
959
 
                    #       repositories that support chk_bytes
960
953
                    if not rich_root and name_utf8 == '':
961
954
                        continue
962
955
                    try:
1113
1106
        for stream_info in self._fetch_revision_texts(revision_ids):
1114
1107
            yield stream_info
1115
1108
        self._revision_keys = [(rev_id,) for rev_id in revision_ids]
1116
 
        self.from_repository.revisions.clear_cache()
1117
 
        self.from_repository.signatures.clear_cache()
1118
1109
        yield self._get_inventory_stream(self._revision_keys)
1119
 
        self.from_repository.inventories.clear_cache()
1120
1110
        # TODO: The keys to exclude might be part of the search recipe
1121
1111
        # For now, exclude all parents that are at the edge of ancestry, for
1122
1112
        # which we have inventories
1125
1115
                        self._revision_keys)
1126
1116
        for stream_info in self._get_filtered_chk_streams(parent_keys):
1127
1117
            yield stream_info
1128
 
        self.from_repository.chk_bytes.clear_cache()
1129
1118
        yield self._get_text_stream()
1130
 
        self.from_repository.texts.clear_cache()
1131
1119
 
1132
1120
    def get_stream_for_missing_keys(self, missing_keys):
1133
1121
        # missing keys can only occur when we are byte copying and not
1197
1185
            # are always rich-root, so there are no synthesised root records to
1198
1186
            # ignore.
1199
1187
            _, file_id, revision_id = bytes_to_info(bytes)
1200
 
            file_id = intern(file_id)
1201
 
            revision_id = intern(revision_id)
1202
 
            text_keys.add(StaticTuple(file_id, revision_id).intern())
 
1188
            text_keys.add((file_id, revision_id))
1203
1189
        yield record
1204
1190
 
1205
1191