"""This module represents the abstract storage layout for saving information."""


class ImmutableStore(object):
    """Store that holds files indexed by unique names.

    Files can be added, but not modified once they are in.  Typically
    the hash is used as the name, or something else known to be unique,
    such as a UUID.

    >>> st = ImmutableScratchStore()
    >>> st.add(StringIO('hello'), 'aa')

    You are not allowed to add an id that is already present.

    Entries can be retrieved as files, which may then be read.

    >>> st.add(StringIO('goodbye'), '123123')
    >>> st['123123'].read()
    'goodbye'

    TODO: Atomic add by writing to a temporary file and renaming.

    In bzr 0.0.5 and earlier, files within the store were marked
    readonly on disk.  This is no longer done, but existing stores need
    to be accommodated.
    """

    def __len__(self):
        raise NotImplementedError('Children should define their length')

    def get(self, fileid, suffix=None):
        """Returns a file reading from a particular entry.

        If suffix is present, retrieve the named suffix for fileid.
        """
        raise NotImplementedError

    def __getitem__(self, fileid):
        """DEPRECATED. Please use .get(fileid) instead."""
        raise NotImplementedError

    def add(self, f, fileid):
        """Add a file object f to the store accessible from the given fileid"""
        raise NotImplementedError('Children of Store must define their method of adding entries.')

    def has_id(self, fileid, suffix=None):
        """Return True or False for the presence of fileid in the store.

        suffix, if present, is a per file suffix, i.e. for digital signature
        data."""
        raise NotImplementedError

    def listable(self):
        """Return True if this store is able to be listed."""
        return (getattr(self, "__iter__", None) is not None)

    def copy_all_ids(self, store_from, pb=None):
        """Copy all the file ids from store_from into self."""
        if not store_from.listable():
            raise UnlistableStore(store_from)
        ids = []
        for count, file_id in enumerate(store_from):
            if pb is not None:
                pb.update('listing files', count, count)
            ids.append(file_id)
        mutter('copy_all ids: %r', ids)
        self.copy_multi(store_from, ids, pb=pb)

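    # Illustrative sketch (the store instances and paths below are
    # hypothetical): copy_all_ids lists every id in a listable store and
    # hands the whole batch to copy_multi.
    #
    #   source = ImmutableStore('/tmp/old-store')
    #   target = ImmutableStore('/tmp/new-store')
    #   target.copy_all_ids(source)        # pb is an optional progress bar
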
    def copy_multi(self, other, ids, pb=None, permit_failure=False):
        # Abstract signature; a concrete implementation appears further down.
        raise NotImplementedError

    def __init__(self, basedir):
        self._basedir = basedir

    def _path(self, id):
        if '\\' in id or '/' in id:
            raise ValueError("invalid store id %r" % id)
        return os.path.join(self._basedir, id)

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self._basedir)

    def add(self, f, fileid, compressed=True):
        """Add contents of a file into the store.

        f -- An open file, or file-like object."""
        # FIXME: Only works on files that will fit in memory
        from bzrlib.atomicfile import AtomicFile
        mutter("add store entry %r" % (fileid))
        if isinstance(f, types.StringTypes):
            content = f
        else:
            content = f.read()
        p = self._path(fileid)
        if os.access(p, os.F_OK) or os.access(p + '.gz', os.F_OK):
            from bzrlib.errors import bailout
            raise BzrError("store %r already contains id %r" % (self._basedir, fileid))
        fn = p
        if compressed:
            fn = fn + '.gz'
        af = AtomicFile(fn, 'wb')
        if compressed:
            gf = gzip.GzipFile(mode='wb', fileobj=af)
            gf.write(content)
            gf.close()
        else:
            af.write(content)
        af.commit()

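    # Usage sketch mirroring the doctests in the class docstring (the path
    # below is hypothetical): strings and file-like objects are both
    # accepted, and re-adding an existing id raises BzrError.
    #
    #   st = ImmutableStore('/tmp/example-store')
    #   st.add(StringIO('hello'), 'file-1')            # gzipped by default
    #   st.add('raw text', 'file-2', compressed=False)
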
    def copy_multi(self, other, ids, pb=None, permit_failure=False):
        """Copy texts for ids from other into self.

        If an id is present in self, it is skipped.  A count of copied
        ids is returned, which may be less than len(ids).

        :param other: Another Store object
        :param ids: A list of entry ids to be copied
        :param pb: A ProgressTask object; if none is given, the default will be created.
        :param permit_failure: Allow missing entries to be ignored
        :return: (n_copied, [failed]) The number of entries copied successfully,
            followed by a list of entries which could not be copied (because they
            were missing)
        """
        from bzrlib.progress import ProgressBar
        if pb is None:
            pb = ProgressBar()
        pb.update('preparing to copy')
        to_copy = [id for id in ids if id not in self]
        if isinstance(other, ImmutableStore):
            return self.copy_multi_immutable(other, to_copy, pb)
        count = 0
        for fileid in to_copy:
            count += 1
            if self.has_id(fileid):
                continue
            pb.update('copy', count, len(to_copy))
            self.add(other[fileid], fileid)
            for suffix in self._suffixes:
                self._copy_one(fileid, suffix, other, pb)
        assert count == len(to_copy)

    def copy_multi_immutable(self, other, to_copy, pb):
        from shutil import copyfile
        count = 0
        for fileid in to_copy:
            count += 1
            other_p = other._path(fileid)
            p = self._path(fileid)
            try:
                copyfile(other_p, p)
            except IOError, e:
                if e.errno == errno.ENOENT:
                    # No uncompressed copy; fall back to the gzipped form.
                    copyfile(other_p + ".gz", p + ".gz")
                else:
                    raise
            pb.update('copy', count, len(to_copy))
        assert count == len(to_copy)

    def _copy_one(self, fileid, suffix, other, pb):
        """Most generic copy-one object routine.

        Subclasses can override this to provide an optimised
        copy between their own instances.  Such overriding routines
        should call this if they have no optimised facility for a
        specific 'other'.
        """
        mutter('Store._copy_one: %r', fileid)
        f = other.get(fileid, suffix)
        self.add(f, fileid, suffix)

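    # A subclass with a cheaper same-class copy path can override _copy_one
    # and fall back to this generic routine otherwise.  Hypothetical sketch
    # (MyOptimisedStore is an illustrative name, not part of this module):
    #
    #   def _copy_one(self, fileid, suffix, other, pb):
    #       if not isinstance(other, MyOptimisedStore):
    #           return super(MyOptimisedStore, self)._copy_one(
    #               fileid, suffix, other, pb)
    #       ...  # e.g. copy the backing file directly
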

class TransportStore(Store):
    """A TransportStore is a Store superclass for Stores that use Transports."""

    def add(self, f, fileid, suffix=None):
        """Add contents of a file into the store.

        f -- A file-like object
        """
        mutter("add store entry %r", fileid)
        names = self._id_to_names(fileid, suffix)
        if self._transport.has_any(names):
            raise BzrError("store %r already contains id %r"
                           % (self._transport.base, fileid))

        # Most of the time, just adding the file will work
        # if we find a time where it fails, (because the dir
        # doesn't exist), then create the dir, and try again
        self._add(names[0], f)

    def _add(self, relpath, f):
        """Actually add the file to the given location.

        This should be overridden by children.
        """
        raise NotImplementedError('children need to implement this function.')

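    # A child class typically forwards _add to its transport.  Hedged sketch;
    # it assumes the transport offers a put()-style call taking a relative
    # path and a file-like object (the exact call is not defined here):
    #
    #   def _add(self, relpath, f):
    #       self._transport.put(relpath, f, mode=self._file_mode)
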
    def _check_fileid(self, fileid):
        if type(fileid) != str:
            raise TypeError('Fileids should be bytestrings: %s %r' % (
                type(fileid), fileid))
        if '\\' in fileid or '/' in fileid:
            raise ValueError("invalid store id %r" % fileid)

    def _id_to_names(self, fileid, suffix):
        """Return the names in the expected order"""
        if suffix is not None:
            fn = self._relpath(fileid, [suffix])
        else:
            fn = self._relpath(fileid)

        # FIXME RBC 20051128 this belongs in TextStore.
        fn_gz = fn + '.gz'
        if self._compressed:
            return fn_gz, fn
        else:
            return fn, fn_gz

    def has_id(self, fileid, suffix=None):
        """See Store.has_id."""
        return self._transport.has_any(self._id_to_names(fileid, suffix))

    def _get_name(self, fileid, suffix=None):
        """A special check, which returns the name of an existing file.

        This is similar in spirit to 'has_id', but it is designed
        to return information about which file the store has.
        """
        for name in self._id_to_names(fileid, suffix=suffix):
            if self._transport.has(name):
                return name
        return None

    def _get(self, filename):
        """Return a vanilla file stream for clients to read from.

        This is the body of a template method on 'get', and should be
        implemented by subclasses.
        """
        raise NotImplementedError

    def get(self, fileid, suffix=None):
        """See Store.get()."""
        names = self._id_to_names(fileid, suffix)
        for name in names:
            try:
                return self._get(name)
            except errors.NoSuchFile:
                pass
        raise KeyError(fileid)

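    # get() tries each candidate name in order (the '.gz' form first for
    # compressed stores) and raises KeyError only once every name is missing,
    # so callers can treat the store like a read-only mapping:
    #
    #   try:
    #       text = store.get('file-1').read()
    #   except KeyError:
    #       pass  # entry not present
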
    def __init__(self, a_transport, prefixed=False, compressed=False,
                 dir_mode=None, file_mode=None,
                 escaped=False):
        super(TransportStore, self).__init__()
        self._transport = a_transport
        self._prefixed = prefixed
        # FIXME RBC 20051128 this belongs in TextStore.
        self._compressed = compressed
        self._suffixes = set()
        self._escaped = escaped

        # It is okay for these to be None; it just means they
        # will use the filesystem defaults
        self._dir_mode = dir_mode
        self._file_mode = file_mode
        # Create a key mapper to use
        if escaped and prefixed:
            self._mapper = versionedfile.HashEscapedPrefixMapper()
        elif not escaped and prefixed:
            self._mapper = versionedfile.HashPrefixMapper()
        elif escaped and not prefixed:
            raise ValueError(
                "%r: escaped unprefixed stores are not permitted."
                % (self,))
        else:
            self._mapper = versionedfile.PrefixMapper()
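        # Summary of the mapper selection above:
        #   prefixed and escaped     -> HashEscapedPrefixMapper
        #   prefixed, not escaped    -> HashPrefixMapper
        #   escaped, not prefixed    -> rejected with ValueError
        #   neither                  -> PrefixMapper
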
    def _iter_files_recursive(self):
        """Iterate through the files in the transport."""
        for quoted_relpath in self._transport.iter_files_recursive():
            yield quoted_relpath

    def __contains__(self, fileid):
        p = self._path(fileid)
        return (os.access(p, os.R_OK)
                or os.access(p + '.gz', os.R_OK))

    # TODO: Guard against the same thing being stored twice, compressed
    # and uncompressed.

    def __iter__(self):
        for relpath in self._iter_files_recursive():
            # worst case is one of each suffix.
            name = os.path.basename(relpath)
            if name.endswith('.gz'):
                name = name[:-3]
            skip = False
            for count in range(len(self._suffixes)):
                for suffix in self._suffixes:
                    if name.endswith('.' + suffix):
                        skip = True
            if not skip:
                yield self._mapper.unmap(name)[0]

    # An earlier basedir-backed store listed entries directly:
    #     for f in os.listdir(self._basedir):
    #         # TODO: case-insensitive?

    def __len__(self):
        return len(list(self.__iter__()))

    def _relpath(self, fileid, suffixes=None):
        self._check_fileid(fileid)
        if suffixes:
            for suffix in suffixes:
                if not suffix in self._suffixes:
                    raise ValueError("Unregistered suffix %r" % suffix)
                self._check_fileid(suffix)
        else:
            suffixes = []
        path = self._mapper.map((fileid,))
        full_path = '.'.join([path] + suffixes)
        return full_path

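    # Illustrative mapping (the exact layout depends on the mapper chosen in
    # __init__): an unprefixed store turns fileid 'file-1' with suffixes
    # ['sig'] into 'file-1.sig', while a hash-prefixed store places the same
    # entry under a short hash directory, e.g. 'ab/file-1.sig'.
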
    def __repr__(self):
        if self._transport is None:
            return "%s(None)" % (self.__class__.__name__)
        return "%s(%r)" % (self.__class__.__name__, self._transport.base)

    def listable(self):
        """Return True if this store is able to be listed."""
        return self._transport.listable()

    def register_suffix(self, suffix):
        """Register a suffix as being expected in this store."""
        self._check_fileid(suffix)
        if suffix == 'gz':
            raise ValueError('You cannot register the "gz" suffix.')
        self._suffixes.add(suffix)

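    # Hedged usage sketch: a store keeping detached signature data next to
    # each entry might register a 'sig' suffix, after which get(fileid, 'sig')
    # and has_id(fileid, 'sig') resolve to names like 'file-1.sig':
    #
    #   store.register_suffix('sig')
    #   store.add(signature_file, 'file-1', 'sig')
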
    def __len__(self):
        return len(os.listdir(self._basedir))

    def __getitem__(self, fileid):
        """Returns a file reading from a particular entry."""
        p = self._path(fileid)
        try:
            return gzip.GzipFile(p + '.gz', 'rb')
        except IOError, e:
            if e.errno == errno.ENOENT:
                return file(p, 'rb')
            raise

    def total_size(self):
        """Return (count, bytes)

        This is the (compressed) size stored on disk, not the size of
        the content."""