import errno
import gzip
import os
import types
import urllib

from zlib import adler32

from bzrlib import errors, urlutils
from bzrlib.errors import BzrError, UnlistableStore, bailout
from bzrlib.trace import mutter
from bzrlib.transport import Transport


class ImmutableStore(object):
    """Store that holds files indexed by unique names.

    Files can be added, but not modified once they are in.  Typically
    the hash is used as the name, or something else known to be unique,
    such as a UUID.

    >>> st = ImmutableScratchStore()

    >>> st.add(StringIO('hello'), 'aa')

    You are not allowed to add an id that is already present.

    Entries can be retrieved as files, which may then be read.

    >>> st.add(StringIO('goodbye'), '123123')
    >>> st['123123'].read()
    'goodbye'

    TODO: Atomic add by writing to a temporary file and renaming.

    TODO: Perhaps automatically transform to/from XML in a method?
          Would just need to tell the constructor what class to use.

    TODO: Even within a simple disk store like this, we could
          gzip the files.  But since many are less than one disk
          block, that might not help a lot.
    """
    def __init__(self, basedir):
        """ImmutableStore constructor."""
        self._basedir = basedir

    def _path(self, id):
        return os.path.join(self._basedir, id)

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self._basedir)
    def add(self, f, fileid, compressed=True):
        """Add contents of a file into the store.

        f -- An open file, or file-like object."""
        # FIXME: Only works on smallish files
        # TODO: Can be optimized by copying at the same time as
        # computing the hash
        mutter("add store entry %r" % (fileid))
        if isinstance(f, types.StringTypes):
            content = f
        else:
            content = f.read()

        p = self._path(fileid)
        if os.access(p, os.F_OK) or os.access(p + '.gz', os.F_OK):
            bailout("store %r already contains id %r" % (self._basedir, fileid))

        if compressed:
            f = gzip.GzipFile(p + '.gz', 'wb')
            os.chmod(p + '.gz', 0444)
        else:
            f = file(p, 'wb')
            os.chmod(p, 0444)
        f.write(content)
        f.close()
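    # Example (sketch): typical use of add(), assuming the base directory
    # already exists and is writable; the ids and contents here are made up.
    #
    #   from StringIO import StringIO
    #   store = ImmutableStore('/tmp/example-store')
    #   store.add(StringIO('hello world'), 'some-id')            # stored gzipped
    #   store.add('raw string contents', 'other-id', compressed=False)
    #   # adding 'some-id' a second time bails out: entries are immutable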
    def __contains__(self, fileid):
        """Return True if fileid is present in the store."""
        p = self._path(fileid)
        return (os.access(p, os.R_OK)
                or os.access(p + '.gz', os.R_OK))

    # TODO: Guard against the same thing being stored twice, compressed
    # and uncompressed.

    def __iter__(self):
        for f in os.listdir(self._basedir):
            if f.endswith('.gz'):
                # TODO: case-insensitive?
                yield f[:-3]
            else:
                yield f
    def __getitem__(self, fileid):
        """Returns a file reading from a particular entry."""
        p = self._path(fileid)
        try:
            return gzip.GzipFile(p + '.gz', 'rb')
        except IOError, e:
            if e.errno == errno.ENOENT:
                return file(p, 'rb')
            else:
                raise

    def total_size(self):
        """Return (count, bytes) stored for all entries."""
        count = 0
        total = 0
        for fid in self:
            count += 1
            p = self._path(fid)
            if not os.access(p, os.F_OK):
                p = p + '.gz'
            total += os.stat(p).st_size
        return count, total

    def __len__(self):
        return len(os.listdir(self._basedir))


class Store(object):
    """This class represents the abstract storage layout for saving information."""

    def __len__(self):
        raise NotImplementedError('Children should define their length')

    def get(self, fileid, suffix=None):
        """Returns a file reading from a particular entry.

        If suffix is present, retrieve the named suffix for fileid.
        """
        raise NotImplementedError
    def __getitem__(self, fileid):
        """DEPRECATED. Please use .get(fileid) instead."""
        raise NotImplementedError

    #def __contains__(self, fileid):
    #    """Deprecated, please use has_id"""
    #    raise NotImplementedError

    def __iter__(self):
        raise NotImplementedError
    def add(self, f, fileid):
        """Add a file object f to the store accessible from the given fileid"""
        raise NotImplementedError('Children of Store must define their'
                                  ' method of adding entries.')

    def has_id(self, fileid, suffix=None):
        """Return True or False for the presence of fileid in the store.

        suffix, if present, is a per file suffix, i.e. for digital signature
        data."""
        raise NotImplementedError
    def listable(self):
        """Return True if this store is able to be listed."""
        return hasattr(self, "__iter__")

    def copy_all_ids(self, store_from, pb=None):
        """Copy all the file ids from store_from into self."""
        if not store_from.listable():
            raise UnlistableStore(store_from)
        ids = []
        for count, file_id in enumerate(store_from):
            if pb is not None:
                pb.update('listing files', count, count)
            ids.append(file_id)
        mutter('copy_all ids: %r', ids)
        self.copy_multi(store_from, ids, pb=pb)
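    # Example (sketch): mirroring one listable store into another, where
    # 'src' and 'dst' stand for any two Store implementations with
    # compatible file ids.
    #
    #   dst.copy_all_ids(src)      # raises UnlistableStore if src cannot
    #                              # enumerate its contents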
    def copy_multi(self, other, ids, pb=None, permit_failure=False):
        """Copy texts for ids from other into self.

        If an id is present in self, it is skipped.  A count of copied
        ids is returned, which may be less than len(ids).

        :param other: Another Store object
        :param ids: A list of entry ids to be copied
        :param pb: A ProgressBar object to update, if any
        :param permit_failure: Allow missing entries to be ignored
        :return: (n_copied, [failed]) The number of entries copied successfully,
            followed by a list of entries which could not be copied (because they
            were missing)
        """
        if pb is not None:
            pb.update('preparing to copy')
        failed = set()
        count = 0
        ids = list(ids) # get the list for showing a length.
        for fileid in ids:
            count += 1
            if self.has_id(fileid):
                continue
            try:
                self._copy_one(fileid, None, other, pb)
                for suffix in self._suffixes:
                    # per-file suffixes (e.g. signatures) are optional
                    try:
                        self._copy_one(fileid, suffix, other, pb)
                    except KeyError:
                        pass
            except KeyError:
                if not permit_failure:
                    raise
                failed.add(fileid)
            if pb is not None:
                pb.update('copy', count, len(ids))
        assert count == len(ids)
        return count, failed
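    # Example (sketch): copying a known set of ids while tolerating gaps in
    # the source store; the ids are invented for illustration.
    #
    #   copied, failed = dst.copy_multi(src, ['id-1', 'id-2', 'id-3'],
    #                                   permit_failure=True)
    #   # 'failed' collects the ids whose texts raised KeyError in src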
    def _copy_one(self, fileid, suffix, other, pb):
        """Most generic copy-one object routine.

        Subclasses can override this to provide an optimised
        copy between their own instances.  Such overridden routines
        should call this if they have no optimised facility for a
        specific 'other'.
        """
        mutter('Store._copy_one: %r', fileid)
        f = other.get(fileid, suffix)
        self.add(f, fileid, suffix)

class TransportStore(Store):
    """A TransportStore is a Store superclass for Stores that use Transports."""

    def add(self, f, fileid, suffix=None):
        """Add contents of a file into the store.

        f -- A file-like object, or string
        """
        mutter("add store entry %r", fileid)

        names = self._id_to_names(fileid, suffix)
        if self._transport.has_any(names):
            raise BzrError("store %r already contains id %r"
                           % (self._transport.base, fileid))

        # Most of the time, just adding the file will work
        # if we find a time where it fails, (because the dir
        # doesn't exist), then create the dir, and try again
        self._add(names[0], f)
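    # Example (sketch): a minimal concrete child.  TransportStore leaves the
    # actual write to _add(), so a subclass only needs to hand the relpath
    # and file object to its transport; the exact write call (shown here as
    # put_file) depends on the Transport API in use.
    #
    #   class SimpleTextStore(TransportStore):
    #       def _add(self, relpath, f):
    #           self._transport.put_file(relpath, f, mode=self._file_mode)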
    def _add(self, relpath, f):
        """Actually add the file to the given location.

        This should be overridden by children.
        """
        raise NotImplementedError('children need to implement this function.')

    def _check_fileid(self, fileid):
        if not isinstance(fileid, basestring):
            raise TypeError('Fileids should be a string type: %s %r'
                            % (type(fileid), fileid))
        if '\\' in fileid or '/' in fileid:
            raise ValueError("invalid store id %r" % fileid)
    def _id_to_names(self, fileid, suffix):
        """Return the names in the expected order"""
        if suffix is not None:
            fn = self._relpath(fileid, [suffix])
        else:
            fn = self._relpath(fileid)

        # FIXME RBC 20051128 this belongs in TextStore.
        fn_gz = fn + '.gz'
        if self._compressed:
            return fn_gz, fn
        else:
            return fn, fn_gz
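    # Example (sketch) of the expected ordering for an uncompressed,
    # unprefixed, unescaped store with a registered 'sig' suffix; add()
    # writes to names[0], while has_id() and get() accept any of the names.
    #
    #   self._id_to_names('some-id', None)     # -> ('some-id', 'some-id.gz')
    #   self._id_to_names('some-id', 'sig')    # -> ('some-id.sig', 'some-id.sig.gz')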
    def has_id(self, fileid, suffix=None):
        """See Store.has_id."""
        return self._transport.has_any(self._id_to_names(fileid, suffix))

    def _get_name(self, fileid, suffix=None):
        """A special check, which returns the name of an existing file.

        This is similar in spirit to 'has_id', but it is designed
        to return information about which file the store has.
        """
        for name in self._id_to_names(fileid, suffix=suffix):
            if self._transport.has(name):
                return name
        return None
    def _get(self, filename):
        """Return a vanilla file stream for clients to read from.

        This is the body of a template method on 'get', and should be
        implemented by subclasses.
        """
        raise NotImplementedError

    def get(self, fileid, suffix=None):
        """See Store.get()."""
        names = self._id_to_names(fileid, suffix)
        for name in names:
            try:
                return self._get(name)
            except errors.NoSuchFile:
                pass
        raise KeyError(fileid)
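    # Example (sketch): reading an entry back.  Missing names fall through
    # to the next candidate; only when every name is absent does get()
    # raise KeyError, which callers such as copy_multi() rely on.
    #
    #   try:
    #       text = store.get('some-id').read()
    #   except KeyError:
    #       text = None      # entry not present under any known name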
    def __init__(self, a_transport, prefixed=False, compressed=False,
                 dir_mode=None, file_mode=None,
                 escaped=False):
        assert isinstance(a_transport, Transport)
        super(TransportStore, self).__init__()
        self._transport = a_transport
        self._prefixed = prefixed
        # FIXME RBC 20051128 this belongs in TextStore.
        self._compressed = compressed
        self._suffixes = set()
        self._escaped = escaped

        # It is okay for these to be None; it just means they
        # will use the filesystem defaults.
        self._dir_mode = dir_mode
        self._file_mode = file_mode
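    # Example (sketch) of the constructor options, using a made-up subclass
    # name; any Transport instance would do for a_transport.
    #
    #   store = SomeTransportStore(a_transport,
    #                              prefixed=True,   # spread ids over hash_prefix() dirs
    #                              compressed=False,
    #                              escaped=True,    # quote unsafe fileid characters
    #                              file_mode=0644)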
    def _unescape(self, file_id):
        """If filename escaping is enabled for this store, unescape and return the filename."""
        if self._escaped:
            return urllib.unquote(file_id)
        else:
            return file_id
    def _iter_files_recursive(self):
        """Iterate through the files in the transport."""
        for quoted_relpath in self._transport.iter_files_recursive():
            # transport iterator always returns quoted paths, regardless of
            # escaping
            yield urllib.unquote(quoted_relpath)
    def __iter__(self):
        for relpath in self._iter_files_recursive():
            # worst case is one of each suffix.
            name = os.path.basename(relpath)
            if name.endswith('.gz'):
                name = name[:-3]
            skip = False
            for count in range(len(self._suffixes)):
                for suffix in self._suffixes:
                    if name.endswith('.' + suffix):
                        skip = True
            if not skip:
                yield self._unescape(name)

    def __len__(self):
        return len(list(self.__iter__()))
    def _relpath(self, fileid, suffixes=None):
        self._check_fileid(fileid)
        if suffixes:
            for suffix in suffixes:
                if not suffix in self._suffixes:
                    raise ValueError("Unregistered suffix %r" % suffix)
                self._check_fileid(suffix)
        else:
            suffixes = []
        fileid = self._escape_file_id(fileid)
        if self._prefixed:
            # hash_prefix adds the '/' separator
            prefix = self.hash_prefix(fileid, escaped=True)
        else:
            prefix = ''
        path = prefix + fileid
        full_path = u'.'.join([path] + suffixes)
        return urlutils.escape(full_path)
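    # Example (sketch): for a store created with prefixed=True and
    # escaped=True and a registered 'sig' suffix, an id made only of safe
    # characters passes through unchanged:
    #
    #   self._relpath('some-id', ['sig'])   # -> '<hash prefix>/some-id.sig'
    #
    # where the two-hex-digit prefix comes from hash_prefix() below.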
    def _escape_file_id(self, file_id):
        """Turn a file id into a filesystem safe string.

        This is similar to a plain urllib.quote, except
        it uses specific safe characters, so that it doesn't
        have to translate a lot of valid file ids.
        """
        if not self._escaped:
            return file_id
        if isinstance(file_id, unicode):
            file_id = file_id.encode('utf-8')
        # @ does not get escaped. This is because it is a valid
        # filesystem character we use all the time, and it looks
        # a lot better than seeing %40 all the time.
        safe = "abcdefghijklmnopqrstuvwxyz0123456789-_@,."
        r = [((c in safe) and c or ('%%%02x' % ord(c)))
             for c in file_id]
        return ''.join(r)
    def hash_prefix(self, fileid, escaped=False):
        # fileid should be unescaped
        if not escaped and self._escaped:
            fileid = self._escape_file_id(fileid)
        return "%02x/" % (adler32(fileid) & 0xff)
    def __repr__(self):
        if self._transport is None:
            return "%s(None)" % (self.__class__.__name__)
        else:
            return "%s(%r)" % (self.__class__.__name__, self._transport.base)

    def listable(self):
        """Return True if this store is able to be listed."""
        return self._transport.listable()
    def register_suffix(self, suffix):
        """Register a suffix as being expected in this store."""
        self._check_fileid(suffix)
        if suffix == 'gz':
            raise ValueError('You cannot register the "gz" suffix.')
        self._suffixes.add(suffix)
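    # Example (sketch): registering a per-file suffix, e.g. for the detached
    # signature data mentioned in Store.has_id(); 'sig' is an illustrative
    # choice, not a requirement.
    #
    #   store.register_suffix('sig')
    #   store.add(signature_file, 'some-id', suffix='sig')
    #   store.get('some-id', suffix='sig')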