56
51
class Store(object):
57
"""This class represents the abstract storage layout for saving information.
52
"""An abstract store that holds files indexed by unique names.
59
54
Files can be added, but not modified once they are in. Typically
60
55
the hash is used as the name, or something else known to be unique,
58
>>> st = ImmutableScratchStore()
60
>>> st.add(StringIO('hello'), 'aa')
66
You are not allowed to add an id that is already present.
68
Entries can be retrieved as files, which may then be read.
70
>>> st.add(StringIO('goodbye'), '123123')
71
>>> st['123123'].read()
65
raise NotImplementedError('Children should define their length')
67
def get(self, fileid, suffix=None):
    """Returns a file reading from a particular entry.

    If suffix is present, retrieve the named suffix for fileid.

    :param fileid: unique id of the entry to read.
    :param suffix: optional per-file suffix to retrieve instead of the
        main entry.
    :raises NotImplementedError: always; subclasses must override.
    """
    raise NotImplementedError
74
def __getitem__(self, fileid):
    """DEPRECATED. Please use .get(fileid) instead."""
    raise NotImplementedError
78
#def __contains__(self, fileid):
79
# """Deprecated, please use has_id"""
80
# raise NotImplementedError
83
raise NotImplementedError
85
def add(self, f, fileid):
    """Add a file object f to the store accessible from the given fileid"""
    raise NotImplementedError('Children of Store must define their method of adding entries.')
89
def has_id(self, fileid, suffix=None):
    """Return True or false for the presence of fileid in the store.

    suffix, if present, is a per file suffix, i.e. for digital signature
    data.
    """
    raise NotImplementedError
def listable(self):
    """Return True if this store is able to be listed."""
    # Listability is equated with providing __iter__; the abstract base
    # does not define it, so only iterable subclasses report True.
    return (getattr(self, "__iter__", None) is not None)
100
def copy_all_ids(self, store_from, pb=None):
101
"""Copy all the file ids from store_from into self."""
102
if not store_from.listable():
103
raise UnlistableStore(store_from)
105
for count, file_id in enumerate(store_from):
107
pb.update('listing files', count, count)
111
mutter('copy_all ids: %r', ids)
112
self.copy_multi(store_from, ids, pb=pb)
114
def copy_multi(self, other, ids, pb=None, permit_failure=False):
115
"""Copy texts for ids from other into self.
117
If an id is present in self, it is skipped. A count of copied
118
ids is returned, which may be less than len(ids).
120
:param other: Another Store object
121
:param ids: A list of entry ids to be copied
122
:param pb: A ProgressBar object, if none is given, the default will be created.
123
:param permit_failure: Allow missing entries to be ignored
124
:return: (n_copied, [failed]) The number of entries copied successfully,
125
followed by a list of entries which could not be copied (because they
129
pb.update('preparing to copy')
132
ids = [osutils.safe_file_id(i) for i in ids] # get the list for showing a length.
135
if self.has_id(fileid):
138
self._copy_one(fileid, None, other, pb)
139
for suffix in self._suffixes:
141
self._copy_one(fileid, suffix, other, pb)
145
pb.update('copy', count, len(ids))
151
assert count == len(ids)
156
def _copy_one(self, fileid, suffix, other, pb):
    """Most generic copy-one object routine.

    Subclasses can override this to provide an optimised
    copy between their own instances. Such overriden routines
    should call this if they have no optimised facility for a
    specific file id.
    """
    mutter('Store._copy_one: %r', fileid)
    # Fetch from the source store and re-add locally; suffix is passed
    # through so per-file suffixes (e.g. signatures) are copied too.
    f = other.get(fileid, suffix)
    self.add(f, fileid, suffix)
169
class TransportStore(Store):
170
"""A TransportStore is a Store superclass for Stores that use Transports."""
172
def add(self, f, fileid, suffix=None):
173
"""Add contents of a file into the store.
175
f -- A file-like object
177
fileid = osutils.safe_file_id(fileid)
178
mutter("add store entry %r", fileid)
179
if isinstance(f, str):
180
symbol_versioning.warn(zero_eleven % 'Passing a string to Store.add',
181
DeprecationWarning, stacklevel=2)
184
names = self._id_to_names(fileid, suffix)
185
if self._transport.has_any(names):
186
raise BzrError("store %r already contains id %r"
187
% (self._transport.base, fileid))
189
# Most of the time, just adding the file will work
190
# if we find a time where it fails, (because the dir
191
# doesn't exist), then create the dir, and try again
192
self._add(names[0], f)
194
def _add(self, relpath, f):
195
"""Actually add the file to the given location.
196
This should be overridden by children.
198
raise NotImplementedError('children need to implement this function.')
200
def _check_fileid(self, fileid):
    """Reject file ids that are not strings or contain path separators."""
    # NOTE(review): basestring is Python 2 only; this module is py2 code.
    if not isinstance(fileid, basestring):
        raise TypeError('Fileids should be a string type: %s %r' % (type(fileid), fileid))
    if '\\' in fileid or '/' in fileid:
        raise ValueError("invalid store id %r" % fileid)
206
def _id_to_names(self, fileid, suffix):
207
"""Return the names in the expected order"""
208
if suffix is not None:
209
fn = self._relpath(fileid, [suffix])
211
fn = self._relpath(fileid)
213
# FIXME RBC 20051128 this belongs in TextStore.
220
def has_id(self, fileid, suffix=None):
    """See Store.has_id."""
    fileid = osutils.safe_file_id(fileid)
    # Present if any of the candidate names (plain/suffixed/compressed)
    # exists on the transport.
    return self._transport.has_any(self._id_to_names(fileid, suffix))
225
def _get_name(self, fileid, suffix=None):
226
"""A special check, which returns the name of an existing file.
228
This is similar in spirit to 'has_id', but it is designed
229
to return information about which file the store has.
231
for name in self._id_to_names(fileid, suffix=suffix):
232
if self._transport.has(name):
236
def _get(self, filename):
    """Return a vanilla file stream for clients to read from.

    This is the body of a template method on 'get', and should be
    implemented by subclasses.
    """
    raise NotImplementedError
244
def get(self, fileid, suffix=None):
245
"""See Store.get()."""
246
fileid = osutils.safe_file_id(fileid)
247
names = self._id_to_names(fileid, suffix)
250
return self._get(name)
251
except errors.NoSuchFile:
253
raise KeyError(fileid)
255
def __init__(self, a_transport, prefixed=False, compressed=False,
256
dir_mode=None, file_mode=None,
258
assert isinstance(a_transport, Transport)
259
super(TransportStore, self).__init__()
260
self._transport = a_transport
261
self._prefixed = prefixed
262
# FIXME RBC 20051128 this belongs in TextStore.
263
self._compressed = compressed
264
self._suffixes = set()
265
self._escaped = escaped
267
# It is okay for these to be None, it just means they
268
# will just use the filesystem defaults
269
self._dir_mode = dir_mode
270
self._file_mode = file_mode
272
def _unescape(self, file_id):
273
"""If filename escaping is enabled for this store, unescape and return the filename."""
275
return urllib.unquote(file_id)
279
def _iter_files_recursive(self):
    """Iterate through the files in the transport."""
    for quoted_relpath in self._transport.iter_files_recursive():
        # transport iterator always returns quoted paths, regardless of
        # the escaping scheme, so unquote before yielding them.
        yield urllib.unquote(quoted_relpath)
287
for relpath in self._iter_files_recursive():
288
# worst case is one of each suffix.
289
name = os.path.basename(relpath)
290
if name.endswith('.gz'):
293
for count in range(len(self._suffixes)):
294
for suffix in self._suffixes:
295
if name.endswith('.' + suffix):
298
yield self._unescape(name)
301
return len(list(self.__iter__()))
303
def _relpath(self, fileid, suffixes=None):
304
self._check_fileid(fileid)
306
for suffix in suffixes:
307
if not suffix in self._suffixes:
308
raise ValueError("Unregistered suffix %r" % suffix)
309
self._check_fileid(suffix)
312
fileid = self._escape_file_id(fileid)
314
# hash_prefix adds the '/' separator
315
prefix = self.hash_prefix(fileid, escaped=True)
318
path = prefix + fileid
319
full_path = u'.'.join([path] + suffixes)
320
return urlutils.escape(full_path)
322
def _escape_file_id(self, file_id):
323
"""Turn a file id into a filesystem safe string.
325
This is similar to a plain urllib.quote, except
326
it uses specific safe characters, so that it doesn't
327
have to translate a lot of valid file ids.
329
if not self._escaped:
331
if isinstance(file_id, unicode):
332
file_id = file_id.encode('utf-8')
333
# @ does not get escaped. This is because it is a valid
334
# filesystem character we use all the time, and it looks
335
# a lot better than seeing %40 all the time.
336
safe = "abcdefghijklmnopqrstuvwxyz0123456789-_@,."
337
r = [((c in safe) and c or ('%%%02x' % ord(c)))
341
def hash_prefix(self, fileid, escaped=False):
    """Return the two-hex-digit directory prefix (with trailing '/') for fileid.

    :param fileid: the file id to hash; should be unescaped unless
        escaped is True.
    :param escaped: if True, fileid is already escaped and is hashed as-is.
    """
    # fileid should be unescaped
    if not escaped and self._escaped:
        fileid = self._escape_file_id(fileid)
    # Low byte of the adler32 checksum spreads ids over 256 directories.
    return "%02x/" % (adler32(fileid) & 0xff)
348
if self._transport is None:
349
return "%s(None)" % (self.__class__.__name__)
351
return "%s(%r)" % (self.__class__.__name__, self._transport.base)
356
"""Return True if this store is able to be listed."""
357
return self._transport.listable()
359
def register_suffix(self, suffix):
    """Register a suffix as being expected in this store.

    :raises ValueError: if the suffix is 'gz' (reserved for compression)
        or is not a valid file id fragment.
    """
    self._check_fileid(suffix)
    if suffix == 'gz':
        raise ValueError('You cannot register the "gz" suffix.')
    self._suffixes.add(suffix)
366
75
def total_size(self):
367
76
"""Return (count, bytes)
373
for relpath in self._transport.iter_files_recursive():
375
total += self._transport.stat(relpath).st_size
84
total += self._item_size(fid)
88
class ImmutableStore(Store):
89
"""Store that stores files on disk.
91
TODO: Atomic add by writing to a temporary file and renaming.
92
TODO: Guard against the same thing being stored twice, compressed and
93
uncompressed during copy_multi_immutable - the window is for a
94
matching store with some crack code that lets it offer a
95
non gz FOO and then a fz FOO.
97
In bzr 0.0.5 and earlier, files within the store were marked
98
readonly on disk. This is no longer done but existing stores need
102
def __init__(self, basedir):
    """Create a store rooted at the directory basedir."""
    super(ImmutableStore, self).__init__()
    self._basedir = basedir
106
def _path(self, entry_id):
    """Return the on-disk path for entry_id, after validating it.

    :raises TypeError: if entry_id is not a string.
    :raises ValueError: if entry_id contains a path separator.
    """
    if not isinstance(entry_id, basestring):
        raise TypeError(type(entry_id))
    if '\\' in entry_id or '/' in entry_id:
        raise ValueError("invalid store id %r" % entry_id)
    return os.path.join(self._basedir, entry_id)
114
return "%s(%r)" % (self.__class__.__name__, self._basedir)
116
def add(self, f, fileid, compressed=True):
117
"""Add contents of a file into the store.
119
f -- An open file, or file-like object."""
120
# FIXME: Only works on files that will fit in memory
122
from bzrlib.atomicfile import AtomicFile
124
mutter("add store entry %r" % (fileid))
125
if isinstance(f, types.StringTypes):
130
p = self._path(fileid)
131
if os.access(p, os.F_OK) or os.access(p + '.gz', os.F_OK):
132
raise BzrError("store %r already contains id %r" % (self._basedir, fileid))
138
af = AtomicFile(fn, 'wb')
141
gf = gzip.GzipFile(mode='wb', fileobj=af)
151
def copy_multi(self, other, ids, permit_failure=False):
152
"""Copy texts for ids from other into self.
154
If an id is present in self, it is skipped.
156
Returns (count_copied, failed), where failed is a collection of ids
157
that could not be copied.
159
pb = bzrlib.ui.ui_factory.progress_bar()
161
pb.update('preparing to copy')
162
to_copy = [id for id in ids if id not in self]
163
if isinstance(other, ImmutableStore):
164
return self.copy_multi_immutable(other, to_copy, pb,
165
permit_failure=permit_failure)
170
pb.update('copy', count, len(to_copy))
171
if not permit_failure:
172
self.add(other[id], id)
380
@deprecated_function(zero_eight)
381
def copy_all(store_from, store_to, pb=None):
181
if not permit_failure:
182
assert count == len(to_copy)
186
def copy_multi_immutable(self, other, to_copy, pb, permit_failure=False):
191
other_p = other._path(id)
193
osutils.link_or_copy(other_p, p)
194
except (IOError, OSError), e:
195
if e.errno == errno.ENOENT:
196
if not permit_failure:
197
osutils.link_or_copy(other_p+".gz", p+".gz")
200
osutils.link_or_copy(other_p+".gz", p+".gz")
202
if e.errno == errno.ENOENT:
210
pb.update('copy', count, len(to_copy))
211
assert count == len(to_copy)
215
def __contains__(self, fileid):
    """True if fileid is stored, either plain or gzip-compressed."""
    p = self._path(fileid)
    return (os.access(p, os.R_OK)
            or os.access(p + '.gz', os.R_OK))
221
def _item_size(self, fid):
224
return os.stat(p)[ST_SIZE]
226
return os.stat(p + '.gz')[ST_SIZE]
228
# TODO: Guard against the same thing being stored twice,
229
# compressed and uncompressed
232
for f in os.listdir(self._basedir):
234
# TODO: case-insensitive?
240
return len(os.listdir(self._basedir))
242
def __getitem__(self, fileid):
243
"""Returns a file reading from a particular entry."""
244
p = self._path(fileid)
246
return gzip.GzipFile(p + '.gz', 'rb')
248
if e.errno != errno.ENOENT:
254
if e.errno != errno.ENOENT:
257
raise KeyError(fileid)
260
class ImmutableScratchStore(ImmutableStore):
261
"""Self-destructing test subclass of ImmutableStore.
263
The Store only exists for the lifetime of the Python object.
264
Obviously you should not put anything precious in it.
267
def __init__(self):
    """Create the scratch store in a fresh temporary directory."""
    super(ImmutableScratchStore, self).__init__(tempfile.mkdtemp())
270
for f in os.listdir(self._basedir):
271
fpath = os.path.join(self._basedir, f)
272
# needed on windows, and maybe some other filesystems
273
os.chmod(fpath, 0600)
275
os.rmdir(self._basedir)
276
mutter("%r destroyed" % self)
279
class ImmutableMemoryStore(Store):
280
"""A memory only store."""
283
super(ImmutableMemoryStore, self).__init__()
286
def add(self, stream, fileid, compressed=True):
    """Store the full contents of stream under fileid.

    compressed is accepted for interface compatibility with other
    stores; this in-memory store keeps the raw bytes regardless.
    """
    # 'in' instead of the deprecated dict.has_key (same semantics).
    if fileid in self._contents:
        raise StoreError("fileid %s already in the store" % fileid)
    self._contents[fileid] = stream.read()
291
def __getitem__(self, fileid):
    """Returns a file reading from a particular entry."""
    # 'in' instead of the deprecated dict.has_key (same semantics).
    if fileid not in self._contents:
        raise KeyError(fileid)
    return StringIO(self._contents[fileid])
297
def _item_size(self, fileid):
    """Return the stored size in bytes of fileid's contents."""
    return len(self._contents[fileid])
301
def __iter__(self):
    """Iterate over the stored file ids."""
    return iter(self._contents.keys())
304
class RemoteStore(object):
306
def __init__(self, baseurl):
    """Remember the base URL that entries are fetched relative to."""
    self._baseurl = baseurl
309
def _path(self, name):
311
raise ValueError('invalid store id', name)
312
return self._baseurl + '/' + name
314
def __getitem__(self, fileid):
316
from bzrlib.remotebranch import get_url
317
p = self._path(fileid)
319
return get_url(p, compressed=True)
320
except urllib2.URLError:
323
return get_url(p, compressed=False)
324
except urllib2.URLError:
325
raise KeyError(fileid)
327
def __contains__(self, fileid):
336
"""A store that caches data locally, to avoid repeated downloads.
337
The precacache method should be used to avoid server round-trips for
341
def __init__(self, store, cache_dir):
    """Wrap store, caching fetched entries in an ImmutableStore at cache_dir."""
    self.source_store = store
    self.cache_store = ImmutableStore(cache_dir)
345
def __getitem__(self, id):
    """Return the entry for id, filling the local cache on first access."""
    mutter("Cache add %s" % id)
    if id not in self.cache_store:
        self.cache_store.add(self.source_store[id], id)
    return self.cache_store[id]
351
def prefetch(self, ids):
    """Copy a series of ids into the cache, before they are used.

    For remote stores that support pipelining or async downloads, this can
    increase speed considerably.

    Failures while prefetching are ignored.
    """
    mutter("Prefetch of ids %s" % ",".join(ids))
    # permit_failure=True implements the documented "failures are
    # ignored" behaviour of prefetching.
    self.cache_store.copy_multi(self.source_store, ids,
                                permit_failure=True)
362
def copy_all(store_from, store_to):
382
363
"""Copy all ids from one store to another."""
383
store_to.copy_all_ids(store_from, pb)
364
if not hasattr(store_from, "__iter__"):
365
raise UnlistableStore(store_from)
366
ids = [f for f in store_from]
367
store_to.copy_multi(store_from, ids)