import gzip
import errno
import os
import tempfile
import types
import urllib
from stat import ST_SIZE
from StringIO import StringIO
from zlib import adler32

import bzrlib
import bzrlib.errors as errors
from bzrlib.errors import BzrError, UnlistableStore
from bzrlib.trace import mutter
import bzrlib.transport as transport
from bzrlib.transport.local import LocalTransport
import bzrlib.ui


class Store(object):
    """This class represents the abstract storage layout for saving information.

    Files can be added, but not modified once they are in.  Typically
    the hash is used as the name, or something else known to be unique,
    such as a UUID.
    """

    def __len__(self):
        raise NotImplementedError('Children should define their length')

    def get(self, fileid, suffix=None):
        """Returns a file reading from a particular entry.

        If suffix is present, retrieve the named suffix for fileid.
        """
        raise NotImplementedError

    def __getitem__(self, fileid):
        """DEPRECATED. Please use .get(fileid) instead."""
        raise NotImplementedError

    #def __contains__(self, fileid):
    #    """Deprecated, please use has_id"""
    #    raise NotImplementedError

    def __iter__(self):
        raise NotImplementedError

    def add(self, f, fileid):
        """Add a file object f to the store accessible from the given fileid"""
        raise NotImplementedError('Children of Store must define their method of adding entries.')

    def has_id(self, fileid, suffix=None):
        """Return True or False for the presence of fileid in the store.

        suffix, if present, is a per file suffix, i.e. for digital signature
        data.
        """
        raise NotImplementedError

    def listable(self):
        """Return True if this store is able to be listed."""
        return hasattr(self, "__iter__")
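
    # Illustrative sketch (not part of bzrlib): a concrete Store only needs to
    # supply add(), get() (or __getitem__), has_id() and __iter__.  A
    # hypothetical in-memory implementation might look like:
    #
    #     class DictStore(Store):
    #         def __init__(self):
    #             self._contents = {}
    #         def add(self, f, fileid):
    #             self._contents[fileid] = f.read()
    #         def get(self, fileid, suffix=None):
    #             return StringIO(self._contents[fileid])
    #         def has_id(self, fileid, suffix=None):
    #             return fileid in self._contents
    #         def __iter__(self):
    #             return iter(self._contents)
    #         def __len__(self):
    #             return len(self._contents)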

    def copy_multi(self, other, ids, pb=None, permit_failure=False):
        """Copy texts for ids from other into self.

        If an id is present in self, it is skipped.  A count of copied
        ids is returned, which may be less than len(ids).

        :param other: Another Store object
        :param ids: A list of entry ids to be copied
        :param pb: A ProgressBar object; if none is given, the default will be created.
        :param permit_failure: Allow missing entries to be ignored
        :return: (n_copied, [failed]) The number of entries copied successfully,
            followed by a list of entries which could not be copied (because they
            were missing)
        """
        if pb is None:
            pb = bzrlib.ui.ui_factory.progress_bar()

        ids = list(ids)  # get the list for showing a length.
        pb.update('preparing to copy')
        failed = set()
        count = 0
        for fileid in ids:
            count += 1
            if self.has_id(fileid):
                continue
            try:
                self._copy_one(fileid, None, other, pb)
                for suffix in self._suffixes:
                    try:
                        self._copy_one(fileid, suffix, other, pb)
                    except KeyError:
                        pass
                pb.update('copy', count, len(ids))
            except KeyError:
                if permit_failure:
                    failed.add(fileid)
                else:
                    raise
        assert count == len(ids)
        pb.clear()
        return count, failed

    def _copy_one(self, fileid, suffix, other, pb):
        """Most generic copy-one object routine.

        Subclasses can override this to provide an optimised
        copy between their own instances. Such overridden routines
        should call this if they have no optimised facility for a
        specific 'other'.
        """
        mutter('Store._copy_one: %r', fileid)
        f = other.get(fileid, suffix)
        self.add(f, fileid, suffix)


class ImmutableStore(object):
    """Store that holds files indexed by unique names.

    Files can be added, but not modified once they are in.  Typically
    the hash is used as the name, or something else known to be unique,
    such as a UUID.

    >>> st = ImmutableScratchStore()
    >>> st.add(StringIO('hello'), 'aa')

    You are not allowed to add an id that is already present.

    Entries can be retrieved as files, which may then be read.

    >>> st.add(StringIO('goodbye'), '123123')
    >>> st['123123'].read()
    'goodbye'

    TODO: Atomic add by writing to a temporary file and renaming.

    In bzr 0.0.5 and earlier, files within the store were marked
    readonly on disk.  This is no longer done but existing stores need
    to be accommodated.
    """

    def __init__(self, basedir):
        self._basedir = basedir

    def _path(self, entry_id):
        if not isinstance(entry_id, basestring):
            raise TypeError(type(entry_id))
        if '\\' in entry_id or '/' in entry_id:
            raise ValueError("invalid store id %r" % entry_id)
        return os.path.join(self._basedir, entry_id)

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self._basedir)

    def add(self, f, fileid, compressed=True):
        """Add contents of a file into the store.

        f -- An open file, or file-like object."""
        # FIXME: Only works on files that will fit in memory
        from bzrlib.atomicfile import AtomicFile

        mutter("add store entry %r" % (fileid))
        if isinstance(f, types.StringTypes):
            content = f
        else:
            content = f.read()

        p = self._path(fileid)
        if os.access(p, os.F_OK) or os.access(p + '.gz', os.F_OK):
            raise BzrError("store %r already contains id %r" % (self._basedir, fileid))

        fn = p
        if compressed:
            fn = fn + '.gz'

        af = AtomicFile(fn, 'wb')
        try:
            if compressed:
                gf = gzip.GzipFile(mode='wb', fileobj=af)
                gf.write(content)
                gf.close()
            else:
                af.write(content)
            af.commit()
        finally:
            af.close()

    def copy_multi(self, other, ids, permit_failure=False):
        """Copy texts for ids from other into self.

        If an id is present in self, it is skipped.

        Returns (count_copied, failed), where failed is a collection of ids
        that could not be copied.
        """
        pb = bzrlib.ui.ui_factory.progress_bar()

        pb.update('preparing to copy')
        to_copy = [id for id in ids if id not in self]
        if isinstance(other, ImmutableStore):
            return self.copy_multi_immutable(other, to_copy, pb,
                                             permit_failure=permit_failure)
        count = 0
        failed = set()
        for id in to_copy:
            count += 1
            pb.update('copy', count, len(to_copy))
            if not permit_failure:
                self.add(other[id], id)
            else:
                try:
                    entry = other[id]
                except KeyError:
                    failed.add(id)
                    continue
                self.add(entry, id)
        if not permit_failure:
            assert count == len(to_copy)
        pb.clear()
        return count, failed

    def copy_multi_immutable(self, other, to_copy, pb, permit_failure=False):
        from shutil import copyfile
        count = 0
        failed = set()
        for id in to_copy:
            p = self._path(id)
            other_p = other._path(id)
            try:
                copyfile(other_p, p)
            except IOError, e:
                if e.errno == errno.ENOENT:
                    if not permit_failure:
                        copyfile(other_p + ".gz", p + ".gz")
                    else:
                        try:
                            copyfile(other_p + ".gz", p + ".gz")
                        except IOError, e:
                            if e.errno == errno.ENOENT:
                                failed.add(id)
                            else:
                                raise
                else:
                    raise
            count += 1
            pb.update('copy', count, len(to_copy))
        assert count == len(to_copy)
        pb.clear()
        return count, failed

    def __contains__(self, fileid):
        """Return True if fileid is present, compressed or not."""
        p = self._path(fileid)
        return (os.access(p, os.R_OK)
                or os.access(p + '.gz', os.R_OK))

    # TODO: Guard against the same thing being stored twice,
    # compressed and uncompressed

    def __iter__(self):
        for f in os.listdir(self._basedir):
            if f.endswith('.gz'):
                # TODO: case-insensitive?
                yield f[:-3]
            else:
                yield f

    def __len__(self):
        return len(os.listdir(self._basedir))

    def __getitem__(self, fileid):
        """Returns a file reading from a particular entry."""
        p = self._path(fileid)
        try:
            return gzip.GzipFile(p + '.gz', 'rb')
        except IOError, e:
            if e.errno != errno.ENOENT:
                raise

        try:
            return file(p, 'rb')
        except IOError, e:
            if e.errno != errno.ENOENT:
                raise

        raise IndexError(fileid)

    def total_size(self):
        """Return (count, bytes)

        This is the (compressed) size stored on disk, not the size of
        the content."""
        total = 0
        count = 0
        for fid in self:
            count += 1
            p = self._path(fid)
            try:
                total += os.stat(p)[ST_SIZE]
            except OSError:
                total += os.stat(p + '.gz')[ST_SIZE]

        return count, total
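
# Usage sketch (illustrative only, not executed): copying entries between two
# ImmutableStores.  ImmutableScratchStore, defined below, is the
# self-destructing variant used by the doctests above.
#
#     src = ImmutableScratchStore()
#     dst = ImmutableScratchStore()
#     src.add(StringIO('hello'), 'id-1')
#     count, failed = dst.copy_multi(src, list(src), permit_failure=True)
#     # count is the number of entries copied; failed collects any ids that
#     # were missing from src.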


class TransportStore(Store):
    """A TransportStore is a Store superclass for Stores that use Transports."""

    def add(self, f, fileid, suffix=None):
        """Add contents of a file into the store.

        f -- A file-like object, or string
        """
        mutter("add store entry %r", fileid)

        names = self._id_to_names(fileid, suffix)
        if self._transport.has_any(names):
            raise BzrError("store %r already contains id %r"
                           % (self._transport.base, fileid))

        # Most of the time, just adding the file will work
        # if we find a time where it fails, (because the dir
        # doesn't exist), then create the dir, and try again
        self._add(names[0], f)

    def _add(self, relpath, f):
        """Actually add the file to the given location.

        This should be overridden by children.
        """
        raise NotImplementedError('children need to implement this function.')

    def _check_fileid(self, fileid):
        if not isinstance(fileid, basestring):
            raise TypeError('Fileids should be a string type: %s %r' % (type(fileid), fileid))
        if '\\' in fileid or '/' in fileid:
            raise ValueError("invalid store id %r" % fileid)

    def _id_to_names(self, fileid, suffix):
        """Return the names in the expected order"""
        if suffix is not None:
            fn = self._relpath(fileid, [suffix])
        else:
            fn = self._relpath(fileid)

        fn_gz = fn + '.gz'
        if self._compressed:
            return fn_gz, fn
        else:
            return fn, fn_gz

    def has_id(self, fileid, suffix=None):
        """See Store.has_id."""
        return self._transport.has_any(self._id_to_names(fileid, suffix))

    def _get_name(self, fileid, suffix=None):
        """A special check, which returns the name of an existing file.

        This is similar in spirit to 'has_id', but it is designed
        to return information about which file the store has.
        """
        for name in self._id_to_names(fileid, suffix=suffix):
            if self._transport.has(name):
                return name
        return None

    def _get(self, filename):
        """Return a vanilla file stream for clients to read from.

        This is the body of a template method on 'get', and should be
        implemented by subclasses.
        """
        raise NotImplementedError

    def get(self, fileid, suffix=None):
        """See Store.get()."""
        names = self._id_to_names(fileid, suffix)
        for name in names:
            try:
                return self._get(name)
            except errors.NoSuchFile:
                pass
        raise KeyError(fileid)

    def __init__(self, a_transport, prefixed=False, compressed=False):
        assert isinstance(a_transport, transport.Transport)
        super(TransportStore, self).__init__()
        self._transport = a_transport
        self._prefixed = prefixed
        self._compressed = compressed
        self._suffixes = set()

    def _iter_files_recursive(self):
        """Iterate through the files in the transport."""
        for quoted_relpath in self._transport.iter_files_recursive():
            yield urllib.unquote(quoted_relpath)
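
    # Naming sketch (illustrative): with prefixed=True, compressed=True and the
    # 'sig' suffix registered, an entry 'id-1' is looked up under:
    #
    #     self._id_to_names('id-1', None)    # ('xx/id-1.gz', 'xx/id-1')
    #     self._id_to_names('id-1', 'sig')   # ('xx/id-1.sig.gz', 'xx/id-1.sig')
    #
    # where 'xx/' stands for hash_prefix('id-1').  The compressed name comes
    # first when self._compressed is set, so it is preferred by get().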

    def __iter__(self):
        for relpath in self._iter_files_recursive():
            # worst case is one of each suffix.
            name = os.path.basename(relpath)
            if name.endswith('.gz'):
                name = name[:-3]
            skip = False
            for count in range(len(self._suffixes)):
                for suffix in self._suffixes:
                    if name.endswith('.' + suffix):
                        skip = True
            if not skip:
                yield name

    def __len__(self):
        return len(list(self.__iter__()))

    def _relpath(self, fileid, suffixes=None):
        self._check_fileid(fileid)
        if suffixes:
            for suffix in suffixes:
                if not suffix in self._suffixes:
                    raise ValueError("Unregistered suffix %r" % suffix)
                self._check_fileid(suffix)
        else:
            suffixes = []
        if self._prefixed:
            path = [hash_prefix(fileid) + fileid]
        else:
            path = [fileid]
        path.extend(suffixes)
        return transport.urlescape(u'.'.join(path))

    def __repr__(self):
        if self._transport is None:
            return "%s(None)" % (self.__class__.__name__)
        else:
            return "%s(%r)" % (self.__class__.__name__, self._transport.base)

    def listable(self):
        """Return True if this store is able to be listed."""
        return self._transport.listable()

    def register_suffix(self, suffix):
        """Register a suffix as being expected in this store."""
        self._check_fileid(suffix)
        if suffix == 'gz':
            raise ValueError('You cannot register the "gz" suffix.')
        self._suffixes.add(suffix)

    def total_size(self):
        """Return (count, bytes)

        This is the (compressed) size stored on disk, not the size of
        the content."""
        total = 0
        count = 0
        for relpath in self._transport.iter_files_recursive():
            count += 1
            total += self._transport.stat(relpath).st_size

        return count, total
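
# A concrete TransportStore only has to fill in the two template methods.
# Hypothetical sketch (bzrlib.store.text.TextStore is the real implementation,
# which also handles gzip compression):
#
#     class RawTextStore(TransportStore):
#         """Store entries as plain files through the underlying transport."""
#
#         def _add(self, relpath, f):
#             self._transport.put(relpath, f)
#
#         def _get(self, filename):
#             return self._transport.get(filename)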


def ImmutableMemoryStore():
    # Imported here to avoid a circular import when this module is loaded.
    import bzrlib.store.text
    import bzrlib.transport.memory
    return bzrlib.store.text.TextStore(transport.memory.MemoryTransport())


class CachedStore(Store):
    """A store that caches data locally, to avoid repeated downloads.

    The precache method should be used to avoid server round-trips for
    important data.
    """

    def __init__(self, store, cache_dir):
        super(CachedStore, self).__init__()
        self.source_store = store
        # This clones the source store type with a locally bound
        # transport. FIXME: it assumes a constructor is == cloning.
        # clonable store - it might be nicer to actually have a clone()
        # or something. RBC 20051003
        self.cache_store = store.__class__(LocalTransport(cache_dir))

    def get(self, id):
        mutter("Cache add %s", id)
        if id not in self.cache_store:
            self.cache_store.add(self.source_store.get(id), id)
        return self.cache_store.get(id)

    def has_id(self, fileid, suffix=None):
        """See Store.has_id."""
        if self.cache_store.has_id(fileid, suffix):
            return True
        if self.source_store.has_id(fileid, suffix):
            # We could asynchronously copy at this time
            return True
        return False


class ImmutableScratchStore(ImmutableStore):
    """Self-destructing test subclass of ImmutableStore.

    The Store only exists for the lifetime of the Python object.
    Obviously you should not put anything precious in it.
    """

    def __init__(self):
        ImmutableStore.__init__(self, tempfile.mkdtemp())

    def __del__(self):
        for f in os.listdir(self._basedir):
            fpath = os.path.join(self._basedir, f)
            # needed on windows, and maybe some other filesystems
            os.chmod(fpath, 0600)
            os.remove(fpath)
        os.rmdir(self._basedir)
        mutter("%r destroyed" % self)


def copy_all(store_from, store_to):
    """Copy all ids from one store to another."""
    # TODO: Optional progress indicator
    if not store_from.listable():
        raise UnlistableStore(store_from)
    ids = [f for f in store_from]
    mutter('copy_all ids: %r', ids)
    store_to.copy_multi(store_from, ids)


def hash_prefix(fileid):
    return "%02x/" % (adler32(fileid) & 0xff)