    """This class represents the abstract storage layout for saving information.

    Files can be added, but not modified once they are in.  Typically
    the hash is used as the name, or something else known to be unique,
    such as a UUID.

    The examples below use the ImmutableScratchStore test subclass defined
    at the end of this module.

    >>> st = ImmutableScratchStore()

    >>> st.add(StringIO('hello'), 'aa')

    You are not allowed to add an id that is already present.

    Entries can be retrieved as files, which may then be read.

    >>> st.add(StringIO('goodbye'), '123123')
    >>> st['123123'].read()
    'goodbye'

    TODO: Atomic add by writing to a temporary file and renaming.

    In bzr 0.0.5 and earlier, files within the store were marked
    readonly on disk.  This is no longer done but existing stores need
    to be accommodated.
    """

    def __len__(self):
        raise NotImplementedError('Children should define their length')

    def get(self, fileid, suffix=None):
        """Returns a file reading from a particular entry.

        If suffix is present, retrieve the named suffix for fileid.
        """
        raise NotImplementedError

    def __getitem__(self, fileid):
        """DEPRECATED. Please use .get(fileid) instead."""
        raise NotImplementedError

    #def __contains__(self, fileid):
    #    """Deprecated, please use has_id"""
    #    raise NotImplementedError

    def __iter__(self):
        raise NotImplementedError

    def add(self, f, fileid):
        """Add a file object f to the store accessible from the given fileid."""
        raise NotImplementedError('Children of Store must define their method of adding entries.')

    def has_id(self, fileid, suffix=None):
        """Return True or False for the presence of fileid in the store.

        suffix, if present, is a per-file suffix, i.e. for digital signature
        data.
        """
        raise NotImplementedError

    def listable(self):
        """Return True if this store is able to be listed."""
        return hasattr(self, "__iter__")

    def copy_multi(self, other, ids, pb=None, permit_failure=False):
        """Copy texts for ids from other into self.

        See ImmutableStore.copy_multi below for the detailed contract.
        """
        raise NotImplementedError
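
# Illustrative sketch, not part of the original module: how callers typically
# drive the abstract Store interface defined above.  The function and its
# arguments are hypothetical; 'store' is any concrete subclass instance
# (e.g. a TransportStore subclass such as TextStore).
def _example_store_usage(store, fileid, content_file):
    if not store.has_id(fileid):
        store.add(content_file, fileid)
    text = store.get(fileid).read()
    if store.listable():
        all_ids = list(store)       # needs an __iter__ implementation
        assert fileid in all_ids
    return text
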
class ImmutableStore(object):
    """Store that holds files indexed by unique names, kept in a local directory."""

    def __init__(self, basedir):
        self._basedir = basedir

    def _path(self, id):
        if '\\' in id or '/' in id:
            raise ValueError("invalid store id %r" % id)
        return os.path.join(self._basedir, id)

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self._basedir)

    def add(self, f, fileid, compressed=True):
        """Add contents of a file into the store.

        f -- An open file, or file-like object."""
        # FIXME: Only works on files that will fit in memory
        from bzrlib.atomicfile import AtomicFile

        mutter("add store entry %r" % (fileid,))
        if isinstance(f, types.StringTypes):
            content = f
        else:
            content = f.read()

        p = self._path(fileid)
        if os.access(p, os.F_OK) or os.access(p + '.gz', os.F_OK):
            raise BzrError("store %r already contains id %r"
                           % (self._basedir, fileid))

        fn = p
        if compressed:
            fn = fn + '.gz'

        af = AtomicFile(fn, 'wb')
        try:
            if compressed:
                gf = gzip.GzipFile(mode='wb', fileobj=af)
                gf.write(content)
                gf.close()
            else:
                af.write(content)
            af.commit()
        finally:
            af.close()
    def copy_multi(self, other, ids, pb=None, permit_failure=False):
        """Copy texts for ids from other into self.

        If an id is present in self, it is skipped.  A count of copied
        ids is returned, which may be less than len(ids).

        :param other: Another Store object
        :param ids: A list of entry ids to be copied
        :param pb: A ProgressBar object; if none is given, a default one is created.
        :param permit_failure: Allow missing entries to be ignored
        :return: (n_copied, [failed]) The number of entries copied successfully,
            followed by a list of entries which could not be copied (because they
            were missing)
        """
        if pb is None:
            pb = bzrlib.ui.ui_factory.progress_bar()

        ids = list(ids)     # get the list for showing a length
        pb.update('preparing to copy')
        to_copy = [id for id in ids if id not in self]
        if isinstance(other, ImmutableStore):
            return self.copy_multi_immutable(other, to_copy, pb,
                                             permit_failure=permit_failure)
        count = 0
        failed = set()
        for id in to_copy:
            count += 1
            pb.update('copy', count, len(to_copy))
            if not permit_failure:
                self.add(other[id], id)
            else:
                try:
                    entry = other[id]
                except (KeyError, IndexError):
                    failed.add(id)
                    continue
                self.add(entry, id)
        if not permit_failure:
            assert count == len(to_copy)
        return count, failed
    def copy_multi_immutable(self, other, to_copy, pb, permit_failure=False):
        """Copy entries from another ImmutableStore by copying the raw files."""
        from shutil import copyfile
        count = 0
        failed = set()
        for id in to_copy:
            count += 1
            p = self._path(id)
            other_p = other._path(id)
            try:
                copyfile(other_p, p)
            except IOError, e:
                if e.errno == errno.ENOENT:
                    if not permit_failure:
                        copyfile(other_p + ".gz", p + ".gz")
                    else:
                        try:
                            copyfile(other_p + ".gz", p + ".gz")
                        except IOError, e:
                            if e.errno == errno.ENOENT:
                                failed.add(id)
                            else:
                                raise
                else:
                    raise
            pb.update('copy', count, len(to_copy))
        assert count == len(to_copy)
        return count, failed
    def _copy_one(self, fileid, suffix, other, pb):
        """Most generic copy-one object routine.

        Subclasses can override this to provide an optimised
        copy between their own instances.  Such overridden routines
        should call this if they have no optimised facility for a
        specific 'other'.
        """
        # Note: relies on an add() that accepts a suffix argument
        # (see TransportStore.add below).
        mutter('Store._copy_one: %r', fileid)
        f = other.get(fileid, suffix)
        self.add(f, fileid, suffix)
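
# Illustrative sketch, not part of the original module: adding entries to a
# local ImmutableStore.  The directory and ids are hypothetical; entries are
# gzip-compressed by default, and re-adding an existing id raises BzrError.
def _example_immutable_add():
    from StringIO import StringIO
    import tempfile
    store = ImmutableStore(tempfile.mkdtemp())
    store.add(StringIO('hello world'), 'example-id')    # writes example-id.gz
    store.add('raw string data', 'example-id-2',        # strings are accepted too
              compressed=False)
    return store
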
class TransportStore(Store):
    """A TransportStore is a Store superclass for Stores that use Transports."""

    def add(self, f, fileid, suffix=None):
        """Add contents of a file into the store.

        f -- A file-like object, or string
        """
        mutter("add store entry %r", fileid)

        names = self._id_to_names(fileid, suffix)
        if self._transport.has_any(names):
            raise BzrError("store %r already contains id %r"
                           % (self._transport.base, fileid))

        # Most of the time, just adding the file will work
        # if we find a time where it fails, (because the dir
        # doesn't exist), then create the dir, and try again
        self._add(names[0], f)
    def _add(self, relpath, f):
        """Actually add the file to the given location.

        This should be overridden by children.
        """
        raise NotImplementedError('children need to implement this function.')

    def _check_fileid(self, fileid):
        if not isinstance(fileid, basestring):
            raise TypeError('Fileids should be a string type: %s %r' % (type(fileid), fileid))
        if '\\' in fileid or '/' in fileid:
            raise ValueError("invalid store id %r" % fileid)
    def _id_to_names(self, fileid, suffix):
        """Return the names in the expected order."""
        if suffix is not None:
            fn = self._relpath(fileid, [suffix])
        else:
            fn = self._relpath(fileid)

        fn_gz = fn + '.gz'
        if self._compressed:
            return fn_gz, fn
        else:
            return fn, fn_gz
    def has_id(self, fileid, suffix=None):
        """See Store.has_id."""
        return self._transport.has_any(self._id_to_names(fileid, suffix))
    def _get_name(self, fileid, suffix=None):
        """A special check, which returns the name of an existing file.

        This is similar in spirit to 'has_id', but it is designed
        to return information about which file the store has.
        """
        for name in self._id_to_names(fileid, suffix=suffix):
            if self._transport.has(name):
                return name
        return None
    def _get(self, filename):
        """Return a vanilla file stream for clients to read from.

        This is the body of a template method on 'get', and should be
        implemented by subclasses.
        """
        raise NotImplementedError

    def get(self, fileid, suffix=None):
        """See Store.get()."""
        names = self._id_to_names(fileid, suffix)
        for name in names:
            try:
                return self._get(name)
            except errors.NoSuchFile:
                pass
        raise KeyError(fileid)
    def __init__(self, a_transport, prefixed=False, compressed=False):
        assert isinstance(a_transport, transport.Transport)
        super(TransportStore, self).__init__()
        self._transport = a_transport
        self._prefixed = prefixed
        self._compressed = compressed
        self._suffixes = set()
    def _iter_files_recursive(self):
        """Iterate through the files in the transport."""
        for quoted_relpath in self._transport.iter_files_recursive():
            yield urllib.unquote(quoted_relpath)
    def __contains__(self, fileid):
        """Deprecated; use has_id instead."""
        # Checks both the plain and the '.gz' name via _id_to_names.
        return self.has_id(fileid)

    # TODO: Guard against the same thing being stored twice, compressed and
    # uncompressed.
    def __iter__(self):
        for relpath in self._iter_files_recursive():
            # worst case is one of each suffix.
            name = os.path.basename(relpath)
            if name.endswith('.gz'):
                # TODO: case-insensitive?
                name = name[:-3]
            skip = False
            for count in range(len(self._suffixes)):
                for suffix in self._suffixes:
                    if name.endswith('.' + suffix):
                        skip = True
            if not skip:
                yield name
    def __len__(self):
        return len(list(self.__iter__()))
    def _relpath(self, fileid, suffixes=None):
        self._check_fileid(fileid)
        if suffixes:
            for suffix in suffixes:
                if suffix not in self._suffixes:
                    raise ValueError("Unregistered suffix %r" % suffix)
                self._check_fileid(suffix)
        else:
            suffixes = []
        if self._prefixed:
            path = [hash_prefix(fileid) + fileid]
        else:
            path = [fileid]
        path.extend(suffixes)
        return transport.urlescape(u'.'.join(path))
    def __repr__(self):
        if self._transport is None:
            return "%s(None)" % (self.__class__.__name__)
        else:
            return "%s(%r)" % (self.__class__.__name__, self._transport.base)
    def listable(self):
        """Return True if this store is able to be listed."""
        return self._transport.listable()
    def register_suffix(self, suffix):
        """Register a suffix as being expected in this store."""
        self._check_fileid(suffix)
        if suffix == 'gz':
            raise ValueError('You cannot register the "gz" suffix.')
        self._suffixes.add(suffix)
    def __getitem__(self, fileid):
        """DEPRECATED.  Please use .get(fileid) instead."""
        return self.get(fileid)
    def total_size(self):
        """Return (count, bytes).

        This is the (compressed) size stored on disk, not the size of
        the content."""
        count = 0
        total = 0
        for relpath in self._transport.iter_files_recursive():
            count += 1
            total += self._transport.stat(relpath).st_size
        return count, total
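
# Illustrative sketch, not part of the original module: how registered
# suffixes and compression affect the names a TransportStore looks up.  It
# peeks at the internal _id_to_names/_relpath helpers purely for
# demonstration; the expected tuples assume an unprefixed, uncompressed
# store as reconstructed above.
def _example_suffix_naming():
    store = TransportStore(transport.memory.MemoryTransport())
    store.register_suffix('sig')
    plain = store._id_to_names('foo', None)     # expected: ('foo', 'foo.gz')
    signed = store._id_to_names('foo', 'sig')   # expected: ('foo.sig', 'foo.sig.gz')
    return plain, signed
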
def ImmutableMemoryStore():
    return bzrlib.store.text.TextStore(transport.memory.MemoryTransport())
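
# Illustrative sketch, not part of the original module: a throw-away
# in-memory store, convenient for tests.  Assumes TextStore's usual add/get
# behaviour for file-like objects; the id is hypothetical.
def _example_memory_store():
    from StringIO import StringIO
    store = ImmutableMemoryStore()
    store.add(StringIO('content'), 'an-id')
    return store.get('an-id').read()    # expected: 'content'
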
class CachedStore(Store):
    """A store that caches data locally, to avoid repeated downloads.

    The precache method should be used to avoid server round-trips for
    blobs that you know you need.
    """
    def __init__(self, store, cache_dir):
        super(CachedStore, self).__init__()
        self.source_store = store
        # This clones the source store type with a locally bound
        # transport.  FIXME: it assumes a constructor is == cloning, i.e. a
        # clonable store - it might be nicer to actually have a clone()
        # or something.  RBC 20051003
        self.cache_store = store.__class__(LocalTransport(cache_dir))

    def get(self, id):
        mutter("Cache add %s", id)
        if id not in self.cache_store:
            self.cache_store.add(self.source_store.get(id), id)
        return self.cache_store.get(id)
    def has_id(self, fileid, suffix=None):
        """See Store.has_id."""
        if self.cache_store.has_id(fileid, suffix):
            return True
        if self.source_store.has_id(fileid, suffix):
            # We could asynchronously copy at this time
            return True
        return False
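
# Illustrative sketch, not part of the original module: wrapping a remote
# store so repeated reads are served from a local cache directory.  The
# arguments are hypothetical; source_store must be constructible from a
# single transport (see the FIXME in CachedStore.__init__).
def _example_cached_store(source_store, cache_dir, fileid):
    cached = CachedStore(source_store, cache_dir)
    first = cached.get(fileid).read()    # copies the entry into cache_dir
    second = cached.get(fileid).read()   # served from the local cache
    assert first == second
    return first
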
def copy_all(store_from, store_to):
    """Copy all ids from one store to another."""
    # TODO: Optional progress indicator
    if not store_from.listable():
        raise UnlistableStore(store_from)
    ids = [f for f in store_from]
    mutter('copy_all ids: %r', ids)
    store_to.copy_multi(store_from, ids)
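
# Illustrative sketch, not part of the original module: mirroring one store
# into another with copy_all().  Both stores are hypothetical; the source
# must be listable or UnlistableStore is raised.
def _example_mirror(listable_store, empty_store):
    copy_all(listable_store, empty_store)
    # Every id in the source should now be present in the destination.
    for fileid in listable_store:
        assert empty_store.has_id(fileid)
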
def hash_prefix(fileid):
    return "%02x/" % (adler32(fileid) & 0xff)
class ImmutableScratchStore(ImmutableStore):
    """Self-destructing test subclass of ImmutableStore.

    The Store only exists for the lifetime of the Python object.
    Obviously you should not put anything precious in it.
    """

    def __init__(self):
        ImmutableStore.__init__(self, tempfile.mkdtemp())

    def __del__(self):
        for f in os.listdir(self._basedir):
            fpath = os.path.join(self._basedir, f)
            # needed on windows, and maybe some other filesystems
            os.chmod(fpath, 0600)
            os.remove(fpath)
        os.rmdir(self._basedir)
        mutter("%r destroyed" % self)