class Store(object):
    """This class represents the abstract storage layout for saving information.

    Files can be added, but not modified once they are in.  Typically
    the hash is used as the name, or something else known to be unique,
    such as a UUID.
    """

    def __len__(self):
        raise NotImplementedError('Children should define their length')

    def get(self, fileid, suffix=None):
        """Returns a file reading from a particular entry.

        If suffix is present, retrieve the named suffix for fileid.
        """
        raise NotImplementedError

    def __getitem__(self, fileid):
        """DEPRECATED. Please use .get(fileid) instead."""
        raise NotImplementedError

    #def __contains__(self, fileid):
    #    """Deprecated, please use has_id"""
    #    raise NotImplementedError

    def __iter__(self):
        raise NotImplementedError

    def add(self, f, fileid):
        """Add a file object f to the store accessible from the given fileid"""
        raise NotImplementedError('Children of Store must define their method of adding entries.')

    def has_id(self, fileid, suffix=None):
        """Return True or False for the presence of fileid in the store.

        suffix, if present, is a per-file suffix, i.e. for digital signature
        data.
        """
        raise NotImplementedError

    def listable(self):
        """Return True if this store is able to be listed."""
        return hasattr(self, "__iter__")

    def copy_multi(self, other, ids, pb=None, permit_failure=False):
        """Copy texts for ids from other into self.

        If an id is present in self, it is skipped.  A count of copied
        ids is returned, which may be less than len(ids).

        :param other: Another Store object
        :param ids: A list of entry ids to be copied
        :param pb: A ProgressBar object, if none is given, the default will be created.
        :param permit_failure: Allow missing entries to be ignored
        :return: (n_copied, [failed]) The number of entries copied successfully,
            followed by a list of entries which could not be copied (because they
            were missing)
        """
        if pb is None:
            pb = bzrlib.ui.ui_factory.progress_bar()

        ids = list(ids)  # get the list for showing a length.
        pb.update('preparing to copy')
        count = 0
        failed = set()
        for fileid in ids:
            count += 1
            if self.has_id(fileid):
                continue
            try:
                self._copy_one(fileid, None, other, pb)
                for suffix in self._suffixes:
                    try:
                        self._copy_one(fileid, suffix, other, pb)
                    except KeyError:
                        pass
                pb.update('copy', count, len(ids))
            except KeyError:
                if permit_failure:
                    failed.add(fileid)
                else:
                    raise
        assert count == len(ids)
        pb.clear()
        return count, failed

    def _copy_one(self, fileid, suffix, other, pb):
        """Most generic copy-one object routine.

        Subclasses can override this to provide an optimised
        copy between their own instances.  Such overridden routines
        should call this if they have no optimised facility for a
        specific 'other'.
        """
        mutter('Store._copy_one: %r', fileid)
        f = other.get(fileid, suffix)
        self.add(f, fileid, suffix)


class ImmutableStore(object):
    """Store that holds files indexed by unique names.

    >>> st = ImmutableScratchStore()

    >>> st.add(StringIO('hello'), 'aa')

    You are not allowed to add an id that is already present.

    Entries can be retrieved as files, which may then be read.

    >>> st.add(StringIO('goodbye'), '123123')
    >>> st['123123'].read()
    'goodbye'

    TODO: Atomic add by writing to a temporary file and renaming.

    In bzr 0.0.5 and earlier, files within the store were marked
    readonly on disk.  This is no longer done but existing stores need
    to be accommodated.
    """

    def __init__(self, basedir):
        self._basedir = basedir

    def _path(self, id):
        if '\\' in id or '/' in id:
            raise ValueError("invalid store id %r" % id)
        return os.path.join(self._basedir, id)

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self._basedir)

    def add(self, f, fileid, compressed=True):
        """Add contents of a file into the store.

        f -- An open file, or file-like object."""
        # FIXME: Only works on files that will fit in memory
        from bzrlib.atomicfile import AtomicFile

        mutter("add store entry %r" % (fileid,))
        if isinstance(f, types.StringTypes):
            content = f
        else:
            content = f.read()

        p = self._path(fileid)
        if os.access(p, os.F_OK) or os.access(p + '.gz', os.F_OK):
            raise BzrError("store %r already contains id %r" % (self._basedir, fileid))

        fn = p
        if compressed:
            fn = fn + '.gz'

        af = AtomicFile(fn, 'wb')
        try:
            if compressed:
                gf = gzip.GzipFile(mode='wb', fileobj=af)
                gf.write(content)
                gf.close()
            else:
                af.write(content)
            af.commit()
        finally:
            af.close()

    # TODO: Guard against the same thing being stored twice, compressed
    # and uncompressed.

    def __contains__(self, fileid):
        p = self._path(fileid)
        return (os.access(p, os.R_OK)
                or os.access(p + '.gz', os.R_OK))

    def __iter__(self):
        for f in os.listdir(self._basedir):
            # TODO: case-insensitive?
            if f.endswith('.gz'):
                yield f[:-3]
            else:
                yield f

    def __len__(self):
        return len(os.listdir(self._basedir))

    def __getitem__(self, fileid):
        """Returns a file reading from a particular entry."""
        p = self._path(fileid)
        try:
            return gzip.GzipFile(p + '.gz', 'rb')
        except IOError, e:
            if e.errno != errno.ENOENT:
                raise

        try:
            return open(p, 'rb')
        except IOError, e:
            if e.errno != errno.ENOENT:
                raise

        raise IndexError(fileid)

    def total_size(self):
        """Return (count, bytes) for the contents of the store."""
        count = 0
        total = 0
        for fid in self:
            count += 1
            p = self._path(fid)
            try:
                total += os.stat(p)[ST_SIZE]
            except OSError:
                total += os.stat(p + '.gz')[ST_SIZE]
        return count, total

    def copy_multi(self, other, ids):
        """Copy texts for ids from other into self.

        Entries already present in self are skipped; the count of copied
        entries is returned.
        """
        pb = bzrlib.ui.ui_factory.progress_bar()
        pb.update('preparing to copy')
        to_copy = [id for id in ids if id not in self]
        if isinstance(other, ImmutableStore):
            return self.copy_multi_immutable(other, to_copy, pb)
        count = 0
        for id in to_copy:
            count += 1
            pb.update('copy', count, len(to_copy))
            self.add(other[id], id)
        assert count == len(to_copy)
        pb.clear()
        return count

    def copy_multi_immutable(self, other, to_copy, pb):
        from shutil import copyfile
        count = 0
        for id in to_copy:
            count += 1
            p = self._path(id)
            other_p = other._path(id)
            try:
                copyfile(other_p, p)
            except IOError, e:
                if e.errno == errno.ENOENT:
                    copyfile(other_p + '.gz', p + '.gz')
                else:
                    raise
            pb.update('copy', count, len(to_copy))
        assert count == len(to_copy)
        pb.clear()
        return count
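

# Illustrative sketch only, not part of the store API: how an ImmutableStore
# is typically filled and then mirrored with copy_multi().  The helper name
# and the entry ids below are made up for the example; the scratch stores
# clean themselves up when garbage collected.
def _example_copy_between_stores():
    from cStringIO import StringIO
    source = ImmutableScratchStore()
    target = ImmutableScratchStore()
    source.add(StringIO('hello world'), 'text-1')
    source.add(StringIO('more text'), 'text-2')
    # Entries already present in the target are skipped, so the call is
    # safe to repeat; the number of newly copied ids is returned.
    copied = target.copy_multi(source, ['text-1', 'text-2'])
    assert copied == 2
    assert target['text-1'].read() == 'hello world'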


class TransportStore(Store):
    """A TransportStore is a Store superclass for Stores that use Transports."""

    def add(self, f, fileid, suffix=None):
        """Add contents of a file into the store.

        f -- A file-like object, or string
        """
        mutter("add store entry %r", fileid)

        names = self._id_to_names(fileid, suffix)
        if self._transport.has_any(names):
            raise BzrError("store %r already contains id %r"
                           % (self._transport.base, fileid))

        # Most of the time, just adding the file will work
        # if we find a time where it fails, (because the dir
        # doesn't exist), then create the dir, and try again
        self._add(names[0], f)

    def _add(self, relpath, f):
        """Actually add the file to the given location.

        This should be overridden by children.
        """
        raise NotImplementedError('children need to implement this function.')

    def _check_fileid(self, fileid):
        if not isinstance(fileid, basestring):
            raise TypeError('Fileids should be a string type: %s %r' % (type(fileid), fileid))
        if '\\' in fileid or '/' in fileid:
            raise ValueError("invalid store id %r" % fileid)

    def _id_to_names(self, fileid, suffix):
        """Return the names in the expected order."""
        if suffix is not None:
            fn = self._relpath(fileid, [suffix])
        else:
            fn = self._relpath(fileid)

        # Offer both the plain and the gzipped form of the name; the
        # preferred form (depending on self._compressed) comes first.
        fn_gz = fn + '.gz'
        if self._compressed:
            return fn_gz, fn
        else:
            return fn, fn_gz

    def has_id(self, fileid, suffix=None):
        """See Store.has_id."""
        return self._transport.has_any(self._id_to_names(fileid, suffix))

    def _get_name(self, fileid, suffix=None):
        """A special check, which returns the name of an existing file.

        This is similar in spirit to 'has_id', but it is designed
        to return information about which file the store has.
        """
        for name in self._id_to_names(fileid, suffix=suffix):
            if self._transport.has(name):
                return name
        return None

    def _get(self, filename):
        """Return a vanilla file stream for clients to read from.

        This is the body of a template method on 'get', and should be
        implemented by subclasses.
        """
        raise NotImplementedError

    def get(self, fileid, suffix=None):
        """See Store.get()."""
        names = self._id_to_names(fileid, suffix)
        for name in names:
            try:
                return self._get(name)
            except errors.NoSuchFile:
                pass
        raise KeyError(fileid)

    def __init__(self, a_transport, prefixed=False, compressed=False):
        assert isinstance(a_transport, transport.Transport)
        super(TransportStore, self).__init__()
        self._transport = a_transport
        self._prefixed = prefixed
        self._compressed = compressed
        self._suffixes = set()

    def _iter_files_recursive(self):
        """Iterate through the files in the transport."""
        for quoted_relpath in self._transport.iter_files_recursive():
            yield urllib.unquote(quoted_relpath)

    def __iter__(self):
        for relpath in self._iter_files_recursive():
            # worst case is one of each suffix.
            name = os.path.basename(relpath)
            if name.endswith('.gz'):
                name = name[:-3]
            skip = False
            for count in range(len(self._suffixes)):
                for suffix in self._suffixes:
                    if name.endswith('.' + suffix):
                        skip = True
            if not skip:
                yield name

    def __len__(self):
        return len(list(self.__iter__()))

    def _relpath(self, fileid, suffixes=None):
        self._check_fileid(fileid)
        if suffixes:
            for suffix in suffixes:
                if not suffix in self._suffixes:
                    raise ValueError("Unregistered suffix %r" % suffix)
                self._check_fileid(suffix)
        else:
            suffixes = []
        if self._prefixed:
            path = [hash_prefix(fileid) + fileid]
        else:
            path = [fileid]
        path.extend(suffixes)
        return transport.urlescape(u'.'.join(path))

    def __repr__(self):
        if self._transport is None:
            return "%s(None)" % (self.__class__.__name__)
        else:
            return "%s(%r)" % (self.__class__.__name__, self._transport.base)

    def listable(self):
        """Return True if this store is able to be listed."""
        return self._transport.listable()

    def register_suffix(self, suffix):
        """Register a suffix as being expected in this store."""
        self._check_fileid(suffix)
        if suffix == 'gz':
            raise ValueError('You cannot register the "gz" suffix.')
        self._suffixes.add(suffix)

    def total_size(self):
        """Return (count, bytes)

        This is the (compressed) size stored on disk, not the size of
        the content."""
        count = 0
        total = 0
        for relpath in self._transport.iter_files_recursive():
            count += 1
            total += self._transport.stat(relpath).st_size

        return count, total
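

# Illustrative sketch of how a concrete child of TransportStore fills in the
# _add/_get template methods.  The real concrete stores (such as TextStore in
# bzrlib.store.text) live in their own modules; this minimal class exists only
# to show the hook points, and it assumes the Transport put()/get() calls used
# below.
class _ExampleRawStore(TransportStore):
    """Store entries as plain files named directly after their ids."""

    def _add(self, relpath, f):
        # relpath has already been checked and escaped by _relpath().
        if isinstance(f, basestring):
            from cStringIO import StringIO
            f = StringIO(f)
        self._transport.put(relpath, f)

    def _get(self, filename):
        # Raises errors.NoSuchFile for a missing name, which get() above
        # translates into a KeyError for the caller.
        return self._transport.get(filename)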


def ImmutableMemoryStore():
    return bzrlib.store.text.TextStore(transport.memory.MemoryTransport())
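

# Illustrative sketch: an in-memory store is convenient in tests because
# nothing touches the filesystem and the contents vanish with the object.
# The function name and the id below are made up for the example.
def _example_memory_store():
    from cStringIO import StringIO
    store = ImmutableMemoryStore()
    store.add(StringIO('some text'), 'an-id')
    assert store.has_id('an-id')
    return store.get('an-id').read()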


class CachedStore(Store):
    """A store that caches data locally, to avoid repeated downloads.

    The precache method should be used to avoid server round-trips for
    documents that are known to be used in series.
    """

    def __init__(self, store, cache_dir):
        super(CachedStore, self).__init__()
        self.source_store = store
        # This clones the source store type with a locally bound
        # transport. FIXME: it assumes a constructor is == cloning.
        # clonable store - it might be nicer to actually have a clone()
        # or something. RBC 20051003
        self.cache_store = store.__class__(LocalTransport(cache_dir))

    def get(self, id):
        mutter("Cache add %s", id)
        if not self.cache_store.has_id(id):
            self.cache_store.add(self.source_store.get(id), id)
        return self.cache_store.get(id)

    def precache(self, id):
        """Copy the content for id into the local cache ahead of use."""
        if not self.cache_store.has_id(id):
            self.cache_store.add(self.source_store.get(id), id)

    def has_id(self, fileid, suffix=None):
        """See Store.has_id."""
        if self.cache_store.has_id(fileid, suffix):
            return True
        if self.source_store.has_id(fileid, suffix):
            # We could asynchronously copy at this time
            return True
        return False
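

# Illustrative sketch: wrapping a slow (for instance remote) store so that
# each entry is fetched at most once.  The wrapped store must be one that can
# be constructed from a single Transport, as the FIXME in __init__ notes; the
# helper name and cache directory are made up for the example.
def _example_cached_store(remote_store, file_id):
    import tempfile
    cache_dir = tempfile.mkdtemp()
    store = CachedStore(remote_store, cache_dir)
    # The first get() pulls the text from remote_store into cache_dir;
    # later calls are served locally without another round-trip.
    return store.get(file_id)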


def copy_all(store_from, store_to):
    """Copy all ids from one store to another."""
    # TODO: Optional progress indicator
    if not store_from.listable():
        raise UnlistableStore(store_from)
    ids = [f for f in store_from]
    mutter('copy_all ids: %r', ids)
    store_to.copy_multi(store_from, ids)
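

# Illustrative sketch: mirroring one store into another.  An unlistable
# source raises UnlistableStore, so stores that cannot enumerate their
# contents have to be copied from an explicit list of ids instead; the
# helper name is made up for the example.
def _example_mirror_store(source, target, known_ids):
    if source.listable():
        copy_all(source, target)
    else:
        target.copy_multi(source, known_ids)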


def hash_prefix(fileid):
    return "%02x/" % (adler32(fileid) & 0xff)


class ImmutableScratchStore(ImmutableStore):
    """Self-destructing test subclass of ImmutableStore.

    The Store only exists for the lifetime of the Python object.
    Obviously you should not put anything precious in it.
    """

    def __init__(self):
        ImmutableStore.__init__(self, tempfile.mkdtemp())

    def __del__(self):
        for f in os.listdir(self._basedir):
            fpath = os.path.join(self._basedir, f)
            # needed on windows, and maybe some other filesystems
            os.chmod(fpath, 0600)
            os.remove(fpath)
        os.rmdir(self._basedir)
        mutter("%r destroyed" % self)