class Store(object):
    """This class represents the abstract storage layout for saving information.

    A store holds files indexed by unique names.  Files can be added,
    but not modified once they are in.  Typically the hash is used as
    the name, or something else known to be unique, such as a UUID.

    >>> st = ImmutableScratchStore()
    >>> st.add(StringIO('hello'), 'aa')

    You are not allowed to add an id that is already present.

    Entries can be retrieved as files, which may then be read.

    >>> st.add(StringIO('goodbye'), '123123')
    >>> st['123123'].read()
    'goodbye'

    :todo: Atomic add by writing to a temporary file and renaming.

    :todo: Perhaps automatically transform to/from XML in a method?
           Would just need to tell the constructor what class to
           use.

    :todo: Even within a simple disk store like this, we could
           gzip the files.  But since many are less than one disk
           block, that might not help a lot.
    """

    def __len__(self):
        raise NotImplementedError('Children should define their length')

    def get(self, fileid, suffix=None):
        """Returns a file reading from a particular entry.

        If suffix is present, retrieve the named suffix for fileid.
        """
        raise NotImplementedError

    def __getitem__(self, fileid):
        """DEPRECATED. Please use .get(fileid) instead."""
        raise NotImplementedError

    #def __contains__(self, fileid):
    #    """Deprecated, please use has_id"""
    #    raise NotImplementedError

    def __iter__(self):
        raise NotImplementedError
    def __init__(self, basedir):
        """ImmutableStore constructor."""
        self._basedir = basedir

    def _path(self, id):
        return os.path.join(self._basedir, id)

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self._basedir)
    def add(self, f, fileid):
        """Add a file object f to the store accessible from the given fileid."""
        raise NotImplementedError('Children of Store must define their method of adding entries.')

    def has_id(self, fileid, suffix=None):
        """Return True or False for the presence of fileid in the store.

        suffix, if present, is a per file suffix, i.e. for digital signature
        data.
        """
        raise NotImplementedError

    def listable(self):
        """Return True if this store is able to be listed."""
        return hasattr(self, "__iter__")
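
    # Illustrative sketch (an assumption, not code from this module): a minimal
    # concrete child only has to supply storage for add/get plus iteration, e.g.
    #
    #   class DictStore(Store):
    #       def __init__(self):
    #           self._entries = {}
    #       def add(self, f, fileid):
    #           self._entries[fileid] = f.read()
    #       def get(self, fileid, suffix=None):
    #           return StringIO(self._entries[fileid])
    #       def __iter__(self):
    #           return iter(self._entries)
    #       def __len__(self):
    #           return len(self._entries)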
    def copy_multi(self, other, ids, pb=None, permit_failure=False):
        """Copy texts for ids from other into self.

        If an id is present in self, it is skipped.  A count of copied
        ids is returned, which may be less than len(ids).

        :param other: Another Store object
        :param ids: A list of entry ids to be copied
        :param pb: A ProgressBar object, if none is given, the default will be created.
        :param permit_failure: Allow missing entries to be ignored
        :return: (n_copied, [failed]) The number of entries copied successfully,
            followed by a list of entries which could not be copied (because they
            were missing)
        """
        if pb is None:
            pb = bzrlib.ui.ui_factory.progress_bar()
        pb.update('preparing to copy')
        ids = list(ids) # get the list for showing a length.
        failed = set()
        count = 0
        for fileid in ids:
            count += 1
            if self.has_id(fileid):
                continue
            try:
                self._copy_one(fileid, None, other, pb)
                for suffix in self._suffixes:
                    try:
                        self._copy_one(fileid, suffix, other, pb)
                    except KeyError:
                        pass
                pb.update('copy', count, len(ids))
            except KeyError:
                if permit_failure:
                    failed.add(fileid)
                else:
                    raise
        assert count == len(ids)
        return count, failed
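
    # Illustrative sketch (assumption; the store paths are hypothetical): copying
    # a set of ids between two stores while tolerating ids missing from the source.
    #
    #   source = ImmutableStore('/tmp/src-store')
    #   dest = ImmutableStore('/tmp/dst-store')
    #   copied, failed = dest.copy_multi(source, ['aa', '123123'],
    #                                    permit_failure=True)
    #   # 'failed' lists the ids that could not be copied.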
    def _copy_one(self, fileid, suffix, other, pb):
        """Most generic copy-one object routine.

        Subclasses can override this to provide an optimised
        copy between their own instances.  Such overridden routines
        should call this if they have no optimised facility for a
        specific 'other'.
        """
        mutter('Store._copy_one: %r', fileid)
        f = other.get(fileid, suffix)
        self.add(f, fileid, suffix)
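
    # Illustrative sketch (assumption, not from this module): an optimised
    # override should still fall back to the generic path for unknown stores.
    #
    #   class MyStore(Store):
    #       def _copy_one(self, fileid, suffix, other, pb):
    #           if not isinstance(other, MyStore):
    #               return Store._copy_one(self, fileid, suffix, other, pb)
    #           # ... fast same-class copy would go here ...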
class TransportStore(Store):
    """A TransportStore is a Store base class for Stores that use Transports."""

    def add(self, f, fileid, suffix=None):
        """Add contents of a file into the store.

        f -- A file-like object, or string
        """
        mutter("add store entry %r", fileid)

        names = self._id_to_names(fileid, suffix)
        if self._transport.has_any(names):
            raise BzrError("store %r already contains id %r"
                           % (self._transport.base, fileid))

        # Most of the time, just adding the file will work.
        # If it fails (because the directory doesn't exist),
        # create the directory and try again.
        self._add(names[0], f)
    def _add(self, relpath, f):
        """Actually add the file to the given location.

        This should be overridden by children.
        """
        raise NotImplementedError('children need to implement this function.')

    def _check_fileid(self, fileid):
        if not isinstance(fileid, basestring):
            raise TypeError('Fileids should be a string type: %s %r' % (type(fileid), fileid))
        if '\\' in fileid or '/' in fileid:
            raise ValueError("invalid store id %r" % fileid)
    def _id_to_names(self, fileid, suffix):
        """Return the names in the expected order"""
        if suffix is not None:
            fn = self._relpath(fileid, [suffix])
        else:
            fn = self._relpath(fileid)
        fn_gz = fn + '.gz'
        if self._compressed:
            return fn_gz, fn
        return fn, fn_gz
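
    # Illustrative sketch (assumption): names come back most-preferred first,
    # so an unprefixed, compressed store is checked for '.gz' first, roughly:
    #
    #   store._id_to_names('some-id', None)     # -> ('some-id.gz', 'some-id')
    #   store._id_to_names('some-id', 'sig')    # -> ('some-id.sig.gz', 'some-id.sig')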
    def has_id(self, fileid, suffix=None):
        """See Store.has_id."""
        return self._transport.has_any(self._id_to_names(fileid, suffix))
    def _get_name(self, fileid, suffix=None):
        """A special check, which returns the name of an existing file.

        This is similar in spirit to 'has_id', but it is designed
        to return information about which file the store has.
        """
        for name in self._id_to_names(fileid, suffix=suffix):
            if self._transport.has(name):
                return name
        return None
    def _get(self, filename):
        """Return a vanilla file stream for clients to read from.

        This is the body of a template method on 'get', and should be
        implemented by subclasses.
        """
        raise NotImplementedError

    def get(self, fileid, suffix=None):
        """See Store.get()."""
        names = self._id_to_names(fileid, suffix)
        for name in names:
            try:
                return self._get(name)
            except errors.NoSuchFile:
                pass
        raise KeyError(fileid)
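
    # Illustrative sketch (assumption, not the real subclasses): a concrete
    # child only needs to fill in the _add/_get template methods, e.g.
    #
    #   class PlainStore(TransportStore):
    #       def _add(self, fn, f):
    #           self._transport.put(fn, f)
    #       def _get(self, filename):
    #           return self._transport.get(filename)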
    def __init__(self, a_transport, prefixed=False, compressed=False):
        assert isinstance(a_transport, transport.Transport)
        super(TransportStore, self).__init__()
        self._transport = a_transport
        self._prefixed = prefixed
        self._compressed = compressed
        self._suffixes = set()
    def _iter_files_recursive(self):
        """Iterate through the files in the transport."""
        for quoted_relpath in self._transport.iter_files_recursive():
            yield urllib.unquote(quoted_relpath)
    def add(self, f, fileid):
        """Add contents of a file into the store.

        :param f: An open file, or file-like object."""
        # FIXME: Only works on smallish files
        # TODO: Can be optimized by copying at the same time as
        mutter("add store entry %r" % (fileid))
        if isinstance(f, types.StringTypes):
            # accept a plain string as well as a file-like object
            f = StringIO(f)
        if fileid not in self:
            filename = self._path(fileid)
            outf = file(filename, 'wb')
            outf.write(f.read())
            outf.close()
            osutils.make_readonly(filename)

    def __contains__(self, fileid):
        return os.access(self._path(fileid), os.R_OK)
    def __iter__(self):
        for relpath in self._iter_files_recursive():
            # worst case is one of each suffix.
            name = os.path.basename(relpath)
            if name.endswith('.gz'):
                name = name[:-3]
            skip = False
            for count in range(len(self._suffixes)):
                for suffix in self._suffixes:
                    if name.endswith('.' + suffix):
                        skip = True
            if not skip:
                yield name

    def __len__(self):
        return len(list(self.__iter__()))
    def _relpath(self, fileid, suffixes=None):
        self._check_fileid(fileid)
        if suffixes:
            for suffix in suffixes:
                if not suffix in self._suffixes:
                    raise ValueError("Unregistered suffix %r" % suffix)
                self._check_fileid(suffix)
        else:
            suffixes = []
        if self._prefixed:
            path = [hash_prefix(fileid) + fileid]
        else:
            path = [fileid]
        path.extend(suffixes)
        return transport.urlescape(u'.'.join(path))
    def __repr__(self):
        if self._transport is None:
            return "%s(None)" % (self.__class__.__name__)
        else:
            return "%s(%r)" % (self.__class__.__name__, self._transport.base)

    def listable(self):
        """Return True if this store is able to be listed."""
        return self._transport.listable()

    def register_suffix(self, suffix):
        """Register a suffix as being expected in this store."""
        self._check_fileid(suffix)
        if suffix == 'gz':
            raise ValueError('You cannot register the "gz" suffix.')
        self._suffixes.add(suffix)
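
    # Illustrative sketch (assumption): registering a per-file suffix, such as
    # one used for detached signature data.
    #
    #   store.register_suffix('sig')
    #   store.add(sig_file, 'some-id', 'sig')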
    def total_size(self):
        """Return (count, bytes)

        This is the (compressed) size stored on disk, not the size of
        the content."""
        count = 0
        total = 0
        for relpath in self._transport.iter_files_recursive():
            count += 1
            total += self._transport.stat(relpath).st_size
        return count, total


def ImmutableMemoryStore():
    return bzrlib.store.text.TextStore(transport.memory.MemoryTransport())
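
# Illustrative sketch (assumption): an in-memory store is convenient in tests.
#
#   store = ImmutableMemoryStore()
#   store.add(StringIO('hello'), 'an-id')
#   store.get('an-id').read()      # -> 'hello' (roughly; the text store may gzip on disk)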
class ImmutableStore(Store):

    def __iter__(self):
        return iter(os.listdir(self._basedir))

    def __getitem__(self, fileid):
        """Returns a file reading from a particular entry."""
        return file(self._path(fileid), 'rb')

    def delete_all(self):
        for fileid in self:
            self.delete(fileid)

    def delete(self, fileid):
        """Remove nominated store entry.

        Most stores will be add-only."""
        filename = self._path(fileid)
        ## osutils.make_writable(filename)
        os.remove(filename)

    def destroy(self):
        """Remove store; only allowed if it is empty."""
        os.rmdir(self._basedir)
        mutter("%r destroyed" % self)


class ImmutableScratchStore(ImmutableStore):
    """Self-destructing test subclass of ImmutableStore.

    The Store only exists for the lifetime of the Python object.
    Obviously you should not put anything precious in it.
    """

    def __init__(self):
        ImmutableStore.__init__(self, tempfile.mkdtemp())


class CachedStore(Store):
    """A store that caches data locally, to avoid repeated downloads.

    The precache method should be used to avoid server round-trips for
    every piece of data.
    """

    def __init__(self, store, cache_dir):
        super(CachedStore, self).__init__()
        self.source_store = store
        # This clones the source store type with a locally bound
        # transport. FIXME: it assumes a constructor is == cloning.
        # clonable store - it might be nicer to actually have a clone()
        # or something. RBC 20051003
        self.cache_store = store.__class__(LocalTransport(cache_dir))

    def get(self, id):
        mutter("Cache add %s", id)
        if id not in self.cache_store:
            self.cache_store.add(self.source_store.get(id), id)
        return self.cache_store.get(id)

    def has_id(self, fileid, suffix=None):
        """See Store.has_id."""
        if self.cache_store.has_id(fileid, suffix):
            return True
        if self.source_store.has_id(fileid, suffix):
            # We could asynchronously copy at this time
            return True
        return False
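
# Illustrative sketch (assumption; the URL and cache path are hypothetical):
# wrapping a remote store so repeated reads hit the local cache directory.
#
#   remote = bzrlib.store.text.TextStore(
#       transport.get_transport('http://example.com/store/'))
#   cached = CachedStore(remote, '/tmp/store-cache')
#   cached.get('some-id').read()   # first call downloads; later calls are local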
def copy_all(store_from, store_to):
    """Copy all ids from one store to another."""
    # TODO: Optional progress indicator
    if not store_from.listable():
        raise UnlistableStore(store_from)
    ids = [f for f in store_from]
    mutter('copy_all ids: %r', ids)
    store_to.copy_multi(store_from, ids)


def hash_prefix(fileid):
    return "%02x/" % (adler32(fileid) & 0xff)
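
# Illustrative sketch (assumption; the paths are hypothetical): mirroring one
# listable store into another, and what hash_prefix contributes to prefixed layouts.
#
#   copy_all(ImmutableStore('/tmp/src-store'), ImmutableStore('/tmp/dst-store'))
#
#   hash_prefix('aa')   # -> 'c3/' (low byte of adler32('aa'), formatted as hex)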