from cStringIO import StringIO

import bzrlib.transport
from bzrlib.trace import mutter
from bzrlib.transport.local import LocalTransport


class StoreError(Exception):
    pass

class Store(object):
    """This class represents the abstract storage layout for saving information.

    Files can be added, but not modified once they are in.  Typically
    the hash is used as the name, or something else known to be unique,
    such as a UUID.
    """

    def __len__(self):
        raise NotImplementedError('Children should define their length')

    def copy_multi(self, other, ids, pb=None, permit_failure=False):
        """Copy texts for the given ids from other into self."""
        # ...
        return len(to_copy), failed
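

# Illustrative sketch, not part of the original module: copy_multi() reports
# both how many ids were copied and which ids failed, so callers can log the
# outcome.  'source', 'target' and 'ids' are placeholders for any two Store
# instances and a sequence of file ids.
def _copy_multi_example(source, target, ids):
    copied, failed = target.copy_multi(source, ids, permit_failure=True)
    mutter("copied %d texts, %d failures" % (copied, len(failed)))
    return failed

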
class TransportStore(Store):
    """A TransportStore is a Store superclass for Stores that use Transports."""

    _max_buffered_requests = 10

    def __init__(self, transport):
        assert isinstance(transport, bzrlib.transport.Transport)
        super(TransportStore, self).__init__()
        self._transport = transport

    def __repr__(self):
        if self._transport is None:
            return "%s(None)" % (self.__class__.__name__)
        else:
            return "%s(%r)" % (self.__class__.__name__, self._transport.base)
class ImmutableMemoryStore(Store):
    """A memory only store."""

    def __contains__(self, fileid):
        return self._contents.has_key(fileid)

    def __init__(self):
        super(ImmutableMemoryStore, self).__init__()
        self._contents = {}

    def add(self, stream, fileid, compressed=True):
        if self._contents.has_key(fileid):
            raise StoreError("fileid %s already in the store" % fileid)
        self._contents[fileid] = stream.read()

    def __getitem__(self, fileid):
        """Returns a file reading from a particular entry."""
        if not self._contents.has_key(fileid):
            raise KeyError(fileid)
        return StringIO(self._contents[fileid])

    def _item_size(self, fileid):
        return len(self._contents[fileid])

    def __iter__(self):
        return iter(self._contents.keys())

    def total_size(self):
        count = 0
        result = 0
        for fileid in self:
            count += 1
            result += self._item_size(fileid)
        return count, result
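

# A short usage sketch, not part of the original module; the id and contents
# are placeholders.  Entries go in once under a unique id and come back as
# file-like objects; adding the same id twice raises StoreError.
def _memory_store_example():
    store = ImmutableMemoryStore()
    store.add(StringIO('some text'), 'example-id')
    assert 'example-id' in store
    assert store['example-id'].read() == 'some text'
    try:
        store.add(StringIO('other text'), 'example-id')
    except StoreError:
        pass   # write-once: a second add under the same id is refused

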
class CachedStore(Store):
    """A store that caches data locally, to avoid repeated downloads.

    The prefetch() method should be used to avoid server round-trips for
    immutable files.
    """

    def __init__(self, store, cache_dir):
        super(CachedStore, self).__init__()
        self.source_store = store
        # This clones the source store type with a locally bound transport.
        # FIXME: it assumes that constructing a store is equivalent to
        # cloning it - it might be nicer to actually have a clone() method
        # on clonable stores.  RBC 20051003
        self.cache_store = store.__class__(LocalTransport(cache_dir))

    def __getitem__(self, id):
        mutter("Cache add %s" % id)
        if id not in self.cache_store:
            self.cache_store.add(self.source_store[id], id)
        return self.cache_store[id]

    def __contains__(self, fileid):
        if fileid in self.cache_store:
            return True
        if fileid in self.source_store:
            # We could copy at this time
            return True
        return False

    def get(self, fileids, permit_failure=False, pb=None):
        fileids = list(fileids)
        hasids = self.cache_store.has(fileids)
        # Copy anything the cache is missing from the source store, then
        # serve the whole request from the cache.
        needs = set()
        for has, fileid in zip(hasids, fileids):
            if not has:
                needs.add(fileid)
        self.cache_store.copy_multi(self.source_store, needs,
                permit_failure=permit_failure)
        return self.cache_store.get(fileids,
                permit_failure=permit_failure, pb=pb)

    def prefetch(self, ids):
        """Copy a series of ids into the cache before they are used.

        For remote stores that support pipelining or async downloads, this can
        increase speed considerably.

        Failures while prefetching are ignored.
        """
        mutter("Prefetch of ids %s" % ",".join(ids))
        self.cache_store.copy_multi(self.source_store, ids,
                                    permit_failure=True)
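

# Usage sketch, not part of the original module: wrap a remote store in a
# CachedStore so repeated reads are served from a local cache directory, and
# warm the cache up front with prefetch().  'remote_store', 'ids' and
# '/path/to/cache' are placeholders.
def _cached_store_example(remote_store, ids):
    cached = CachedStore(remote_store, '/path/to/cache')
    cached.prefetch(ids)          # failures while prefetching are ignored
    return [cached[fileid].read() for fileid in ids]
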


def copy_all(store_from, store_to):
    """Copy all ids from one store to another."""
    # TODO: Optional progress indicator