from cStringIO import StringIO
import os
import tempfile
import types

import bzrlib
import bzrlib.transport
import bzrlib.ui
from bzrlib import osutils
from bzrlib.errors import TransportNotPossible, UnlistableStore
from bzrlib.trace import mutter
from bzrlib.transport.local import LocalTransport


class StoreError(Exception):
    """Raised when an entry cannot be added to or fetched from a store."""


class Store(object):
    """This class represents the abstract storage layout for saving information.

    Files can be added, but not modified once they are in.  Typically
    the hash is used as the name, or something else known to be unique,
    such as a UUID.
    """

    # Used by _do_copy() when buffering fetches from another store.
    _max_buffered_requests = 10

    def __len__(self):
        raise NotImplementedError('Children should define their length')

    def __getitem__(self, fileid):
        """Returns a file reading from a particular entry."""
        raise NotImplementedError

    def __contains__(self, fileid):
        raise NotImplementedError

    def __iter__(self):
        raise NotImplementedError

    def add(self, f, fileid):
        """Add a file object f to the store accessible from the given fileid"""
        raise NotImplementedError('Children of Store must define their method of adding entries.')

    def add_multi(self, entries):
        """Add a series of file-like or string objects to the store with the given
        identities.

        :param entries: A list of tuples of file,id pairs [(file1, id1), (file2, id2), ...]
                        This could also be a generator yielding (file,id) pairs.
        """
        for f, fileid in entries:
            self.add(f, fileid)
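
    # Usage sketch (illustrative only; the ids are hypothetical):
    # add_multi() accepts any iterable of (file, id) pairs, so a
    # generator works as well as a list.
    #
    #   st = ImmutableScratchStore()
    #   st.add_multi([(StringIO('one'), 'id-1'), (StringIO('two'), 'id-2')])
    #   st.add_multi((StringIO(s), 'id-%s' % s) for s in ['a', 'b'])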

    def has(self, fileids):
        """Return True/False for each entry in fileids.

        :param fileids: A List or generator yielding file ids.
        :return: A generator or list returning True/False for each entry.
        """
        for fileid in fileids:
            yield fileid in self
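
    # Usage sketch (illustrative only; the ids are hypothetical):
    # has() yields one boolean per requested id, in order.
    #
    #   st = ImmutableScratchStore()
    #   st.add(StringIO('hello'), 'aa')
    #   list(st.has(['aa', 'missing']))   # -> [True, False]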

    def get(self, fileids, permit_failure=False, pb=None):
        """Return a set of files, one for each requested entry.

        :param permit_failure: If true, return None for entries which do not
                               exist.
        :return: A list or generator of file-like objects, one for each id.
        """
        for fileid in fileids:
            try:
                yield self[fileid]
            except (KeyError, IndexError, IOError):
                # Subclasses signal a missing entry in slightly different ways.
                if permit_failure:
                    yield None
                else:
                    raise
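
    # Usage sketch (illustrative only; the ids are hypothetical): with
    # permit_failure=True, get() yields None for a missing entry instead
    # of raising.
    #
    #   st = ImmutableScratchStore()
    #   st.add(StringIO('hello'), 'aa')
    #   [f and f.read() for f in st.get(['aa', 'gone'], permit_failure=True)]
    #   # -> ['hello', None]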

    def copy_multi(self, other, ids, pb=None, permit_failure=False):
        """Copy texts for ids from other into self.

        If an id is present in self, it is skipped.  A count of copied
        ids is returned, which may be less than len(ids).

        :param other: Another Store object
        :param ids: A list of entry ids to be copied
        :param pb: A ProgressBar object, if none is given, the default will be created.
        :param permit_failure: Allow missing entries to be ignored
        :return: (n_copied, [failed]) The number of entries copied successfully,
            followed by a list of entries which could not be copied (because they
            were missing)
        """
        if pb is None:
            pb = bzrlib.ui.ui_factory.progress_bar()

        # XXX: Is there any reason why we couldn't make this accept a generator
        # and build a list as it finds things to copy?
        ids = list(ids)  # Make sure we don't have a generator, since we iterate 2 times
        pb.update('preparing to copy')
        to_copy = []
        for file_id, has in zip(ids, self.has(ids)):
            if not has:
                to_copy.append(file_id)
        return self._do_copy(other, to_copy, pb, permit_failure=permit_failure)
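
    # Usage sketch (illustrative only; the ids are hypothetical): only the
    # entries missing from the destination are fetched, so re-running a
    # copy is cheap.
    #
    #   src, dst = ImmutableScratchStore(), ImmutableScratchStore()
    #   src.add(StringIO('a'), 'aa'); src.add(StringIO('b'), 'bb')
    #   dst.add(StringIO('a'), 'aa')
    #   dst.copy_multi(src, ['aa', 'bb'])   # -> (1, set()): only 'bb' copied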

    def _do_copy(self, other, to_copy, pb, permit_failure=False):
        """This is the standard copying mechanism, just get them one at
        a time from remote, and store them locally.

        :param other: Another Store object
        :param to_copy: A list of entry ids to copy
        :param pb: A ProgressBar object to display completion status.
        :param permit_failure: Allow missing entries to be ignored
        :return: (n_copied, [failed])
            The number of entries copied, and a list of failed entries.
        """
        # This should be updated to use add_multi() rather than
        # the current methods of buffering requests.
        # One question, is it faster to queue up 1-10 and then copy 1-10
        # then queue up 11-20, copy 11-20
        # or to queue up 1-10, copy 1, queue 11, copy 2, etc?
        # sort of pipeline versus batch.

        # We can't use self._transport.copy_to because we don't know
        # whether the local tree is in the same format as other
        failed = set()

        def buffer_requests():
            count = 0
            buffered_requests = []
            for fileid in to_copy:
                try:
                    f = other[fileid]
                except (KeyError, IndexError, IOError):
                    # Subclasses signal a missing entry in slightly
                    # different ways.
                    if permit_failure:
                        failed.add(fileid)
                        continue
                    raise
                buffered_requests.append((f, fileid))
                if len(buffered_requests) > self._max_buffered_requests:
                    yield buffered_requests.pop(0)
                    count += 1
                    pb.update('copy', count, len(to_copy))

            for req in buffered_requests:
                yield req
                count += 1
                pb.update('copy', count, len(to_copy))

            assert count == len(to_copy) - len(failed)

        self.add_multi(buffer_requests())
        pb.clear()
        return len(to_copy), failed


class TransportStore(Store):
    """A TransportStore is a Store superclass for Stores that use Transports."""

    _max_buffered_requests = 10

    def __init__(self, transport):
        assert isinstance(transport, bzrlib.transport.Transport)
        super(TransportStore, self).__init__()
        self._transport = transport

    def __repr__(self):
        if self._transport is None:
            return "%s(None)" % (self.__class__.__name__)
        else:
            return "%s(%r)" % (self.__class__.__name__, self._transport.base)


class ImmutableStore(Store):
    """Store that holds files indexed by unique names.

    >>> st = ImmutableScratchStore()
    >>> st.add(StringIO('hello'), 'aa')

    You are not allowed to add an id that is already present.

    Entries can be retrieved as files, which may then be read.

    >>> st.add(StringIO('goodbye'), '123123')
    >>> st['123123'].read()
    'goodbye'

    :todo: Atomic add by writing to a temporary file and renaming.

    :todo: Perhaps automatically transform to/from XML in a method?
           Would just need to tell the constructor what class to
           use.

    :todo: Even within a simple disk store like this, we could
           gzip the files.  But since many are less than one disk
           block, that might not help a lot.
    """

    def __init__(self, basedir):
        """ImmutableStore constructor."""
        self._basedir = basedir

    def _path(self, fileid):
        return os.path.join(self._basedir, fileid)

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self._basedir)

    def add(self, f, fileid):
        """Add contents of a file into the store.

        :param f: An open file, or file-like object."""
        # FIXME: Only works on smallish files
        # TODO: Can be optimized by copying at the same time as
        #       computing the checksum.
        mutter("add store entry %r" % (fileid,))
        if isinstance(f, types.StringTypes):
            content = f
        else:
            content = f.read()
        if fileid not in self:
            filename = self._path(fileid)
            outf = file(filename, 'wb')
            try:
                outf.write(content)
            finally:
                outf.close()
            osutils.make_readonly(filename)
        else:
            raise StoreError("store %r already contains id %r"
                             % (self._basedir, fileid))

    def __contains__(self, fileid):
        return os.access(self._path(fileid), os.R_OK)

    def __iter__(self):
        return iter(os.listdir(self._basedir))

    def __getitem__(self, fileid):
        """Returns a file reading from a particular entry."""
        return file(self._path(fileid), 'rb')

    def delete_all(self):
        for fileid in self:
            self.delete(fileid)

    def delete(self, fileid):
        """Remove nominated store entry.

        Most stores will be add-only."""
        filename = self._path(fileid)
        ## osutils.make_writable(filename)
        os.remove(filename)

    def destroy(self):
        """Remove store; only allowed if it is empty."""
        os.rmdir(self._basedir)
        mutter("%r destroyed" % self)


class ImmutableScratchStore(ImmutableStore):
    """Self-destructing test subclass of ImmutableStore.

    The Store only exists for the lifetime of the Python object.
    Obviously you should not put anything precious in it.
    """

    def __init__(self):
        ImmutableStore.__init__(self, tempfile.mkdtemp())

    def __del__(self):
        # Self-destruct when the object goes away, as the docstring promises.
        self.delete_all()
        self.destroy()


class ImmutableMemoryStore(Store):
    """A memory only store."""

    def __init__(self):
        super(ImmutableMemoryStore, self).__init__()
        self._contents = {}

    def __contains__(self, fileid):
        return fileid in self._contents

    def add(self, stream, fileid, compressed=True):
        if fileid in self._contents:
            raise StoreError("fileid %s already in the store" % fileid)
        self._contents[fileid] = stream.read()

    def __getitem__(self, fileid):
        """Returns a file reading from a particular entry."""
        if fileid not in self._contents:
            raise IndexError(fileid)
        return StringIO(self._contents[fileid])

    def _item_size(self, fileid):
        return len(self._contents[fileid])

    def __iter__(self):
        return iter(self._contents.keys())

    def total_size(self):
        count = 0
        result = 0
        for fileid in self:
            count += 1
            result += self._item_size(fileid)
        return count, result
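
    # Usage sketch (illustrative only; the id is hypothetical):
    # total_size() reports (entry_count, total_bytes).
    #
    #   st = ImmutableMemoryStore()
    #   st.add(StringIO('hello'), 'aa')
    #   st['aa'].read()    # -> 'hello'
    #   st.total_size()    # -> (1, 5)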


class CachedStore(Store):
    """A store that caches data locally, to avoid repeated downloads.

    The prefetch method should be used to avoid server round-trips for
    every piece of data.
    """

    def __init__(self, store, cache_dir):
        super(CachedStore, self).__init__()
        self.source_store = store
        # This clones the source store type with a locally bound
        # transport. FIXME: it assumes a constructor is == cloning for a
        # clonable store - it might be nicer to actually have a clone()
        # or something. RBC 20051003
        self.cache_store = store.__class__(LocalTransport(cache_dir))

    def __getitem__(self, id):
        mutter("Cache add %s" % id)
        if id not in self.cache_store:
            self.cache_store.add(self.source_store[id], id)
        return self.cache_store[id]

    def __contains__(self, fileid):
        if fileid in self.cache_store:
            return True
        if fileid in self.source_store:
            # We could copy at this time
            return True
        return False

    def get(self, fileids, permit_failure=False, pb=None):
        fileids = list(fileids)
        hasids = self.cache_store.has(fileids)
        needs = set()
        for has, fileid in zip(hasids, fileids):
            if not has:
                needs.add(fileid)
        if needs:
            self.cache_store.copy_multi(self.source_store, needs,
                                        permit_failure=permit_failure)
        return self.cache_store.get(fileids,
                                    permit_failure=permit_failure, pb=pb)

    def prefetch(self, ids):
        """Copy a series of ids into the cache, before they are used.

        For remote stores that support pipelining or async downloads, this can
        increase speed considerably.

        Failures while prefetching are ignored.
        """
        mutter("Prefetch of ids %s" % ",".join(ids))
        self.cache_store.copy_multi(self.source_store, ids,
                                    permit_failure=True)
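
    # Usage sketch (illustrative only; the store, cache directory and ids
    # are hypothetical, and the source is assumed to be clonable via its
    # constructor as __init__ above requires):
    #
    #   cached = CachedStore(remote_store, '/tmp/bzr-cache')
    #   cached.prefetch(['id-1', 'id-2'])   # warm the cache; failures ignored
    #   text = cached['id-1'].read()        # now served from the local cache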


def copy_all(store_from, store_to):
    """Copy all ids from one store to another."""
    # TODO: Optional progress indicator
    if not hasattr(store_from, "__iter__"):
        raise UnlistableStore(store_from)
    try:
        ids = [f for f in store_from]
    except (NotImplementedError, TransportNotPossible):
        raise UnlistableStore(store_from)
    store_to.copy_multi(store_from, ids)
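
# Usage sketch (illustrative only; the paths are hypothetical): entries
# already present in the destination are skipped by copy_multi(), so
# copy_all() is safe to re-run.
#
#   src = ImmutableStore('/srv/store')
#   dst = ImmutableStore('/srv/mirror')
#   copy_all(src, dst)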