"""This module defines the abstract storage layout for saving information.
"""
36
class ImmutableStore(object):
    """Store that holds files indexed by unique names.

    Files can be added, but not modified once they are in.  Typically
    the hash is used as the name, or something else known to be unique,
    such as a UUID.

    >>> st = ImmutableScratchStore()

    >>> st.add(StringIO('hello'), 'aa')
    >>> 'aa' in st
    True
    >>> 'foo' in st
    False

    You are not allowed to add an id that is already present.

    Entries can be retrieved as files, which may then be read.

    >>> st.add(StringIO('goodbye'), '123123')
    >>> st['123123'].read()
    'goodbye'

    TODO: Atomic add by writing to a temporary file and renaming.

    In bzr 0.0.5 and earlier, files within the store were marked
    readonly on disk.  This is no longer done but existing stores need
    to be accomodated.
    """
55
raise NotImplementedError('Children should define their length')
57
def get(self, file_id, suffix=None):
58
"""Returns a file reading from a particular entry.
60
If suffix is present, retrieve the named suffix for file_id.
62
raise NotImplementedError
64
def __getitem__(self, fileid):
65
"""DEPRECATED. Please use .get(file_id) instead."""
66
raise NotImplementedError
68
#def __contains__(self, fileid):
69
# """Deprecated, please use has_id"""
70
# raise NotImplementedError
73
raise NotImplementedError
75
def add(self, f, fileid):
76
"""Add a file object f to the store accessible from the given fileid"""
77
raise NotImplementedError('Children of Store must define their method of adding entries.')
79
def has_id(self, file_id, suffix=None):
80
"""Return True or false for the presence of file_id in the store.
82
suffix, if present, is a per file suffix, i.e. for digital signature
84
raise NotImplementedError
87
"""Return True if this store is able to be listed."""
88
return hasattr(self, "__iter__")
90
def copy_multi(self, other, ids, pb=None, permit_failure=False):
66
def __init__(self, basedir):
67
self._basedir = basedir
70
if '\\' in id or '/' in id:
71
raise ValueError("invalid store id %r" % id)
72
return os.path.join(self._basedir, id)
75
return "%s(%r)" % (self.__class__.__name__, self._basedir)
77
def add(self, f, fileid, compressed=True):
78
"""Add contents of a file into the store.
80
f -- An open file, or file-like object."""
81
# FIXME: Only works on files that will fit in memory
83
from bzrlib.atomicfile import AtomicFile
85
mutter("add store entry %r" % (fileid))
86
if isinstance(f, types.StringTypes):
91
p = self._path(fileid)
92
if os.access(p, os.F_OK) or os.access(p + '.gz', os.F_OK):
93
raise BzrError("store %r already contains id %r" % (self._basedir, fileid))
99
af = AtomicFile(fn, 'wb')
102
gf = gzip.GzipFile(mode='wb', fileobj=af)
112
# NOTE(review): the following region is a corrupted interleave of (at least)
# two obsolete revisions of copy_multi / copy_multi_immutable, with bare
# revision line-numbers injected between content lines.  It is not valid
# Python and is preserved verbatim only so no content is lost; recover the
# intended implementation from version-control history before editing.
def copy_multi(self, other, ids):
91
113
"""Copy texts for ids from other into self.
93
115
If an id is present in self, it is skipped. A count of copied
94
116
ids is returned, which may be less than len(ids).
96
:param other: Another Store object
97
:param ids: A list of entry ids to be copied
98
:param pb: A ProgressBar object, if none is given, the default will be created.
99
:param permit_failure: Allow missing entries to be ignored
100
:return: (n_copied, [failed]) The number of entries copied successfully,
101
followed by a list of entries which could not be copied (because they
105
pb = bzrlib.ui.ui_factory.progress_bar()
118
from bzrlib.progress import ProgressBar
106
120
pb.update('preparing to copy')
121
to_copy = [id for id in ids if id not in self]
122
if isinstance(other, ImmutableStore):
123
return self.copy_multi_immutable(other, to_copy, pb)
109
ids = list(ids) # get the list for showing a length.
112
if self.has_id(fileid):
127
pb.update('copy', count, len(to_copy))
128
self.add(other[id], id)
129
assert count == len(to_copy)
134
def copy_multi_immutable(self, other, to_copy, pb):
135
from shutil import copyfile
139
other_p = other._path(id)
115
self._copy_one(fileid, None, other, pb)
116
for suffix in self._suffixes:
118
self._copy_one(fileid, suffix, other, pb)
121
pb.update('copy', count, len(ids))
143
if e.errno == errno.ENOENT:
144
copyfile(other_p+".gz", p+".gz")
127
assert count == len(ids)
149
pb.update('copy', count, len(to_copy))
150
assert count == len(to_copy)
131
def _copy_one(self, fileid, suffix, other, pb):
132
"""Most generic copy-one object routine.
134
Subclasses can override this to provide an optimised
135
copy between their own instances. Such overriden routines
136
should call this if they have no optimised facility for a
139
f = other.get(fileid, suffix)
140
self.add(f, fileid, suffix)
143
class TransportStore(Store):
    """A TransportStore is a Store superclass for Stores that use Transports."""
146
def add(self, f, fileid, suffix=None):
147
"""Add contents of a file into the store.
149
f -- A file-like object, or string
151
mutter("add store entry %r" % (fileid))
153
if suffix is not None:
154
fn = self._relpath(fileid, [suffix])
156
fn = self._relpath(fileid)
157
if self._transport.has(fn):
158
raise BzrError("store %r already contains id %r" % (self._transport.base, fileid))
162
self._transport.mkdir(hash_prefix(fileid)[:-1])
163
except errors.FileExists:
168
def _check_fileid(self, fileid):
169
if not isinstance(fileid, basestring):
170
raise TypeError('Fileids should be a string type: %s %r' % (type(fileid), fileid))
171
if '\\' in fileid or '/' in fileid:
172
raise ValueError("invalid store id %r" % fileid)
174
def has_id(self, fileid, suffix=None):
175
"""See Store.has_id."""
176
if suffix is not None:
177
fn = self._relpath(fileid, [suffix])
179
fn = self._relpath(fileid)
180
return self._transport.has(fn)
182
def _get(self, filename):
183
"""Return an vanilla file stream for clients to read from.
185
This is the body of a template method on 'get', and should be
186
implemented by subclasses.
188
raise NotImplementedError
190
def get(self, fileid, suffix=None):
191
"""See Store.get()."""
192
if suffix is None or suffix == 'gz':
193
fn = self._relpath(fileid)
195
fn = self._relpath(fileid, [suffix])
198
except errors.NoSuchFile:
199
raise KeyError(fileid)
201
def __init__(self, a_transport, prefixed=False):
202
assert isinstance(a_transport, transport.Transport)
203
super(TransportStore, self).__init__()
204
self._transport = a_transport
205
self._prefixed = prefixed
206
# conflating the .gz extension and user suffixes was a mistake.
207
# RBC 20051017 - TODO SOON, separate them again.
208
self._suffixes = set()
210
def _iter_files_recursive(self):
211
"""Iterate through the files in the transport."""
212
for quoted_relpath in self._transport.iter_files_recursive():
213
yield urllib.unquote(quoted_relpath)
155
def __contains__(self, fileid):
157
p = self._path(fileid)
158
return (os.access(p, os.R_OK)
159
or os.access(p + '.gz', os.R_OK))
161
# TODO: Guard against the same thing being stored twice, compressed and uncompresse
215
163
def __iter__(self):
216
for relpath in self._iter_files_recursive():
217
# worst case is one of each suffix.
218
name = os.path.basename(relpath)
219
if name.endswith('.gz'):
222
for count in range(len(self._suffixes)):
223
for suffix in self._suffixes:
224
if name.endswith('.' + suffix):
164
for f in os.listdir(self._basedir):
166
# TODO: case-insensitive?
229
171
def __len__(self):
230
return len(list(self.__iter__()))
232
def _relpath(self, fileid, suffixes=[]):
233
self._check_fileid(fileid)
234
for suffix in suffixes:
235
if not suffix in self._suffixes:
236
raise ValueError("Unregistered suffix %r" % suffix)
237
self._check_fileid(suffix)
239
path = [hash_prefix(fileid) + fileid]
242
path.extend(suffixes)
243
return transport.urlescape('.'.join(path))
246
if self._transport is None:
247
return "%s(None)" % (self.__class__.__name__)
249
return "%s(%r)" % (self.__class__.__name__, self._transport.base)
254
"""Return True if this store is able to be listed."""
255
return self._transport.listable()
257
def register_suffix(self, suffix):
258
"""Register a suffix as being expected in this store."""
259
self._check_fileid(suffix)
260
self._suffixes.add(suffix)
172
return len(os.listdir(self._basedir))
174
def __getitem__(self, fileid):
175
"""Returns a file reading from a particular entry."""
176
p = self._path(fileid)
178
return gzip.GzipFile(p + '.gz', 'rb')
180
if e.errno == errno.ENOENT:
262
185
def total_size(self):
263
186
"""Return (count, bytes)
269
for relpath in self._transport.iter_files_recursive():
271
total += self._transport.stat(relpath).st_size
196
total += os.stat(p)[ST_SIZE]
198
total += os.stat(p + '.gz')[ST_SIZE]
273
200
return count, total
276
def ImmutableMemoryStore():
    """Factory: a TextStore held entirely in memory (for tests/scratch use)."""
    return bzrlib.store.text.TextStore(transport.memory.MemoryTransport())
280
class CachedStore(Store):
    """A store that caches data locally, to avoid repeated downloads.

    The precache method should be used to avoid server round-trips for
    every piece of data.
    """
205
class ImmutableScratchStore(ImmutableStore):
    """Self-destructing test subclass of ImmutableStore.

    The Store only exists for the lifetime of the Python object.
    Obviously you should not put anything precious in it.
    """
286
def __init__(self, store, cache_dir):
287
super(CachedStore, self).__init__()
288
self.source_store = store
289
# This clones the source store type with a locally bound
290
# transport. FIXME: it assumes a constructor is == cloning.
291
# clonable store - it might be nicer to actually have a clone()
292
# or something. RBC 20051003
293
self.cache_store = store.__class__(LocalTransport(cache_dir))
296
mutter("Cache add %s" % id)
297
if id not in self.cache_store:
298
self.cache_store.add(self.source_store.get(id), id)
299
return self.cache_store.get(id)
301
def has_id(self, fileid, suffix=None):
302
"""See Store.has_id."""
303
if self.cache_store.has_id(fileid, suffix):
305
if self.source_store.has_id(fileid, suffix):
306
# We could copy at this time
311
def copy_all(store_from, store_to):
    """Copy all ids from one store to another."""
    # TODO: Optional progress indicator
    if not store_from.listable():
        raise UnlistableStore(store_from)
    ids = [f for f in store_from]
    store_to.copy_multi(store_from, ids)
319
def hash_prefix(file_id):
    """Return a two-hex-digit directory prefix such as '1f/' for file_id.

    The low byte of the adler32 checksum spreads entries over 256 buckets.
    """
    return "%02x/" % (adler32(file_id) & 0xff)
212
ImmutableStore.__init__(self, tempfile.mkdtemp())
215
for f in os.listdir(self._basedir):
216
fpath = os.path.join(self._basedir, f)
217
# needed on windows, and maybe some other filesystems
218
os.chmod(fpath, 0600)
220
os.rmdir(self._basedir)
221
mutter("%r destroyed" % self)