46
"""This class represents the abstract storage layout for saving information.
38
class ImmutableStore(object):
39
"""Store that holds files indexed by unique names.
48
41
Files can be added, but not modified once they are in. Typically
49
42
the hash is used as the name, or something else known to be unique,
45
>>> st = ImmutableScratchStore()
47
>>> st.add(StringIO('hello'), 'aa')
53
You are not allowed to add an id that is already present.
55
Entries can be retrieved as files, which may then be read.
57
>>> st.add(StringIO('goodbye'), '123123')
58
>>> st['123123'].read()
61
TODO: Atomic add by writing to a temporary file and renaming.
63
In bzr 0.0.5 and earlier, files within the store were marked
64
readonly on disk. This is no longer done but existing stores need
54
raise NotImplementedError('Children should define their length')
56
def get(self, fileid, suffix=None):
    """Return a file-like object reading one entry from the store.

    If suffix is present, retrieve the named suffix for fileid.
    Abstract: concrete stores must provide the implementation.
    """
    raise NotImplementedError
63
def __getitem__(self, fileid):
    """DEPRECATED. Please use .get(fileid) instead."""
    # Kept only for backward compatibility with old callers.
    raise NotImplementedError
68
raise NotImplementedError
70
def add(self, f, fileid):
    """Add a file object f to the store accessible from the given fileid"""
    raise NotImplementedError(
        'Children of Store must define their method of adding entries.')
74
def has_id(self, fileid, suffix=None):
    """Return True or False for the presence of fileid in the store.

    suffix, if present, is a per-file suffix (e.g. for digital
    signature data).
    """
    raise NotImplementedError
82
"""Return True if this store is able to be listed."""
83
return (getattr(self, "__iter__", None) is not None)
85
def copy_all_ids(self, store_from, pb=None):
86
"""Copy all the file ids from store_from into self."""
87
if not store_from.listable():
88
raise UnlistableStore(store_from)
90
for count, file_id in enumerate(store_from):
92
pb.update('listing files', count, count)
96
mutter('copy_all ids: %r', ids)
97
self.copy_multi(store_from, ids, pb=pb)
99
def copy_multi(self, other, ids, pb=None, permit_failure=False):
68
def __init__(self, basedir):
    """Create a store rooted at the directory *basedir*."""
    self._basedir = basedir
71
def _path(self, entry_id):
    """Map a store id to a filesystem path under the store's base dir.

    Raises TypeError for non-string ids and ValueError for ids
    containing path separators (which could escape the store
    directory).
    """
    if not isinstance(entry_id, basestring):
        raise TypeError(type(entry_id))
    if '/' in entry_id or '\\' in entry_id:
        raise ValueError("invalid store id %r" % entry_id)
    return os.path.join(self._basedir, entry_id)
79
return "%s(%r)" % (self.__class__.__name__, self._basedir)
81
def add(self, f, fileid, compressed=True):
82
"""Add contents of a file into the store.
84
f -- An open file, or file-like object."""
85
# FIXME: Only works on files that will fit in memory
87
from bzrlib.atomicfile import AtomicFile
89
mutter("add store entry %r" % (fileid))
90
if isinstance(f, types.StringTypes):
95
p = self._path(fileid)
96
if os.access(p, os.F_OK) or os.access(p + '.gz', os.F_OK):
97
raise BzrError("store %r already contains id %r" % (self._basedir, fileid))
103
af = AtomicFile(fn, 'wb')
106
gf = gzip.GzipFile(mode='wb', fileobj=af)
116
def copy_multi(self, other, ids, permit_failure=False):
100
117
"""Copy texts for ids from other into self.
102
If an id is present in self, it is skipped. A count of copied
103
ids is returned, which may be less than len(ids).
119
If an id is present in self, it is skipped.
105
:param other: Another Store object
106
:param ids: A list of entry ids to be copied
107
:param pb: A ProgressTask object, if none is given, the default will be created.
108
:param permit_failure: Allow missing entries to be ignored
109
:return: (n_copied, [failed]) The number of entries copied successfully,
110
followed by a list of entries which could not be copied (because they
121
Returns (count_copied, failed), where failed is a collection of ids
122
that could not be copied.
114
pb.update('preparing to copy')
124
pb = bzrlib.ui.ui_factory.progress_bar()
126
pb.update('preparing to copy')
127
to_copy = [id for id in ids if id not in self]
128
if isinstance(other, ImmutableStore):
129
return self.copy_multi_immutable(other, to_copy, pb)
119
if self.has_id(fileid):
134
pb.update('copy', count, len(to_copy))
135
if not permit_failure:
136
self.add(other[id], id)
145
if not permit_failure:
146
assert count == len(to_copy)
150
def copy_multi_immutable(self, other, to_copy, pb, permit_failure=False):
151
from shutil import copyfile
156
other_p = other._path(id)
122
self._copy_one(fileid, None, other, pb)
123
for suffix in self._suffixes:
125
self._copy_one(fileid, suffix, other, pb)
129
pb.update('copy', count, len(ids))
160
if e.errno == errno.ENOENT:
161
if not permit_failure:
162
copyfile(other_p+".gz", p+".gz")
165
copyfile(other_p+".gz", p+".gz")
167
if e.errno == errno.ENOENT:
175
pb.update('copy', count, len(to_copy))
176
assert count == len(to_copy)
137
178
return count, failed
139
def _copy_one(self, fileid, suffix, other, pb):
    """Most generic copy-one object routine.

    Subclasses can override this to provide an optimised copy between
    their own instances; such overridden routines should fall back to
    this when they have no optimised path for a particular peer store.
    """
    mutter('Store._copy_one: %r', fileid)
    entry = other.get(fileid, suffix)
    self.add(entry, fileid, suffix)
152
class TransportStore(Store):
153
"""A TransportStore is a Store superclass for Stores that use Transports."""
155
def add(self, f, fileid, suffix=None):
    """Add contents of a file into the store.

    f -- A file-like object
    """
    mutter("add store entry %r", fileid)
    names = self._id_to_names(fileid, suffix)
    # An id may only ever be stored once; refuse duplicates up front.
    if self._transport.has_any(names):
        raise BzrError("store %r already contains id %r"
                % (self._transport.base, fileid))
    # Optimistically write straight to the target name: most of the
    # time just adding the file works.  If it ever fails because the
    # containing directory does not exist, the directory is created
    # and the add retried.
    self._add(names[0], f)
171
def _add(self, relpath, f):
    """Actually add the file to the given location.

    Hook for subclasses; the base class provides no storage itself.
    """
    raise NotImplementedError('children need to implement this function.')
177
def _check_fileid(self, fileid):
    """Validate that *fileid* is a plain bytestring without separators.

    Raises TypeError for a non-str id and ValueError for an id that
    contains a path separator.
    """
    # Deliberately an exact-type test, not isinstance: only plain
    # bytestrings are acceptable as on-disk names.
    if type(fileid) != str:
        raise TypeError('Fileids should be bytestrings: %s %r' % (
            type(fileid), fileid))
    for sep in ('\\', '/'):
        if sep in fileid:
            raise ValueError("invalid store id %r" % fileid)
184
def _id_to_names(self, fileid, suffix):
185
"""Return the names in the expected order"""
186
if suffix is not None:
187
fn = self._relpath(fileid, [suffix])
189
fn = self._relpath(fileid)
191
# FIXME RBC 20051128 this belongs in TextStore.
198
def has_id(self, fileid, suffix=None):
    """See Store.has_id."""
    names = self._id_to_names(fileid, suffix)
    return self._transport.has_any(names)
202
def _get_name(self, fileid, suffix=None):
203
"""A special check, which returns the name of an existing file.
205
This is similar in spirit to 'has_id', but it is designed
206
to return information about which file the store has.
208
for name in self._id_to_names(fileid, suffix=suffix):
209
if self._transport.has(name):
213
def _get(self, filename):
    """Return a plain file stream for clients to read from.

    Template-method hook for get(); concrete stores must implement it.
    """
    raise NotImplementedError
221
def get(self, fileid, suffix=None):
222
"""See Store.get()."""
223
names = self._id_to_names(fileid, suffix)
226
return self._get(name)
227
except errors.NoSuchFile:
229
raise KeyError(fileid)
231
def __init__(self, a_transport, prefixed=False, compressed=False,
232
dir_mode=None, file_mode=None,
234
super(TransportStore, self).__init__()
235
self._transport = a_transport
236
self._prefixed = prefixed
237
# FIXME RBC 20051128 this belongs in TextStore.
238
self._compressed = compressed
239
self._suffixes = set()
240
self._escaped = escaped
242
# It is okay for these to be None, it just means they
243
# will just use the filesystem defaults
244
self._dir_mode = dir_mode
245
self._file_mode = file_mode
246
# Create a key mapper to use
247
if escaped and prefixed:
248
self._mapper = versionedfile.HashEscapedPrefixMapper()
249
elif not escaped and prefixed:
250
self._mapper = versionedfile.HashPrefixMapper()
253
"%r: escaped unprefixed stores are not permitted."
256
self._mapper = versionedfile.PrefixMapper()
258
def _iter_files_recursive(self):
259
"""Iterate through the files in the transport."""
260
for quoted_relpath in self._transport.iter_files_recursive():
181
def __contains__(self, fileid):
    """True if fileid exists readable on disk, plain or gzipped."""
    p = self._path(fileid)
    if os.access(p, os.R_OK):
        return True
    return os.access(p + '.gz', os.R_OK)
187
# TODO: Guard against the same thing being stored twice, compressed and uncompresse
263
189
def __iter__(self):
264
for relpath in self._iter_files_recursive():
265
# worst case is one of each suffix.
266
name = os.path.basename(relpath)
267
if name.endswith('.gz'):
270
for count in range(len(self._suffixes)):
271
for suffix in self._suffixes:
272
if name.endswith('.' + suffix):
275
yield self._mapper.unmap(name)[0]
190
for f in os.listdir(self._basedir):
192
# TODO: case-insensitive?
277
197
def __len__(self):
    """Return the number of entries by exhausting the iterator."""
    return sum(1 for _ in self.__iter__())
280
def _relpath(self, fileid, suffixes=None):
281
self._check_fileid(fileid)
283
for suffix in suffixes:
284
if not suffix in self._suffixes:
285
raise ValueError("Unregistered suffix %r" % suffix)
286
self._check_fileid(suffix)
289
path = self._mapper.map((fileid,))
290
full_path = '.'.join([path] + suffixes)
294
if self._transport is None:
295
return "%s(None)" % (self.__class__.__name__)
297
return "%s(%r)" % (self.__class__.__name__, self._transport.base)
302
"""Return True if this store is able to be listed."""
303
return self._transport.listable()
305
def register_suffix(self, suffix):
    """Register a suffix as being expected in this store.

    :param suffix: suffix string, validated like a fileid
    :raises ValueError: if suffix is 'gz', which is reserved for the
        store's own compression scheme.
    """
    self._check_fileid(suffix)
    # NOTE(review): the guard line was missing from the garbled source;
    # reconstructed from the error message below — confirm against the
    # original file.
    if suffix == 'gz':
        raise ValueError('You cannot register the "gz" suffix.')
    self._suffixes.add(suffix)
198
return len(os.listdir(self._basedir))
201
def __getitem__(self, fileid):
202
"""Returns a file reading from a particular entry."""
203
p = self._path(fileid)
205
return gzip.GzipFile(p + '.gz', 'rb')
207
if e.errno != errno.ENOENT:
213
if e.errno != errno.ENOENT:
216
raise IndexError(fileid)
312
219
def total_size(self):
313
220
"""Return (count, bytes)