class Store(object):
    """This class represents the abstract storage layout for saving information."""

    def __len__(self):
        raise NotImplementedError('Children should define their length')

    def get(self, fileid, suffix=None):
        """Returns a file reading from a particular entry.

        If suffix is present, retrieve the named suffix for fileid.
        """
        raise NotImplementedError

    def __getitem__(self, fileid):
        """DEPRECATED. Please use .get(fileid) instead."""
        raise NotImplementedError

    def __contains__(self, fileid):
        raise NotImplementedError

    def add(self, f, fileid):
        """Add a file object f to the store accessible from the given fileid."""
        raise NotImplementedError('Children of Store must define their method of adding entries.')

    def has_id(self, fileid, suffix=None):
        """Return True or False for the presence of fileid in the store.

        suffix, if present, is a per file suffix, i.e. for digital signature
        data.
        """
        raise NotImplementedError

    def listable(self):
        """Return True if this store is able to be listed."""
        return (getattr(self, "__iter__", None) is not None)
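
    # Illustrative sketch (not part of bzrlib): a minimal child only needs to
    # provide add/get/has_id and the container protocol above. Assuming an
    # in-memory dict is an acceptable backing, it could look roughly like:
    #
    #     class MemoryStore(Store):
    #         def __init__(self):
    #             self._contents = {}
    #         def add(self, f, fileid):
    #             if fileid in self._contents:
    #                 raise BzrError("store already contains id %r" % fileid)
    #             self._contents[fileid] = f.read()
    #         def has_id(self, fileid, suffix=None):
    #             return fileid in self._contents
    #         def get(self, fileid, suffix=None):
    #             from cStringIO import StringIO
    #             return StringIO(self._contents[fileid])
    #         def __iter__(self):
    #             return iter(self._contents)
    #         def __len__(self):
    #             return len(self._contents)
    #
    # 'MemoryStore' is a hypothetical name used only for illustration.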

    def copy_all_ids(self, store_from, pb=None):
        """Copy all the file ids from store_from into self."""
        if not store_from.listable():
            raise UnlistableStore(store_from)
        ids = []
        for count, file_id in enumerate(store_from):
            if pb is not None:
                pb.update('listing files', count, count)
            ids.append(file_id)
        mutter('copy_all ids: %r', ids)
        self.copy_multi(store_from, ids, pb=pb)
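
    # Usage sketch: given two stores where 'source' is listable, everything it
    # holds can be mirrored into 'target' with either of:
    #
    #     target.copy_all_ids(source)
    #     target.copy_multi(source, list(source), permit_failure=True)
    #
    # 'source' and 'target' are placeholder names; with permit_failure=True,
    # ids missing from 'source' are collected and returned rather than raised.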

    def copy_multi(self, other, ids, pb=None, permit_failure=False):
        """Copy texts for ids from other into self.

        If an id is present in self, it is skipped.  A count of copied
        ids is returned, which may be less than len(ids).

        :param other: Another Store object
        :param ids: A list of entry ids to be copied
        :param pb: A ProgressTask object; if none is given, the default will be created.
        :param permit_failure: Allow missing entries to be ignored
        :return: (n_copied, [failed]) The number of entries copied successfully,
            followed by a list of entries which could not be copied (because they
            were missing).
        """
        if pb:
            pb.update('preparing to copy')
        failed = set()
        count = 0
        for fileid in ids:
            count += 1
            if self.has_id(fileid):
                continue
            try:
                self._copy_one(fileid, None, other, pb)
                for suffix in self._suffixes:
                    try:
                        self._copy_one(fileid, suffix, other, pb)
                    except KeyError:
                        pass
                if pb:
                    pb.update('copy', count, len(ids))
            except KeyError:
                if permit_failure:
                    failed.add(fileid)
                else:
                    raise
        assert count == len(ids)
        return count, failed

    def _copy_one(self, fileid, suffix, other, pb):
        """Most generic copy-one object routine.

        Subclasses can override this to provide an optimised
        copy between their own instances.  Such overridden routines
        should call this if they have no optimised facility for a
        specific 'other'.
        """
        mutter('Store._copy_one: %r', fileid)
        f = other.get(fileid, suffix)
        self.add(f, fileid, suffix)


class ImmutableStore(object):
    """Store that holds files indexed by unique names.

    Files can be added, but not modified once they are in.  Typically
    the hash is used as the name, or something else known to be unique,
    such as a UUID.

    >>> st = ImmutableScratchStore()

    >>> st.add(StringIO('hello'), 'aa')

    You are not allowed to add an id that is already present.

    Entries can be retrieved as files, which may then be read.

    >>> st.add(StringIO('goodbye'), '123123')
    >>> st['123123'].read()
    'goodbye'

    TODO: Atomic add by writing to a temporary file and renaming.

    In bzr 0.0.5 and earlier, files within the store were marked
    readonly on disk.  This is no longer done but existing stores need
    to be accommodated.
    """

    def __init__(self, basedir):
        self._basedir = basedir

    def _path(self, id):
        if '\\' in id or '/' in id:
            raise ValueError("invalid store id %r" % id)
        return os.path.join(self._basedir, id)

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self._basedir)

    def add(self, f, fileid, compressed=True):
        """Add contents of a file into the store.

        f -- An open file, or file-like object."""
        # FIXME: Only works on files that will fit in memory
        from bzrlib.atomicfile import AtomicFile

        mutter("add store entry %r", fileid)
        if isinstance(f, types.StringTypes):
            content = f
        else:
            content = f.read()

        p = self._path(fileid)
        if os.access(p, os.F_OK) or os.access(p + '.gz', os.F_OK):
            raise BzrError("store %r already contains id %r" % (self._basedir, fileid))

        fn = p
        if compressed:
            fn = fn + '.gz'

        af = AtomicFile(fn, 'wb')
        try:
            if compressed:
                gf = gzip.GzipFile(mode='wb', fileobj=af)
                gf.write(content)
                gf.close()
            else:
                af.write(content)
            af.commit()
        finally:
            af.close()

    def copy_multi(self, other, ids):
        """Copy texts for ids from other into self, skipping ids already present."""
        from bzrlib.progress import ProgressBar
        pb = ProgressBar()
        pb.update('preparing to copy')
        to_copy = [id for id in ids if id not in self]
        if isinstance(other, ImmutableStore):
            return self.copy_multi_immutable(other, to_copy, pb)
        count = 0
        for id in to_copy:
            count += 1
            pb.update('copy', count, len(to_copy))
            self.add(other[id], id)
        assert count == len(to_copy)
        return count

    def copy_multi_immutable(self, other, to_copy, pb):
        from shutil import copyfile
        count = 0
        for id in to_copy:
            p = self._path(id)
            other_p = other._path(id)
            try:
                copyfile(other_p, p)
            except IOError as e:
                if e.errno == errno.ENOENT:
                    copyfile(other_p + ".gz", p + ".gz")
                else:
                    raise
            count += 1
            pb.update('copy', count, len(to_copy))
        assert count == len(to_copy)
        return count

    def __contains__(self, fileid):
        p = self._path(fileid)
        return (os.access(p, os.R_OK)
                or os.access(p + '.gz', os.R_OK))

    # TODO: Guard against the same thing being stored twice, compressed and uncompressed

    def __iter__(self):
        for f in os.listdir(self._basedir):
            # TODO: case-insensitive?
            if f.endswith('.gz'):
                yield f[:-3]
            else:
                yield f

    def __len__(self):
        return len(os.listdir(self._basedir))

    def __getitem__(self, fileid):
        """Returns a file reading from a particular entry."""
        p = self._path(fileid)
        try:
            return gzip.GzipFile(p + '.gz', 'rb')
        except IOError as e:
            if e.errno == errno.ENOENT:
                return file(p, 'rb')
            else:
                raise
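
    # With the default compressed=True, add() writes the entry as
    # '<basedir>/<fileid>.gz'; with compressed=False it writes '<basedir>/<fileid>'.
    # __getitem__ therefore tries the '.gz' name first and falls back to the
    # plain name, matching whichever form add() produced.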


class TransportStore(Store):
    """A TransportStore is a Store superclass for Stores that use Transports."""

    def add(self, f, fileid, suffix=None):
        """Add contents of a file into the store.

        f -- A file-like object
        """
        mutter("add store entry %r", fileid)
        names = self._id_to_names(fileid, suffix)
        if self._transport.has_any(names):
            raise BzrError("store %r already contains id %r"
                           % (self._transport.base, fileid))

        # Most of the time, just adding the file will work.
        # If we find a case where it fails (because the dir
        # doesn't exist), then create the dir and try again.
        self._add(names[0], f)

    def _add(self, relpath, f):
        """Actually add the file to the given location.

        This should be overridden by children.
        """
        raise NotImplementedError('children need to implement this function.')

    def _check_fileid(self, fileid):
        if type(fileid) != str:
            raise TypeError('Fileids should be bytestrings: %s %r' % (
                type(fileid), fileid))
        if '\\' in fileid or '/' in fileid:
            raise ValueError("invalid store id %r" % fileid)
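
    # For example, _check_fileid('inventory') passes, while
    # _check_fileid(u'inventory'), _check_fileid('a/b') and _check_fileid('a\\b')
    # all raise, since ids must be plain bytestrings with no path separators.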

    def _id_to_names(self, fileid, suffix):
        """Return the names in the expected order."""
        if suffix is not None:
            fn = self._relpath(fileid, [suffix])
        else:
            fn = self._relpath(fileid)

        # FIXME RBC 20051128 this belongs in TextStore.
        fn_gz = fn + '.gz'
        if self._compressed:
            return fn_gz, fn
        else:
            return fn, fn_gz

    def has_id(self, fileid, suffix=None):
        """See Store.has_id."""
        return self._transport.has_any(self._id_to_names(fileid, suffix))
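
    # Concretely, assuming the compressed-first ordering above and an
    # unprefixed store created with compressed=True, _id_to_names('foo', None)
    # returns ('foo.gz', 'foo'): the compressed name is preferred, but the
    # plain name is still recognised by has_id() and get().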

    def _get_name(self, fileid, suffix=None):
        """A special check, which returns the name of an existing file.

        This is similar in spirit to 'has_id', but it is designed
        to return information about which file the store has.
        """
        for name in self._id_to_names(fileid, suffix=suffix):
            if self._transport.has(name):
                return name
        return None

    def _get(self, filename):
        """Return a vanilla file stream for clients to read from.

        This is the body of a template method on 'get', and should be
        implemented by subclasses.
        """
        raise NotImplementedError

    def get(self, fileid, suffix=None):
        """See Store.get()."""
        names = self._id_to_names(fileid, suffix)
        for name in names:
            try:
                return self._get(name)
            except errors.NoSuchFile:
                pass
        raise KeyError(fileid)
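
    # In other words, get() walks the candidate names in order and returns the
    # first stream a subclass's _get() can open; only if every candidate is
    # missing does it raise KeyError. For a compressed store, get('foo') tries
    # 'foo.gz' and then 'foo', assuming the name ordering sketched above.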

    def __init__(self, a_transport, prefixed=False, compressed=False,
                 dir_mode=None, file_mode=None,
                 escaped=False):
        super(TransportStore, self).__init__()
        self._transport = a_transport
        self._prefixed = prefixed
        # FIXME RBC 20051128 this belongs in TextStore.
        self._compressed = compressed
        self._suffixes = set()
        self._escaped = escaped

        # It is okay for these to be None; it just means they
        # will use the filesystem defaults
        self._dir_mode = dir_mode
        self._file_mode = file_mode
        # Create a key mapper to use
        if escaped and prefixed:
            self._mapper = versionedfile.HashEscapedPrefixMapper()
        elif not escaped and prefixed:
            self._mapper = versionedfile.HashPrefixMapper()
        elif escaped and not prefixed:
            raise ValueError(
                "%r: escaped unprefixed stores are not permitted."
                % (self,))
        else:
            self._mapper = versionedfile.PrefixMapper()

    def _iter_files_recursive(self):
        """Iterate through the files in the transport."""
        for quoted_relpath in self._transport.iter_files_recursive():
            yield quoted_relpath
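
    # Usage sketch (hedged): a concrete child (for example bzrlib's TextStore)
    # would normally be handed a transport plus these flags, roughly:
    #
    #     from bzrlib import transport
    #     t = transport.get_transport('/path/to/store')
    #     store = SomeTransportStoreChild(t, prefixed=True, compressed=True)
    #
    # 'SomeTransportStoreChild' is a placeholder; the prefixed/escaped
    # combination chooses the key mapper exactly as in __init__ above.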

    def __iter__(self):
        for relpath in self._iter_files_recursive():
            # worst case is one of each suffix.
            name = os.path.basename(relpath)
            if name.endswith('.gz'):
                name = name[:-3]
            skip = False
            for count in range(len(self._suffixes)):
                for suffix in self._suffixes:
                    if name.endswith('.' + suffix):
                        skip = True
            if not skip:
                yield self._mapper.unmap(name)[0]

    def __len__(self):
        return len(list(self.__iter__()))

    def _relpath(self, fileid, suffixes=None):
        self._check_fileid(fileid)
        if suffixes:
            for suffix in suffixes:
                if suffix not in self._suffixes:
                    raise ValueError("Unregistered suffix %r" % suffix)
                self._check_fileid(suffix)
        else:
            suffixes = []
        path = self._mapper.map((fileid,))
        full_path = '.'.join([path] + suffixes)
        return full_path
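
    # For instance, once register_suffix('sig') has been called (see below), and
    # assuming the unprefixed PrefixMapper maps a fileid to itself,
    # _relpath('foo', ['sig']) comes out as 'foo.sig'; an unregistered suffix
    # raises ValueError instead.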

    def __repr__(self):
        if self._transport is None:
            return "%s(None)" % (self.__class__.__name__)
        else:
            return "%s(%r)" % (self.__class__.__name__, self._transport.base)

    def listable(self):
        """Return True if this store is able to be listed."""
        return self._transport.listable()

    def register_suffix(self, suffix):
        """Register a suffix as being expected in this store."""
        self._check_fileid(suffix)
        if suffix == 'gz':
            raise ValueError('You cannot register the "gz" suffix.')
        self._suffixes.add(suffix)

    def total_size(self):
        """Return (count, bytes)