class Store(object):
    """An abstract store that holds files indexed by unique names.

    This class represents the abstract storage layout for saving information.
    Files can be added, but not modified once they are in.  Typically
    the hash is used as the name, or something else known to be unique,
    such as a UUID.

    >>> st = ImmutableScratchStore()

    >>> st.add(StringIO('hello'), 'aa')

    You are not allowed to add an id that is already present.

    Entries can be retrieved as files, which may then be read.

    >>> st.add(StringIO('goodbye'), '123123')
    >>> st['123123'].read()
    'goodbye'
    """

    def __len__(self):
        raise NotImplementedError('Children should define their length')

    def __getitem__(self, fileid):
        """Returns a file reading from a particular entry."""
        raise NotImplementedError

    def __contains__(self, fileid):
        raise NotImplementedError

    def __iter__(self):
        raise NotImplementedError

    def add(self, f, fileid):
        """Add a file object f to the store accessible from the given fileid"""
        raise NotImplementedError('Children of Store must define their method of adding entries.')

    def add_multi(self, entries):
        """Add a series of file-like or string objects to the store with the given
        identities.

        :param entries: A list of tuples of file,id pairs [(file1, id1), (file2, id2), ...]
                        This could also be a generator yielding (file,id) pairs.
        """
        for f, fileid in entries:
            self.add(f, fileid)

    def has(self, fileids):
        """Return True/False for each entry in fileids.

        :param fileids: A List or generator yielding file ids.
        :return: A generator or list returning True/False for each entry.
        """
        for fileid in fileids:
            if fileid in self:
                yield True
            else:
                yield False

    def get(self, fileids, permit_failure=False, pb=None):
        """Return a set of files, one for each requested entry.

        :param permit_failure: If true, return None for entries which do not
                               exist.
        :return: A list or generator of file-like objects, one for each id.
        """
        for fileid in fileids:
            try:
                yield self[fileid]
            except KeyError:
                if permit_failure:
                    yield None
                else:
                    raise

    def total_size(self):
        """Return (count, bytes).

        This is the (compressed) size stored on disk, not the size of
        the content."""
        total = 0
        count = 0
        for fid in self:
            count += 1
            total += self._item_size(fid)
        return count, total
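# Illustrative usage sketch for the batch methods above, assuming a concrete
# subclass; ImmutableScratchStore (defined later in this module) is used only
# because it needs no setup:
#
#   >>> st = ImmutableScratchStore()
#   >>> st.add_multi([(StringIO('a'), 'id-a'), (StringIO('b'), 'id-b')])
#   >>> list(st.has(['id-a', 'missing-id']))
#   [True, False]
#   >>> [f.read() for f in st.get(['id-a', 'id-b'])]
#   ['a', 'b']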
class ImmutableStore(Store):
    """Store that stores files on disk.

    TODO: Atomic add by writing to a temporary file and renaming.

    TODO: Guard against the same thing being stored twice, compressed and
          uncompressed, during copy_multi_immutable - the window is for a
          matching store with some crack code that lets it offer a
          non gz FOO and then a gz FOO.

    In bzr 0.0.5 and earlier, files within the store were marked
    readonly on disk.  This is no longer done but existing stores need
    to be accommodated.
    """

    def __init__(self, basedir):
        super(ImmutableStore, self).__init__()
        self._basedir = basedir

    def _path(self, entry_id):
        if not isinstance(entry_id, basestring):
            raise TypeError(type(entry_id))
        if '\\' in entry_id or '/' in entry_id:
            raise ValueError("invalid store id %r" % entry_id)
        return os.path.join(self._basedir, entry_id)

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self._basedir)

    def add(self, f, fileid, compressed=True):
        """Add contents of a file into the store.

        f -- An open file, or file-like object."""
        # FIXME: Only works on files that will fit in memory
        from bzrlib.atomicfile import AtomicFile

        mutter("add store entry %r" % (fileid))
        if isinstance(f, types.StringTypes):
            content = f
        else:
            content = f.read()

        p = self._path(fileid)
        if os.access(p, os.F_OK) or os.access(p + '.gz', os.F_OK):
            raise BzrError("store %r already contains id %r" % (self._basedir, fileid))

        fn = p
        if compressed:
            fn = fn + '.gz'

        af = AtomicFile(fn, 'wb')
        try:
            if compressed:
                # Write through gzip so the entry is stored compressed on disk.
                gf = gzip.GzipFile(mode='wb', fileobj=af)
                gf.write(content)
                gf.close()
            else:
                af.write(content)
            af.commit()
        finally:
            af.close()
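    # Usage sketch for add(); the store directory below is hypothetical and
    # must already exist:
    #
    #   >>> st = ImmutableStore('/tmp/example-store')
    #   >>> st.add(StringIO('contents'), 'some-file-id')
    #   >>> st['some-file-id'].read()
    #   'contents'
    #
    # With compressed=True (the default) the entry is written to disk as
    # 'some-file-id.gz'; __getitem__ reads it back transparently.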
    def copy_multi(self, other, ids, pb=None, permit_failure=False):
        """Copy texts for ids from other into self.

        If an id is present in self, it is skipped.

        Returns (count_copied, failed), where failed is a collection of ids
        that could not be copied; count_copied may be less than len(ids).

        :param other: Another Store object
        :param ids: A list of entry ids to be copied
        :param pb: A ProgressBar object, if none is given, the default will be created.
        :param permit_failure: Allow missing entries to be ignored
        :return: (n_copied, [failed]) The number of entries copied successfully,
            followed by a list of entries which could not be copied (because they
            were missing)
        """
        if pb is None:
            pb = bzrlib.ui.ui_factory.progress_bar()

        # XXX: Is there any reason why we couldn't make this accept a generator
        # and build a list as it finds things to copy?
        ids = list(ids) # Make sure we don't have a generator, since we iterate 2 times
        pb.update('preparing to copy')
        to_copy = []
        for file_id, has in zip(ids, self.has(ids)):
            if not has:
                to_copy.append(file_id)
        if isinstance(other, ImmutableStore):
            return self.copy_multi_immutable(other, to_copy, pb,
                                             permit_failure=permit_failure)
        return self._do_copy(other, to_copy, pb, permit_failure=permit_failure)
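    # Sketch of copy_multi() semantics (the two store names are hypothetical):
    #
    #   >>> count, failed = new_store.copy_multi(old_store, ['id-1', 'id-2'],
    #   ...                                      permit_failure=True)
    #
    # Entries already present in new_store are skipped; with
    # permit_failure=True, ids missing from old_store are collected in
    # `failed` instead of raising an error.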
    def _do_copy(self, other, to_copy, pb, permit_failure=False):
        """This is the standard copying mechanism, just get them one at
        a time from remote, and store them locally.

        :param other: Another Store object
        :param to_copy: A list of entry ids to copy
        :param pb: A ProgressBar object to display completion status.
        :param permit_failure: Allow missing entries to be ignored
        :return: (n_copied, [failed])
            The number of entries copied, and a list of failed entries.
        """
        # This should be updated to use add_multi() rather than
        # the current methods of buffering requests.
        # One question, is it faster to queue up 1-10 and then copy 1-10
        # then queue up 11-20, copy 11-20
        # or to queue up 1-10, copy 1, queue 11, copy 2, etc?
        # sort of pipeline versus batch.

        # We can't use self._transport.copy_to because we don't know
        # whether the local tree is in the same format as other
        failed = set()

        def buffer_requests():
            count = 0
            buffered_requests = []
            for fileid in to_copy:
                try:
                    f = other[fileid]
                except KeyError:
                    if not permit_failure:
                        raise
                    failed.add(fileid)
                    continue

                buffered_requests.append((f, fileid))
                if len(buffered_requests) > self._max_buffered_requests:
                    yield buffered_requests.pop(0)
                    count += 1
                    pb.update('copy', count, len(to_copy))

            for req in buffered_requests:
                yield req
                count += 1
                pb.update('copy', count, len(to_copy))

            assert count == len(to_copy)

        self.add_multi(buffer_requests())
        return len(to_copy), failed
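    # Note on the buffering in _do_copy() above: at most
    # _max_buffered_requests + 1 fetched files are held at a time; older
    # requests are yielded to add_multi() as new ones are fetched, which is
    # the "pipeline" half of the pipeline-versus-batch question raised in the
    # comments above.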
    def copy_multi_immutable(self, other, to_copy, pb, permit_failure=False):
        count = 0
        failed = set()
        for id in to_copy:
            p = self._path(id)
            other_p = other._path(id)
            try:
                osutils.link_or_copy(other_p, p)
            except (IOError, OSError), e:
                if e.errno == errno.ENOENT:
                    if not permit_failure:
                        osutils.link_or_copy(other_p+".gz", p+".gz")
                    else:
                        try:
                            osutils.link_or_copy(other_p+".gz", p+".gz")
                        except (IOError, OSError), e:
                            if e.errno == errno.ENOENT:
                                failed.add(id)
                            else:
                                raise
                else:
                    raise
            count += 1
            pb.update('copy', count, len(to_copy))
        assert count == len(to_copy)
        return count, failed
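    # copy_multi_immutable() tries to hard-link (or copy) the uncompressed
    # name first and falls back to the '.gz' name on ENOENT, so both
    # spellings of each entry are attempted before an id is considered
    # missing.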
    def __contains__(self, fileid):
        p = self._path(fileid)
        return (os.access(p, os.R_OK)
                or os.access(p + '.gz', os.R_OK))
    def _item_size(self, fid):
        p = self._path(fid)
        try:
            return os.stat(p)[ST_SIZE]
        except OSError:
            return os.stat(p + '.gz')[ST_SIZE]
228
# TODO: Guard against the same thing being stored twice,
229
# compressed and uncompressed
232
for f in os.listdir(self._basedir):
234
# TODO: case-insensitive?
240
return len(os.listdir(self._basedir))
    def __getitem__(self, fileid):
        """Returns a file reading from a particular entry."""
        p = self._path(fileid)
        try:
            return gzip.GzipFile(p + '.gz', 'rb')
        except IOError, e:
            if e.errno != errno.ENOENT:
                raise

        try:
            return file(p, 'rb')
        except IOError, e:
            if e.errno != errno.ENOENT:
                raise

        raise KeyError(fileid)
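    # Lookup order in __getitem__: the compressed 'fileid.gz' form is tried
    # first, then the plain file, and only then is KeyError raised.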
class ImmutableScratchStore(ImmutableStore):
    """Self-destructing test subclass of ImmutableStore.

    The Store only exists for the lifetime of the Python object.
    Obviously you should not put anything precious in it.
    """

    def __init__(self):
        super(ImmutableScratchStore, self).__init__(tempfile.mkdtemp())

    def __del__(self):
        for f in os.listdir(self._basedir):
            fpath = os.path.join(self._basedir, f)
            # needed on windows, and maybe some other filesystems
            os.chmod(fpath, 0600)
            os.remove(fpath)
        os.rmdir(self._basedir)
        mutter("%r destroyed" % self)
class TransportStore(Store):
    """A TransportStore is a Store superclass for Stores that use Transports."""

    _max_buffered_requests = 10

    def __init__(self, transport):
        assert isinstance(transport, bzrlib.transport.Transport)
        super(TransportStore, self).__init__()
        self._transport = transport

    def __repr__(self):
        if self._transport is None:
            return "%s(None)" % (self.__class__.__name__)
        else:
            return "%s(%r)" % (self.__class__.__name__, self._transport.base)
class ImmutableMemoryStore(Store):
    """A memory only store."""

    def __contains__(self, fileid):
        return self._contents.has_key(fileid)

    def __init__(self):
        super(ImmutableMemoryStore, self).__init__()
        self._contents = {}
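# Minimal sketch of the membership check above; a freshly constructed memory
# store contains nothing:
#
#   >>> ms = ImmutableMemoryStore()
#   >>> 'some-id' in ms
#   False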