202
class _MPDiffGenerator(object):
    """Pull out the functionality for generating mp_diffs.

    Given a versioned-file store and a sequence of keys, this extracts each
    text and its (non-ghost) parents in topological order and produces a
    multiparent diff per requested key.
    """

    def __init__(self, vf, keys):
        # The store the texts are extracted from
        self.vf = vf
        # This is the order the keys were requested in
        self.ordered_keys = tuple(keys)
        # keys + their parents, what we need to compute the diffs
        self.needed_keys = ()
        # Map from key: mp_diff
        self.diffs = {}
        # Map from key: parents_needed (may have ghosts)
        self.parent_map = {}
        # Parents that aren't present
        self.ghost_parents = ()
        # Map from parent_key => number of children for this text
        self.refcounts = {}
        # Content chunks that are cached while we still need them
        self.chunks = {}

    def _find_needed_keys(self):
        """Find the set of keys we need to request.

        This includes all the original keys passed in, and the non-ghost
        parents of those keys.

        :return: (needed_keys, refcounts)
            needed_keys is the set of all texts we need to extract
            refcounts is a dict of {key: num_children} letting us know when we
                no longer need to cache a given parent text
        """
        # All the keys and their parents
        needed_keys = set(self.ordered_keys)
        parent_map = self.vf.get_parent_map(needed_keys)
        self.parent_map = parent_map
        # TODO: Should we be using a different construct here? I think this
        #       uses difference_update internally, and we expect the result to
        #       be tiny
        missing_keys = needed_keys.difference(parent_map)
        if missing_keys:
            raise errors.RevisionNotPresent(list(missing_keys)[0], self.vf)
        # Parents that might be missing. They are allowed to be ghosts, but we
        # should check for them
        refcounts = {}
        setdefault = refcounts.setdefault
        just_parents = set()
        for child_key, parent_keys in parent_map.iteritems():
            if parent_keys is None:
                # parent_keys may be None if a given VersionedFile claims to
                # not support graph operations.
                continue
            just_parents.update(parent_keys)
            needed_keys.update(parent_keys)
            for p in parent_keys:
                refcounts[p] = setdefault(p, 0) + 1
        just_parents.difference_update(parent_map)
        # Remove any parents that are actually ghosts from the needed set
        self.present_parents = set(self.vf.get_parent_map(just_parents))
        self.ghost_parents = just_parents.difference(self.present_parents)
        needed_keys.difference_update(self.ghost_parents)
        self.needed_keys = needed_keys
        self.refcounts = refcounts
        return needed_keys, refcounts

    def _compute_diff(self, key, parent_lines, lines):
        """Compute a single mp_diff, and store it in self._diffs"""
        if len(parent_lines) > 0:
            # XXX: _extract_blocks is not usefully defined anywhere...
            #      It was meant to extract the left-parent diff without
            #      having to recompute it for Knit content (pack-0.92,
            #      etc). That seems to have regressed somewhere
            left_parent_blocks = self.vf._extract_blocks(key,
                parent_lines[0], lines)
        else:
            left_parent_blocks = None
        diff = multiparent.MultiParent.from_lines(lines,
                    parent_lines, left_parent_blocks)
        self.diffs[key] = diff

    def _process_one_record(self, key, this_chunks):
        # Compute the diff for one streamed record, dropping cached parent
        # content once its last child has consumed it.
        parent_keys = None
        if key in self.parent_map:
            # This record should be ready to diff, since we requested
            # content in 'topological' order
            parent_keys = self.parent_map.pop(key)
            # If a VersionedFile claims 'no-graph' support, then it may return
            # None for any parent request, so we replace it with an empty tuple
            if parent_keys is None:
                parent_keys = ()
            parent_lines = []
            for p in parent_keys:
                # Alternatively we could check p not in self.needed_keys, but
                # ghost_parents should be tiny versus huge
                if p in self.ghost_parents:
                    continue
                refcount = self.refcounts[p]
                if refcount == 1: # Last child reference
                    self.refcounts.pop(p)
                    parent_chunks = self.chunks.pop(p)
                else:
                    self.refcounts[p] = refcount - 1
                    parent_chunks = self.chunks[p]
                p_lines = osutils.chunks_to_lines(parent_chunks)
                # TODO: Should we cache the line form? We did the
                #       computation to get it, but storing it this way will
                #       be less memory efficient...
                parent_lines.append(p_lines)
            lines = osutils.chunks_to_lines(this_chunks)
            # Since we needed the lines, we'll go ahead and cache them this way
            this_chunks = lines
            self._compute_diff(key, parent_lines, lines)
        # Is this content required for any more children?
        if key in self.refcounts:
            self.chunks[key] = this_chunks

    def _extract_diffs(self):
        """Stream all needed texts and compute a diff per requested key."""
        needed_keys, refcounts = self._find_needed_keys()
        for record in self.vf.get_record_stream(needed_keys,
                                                'topological', True):
            if record.storage_kind == 'absent':
                raise errors.RevisionNotPresent(record.key, self.vf)
            self._process_one_record(record.key,
                                     record.get_bytes_as('chunked'))

    def compute_diffs(self):
        """Return the list of mp_diffs, in the order keys were requested."""
        self._extract_diffs()
        dpop = self.diffs.pop
        return [dpop(k) for k in self.ordered_keys]
334
210
class VersionedFile(object):
335
211
"""Versioned text file storage.
1181
1049
def make_mpdiffs(self, keys):
1182
1050
"""Create multiparent diffs for specified keys."""
1183
generator = _MPDiffGenerator(self, keys)
1184
return generator.compute_diffs()
1186
def get_annotator(self):
1187
return annotate.Annotator(self)
1051
keys_order = tuple(keys)
1052
keys = frozenset(keys)
1053
knit_keys = set(keys)
1054
parent_map = self.get_parent_map(keys)
1055
for parent_keys in parent_map.itervalues():
1057
knit_keys.update(parent_keys)
1058
missing_keys = keys - set(parent_map)
1060
raise errors.RevisionNotPresent(list(missing_keys)[0], self)
1061
# We need to filter out ghosts, because we can't diff against them.
1062
maybe_ghosts = knit_keys - keys
1063
ghosts = maybe_ghosts - set(self.get_parent_map(maybe_ghosts))
1064
knit_keys.difference_update(ghosts)
1066
chunks_to_lines = osutils.chunks_to_lines
1067
for record in self.get_record_stream(knit_keys, 'topological', True):
1068
lines[record.key] = chunks_to_lines(record.get_bytes_as('chunked'))
1069
# line_block_dict = {}
1070
# for parent, blocks in record.extract_line_blocks():
1071
# line_blocks[parent] = blocks
1072
# line_blocks[record.key] = line_block_dict
1074
for key in keys_order:
1076
parents = parent_map[key] or []
1077
# Note that filtering knit_keys can lead to a parent difference
1078
# between the creation and the application of the mpdiff.
1079
parent_lines = [lines[p] for p in parents if p in knit_keys]
1080
if len(parent_lines) > 0:
1081
left_parent_blocks = self._extract_blocks(key, parent_lines[0],
1084
left_parent_blocks = None
1085
diffs.append(multiparent.MultiParent.from_lines(target,
1086
parent_lines, left_parent_blocks))
1189
1089
missing_keys = index._missing_keys_from_parent_map
1191
1091
def _extract_blocks(self, version_id, source, target):
1194
def _transitive_fallbacks(self):
1195
"""Return the whole stack of fallback versionedfiles.
1197
This VersionedFiles may have a list of fallbacks, but it doesn't
1198
necessarily know about the whole stack going down, and it can't know
1199
at open time because they may change after the objects are opened.
1202
for a_vfs in self._immediate_fallback_vfs:
1203
all_fallbacks.append(a_vfs)
1204
all_fallbacks.extend(a_vfs._transitive_fallbacks())
1205
return all_fallbacks
1208
1095
class ThunkedVersionedFiles(VersionedFiles):
1209
1096
"""Storage for many versioned files thunked onto a 'VersionedFile' class.
1753
class NoDupeAddLinesDecorator(object):
    """Decorator for a VersionedFiles that skips doing an add_lines if the key
    is already present.
    """

    def __init__(self, store):
        # The wrapped VersionedFiles; all other attribute access is
        # delegated to it via __getattr__.
        self._store = store

    def add_lines(self, key, parents, lines, parent_texts=None,
            left_matching_blocks=None, nostore_sha=None, random_id=False,
            check_content=True):
        """See VersionedFiles.add_lines.

        This implementation may return None as the third element of the return
        value when the original store wouldn't.
        """
        if nostore_sha:
            raise NotImplementedError(
                "NoDupeAddLinesDecorator.add_lines does not implement the "
                "nostore_sha behaviour.")
        if key[-1] is None:
            # Content-addressed key: derive it from the sha1 of the lines.
            sha1 = osutils.sha_strings(lines)
            key = ("sha1:" + sha1,)
        else:
            sha1 = None
        if key in self._store.get_parent_map([key]):
            # This key has already been inserted, so don't do it again.
            if sha1 is None:
                sha1 = osutils.sha_strings(lines)
            return sha1, sum(map(len, lines)), None
        return self._store.add_lines(key, parents, lines,
                parent_texts=parent_texts,
                left_matching_blocks=left_matching_blocks,
                nostore_sha=nostore_sha, random_id=random_id,
                check_content=check_content)

    def __getattr__(self, name):
        # Transparent delegation of everything else to the wrapped store.
        return getattr(self._store, name)
1793
1628
def network_bytes_to_kind_and_offset(network_bytes):
1794
1629
"""Strip of a record kind from the front of network_bytes.
1886
1721
for prefix in sorted(per_prefix_map):
1887
1722
present_keys.extend(reversed(tsort.topo_sort(per_prefix_map[prefix])))
1888
1723
return present_keys
1891
class _KeyRefs(object):
1893
def __init__(self, track_new_keys=False):
1894
# dict mapping 'key' to 'set of keys referring to that key'
1897
# set remembering all new keys
1898
self.new_keys = set()
1900
self.new_keys = None
1906
self.new_keys.clear()
1908
def add_references(self, key, refs):
1909
# Record the new references
1910
for referenced in refs:
1912
needed_by = self.refs[referenced]
1914
needed_by = self.refs[referenced] = set()
1916
# Discard references satisfied by the new key
1919
def get_new_keys(self):
1920
return self.new_keys
1922
def get_unsatisfied_refs(self):
1923
return self.refs.iterkeys()
1925
def _satisfy_refs_for_key(self, key):
1929
# No keys depended on this key. That's ok.
1932
def add_key(self, key):
1933
# satisfy refs for key, and remember that we've seen this key.
1934
self._satisfy_refs_for_key(key)
1935
if self.new_keys is not None:
1936
self.new_keys.add(key)
1938
def satisfy_refs_for_keys(self, keys):
1940
self._satisfy_refs_for_key(key)
1942
def get_referrers(self):
1944
for referrers in self.refs.itervalues():
1945
result.update(referrers)