    def __repr__(self):
        return 'KnitParentsProvider(%r)' % self._knit

    @symbol_versioning.deprecated_method(symbol_versioning.one_one)
    def get_parents(self, revision_ids):
        """See graph._StackedParentsProvider.get_parents"""
        parent_map = self.get_parent_map(revision_ids)
        return [parent_map.get(r, None) for r in revision_ids]

    def get_parent_map(self, keys):
        """See graph._StackedParentsProvider.get_parent_map"""
        parent_map = {}
        for revision_id in keys:
            if revision_id is None:
                raise ValueError('get_parent_map(None) is not valid')
            if revision_id == _mod_revision.NULL_REVISION:
                parent_map[revision_id] = ()
            else:
                try:
                    parents = tuple(
                        self._knit.get_parents_with_ghosts(revision_id))
                except errors.RevisionNotPresent:
                    continue
                else:
                    if len(parents) == 0:
                        parents = (_mod_revision.NULL_REVISION,)
                parent_map[revision_id] = parents
        return parent_map
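
# Illustrative sketch (not part of the original file): one way a parents
# provider like the class above is consumed.  Any object exposing
# get_parent_map(keys) -> {revision_id: parents_tuple} works here; the helper
# name is made up for the example.
def _example_leftmost_ancestry(parents_provider, revision_id):
    """Follow leftmost parents until NULL_REVISION or a ghost is reached."""
    history = []
    current = revision_id
    while current != _mod_revision.NULL_REVISION:
        parents = parents_provider.get_parent_map([current]).get(current)
        if parents is None:
            # Not present in the knit (a ghost): stop walking.
            break
        history.append(current)
        if not parents:
            break
        current = parents[0]
    return history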


class _KnitsParentsProvider(object):

    def __init__(self, knit, prefix=()):
        """Create a parent provider for string keys mapped to tuple keys."""
        self._knit = knit
        self._prefix = prefix

    def __repr__(self):
        return 'KnitsParentsProvider(%r)' % self._knit

    def get_parent_map(self, keys):
        """See graph._StackedParentsProvider.get_parent_map"""
        parent_map = self._knit.get_parent_map(
            [self._prefix + (key,) for key in keys])
        result = {}
        for key, parents in parent_map.items():
            revid = key[-1]
            if len(parents) == 0:
                parents = (_mod_revision.NULL_REVISION,)
            else:
                parents = tuple(parent[-1] for parent in parents)
            result[revid] = parents
        for revision_id in keys:
            if revision_id == _mod_revision.NULL_REVISION:
                result[revision_id] = ()
        return result
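
# Illustrative sketch (not part of the original file): how the prefix mapping
# in _KnitsParentsProvider turns plain revision id strings into the
# ('prefix', revision_id) tuple keys used by the underlying knit and back.
# The stub knit and both example names below are made up; the stub only
# mimics the get_parent_map() call used above.
class _StubTupleKeyKnit(object):

    def __init__(self, parent_map):
        self._parent_map = parent_map

    def get_parent_map(self, keys):
        return dict((key, self._parent_map[key])
                    for key in keys if key in self._parent_map)


def _example_prefixed_lookup():
    stub = _StubTupleKeyKnit({
        ('file-id', 'rev-2'): (('file-id', 'rev-1'),),
        ('file-id', 'rev-1'): (),
    })
    provider = _KnitsParentsProvider(stub, prefix=('file-id',))
    # Returns {'rev-2': ('rev-1',), 'rev-1': (NULL_REVISION,)}.
    return provider.get_parent_map(['rev-2', 'rev-1'])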


class KnitRepository(MetaDirRepository):

    _commit_builder_class = None
    _serializer = None

    def __init__(self, _format, a_bzrdir, control_files, _commit_builder_class,
        _serializer):
        MetaDirRepository.__init__(self, _format, a_bzrdir, control_files)
        self._commit_builder_class = _commit_builder_class
        self._serializer = _serializer
        self._reconcile_fixes_text_parents = True
        self._fetch_uses_deltas = True
        self._fetch_order = 'topological'

    def _warn_if_deprecated(self):
        # This class isn't deprecated
        pass

    def _inventory_add_lines(self, inv_vf, revid, parents, lines, check_content):
        return inv_vf.add_lines_with_ghosts(revid, parents, lines,
            check_content=check_content)[0]

    def _all_revision_ids(self):
        """See Repository.all_revision_ids()."""
        # Knits get the revision graph from the index of the revision knit, so
        # it's always possible even if they're on an unlistable transport.
        return [key[0] for key in self.revisions.keys()]

    def _activate_new_inventory(self):
        """Put a replacement inventory.new into use as inventories."""
        # Copy the content across
        t = self._transport
        t.copy('inventory.new.kndx', 'inventory.kndx')
        try:
            t.copy('inventory.new.knit', 'inventory.knit')
        except errors.NoSuchFile:
            # empty inventories knit
            t.delete('inventory.knit')
        # delete the temp inventory
        t.delete('inventory.new.kndx')
        try:
            t.delete('inventory.new.knit')
        except errors.NoSuchFile:
            # empty inventories knit
            pass
        # Force index reload (sanity check)
        self.inventories._index._reset_cache()
        self.inventories.keys()

    def _backup_inventory(self):
        t = self._transport
        t.copy('inventory.kndx', 'inventory.backup.kndx')
        t.copy('inventory.knit', 'inventory.backup.knit')

    def _move_file_id(self, from_id, to_id):
        t = self._transport.clone('knits')
        from_rel_url = self.texts._index._mapper.map((from_id, None))
        to_rel_url = self.texts._index._mapper.map((to_id, None))
        # We expect both files to always exist in this case.
        for suffix in ('.knit', '.kndx'):
            t.rename(from_rel_url + suffix, to_rel_url + suffix)

    def _remove_file_id(self, file_id):
        t = self._transport.clone('knits')
        rel_url = self.texts._index._mapper.map((file_id, None))
        for suffix in ('.kndx', '.knit'):
            try:
                t.delete(rel_url + suffix)
            except errors.NoSuchFile:
                pass
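
    # Illustrative sketch (not part of the original class): the two methods
    # above rely on the text index's mapper to turn a (file_id, None) key into
    # the relative path that the '.knit'/'.kndx' suffixes are appended to.
    # This helper only demonstrates that call; its name is made up.
    def _example_file_id_paths(self, file_id):
        rel_url = self.texts._index._mapper.map((file_id, None))
        return [rel_url + suffix for suffix in ('.kndx', '.knit')]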

    def _temp_inventories(self):
        result = self._format._get_inventories(self._transport, self,
            'inventory.new')
        # Reconciling when the output has no revisions would result in no
        # writes - but we want to ensure there is an inventory for
        # compatibility with older clients that don't lazy-load.
        result.get_parent_map([('A',)])
        return result

    def fileid_involved_between_revs(self, from_revid, to_revid):
        """Find file_id(s) which are involved in the changes between revisions.
        """
        return self._fileid_involved_by_set(changed)

    def get_ancestry(self, revision_id, topo_sorted=True):
        """Return a list of revision-ids integrated by a revision.

        This is topologically sorted, unless 'topo_sorted' is specified as
        False.
        """
        if _mod_revision.is_null(revision_id):
            return [None]
        vf = self._get_revision_vf()
        try:
            return [None] + vf.get_ancestry(revision_id, topo_sorted)
        except errors.RevisionNotPresent:
            raise errors.NoSuchRevision(self, revision_id)

    def get_data_stream(self, revision_ids):
        """See Repository.get_data_stream."""
        item_keys = self.item_keys_introduced_by(revision_ids)
        for knit_kind, file_id, versions in item_keys:
            name = (knit_kind,)
            if knit_kind == 'file':
                name = ('file', file_id)
                knit = self.weave_store.get_weave_or_empty(
                    file_id, self.get_transaction())
            elif knit_kind == 'inventory':
                knit = self.get_inventory_weave()
            elif knit_kind == 'revisions':
                knit = self._revision_store.get_revision_file(
                    self.get_transaction())
            elif knit_kind == 'signatures':
                knit = self._revision_store.get_signature_file(
                    self.get_transaction())
            else:
                raise AssertionError('Unknown knit kind %r' % (knit_kind,))
            yield name, _get_stream_as_bytes(knit, versions)

    def get_revision(self, revision_id):
        """Return the Revision object for a named revision"""
        revision_id = osutils.safe_revision_id(revision_id)
        return self.get_revision_reconcile(revision_id)

    def get_revision_graph(self, revision_id=None):
        """Return a dictionary containing the revision graph.

        :param revision_id: The revision_id to get a graph from. If None, then
            the entire revision graph is returned. This is a deprecated mode of
            operation and will be removed in the future.
        :return: a dictionary of revision_id->revision_parents_list.
        """
        if 'evil' in debug.debug_flags:
            mutter_callsite(3,
                "get_revision_graph scales with size of history.")
        # special case NULL_REVISION
        if revision_id == _mod_revision.NULL_REVISION:
            return {}
        a_weave = self._get_revision_vf()
        if revision_id is None:
            return a_weave.get_graph()
        if revision_id not in a_weave:
            raise errors.NoSuchRevision(self, revision_id)
        else:
            # add what can be reached from revision_id
            return a_weave.get_graph([revision_id])
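
    # Illustrative sketch (not part of the original class): the mapping
    # returned by get_revision_graph() above is a plain
    # {revision_id: [parent_id, ...]} dict, so simple queries need no extra
    # API.  The method name is made up.
    def _example_count_ancestors(self, revision_id):
        graph = self.get_revision_graph(revision_id)
        # Every key except the starting revision is an ancestor of it.
        return len(graph) - 1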

    def get_revision_graph_with_ghosts(self, revision_ids=None):
        """Return a graph of the revisions with ghosts marked as applicable.

        :param revision_ids: an iterable of revisions to graph or None for all.
        :return: a Graph object with the graph reachable from revision_ids.
        """
        if 'evil' in debug.debug_flags:
            mutter_callsite(3,
                "get_revision_graph_with_ghosts scales with size of history.")
        result = deprecated_graph.Graph()
        vf = self._get_revision_vf()
        versions = set(vf.versions())
        if not revision_ids:
            pending = set(self.all_revision_ids())
            required = set([])
        else:
            pending = set(revision_ids)
            # special case NULL_REVISION
            if _mod_revision.NULL_REVISION in pending:
                pending.remove(_mod_revision.NULL_REVISION)
            required = set(pending)
        done = set([])
        while len(pending):
            revision_id = pending.pop()
            if not revision_id in versions:
                if revision_id in required:
                    raise errors.NoSuchRevision(self, revision_id)
                # a ghost
                result.add_ghost(revision_id)
                # mark it as done so we don't try for it again.
                done.add(revision_id)
                continue
            parent_ids = vf.get_parents_with_ghosts(revision_id)
            for parent_id in parent_ids:
                # is this queued or done ?
                if (parent_id not in pending and
                    parent_id not in done):
                    # no, queue it.
                    pending.add(parent_id)
            result.add_node(revision_id, parent_ids)
            done.add(revision_id)
        return result
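
    # Illustrative sketch (not part of the original class): the walk above in
    # miniature, using plain sets instead of deprecated_graph.Graph to report
    # which reachable revisions are ghosts (referenced as parents but not
    # present in the revision knit).  NULL_REVISION handling is omitted; the
    # method name is made up.
    def _example_find_ghosts(self, revision_ids):
        vf = self._get_revision_vf()
        versions = set(vf.versions())
        pending = set(revision_ids)
        seen = set()
        ghosts = set()
        while pending:
            revision_id = pending.pop()
            seen.add(revision_id)
            if revision_id not in versions:
                ghosts.add(revision_id)
                continue
            for parent_id in vf.get_parents_with_ghosts(revision_id):
                if parent_id not in seen:
                    pending.add(parent_id)
        return ghosts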

    def _get_revision_vf(self):
        """:return: a versioned file containing the revisions."""
        vf = self._revision_store.get_revision_file(self.get_transaction())
        return vf

    def _get_history_vf(self):
        """Get a versionedfile whose history graph reflects all revisions.

        For knit repositories, this is the revision knit.
        """
        return self._get_revision_vf()

    @needs_write_lock
    def reconcile(self, other=None, thorough=False):
        """Reconcile this repository."""

        :returns: an iterator yielding tuples of (revision-id, parents-in-index,
            parents-in-revision).
        """
        if not self.is_locked():
            raise AssertionError()
        vf = self.revisions
        for index_version in vf.keys():
            parent_map = vf.get_parent_map([index_version])
            parents_according_to_index = tuple(parent[-1] for parent in
                parent_map[index_version])
            revision = self.get_revision(index_version[-1])
            parents_according_to_revision = tuple(revision.parent_ids)
            if parents_according_to_index != parents_according_to_revision:
                yield (index_version[-1], parents_according_to_index,
                    parents_according_to_revision)
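
    # Illustrative sketch (not part of the original class): consuming the
    # iterator documented above.  It is assumed here to be reachable as
    # _find_inconsistent_revision_parents (its def line is outside this
    # excerpt); each tuple pairs a revision id with the two parent lists that
    # disagree.  The method name is made up.
    def _example_inconsistent_parents_report(self):
        return dict((revision_id, (index_parents, revision_parents))
            for revision_id, index_parents, revision_parents
            in self._find_inconsistent_revision_parents())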

    def _check_for_inconsistent_revision_parents(self):
        inconsistencies = list(self._find_inconsistent_revision_parents())
        if inconsistencies:
            raise errors.BzrCheckError(
                "Revision knit has inconsistent parents.")

    # Set this attribute in derived classes to control the _serializer that the
    # repository objects will have passed to their constructor.
    _serializer = xml5.serializer_v5

    def _get_control_store(self, repo_transport, control_files):
        """Return the control store for this repository."""
        return VersionedFileStore(
            repo_transport,
            prefixed=False,
            file_mode=control_files._file_mode,
            versionedfile_class=knit.KnitVersionedFile,
            versionedfile_kwargs={'factory':knit.KnitPlainFactory()},
            )

    def _get_revision_store(self, repo_transport, control_files):
        """See RepositoryFormat._get_revision_store()."""
        versioned_file_store = VersionedFileStore(
            repo_transport,
            file_mode=control_files._file_mode,
            prefixed=False,
            precious=True,
            versionedfile_class=knit.KnitVersionedFile,
            versionedfile_kwargs={'delta':False,
                                  'factory':knit.KnitPlainFactory(),
                                 },
            escaped=True,
            )
        return KnitRevisionStore(versioned_file_store)

    def _get_text_store(self, transport, control_files):
        """See RepositoryFormat._get_text_store()."""
        return self._get_versioned_file_store('knits',
                                  transport,
                                  control_files,
                                  versionedfile_class=knit.KnitVersionedFile,
                                  versionedfile_kwargs={
                                      'create_parent_dir':True,
                                      'delay_create':True,
                                      'dir_mode':control_files._dir_mode,
                                  },
                                  escaped=True)

    # Knit based repositories handle ghosts reasonably well.
    supports_ghosts = True
    # External lookups are not supported in this format.
    supports_external_lookups = False

    def _get_inventories(self, repo_transport, repo, name='inventory'):
        mapper = ConstantMapper(name)
        index = _KndxIndex(repo_transport, mapper, repo.get_transaction,
            repo.is_write_locked, repo.is_locked)
        access = _KnitKeyAccess(repo_transport, mapper)
        return KnitVersionedFiles(index, access, annotated=False)

    def _get_revisions(self, repo_transport, repo):
        mapper = ConstantMapper('revisions')
        index = _KndxIndex(repo_transport, mapper, repo.get_transaction,
            repo.is_write_locked, repo.is_locked)
        access = _KnitKeyAccess(repo_transport, mapper)
        return KnitVersionedFiles(index, access, max_delta_chain=0,
            annotated=False)

    def _get_signatures(self, repo_transport, repo):
        mapper = ConstantMapper('signatures')
        index = _KndxIndex(repo_transport, mapper, repo.get_transaction,
            repo.is_write_locked, repo.is_locked)
        access = _KnitKeyAccess(repo_transport, mapper)
        return KnitVersionedFiles(index, access, max_delta_chain=0,
            annotated=False)

    def _get_texts(self, repo_transport, repo):
        mapper = HashEscapedPrefixMapper()
        base_transport = repo_transport.clone('knits')
        index = _KndxIndex(base_transport, mapper, repo.get_transaction,
            repo.is_write_locked, repo.is_locked)
        access = _KnitKeyAccess(base_transport, mapper)
        return KnitVersionedFiles(index, access, max_delta_chain=200,
            annotated=True)

    def initialize(self, a_bzrdir, shared=False):
        """Create a knit format 1 repository."""
        repo_transport = a_bzrdir.get_repository_transport(None)
        control_files = lockable_files.LockableFiles(repo_transport,
                                'lock', lockdir.LockDir)
        transaction = transactions.WriteTransaction()
        result = self.open(a_bzrdir=a_bzrdir, _found=True)
        # the revision id here is irrelevant: it will not be stored, and cannot
        # already exist, we do this to create files on disk for older clients.
        result.inventories.get_parent_map([('A',)])
        result.revisions.get_parent_map([('A',)])
        result.signatures.get_parent_map([('A',)])
        return result

    def open(self, a_bzrdir, _found=False, _override_transport=None):
        """See RepositoryFormat.open()."""
        format = RepositoryFormat.find_format(a_bzrdir)
        if _override_transport is not None:
            repo_transport = _override_transport
        else:
            repo_transport = a_bzrdir.get_repository_transport(None)
        control_files = lockable_files.LockableFiles(repo_transport,
                                'lock', lockdir.LockDir)
        repo = self.repository_class(_format=self,
                              a_bzrdir=a_bzrdir,
                              control_files=control_files,
                              _commit_builder_class=self._commit_builder_class,
                              _serializer=self._serializer)
        repo.revisions = self._get_revisions(repo_transport, repo)
        repo.signatures = self._get_signatures(repo_transport, repo)
        repo.inventories = self._get_inventories(repo_transport, repo)
        repo.texts = self._get_texts(repo_transport, repo)
        repo._transport = repo_transport
        return repo
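
# Illustrative sketch (not part of the original file): how a knit format
# object is typically exercised.  The function name and the use of
# bzrdir.BzrDir.create() for the containing bzrdir are assumptions made for
# the example.
def _example_create_and_open_knit1(url):
    a_bzrdir = bzrdir.BzrDir.create(url)
    format = RepositoryFormatKnit1()
    # initialize() sets up the repository and hands back an opened instance.
    format.initialize(a_bzrdir, shared=False)
    # A later open() finds the same repository; _found=True skips re-probing
    # the on-disk format string.
    return format.open(a_bzrdir, _found=True)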


class RepositoryFormatKnit1(RepositoryFormatKnit):

        return "Knit repository format 3"


def _get_stream_as_bytes(knit, required_versions):
    """Generate a serialised data stream.

    The format is a bencoding of a list. The first element of the list is a
    string of the format signature, then each subsequent element is a list
    corresponding to a record. Those lists contain:

      * a version id
      * a list of options
      * a list of parents
      * the bytes

    :returns: a bencoded list.
    """
    knit_stream = knit.get_data_stream(required_versions)
    format_signature, data_list, callable = knit_stream
    data = []
    data.append(format_signature)
    for version, options, length, parents in data_list:
        data.append([version, options, parents, callable(length)])
    return bencode.bencode(data)
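
# Illustrative sketch (not part of the original file): reversing the encoding
# performed by _get_stream_as_bytes above.  bencode.bdecode() is the matching
# decoder for bencode.bencode(); the variable names mirror the fields appended
# in the loop above, and the function name is made up.
def _example_unpack_stream_bytes(stream_bytes):
    decoded = bencode.bdecode(stream_bytes)
    format_signature = decoded[0]
    records = [(version, options, parents, raw_bytes)
               for version, options, parents, raw_bytes in decoded[1:]]
    return format_signature, records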


class RepositoryFormatKnit4(RepositoryFormatKnit):
    """Bzr repository knit format 4.

    This repository format has everything in format 3, except for
    tree-references, which are not used.
     - knits for file texts and inventory
     - hash subdirectory based stores.
     - knits for revisions and signatures
     - TextStores for revisions and signatures.
     - a format marker of its own
     - an optional 'shared-storage' flag
     - an optional 'no-working-trees' flag
     - a LockDir lock
     - support for recording full info about the tree root
    """

    repository_class = KnitRepository
    _commit_builder_class = RootCommitBuilder
    rich_root_data = True
    supports_tree_reference = False
    _serializer = xml6.serializer_v6

    def _get_matching_bzrdir(self):
        return bzrdir.format_registry.make_bzrdir('rich-root')

    def _ignore_setting_bzrdir(self, format):
        pass

    _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)

    def check_conversion_target(self, target_format):
        if not target_format.rich_root_data:
            raise errors.BadConversionTarget(
                'Does not support rich root data.', target_format)

    def get_format_string(self):
        """See RepositoryFormat.get_format_string()."""
        return 'Bazaar Knit Repository Format 4 (bzr 1.0)\n'

    def get_format_description(self):
        """See RepositoryFormat.get_format_description()."""
        return "Knit repository format 4"