~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/repofmt/knitrepo.py

Merge with stored_kind

=== modified file 'bzrlib/repofmt/knitrepo.py'
@@ -35,8 +35,8 @@
     xml6,
     xml7,
     )
-
 from bzrlib.decorators import needs_read_lock, needs_write_lock
+from bzrlib.knit import KnitVersionedFiles, _KndxIndex, _KnitKeyAccess
 from bzrlib.repository import (
     CommitBuilder,
     MetaDirRepository,
@@ -48,6 +48,7 @@
 from bzrlib.store.versioned import VersionedFileStore
 from bzrlib.trace import mutter, mutter_callsite
 from bzrlib.util import bencode
+from bzrlib.versionedfile import ConstantMapper, HashEscapedPrefixMapper
 
 
 class _KnitParentsProvider(object):
@@ -85,6 +86,34 @@
         return parent_map
 
 
+class _KnitsParentsProvider(object):
+
+    def __init__(self, knit, prefix=()):
+        """Create a parent provider for string keys mapped to tuple keys."""
+        self._knit = knit
+        self._prefix = prefix
+
+    def __repr__(self):
+        return 'KnitsParentsProvider(%r)' % self._knit
+
+    def get_parent_map(self, keys):
+        """See graph._StackedParentsProvider.get_parent_map"""
+        parent_map = self._knit.get_parent_map(
+            [self._prefix + (key,) for key in keys])
+        result = {}
+        for key, parents in parent_map.items():
+            revid = key[-1]
+            if len(parents) == 0:
+                parents = (_mod_revision.NULL_REVISION,)
+            else:
+                parents = tuple(parent[-1] for parent in parents)
+            result[revid] = parents
+        for revision_id in keys:
+            if revision_id == _mod_revision.NULL_REVISION:
+                result[revision_id] = ()
+        return result
+
+
 class KnitRepository(MetaDirRepository):
     """Knit format repository."""
 
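Note: the _KnitsParentsProvider added above bridges two key styles: KnitVersionedFiles indexes revisions by tuple keys such as ('rev-id',), while graph code works with plain revision-id strings. A minimal, self-contained sketch of that translation follows (FakeKnit and the sample ids are stand-ins, not bzrlib objects, and the special-casing of NULL_REVISION requests is omitted):

# Stand-in for a KnitVersionedFiles object: tuple keys in, tuple-keyed
# parents out.  'null:' mirrors bzrlib's NULL_REVISION marker.
NULL_REVISION = 'null:'

class FakeKnit(object):
    def __init__(self, parent_map):
        self._parent_map = parent_map
    def get_parent_map(self, keys):
        return dict((k, self._parent_map[k])
                    for k in keys if k in self._parent_map)

def string_key_parent_map(knit, revision_ids, prefix=()):
    # Same translation as _KnitsParentsProvider.get_parent_map: wrap each
    # string id into a tuple key, then strip the tuples off the answer.
    parent_map = knit.get_parent_map([prefix + (r,) for r in revision_ids])
    result = {}
    for key, parents in parent_map.items():
        if len(parents) == 0:
            parents = (NULL_REVISION,)
        else:
            parents = tuple(parent[-1] for parent in parents)
        result[key[-1]] = parents
    return result

knit = FakeKnit({('rev-1',): (), ('rev-2',): (('rev-1',),)})
print(string_key_parent_map(knit, ['rev-1', 'rev-2']))
# -> {'rev-1': ('null:',), 'rev-2': ('rev-1',)}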
@@ -95,31 +124,73 @@
     _commit_builder_class = None
     _serializer = None
 
-    def __init__(self, _format, a_bzrdir, control_files, _revision_store,
-        control_store, text_store, _commit_builder_class, _serializer):
-        MetaDirRepository.__init__(self, _format, a_bzrdir, control_files,
-            _revision_store, control_store, text_store)
+    def __init__(self, _format, a_bzrdir, control_files, _commit_builder_class,
+        _serializer):
+        MetaDirRepository.__init__(self, _format, a_bzrdir, control_files)
         self._commit_builder_class = _commit_builder_class
         self._serializer = _serializer
         self._reconcile_fixes_text_parents = True
-        control_store.get_scope = self.get_transaction
-        text_store.get_scope = self.get_transaction
-        _revision_store.get_scope = self.get_transaction
 
     def _warn_if_deprecated(self):
         # This class isn't deprecated
        pass
 
-    def _inventory_add_lines(self, inv_vf, revid, parents, lines, check_content):
-        return inv_vf.add_lines_with_ghosts(revid, parents, lines,
-            check_content=check_content)[0]
-
     @needs_read_lock
     def _all_revision_ids(self):
         """See Repository.all_revision_ids()."""
-        # Knits get the revision graph from the index of the revision knit, so
-        # it's always possible even if they're on an unlistable transport.
-        return self._revision_store.all_revision_ids(self.get_transaction())
+        return [key[0] for key in self.revisions.keys()]
+
+    def _activate_new_inventory(self):
+        """Put a replacement inventory.new into use as inventories."""
+        # Copy the content across
+        t = self._transport
+        t.copy('inventory.new.kndx', 'inventory.kndx')
+        try:
+            t.copy('inventory.new.knit', 'inventory.knit')
+        except errors.NoSuchFile:
+            # empty inventories knit
+            t.delete('inventory.knit')
+        # delete the temp inventory
+        t.delete('inventory.new.kndx')
+        try:
+            t.delete('inventory.new.knit')
+        except errors.NoSuchFile:
+            # empty inventories knit
+            pass
+        # Force index reload (sanity check)
+        self.inventories._index._reset_cache()
+        self.inventories.keys()
+
+    def _backup_inventory(self):
+        t = self._transport
+        t.copy('inventory.kndx', 'inventory.backup.kndx')
+        t.copy('inventory.knit', 'inventory.backup.knit')
+
+    def _move_file_id(self, from_id, to_id):
+        t = self._transport.clone('knits')
+        from_rel_url = self.texts._index._mapper.map((from_id, None))
+        to_rel_url = self.texts._index._mapper.map((to_id, None))
+        # We expect both files to always exist in this case.
+        for suffix in ('.knit', '.kndx'):
+            t.rename(from_rel_url + suffix, to_rel_url + suffix)
+
+    def _remove_file_id(self, file_id):
+        t = self._transport.clone('knits')
+        rel_url = self.texts._index._mapper.map((file_id, None))
+        for suffix in ('.kndx', '.knit'):
+            try:
+                t.delete(rel_url + suffix)
+            except errors.NoSuchFile:
+                pass
+
+    def _temp_inventories(self):
+        result = self._format._get_inventories(self._transport, self,
+            'inventory.new')
+        # Reconciling when the output has no revisions would result in no
+        # writes - but we want to ensure there is an inventory for
+        # compatibility with older clients that don't lazy-load.
+        result.get_parent_map([('A',)])
+        return result
 
     def fileid_involved_between_revs(self, from_revid, to_revid):
         """Find file_id(s) which are involved in the changes between revisions.
@@ -147,70 +218,11 @@
         return self._fileid_involved_by_set(changed)
 
     @needs_read_lock
-    def get_ancestry(self, revision_id, topo_sorted=True):
-        """Return a list of revision-ids integrated by a revision.
-
-        This is topologically sorted, unless 'topo_sorted' is specified as
-        False.
-        """
-        if _mod_revision.is_null(revision_id):
-            return [None]
-        vf = self._get_revision_vf()
-        try:
-            return [None] + vf.get_ancestry(revision_id, topo_sorted)
-        except errors.RevisionNotPresent:
-            raise errors.NoSuchRevision(self, revision_id)
-
-    @symbol_versioning.deprecated_method(symbol_versioning.one_two)
-    def get_data_stream(self, revision_ids):
-        """See Repository.get_data_stream.
-
-        Deprecated in 1.2 for get_data_stream_for_search.
-        """
-        search_result = self.revision_ids_to_search_result(set(revision_ids))
-        return self.get_data_stream_for_search(search_result)
-
-    def get_data_stream_for_search(self, search):
-        """See Repository.get_data_stream_for_search."""
-        item_keys = self.item_keys_introduced_by(search.get_keys())
-        for knit_kind, file_id, versions in item_keys:
-            name = (knit_kind,)
-            if knit_kind == 'file':
-                name = ('file', file_id)
-                knit = self.weave_store.get_weave_or_empty(
-                    file_id, self.get_transaction())
-            elif knit_kind == 'inventory':
-                knit = self.get_inventory_weave()
-            elif knit_kind == 'revisions':
-                knit = self._revision_store.get_revision_file(
-                    self.get_transaction())
-            elif knit_kind == 'signatures':
-                knit = self._revision_store.get_signature_file(
-                    self.get_transaction())
-            else:
-                raise AssertionError('Unknown knit kind %r' % (knit_kind,))
-            yield name, _get_stream_as_bytes(knit, versions)
-
-    @needs_read_lock
     def get_revision(self, revision_id):
         """Return the Revision object for a named revision"""
         revision_id = osutils.safe_revision_id(revision_id)
         return self.get_revision_reconcile(revision_id)
 
-    def _get_revision_vf(self):
-        """:return: a versioned file containing the revisions."""
-        vf = self._revision_store.get_revision_file(self.get_transaction())
-        return vf
-
-    def has_revisions(self, revision_ids):
-        """See Repository.has_revisions()."""
-        result = set()
-        transaction = self.get_transaction()
-        for revision_id in revision_ids:
-            if self._revision_store.has_revision_id(revision_id, transaction):
-                result.add(revision_id)
-        return result
-
     @needs_write_lock
     def reconcile(self, other=None, thorough=False):
         """Reconcile this repository."""
@@ -220,7 +232,7 @@
         return reconciler
 
     def _make_parents_provider(self):
-        return _KnitParentsProvider(self._get_revision_vf())
+        return _KnitsParentsProvider(self.revisions)
 
     def _find_inconsistent_revision_parents(self):
         """Find revisions with different parent lists in the revision object
@@ -231,14 +243,15 @@
         """
         if not self.is_locked():
            raise AssertionError()
-        vf = self._get_revision_vf()
-        for index_version in vf.versions():
-            parents_according_to_index = tuple(vf.get_parents_with_ghosts(
-                index_version))
-            revision = self.get_revision(index_version)
+        vf = self.revisions
+        for index_version in vf.keys():
+            parent_map = vf.get_parent_map([index_version])
+            parents_according_to_index = tuple(parent[-1] for parent in
+                parent_map[index_version])
+            revision = self.get_revision(index_version[-1])
             parents_according_to_revision = tuple(revision.parent_ids)
             if parents_according_to_index != parents_according_to_revision:
-                yield (index_version, parents_according_to_index,
+                yield (index_version[-1], parents_according_to_index,
                     parents_according_to_revision)
 
     def _check_for_inconsistent_revision_parents(self):
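Note: the _backup_inventory, _temp_inventories and _activate_new_inventory methods added earlier in this diff are the hooks the knit reconciler uses to rebuild the inventory knit with corrected text parents. The driver itself lives in bzrlib.reconcile and is not part of this change; one plausible ordering, inferred only from the method names and docstrings, looks roughly like this (write_fixed_inventories is a hypothetical callback, not a bzrlib API):

def rebuild_inventory_knit(repo, write_fixed_inventories):
    # 'repo' is a write-locked KnitRepository as defined in this file.
    repo._backup_inventory()             # copy inventory.* to inventory.backup.*
    new_inventories = repo._temp_inventories()   # knit backed by inventory.new.*
    # Hypothetical step: regenerate each inventory text with its corrected
    # parent list into the temporary knit.
    write_fixed_inventories(new_inventories)
    repo._activate_new_inventory()       # inventory.new.* replaces inventory.*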
@@ -282,43 +295,37 @@
     # External lookups are not supported in this format.
     supports_external_lookups = False
 
-    def _get_control_store(self, repo_transport, control_files):
-        """Return the control store for this repository."""
-        return VersionedFileStore(
-            repo_transport,
-            prefixed=False,
-            file_mode=control_files._file_mode,
-            versionedfile_class=knit.make_file_knit,
-            versionedfile_kwargs={'factory':knit.KnitPlainFactory()},
-            )
-
-    def _get_revision_store(self, repo_transport, control_files):
-        """See RepositoryFormat._get_revision_store()."""
-        versioned_file_store = VersionedFileStore(
-            repo_transport,
-            file_mode=control_files._file_mode,
-            prefixed=False,
-            precious=True,
-            versionedfile_class=knit.make_file_knit,
-            versionedfile_kwargs={'delta':False,
-                                  'factory':knit.KnitPlainFactory(),
-                                 },
-            escaped=True,
-            )
-        return KnitRevisionStore(versioned_file_store)
-
-    def _get_text_store(self, transport, control_files):
-        """See RepositoryFormat._get_text_store()."""
-        return self._get_versioned_file_store('knits',
-                                  transport,
-                                  control_files,
-                                  versionedfile_class=knit.make_file_knit,
-                                  versionedfile_kwargs={
-                                      'create_parent_dir':True,
-                                      'delay_create':True,
-                                      'dir_mode':control_files._dir_mode,
-                                  },
-                                  escaped=True)
+    def _get_inventories(self, repo_transport, repo, name='inventory'):
+        mapper = ConstantMapper(name)
+        index = _KndxIndex(repo_transport, mapper, repo.get_transaction,
+            repo.is_write_locked, repo.is_locked)
+        access = _KnitKeyAccess(repo_transport, mapper)
+        return KnitVersionedFiles(index, access, annotated=False)
+
+    def _get_revisions(self, repo_transport, repo):
+        mapper = ConstantMapper('revisions')
+        index = _KndxIndex(repo_transport, mapper, repo.get_transaction,
+            repo.is_write_locked, repo.is_locked)
+        access = _KnitKeyAccess(repo_transport, mapper)
+        return KnitVersionedFiles(index, access, max_delta_chain=0,
+            annotated=False)
+
+    def _get_signatures(self, repo_transport, repo):
+        mapper = ConstantMapper('signatures')
+        index = _KndxIndex(repo_transport, mapper, repo.get_transaction,
+            repo.is_write_locked, repo.is_locked)
+        access = _KnitKeyAccess(repo_transport, mapper)
+        return KnitVersionedFiles(index, access, max_delta_chain=0,
+            annotated=False)
+
+    def _get_texts(self, repo_transport, repo):
+        mapper = HashEscapedPrefixMapper()
+        base_transport = repo_transport.clone('knits')
+        index = _KndxIndex(base_transport, mapper, repo.get_transaction,
+            repo.is_write_locked, repo.is_locked)
+        access = _KnitKeyAccess(base_transport, mapper)
+        return KnitVersionedFiles(index, access, max_delta_chain=200,
+            annotated=True)
 
     def initialize(self, a_bzrdir, shared=False):
         """Create a knit format 1 repository.
@@ -337,16 +344,16 @@
         repo_transport = a_bzrdir.get_repository_transport(None)
         control_files = lockable_files.LockableFiles(repo_transport,
                                 'lock', lockdir.LockDir)
-        control_store = self._get_control_store(repo_transport, control_files)
         transaction = transactions.WriteTransaction()
-        # trigger a write of the inventory store.
-        control_store.get_weave_or_empty('inventory', transaction)
-        _revision_store = self._get_revision_store(repo_transport, control_files)
+        result = self.open(a_bzrdir=a_bzrdir, _found=True)
+        result.lock_write()
         # the revision id here is irrelevant: it will not be stored, and cannot
-        # already exist.
-        _revision_store.has_revision_id('A', transaction)
-        _revision_store.get_signature_file(transaction)
-        return self.open(a_bzrdir=a_bzrdir, _found=True)
+        # already exist, we do this to create files on disk for older clients.
+        result.inventories.get_parent_map([('A',)])
+        result.revisions.get_parent_map([('A',)])
+        result.signatures.get_parent_map([('A',)])
+        result.unlock()
+        return result
 
     def open(self, a_bzrdir, _found=False, _override_transport=None):
         """See RepositoryFormat.open().
@@ -363,17 +370,17 @@
             repo_transport = a_bzrdir.get_repository_transport(None)
         control_files = lockable_files.LockableFiles(repo_transport,
                                 'lock', lockdir.LockDir)
-        text_store = self._get_text_store(repo_transport, control_files)
-        control_store = self._get_control_store(repo_transport, control_files)
-        _revision_store = self._get_revision_store(repo_transport, control_files)
-        return self.repository_class(_format=self,
+        repo = self.repository_class(_format=self,
                               a_bzrdir=a_bzrdir,
                               control_files=control_files,
-                              _revision_store=_revision_store,
-                              control_store=control_store,
-                              text_store=text_store,
                               _commit_builder_class=self._commit_builder_class,
                               _serializer=self._serializer)
+        repo.revisions = self._get_revisions(repo_transport, repo)
+        repo.signatures = self._get_signatures(repo_transport, repo)
+        repo.inventories = self._get_inventories(repo_transport, repo)
+        repo.texts = self._get_texts(repo_transport, repo)
+        repo._transport = repo_transport
+        return repo
 
 
 class RepositoryFormatKnit1(RepositoryFormatKnit):
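Note: the _get_inventories, _get_revisions, _get_signatures and _get_texts factories added above, whose results open() now attaches to the repository, all follow one recipe: pick a mapper (ConstantMapper for a single knit, HashEscapedPrefixMapper for per-file texts), build a _KndxIndex bound to the repository's transaction and lock callbacks, wrap the transport in _KnitKeyAccess, and hand both to KnitVersionedFiles. A condensed sketch of that shared recipe, illustrative only (the real methods differ in mapper, delta chain and annotation):

from bzrlib.knit import KnitVersionedFiles, _KndxIndex, _KnitKeyAccess
from bzrlib.versionedfile import ConstantMapper

def make_knit_vf(transport, mapper, repo, **kwargs):
    index = _KndxIndex(transport, mapper, repo.get_transaction,
        repo.is_write_locked, repo.is_locked)
    access = _KnitKeyAccess(transport, mapper)
    return KnitVersionedFiles(index, access, **kwargs)

# e.g. the revision knit: one revisions.knit/revisions.kndx pair, stored as
# full texts (max_delta_chain=0) without annotations:
#   make_knit_vf(repo_transport, ConstantMapper('revisions'), repo,
#                max_delta_chain=0, annotated=False)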
@@ -500,26 +507,3 @@
     def get_format_description(self):
         """See RepositoryFormat.get_format_description()."""
         return "Knit repository format 4"
-
-
-def _get_stream_as_bytes(knit, required_versions):
-    """Generate a serialised data stream.
-
-    The format is a bencoding of a list.  The first element of the list is a
-    string of the format signature, then each subsequent element is a list
-    corresponding to a record.  Those lists contain:
-
-      * a version id
-      * a list of options
-      * a list of parents
-      * the bytes
-
-    :returns: a bencoded list.
-    """
-    knit_stream = knit.get_data_stream(required_versions)
-    format_signature, data_list, callable = knit_stream
-    data = []
-    data.append(format_signature)
-    for version, options, length, parents in data_list:
-        data.append([version, options, parents, callable(length)])
-    return bencode.bencode(data)
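Note: _get_stream_as_bytes, removed above along with the deprecated get_data_stream methods, serialised knit records with bzrlib.util.bencode. For reference, a toy illustration of the shape it produced, assuming standard bencode encoding rules (the record values here are made up):

from bzrlib.util import bencode

# [format signature, [version id, options, parents, raw bytes], ...]
data = ['format-sig',
        ['rev-1', ['fulltext'], [], 'record bytes']]
print(bencode.bencode(data))
# -> 'l10:format-sigl5:rev-1l8:fulltextele12:record bytesee'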