~bzr-pqm/bzr/bzr.dev


Viewing changes to bzrlib/repofmt/pack_repo.py

  • Committer: Martin Pool
  • Date: 2010-04-01 04:41:18 UTC
  • mto: This revision was merged to the branch mainline in revision 5128.
  • Revision ID: mbp@sourcefrog.net-20100401044118-shyctqc02ob08ngz
ignore .testrepository

@@ -49,7 +49,6 @@
 """)
 from bzrlib import (
     bzrdir,
-    btree_index,
     errors,
     lockable_files,
     lockdir,
@@ -57,17 +56,19 @@
     )
 
 from bzrlib.decorators import needs_write_lock, only_raises
+from bzrlib.btree_index import (
+    BTreeGraphIndex,
+    BTreeBuilder,
+    )
 from bzrlib.index import (
     GraphIndex,
     InMemoryGraphIndex,
     )
-from bzrlib.lock import LogicalLockResult
 from bzrlib.repofmt.knitrepo import KnitRepository
 from bzrlib.repository import (
     CommitBuilder,
     MetaDirRepositoryFormat,
     RepositoryFormat,
-    RepositoryWriteLockResult,
     RootCommitBuilder,
     StreamSource,
     )
@@ -228,13 +229,11 @@
         unlimited_cache = False
         if index_type == 'chk':
             unlimited_cache = True
-        index = self.index_class(self.index_transport,
-                    self.index_name(index_type, self.name),
-                    self.index_sizes[self.index_offset(index_type)],
-                    unlimited_cache=unlimited_cache)
-        if index_type == 'chk':
-            index._leaf_factory = btree_index._gcchk_factory
-        setattr(self, index_type + '_index', index)
+        setattr(self, index_type + '_index',
+            self.index_class(self.index_transport,
+                self.index_name(index_type, self.name),
+                self.index_sizes[self.index_offset(index_type)],
+                unlimited_cache=unlimited_cache))
 
 
 class ExistingPack(Pack):
@@ -588,6 +587,26 @@
                                              flush_func=flush_func)
         self.add_callback = None
 
+    def replace_indices(self, index_to_pack, indices):
+        """Replace the current mappings with fresh ones.
+
+        This should probably not be used eventually, rather incremental add and
+        removal of indices. It has been added during refactoring of existing
+        code.
+
+        :param index_to_pack: A mapping from index objects to
+            (transport, name) tuples for the pack file data.
+        :param indices: A list of indices.
+        """
+        # refresh the revision pack map dict without replacing the instance.
+        self.index_to_pack.clear()
+        self.index_to_pack.update(index_to_pack)
+        # XXX: API break - clearly a 'replace' method would be good?
+        self.combined_index._indices[:] = indices
+        # the current add nodes callback for the current writable index if
+        # there is one.
+        self.add_callback = None
+
     def add_index(self, index, pack):
         """Add index to the aggregate, which is an index for Pack pack.
 
@@ -600,7 +619,7 @@
         # expose it to the index map
         self.index_to_pack[index] = pack.access_tuple()
         # put it at the front of the linear index list
-        self.combined_index.insert_index(0, index, pack.name)
+        self.combined_index.insert_index(0, index)
 
     def add_writable_index(self, index, pack):
         """Add an index which is able to have data added to it.
@@ -626,7 +645,6 @@
         self.data_access.set_writer(None, None, (None, None))
         self.index_to_pack.clear()
         del self.combined_index._indices[:]
-        del self.combined_index._index_names[:]
         self.add_callback = None
 
     def remove_index(self, index):
@@ -635,9 +653,7 @@
         :param index: An index from the pack parameter.
         """
         del self.index_to_pack[index]
-        pos = self.combined_index._indices.index(index)
-        del self.combined_index._indices[pos]
-        del self.combined_index._index_names[pos]
+        self.combined_index._indices.remove(index)
         if (self.add_callback is not None and
             getattr(index, 'add_nodes', None) == self.add_callback):
             self.add_callback = None
@@ -1399,20 +1415,11 @@
         self.inventory_index = AggregateIndex(self.reload_pack_names, flush)
         self.text_index = AggregateIndex(self.reload_pack_names, flush)
         self.signature_index = AggregateIndex(self.reload_pack_names, flush)
-        all_indices = [self.revision_index, self.inventory_index,
-                self.text_index, self.signature_index]
         if use_chk_index:
             self.chk_index = AggregateIndex(self.reload_pack_names, flush)
-            all_indices.append(self.chk_index)
         else:
             # used to determine if we're using a chk_index elsewhere.
             self.chk_index = None
-        # Tell all the CombinedGraphIndex objects about each other, so they can
-        # share hints about which pack names to search first.
-        all_combined = [agg_idx.combined_index for agg_idx in all_indices]
-        for combined_idx in all_combined:
-            combined_idx.set_sibling_indices(
-                set(all_combined).difference([combined_idx]))
         # resumed packs
         self._resumed_packs = []
 
@@ -1561,7 +1568,7 @@
         """Is the collection already packed?"""
         return not (self.repo._format.pack_compresses or (len(self._names) > 1))
 
-    def pack(self, hint=None, clean_obsolete_packs=False):
+    def pack(self, hint=None):
         """Pack the pack collection totally."""
         self.ensure_loaded()
         total_packs = len(self._names)
@@ -1583,9 +1590,6 @@
                 pack_operations[-1][1].append(pack)
         self._execute_pack_operations(pack_operations, OptimisingPacker)
 
-        if clean_obsolete_packs:
-            self._clear_obsolete_packs()
-
     def plan_autopack_combinations(self, existing_packs, pack_distribution):
         """Plan a pack operation.
 
@@ -1679,7 +1683,7 @@
             txt_index = self._make_index(name, '.tix')
             sig_index = self._make_index(name, '.six')
             if self.chk_index is not None:
-                chk_index = self._make_index(name, '.cix', is_chk=True)
+                chk_index = self._make_index(name, '.cix', unlimited_cache=True)
             else:
                 chk_index = None
             result = ExistingPack(self._pack_transport, name, rev_index,
@@ -1705,7 +1709,7 @@
             sig_index = self._make_index(name, '.six', resume=True)
             if self.chk_index is not None:
                 chk_index = self._make_index(name, '.cix', resume=True,
-                                             is_chk=True)
+                                             unlimited_cache=True)
             else:
                 chk_index = None
             result = self.resumed_pack_factory(name, rev_index, inv_index,
@@ -1741,7 +1745,7 @@
         return self._index_class(self.transport, 'pack-names', None
                 ).iter_all_entries()
 
-    def _make_index(self, name, suffix, resume=False, is_chk=False):
+    def _make_index(self, name, suffix, resume=False, unlimited_cache=False):
         size_offset = self._suffix_offsets[suffix]
         index_name = name + suffix
         if resume:
@@ -1750,11 +1754,8 @@
         else:
             transport = self._index_transport
             index_size = self._names[name][size_offset]
-        index = self._index_class(transport, index_name, index_size,
-                                  unlimited_cache=is_chk)
-        if is_chk and self._index_class is btree_index.BTreeGraphIndex:
-            index._leaf_factory = btree_index._gcchk_factory
-        return index
+        return self._index_class(transport, index_name, index_size,
+                                 unlimited_cache=unlimited_cache)
 
     def _max_pack_count(self, total_revisions):
         """Return the maximum number of packs to use for total revisions.
@@ -2344,10 +2345,6 @@
         return self._write_lock_count
 
     def lock_write(self, token=None):
-        """Lock the repository for writes.
-
-        :return: A bzrlib.repository.RepositoryWriteLockResult.
-        """
         locked = self.is_locked()
         if not self._write_lock_count and locked:
             raise errors.ReadOnlyError(self)
@@ -2362,13 +2359,8 @@
                 # Writes don't affect fallback repos
                 repo.lock_read()
             self._refresh_data()
-        return RepositoryWriteLockResult(self.unlock, None)
 
     def lock_read(self):
-        """Lock the repository for reads.
-
-        :return: A bzrlib.lock.LogicalLockResult.
-        """
         locked = self.is_locked()
         if self._write_lock_count:
             self._write_lock_count += 1
@@ -2381,7 +2373,6 @@
             for repo in self._fallback_repositories:
                 repo.lock_read()
             self._refresh_data()
-        return LogicalLockResult(self.unlock)
 
     def leave_lock_in_place(self):
         # not supported - raise an error
@@ -2392,13 +2383,13 @@
         raise NotImplementedError(self.dont_leave_lock_in_place)
 
     @needs_write_lock
-    def pack(self, hint=None, clean_obsolete_packs=False):
+    def pack(self, hint=None):
         """Compress the data within the repository.
 
         This will pack all the data to a single pack. In future it may
         recompress deltas or do other such expensive operations.
         """
-        self._pack_collection.pack(hint=hint, clean_obsolete_packs=clean_obsolete_packs)
+        self._pack_collection.pack(hint=hint)
 
     @needs_write_lock
     def reconcile(self, other=None, thorough=False):
@@ -2560,9 +2551,7 @@
         utf8_files = [('format', self.get_format_string())]
 
         self._upload_blank_content(a_bzrdir, dirs, files, utf8_files, shared)
-        repository = self.open(a_bzrdir=a_bzrdir, _found=True)
-        self._run_post_repo_init_hooks(repository, a_bzrdir, shared)
-        return repository
+        return self.open(a_bzrdir=a_bzrdir, _found=True)
 
     def open(self, a_bzrdir, _found=False, _override_transport=None):
         """See RepositoryFormat.open().
@@ -2831,8 +2820,8 @@
     _commit_builder_class = PackCommitBuilder
     supports_external_lookups = True
     # What index classes to use
-    index_builder_class = btree_index.BTreeBuilder
-    index_class = btree_index.BTreeGraphIndex
+    index_builder_class = BTreeBuilder
+    index_class = BTreeGraphIndex
 
     @property
     def _serializer(self):
@@ -2867,8 +2856,8 @@
     supports_tree_reference = False # no subtrees
     supports_external_lookups = True
    # What index classes to use
-    index_builder_class = btree_index.BTreeBuilder
-    index_class = btree_index.BTreeGraphIndex
+    index_builder_class = BTreeBuilder
+    index_class = BTreeGraphIndex
 
     @property
     def _serializer(self):
@@ -2909,8 +2898,8 @@
     supports_tree_reference = True
     supports_external_lookups = True
     # What index classes to use
-    index_builder_class = btree_index.BTreeBuilder
-    index_class = btree_index.BTreeGraphIndex
+    index_builder_class = BTreeBuilder
+    index_class = BTreeGraphIndex
 
     @property
     def _serializer(self):