~bzr-pqm/bzr/bzr.dev

Viewing changes to bzrlib/tests/test_repository.py

  • Committer: John Arbash Meinel
  • Date: 2009-12-10 17:16:19 UTC
  • mfrom: (4884 +trunk)
  • mto: This revision was merged to the branch mainline in revision 4889.
  • Revision ID: john@arbash-meinel.com-20091210171619-ehdcxjbl8afhq9g1
Bring in bzr.dev 4884

=== modified file 'bzrlib/tests/test_repository.py'
@@ -24,6 +24,7 @@
 
 from stat import S_ISDIR
 from StringIO import StringIO
+import sys
 
 import bzrlib
 from bzrlib.errors import (NotBranchError,
@@ -252,7 +253,14 @@
         tree = control.create_workingtree()
         tree.add(['foo'], ['Foo:Bar'], ['file'])
         tree.put_file_bytes_non_atomic('Foo:Bar', 'content\n')
-        tree.commit('first post', rev_id='first')
+        try:
+            tree.commit('first post', rev_id='first')
+        except errors.IllegalPath:
+            if sys.platform != 'win32':
+                raise
+            self.knownFailure('Foo:Bar cannot be used as a file-id on windows'
+                              ' in repo format 7')
+            return
         self.assertEqualDiff(
             '# bzr weave file v5\n'
             'i\n'
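
The hunk above only tolerates errors.IllegalPath on Windows, where a colon in a file-id is known to be unusable with the older disk format; on any other platform the exception is re-raised. A minimal standalone sketch of that pattern, assuming a default-format tree (the class and test names below are illustrative, not part of this diff):

import sys

from bzrlib import errors, tests


class TestColonFileId(tests.TestCaseWithTransport):
    # Illustrative only: shows the knownFailure-on-win32 idiom from the hunk.

    def test_commit_colon_file_id(self):
        tree = self.make_branch_and_tree('tree')
        self.build_tree(['tree/foo'])
        tree.add(['foo'], ['Foo:Bar'])
        try:
            tree.commit('first post', rev_id='first')
        except errors.IllegalPath:
            # Only Windows is expected to reject the colon; a failure
            # anywhere else is a real bug, so re-raise it.
            if sys.platform != 'win32':
                raise
            self.knownFailure('Foo:Bar cannot be used as a file-id on windows')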
@@ -681,7 +689,51 @@
         self.assertFalse(repo._format.supports_external_lookups)
 
 
-class Test2a(TestCaseWithTransport):
+class Test2a(tests.TestCaseWithMemoryTransport):
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot('1', None, [
+            ('add', ('', 'root-id', 'directory', '')),
+            ('add', ('file', 'file-id', 'file', 'content\n'))])
+        builder.build_snapshot('2', ['1'], [
+            ('modify', ('file-id', 'content-2\n'))])
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [('file-id', '1',), ('file-id', '2',)])
+        file_1_details = details[('file-id', '1')]
+        file_2_details = details[('file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
+
+    def test_fetch_combines_groups(self):
+        builder = self.make_branch_builder('source', format='2a')
+        builder.start_series()
+        builder.build_snapshot('1', None, [
+            ('add', ('', 'root-id', 'directory', '')),
+            ('add', ('file', 'file-id', 'file', 'content\n'))])
+        builder.build_snapshot('2', ['1'], [
+            ('modify', ('file-id', 'content-2\n'))])
+        builder.finish_series()
+        source = builder.get_branch()
+        target = self.make_repository('target', format='2a')
+        target.fetch(source.repository)
+        target.lock_read()
+        self.addCleanup(target.unlock)
+        details = target.texts._index.get_build_details(
+            [('file-id', '1',), ('file-id', '2',)])
+        file_1_details = details[('file-id', '1')]
+        file_2_details = details[('file-id', '2')]
+        # The index, and what to read off disk, should be the same for both
+        # versions of the file.
+        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
 
     def test_fetch_combines_groups(self):
         builder = self.make_branch_builder('source', format='2a')
@@ -710,8 +762,11 @@
         self.assertTrue(repo._format.pack_compresses)
 
     def test_inventories_use_chk_map_with_parent_base_dict(self):
-        tree = self.make_branch_and_tree('repo', format="2a")
+        tree = self.make_branch_and_memory_tree('repo', format="2a")
+        tree.lock_write()
+        tree.add([''], ['TREE_ROOT'])
         revid = tree.commit("foo")
+        tree.unlock()
         tree.lock_read()
         self.addCleanup(tree.unlock)
         inv = tree.branch.repository.get_inventory(revid)
@@ -726,12 +781,19 @@
         # at 20 unchanged commits, chk pages are packed that are split into
         # two groups such that the new pack being made doesn't have all its
         # pages in the source packs (though they are in the repository).
-        tree = self.make_branch_and_tree('tree', format='2a')
+        # Use a memory backed repository, we don't need to hit disk for this
+        tree = self.make_branch_and_memory_tree('tree', format='2a')
+        tree.lock_write()
+        self.addCleanup(tree.unlock)
+        tree.add([''], ['TREE_ROOT'])
         for pos in range(20):
             tree.commit(str(pos))
 
     def test_pack_with_hint(self):
-        tree = self.make_branch_and_tree('tree', format='2a')
+        tree = self.make_branch_and_memory_tree('tree', format='2a')
+        tree.lock_write()
+        self.addCleanup(tree.unlock)
+        tree.add([''], ['TREE_ROOT'])
         # 1 commit to leave untouched
         tree.commit('1')
         to_keep = tree.branch.repository._pack_collection.names()
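
Several of the hunks above swap make_branch_and_tree for make_branch_and_memory_tree so the tests never touch disk. A memory tree starts completely empty, so it must be write-locked and given a root directory before its first commit; that is why every converted call site grows a lock_write(), an addCleanup(unlock) and an add([''], ['TREE_ROOT']). A minimal sketch of that setup, assuming bzrlib's test framework (class and test names here are illustrative):

from bzrlib import tests


class TestMemoryTreeSetup(tests.TestCaseWithMemoryTransport):
    # Illustrative only: the lock/add-root steps a memory tree needs.

    def test_commit_in_memory(self):
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        # Unlike a disk working tree, a memory tree has no root until one
        # is added explicitly.
        tree.add([''], ['TREE_ROOT'])
        revid = tree.commit('first')
        self.assertEqual(revid, tree.branch.last_revision())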
@@ -1365,6 +1427,7 @@
             index_class=BTreeGraphIndex,
             use_chk_index=False)
         pack = pack_repo.NewPack(collection)
+        self.addCleanup(pack.abort) # Make sure the write stream gets closed
         self.assertIsInstance(pack.revision_index, BTreeBuilder)
         self.assertIsInstance(pack.inventory_index, BTreeBuilder)
         self.assertIsInstance(pack._hash, type(osutils.md5()))
@@ -1423,6 +1486,7 @@
         packer = pack_repo.OptimisingPacker(self.get_pack_collection(),
                                             [], '.test')
         new_pack = packer.open_pack()
+        self.addCleanup(new_pack.abort) # ensure cleanup
         self.assertIsInstance(new_pack, pack_repo.NewPack)
         self.assertTrue(new_pack.revision_index._optimize_for_size)
         self.assertTrue(new_pack.inventory_index._optimize_for_size)
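
The last two hunks register the pack's abort method with addCleanup so its write stream is closed even when a later assertion fails. The same idiom applies to any resource a test acquires; a small sketch using only the public Repository API (names are illustrative, not from this diff):

from bzrlib import tests


class TestCleanupIdiom(tests.TestCaseWithTransport):
    # Illustrative only: addCleanup callbacks run whether the test passes
    # or fails, in reverse registration order.

    def test_lock_released_via_cleanup(self):
        repo = self.make_repository('repo', format='2a')
        repo.lock_read()
        self.addCleanup(repo.unlock)
        self.assertTrue(repo.is_locked())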