# Copyright (C) 2006-2011 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""Tests for the Repository facility that are not interface tests.
|
|
18 |
||
3689.1.4
by John Arbash Meinel
Doc strings that reference repository_implementations |
19 |
For interface tests see tests/per_repository/*.py.
|
1534.4.40
by Robert Collins
Add RepositoryFormats and allow bzrdir.open or create _repository to be used. |
20 |
|
21 |
For concrete class tests see this file, and for storage formats tests
|
|
22 |
also see this file.
|
|
23 |
"""
|
|
24 |
||
1773.4.1
by Martin Pool
Add pyflakes makefile target; fix many warnings |
25 |
from stat import S_ISDIR

import bzrlib
from bzrlib.errors import (
    UnknownFormatError,
    UnsupportedFormatError,
    )
from bzrlib import (
    btree_index,
    symbol_versioning,
    tests,
    transport,
    vf_search,
    )
from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
from bzrlib.index import GraphIndex
from bzrlib.repository import RepositoryFormat
from bzrlib.tests import (
    TestCase,
    TestCaseWithTransport,
    )
from bzrlib import (
    bzrdir,
    controldir,
    errors,
    inventory,
    osutils,
    repository,
    revision as _mod_revision,
    upgrade,
    versionedfile,
    vf_repository,
    workingtree,
    )
from bzrlib.repofmt import (
    groupcompress_repo,
    knitrepo,
    knitpack_repo,
    pack_repo,
    )

class TestDefaultFormat(TestCase):

    def test_get_set_default_format(self):
        old_default = controldir.format_registry.get('default')
        private_default = old_default().repository_format.__class__
        old_format = repository.format_registry.get_default()
        self.assertTrue(isinstance(old_format, private_default))
        def make_sample_bzrdir():
            my_bzrdir = bzrdir.BzrDirMetaFormat1()
            my_bzrdir.repository_format = SampleRepositoryFormat()
            return my_bzrdir
        controldir.format_registry.remove('default')
        controldir.format_registry.register('sample', make_sample_bzrdir, '')
        controldir.format_registry.set_default('sample')
        # creating a repository should now create an instrumented dir.
        try:
            # the default branch format is used by the meta dir format
            # which is not the default bzrdir format at this point
            dir = bzrdir.BzrDirMetaFormat1().initialize('memory:///')
            result = dir.create_repository()
            self.assertEqual(result, 'A bzr repository dir')
        finally:
            controldir.format_registry.remove('default')
            controldir.format_registry.remove('sample')
            controldir.format_registry.register('default', old_default, '')
        self.assertIsInstance(repository.format_registry.get_default(),
                              old_format.__class__)

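# The two Sample*Format classes below are stubs used only by the tests in this
# module: SampleRepositoryFormat can be initialized but reports itself as
# unsupported, and SampleExtraRepositoryFormat cannot be used in a metadir.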
class SampleRepositoryFormat(repository.RepositoryFormatMetaDir):
    """A sample format

    this format is initializable, unsupported to aid in testing the
    open and open(unsupported=True) routines.
    """

    @classmethod
    def get_format_string(cls):
        """See RepositoryFormat.get_format_string()."""
        return "Sample .bzr repository format."

    def initialize(self, a_bzrdir, shared=False):
        """Initialize a repository in a BzrDir"""
        t = a_bzrdir.get_repository_transport(self)
        t.put_bytes('format', self.get_format_string())
        return 'A bzr repository dir'

    def is_supported(self):
        return False

    def open(self, a_bzrdir, _found=False):
        return "opened repository."

class SampleExtraRepositoryFormat(repository.RepositoryFormat):
    """A sample format that cannot be used in a metadir."""

    def get_format_string(self):
        raise NotImplementedError

class TestRepositoryFormat(TestCaseWithTransport):
    """Tests for the Repository format detection used by the bzr meta dir facility."""

    def test_find_format(self):
        # is the right format object found for a repository?
        # create a branch with a few known format objects.
        # this is not quite the same as
        self.build_tree(["foo/", "bar/"])
        def check_format(format, url):
            dir = format._matchingbzrdir.initialize(url)
            format.initialize(dir)
            t = transport.get_transport_from_path(url)
            found_format = repository.RepositoryFormatMetaDir.find_format(dir)
            self.assertIsInstance(found_format, format.__class__)
        check_format(repository.format_registry.get_default(), "bar")

    def test_find_format_no_repository(self):
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        self.assertRaises(errors.NoRepositoryPresent,
                          repository.RepositoryFormatMetaDir.find_format,
                          dir)

    def test_from_string(self):
        self.assertIsInstance(
            SampleRepositoryFormat.from_string(
                "Sample .bzr repository format."),
            SampleRepositoryFormat)
        self.assertRaises(AssertionError,
            SampleRepositoryFormat.from_string,
            "Different .bzr repository format.")

    def test_find_format_unknown_format(self):
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        SampleRepositoryFormat().initialize(dir)
        self.assertRaises(UnknownFormatError,
                          repository.RepositoryFormatMetaDir.find_format,
                          dir)

    def test_find_format_with_features(self):
        tree = self.make_branch_and_tree('.', format='2a')
        tree.branch.repository.update_feature_flags({"name": "necessity"})
        found_format = repository.RepositoryFormatMetaDir.find_format(tree.bzrdir)
        self.assertIsInstance(found_format, repository.RepositoryFormatMetaDir)
        self.assertEquals(found_format.features.get("name"), "necessity")
        self.assertRaises(errors.MissingFeature, found_format.check_support_status,
            True)
        self.addCleanup(repository.RepositoryFormatMetaDir.unregister_feature,
            "name")
        repository.RepositoryFormatMetaDir.register_feature("name")
        found_format.check_support_status(True)

class TestRepositoryFormatRegistry(TestCase):

    def setUp(self):
        super(TestRepositoryFormatRegistry, self).setUp()
        self.registry = repository.RepositoryFormatRegistry()

    def test_register_unregister_format(self):
        format = SampleRepositoryFormat()
        self.registry.register(format)
        self.assertEquals(format, self.registry.get("Sample .bzr repository format."))
        self.registry.remove(format)
        self.assertRaises(KeyError, self.registry.get, "Sample .bzr repository format.")

    def test_get_all(self):
        format = SampleRepositoryFormat()
        self.assertEquals([], self.registry._get_all())
        self.registry.register(format)
        self.assertEquals([format], self.registry._get_all())

    def test_register_extra(self):
        format = SampleExtraRepositoryFormat()
        self.assertEquals([], self.registry._get_all())
        self.registry.register_extra(format)
        self.assertEquals([format], self.registry._get_all())

    def test_register_extra_lazy(self):
        self.assertEquals([], self.registry._get_all())
        self.registry.register_extra_lazy("bzrlib.tests.test_repository",
            "SampleExtraRepositoryFormat")
        formats = self.registry._get_all()
        self.assertEquals(1, len(formats))
        self.assertIsInstance(formats[0], SampleExtraRepositoryFormat)

class TestFormatKnit1(TestCaseWithTransport):

    def test_attribute__fetch_order(self):
        """Knits need topological data insertion."""
        repo = self.make_repository('.',
                format=controldir.format_registry.get('knit')())
        self.assertEqual('topological', repo._format._fetch_order)

    def test_attribute__fetch_uses_deltas(self):
        """Knits reuse deltas."""
        repo = self.make_repository('.',
                format=controldir.format_registry.get('knit')())
        self.assertEqual(True, repo._format._fetch_uses_deltas)

    def test_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = knitrepo.RepositoryFormatKnit1().initialize(control)
        # in case of side effects of locking.
        repo.lock_write()
        repo.unlock()
        # we want:
        # format 'Bazaar-NG Knit Repository Format 1'
        # lock: is a directory
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
                             t.get('format').read())
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
        self.check_knits(t)
        # Check per-file knits.
        branch = control.create_branch()
        tree = control.create_workingtree()
        tree.add(['foo'], ['Nasty-IdC:'], ['file'])
        tree.put_file_bytes_non_atomic('Nasty-IdC:', '')
        tree.commit('1st post', rev_id='foo')
        self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
            '\nfoo fulltext 0 81 :')

    def assertHasKnit(self, t, knit_name, extra_content=''):
        """Assert that knit_name exists on t."""
        self.assertEqualDiff('# bzr knit index 8\n' + extra_content,
                             t.get(knit_name + '.kndx').read())

    def check_knits(self, t):
        """check knit content for a repository."""
        self.assertHasKnit(t, 'inventory')
        self.assertHasKnit(t, 'revisions')
        self.assertHasKnit(t, 'signatures')

    def test_shared_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
        # we want:
        # format 'Bazaar-NG Knit Repository Format 1'
        # lock: is a directory
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        # a 'shared-storage' marker file.
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
                             t.get('format').read())
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        self.assertEqualDiff('', t.get('shared-storage').read())
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
        self.check_knits(t)

    def test_shared_no_tree_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
        repo.set_make_working_trees(False)
        # we want:
        # format 'Bazaar-NG Knit Repository Format 1'
        # lock ''
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        # a 'shared-storage' marker file.
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
                             t.get('format').read())
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        self.assertEqualDiff('', t.get('shared-storage').read())
        self.assertEqualDiff('', t.get('no-working-trees').read())
        repo.set_make_working_trees(True)
        self.assertFalse(t.has('no-working-trees'))
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
        self.check_knits(t)

    def test_deserialise_sets_root_revision(self):
        """We must have an inventory.root.revision

        Old versions of the XML5 serializer did not set the revision_id for
        the whole inventory, so we grab the one from the expected text, which
        is valid when the API is not being abused.
        """
        repo = self.make_repository('.',
                format=controldir.format_registry.get('knit')())
        inv_xml = '<inventory format="5">\n</inventory>\n'
        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
        self.assertEqual('test-rev-id', inv.root.revision)

    def test_deserialise_uses_global_revision_id(self):
        """If it is set, then we re-use the global revision id"""
        repo = self.make_repository('.',
                format=controldir.format_registry.get('knit')())
        inv_xml = ('<inventory format="5" revision_id="other-rev-id">\n'
                   '</inventory>\n')
        # Arguably, the deserialise_inventory should detect a mismatch, and
        # raise an error, rather than silently using one revision_id over the
        # other.
        self.assertRaises(AssertionError, repo._deserialise_inventory,
            'test-rev-id', inv_xml)
        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
        self.assertEqual('other-rev-id', inv.root.revision)

    def test_supports_external_lookups(self):
        repo = self.make_repository('.',
                format=controldir.format_registry.get('knit')())
        self.assertFalse(repo._format.supports_external_lookups)

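# DummyRepository and InterDummy (below) are minimal test doubles: InterDummy
# only claims compatibility for DummyRepository pairs, so TestInterRepository
# can exercise optimiser registration without any real repository format
# matching.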
class DummyRepository(object):
    """A dummy repository for testing."""

    _format = None
    _serializer = None

    def supports_rich_root(self):
        if self._format is not None:
            return self._format.rich_root_data
        return False

    def get_graph(self):
        raise NotImplementedError

    def get_parent_map(self, revision_ids):
        raise NotImplementedError


class InterDummy(repository.InterRepository):
    """An inter-repository optimised code path for DummyRepository.

    This is for use during testing where we use DummyRepository as
    repositories so that none of the default registered inter-repository
    classes will match.
    """

    @staticmethod
    def is_compatible(repo_source, repo_target):
        """InterDummy is compatible with DummyRepository."""
        return (isinstance(repo_source, DummyRepository) and
            isinstance(repo_target, DummyRepository))

class TestInterRepository(TestCaseWithTransport):

    def test_get_default_inter_repository(self):
        # test that the InterRepository.get(repo_a, repo_b) probes
        # for an inter_repo class where is_compatible(repo_a, repo_b) returns
        # true and returns a default inter_repo otherwise.
        # This also tests that the default registered optimised
        # inter-repository classes do not barf inappropriately when a
        # surprising repository type is handed to them.
        dummy_a = DummyRepository()
        dummy_a._format = RepositoryFormat()
        dummy_a._format.supports_full_versioned_files = True
        dummy_b = DummyRepository()
        dummy_b._format = RepositoryFormat()
        dummy_b._format.supports_full_versioned_files = True
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)

    def assertGetsDefaultInterRepository(self, repo_a, repo_b):
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default.

        The effective default is now InterSameDataRepository because there is
        no actual sane default in the presence of incompatible data models.
        """
        inter_repo = repository.InterRepository.get(repo_a, repo_b)
        self.assertEqual(vf_repository.InterSameDataRepository,
                         inter_repo.__class__)
        self.assertEqual(repo_a, inter_repo.source)
        self.assertEqual(repo_b, inter_repo.target)

    def test_register_inter_repository_class(self):
        # test that an optimised code path provider - an
        # InterRepository subclass - can be registered and unregistered
        # and that it is correctly selected when given a repository
        # pair that it returns true on for the is_compatible static method
        # check
        dummy_a = DummyRepository()
        dummy_a._format = RepositoryFormat()
        dummy_b = DummyRepository()
        dummy_b._format = RepositoryFormat()
        repo = self.make_repository('.')
        # hack dummies to look like repo somewhat.
        dummy_a._serializer = repo._serializer
        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
        dummy_a._format.rich_root_data = repo._format.rich_root_data
        dummy_a._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
        dummy_b._serializer = repo._serializer
        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
        dummy_b._format.rich_root_data = repo._format.rich_root_data
        dummy_b._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
        repository.InterRepository.register_optimiser(InterDummy)
        try:
            # we should get the default for something InterDummy returns False
            # to
            self.assertFalse(InterDummy.is_compatible(dummy_a, repo))
            self.assertGetsDefaultInterRepository(dummy_a, repo)
            # and we should get an InterDummy for a pair it 'likes'
            self.assertTrue(InterDummy.is_compatible(dummy_a, dummy_b))
            inter_repo = repository.InterRepository.get(dummy_a, dummy_b)
            self.assertEqual(InterDummy, inter_repo.__class__)
            self.assertEqual(dummy_a, inter_repo.source)
            self.assertEqual(dummy_b, inter_repo.target)
        finally:
            repository.InterRepository.unregister_optimiser(InterDummy)
        # now we should get the default InterRepository object again.
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)

class TestRepositoryFormat1(knitrepo.RepositoryFormatKnit1):

    @classmethod
    def get_format_string(cls):
        return "Test Format 1"


class TestRepositoryFormat2(knitrepo.RepositoryFormatKnit1):

    @classmethod
    def get_format_string(cls):
        return "Test Format 2"


class TestRepositoryConverter(TestCaseWithTransport):

    def test_convert_empty(self):
        source_format = TestRepositoryFormat1()
        target_format = TestRepositoryFormat2()
        repository.format_registry.register(source_format)
        self.addCleanup(repository.format_registry.remove,
            source_format)
        repository.format_registry.register(target_format)
        self.addCleanup(repository.format_registry.remove,
            target_format)
        t = self.get_transport()
        t.mkdir('repository')
        repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
        repo = TestRepositoryFormat1().initialize(repo_dir)
        converter = repository.CopyConverter(target_format)
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
        try:
            converter.convert(repo, pb)
        finally:
            pb.finished()
        repo = repo_dir.open_repository()
        self.assertTrue(isinstance(target_format, repo._format.__class__))

class TestRepositoryFormatKnit3(TestCaseWithTransport):

    def test_attribute__fetch_order(self):
        """Knits need topological data insertion."""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertEqual('topological', repo._format._fetch_order)

    def test_attribute__fetch_uses_deltas(self):
        """Knits reuse deltas."""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertEqual(True, repo._format._fetch_uses_deltas)

    def test_convert(self):
        """Ensure the upgrade adds weaves for roots"""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit1()
        tree = self.make_branch_and_tree('.', format)
        tree.commit("Dull commit", rev_id="dull")
        revision_tree = tree.branch.repository.revision_tree('dull')
        revision_tree.lock_read()
        try:
            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
                revision_tree.get_root_id())
        finally:
            revision_tree.unlock()
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        upgrade.Convert('.', format)
        tree = workingtree.WorkingTree.open('.')
        revision_tree = tree.branch.repository.revision_tree('dull')
        revision_tree.lock_read()
        try:
            revision_tree.get_file_lines(revision_tree.get_root_id())
        finally:
            revision_tree.unlock()
        tree.commit("Another dull commit", rev_id='dull2')
        revision_tree = tree.branch.repository.revision_tree('dull2')
        revision_tree.lock_read()
        self.addCleanup(revision_tree.unlock)
        self.assertEqual('dull',
            revision_tree.get_file_revision(revision_tree.get_root_id()))

    def test_supports_external_lookups(self):
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertFalse(repo._format.supports_external_lookups)

class Test2a(tests.TestCaseWithMemoryTransport):

    def test_chk_bytes_uses_custom_btree_parser(self):
        mt = self.make_branch_and_memory_tree('test', format='2a')
        mt.lock_write()
        self.addCleanup(mt.unlock)
        mt.add([''], ['root-id'])
        mt.commit('first')
        index = mt.branch.repository.chk_bytes._index._graph_index._indices[0]
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
        # It should also work if we re-open the repo
        repo = mt.branch.repository.bzrdir.open_repository()
        repo.lock_read()
        self.addCleanup(repo.unlock)
        index = repo.chk_bytes._index._graph_index._indices[0]
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)

    def test_fetch_combines_groups(self):
        builder = self.make_branch_builder('source', format='2a')
        builder.start_series()
        builder.build_snapshot('1', None, [
            ('add', ('', 'root-id', 'directory', '')),
            ('add', ('file', 'file-id', 'file', 'content\n'))])
        builder.build_snapshot('2', ['1'], [
            ('modify', ('file-id', 'content-2\n'))])
        builder.finish_series()
        source = builder.get_branch()
        target = self.make_repository('target', format='2a')
        target.fetch(source.repository)
        target.lock_read()
        self.addCleanup(target.unlock)
        details = target.texts._index.get_build_details(
            [('file-id', '1',), ('file-id', '2',)])
        file_1_details = details[('file-id', '1')]
        file_2_details = details[('file-id', '2')]
        # The index, and what to read off disk, should be the same for both
        # versions of the file.
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])

    def test_format_pack_compresses_True(self):
        repo = self.make_repository('repo', format='2a')
        self.assertTrue(repo._format.pack_compresses)

    def test_inventories_use_chk_map_with_parent_base_dict(self):
        tree = self.make_branch_and_memory_tree('repo', format="2a")
        tree.lock_write()
        tree.add([''], ['TREE_ROOT'])
        revid = tree.commit("foo")
        tree.unlock()
        tree.lock_read()
        self.addCleanup(tree.unlock)
        inv = tree.branch.repository.get_inventory(revid)
        self.assertNotEqual(None, inv.parent_id_basename_to_file_id)
        inv.parent_id_basename_to_file_id._ensure_root()
        inv.id_to_entry._ensure_root()
        self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size)
        self.assertEqual(65536,
            inv.parent_id_basename_to_file_id._root_node.maximum_size)

    def test_autopack_unchanged_chk_nodes(self):
        # at 20 unchanged commits, chk pages are packed that are split into
        # two groups such that the new pack being made doesn't have all its
        # pages in the source packs (though they are in the repository).
        # Use a memory backed repository, we don't need to hit disk for this
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        for pos in range(20):
            tree.commit(str(pos))

    def test_pack_with_hint(self):
        tree = self.make_branch_and_memory_tree('tree', format='2a')
        tree.lock_write()
        self.addCleanup(tree.unlock)
        tree.add([''], ['TREE_ROOT'])
        # 1 commit to leave untouched
        tree.commit('1')
        to_keep = tree.branch.repository._pack_collection.names()
        # 2 to combine
        tree.commit('2')
        tree.commit('3')
        all = tree.branch.repository._pack_collection.names()
        combine = list(set(all) - set(to_keep))
        self.assertLength(3, all)
        self.assertLength(2, combine)
        tree.branch.repository.pack(hint=combine)
        final = tree.branch.repository._pack_collection.names()
        self.assertLength(2, final)
        self.assertFalse(combine[0] in final)
        self.assertFalse(combine[1] in final)
        self.assertSubset(to_keep, final)

    def test_stream_source_to_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='2a')
        stream = source._get_source(target._format)
        self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource)

    def test_stream_source_to_non_gc(self):
        source = self.make_repository('source', format='2a')
        target = self.make_repository('target', format='rich-root-pack')
        stream = source._get_source(target._format)
        # We don't want the child GroupCHKStreamSource
        self.assertIs(type(stream), vf_repository.StreamSource)

    def test_get_stream_for_missing_keys_includes_all_chk_refs(self):
        source_builder = self.make_branch_builder('source',
                            format='2a')
        # We have to build a fairly large tree, so that we are sure the chk
        # pages will have split into multiple pages.
        entries = [('add', ('', 'a-root-id', 'directory', None))]
        for i in 'abcdefghijklmnopqrstuvwxyz123456789':
            for j in 'abcdefghijklmnopqrstuvwxyz123456789':
                fname = i + j
                fid = fname + '-id'
                content = 'content for %s\n' % (fname,)
                entries.append(('add', (fname, fid, 'file', content)))
        source_builder.start_series()
        source_builder.build_snapshot('rev-1', None, entries)
        # Now change a few of them, so we get a few new pages for the second
        # revision
        source_builder.build_snapshot('rev-2', ['rev-1'], [
            ('modify', ('aa-id', 'new content for aa-id\n')),
            ('modify', ('cc-id', 'new content for cc-id\n')),
            ('modify', ('zz-id', 'new content for zz-id\n')),
            ])
        source_builder.finish_series()
        source_branch = source_builder.get_branch()
        source_branch.lock_read()
        self.addCleanup(source_branch.unlock)
        target = self.make_repository('target', format='2a')
        source = source_branch.repository._get_source(target._format)
        self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource)

        # On a regular pass, getting the inventories and chk pages for rev-2
        # would only get the newly created chk pages
        search = vf_search.SearchResult(set(['rev-2']), set(['rev-1']), 1,
                                        set(['rev-2']))
        simple_chk_records = []
        for vf_name, substream in source.get_stream(search):
            if vf_name == 'chk_bytes':
                for record in substream:
                    simple_chk_records.append(record.key)
            else:
                for _ in substream:
                    continue
        # 3 pages, the root (InternalNode), + 2 pages which actually changed
        self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',),
                          ('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',),
                          ('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',),
                          ('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)],
                         simple_chk_records)
        # Now, when we do a similar call using 'get_stream_for_missing_keys'
        # we should get a much larger set of pages.
        missing = [('inventories', 'rev-2')]
|
736 |
full_chk_records = [] |
|
737 |
for vf_name, substream in source.get_stream_for_missing_keys(missing): |
|
738 |
if vf_name == 'inventories': |
|
739 |
for record in substream: |
|
740 |
self.assertEqual(('rev-2',), record.key) |
|
741 |
elif vf_name == 'chk_bytes': |
|
742 |
for record in substream: |
|
743 |
full_chk_records.append(record.key) |
|
744 |
else: |
|
745 |
self.fail('Should not be getting a stream of %s' % (vf_name,)) |
|
746 |
# We have 257 records now. This is because we have 1 root page, and 256
|
|
747 |
# leaf pages in a complete listing.
|
|
748 |
self.assertEqual(257, len(full_chk_records)) |
|
749 |
self.assertSubset(simple_chk_records, full_chk_records) |
|
750 |
||
4465.2.7
by Aaron Bentley
Move test_inconsistency_fatal to test_repository |
751 |
def test_inconsistency_fatal(self): |
752 |
repo = self.make_repository('repo', format='2a') |
|
753 |
self.assertTrue(repo.revisions._index._inconsistency_fatal) |
|
754 |
self.assertFalse(repo.texts._index._inconsistency_fatal) |
|
755 |
self.assertFalse(repo.inventories._index._inconsistency_fatal) |
|
756 |
self.assertFalse(repo.signatures._index._inconsistency_fatal) |
|
757 |
self.assertFalse(repo.chk_bytes._index._inconsistency_fatal) |
|
758 |
||
4360.4.3
by John Arbash Meinel
Introduce a KnitPackStreamSource which is used when |
759 |
|
760 |
class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport): |
|
761 |
||
762 |
def test_source_to_exact_pack_092(self): |
|
763 |
source = self.make_repository('source', format='pack-0.92') |
|
764 |
target = self.make_repository('target', format='pack-0.92') |
|
765 |
stream_source = source._get_source(target._format) |
|
5757.2.2
by Jelmer Vernooij
Fix imports. |
766 |
self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource) |
4360.4.3
by John Arbash Meinel
Introduce a KnitPackStreamSource which is used when |
767 |
|
768 |
def test_source_to_exact_pack_rich_root_pack(self): |
|
769 |
source = self.make_repository('source', format='rich-root-pack') |
|
770 |
target = self.make_repository('target', format='rich-root-pack') |
|
771 |
stream_source = source._get_source(target._format) |
|
5757.2.2
by Jelmer Vernooij
Fix imports. |
772 |
self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource) |
4360.4.3
by John Arbash Meinel
Introduce a KnitPackStreamSource which is used when |
773 |
|
774 |
def test_source_to_exact_pack_19(self): |
|
775 |
source = self.make_repository('source', format='1.9') |
|
776 |
target = self.make_repository('target', format='1.9') |
|
777 |
stream_source = source._get_source(target._format) |
|
5757.2.2
by Jelmer Vernooij
Fix imports. |
778 |
self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource) |
4360.4.3
by John Arbash Meinel
Introduce a KnitPackStreamSource which is used when |
779 |
|
780 |
def test_source_to_exact_pack_19_rich_root(self): |
|
781 |
source = self.make_repository('source', format='1.9-rich-root') |
|
782 |
target = self.make_repository('target', format='1.9-rich-root') |
|
783 |
stream_source = source._get_source(target._format) |
|
5757.2.2
by Jelmer Vernooij
Fix imports. |
784 |
self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource) |
4360.4.3
by John Arbash Meinel
Introduce a KnitPackStreamSource which is used when |
785 |
|
786 |
def test_source_to_remote_exact_pack_19(self): |
|
787 |
trans = self.make_smart_server('target') |
|
788 |
trans.ensure_base() |
|
789 |
source = self.make_repository('source', format='1.9') |
|
790 |
target = self.make_repository('target', format='1.9') |
|
791 |
target = repository.Repository.open(trans.base) |
|
792 |
stream_source = source._get_source(target._format) |
|
5757.2.2
by Jelmer Vernooij
Fix imports. |
793 |
self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource) |
4360.4.3
by John Arbash Meinel
Introduce a KnitPackStreamSource which is used when |
794 |
|
795 |
def test_stream_source_to_non_exact(self): |
|
796 |
source = self.make_repository('source', format='pack-0.92') |
|
797 |
target = self.make_repository('target', format='1.9') |
|
798 |
stream = source._get_source(target._format) |
|
5815.4.15
by Jelmer Vernooij
Fix some imports. |
799 |
self.assertIs(type(stream), vf_repository.StreamSource) |
4360.4.3
by John Arbash Meinel
Introduce a KnitPackStreamSource which is used when |
800 |
|
801 |
def test_stream_source_to_non_exact_rich_root(self): |
|
802 |
source = self.make_repository('source', format='1.9') |
|
803 |
target = self.make_repository('target', format='1.9-rich-root') |
|
804 |
stream = source._get_source(target._format) |
|
5815.4.15
by Jelmer Vernooij
Fix some imports. |
805 |
self.assertIs(type(stream), vf_repository.StreamSource) |
4360.4.3
by John Arbash Meinel
Introduce a KnitPackStreamSource which is used when |
806 |
|
807 |
def test_source_to_remote_non_exact_pack_19(self): |
|
808 |
trans = self.make_smart_server('target') |
|
809 |
trans.ensure_base() |
|
810 |
source = self.make_repository('source', format='1.9') |
|
811 |
target = self.make_repository('target', format='1.6') |
|
812 |
target = repository.Repository.open(trans.base) |
|
813 |
stream_source = source._get_source(target._format) |
|
5815.4.15
by Jelmer Vernooij
Fix some imports. |
814 |
self.assertIs(type(stream_source), vf_repository.StreamSource) |
4360.4.3
by John Arbash Meinel
Introduce a KnitPackStreamSource which is used when |
815 |
|
816 |
def test_stream_source_to_knit(self): |
|
817 |
source = self.make_repository('source', format='pack-0.92') |
|
818 |
target = self.make_repository('target', format='dirstate') |
|
819 |
stream = source._get_source(target._format) |
|
5815.4.15
by Jelmer Vernooij
Fix some imports. |
820 |
self.assertIs(type(stream), vf_repository.StreamSource) |
4360.4.3
by John Arbash Meinel
Introduce a KnitPackStreamSource which is used when |
821 |
|
3735.2.40
by Robert Collins
Add development4 which has a parent_id to basename index on CHKInventory objects. |
822 |
|
4343.3.32
by John Arbash Meinel
Change the tests for _find_revision_outside_set to the new _find_parent_ids function. |
823 |
class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport): |
824 |
"""Tests for _find_parent_ids_of_revisions."""
|
|
3735.4.1
by Andrew Bennetts
Add _find_revision_outside_set. |
825 |
|
826 |
def setUp(self): |
|
4343.3.32
by John Arbash Meinel
Change the tests for _find_revision_outside_set to the new _find_parent_ids function. |
827 |
super(TestDevelopment6FindParentIdsOfRevisions, self).setUp() |
5546.1.1
by Andrew Bennetts
Remove RepositoryFormatCHK1 and RepositoryFormatCHK2. |
828 |
self.builder = self.make_branch_builder('source') |
3735.4.1
by Andrew Bennetts
Add _find_revision_outside_set. |
829 |
self.builder.start_series() |
830 |
self.builder.build_snapshot('initial', None, |
|
831 |
[('add', ('', 'tree-root', 'directory', None))]) |
|
832 |
self.repo = self.builder.get_branch().repository |
|
833 |
self.addCleanup(self.builder.finish_series) |
|
3735.2.99
by John Arbash Meinel
Merge bzr.dev 4034. Whitespace cleanup |
834 |
|
4343.3.32
by John Arbash Meinel
Change the tests for _find_revision_outside_set to the new _find_parent_ids function. |
835 |
def assertParentIds(self, expected_result, rev_set): |
836 |
self.assertEqual(sorted(expected_result), |
|
837 |
sorted(self.repo._find_parent_ids_of_revisions(rev_set))) |
|
3735.4.1
by Andrew Bennetts
Add _find_revision_outside_set. |
838 |
|
839 |
def test_simple(self): |
|
840 |
self.builder.build_snapshot('revid1', None, []) |
|
4343.3.32
by John Arbash Meinel
Change the tests for _find_revision_outside_set to the new _find_parent_ids function. |
841 |
self.builder.build_snapshot('revid2', ['revid1'], []) |
3735.4.1
by Andrew Bennetts
Add _find_revision_outside_set. |
842 |
rev_set = ['revid2'] |
4343.3.32
by John Arbash Meinel
Change the tests for _find_revision_outside_set to the new _find_parent_ids function. |
843 |
self.assertParentIds(['revid1'], rev_set) |
3735.4.1
by Andrew Bennetts
Add _find_revision_outside_set. |
844 |
|
845 |
def test_not_first_parent(self): |
|
846 |
self.builder.build_snapshot('revid1', None, []) |
|
4343.3.32
by John Arbash Meinel
Change the tests for _find_revision_outside_set to the new _find_parent_ids function. |
847 |
self.builder.build_snapshot('revid2', ['revid1'], []) |
848 |
self.builder.build_snapshot('revid3', ['revid2'], []) |
|
3735.4.1
by Andrew Bennetts
Add _find_revision_outside_set. |
849 |
rev_set = ['revid3', 'revid2'] |
4343.3.32
by John Arbash Meinel
Change the tests for _find_revision_outside_set to the new _find_parent_ids function. |
850 |
self.assertParentIds(['revid1'], rev_set) |
3735.4.1
by Andrew Bennetts
Add _find_revision_outside_set. |
851 |
|
852 |
def test_not_null(self): |
|
853 |
rev_set = ['initial'] |
|
4343.3.32
by John Arbash Meinel
Change the tests for _find_revision_outside_set to the new _find_parent_ids function. |
854 |
self.assertParentIds([], rev_set) |
3735.4.1
by Andrew Bennetts
Add _find_revision_outside_set. |
855 |
|
856 |
def test_not_null_set(self): |
|
857 |
self.builder.build_snapshot('revid1', None, []) |
|
858 |
rev_set = [_mod_revision.NULL_REVISION] |
|
4343.3.32
by John Arbash Meinel
Change the tests for _find_revision_outside_set to the new _find_parent_ids function. |
859 |
self.assertParentIds([], rev_set) |
3735.4.1
by Andrew Bennetts
Add _find_revision_outside_set. |
860 |
|
861 |
def test_ghost(self): |
|
862 |
self.builder.build_snapshot('revid1', None, []) |
|
863 |
rev_set = ['ghost', 'revid1'] |
|
4343.3.32
by John Arbash Meinel
Change the tests for _find_revision_outside_set to the new _find_parent_ids function. |
864 |
self.assertParentIds(['initial'], rev_set) |
3735.4.1
by Andrew Bennetts
Add _find_revision_outside_set. |
865 |
|
866 |
def test_ghost_parent(self): |
|
867 |
self.builder.build_snapshot('revid1', None, []) |
|
868 |
self.builder.build_snapshot('revid2', ['revid1', 'ghost'], []) |
|
869 |
rev_set = ['revid2', 'revid1'] |
|
4343.3.32
by John Arbash Meinel
Change the tests for _find_revision_outside_set to the new _find_parent_ids function. |
870 |
self.assertParentIds(['ghost', 'initial'], rev_set) |
3735.4.1
by Andrew Bennetts
Add _find_revision_outside_set. |
871 |
|
872 |
def test_righthand_parent(self): |
|
873 |
self.builder.build_snapshot('revid1', None, []) |
|
874 |
self.builder.build_snapshot('revid2a', ['revid1'], []) |
|
875 |
self.builder.build_snapshot('revid2b', ['revid1'], []) |
|
876 |
self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], []) |
|
877 |
rev_set = ['revid3', 'revid2a'] |
|
4343.3.32
by John Arbash Meinel
Change the tests for _find_revision_outside_set to the new _find_parent_ids function. |
878 |
self.assertParentIds(['revid1', 'revid2b'], rev_set) |
3735.4.1
by Andrew Bennetts
Add _find_revision_outside_set. |
879 |
|
880 |
||
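# A reference sketch, assumed behaviour rather than the bzrlib
# implementation, of the contract the tests above pin down for
# _find_parent_ids_of_revisions(): the result holds every parent of a
# revision in rev_set that is not itself in rev_set, keeping ghost parents
# and dropping the null revision.
def find_parent_ids_sketch(parent_map, rev_set):
    # parent_map maps revision id -> tuple of parent ids; ghosts simply do
    # not appear as keys, so they contribute no parents of their own.
    rev_set = set(rev_set)
    parents = set()
    for revision_id in rev_set:
        parents.update(parent_map.get(revision_id, ()))
    parents.difference_update(rev_set)
    parents.discard('null:')  # bzrlib's NULL_REVISION marker
    return parents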
2535.3.57
by Andrew Bennetts
Perform some sanity checking of data streams rather than blindly inserting them into our repository. |
881 |
class TestWithBrokenRepo(TestCaseWithTransport): |
2592.3.214
by Robert Collins
Merge bzr.dev. |
882 |
"""These tests seem to be more appropriate as interface tests?"""
|
2535.3.57
by Andrew Bennetts
Perform some sanity checking of data streams rather than blindly inserting them into our repository. |
883 |
|
884 |
def make_broken_repository(self): |
|
885 |
# XXX: This function is borrowed from Aaron's "Reconcile can fix bad
|
|
886 |
# parent references" branch which is due to land in bzr.dev soon. Once
|
|
887 |
# it does, this duplication should be removed.
|
|
888 |
repo = self.make_repository('broken-repo') |
|
889 |
cleanups = [] |
|
890 |
try: |
|
891 |
repo.lock_write() |
|
892 |
cleanups.append(repo.unlock) |
|
893 |
repo.start_write_group() |
|
894 |
cleanups.append(repo.commit_write_group) |
|
895 |
# make rev1a: A well-formed revision, containing 'file1'
|
|
896 |
inv = inventory.Inventory(revision_id='rev1a') |
|
897 |
inv.root.revision = 'rev1a' |
|
898 |
self.add_file(repo, inv, 'file1', 'rev1a', []) |
|
4634.35.21
by Andrew Bennetts
Fix test_insert_from_broken_repo in test_repository. |
899 |
repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], []) |
2535.3.57
by Andrew Bennetts
Perform some sanity checking of data streams rather than blindly inserting them into our repository. |
900 |
repo.add_inventory('rev1a', inv, []) |
901 |
revision = _mod_revision.Revision('rev1a', |
|
902 |
committer='jrandom@example.com', timestamp=0, |
|
903 |
inventory_sha1='', timezone=0, message='foo', parent_ids=[]) |
|
6351.3.2
by Jelmer Vernooij
Convert some gpg options to config stacks. |
904 |
repo.add_revision('rev1a', revision, inv) |
2535.3.57
by Andrew Bennetts
Perform some sanity checking of data streams rather than blindly inserting them into our repository. |
905 |
|
906 |
# make rev1b, which has no Revision, but has an Inventory, and
|
|
907 |
# file1
|
|
908 |
inv = inventory.Inventory(revision_id='rev1b') |
|
909 |
inv.root.revision = 'rev1b' |
|
910 |
self.add_file(repo, inv, 'file1', 'rev1b', []) |
|
911 |
repo.add_inventory('rev1b', inv, []) |
|
912 |
||
913 |
# make rev2, with file1 and file2
|
|
914 |
# file2 is sane
|
|
915 |
# file1 has 'rev1b' as an ancestor, even though this is not
|
|
916 |
# mentioned by 'rev1a', making it an unreferenced ancestor
|
|
917 |
inv = inventory.Inventory() |
|
918 |
self.add_file(repo, inv, 'file1', 'rev2', ['rev1a', 'rev1b']) |
|
919 |
self.add_file(repo, inv, 'file2', 'rev2', []) |
|
920 |
self.add_revision(repo, 'rev2', inv, ['rev1a']) |
|
921 |
||
922 |
# make ghost revision rev1c
|
|
923 |
inv = inventory.Inventory() |
|
924 |
self.add_file(repo, inv, 'file2', 'rev1c', []) |
|
925 |
||
926 |
# make rev3 with file2
|
|
927 |
# file2 refers to 'rev1c', which is a ghost in this repository, so
|
|
928 |
# file2 cannot have rev1c as its ancestor.
|
|
929 |
inv = inventory.Inventory() |
|
930 |
self.add_file(repo, inv, 'file2', 'rev3', ['rev1c']) |
|
931 |
self.add_revision(repo, 'rev3', inv, ['rev1c']) |
|
932 |
return repo |
|
933 |
finally: |
|
934 |
for cleanup in reversed(cleanups): |
|
935 |
cleanup() |
|
936 |
||
937 |
def add_revision(self, repo, revision_id, inv, parent_ids): |
|
938 |
inv.revision_id = revision_id |
|
939 |
inv.root.revision = revision_id |
|
4634.35.21
by Andrew Bennetts
Fix test_insert_from_broken_repo in test_repository. |
940 |
repo.texts.add_lines((inv.root.file_id, revision_id), [], []) |
2535.3.57
by Andrew Bennetts
Perform some sanity checking of data streams rather than blindly inserting them into our repository. |
941 |
repo.add_inventory(revision_id, inv, parent_ids) |
942 |
revision = _mod_revision.Revision(revision_id, |
|
943 |
committer='jrandom@example.com', timestamp=0, inventory_sha1='', |
|
944 |
timezone=0, message='foo', parent_ids=parent_ids) |
|
6351.3.2
by Jelmer Vernooij
Convert some gpg options to config stacks. |
945 |
repo.add_revision(revision_id, revision, inv) |
2535.3.57
by Andrew Bennetts
Perform some sanity checking of data streams rather than blindly inserting them into our repository. |
946 |
|
947 |
def add_file(self, repo, inv, filename, revision, parents): |
|
948 |
file_id = filename + '-id' |
|
949 |
entry = inventory.InventoryFile(file_id, filename, 'TREE_ROOT') |
|
950 |
entry.revision = revision |
|
2535.4.10
by Andrew Bennetts
Fix one failing test, disable another. |
951 |
entry.text_size = 0 |
2535.3.57
by Andrew Bennetts
Perform some sanity checking of data streams rather than blindly inserting them into our repository. |
952 |
inv.add(entry) |
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
953 |
text_key = (file_id, revision) |
954 |
parent_keys = [(file_id, parent) for parent in parents] |
|
955 |
repo.texts.add_lines(text_key, parent_keys, ['line\n']) |
|
2535.3.57
by Andrew Bennetts
Perform some sanity checking of data streams rather than blindly inserting them into our repository. |
956 |
|
957 |
def test_insert_from_broken_repo(self): |
|
958 |
"""Inserting a data stream from a broken repository won't silently
|
|
959 |
corrupt the target repository.
|
|
960 |
"""
|
|
961 |
broken_repo = self.make_broken_repository() |
|
962 |
empty_repo = self.make_repository('empty-repo') |
|
4606.1.1
by Robert Collins
Change test_insert_from_broken_repo from a known failure to a working test. |
963 |
try: |
964 |
empty_repo.fetch(broken_repo) |
|
965 |
except (errors.RevisionNotPresent, errors.BzrCheckError): |
|
966 |
# Test successful: compression parent not being copied leads to
|
|
967 |
# error.
|
|
968 |
return
|
|
969 |
empty_repo.lock_read() |
|
970 |
self.addCleanup(empty_repo.unlock) |
|
971 |
text = empty_repo.texts.get_record_stream( |
|
972 |
[('file2-id', 'rev3')], 'topological', True).next() |
|
973 |
self.assertEqual('line\n', text.get_bytes_as('fulltext')) |
|
2592.3.214
by Robert Collins
Merge bzr.dev. |
974 |
|
975 |
||
2592.3.84
by Robert Collins
Start of autopacking logic. |
976 |
class TestRepositoryPackCollection(TestCaseWithTransport): |
977 |
||
978 |
def get_format(self): |
|
6472.2.1
by Jelmer Vernooij
Use bzrdir.controldir for generic access to control directories. |
979 |
return controldir.format_registry.make_bzrdir('pack-0.92') |
2592.3.84
by Robert Collins
Start of autopacking logic. |
980 |
|
3711.4.1
by John Arbash Meinel
Fix bug #242510, when determining the autopack sequence, |
981 |
def get_packs(self): |
982 |
format = self.get_format() |
|
983 |
repo = self.make_repository('.', format=format) |
|
984 |
return repo._pack_collection |
|
985 |
||
3789.2.20
by John Arbash Meinel
The autopack code can now trigger itself to retry when _copy_revision_texts fails. |
986 |
def make_packs_and_alt_repo(self, write_lock=False): |
3789.2.19
by John Arbash Meinel
Refactor to make the tests a bit simpler |
987 |
"""Create a pack repo with 3 packs, and access it via a second repo."""
|
4617.4.1
by Robert Collins
Fix a pack specific test which didn't lock its format down. |
988 |
tree = self.make_branch_and_tree('.', format=self.get_format()) |
3789.2.19
by John Arbash Meinel
Refactor to make the tests a bit simpler |
989 |
tree.lock_write() |
990 |
self.addCleanup(tree.unlock) |
|
991 |
rev1 = tree.commit('one') |
|
992 |
rev2 = tree.commit('two') |
|
993 |
rev3 = tree.commit('three') |
|
994 |
r = repository.Repository.open('.') |
|
3789.2.20
by John Arbash Meinel
The autopack code can now trigger itself to retry when _copy_revision_texts fails. |
995 |
if write_lock: |
996 |
r.lock_write() |
|
997 |
else: |
|
998 |
r.lock_read() |
|
3789.2.19
by John Arbash Meinel
Refactor to make the tests a bit simpler |
999 |
self.addCleanup(r.unlock) |
1000 |
packs = r._pack_collection |
|
1001 |
packs.ensure_loaded() |
|
1002 |
return tree, r, packs, [rev1, rev2, rev3] |
|
1003 |
||
4634.127.1
by John Arbash Meinel
Partial fix for bug #507557. |
1004 |
def test__clear_obsolete_packs(self): |
1005 |
packs = self.get_packs() |
|
1006 |
obsolete_pack_trans = packs.transport.clone('obsolete_packs') |
|
1007 |
obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n') |
|
1008 |
obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n') |
|
1009 |
obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n') |
|
1010 |
obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n') |
|
1011 |
obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n') |
|
1012 |
res = packs._clear_obsolete_packs() |
|
1013 |
self.assertEqual(['a-pack', 'another-pack'], sorted(res)) |
|
1014 |
self.assertEqual([], obsolete_pack_trans.list_dir('.')) |
|
1015 |
||
1016 |
def test__clear_obsolete_packs_preserve(self): |
|
1017 |
packs = self.get_packs() |
|
1018 |
obsolete_pack_trans = packs.transport.clone('obsolete_packs') |
|
1019 |
obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n') |
|
1020 |
obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n') |
|
1021 |
obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n') |
|
1022 |
obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n') |
|
1023 |
obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n') |
|
1024 |
res = packs._clear_obsolete_packs(preserve=set(['a-pack'])) |
|
1025 |
self.assertEqual(['a-pack', 'another-pack'], sorted(res)) |
|
1026 |
self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'], |
|
1027 |
sorted(obsolete_pack_trans.list_dir('.'))) |
|
1028 |
||
2592.3.84
by Robert Collins
Start of autopacking logic. |
1029 |
def test__max_pack_count(self): |
2592.3.219
by Robert Collins
Review feedback. |
1030 |
"""The maximum pack count is a function of the number of revisions."""
|
2592.3.84
by Robert Collins
Start of autopacking logic. |
1031 |
# no revisions - one pack, so that we can have a revision free repo
|
1032 |
# without it blowing up
|
|
3711.4.1
by John Arbash Meinel
Fix bug #242510, when determining the autopack sequence, |
1033 |
packs = self.get_packs() |
2592.3.84
by Robert Collins
Start of autopacking logic. |
1034 |
self.assertEqual(1, packs._max_pack_count(0)) |
1035 |
# after that, the sum of the digits - check the first 1-9
|
|
1036 |
self.assertEqual(1, packs._max_pack_count(1)) |
|
1037 |
self.assertEqual(2, packs._max_pack_count(2)) |
|
1038 |
self.assertEqual(3, packs._max_pack_count(3)) |
|
1039 |
self.assertEqual(4, packs._max_pack_count(4)) |
|
1040 |
self.assertEqual(5, packs._max_pack_count(5)) |
|
1041 |
self.assertEqual(6, packs._max_pack_count(6)) |
|
1042 |
self.assertEqual(7, packs._max_pack_count(7)) |
|
1043 |
self.assertEqual(8, packs._max_pack_count(8)) |
|
1044 |
self.assertEqual(9, packs._max_pack_count(9)) |
|
1045 |
# check the boundary cases with two digits for the next decade
|
|
1046 |
self.assertEqual(1, packs._max_pack_count(10)) |
|
1047 |
self.assertEqual(2, packs._max_pack_count(11)) |
|
1048 |
self.assertEqual(10, packs._max_pack_count(19)) |
|
1049 |
self.assertEqual(2, packs._max_pack_count(20)) |
|
1050 |
self.assertEqual(3, packs._max_pack_count(21)) |
|
1051 |
# check some arbitrary big numbers
|
|
1052 |
self.assertEqual(25, packs._max_pack_count(112894)) |
|
1053 |
||
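# A minimal sketch, assumed rather than taken from bzrlib, of the
# sum-of-digits rule the assertions above exercise: the pack-count ceiling
# for N revisions is the sum of N's decimal digits, with a floor of one
# pack so a revision-free repository is still valid.
def max_pack_count_sketch(total_revisions):
    if total_revisions == 0:
        return 1
    return sum(int(digit) for digit in str(total_revisions))

# max_pack_count_sketch(112894) == 1 + 1 + 2 + 8 + 9 + 4 == 25, matching
# the final assertion above.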
4928.1.1
by Martin Pool
Give RepositoryPackCollection a repr |
1054 |
def test_repr(self): |
1055 |
packs = self.get_packs() |
|
1056 |
self.assertContainsRe(repr(packs), |
|
1057 |
'RepositoryPackCollection(.*Repository(.*))') |
|
1058 |
||
4634.127.2
by John Arbash Meinel
Change the _obsolete_packs code to handle files that are already gone. |
1059 |
def test__obsolete_packs(self): |
1060 |
tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True) |
|
1061 |
names = packs.names() |
|
1062 |
pack = packs.get_pack_by_name(names[0]) |
|
1063 |
# Schedule this one for removal
|
|
1064 |
packs._remove_pack_from_memory(pack) |
|
1065 |
# Simulate a concurrent update by renaming the .pack file and one of
|
|
1066 |
# the indices
|
|
1067 |
packs.transport.rename('packs/%s.pack' % (names[0],), |
|
1068 |
'obsolete_packs/%s.pack' % (names[0],)) |
|
1069 |
packs.transport.rename('indices/%s.iix' % (names[0],), |
|
1070 |
'obsolete_packs/%s.iix' % (names[0],)) |
|
1071 |
# Now trigger the obsoletion, and ensure that all the remaining files
|
|
1072 |
# are still renamed
|
|
1073 |
packs._obsolete_packs([pack]) |
|
1074 |
self.assertEqual([n + '.pack' for n in names[1:]], |
|
1075 |
sorted(packs._pack_transport.list_dir('.'))) |
|
1076 |
# names[0] should not be present in the index anymore
|
|
1077 |
self.assertEqual(names[1:], |
|
1078 |
sorted(set([osutils.splitext(n)[0] for n in |
|
1079 |
packs._index_transport.list_dir('.')]))) |
|
1080 |
||
6225.2.8
by Jelmer Vernooij
Fix creation of obsolete_packs if it is missing. |
1081 |
def test__obsolete_packs_missing_directory(self): |
1082 |
tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True) |
|
1083 |
r.control_transport.rmdir('obsolete_packs') |
|
1084 |
names = packs.names() |
|
1085 |
pack = packs.get_pack_by_name(names[0]) |
|
1086 |
# Schedule this one for removal
|
|
1087 |
packs._remove_pack_from_memory(pack) |
|
1088 |
# Now trigger the obsoletion, and ensure that all the remaining files
|
|
1089 |
# are still renamed
|
|
1090 |
packs._obsolete_packs([pack]) |
|
1091 |
self.assertEqual([n + '.pack' for n in names[1:]], |
|
1092 |
sorted(packs._pack_transport.list_dir('.'))) |
|
1093 |
# names[0] should not be present in the index anymore
|
|
1094 |
self.assertEqual(names[1:], |
|
1095 |
sorted(set([osutils.splitext(n)[0] for n in |
|
1096 |
packs._index_transport.list_dir('.')]))) |
|
1097 |
||
2592.3.84
by Robert Collins
Start of autopacking logic. |
1098 |
def test_pack_distribution_zero(self): |
3711.4.1
by John Arbash Meinel
Fix bug #242510, when determining the autopack sequence, |
1099 |
packs = self.get_packs() |
2592.3.84
by Robert Collins
Start of autopacking logic. |
1100 |
self.assertEqual([0], packs.pack_distribution(0)) |
3052.1.6
by John Arbash Meinel
Change the lock check to raise ObjectNotLocked. |
1101 |
|
1102 |
def test_ensure_loaded_unlocked(self): |
|
3711.4.1
by John Arbash Meinel
Fix bug #242510, when determining the autopack sequence, |
1103 |
packs = self.get_packs() |
3052.1.6
by John Arbash Meinel
Change the lock check to raise ObjectNotLocked. |
1104 |
self.assertRaises(errors.ObjectNotLocked, |
3711.4.1
by John Arbash Meinel
Fix bug #242510, when determining the autopack sequence, |
1105 |
packs.ensure_loaded) |
3052.1.6
by John Arbash Meinel
Change the lock check to raise ObjectNotLocked. |
1106 |
|
2592.3.84
by Robert Collins
Start of autopacking logic. |
1107 |
def test_pack_distribution_one_to_nine(self): |
3711.4.1
by John Arbash Meinel
Fix bug #242510, when determining the autopack sequence, |
1108 |
packs = self.get_packs() |
2592.3.84
by Robert Collins
Start of autopacking logic. |
1109 |
self.assertEqual([1], |
1110 |
packs.pack_distribution(1)) |
|
1111 |
self.assertEqual([1, 1], |
|
1112 |
packs.pack_distribution(2)) |
|
1113 |
self.assertEqual([1, 1, 1], |
|
1114 |
packs.pack_distribution(3)) |
|
1115 |
self.assertEqual([1, 1, 1, 1], |
|
1116 |
packs.pack_distribution(4)) |
|
1117 |
self.assertEqual([1, 1, 1, 1, 1], |
|
1118 |
packs.pack_distribution(5)) |
|
1119 |
self.assertEqual([1, 1, 1, 1, 1, 1], |
|
1120 |
packs.pack_distribution(6)) |
|
1121 |
self.assertEqual([1, 1, 1, 1, 1, 1, 1], |
|
1122 |
packs.pack_distribution(7)) |
|
1123 |
self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1], |
|
1124 |
packs.pack_distribution(8)) |
|
1125 |
self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1], |
|
1126 |
packs.pack_distribution(9)) |
|
1127 |
||
1128 |
def test_pack_distribution_stable_at_boundaries(self): |
|
1129 |
"""When there are multi-rev packs the counts are stable."""
|
|
3711.4.1
by John Arbash Meinel
Fix bug #242510, when determining the autopack sequence, |
1130 |
packs = self.get_packs() |
2592.3.84
by Robert Collins
Start of autopacking logic. |
1131 |
# in 10s:
|
1132 |
self.assertEqual([10], packs.pack_distribution(10)) |
|
1133 |
self.assertEqual([10, 1], packs.pack_distribution(11)) |
|
1134 |
self.assertEqual([10, 10], packs.pack_distribution(20)) |
|
1135 |
self.assertEqual([10, 10, 1], packs.pack_distribution(21)) |
|
1136 |
# 100s
|
|
1137 |
self.assertEqual([100], packs.pack_distribution(100)) |
|
1138 |
self.assertEqual([100, 1], packs.pack_distribution(101)) |
|
1139 |
self.assertEqual([100, 10, 1], packs.pack_distribution(111)) |
|
1140 |
self.assertEqual([100, 100], packs.pack_distribution(200)) |
|
1141 |
self.assertEqual([100, 100, 1], packs.pack_distribution(201)) |
|
1142 |
self.assertEqual([100, 100, 10, 1], packs.pack_distribution(211)) |
|
1143 |
||
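# A rough sketch, assumed rather than taken from bzrlib, of the
# distribution described by the assertions above: each decimal digit of
# the revision count contributes that many packs at its place value, so
# 211 revisions yield [100, 100, 10, 1] and an empty repository yields the
# single bucket [0].
def pack_distribution_sketch(total_revisions):
    if total_revisions == 0:
        return [0]
    digits = str(total_revisions)
    result = []
    for position, digit in enumerate(digits):
        place_value = 10 ** (len(digits) - position - 1)
        result.extend([place_value] * int(digit))
    return result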
2592.3.85
by Robert Collins
Finish autopack corner cases. |
1144 |
def test_plan_pack_operations_2009_revisions_skip_all_packs(self): |
3711.4.1
by John Arbash Meinel
Fix bug #242510, when determining the autopack sequence, |
1145 |
packs = self.get_packs() |
2592.3.85
by Robert Collins
Finish autopack corner cases. |
1146 |
existing_packs = [(2000, "big"), (9, "medium")] |
1147 |
# rev count - 2009 -> 2x1000 + 9x1
|
|
1148 |
pack_operations = packs.plan_autopack_combinations( |
|
1149 |
existing_packs, [1000, 1000, 1, 1, 1, 1, 1, 1, 1, 1, 1]) |
|
1150 |
self.assertEqual([], pack_operations) |
|
1151 |
||
1152 |
def test_plan_pack_operations_2010_revisions_skip_all_packs(self): |
|
3711.4.1
by John Arbash Meinel
Fix bug #242510, when determining the autopack sequence, |
1153 |
packs = self.get_packs() |
2592.3.85
by Robert Collins
Finish autopack corner cases. |
1154 |
existing_packs = [(2000, "big"), (9, "medium"), (1, "single")] |
1155 |
# rev count - 2010 -> 2x1000 + 1x10
|
|
1156 |
pack_operations = packs.plan_autopack_combinations( |
|
1157 |
existing_packs, [1000, 1000, 10]) |
|
1158 |
self.assertEqual([], pack_operations) |
|
1159 |
||
1160 |
def test_plan_pack_operations_2010_combines_smallest_two(self): |
|
3711.4.1
by John Arbash Meinel
Fix bug #242510, when determining the autopack sequence, |
1161 |
packs = self.get_packs() |
2592.3.85
by Robert Collins
Finish autopack corner cases. |
1162 |
existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"), |
1163 |
(1, "single1")] |
|
1164 |
# rev count - 2010 -> 2x1000 + 1x10 (3)
|
|
1165 |
pack_operations = packs.plan_autopack_combinations( |
|
1166 |
existing_packs, [1000, 1000, 10]) |
|
3711.4.2
by John Arbash Meinel
Change the logic to solve it in a different way. |
1167 |
self.assertEqual([[2, ["single2", "single1"]]], pack_operations) |
2592.3.85
by Robert Collins
Finish autopack corner cases. |
1168 |
|
3711.4.2
by John Arbash Meinel
Change the logic to solve it in a different way. |
1169 |
def test_plan_pack_operations_creates_a_single_op(self): |
3711.4.1
by John Arbash Meinel
Fix bug #242510, when determining the autopack sequence, |
1170 |
packs = self.get_packs() |
3711.4.2
by John Arbash Meinel
Change the logic to solve it in a different way. |
1171 |
existing_packs = [(50, 'a'), (40, 'b'), (30, 'c'), (10, 'd'), |
1172 |
(10, 'e'), (6, 'f'), (4, 'g')] |
|
1173 |
# rev count 150 -> 1x100 and 5x10
|
|
1174 |
# The two size 10 packs do not need to be touched. The 50, 40, 30 would
|
|
1175 |
# be combined into a single 120 size pack, and the 6 & 4 would
|
|
1176 |
# be combined into a size 10 pack. However, if we have to rewrite them,
|
|
1177 |
# we save a pack file with no increased I/O by putting them into the
|
|
1178 |
# same file.
|
|
1179 |
distribution = packs.pack_distribution(150) |
|
3711.4.1
by John Arbash Meinel
Fix bug #242510, when determining the autopack sequence, |
1180 |
pack_operations = packs.plan_autopack_combinations(existing_packs, |
3711.4.2
by John Arbash Meinel
Change the logic to solve it in a different way. |
1181 |
distribution) |
1182 |
self.assertEqual([[130, ['a', 'b', 'c', 'f', 'g']]], pack_operations) |
|
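# Worked arithmetic for the expectation above, as an illustration only:
# the distribution for 150 revisions is [100, 10, 10, 10, 10, 10].  Packs
# 'd' and 'e' already fill a 10-revision bucket each and are left alone;
# the remaining packs must be rewritten anyway, so they are emitted as one
# combined pack of 50 + 40 + 30 + 6 + 4 = 130 revisions, which is the
# single operation asserted above.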
3711.4.1
by John Arbash Meinel
Fix bug #242510, when determining the autopack sequence, |
1183 |
|
2592.3.173
by Robert Collins
Basic implementation of all_packs. |
1184 |
def test_all_packs_none(self): |
1185 |
format = self.get_format() |
|
1186 |
tree = self.make_branch_and_tree('.', format=format) |
|
1187 |
tree.lock_read() |
|
1188 |
self.addCleanup(tree.unlock) |
|
2592.3.232
by Martin Pool
Disambiguate two member variables called _packs into _packs_by_name and _pack_collection |
1189 |
packs = tree.branch.repository._pack_collection |
2592.3.173
by Robert Collins
Basic implementation of all_packs. |
1190 |
packs.ensure_loaded() |
1191 |
self.assertEqual([], packs.all_packs()) |
|
1192 |
||
1193 |
def test_all_packs_one(self): |
|
1194 |
format = self.get_format() |
|
1195 |
tree = self.make_branch_and_tree('.', format=format) |
|
1196 |
tree.commit('start') |
|
1197 |
tree.lock_read() |
|
1198 |
self.addCleanup(tree.unlock) |
|
2592.3.232
by Martin Pool
Disambiguate two member variables called _packs into _packs_by_name and _pack_collection |
1199 |
packs = tree.branch.repository._pack_collection |
2592.3.173
by Robert Collins
Basic implementation of all_packs. |
1200 |
packs.ensure_loaded() |
2592.3.176
by Robert Collins
Various pack refactorings. |
1201 |
self.assertEqual([ |
1202 |
packs.get_pack_by_name(packs.names()[0])], |
|
1203 |
packs.all_packs()) |
|
2592.3.173
by Robert Collins
Basic implementation of all_packs. |
1204 |
|
1205 |
def test_all_packs_two(self): |
|
1206 |
format = self.get_format() |
|
1207 |
tree = self.make_branch_and_tree('.', format=format) |
|
1208 |
tree.commit('start') |
|
1209 |
tree.commit('continue') |
|
1210 |
tree.lock_read() |
|
1211 |
self.addCleanup(tree.unlock) |
|
2592.3.232
by Martin Pool
Disambiguate two member variables called _packs into _packs_by_name and _pack_collection |
1212 |
packs = tree.branch.repository._pack_collection |
2592.3.173
by Robert Collins
Basic implementation of all_packs. |
1213 |
packs.ensure_loaded() |
1214 |
self.assertEqual([ |
|
2592.3.176
by Robert Collins
Various pack refactorings. |
1215 |
packs.get_pack_by_name(packs.names()[0]), |
1216 |
packs.get_pack_by_name(packs.names()[1]), |
|
2592.3.173
by Robert Collins
Basic implementation of all_packs. |
1217 |
], packs.all_packs()) |
1218 |
||
2592.3.176
by Robert Collins
Various pack refactorings. |
1219 |
def test_get_pack_by_name(self): |
1220 |
format = self.get_format() |
|
1221 |
tree = self.make_branch_and_tree('.', format=format) |
|
1222 |
tree.commit('start') |
|
1223 |
tree.lock_read() |
|
1224 |
self.addCleanup(tree.unlock) |
|
2592.3.232
by Martin Pool
Disambiguate two member variables called _packs into _packs_by_name and _pack_collection |
1225 |
packs = tree.branch.repository._pack_collection |
4145.1.6
by Robert Collins
More test fallout, but all caught now. |
1226 |
packs.reset() |
2592.3.176
by Robert Collins
Various pack refactorings. |
1227 |
packs.ensure_loaded() |
1228 |
name = packs.names()[0] |
|
1229 |
pack_1 = packs.get_pack_by_name(name) |
|
1230 |
# the pack should be correctly initialised
|
|
3517.4.5
by Martin Pool
Correct use of packs._names in test_get_pack_by_name |
1231 |
sizes = packs._names[name] |
3221.12.4
by Robert Collins
Implement basic repository supporting external references. |
1232 |
rev_index = GraphIndex(packs._index_transport, name + '.rix', sizes[0]) |
1233 |
inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1]) |
|
1234 |
txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2]) |
|
1235 |
sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3]) |
|
2592.3.191
by Robert Collins
Give Pack responsibility for index naming, and two concrete classes - NewPack for new packs and ExistingPack for packs we read from disk. |
1236 |
self.assertEqual(pack_repo.ExistingPack(packs._pack_transport, |
2592.3.219
by Robert Collins
Review feedback. |
1237 |
name, rev_index, inv_index, txt_index, sig_index), pack_1) |
2592.3.176
by Robert Collins
Various pack refactorings. |
1238 |
# and the same instance should be returned on successive calls.
|
1239 |
self.assertTrue(pack_1 is packs.get_pack_by_name(name)) |
|
1240 |
||
3789.1.2
by John Arbash Meinel
Add RepositoryPackCollection.reload_pack_names() |
1241 |
def test_reload_pack_names_new_entry(self): |
3789.2.19
by John Arbash Meinel
Refactor to make the tests a bit simpler |
1242 |
tree, r, packs, revs = self.make_packs_and_alt_repo() |
3789.1.2
by John Arbash Meinel
Add RepositoryPackCollection.reload_pack_names() |
1243 |
names = packs.names() |
1244 |
# Add a new pack file into the repository
|
|
3789.2.19
by John Arbash Meinel
Refactor to make the tests a bit simpler |
1245 |
rev4 = tree.commit('four') |
3789.1.2
by John Arbash Meinel
Add RepositoryPackCollection.reload_pack_names() |
1246 |
new_names = tree.branch.repository._pack_collection.names() |
1247 |
new_name = set(new_names).difference(names) |
|
1248 |
self.assertEqual(1, len(new_name)) |
|
1249 |
new_name = new_name.pop() |
|
1250 |
# The old collection hasn't noticed yet
|
|
1251 |
self.assertEqual(names, packs.names()) |
|
3789.1.8
by John Arbash Meinel
Change the api of reload_pack_names(). |
1252 |
self.assertTrue(packs.reload_pack_names()) |
3789.1.2
by John Arbash Meinel
Add RepositoryPackCollection.reload_pack_names() |
1253 |
self.assertEqual(new_names, packs.names()) |
1254 |
# And the repository can access the new revision
|
|
3789.2.19
by John Arbash Meinel
Refactor to make the tests a bit simpler |
1255 |
self.assertEqual({rev4:(revs[-1],)}, r.get_parent_map([rev4])) |
3789.1.8
by John Arbash Meinel
Change the api of reload_pack_names(). |
1256 |
self.assertFalse(packs.reload_pack_names()) |
3789.1.2
by John Arbash Meinel
Add RepositoryPackCollection.reload_pack_names() |
1257 |
|
1258 |
def test_reload_pack_names_added_and_removed(self): |
|
3789.2.19
by John Arbash Meinel
Refactor to make the tests a bit simpler |
1259 |
tree, r, packs, revs = self.make_packs_and_alt_repo() |
3789.1.2
by John Arbash Meinel
Add RepositoryPackCollection.reload_pack_names() |
1260 |
names = packs.names() |
1261 |
# Now repack the whole thing
|
|
1262 |
tree.branch.repository.pack() |
|
1263 |
new_names = tree.branch.repository._pack_collection.names() |
|
1264 |
# The other collection hasn't noticed yet
|
|
1265 |
self.assertEqual(names, packs.names()) |
|
3789.1.8
by John Arbash Meinel
Change the api of reload_pack_names(). |
1266 |
self.assertTrue(packs.reload_pack_names()) |
3789.1.2
by John Arbash Meinel
Add RepositoryPackCollection.reload_pack_names() |
1267 |
self.assertEqual(new_names, packs.names()) |
3789.2.19
by John Arbash Meinel
Refactor to make the tests a bit simpler |
1268 |
self.assertEqual({revs[-1]:(revs[-2],)}, r.get_parent_map([revs[-1]])) |
3789.1.8
by John Arbash Meinel
Change the api of reload_pack_names(). |
1269 |
self.assertFalse(packs.reload_pack_names()) |
3789.1.2
by John Arbash Meinel
Add RepositoryPackCollection.reload_pack_names() |
1270 |
|
4634.126.1
by John Arbash Meinel
(jam) Fix bug #507566, concurrent autopacking correctness. |
1271 |
def test_reload_pack_names_preserves_pending(self): |
1272 |
# TODO: Update this to also test for pending-deleted names
|
|
1273 |
tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True) |
|
1274 |
# We will add one pack (via start_write_group + insert_record_stream),
|
|
1275 |
# and remove another pack (via _remove_pack_from_memory)
|
|
1276 |
orig_names = packs.names() |
|
1277 |
orig_at_load = packs._packs_at_load |
|
1278 |
to_remove_name = iter(orig_names).next() |
|
1279 |
r.start_write_group() |
|
1280 |
self.addCleanup(r.abort_write_group) |
|
1281 |
r.texts.insert_record_stream([versionedfile.FulltextContentFactory( |
|
1282 |
('text', 'rev'), (), None, 'content\n')]) |
|
1283 |
new_pack = packs._new_pack |
|
1284 |
self.assertTrue(new_pack.data_inserted()) |
|
1285 |
new_pack.finish() |
|
1286 |
packs.allocate(new_pack) |
|
1287 |
packs._new_pack = None |
|
1288 |
removed_pack = packs.get_pack_by_name(to_remove_name) |
|
1289 |
packs._remove_pack_from_memory(removed_pack) |
|
1290 |
names = packs.names() |
|
4634.127.3
by John Arbash Meinel
Add code so we don't try to obsolete files someone else has 'claimed'. |
1291 |
all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names() |
4634.126.1
by John Arbash Meinel
(jam) Fix bug #507566, concurrent autopacking correctness. |
1292 |
new_names = set([x[0][0] for x in new_nodes]) |
1293 |
self.assertEqual(names, sorted([x[0][0] for x in all_nodes])) |
|
1294 |
self.assertEqual(set(names) - set(orig_names), new_names) |
|
1295 |
self.assertEqual(set([new_pack.name]), new_names) |
|
1296 |
self.assertEqual([to_remove_name], |
|
1297 |
sorted([x[0][0] for x in deleted_nodes])) |
|
1298 |
packs.reload_pack_names() |
|
1299 |
reloaded_names = packs.names() |
|
1300 |
self.assertEqual(orig_at_load, packs._packs_at_load) |
|
1301 |
self.assertEqual(names, reloaded_names) |
|
4634.127.3
by John Arbash Meinel
Add code so we don't try to obsolete files someone else has 'claimed'. |
1302 |
all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names() |
4634.126.1
by John Arbash Meinel
(jam) Fix bug #507566, concurrent autopacking correctness. |
1303 |
new_names = set([x[0][0] for x in new_nodes]) |
1304 |
self.assertEqual(names, sorted([x[0][0] for x in all_nodes])) |
|
1305 |
self.assertEqual(set(names) - set(orig_names), new_names) |
|
1306 |
self.assertEqual(set([new_pack.name]), new_names) |
|
1307 |
self.assertEqual([to_remove_name], |
|
1308 |
sorted([x[0][0] for x in deleted_nodes])) |
|
1309 |
||
4634.127.5
by John Arbash Meinel
Possible fix for making sure packs triggering autopacking get cleaned up. |
1310 |
def test_autopack_obsoletes_new_pack(self): |
1311 |
tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True) |
|
1312 |
packs._max_pack_count = lambda x: 1 |
|
1313 |
packs.pack_distribution = lambda x: [10] |
|
1314 |
r.start_write_group() |
|
1315 |
r.revisions.insert_record_stream([versionedfile.FulltextContentFactory( |
|
1316 |
('bogus-rev',), (), None, 'bogus-content\n')]) |
|
1317 |
# This should trigger an autopack, which will combine everything into a
|
|
1318 |
# single pack file.
|
|
1319 |
new_names = r.commit_write_group() |
|
1320 |
names = packs.names() |
|
1321 |
self.assertEqual(1, len(names)) |
|
1322 |
self.assertEqual([names[0] + '.pack'], |
|
1323 |
packs._pack_transport.list_dir('.')) |
|
1324 |
||
3789.2.20
by John Arbash Meinel
The autopack code can now trigger itself to retry when _copy_revision_texts fails. |
1325 |
def test_autopack_reloads_and_stops(self): |
1326 |
tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True) |
|
1327 |
# After we have determined what needs to be autopacked, trigger a
|
|
1328 |
# full-pack via the other repo which will cause us to re-evaluate and
|
|
1329 |
# decide we don't need to do anything
|
|
1330 |
orig_execute = packs._execute_pack_operations |
|
1331 |
def _munged_execute_pack_ops(*args, **kwargs): |
|
1332 |
tree.branch.repository.pack() |
|
1333 |
return orig_execute(*args, **kwargs) |
|
1334 |
packs._execute_pack_operations = _munged_execute_pack_ops |
|
1335 |
packs._max_pack_count = lambda x: 1 |
|
1336 |
packs.pack_distribution = lambda x: [10] |
|
1337 |
self.assertFalse(packs.autopack()) |
|
1338 |
self.assertEqual(1, len(packs.names())) |
|
1339 |
self.assertEqual(tree.branch.repository._pack_collection.names(), |
|
1340 |
packs.names()) |
|
1341 |
||
4634.127.1
by John Arbash Meinel
Partial fix for bug #507557. |
1342 |
def test__save_pack_names(self): |
1343 |
tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True) |
|
1344 |
names = packs.names() |
|
1345 |
pack = packs.get_pack_by_name(names[0]) |
|
1346 |
packs._remove_pack_from_memory(pack) |
|
1347 |
packs._save_pack_names(obsolete_packs=[pack]) |
|
1348 |
cur_packs = packs._pack_transport.list_dir('.') |
|
1349 |
self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs)) |
|
1350 |
# obsolete_packs will also have stuff like .rix and .iix present.
|
|
1351 |
obsolete_packs = packs.transport.list_dir('obsolete_packs') |
|
1352 |
obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs]) |
|
1353 |
self.assertEqual([pack.name], sorted(obsolete_names)) |
|
1354 |
||
1355 |
def test__save_pack_names_already_obsoleted(self): |
|
1356 |
tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True) |
|
1357 |
names = packs.names() |
|
1358 |
pack = packs.get_pack_by_name(names[0]) |
|
1359 |
packs._remove_pack_from_memory(pack) |
|
1360 |
# We are going to simulate a concurrent autopack by manually obsoleting
|
|
1361 |
# the pack directly.
|
|
1362 |
packs._obsolete_packs([pack]) |
|
1363 |
packs._save_pack_names(clear_obsolete_packs=True, |
|
1364 |
obsolete_packs=[pack]) |
|
1365 |
cur_packs = packs._pack_transport.list_dir('.') |
|
1366 |
self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs)) |
|
1367 |
# Note that while we set clear_obsolete_packs=True, it should not
|
|
1368 |
# delete a pack file that we have also scheduled for obsoletion.
|
|
1369 |
obsolete_packs = packs.transport.list_dir('obsolete_packs') |
|
1370 |
obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs]) |
|
1371 |
self.assertEqual([pack.name], sorted(obsolete_names)) |
|
1372 |
||
6225.2.6
by Jelmer Vernooij
Move pack clear test to test_repository. |
1373 |
def test_pack_no_obsolete_packs_directory(self): |
1374 |
"""Bug #314314, don't fail if obsolete_packs directory does
|
|
1375 |
not exist."""
|
|
1376 |
tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True) |
|
1377 |
r.control_transport.rmdir('obsolete_packs') |
|
1378 |
packs._clear_obsolete_packs() |
|
4634.127.3
by John Arbash Meinel
Add code so we don't try to obsolete files someone else has 'claimed'. |
1379 |
|
2592.3.173
by Robert Collins
Basic implementation of all_packs. |
1380 |
|
1381 |
class TestPack(TestCaseWithTransport): |
|
1382 |
"""Tests for the Pack object."""
|
|
1383 |
||
1384 |
def assertCurrentlyEqual(self, left, right): |
|
1385 |
self.assertTrue(left == right) |
|
1386 |
self.assertTrue(right == left) |
|
1387 |
self.assertFalse(left != right) |
|
1388 |
self.assertFalse(right != left) |
|
1389 |
||
1390 |
def assertCurrentlyNotEqual(self, left, right): |
|
1391 |
self.assertFalse(left == right) |
|
1392 |
self.assertFalse(right == left) |
|
1393 |
self.assertTrue(left != right) |
|
1394 |
self.assertTrue(right != left) |
|
1395 |
||
1396 |
def test___eq____ne__(self): |
|
2592.3.191
by Robert Collins
Give Pack responsibility for index naming, and two concrete classes - NewPack for new packs and ExistingPack for packs we read from disk. |
1397 |
left = pack_repo.ExistingPack('', '', '', '', '', '') |
1398 |
right = pack_repo.ExistingPack('', '', '', '', '', '') |
|
2592.3.173
by Robert Collins
Basic implementation of all_packs. |
1399 |
self.assertCurrentlyEqual(left, right) |
1400 |
# change all attributes and ensure equality changes as we do.
|
|
1401 |
left.revision_index = 'a' |
|
1402 |
self.assertCurrentlyNotEqual(left, right) |
|
1403 |
right.revision_index = 'a' |
|
1404 |
self.assertCurrentlyEqual(left, right) |
|
1405 |
left.inventory_index = 'a' |
|
1406 |
self.assertCurrentlyNotEqual(left, right) |
|
1407 |
right.inventory_index = 'a' |
|
1408 |
self.assertCurrentlyEqual(left, right) |
|
1409 |
left.text_index = 'a' |
|
1410 |
self.assertCurrentlyNotEqual(left, right) |
|
1411 |
right.text_index = 'a' |
|
1412 |
self.assertCurrentlyEqual(left, right) |
|
1413 |
left.signature_index = 'a' |
|
1414 |
self.assertCurrentlyNotEqual(left, right) |
|
1415 |
right.signature_index = 'a' |
|
1416 |
self.assertCurrentlyEqual(left, right) |
|
1417 |
left.name = 'a' |
|
1418 |
self.assertCurrentlyNotEqual(left, right) |
|
1419 |
right.name = 'a' |
|
1420 |
self.assertCurrentlyEqual(left, right) |
|
1421 |
left.transport = 'a' |
|
1422 |
self.assertCurrentlyNotEqual(left, right) |
|
1423 |
right.transport = 'a' |
|
1424 |
self.assertCurrentlyEqual(left, right) |
|
2592.3.179
by Robert Collins
Generate the revision_index_map for packing during the core operation, from the pack objects. |
1425 |
|
1426 |
def test_file_name(self): |
|
2592.3.191
by Robert Collins
Give Pack responsibility for index naming, and two concrete classes - NewPack for new packs and ExistingPack for packs we read from disk. |
1427 |
pack = pack_repo.ExistingPack('', 'a_name', '', '', '', '') |
2592.3.179
by Robert Collins
Generate the revision_index_map for packing during the core operation, from the pack objects. |
1428 |
self.assertEqual('a_name.pack', pack.file_name()) |
2592.3.192
by Robert Collins
Move new revision index management to NewPack. |
1429 |
|
1430 |
||
1431 |
class TestNewPack(TestCaseWithTransport): |
|
1432 |
"""Tests for pack_repo.NewPack."""
|
|
1433 |
||
2592.3.193
by Robert Collins
Move hash tracking of new packs into NewPack. |
1434 |
def test_new_instance_attributes(self): |
2592.3.194
by Robert Collins
Output the revision index from NewPack.finish |
1435 |
upload_transport = self.get_transport('upload') |
1436 |
pack_transport = self.get_transport('pack') |
|
1437 |
index_transport = self.get_transport('index') |
|
1438 |
upload_transport.mkdir('.') |
|
4241.6.8
by Robert Collins, John Arbash Meinel, Ian Clatworthy, Vincent Ladeuil
Add --development6-rich-root, disabling the legacy and unneeded development2 format, and activating the tests for CHK features disabled pending this format. (Robert Collins, John Arbash Meinel, Ian Clatworthy, Vincent Ladeuil) |
1439 |
collection = pack_repo.RepositoryPackCollection( |
1440 |
repo=None, |
|
3830.3.1
by Martin Pool
NewPack should be constructed from the PackCollection, rather than attributes of it |
1441 |
transport=self.get_transport('.'), |
1442 |
index_transport=index_transport, |
|
1443 |
upload_transport=upload_transport, |
|
1444 |
pack_transport=pack_transport, |
|
1445 |
index_builder_class=BTreeBuilder, |
|
4241.6.8
by Robert Collins, John Arbash Meinel, Ian Clatworthy, Vincent Ladeuil
Add --development6-rich-root, disabling the legacy and unneeded development2 format, and activating the tests for CHK features disabled pending this format. (Robert Collins, John Arbash Meinel, Ian Clatworthy, Vincent Ladeuil) |
1446 |
index_class=BTreeGraphIndex, |
1447 |
use_chk_index=False) |
|
3830.3.1
by Martin Pool
NewPack should be constructed from the PackCollection, rather than attributes of it |
1448 |
pack = pack_repo.NewPack(collection) |
4857.2.1
by John Arbash Meinel
2 test_repository tests weren't adding cleanups when opening files. |
1449 |
self.addCleanup(pack.abort) # Make sure the write stream gets closed |
3735.1.1
by Robert Collins
Add development2 formats using BTree indices. |
1450 |
self.assertIsInstance(pack.revision_index, BTreeBuilder) |
1451 |
self.assertIsInstance(pack.inventory_index, BTreeBuilder) |
|
2929.3.5
by Vincent Ladeuil
New files, same warnings, same fixes. |
1452 |
self.assertIsInstance(pack._hash, type(osutils.md5())) |
2592.3.194
by Robert Collins
Output the revision index from NewPack.finish |
1453 |
self.assertTrue(pack.upload_transport is upload_transport) |
1454 |
self.assertTrue(pack.index_transport is index_transport) |
|
1455 |
self.assertTrue(pack.pack_transport is pack_transport) |
|
1456 |
self.assertEqual(None, pack.index_sizes) |
|
1457 |
self.assertEqual(20, len(pack.random_name)) |
|
1458 |
self.assertIsInstance(pack.random_name, str) |
|
1459 |
self.assertIsInstance(pack.start_time, float) |
|
2951.1.2
by Robert Collins
Partial refactoring of pack_repo to create a Packer object for packing. |
1460 |
|
1461 |
||
1462 |
class TestPacker(TestCaseWithTransport): |
|
1463 |
"""Tests for the packs repository Packer class."""
|
|
2951.1.10
by Robert Collins
Peer review feedback with Ian. |
1464 |
|
3824.2.4
by John Arbash Meinel
Add a test that ensures the pack ordering changes as part of calling .pack() |
1465 |
def test_pack_optimizes_pack_order(self): |
4617.8.1
by Robert Collins
Lock down another test assuming the default was a PackRepository. |
1466 |
builder = self.make_branch_builder('.', format="1.9") |
3824.2.4
by John Arbash Meinel
Add a test that ensures the pack ordering changes as part of calling .pack() |
1467 |
builder.start_series() |
1468 |
builder.build_snapshot('A', None, [ |
|
1469 |
('add', ('', 'root-id', 'directory', None)), |
|
1470 |
('add', ('f', 'f-id', 'file', 'content\n'))]) |
|
1471 |
builder.build_snapshot('B', ['A'], |
|
1472 |
[('modify', ('f-id', 'new-content\n'))]) |
|
1473 |
builder.build_snapshot('C', ['B'], |
|
1474 |
[('modify', ('f-id', 'third-content\n'))]) |
|
1475 |
builder.build_snapshot('D', ['C'], |
|
1476 |
[('modify', ('f-id', 'fourth-content\n'))]) |
|
1477 |
b = builder.get_branch() |
|
1478 |
b.lock_read() |
|
1479 |
builder.finish_series() |
|
1480 |
self.addCleanup(b.unlock) |
|
1481 |
# At this point, we should have 4 pack files available
|
|
1482 |
# Because of how they were built, they correspond to
|
|
1483 |
# ['D', 'C', 'B', 'A']
|
|
1484 |
packs = b.repository._pack_collection.packs |
|
5757.7.5
by Jelmer Vernooij
Fix imports in tests. |
1485 |
packer = knitpack_repo.KnitPacker(b.repository._pack_collection, |
3824.2.4
by John Arbash Meinel
Add a test that ensures the pack ordering changes as part of calling .pack() |
1486 |
packs, 'testing', |
1487 |
revision_ids=['B', 'C']) |
|
1488 |
# Now, when we are copying the B & C revisions, their pack files should
|
|
1489 |
# be moved to the front of the stack
|
|
3824.2.5
by Andrew Bennetts
Minor tweaks to comments etc. |
1490 |
# The new ordering moves B & C to the front of the .packs attribute,
|
1491 |
# and leaves the others in the original order.
|
|
3824.2.4
by John Arbash Meinel
Add a test that ensures the pack ordering changes as part of calling .pack() |
1492 |
new_packs = [packs[1], packs[2], packs[0], packs[3]] |
1493 |
new_pack = packer.pack() |
|
1494 |
self.assertEqual(new_packs, packer.packs) |
|
3146.6.1
by Aaron Bentley
InterDifferingSerializer shows a progress bar |
1495 |
|
1496 |
||


class TestOptimisingPacker(TestCaseWithTransport):
    """Tests for the OptimisingPacker class."""

    def get_pack_collection(self):
        repo = self.make_repository('.')
        return repo._pack_collection

    def test_open_pack_will_optimise(self):
        packer = knitpack_repo.OptimisingKnitPacker(self.get_pack_collection(),
                                                    [], '.test')
        new_pack = packer.open_pack()
        self.addCleanup(new_pack.abort)  # ensure cleanup
        self.assertIsInstance(new_pack, pack_repo.NewPack)
        self.assertTrue(new_pack.revision_index._optimize_for_size)
        self.assertTrue(new_pack.inventory_index._optimize_for_size)
        self.assertTrue(new_pack.text_index._optimize_for_size)
        self.assertTrue(new_pack.signature_index._optimize_for_size)


class TestGCCHKPacker(TestCaseWithTransport):

    def make_abc_branch(self):
        builder = self.make_branch_builder('source')
        builder.start_series()
        builder.build_snapshot('A', None, [
            ('add', ('', 'root-id', 'directory', None)),
            ('add', ('file', 'file-id', 'file', 'content\n')),
            ])
        builder.build_snapshot('B', ['A'], [
            ('add', ('dir', 'dir-id', 'directory', None))])
        builder.build_snapshot('C', ['B'], [
            ('modify', ('file-id', 'new content\n'))])
        builder.finish_series()
        return builder.get_branch()

    def make_branch_with_disjoint_inventory_and_revision(self):
        """A repo with separate packs for a revision's Revision and Inventory.

        There will be one pack file that holds the Revision content, and one
        for the Inventory content.

        :return: (repository,
                  pack_name_with_rev_A_Revision,
                  pack_name_with_rev_A_Inventory,
                  pack_name_with_rev_C_content)
        """
        b_source = self.make_abc_branch()
        b_base = b_source.bzrdir.sprout('base', revision_id='A').open_branch()
        b_stacked = b_base.bzrdir.sprout('stacked', stacked=True).open_branch()
        b_stacked.lock_write()
        self.addCleanup(b_stacked.unlock)
        b_stacked.fetch(b_source, 'B')
        # Now re-open the stacked repo directly (no fallbacks) so that we can
        # fill in the A rev.
        repo_not_stacked = b_stacked.bzrdir.open_repository()
        repo_not_stacked.lock_write()
        self.addCleanup(repo_not_stacked.unlock)
        # Now we should have a pack file with A's inventory, but not its
        # Revision.
        self.assertEqual([('A',), ('B',)],
                         sorted(repo_not_stacked.inventories.keys()))
        self.assertEqual([('B',)],
                         sorted(repo_not_stacked.revisions.keys()))
        stacked_pack_names = repo_not_stacked._pack_collection.names()
        # We have a couple of names here; figure out which one has A's
        # inventory.
        for name in stacked_pack_names:
            pack = repo_not_stacked._pack_collection.get_pack_by_name(name)
            keys = [n[1] for n in pack.inventory_index.iter_all_entries()]
            if ('A',) in keys:
                inv_a_pack_name = name
                break
        else:
            self.fail("Could not find pack containing A's inventory")
        repo_not_stacked.fetch(b_source.repository, 'A')
        self.assertEqual([('A',), ('B',)],
                         sorted(repo_not_stacked.revisions.keys()))
        new_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_a_pack_names = new_pack_names.difference(stacked_pack_names)
        self.assertEqual(1, len(rev_a_pack_names))
        rev_a_pack_name = list(rev_a_pack_names)[0]
        # Now fetch 'C', so we have a couple of pack files to join.
        repo_not_stacked.fetch(b_source.repository, 'C')
        rev_c_pack_names = set(repo_not_stacked._pack_collection.names())
        rev_c_pack_names = rev_c_pack_names.difference(new_pack_names)
        self.assertEqual(1, len(rev_c_pack_names))
        rev_c_pack_name = list(rev_c_pack_names)[0]
        return (repo_not_stacked, rev_a_pack_name, inv_a_pack_name,
                rev_c_pack_name)

    def test_pack_with_distant_inventories(self):
        # See https://bugs.launchpad.net/bzr/+bug/437003
        # When repacking, it is possible to have an inventory in a different
        # pack file than the associated revision. An autopack can then come
        # along, and miss that inventory, and complain.
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        a_pack = repo._pack_collection.get_pack_by_name(rev_a_pack_name)
        c_pack = repo._pack_collection.get_pack_by_name(rev_c_pack_name)
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
                                                [a_pack, c_pack], '.test-pack')
        # This would raise ValueError in bug #437003, but should not raise an
        # error once fixed.
        packer.pack()

    def test_pack_with_missing_inventory(self):
        # Similar to test_pack_with_distant_inventories, but this time we
        # force the A inventory to actually be gone from the repository.
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        inv_a_pack = repo._pack_collection.get_pack_by_name(inv_a_pack_name)
        repo._pack_collection._remove_pack_from_memory(inv_a_pack)
        packer = groupcompress_repo.GCCHKPacker(repo._pack_collection,
            repo._pack_collection.all_packs(), '.test-pack')
        e = self.assertRaises(ValueError, packer.pack)
        packer.new_pack.abort()
        self.assertContainsRe(str(e),
            r"We are missing inventories for revisions: .*'A'")
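

# Not part of the original test suite: a minimal diagnostic sketch showing how
# the pack layout asserted in TestGCCHKPacker can be inspected by hand. It
# relies only on calls already exercised above (_pack_collection.names(),
# get_pack_by_name() and inventory_index.iter_all_entries()); the helper name
# itself is ours, not bzrlib API.
def _inventory_keys_by_pack(repo):
    """Map each pack name to the inventory keys stored in that pack."""
    layout = {}
    for name in repo._pack_collection.names():
        pack = repo._pack_collection.get_pack_by_name(name)
        layout[name] = sorted(
            node[1] for node in pack.inventory_index.iter_all_entries())
    return layout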


class TestCrossFormatPacks(TestCaseWithTransport):

    def log_pack(self, hint=None):
        self.calls.append(('pack', hint))
        self.orig_pack(hint=hint)
        if self.expect_hint:
            self.assertTrue(hint)

    def run_stream(self, src_fmt, target_fmt, expect_pack_called):
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository._get_source(target._format)
        self.orig_pack = target.pack
        self.overrideAttr(target, "pack", self.log_pack)
        search = target.search_missing_revision_ids(
            source_tree.branch.repository, revision_ids=[tip])
        stream = source.get_stream(search)
        from_format = source_tree.branch.repository._format
        sink = target._get_sink()
        sink.insert_stream(stream, from_format, [])
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def run_fetch(self, src_fmt, target_fmt, expect_pack_called):
        self.expect_hint = expect_pack_called
        self.calls = []
        source_tree = self.make_branch_and_tree('src', format=src_fmt)
        source_tree.lock_write()
        self.addCleanup(source_tree.unlock)
        tip = source_tree.commit('foo')
        target = self.make_repository('target', format=target_fmt)
        target.lock_write()
        self.addCleanup(target.unlock)
        source = source_tree.branch.repository
        self.orig_pack = target.pack
        self.overrideAttr(target, "pack", self.log_pack)
        target.fetch(source)
        if expect_pack_called:
            self.assertLength(1, self.calls)
        else:
            self.assertLength(0, self.calls)

    def test_sink_format_hint_no(self):
        # When the target format says packing makes no difference, pack is
        # not called.
        self.run_stream('1.9', 'rich-root-pack', False)

    def test_sink_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_stream('1.9', '2a', True)

    def test_sink_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_stream('2a', '2a', False)

    def test_IDS_format_hint_no(self):
        # When the target format says packing makes no difference, pack is
        # not called.
        self.run_fetch('1.9', 'rich-root-pack', False)

    def test_IDS_format_hint_yes(self):
        # When the target format says packing makes a difference, pack is
        # called.
        self.run_fetch('1.9', '2a', True)

    def test_IDS_format_same_no(self):
        # When the formats are the same, pack is not called.
        self.run_fetch('2a', '2a', False)
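

# Not part of the original tests: a sketch of the stream-based copy that
# run_stream() above drives, separated from the pack-call bookkeeping. It uses
# only the calls exercised in that helper; the function name and signature are
# illustrative, and both repositories are assumed to already be locked by the
# caller.
def _stream_revisions_between(source_repo, target_repo, revision_ids):
    """Copy the named revisions from source_repo into target_repo."""
    source = source_repo._get_source(target_repo._format)
    search = target_repo.search_missing_revision_ids(
        source_repo, revision_ids=revision_ids)
    stream = source.get_stream(search)
    sink = target_repo._get_sink()
    # Hand the sink's result back unchanged; run_stream() above ignores it.
    return sink.insert_stream(stream, source_repo._format, [])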


class Test_LazyListJoin(tests.TestCase):

    def test__repr__(self):
        lazy = repository._LazyListJoin(['a'], ['b'])
        self.assertEqual("bzrlib.repository._LazyListJoin((['a'], ['b']))",
                         repr(lazy))
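

# Not bzrlib's implementation: an illustrative stand-in for what a lazy list
# join does. The input lists are kept by reference and only chained when the
# object is iterated, so items appended to them later are still seen. The
# class name is ours.
class _ExampleLazyListJoin(object):

    def __init__(self, *list_parts):
        self.list_parts = list_parts

    def __iter__(self):
        for part in self.list_parts:
            for item in part:
                yield item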


class TestFeatures(tests.TestCaseWithTransport):

    def test_open_with_present_feature(self):
        self.addCleanup(
            repository.RepositoryFormatMetaDir.unregister_feature,
            "makes-cheese-sandwich")
        repository.RepositoryFormatMetaDir.register_feature(
            "makes-cheese-sandwich")
        repo = self.make_repository('.')
        repo.lock_write()
        repo._format.features["makes-cheese-sandwich"] = "required"
        repo._format.check_support_status(False)
        repo.unlock()

    def test_open_with_missing_required_feature(self):
        repo = self.make_repository('.')
        repo.lock_write()
        repo._format.features["makes-cheese-sandwich"] = "required"
        self.assertRaises(errors.MissingFeature,
            repo._format.check_support_status, False)
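

# Not part of the original tests: a sketch of how the feature mechanism above
# is driven. A feature name must be registered via
# repository.RepositoryFormatMetaDir.register_feature() before a format that
# requires it passes check_support_status(); otherwise errors.MissingFeature
# is raised, as the test above asserts. The helper name is ours.
def _require_feature(repo_format, feature_name):
    """Mark feature_name as required on repo_format and verify it is known."""
    repo_format.features[feature_name] = "required"
    # Raises errors.MissingFeature if feature_name was never registered.
    repo_format.check_support_status(False)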