# Copyright (C) 2006-2011 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

"""Tests for the Repository facility that are not interface tests.

For interface tests see tests/per_repository/*.py.

For concrete class tests see this file, and for storage formats tests
also see this file.
"""
from stat import S_ISDIR

import bzrlib
from bzrlib.errors import (
    UnknownFormatError,
    UnsupportedFormatError,
    )
from bzrlib import (
    btree_index,
    graph,
    symbol_versioning,
    tests,
    transport,
    )
from bzrlib.btree_index import BTreeBuilder, BTreeGraphIndex
from bzrlib.index import GraphIndex
from bzrlib.repository import RepositoryFormat
from bzrlib.tests import (
    TestCase,
    TestCaseWithTransport,
    )
from bzrlib import (
    bzrdir,
    errors,
    inventory,
    osutils,
    repository,
    revision as _mod_revision,
    upgrade,
    versionedfile,
    vf_repository,
    workingtree,
    )
from bzrlib.repofmt import (
    groupcompress_repo,
    knitrepo,
    knitpack_repo,
    pack_repo,
    )


class TestDefaultFormat(TestCase):

    def test_get_set_default_format(self):
        old_default = bzrdir.format_registry.get('default')
        private_default = old_default().repository_format.__class__
        old_format = repository.format_registry.get_default()
        self.assertTrue(isinstance(old_format, private_default))
        def make_sample_bzrdir():
            my_bzrdir = bzrdir.BzrDirMetaFormat1()
            my_bzrdir.repository_format = SampleRepositoryFormat()
            return my_bzrdir
        bzrdir.format_registry.remove('default')
        bzrdir.format_registry.register('sample', make_sample_bzrdir, '')
        bzrdir.format_registry.set_default('sample')
        # creating a repository should now create an instrumented dir.
        try:
            # the default branch format is used by the meta dir format
            # which is not the default bzrdir format at this point
            dir = bzrdir.BzrDirMetaFormat1().initialize('memory:///')
            result = dir.create_repository()
            self.assertEqual(result, 'A bzr repository dir')
        finally:
            bzrdir.format_registry.remove('default')
            bzrdir.format_registry.remove('sample')
            bzrdir.format_registry.register('default', old_default, '')
        self.assertIsInstance(repository.format_registry.get_default(),
                              old_format.__class__)
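
# Note for the test above: two registries are in play.  bzrdir.format_registry
# is keyed by name ('default', 'sample', ...) and hands out bzrdir formats,
# while the test asserts that repository.format_registry.get_default() matches
# the repository_format class of whatever bzrdir format is currently the
# default.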


class SampleRepositoryFormat(repository.RepositoryFormat):
    """A sample format

    this format is initializable, unsupported to aid in testing the
    open and open(unsupported=True) routines.
    """

    def get_format_string(self):
        """See RepositoryFormat.get_format_string()."""
        return "Sample .bzr repository format."

    def initialize(self, a_bzrdir, shared=False):
        """Initialize a repository in a BzrDir"""
        t = a_bzrdir.get_repository_transport(self)
        t.put_bytes('format', self.get_format_string())
        return 'A bzr repository dir'

    def is_supported(self):
        return False

    def open(self, a_bzrdir, _found=False):
        return "opened repository."
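
# SampleRepositoryFormat above is a deliberately unusable fixture: initialize()
# only records its format string in the 'format' file of the repository
# transport, and is_supported() returns False.  The detection and registration
# tests below rely on exactly that: find_format() on an unregistered sample
# raises UnknownFormatError, while opening a registered-but-unsupported sample
# raises UnsupportedFormatError.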


class SampleExtraRepositoryFormat(repository.RepositoryFormat):
    """A sample format that can not be used in a metadir

    """

    def get_format_string(self):
        raise NotImplementedError


class TestRepositoryFormat(TestCaseWithTransport):
    """Tests for the Repository format detection used by the bzr meta dir facility."""

    def test_find_format(self):
        # is the right format object found for a repository?
        # create a branch with a few known format objects.
        # this is not quite the same as
        self.build_tree(["foo/", "bar/"])
        def check_format(format, url):
            dir = format._matchingbzrdir.initialize(url)
            format.initialize(dir)
            t = transport.get_transport(url)
            found_format = repository.RepositoryFormat.find_format(dir)
            self.assertIsInstance(found_format, format.__class__)
        check_format(repository.format_registry.get_default(), "bar")

    def test_find_format_no_repository(self):
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        self.assertRaises(errors.NoRepositoryPresent,
                          repository.RepositoryFormat.find_format,
                          dir)

    def test_find_format_unknown_format(self):
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        SampleRepositoryFormat().initialize(dir)
        self.assertRaises(UnknownFormatError,
                          repository.RepositoryFormat.find_format,
                          dir)

    def test_register_unregister_format(self):
        # Test deprecated format registration functions
        format = SampleRepositoryFormat()
        # make a control dir
        dir = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        # make a repo
        format.initialize(dir)
        # register a format for it.
        self.applyDeprecated(symbol_versioning.deprecated_in((2, 4, 0)),
            repository.RepositoryFormat.register_format, format)
        # which repository.Repository.open will refuse (not supported)
        self.assertRaises(UnsupportedFormatError, repository.Repository.open,
            self.get_url())
        # but open(unsupported) will work
        self.assertEqual(format.open(dir), "opened repository.")
        # unregister the format
        self.applyDeprecated(symbol_versioning.deprecated_in((2, 4, 0)),
            repository.RepositoryFormat.unregister_format, format)


class TestRepositoryFormatRegistry(TestCase):

    def setUp(self):
        super(TestRepositoryFormatRegistry, self).setUp()
        self.registry = repository.RepositoryFormatRegistry()

    def test_register_unregister_format(self):
        format = SampleRepositoryFormat()
        self.registry.register(format)
        self.assertEquals(format, self.registry.get("Sample .bzr repository format."))
        self.registry.remove(format)
        self.assertRaises(KeyError, self.registry.get, "Sample .bzr repository format.")

    def test_get_all(self):
        format = SampleRepositoryFormat()
        self.assertEquals([], self.registry._get_all())
        self.registry.register(format)
        self.assertEquals([format], self.registry._get_all())

    def test_register_extra(self):
        format = SampleExtraRepositoryFormat()
        self.assertEquals([], self.registry._get_all())
        self.registry.register_extra(format)
        self.assertEquals([format], self.registry._get_all())

    def test_register_extra_lazy(self):
        self.assertEquals([], self.registry._get_all())
        self.registry.register_extra_lazy("bzrlib.tests.test_repository",
            "SampleExtraRepositoryFormat")
        formats = self.registry._get_all()
        self.assertEquals(1, len(formats))
        self.assertIsInstance(formats[0], SampleExtraRepositoryFormat)
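
# As the registry tests above show, _get_all() reports both normally
# registered formats (keyed by their format string) and 'extra' formats,
# which have no usable format string and so are registered either directly
# via register_extra() or lazily by module and class name.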


class TestFormatKnit1(TestCaseWithTransport):

    def test_attribute__fetch_order(self):
        """Knits need topological data insertion."""
        repo = self.make_repository('.',
                format=bzrdir.format_registry.get('knit')())
        self.assertEqual('topological', repo._format._fetch_order)

    def test_attribute__fetch_uses_deltas(self):
        """Knits reuse deltas."""
        repo = self.make_repository('.',
                format=bzrdir.format_registry.get('knit')())
        self.assertEqual(True, repo._format._fetch_uses_deltas)

    def test_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = knitrepo.RepositoryFormatKnit1().initialize(control)
        # in case of side effects of locking.
        repo.lock_write()
        repo.unlock()
        # we want:
        # format 'Bazaar-NG Knit Repository Format 1'
        # lock: is a directory
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
                             t.get('format').read())
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
        self.check_knits(t)
        # Check per-file knits.
        branch = control.create_branch()
        tree = control.create_workingtree()
        tree.add(['foo'], ['Nasty-IdC:'], ['file'])
        tree.put_file_bytes_non_atomic('Nasty-IdC:', '')
        tree.commit('1st post', rev_id='foo')
        self.assertHasKnit(t, 'knits/e8/%254easty-%2549d%2543%253a',
            '\nfoo fulltext 0 81  :')

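    # Note on test_disk_layout above: the per-file knit for the hostile file
    # id 'Nasty-IdC:' is expected under the escaped name
    # 'knits/e8/%254easty-%2549d%2543%253a'; the uppercase letters and ':'
    # are percent-encoded (with the '%' signs themselves encoded again) and
    # the knit sits under a short 'e8/' prefix directory, so arbitrary file
    # ids cannot escape or collide inside the knits/ store.
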
    def assertHasKnit(self, t, knit_name, extra_content=''):
        """Assert that knit_name exists on t."""
        self.assertEqualDiff('# bzr knit index 8\n' + extra_content,
                             t.get(knit_name + '.kndx').read())

    def check_knits(self, t):
        """check knit content for a repository."""
        self.assertHasKnit(t, 'inventory')
        self.assertHasKnit(t, 'revisions')
        self.assertHasKnit(t, 'signatures')

    def test_shared_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
        # we want:
        # format 'Bazaar-NG Knit Repository Format 1'
        # lock: is a directory
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        # a 'shared-storage' marker file.
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
                             t.get('format').read())
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        self.assertEqualDiff('', t.get('shared-storage').read())
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
        self.check_knits(t)

    def test_shared_no_tree_disk_layout(self):
        control = bzrdir.BzrDirMetaFormat1().initialize(self.get_url())
        repo = knitrepo.RepositoryFormatKnit1().initialize(control, shared=True)
        repo.set_make_working_trees(False)
        # we want:
        # format 'Bazaar-NG Knit Repository Format 1'
        # lock ''
        # inventory.weave == empty_weave
        # empty revision-store directory
        # empty weaves directory
        # a 'shared-storage' marker file.
        t = control.get_repository_transport(None)
        self.assertEqualDiff('Bazaar-NG Knit Repository Format 1',
                             t.get('format').read())
        # XXX: no locks left when unlocked at the moment
        # self.assertEqualDiff('', t.get('lock').read())
        self.assertEqualDiff('', t.get('shared-storage').read())
        self.assertEqualDiff('', t.get('no-working-trees').read())
        repo.set_make_working_trees(True)
        self.assertFalse(t.has('no-working-trees'))
        self.assertTrue(S_ISDIR(t.stat('knits').st_mode))
        self.check_knits(t)

    def test_deserialise_sets_root_revision(self):
        """We must have an inventory.root.revision

        Old versions of the XML5 serializer did not set the revision_id for
        the whole inventory, so we grab the one from the expected text, which
        is valid when the api is not being abused.
        """
        repo = self.make_repository('.',
                format=bzrdir.format_registry.get('knit')())
        inv_xml = '<inventory format="5">\n</inventory>\n'
        inv = repo._deserialise_inventory('test-rev-id', inv_xml)
        self.assertEqual('test-rev-id', inv.root.revision)

    def test_deserialise_uses_global_revision_id(self):
        """If it is set, then we re-use the global revision id"""
        repo = self.make_repository('.',
                format=bzrdir.format_registry.get('knit')())
        inv_xml = ('<inventory format="5" revision_id="other-rev-id">\n'
                   '</inventory>\n')
        # Arguably, the deserialise_inventory should detect a mismatch, and
        # raise an error, rather than silently using one revision_id over the
        # other.
        self.assertRaises(AssertionError, repo._deserialise_inventory,
            'test-rev-id', inv_xml)
        inv = repo._deserialise_inventory('other-rev-id', inv_xml)
        self.assertEqual('other-rev-id', inv.root.revision)

    def test_supports_external_lookups(self):
        repo = self.make_repository('.',
                format=bzrdir.format_registry.get('knit')())
        self.assertFalse(repo._format.supports_external_lookups)


class DummyRepository(object):
    """A dummy repository for testing."""

    _format = None
    _serializer = None

    def supports_rich_root(self):
        if self._format is not None:
            return self._format.rich_root_data
        return False

    def get_graph(self):
        raise NotImplementedError

    def get_parent_map(self, revision_ids):
        raise NotImplementedError


class InterDummy(repository.InterRepository):
    """An inter-repository optimised code path for DummyRepository.

    This is for use during testing, where we use DummyRepository as the
    repositories so that none of the default registered inter-repository
    classes will match.
    """

    @staticmethod
    def is_compatible(repo_source, repo_target):
        """InterDummy is compatible with DummyRepository."""
        return (isinstance(repo_source, DummyRepository) and
            isinstance(repo_target, DummyRepository))


class TestInterRepository(TestCaseWithTransport):

    def test_get_default_inter_repository(self):
        # test that InterRepository.get(repo_a, repo_b) probes
        # for an inter_repo class where is_compatible(repo_a, repo_b) returns
        # true and returns a default inter_repo otherwise.
        # This also tests that the default registered optimised interrepository
        # classes do not barf inappropriately when a surprising repository type
        # is handed to them.
        dummy_a = DummyRepository()
        dummy_a._format = RepositoryFormat()
        dummy_a._format.supports_full_versioned_files = True
        dummy_b = DummyRepository()
        dummy_b._format = RepositoryFormat()
        dummy_b._format.supports_full_versioned_files = True
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)

    def assertGetsDefaultInterRepository(self, repo_a, repo_b):
        """Asserts that InterRepository.get(repo_a, repo_b) -> the default.

        The effective default is now InterSameDataRepository because there is
        no actual sane default in the presence of incompatible data models.
        """
        inter_repo = repository.InterRepository.get(repo_a, repo_b)
        self.assertEqual(vf_repository.InterSameDataRepository,
                         inter_repo.__class__)
        self.assertEqual(repo_a, inter_repo.source)
        self.assertEqual(repo_b, inter_repo.target)

    def test_register_inter_repository_class(self):
        # test that an optimised code path provider (an InterRepository
        # subclass) can be registered and unregistered, and that it is
        # correctly selected when given a repository pair that it returns
        # true on for the is_compatible static method check
        dummy_a = DummyRepository()
        dummy_a._format = RepositoryFormat()
        dummy_b = DummyRepository()
        dummy_b._format = RepositoryFormat()
        repo = self.make_repository('.')
        # hack dummies to look like repo somewhat.
        dummy_a._serializer = repo._serializer
        dummy_a._format.supports_tree_reference = repo._format.supports_tree_reference
        dummy_a._format.rich_root_data = repo._format.rich_root_data
        dummy_a._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
        dummy_b._serializer = repo._serializer
        dummy_b._format.supports_tree_reference = repo._format.supports_tree_reference
        dummy_b._format.rich_root_data = repo._format.rich_root_data
        dummy_b._format.supports_full_versioned_files = repo._format.supports_full_versioned_files
        repository.InterRepository.register_optimiser(InterDummy)
        try:
            # we should get the default for something InterDummy returns False
            # to
            self.assertFalse(InterDummy.is_compatible(dummy_a, repo))
            self.assertGetsDefaultInterRepository(dummy_a, repo)
            # and we should get an InterDummy for a pair it 'likes'
            self.assertTrue(InterDummy.is_compatible(dummy_a, dummy_b))
            inter_repo = repository.InterRepository.get(dummy_a, dummy_b)
            self.assertEqual(InterDummy, inter_repo.__class__)
            self.assertEqual(dummy_a, inter_repo.source)
            self.assertEqual(dummy_b, inter_repo.target)
        finally:
            repository.InterRepository.unregister_optimiser(InterDummy)
        # now we should get the default InterRepository object again.
        self.assertGetsDefaultInterRepository(dummy_a, dummy_b)
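
# The attributes copied onto the dummies above (_serializer, rich_root_data,
# supports_tree_reference, supports_full_versioned_files) make them "look
# like repo somewhat", so compatibility probing by the stock optimisers does
# not blow up on the dummy objects; the actual selection is still decided by
# each optimiser's is_compatible(), as the assertions demonstrate.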


class TestRepositoryFormat1(knitrepo.RepositoryFormatKnit1):

    def get_format_string(self):
        return "Test Format 1"


class TestRepositoryFormat2(knitrepo.RepositoryFormatKnit1):

    def get_format_string(self):
        return "Test Format 2"


class TestRepositoryConverter(TestCaseWithTransport):

    def test_convert_empty(self):
        source_format = TestRepositoryFormat1()
        target_format = TestRepositoryFormat2()
        repository.format_registry.register(source_format)
        self.addCleanup(repository.format_registry.remove,
            source_format)
        repository.format_registry.register(target_format)
        self.addCleanup(repository.format_registry.remove,
            target_format)
        t = self.get_transport()
        t.mkdir('repository')
        repo_dir = bzrdir.BzrDirMetaFormat1().initialize('repository')
        repo = TestRepositoryFormat1().initialize(repo_dir)
        converter = repository.CopyConverter(target_format)
        pb = bzrlib.ui.ui_factory.nested_progress_bar()
        try:
            converter.convert(repo, pb)
        finally:
            pb.finished()
        repo = repo_dir.open_repository()
        self.assertTrue(isinstance(target_format, repo._format.__class__))
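
# TestRepositoryFormat1/2 above exist only to give CopyConverter two distinct,
# registered format identities: registering them (with addCleanup to remove
# them again) is what lets the converted repository be recognised by its
# format string when repo_dir.open_repository() runs at the end of
# test_convert_empty.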


class TestRepositoryFormatKnit3(TestCaseWithTransport):

    def test_attribute__fetch_order(self):
        """Knits need topological data insertion."""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertEqual('topological', repo._format._fetch_order)

    def test_attribute__fetch_uses_deltas(self):
        """Knits reuse deltas."""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertEqual(True, repo._format._fetch_uses_deltas)

    def test_convert(self):
        """Ensure the upgrade adds weaves for roots"""
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit1()
        tree = self.make_branch_and_tree('.', format)
        tree.commit("Dull commit", rev_id="dull")
        revision_tree = tree.branch.repository.revision_tree('dull')
        revision_tree.lock_read()
        try:
            self.assertRaises(errors.NoSuchFile, revision_tree.get_file_lines,
                revision_tree.inventory.root.file_id)
        finally:
            revision_tree.unlock()
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        upgrade.Convert('.', format)
        tree = workingtree.WorkingTree.open('.')
        revision_tree = tree.branch.repository.revision_tree('dull')
        revision_tree.lock_read()
        try:
            revision_tree.get_file_lines(revision_tree.inventory.root.file_id)
        finally:
            revision_tree.unlock()
        tree.commit("Another dull commit", rev_id='dull2')
        revision_tree = tree.branch.repository.revision_tree('dull2')
        revision_tree.lock_read()
        self.addCleanup(revision_tree.unlock)
        self.assertEqual('dull', revision_tree.inventory.root.revision)

    def test_supports_external_lookups(self):
        format = bzrdir.BzrDirMetaFormat1()
        format.repository_format = knitrepo.RepositoryFormatKnit3()
        repo = self.make_repository('.', format=format)
        self.assertFalse(repo._format.supports_external_lookups)


class Test2a(tests.TestCaseWithMemoryTransport):

    def test_chk_bytes_uses_custom_btree_parser(self):
        mt = self.make_branch_and_memory_tree('test', format='2a')
        mt.lock_write()
        self.addCleanup(mt.unlock)
        mt.add([''], ['root-id'])
        mt.commit('first')
        index = mt.branch.repository.chk_bytes._index._graph_index._indices[0]
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)
        # It should also work if we re-open the repo
        repo = mt.branch.repository.bzrdir.open_repository()
        repo.lock_read()
        self.addCleanup(repo.unlock)
        index = repo.chk_bytes._index._graph_index._indices[0]
        self.assertEqual(btree_index._gcchk_factory, index._leaf_factory)

    def test_fetch_combines_groups(self):
        builder = self.make_branch_builder('source', format='2a')
        builder.start_series()
        builder.build_snapshot('1', None, [
            ('add', ('', 'root-id', 'directory', '')),
            ('add', ('file', 'file-id', 'file', 'content\n'))])
        builder.build_snapshot('2', ['1'], [
            ('modify', ('file-id', 'content-2\n'))])
        builder.finish_series()
        source = builder.get_branch()
        target = self.make_repository('target', format='2a')
        target.fetch(source.repository)
        target.lock_read()
        self.addCleanup(target.unlock)
        details = target.texts._index.get_build_details(
            [('file-id', '1',), ('file-id', '2',)])
        file_1_details = details[('file-id', '1')]
        file_2_details = details[('file-id', '2')]
        # The index, and what to read off disk, should be the same for both
        # versions of the file.
        self.assertEqual(file_1_details[0][:3], file_2_details[0][:3])
4431.3.7
by Jonathan Lange
Cherrypick bzr.dev 4470, resolving conflicts. |
611 |
def test_format_pack_compresses_True(self): |
612 |
repo = self.make_repository('repo', format='2a') |
|
613 |
self.assertTrue(repo._format.pack_compresses) |
|
3735.2.40
by Robert Collins
Add development4 which has a parent_id to basename index on CHKInventory objects. |
614 |
|
615 |
def test_inventories_use_chk_map_with_parent_base_dict(self): |
|
4667.1.1
by John Arbash Meinel
Drop the Test2a test times from 5+s down to 1.4s |
616 |
tree = self.make_branch_and_memory_tree('repo', format="2a") |
617 |
tree.lock_write() |
|
618 |
tree.add([''], ['TREE_ROOT']) |
|
3735.2.40
by Robert Collins
Add development4 which has a parent_id to basename index on CHKInventory objects. |
619 |
revid = tree.commit("foo") |
4667.1.1
by John Arbash Meinel
Drop the Test2a test times from 5+s down to 1.4s |
620 |
tree.unlock() |
3735.2.40
by Robert Collins
Add development4 which has a parent_id to basename index on CHKInventory objects. |
621 |
tree.lock_read() |
622 |
self.addCleanup(tree.unlock) |
|
623 |
inv = tree.branch.repository.get_inventory(revid) |
|
3735.2.41
by Robert Collins
Make the parent_id_basename index be updated during CHKInventory.apply_delta. |
624 |
self.assertNotEqual(None, inv.parent_id_basename_to_file_id) |
625 |
inv.parent_id_basename_to_file_id._ensure_root() |
|
3735.2.40
by Robert Collins
Add development4 which has a parent_id to basename index on CHKInventory objects. |
626 |
inv.id_to_entry._ensure_root() |
4241.6.8
by Robert Collins, John Arbash Meinel, Ian Clatworthy, Vincent Ladeuil
Add --development6-rich-root, disabling the legacy and unneeded development2 format, and activating the tests for CHK features disabled pending this format. (Robert Collins, John Arbash Meinel, Ian Clatworthy, Vincent Ladeuil) |
627 |
self.assertEqual(65536, inv.id_to_entry._root_node.maximum_size) |
628 |
self.assertEqual(65536, |
|
3735.2.41
by Robert Collins
Make the parent_id_basename index be updated during CHKInventory.apply_delta. |
629 |
inv.parent_id_basename_to_file_id._root_node.maximum_size) |
3735.2.40
by Robert Collins
Add development4 which has a parent_id to basename index on CHKInventory objects. |
630 |
|
4431.3.7
by Jonathan Lange
Cherrypick bzr.dev 4470, resolving conflicts. |
631 |
def test_autopack_unchanged_chk_nodes(self): |
632 |
# at 20 unchanged commits, chk pages are packed that are split into
|
|
633 |
# two groups such that the new pack being made doesn't have all its
|
|
634 |
# pages in the source packs (though they are in the repository).
|
|
4667.1.1
by John Arbash Meinel
Drop the Test2a test times from 5+s down to 1.4s |
635 |
# Use a memory backed repository, we don't need to hit disk for this
|
636 |
tree = self.make_branch_and_memory_tree('tree', format='2a') |
|
637 |
tree.lock_write() |
|
638 |
self.addCleanup(tree.unlock) |
|
639 |
tree.add([''], ['TREE_ROOT']) |
|
4431.3.7
by Jonathan Lange
Cherrypick bzr.dev 4470, resolving conflicts. |
640 |
for pos in range(20): |
641 |
tree.commit(str(pos)) |
|
642 |
||
643 |
def test_pack_with_hint(self): |
|
4667.1.1
by John Arbash Meinel
Drop the Test2a test times from 5+s down to 1.4s |
644 |
tree = self.make_branch_and_memory_tree('tree', format='2a') |
645 |
tree.lock_write() |
|
646 |
self.addCleanup(tree.unlock) |
|
647 |
tree.add([''], ['TREE_ROOT']) |
|
4431.3.7
by Jonathan Lange
Cherrypick bzr.dev 4470, resolving conflicts. |
648 |
# 1 commit to leave untouched
|
649 |
tree.commit('1') |
|
650 |
to_keep = tree.branch.repository._pack_collection.names() |
|
651 |
# 2 to combine
|
|
652 |
tree.commit('2') |
|
653 |
tree.commit('3') |
|
654 |
all = tree.branch.repository._pack_collection.names() |
|
655 |
combine = list(set(all) - set(to_keep)) |
|
656 |
self.assertLength(3, all) |
|
657 |
self.assertLength(2, combine) |
|
658 |
tree.branch.repository.pack(hint=combine) |
|
659 |
final = tree.branch.repository._pack_collection.names() |
|
660 |
self.assertLength(2, final) |
|
661 |
self.assertFalse(combine[0] in final) |
|
662 |
self.assertFalse(combine[1] in final) |
|
663 |
self.assertSubset(to_keep, final) |
|
664 |
||
4360.4.3
by John Arbash Meinel
Introduce a KnitPackStreamSource which is used when |
665 |
def test_stream_source_to_gc(self): |
4462.2.1
by Robert Collins
Add new attribute to RepositoryFormat pack_compresses, hinting when pack can be useful. |
666 |
source = self.make_repository('source', format='2a') |
667 |
target = self.make_repository('target', format='2a') |
|
4360.4.3
by John Arbash Meinel
Introduce a KnitPackStreamSource which is used when |
668 |
stream = source._get_source(target._format) |
669 |
self.assertIsInstance(stream, groupcompress_repo.GroupCHKStreamSource) |
|
670 |
||
671 |
def test_stream_source_to_non_gc(self): |
|
4462.2.1
by Robert Collins
Add new attribute to RepositoryFormat pack_compresses, hinting when pack can be useful. |
672 |
source = self.make_repository('source', format='2a') |
4360.4.3
by John Arbash Meinel
Introduce a KnitPackStreamSource which is used when |
673 |
target = self.make_repository('target', format='rich-root-pack') |
674 |
stream = source._get_source(target._format) |
|
675 |
# We don't want the child GroupCHKStreamSource
|
|
5815.4.15
by Jelmer Vernooij
Fix some imports. |
676 |
self.assertIs(type(stream), vf_repository.StreamSource) |
4360.4.3
by John Arbash Meinel
Introduce a KnitPackStreamSource which is used when |
677 |
|
4360.4.9
by John Arbash Meinel
Merge bzr.dev, bringing in the gc stacking fixes. |
678 |
def test_get_stream_for_missing_keys_includes_all_chk_refs(self): |
679 |
source_builder = self.make_branch_builder('source', |
|
4462.2.1
by Robert Collins
Add new attribute to RepositoryFormat pack_compresses, hinting when pack can be useful. |
680 |
format='2a') |
4360.4.9
by John Arbash Meinel
Merge bzr.dev, bringing in the gc stacking fixes. |
681 |
# We have to build a fairly large tree, so that we are sure the chk
|
682 |
# pages will have split into multiple pages.
|
|
683 |
entries = [('add', ('', 'a-root-id', 'directory', None))] |
|
684 |
for i in 'abcdefghijklmnopqrstuvwxyz123456789': |
|
685 |
for j in 'abcdefghijklmnopqrstuvwxyz123456789': |
|
686 |
fname = i + j |
|
687 |
fid = fname + '-id' |
|
688 |
content = 'content for %s\n' % (fname,) |
|
689 |
entries.append(('add', (fname, fid, 'file', content))) |
|
690 |
source_builder.start_series() |
|
691 |
source_builder.build_snapshot('rev-1', None, entries) |
|
692 |
# Now change a few of them, so we get a few new pages for the second
|
|
693 |
# revision
|
|
694 |
source_builder.build_snapshot('rev-2', ['rev-1'], [ |
|
695 |
('modify', ('aa-id', 'new content for aa-id\n')), |
|
696 |
('modify', ('cc-id', 'new content for cc-id\n')), |
|
697 |
('modify', ('zz-id', 'new content for zz-id\n')), |
|
698 |
])
|
|
699 |
source_builder.finish_series() |
|
700 |
source_branch = source_builder.get_branch() |
|
701 |
source_branch.lock_read() |
|
702 |
self.addCleanup(source_branch.unlock) |
|
4462.2.1
by Robert Collins
Add new attribute to RepositoryFormat pack_compresses, hinting when pack can be useful. |
703 |
target = self.make_repository('target', format='2a') |
4360.4.9
by John Arbash Meinel
Merge bzr.dev, bringing in the gc stacking fixes. |
704 |
source = source_branch.repository._get_source(target._format) |
705 |
self.assertIsInstance(source, groupcompress_repo.GroupCHKStreamSource) |
|
706 |
||
707 |
# On a regular pass, getting the inventories and chk pages for rev-2
|
|
708 |
# would only get the newly created chk pages
|
|
709 |
search = graph.SearchResult(set(['rev-2']), set(['rev-1']), 1, |
|
710 |
set(['rev-2'])) |
|
711 |
simple_chk_records = [] |
|
712 |
for vf_name, substream in source.get_stream(search): |
|
713 |
if vf_name == 'chk_bytes': |
|
714 |
for record in substream: |
|
715 |
simple_chk_records.append(record.key) |
|
716 |
else: |
|
717 |
for _ in substream: |
|
718 |
continue
|
|
719 |
# 4 pages: the root (InternalNode) plus the pages which actually changed
|
|
720 |
self.assertEqual([('sha1:91481f539e802c76542ea5e4c83ad416bf219f73',), |
|
721 |
('sha1:4ff91971043668583985aec83f4f0ab10a907d3f',), |
|
722 |
('sha1:81e7324507c5ca132eedaf2d8414ee4bb2226187',), |
|
723 |
('sha1:b101b7da280596c71a4540e9a1eeba8045985ee0',)], |
|
724 |
simple_chk_records) |
|
725 |
# Now, when we do a similar call using 'get_stream_for_missing_keys'
|
|
726 |
# we should get a much larger set of pages.
|
|
727 |
missing = [('inventories', 'rev-2')] |
|
728 |
full_chk_records = [] |
|
729 |
for vf_name, substream in source.get_stream_for_missing_keys(missing): |
|
730 |
if vf_name == 'inventories': |
|
731 |
for record in substream: |
|
732 |
self.assertEqual(('rev-2',), record.key) |
|
733 |
elif vf_name == 'chk_bytes': |
|
734 |
for record in substream: |
|
735 |
full_chk_records.append(record.key) |
|
736 |
else: |
|
737 |
self.fail('Should not be getting a stream of %s' % (vf_name,)) |
|
738 |
# We have 257 records now. This is because we have 1 root page, and 256
|
|
739 |
# leaf pages in a complete listing.
|
|
740 |
self.assertEqual(257, len(full_chk_records)) |
|
741 |
self.assertSubset(simple_chk_records, full_chk_records) |
|
742 |
||
4465.2.7
by Aaron Bentley
Move test_inconsistency_fatal to test_repository |
743 |
def test_inconsistency_fatal(self): |
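# Only the revisions index is expected to treat inconsistent duplicate entries as fatal; the other indices are not.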
744 |
repo = self.make_repository('repo', format='2a') |
|
745 |
self.assertTrue(repo.revisions._index._inconsistency_fatal) |
|
746 |
self.assertFalse(repo.texts._index._inconsistency_fatal) |
|
747 |
self.assertFalse(repo.inventories._index._inconsistency_fatal) |
|
748 |
self.assertFalse(repo.signatures._index._inconsistency_fatal) |
|
749 |
self.assertFalse(repo.chk_bytes._index._inconsistency_fatal) |
|
750 |
||
4360.4.3
by John Arbash Meinel
Introduce a KnitPackStreamSource which is used when |
751 |
|
752 |
class TestKnitPackStreamSource(tests.TestCaseWithMemoryTransport): |
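"""Tests that _get_source only selects KnitPackStreamSource for matching pack formats."""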
|
753 |
||
754 |
def test_source_to_exact_pack_092(self): |
|
755 |
source = self.make_repository('source', format='pack-0.92') |
|
756 |
target = self.make_repository('target', format='pack-0.92') |
|
757 |
stream_source = source._get_source(target._format) |
|
5757.2.2
by Jelmer Vernooij
Fix imports. |
758 |
self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource) |
4360.4.3
by John Arbash Meinel
Introduce a KnitPackStreamSource which is used when |
759 |
|
760 |
def test_source_to_exact_pack_rich_root_pack(self): |
|
761 |
source = self.make_repository('source', format='rich-root-pack') |
|
762 |
target = self.make_repository('target', format='rich-root-pack') |
|
763 |
stream_source = source._get_source(target._format) |
|
5757.2.2
by Jelmer Vernooij
Fix imports. |
764 |
self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource) |
4360.4.3
by John Arbash Meinel
Introduce a KnitPackStreamSource which is used when |
765 |
|
766 |
def test_source_to_exact_pack_19(self): |
|
767 |
source = self.make_repository('source', format='1.9') |
|
768 |
target = self.make_repository('target', format='1.9') |
|
769 |
stream_source = source._get_source(target._format) |
|
5757.2.2
by Jelmer Vernooij
Fix imports. |
770 |
self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource) |
4360.4.3
by John Arbash Meinel
Introduce a KnitPackStreamSource which is used when |
771 |
|
772 |
def test_source_to_exact_pack_19_rich_root(self): |
|
773 |
source = self.make_repository('source', format='1.9-rich-root') |
|
774 |
target = self.make_repository('target', format='1.9-rich-root') |
|
775 |
stream_source = source._get_source(target._format) |
|
5757.2.2
by Jelmer Vernooij
Fix imports. |
776 |
self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource) |
4360.4.3
by John Arbash Meinel
Introduce a KnitPackStreamSource which is used when |
777 |
|
778 |
def test_source_to_remote_exact_pack_19(self): |
|
779 |
trans = self.make_smart_server('target') |
|
780 |
trans.ensure_base() |
|
781 |
source = self.make_repository('source', format='1.9') |
|
782 |
target = self.make_repository('target', format='1.9') |
|
783 |
target = repository.Repository.open(trans.base) |
|
784 |
stream_source = source._get_source(target._format) |
|
5757.2.2
by Jelmer Vernooij
Fix imports. |
785 |
self.assertIsInstance(stream_source, knitpack_repo.KnitPackStreamSource) |
4360.4.3
by John Arbash Meinel
Introduce a KnitPackStreamSource which is used when |
786 |
|
787 |
def test_stream_source_to_non_exact(self): |
|
788 |
source = self.make_repository('source', format='pack-0.92') |
|
789 |
target = self.make_repository('target', format='1.9') |
|
790 |
stream = source._get_source(target._format) |
|
5815.4.15
by Jelmer Vernooij
Fix some imports. |
791 |
self.assertIs(type(stream), vf_repository.StreamSource) |
4360.4.3
by John Arbash Meinel
Introduce a KnitPackStreamSource which is used when |
792 |
|
793 |
def test_stream_source_to_non_exact_rich_root(self): |
|
794 |
source = self.make_repository('source', format='1.9') |
|
795 |
target = self.make_repository('target', format='1.9-rich-root') |
|
796 |
stream = source._get_source(target._format) |
|
5815.4.15
by Jelmer Vernooij
Fix some imports. |
797 |
self.assertIs(type(stream), vf_repository.StreamSource) |
4360.4.3
by John Arbash Meinel
Introduce a KnitPackStreamSource which is used when |
798 |
|
799 |
def test_source_to_remote_non_exact_pack_19(self): |
|
800 |
trans = self.make_smart_server('target') |
|
801 |
trans.ensure_base() |
|
802 |
source = self.make_repository('source', format='1.9') |
|
803 |
target = self.make_repository('target', format='1.6') |
|
804 |
target = repository.Repository.open(trans.base) |
|
805 |
stream_source = source._get_source(target._format) |
|
5815.4.15
by Jelmer Vernooij
Fix some imports. |
806 |
self.assertIs(type(stream_source), vf_repository.StreamSource) |
4360.4.3
by John Arbash Meinel
Introduce a KnitPackStreamSource which is used when |
807 |
|
808 |
def test_stream_source_to_knit(self): |
|
809 |
source = self.make_repository('source', format='pack-0.92') |
|
810 |
target = self.make_repository('target', format='dirstate') |
|
811 |
stream = source._get_source(target._format) |
|
5815.4.15
by Jelmer Vernooij
Fix some imports. |
812 |
self.assertIs(type(stream), vf_repository.StreamSource) |
4360.4.3
by John Arbash Meinel
Introduce a KnitPackStreamSource which is used when |
813 |
|
3735.2.40
by Robert Collins
Add development4 which has a parent_id to basename index on CHKInventory objects. |
814 |
|
4343.3.32
by John Arbash Meinel
Change the tests for _find_revision_outside_set to the new _find_parent_ids function. |
815 |
class TestDevelopment6FindParentIdsOfRevisions(TestCaseWithTransport): |
816 |
"""Tests for _find_parent_ids_of_revisions."""
|
|
3735.4.1
by Andrew Bennetts
Add _find_revision_outside_set. |
817 |
|
818 |
def setUp(self): |
|
4343.3.32
by John Arbash Meinel
Change the tests for _find_revision_outside_set to the new _find_parent_ids function. |
819 |
super(TestDevelopment6FindParentIdsOfRevisions, self).setUp() |
5546.1.1
by Andrew Bennetts
Remove RepositoryFormatCHK1 and RepositoryFormatCHK2. |
820 |
self.builder = self.make_branch_builder('source') |
3735.4.1
by Andrew Bennetts
Add _find_revision_outside_set. |
821 |
self.builder.start_series() |
822 |
self.builder.build_snapshot('initial', None, |
|
823 |
[('add', ('', 'tree-root', 'directory', None))]) |
|
824 |
self.repo = self.builder.get_branch().repository |
|
825 |
self.addCleanup(self.builder.finish_series) |
|
3735.2.99
by John Arbash Meinel
Merge bzr.dev 4034. Whitespace cleanup |
826 |
|
4343.3.32
by John Arbash Meinel
Change the tests for _find_revision_outside_set to the new _find_parent_ids function. |
827 |
def assertParentIds(self, expected_result, rev_set): |
828 |
self.assertEqual(sorted(expected_result), |
|
829 |
sorted(self.repo._find_parent_ids_of_revisions(rev_set))) |
|
3735.4.1
by Andrew Bennetts
Add _find_revision_outside_set. |
830 |
|
831 |
def test_simple(self): |
|
832 |
self.builder.build_snapshot('revid1', None, []) |
|
4343.3.32
by John Arbash Meinel
Change the tests for _find_revision_outside_set to the new _find_parent_ids function. |
833 |
self.builder.build_snapshot('revid2', ['revid1'], []) |
3735.4.1
by Andrew Bennetts
Add _find_revision_outside_set. |
834 |
rev_set = ['revid2'] |
4343.3.32
by John Arbash Meinel
Change the tests for _find_revision_outside_set to the new _find_parent_ids function. |
835 |
self.assertParentIds(['revid1'], rev_set) |
3735.4.1
by Andrew Bennetts
Add _find_revision_outside_set. |
836 |
|
837 |
def test_not_first_parent(self): |
|
838 |
self.builder.build_snapshot('revid1', None, []) |
|
4343.3.32
by John Arbash Meinel
Change the tests for _find_revision_outside_set to the new _find_parent_ids function. |
839 |
self.builder.build_snapshot('revid2', ['revid1'], []) |
840 |
self.builder.build_snapshot('revid3', ['revid2'], []) |
|
3735.4.1
by Andrew Bennetts
Add _find_revision_outside_set. |
841 |
rev_set = ['revid3', 'revid2'] |
4343.3.32
by John Arbash Meinel
Change the tests for _find_revision_outside_set to the new _find_parent_ids function. |
842 |
self.assertParentIds(['revid1'], rev_set) |
3735.4.1
by Andrew Bennetts
Add _find_revision_outside_set. |
843 |
|
844 |
def test_not_null(self): |
|
845 |
rev_set = ['initial'] |
|
4343.3.32
by John Arbash Meinel
Change the tests for _find_revision_outside_set to the new _find_parent_ids function. |
846 |
self.assertParentIds([], rev_set) |
3735.4.1
by Andrew Bennetts
Add _find_revision_outside_set. |
847 |
|
848 |
def test_not_null_set(self): |
|
849 |
self.builder.build_snapshot('revid1', None, []) |
|
850 |
rev_set = [_mod_revision.NULL_REVISION] |
|
4343.3.32
by John Arbash Meinel
Change the tests for _find_revision_outside_set to the new _find_parent_ids function. |
851 |
self.assertParentIds([], rev_set) |
3735.4.1
by Andrew Bennetts
Add _find_revision_outside_set. |
852 |
|
853 |
def test_ghost(self): |
|
854 |
self.builder.build_snapshot('revid1', None, []) |
|
855 |
rev_set = ['ghost', 'revid1'] |
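# 'ghost' is not present in the repository, so only revid1's parent is reported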
|
4343.3.32
by John Arbash Meinel
Change the tests for _find_revision_outside_set to the new _find_parent_ids function. |
856 |
self.assertParentIds(['initial'], rev_set) |
3735.4.1
by Andrew Bennetts
Add _find_revision_outside_set. |
857 |
|
858 |
def test_ghost_parent(self): |
|
859 |
self.builder.build_snapshot('revid1', None, []) |
|
860 |
self.builder.build_snapshot('revid2', ['revid1', 'ghost'], []) |
|
861 |
rev_set = ['revid2', 'revid1'] |
|
4343.3.32
by John Arbash Meinel
Change the tests for _find_revision_outside_set to the new _find_parent_ids function. |
862 |
self.assertParentIds(['ghost', 'initial'], rev_set) |
3735.4.1
by Andrew Bennetts
Add _find_revision_outside_set. |
863 |
|
864 |
def test_righthand_parent(self): |
|
865 |
self.builder.build_snapshot('revid1', None, []) |
|
866 |
self.builder.build_snapshot('revid2a', ['revid1'], []) |
|
867 |
self.builder.build_snapshot('revid2b', ['revid1'], []) |
|
868 |
self.builder.build_snapshot('revid3', ['revid2a', 'revid2b'], []) |
|
869 |
rev_set = ['revid3', 'revid2a'] |
|
4343.3.32
by John Arbash Meinel
Change the tests for _find_revision_outside_set to the new _find_parent_ids function. |
870 |
self.assertParentIds(['revid1', 'revid2b'], rev_set) |
3735.4.1
by Andrew Bennetts
Add _find_revision_outside_set. |
871 |
|
872 |
||
2535.3.57
by Andrew Bennetts
Perform some sanity checking of data streams rather than blindly inserting them into our repository. |
873 |
class TestWithBrokenRepo(TestCaseWithTransport): |
2592.3.214
by Robert Collins
Merge bzr.dev. |
874 |
"""These tests seem to be more appropriate as interface tests?"""
|
2535.3.57
by Andrew Bennetts
Perform some sanity checking of data streams rather than blindly inserting them into our repository. |
875 |
|
876 |
def make_broken_repository(self): |
|
877 |
# XXX: This function is borrowed from Aaron's "Reconcile can fix bad
|
|
878 |
# parent references" branch which is due to land in bzr.dev soon. Once
|
|
879 |
# it does, this duplication should be removed.
|
|
880 |
repo = self.make_repository('broken-repo') |
|
881 |
cleanups = [] |
|
882 |
try: |
|
883 |
repo.lock_write() |
|
884 |
cleanups.append(repo.unlock) |
|
885 |
repo.start_write_group() |
|
886 |
cleanups.append(repo.commit_write_group) |
|
887 |
# make rev1a: A well-formed revision, containing 'file1'
|
|
888 |
inv = inventory.Inventory(revision_id='rev1a') |
|
889 |
inv.root.revision = 'rev1a' |
|
890 |
self.add_file(repo, inv, 'file1', 'rev1a', []) |
|
4634.35.21
by Andrew Bennetts
Fix test_insert_from_broken_repo in test_repository. |
891 |
repo.texts.add_lines((inv.root.file_id, 'rev1a'), [], []) |
2535.3.57
by Andrew Bennetts
Perform some sanity checking of data streams rather than blindly inserting them into our repository. |
892 |
repo.add_inventory('rev1a', inv, []) |
893 |
revision = _mod_revision.Revision('rev1a', |
|
894 |
committer='jrandom@example.com', timestamp=0, |
|
895 |
inventory_sha1='', timezone=0, message='foo', parent_ids=[]) |
|
896 |
repo.add_revision('rev1a', revision, inv) |
|
897 |
||
898 |
# make rev1b, which has no Revision, but has an Inventory, and
|
|
899 |
# file1
|
|
900 |
inv = inventory.Inventory(revision_id='rev1b') |
|
901 |
inv.root.revision = 'rev1b' |
|
902 |
self.add_file(repo, inv, 'file1', 'rev1b', []) |
|
903 |
repo.add_inventory('rev1b', inv, []) |
|
904 |
||
905 |
# make rev2, with file1 and file2
|
|
906 |
# file2 is sane
|
|
907 |
# file1 has 'rev1b' as an ancestor, even though this is not
|
|
908 |
# mentioned by 'rev1a', making it an unreferenced ancestor
|
|
909 |
inv = inventory.Inventory() |
|
910 |
self.add_file(repo, inv, 'file1', 'rev2', ['rev1a', 'rev1b']) |
|
911 |
self.add_file(repo, inv, 'file2', 'rev2', []) |
|
912 |
self.add_revision(repo, 'rev2', inv, ['rev1a']) |
|
913 |
||
914 |
# make ghost revision rev1c
|
|
915 |
inv = inventory.Inventory() |
|
916 |
self.add_file(repo, inv, 'file2', 'rev1c', []) |
|
917 |
||
918 |
# make rev3 with file2
|
|
919 |
# file2 refers to 'rev1c', which is a ghost in this repository, so
|
|
920 |
# file2 cannot have rev1c as its ancestor.
|
|
921 |
inv = inventory.Inventory() |
|
922 |
self.add_file(repo, inv, 'file2', 'rev3', ['rev1c']) |
|
923 |
self.add_revision(repo, 'rev3', inv, ['rev1c']) |
|
924 |
return repo |
|
925 |
finally: |
|
926 |
for cleanup in reversed(cleanups): |
|
927 |
cleanup() |
|
928 |
||
929 |
def add_revision(self, repo, revision_id, inv, parent_ids): |
|
930 |
inv.revision_id = revision_id |
|
931 |
inv.root.revision = revision_id |
|
4634.35.21
by Andrew Bennetts
Fix test_insert_from_broken_repo in test_repository. |
932 |
repo.texts.add_lines((inv.root.file_id, revision_id), [], []) |
2535.3.57
by Andrew Bennetts
Perform some sanity checking of data streams rather than blindly inserting them into our repository. |
933 |
repo.add_inventory(revision_id, inv, parent_ids) |
934 |
revision = _mod_revision.Revision(revision_id, |
|
935 |
committer='jrandom@example.com', timestamp=0, inventory_sha1='', |
|
936 |
timezone=0, message='foo', parent_ids=parent_ids) |
|
937 |
repo.add_revision(revision_id, revision, inv) |
|
938 |
||
939 |
def add_file(self, repo, inv, filename, revision, parents): |
|
940 |
file_id = filename + '-id' |
|
941 |
entry = inventory.InventoryFile(file_id, filename, 'TREE_ROOT') |
|
942 |
entry.revision = revision |
|
2535.4.10
by Andrew Bennetts
Fix one failing test, disable another. |
943 |
entry.text_size = 0 |
2535.3.57
by Andrew Bennetts
Perform some sanity checking of data streams rather than blindly inserting them into our repository. |
944 |
inv.add(entry) |
3350.6.4
by Robert Collins
First cut at pluralised VersionedFiles. Some rather massive API incompatabilities, primarily because of the difficulty of coherence among competing stores. |
945 |
text_key = (file_id, revision) |
946 |
parent_keys = [(file_id, parent) for parent in parents] |
|
947 |
repo.texts.add_lines(text_key, parent_keys, ['line\n']) |
|
2535.3.57
by Andrew Bennetts
Perform some sanity checking of data streams rather than blindly inserting them into our repository. |
948 |
|
949 |
def test_insert_from_broken_repo(self): |
|
950 |
"""Inserting a data stream from a broken repository won't silently
|
|
951 |
corrupt the target repository.
|
|
952 |
"""
|
|
953 |
broken_repo = self.make_broken_repository() |
|
954 |
empty_repo = self.make_repository('empty-repo') |
|
4606.1.1
by Robert Collins
Change test_insert_from_broken_repo from a known failure to a working test. |
955 |
try: |
956 |
empty_repo.fetch(broken_repo) |
|
957 |
except (errors.RevisionNotPresent, errors.BzrCheckError): |
|
958 |
# Test successful: compression parent not being copied leads to
|
|
959 |
# error.
|
|
960 |
return
|
|
961 |
empty_repo.lock_read() |
|
962 |
self.addCleanup(empty_repo.unlock) |
|
963 |
text = empty_repo.texts.get_record_stream( |
|
964 |
[('file2-id', 'rev3')], 'topological', True).next() |
|
965 |
self.assertEqual('line\n', text.get_bytes_as('fulltext')) |
|
2592.3.214
by Robert Collins
Merge bzr.dev. |
966 |
|
967 |
||
2592.3.84
by Robert Collins
Start of autopacking logic. |
968 |
class TestRepositoryPackCollection(TestCaseWithTransport): |
969 |
||
970 |
def get_format(self): |
|
3010.3.3
by Martin Pool
Merge trunk |
971 |
return bzrdir.format_registry.make_bzrdir('pack-0.92') |
2592.3.84
by Robert Collins
Start of autopacking logic. |
972 |
|
3711.4.1
by John Arbash Meinel
Fix bug #242510, when determining the autopack sequence, |
973 |
def get_packs(self): |
974 |
format = self.get_format() |
|
975 |
repo = self.make_repository('.', format=format) |
|
976 |
return repo._pack_collection |
|
977 |
||
3789.2.20
by John Arbash Meinel
The autopack code can now trigger itself to retry when _copy_revision_texts fails. |
978 |
def make_packs_and_alt_repo(self, write_lock=False): |
3789.2.19
by John Arbash Meinel
Refactor to make the tests a bit simpler |
979 |
"""Create a pack repo with 3 packs, and access it via a second repo."""
|
4617.4.1
by Robert Collins
Fix a pack specific test which didn't lock its format down. |
980 |
tree = self.make_branch_and_tree('.', format=self.get_format()) |
3789.2.19
by John Arbash Meinel
Refactor to make the tests a bit simpler |
981 |
tree.lock_write() |
982 |
self.addCleanup(tree.unlock) |
|
983 |
rev1 = tree.commit('one') |
|
984 |
rev2 = tree.commit('two') |
|
985 |
rev3 = tree.commit('three') |
|
986 |
r = repository.Repository.open('.') |
|
3789.2.20
by John Arbash Meinel
The autopack code can now trigger itself to retry when _copy_revision_texts fails. |
987 |
if write_lock: |
988 |
r.lock_write() |
|
989 |
else: |
|
990 |
r.lock_read() |
|
3789.2.19
by John Arbash Meinel
Refactor to make the tests a bit simpler |
991 |
self.addCleanup(r.unlock) |
992 |
packs = r._pack_collection |
|
993 |
packs.ensure_loaded() |
|
994 |
return tree, r, packs, [rev1, rev2, rev3] |
|
995 |
||
4634.127.1
by John Arbash Meinel
Partial fix for bug #507557. |
996 |
def test__clear_obsolete_packs(self): |
997 |
packs = self.get_packs() |
|
998 |
obsolete_pack_trans = packs.transport.clone('obsolete_packs') |
|
999 |
obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n') |
|
1000 |
obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n') |
|
1001 |
obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n') |
|
1002 |
obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n') |
|
1003 |
obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n') |
|
1004 |
res = packs._clear_obsolete_packs() |
|
1005 |
self.assertEqual(['a-pack', 'another-pack'], sorted(res)) |
|
1006 |
self.assertEqual([], obsolete_pack_trans.list_dir('.')) |
|
1007 |
||
1008 |
def test__clear_obsolete_packs_preserve(self): |
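# Packs named in 'preserve' must keep all of their files; everything else in obsolete_packs is removed.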
|
1009 |
packs = self.get_packs() |
|
1010 |
obsolete_pack_trans = packs.transport.clone('obsolete_packs') |
|
1011 |
obsolete_pack_trans.put_bytes('a-pack.pack', 'content\n') |
|
1012 |
obsolete_pack_trans.put_bytes('a-pack.rix', 'content\n') |
|
1013 |
obsolete_pack_trans.put_bytes('a-pack.iix', 'content\n') |
|
1014 |
obsolete_pack_trans.put_bytes('another-pack.pack', 'foo\n') |
|
1015 |
obsolete_pack_trans.put_bytes('not-a-pack.rix', 'foo\n') |
|
1016 |
res = packs._clear_obsolete_packs(preserve=set(['a-pack'])) |
|
1017 |
self.assertEqual(['a-pack', 'another-pack'], sorted(res)) |
|
1018 |
self.assertEqual(['a-pack.iix', 'a-pack.pack', 'a-pack.rix'], |
|
1019 |
sorted(obsolete_pack_trans.list_dir('.'))) |
|
1020 |
||
2592.3.84
by Robert Collins
Start of autopacking logic. |
1021 |
def test__max_pack_count(self): |
2592.3.219
by Robert Collins
Review feedback. |
1022 |
"""The maximum pack count is a function of the number of revisions."""
|
2592.3.84
by Robert Collins
Start of autopacking logic. |
1023 |
# no revisions - one pack, so that we can have a revision-free repo
|
1024 |
# without it blowing up
|
|
3711.4.1
by John Arbash Meinel
Fix bug #242510, when determining the autopack sequence, |
1025 |
packs = self.get_packs() |
2592.3.84
by Robert Collins
Start of autopacking logic. |
1026 |
self.assertEqual(1, packs._max_pack_count(0)) |
1027 |
# after that it is the sum of the digits - check the first 1-9
|
|
1028 |
self.assertEqual(1, packs._max_pack_count(1)) |
|
1029 |
self.assertEqual(2, packs._max_pack_count(2)) |
|
1030 |
self.assertEqual(3, packs._max_pack_count(3)) |
|
1031 |
self.assertEqual(4, packs._max_pack_count(4)) |
|
1032 |
self.assertEqual(5, packs._max_pack_count(5)) |
|
1033 |
self.assertEqual(6, packs._max_pack_count(6)) |
|
1034 |
self.assertEqual(7, packs._max_pack_count(7)) |
|
1035 |
self.assertEqual(8, packs._max_pack_count(8)) |
|
1036 |
self.assertEqual(9, packs._max_pack_count(9)) |
|
1037 |
# check the boundary cases with two digits for the next decade
|
|
1038 |
self.assertEqual(1, packs._max_pack_count(10)) |
|
1039 |
self.assertEqual(2, packs._max_pack_count(11)) |
|
1040 |
self.assertEqual(10, packs._max_pack_count(19)) |
|
1041 |
self.assertEqual(2, packs._max_pack_count(20)) |
|
1042 |
self.assertEqual(3, packs._max_pack_count(21)) |
|
1043 |
# check some arbitrary big numbers
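# (digit sum of 112894: 1+1+2+8+9+4 == 25)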
|
|
1044 |
self.assertEqual(25, packs._max_pack_count(112894)) |
|
1045 |
||
4928.1.1
by Martin Pool
Give RepositoryPackCollection a repr |
1046 |
def test_repr(self): |
1047 |
packs = self.get_packs() |
|
1048 |
self.assertContainsRe(repr(packs), |
|
1049 |
'RepositoryPackCollection(.*Repository(.*))') |
|
1050 |
||
4634.127.2
by John Arbash Meinel
Change the _obsolete_packs code to handle files that are already gone. |
1051 |
def test__obsolete_packs(self): |
1052 |
tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True) |
|
1053 |
names = packs.names() |
|
1054 |
pack = packs.get_pack_by_name(names[0]) |
|
1055 |
# Schedule this one for removal
|
|
1056 |
packs._remove_pack_from_memory(pack) |
|
1057 |
# Simulate a concurrent update by renaming the .pack file and one of
|
|
1058 |
# the indices
|
|
1059 |
packs.transport.rename('packs/%s.pack' % (names[0],), |
|
1060 |
'obsolete_packs/%s.pack' % (names[0],)) |
|
1061 |
packs.transport.rename('indices/%s.iix' % (names[0],), |
|
1062 |
'obsolete_packs/%s.iix' % (names[0],)) |
|
1063 |
# Now trigger the obsoletion, and ensure that all the remaining files
|
|
1064 |
# are still renamed
|
|
1065 |
packs._obsolete_packs([pack]) |
|
1066 |
self.assertEqual([n + '.pack' for n in names[1:]], |
|
1067 |
sorted(packs._pack_transport.list_dir('.'))) |
|
1068 |
# names[0] should not be present in the index anymore
|
|
1069 |
self.assertEqual(names[1:], |
|
1070 |
sorted(set([osutils.splitext(n)[0] for n in |
|
1071 |
packs._index_transport.list_dir('.')]))) |
|
1072 |
||
2592.3.84
by Robert Collins
Start of autopacking logic. |
1073 |
def test_pack_distribution_zero(self): |
3711.4.1
by John Arbash Meinel
Fix bug #242510, when determining the autopack sequence, |
1074 |
packs = self.get_packs() |
2592.3.84
by Robert Collins
Start of autopacking logic. |
1075 |
self.assertEqual([0], packs.pack_distribution(0)) |
3052.1.6
by John Arbash Meinel
Change the lock check to raise ObjectNotLocked. |
1076 |
|
1077 |
def test_ensure_loaded_unlocked(self): |
|
3711.4.1
by John Arbash Meinel
Fix bug #242510, when determining the autopack sequence, |
1078 |
packs = self.get_packs() |
3052.1.6
by John Arbash Meinel
Change the lock check to raise ObjectNotLocked. |
1079 |
self.assertRaises(errors.ObjectNotLocked, |
3711.4.1
by John Arbash Meinel
Fix bug #242510, when determining the autopack sequence, |
1080 |
packs.ensure_loaded) |
3052.1.6
by John Arbash Meinel
Change the lock check to raise ObjectNotLocked. |
1081 |
|
2592.3.84
by Robert Collins
Start of autopacking logic. |
1082 |
def test_pack_distribution_one_to_nine(self): |
3711.4.1
by John Arbash Meinel
Fix bug #242510, when determining the autopack sequence, |
1083 |
packs = self.get_packs() |
2592.3.84
by Robert Collins
Start of autopacking logic. |
1084 |
self.assertEqual([1], |
1085 |
packs.pack_distribution(1)) |
|
1086 |
self.assertEqual([1, 1], |
|
1087 |
packs.pack_distribution(2)) |
|
1088 |
self.assertEqual([1, 1, 1], |
|
1089 |
packs.pack_distribution(3)) |
|
1090 |
self.assertEqual([1, 1, 1, 1], |
|
1091 |
packs.pack_distribution(4)) |
|
1092 |
self.assertEqual([1, 1, 1, 1, 1], |
|
1093 |
packs.pack_distribution(5)) |
|
1094 |
self.assertEqual([1, 1, 1, 1, 1, 1], |
|
1095 |
packs.pack_distribution(6)) |
|
1096 |
self.assertEqual([1, 1, 1, 1, 1, 1, 1], |
|
1097 |
packs.pack_distribution(7)) |
|
1098 |
self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1], |
|
1099 |
packs.pack_distribution(8)) |
|
1100 |
self.assertEqual([1, 1, 1, 1, 1, 1, 1, 1, 1], |
|
1101 |
packs.pack_distribution(9)) |
|
1102 |
||
1103 |
def test_pack_distribution_stable_at_boundaries(self): |
|
1104 |
"""When there are multi-rev packs the counts are stable."""
|
|
3711.4.1
by John Arbash Meinel
Fix bug #242510, when determining the autopack sequence, |
1105 |
packs = self.get_packs() |
2592.3.84
by Robert Collins
Start of autopacking logic. |
1106 |
# in 10s:
|
1107 |
self.assertEqual([10], packs.pack_distribution(10)) |
|
1108 |
self.assertEqual([10, 1], packs.pack_distribution(11)) |
|
1109 |
self.assertEqual([10, 10], packs.pack_distribution(20)) |
|
1110 |
self.assertEqual([10, 10, 1], packs.pack_distribution(21)) |
|
1111 |
# 100s
|
|
1112 |
self.assertEqual([100], packs.pack_distribution(100)) |
|
1113 |
self.assertEqual([100, 1], packs.pack_distribution(101)) |
|
1114 |
self.assertEqual([100, 10, 1], packs.pack_distribution(111)) |
|
1115 |
self.assertEqual([100, 100], packs.pack_distribution(200)) |
|
1116 |
self.assertEqual([100, 100, 1], packs.pack_distribution(201)) |
|
1117 |
self.assertEqual([100, 100, 10, 1], packs.pack_distribution(211)) |
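# i.e. pack_distribution simply mirrors the decimal digits of the revision count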
|
1118 |
||
2592.3.85
by Robert Collins
Finish autopack corner cases. |
1119 |
def test_plan_pack_operations_2009_revisions_skip_all_packs(self): |
3711.4.1
by John Arbash Meinel
Fix bug #242510, when determining the autopack sequence, |
1120 |
packs = self.get_packs() |
2592.3.85
by Robert Collins
Finish autopack corner cases. |
1121 |
existing_packs = [(2000, "big"), (9, "medium")] |
1122 |
# rev count - 2009 -> 2x1000 + 9x1
|
|
1123 |
pack_operations = packs.plan_autopack_combinations( |
|
1124 |
existing_packs, [1000, 1000, 1, 1, 1, 1, 1, 1, 1, 1, 1]) |
|
1125 |
self.assertEqual([], pack_operations) |
|
1126 |
||
1127 |
def test_plan_pack_operations_2010_revisions_skip_all_packs(self): |
|
3711.4.1
by John Arbash Meinel
Fix bug #242510, when determining the autopack sequence, |
1128 |
packs = self.get_packs() |
2592.3.85
by Robert Collins
Finish autopack corner cases. |
1129 |
existing_packs = [(2000, "big"), (9, "medium"), (1, "single")] |
1130 |
# rev count - 2010 -> 2x1000 + 1x10
|
|
1131 |
pack_operations = packs.plan_autopack_combinations( |
|
1132 |
existing_packs, [1000, 1000, 10]) |
|
1133 |
self.assertEqual([], pack_operations) |
|
1134 |
||
1135 |
def test_plan_pack_operations_2010_combines_smallest_two(self): |
|
3711.4.1
by John Arbash Meinel
Fix bug #242510, when determining the autopack sequence, |
1136 |
packs = self.get_packs() |
2592.3.85
by Robert Collins
Finish autopack corner cases. |
1137 |
existing_packs = [(1999, "big"), (9, "medium"), (1, "single2"), |
1138 |
(1, "single1")] |
|
1139 |
# rev count - 2010 -> 2x1000 + 1x10 (3)
|
|
1140 |
pack_operations = packs.plan_autopack_combinations( |
|
1141 |
existing_packs, [1000, 1000, 10]) |
|
3711.4.2
by John Arbash Meinel
Change the logic to solve it in a different way. |
1142 |
self.assertEqual([[2, ["single2", "single1"]]], pack_operations) |
2592.3.85
by Robert Collins
Finish autopack corner cases. |
1143 |
|
3711.4.2
by John Arbash Meinel
Change the logic to solve it in a different way. |
1144 |
def test_plan_pack_operations_creates_a_single_op(self): |
3711.4.1
by John Arbash Meinel
Fix bug #242510, when determining the autopack sequence, |
1145 |
packs = self.get_packs() |
3711.4.2
by John Arbash Meinel
Change the logic to solve it in a different way. |
1146 |
existing_packs = [(50, 'a'), (40, 'b'), (30, 'c'), (10, 'd'), |
1147 |
(10, 'e'), (6, 'f'), (4, 'g')] |
|
1148 |
# rev count 150 -> 1x100 and 5x10
|
|
1149 |
# The two size 10 packs do not need to be touched. The 50, 40, 30 would
|
|
1150 |
# be combined into a single 120 size pack, and the 6 & 4 would
|
|
1151 |
# be combined into a size 10 pack. However, if we have to rewrite them,
|
|
1152 |
# we save a pack file with no increased I/O by putting them into the
|
|
1153 |
# same file.
|
|
1154 |
distribution = packs.pack_distribution(150) |
|
3711.4.1
by John Arbash Meinel
Fix bug #242510, when determining the autopack sequence, |
1155 |
pack_operations = packs.plan_autopack_combinations(existing_packs, |
3711.4.2
by John Arbash Meinel
Change the logic to solve it in a different way. |
1156 |
distribution) |
1157 |
self.assertEqual([[130, ['a', 'b', 'c', 'f', 'g']]], pack_operations) |
|
3711.4.1
by John Arbash Meinel
Fix bug #242510, when determining the autopack sequence, |
1158 |
|
2592.3.173
by Robert Collins
Basic implementation of all_packs. |
1159 |
def test_all_packs_none(self): |
1160 |
format = self.get_format() |
|
1161 |
tree = self.make_branch_and_tree('.', format=format) |
|
1162 |
tree.lock_read() |
|
1163 |
self.addCleanup(tree.unlock) |
|
2592.3.232
by Martin Pool
Disambiguate two member variables called _packs into _packs_by_name and _pack_collection |
1164 |
packs = tree.branch.repository._pack_collection |
2592.3.173
by Robert Collins
Basic implementation of all_packs. |
1165 |
packs.ensure_loaded() |
1166 |
self.assertEqual([], packs.all_packs()) |
|
1167 |
||
1168 |
def test_all_packs_one(self): |
|
1169 |
format = self.get_format() |
|
1170 |
tree = self.make_branch_and_tree('.', format=format) |
|
1171 |
tree.commit('start') |
|
1172 |
tree.lock_read() |
|
1173 |
self.addCleanup(tree.unlock) |
|
2592.3.232
by Martin Pool
Disambiguate two member variables called _packs into _packs_by_name and _pack_collection |
1174 |
packs = tree.branch.repository._pack_collection |
2592.3.173
by Robert Collins
Basic implementation of all_packs. |
1175 |
packs.ensure_loaded() |
2592.3.176
by Robert Collins
Various pack refactorings. |
1176 |
self.assertEqual([ |
1177 |
packs.get_pack_by_name(packs.names()[0])], |
|
1178 |
packs.all_packs()) |
|
2592.3.173
by Robert Collins
Basic implementation of all_packs. |
1179 |
|
1180 |
def test_all_packs_two(self): |
|
1181 |
format = self.get_format() |
|
1182 |
tree = self.make_branch_and_tree('.', format=format) |
|
1183 |
tree.commit('start') |
|
1184 |
tree.commit('continue') |
|
1185 |
tree.lock_read() |
|
1186 |
self.addCleanup(tree.unlock) |
|
2592.3.232
by Martin Pool
Disambiguate two member variables called _packs into _packs_by_name and _pack_collection |
1187 |
packs = tree.branch.repository._pack_collection |
2592.3.173
by Robert Collins
Basic implementation of all_packs. |
1188 |
packs.ensure_loaded() |
1189 |
self.assertEqual([ |
|
2592.3.176
by Robert Collins
Various pack refactorings. |
1190 |
packs.get_pack_by_name(packs.names()[0]), |
1191 |
packs.get_pack_by_name(packs.names()[1]), |
|
2592.3.173
by Robert Collins
Basic implementation of all_packs. |
1192 |
], packs.all_packs()) |
1193 |
||
2592.3.176
by Robert Collins
Various pack refactorings. |
1194 |
def test_get_pack_by_name(self): |
1195 |
format = self.get_format() |
|
1196 |
tree = self.make_branch_and_tree('.', format=format) |
|
1197 |
tree.commit('start') |
|
1198 |
tree.lock_read() |
|
1199 |
self.addCleanup(tree.unlock) |
|
2592.3.232
by Martin Pool
Disambiguate two member variables called _packs into _packs_by_name and _pack_collection |
1200 |
packs = tree.branch.repository._pack_collection |
4145.1.6
by Robert Collins
More test fallout, but all caught now. |
1201 |
packs.reset() |
2592.3.176
by Robert Collins
Various pack refactorings. |
1202 |
packs.ensure_loaded() |
1203 |
name = packs.names()[0] |
|
1204 |
pack_1 = packs.get_pack_by_name(name) |
|
1205 |
# the pack should be correctly initialised
|
|
3517.4.5
by Martin Pool
Correct use of packs._names in test_get_pack_by_name |
1206 |
sizes = packs._names[name] |
3221.12.4
by Robert Collins
Implement basic repository supporting external references. |
1207 |
rev_index = GraphIndex(packs._index_transport, name + '.rix', sizes[0]) |
1208 |
inv_index = GraphIndex(packs._index_transport, name + '.iix', sizes[1]) |
|
1209 |
txt_index = GraphIndex(packs._index_transport, name + '.tix', sizes[2]) |
|
1210 |
sig_index = GraphIndex(packs._index_transport, name + '.six', sizes[3]) |
|
2592.3.191
by Robert Collins
Give Pack responsibility for index naming, and two concrete classes - NewPack for new packs and ExistingPack for packs we read from disk. |
1211 |
self.assertEqual(pack_repo.ExistingPack(packs._pack_transport, |
2592.3.219
by Robert Collins
Review feedback. |
1212 |
name, rev_index, inv_index, txt_index, sig_index), pack_1) |
2592.3.176
by Robert Collins
Various pack refactorings. |
1213 |
# and the same instance should be returned on successive calls.
|
1214 |
self.assertTrue(pack_1 is packs.get_pack_by_name(name)) |
|
1215 |
||
3789.1.2
by John Arbash Meinel
Add RepositoryPackCollection.reload_pack_names() |
1216 |
def test_reload_pack_names_new_entry(self): |
3789.2.19
by John Arbash Meinel
Refactor to make the tests a bit simpler |
1217 |
tree, r, packs, revs = self.make_packs_and_alt_repo() |
3789.1.2
by John Arbash Meinel
Add RepositoryPackCollection.reload_pack_names() |
1218 |
names = packs.names() |
1219 |
# Add a new pack file into the repository
|
|
3789.2.19
by John Arbash Meinel
Refactor to make the tests a bit simpler |
1220 |
rev4 = tree.commit('four') |
3789.1.2
by John Arbash Meinel
Add RepositoryPackCollection.reload_pack_names() |
1221 |
new_names = tree.branch.repository._pack_collection.names() |
1222 |
new_name = set(new_names).difference(names) |
|
1223 |
self.assertEqual(1, len(new_name)) |
|
1224 |
new_name = new_name.pop() |
|
1225 |
# The old collection hasn't noticed yet
|
|
1226 |
self.assertEqual(names, packs.names()) |
|
3789.1.8
by John Arbash Meinel
Change the api of reload_pack_names(). |
1227 |
self.assertTrue(packs.reload_pack_names()) |
3789.1.2
by John Arbash Meinel
Add RepositoryPackCollection.reload_pack_names() |
1228 |
self.assertEqual(new_names, packs.names()) |
1229 |
# And the repository can access the new revision
|
|
3789.2.19
by John Arbash Meinel
Refactor to make the tests a bit simpler |
1230 |
self.assertEqual({rev4: (revs[-1],)}, r.get_parent_map([rev4])) |
3789.1.8
by John Arbash Meinel
Change the api of reload_pack_names(). |
1231 |
self.assertFalse(packs.reload_pack_names()) |
3789.1.2
by John Arbash Meinel
Add RepositoryPackCollection.reload_pack_names() |
1232 |
|
1233 |
def test_reload_pack_names_added_and_removed(self): |
|
3789.2.19
by John Arbash Meinel
Refactor to make the tests a bit simpler |
1234 |
tree, r, packs, revs = self.make_packs_and_alt_repo() |
3789.1.2
by John Arbash Meinel
Add RepositoryPackCollection.reload_pack_names() |
1235 |
names = packs.names() |
1236 |
# Now repack the whole thing
|
|
1237 |
tree.branch.repository.pack() |
|
1238 |
new_names = tree.branch.repository._pack_collection.names() |
|
1239 |
# The other collection hasn't noticed yet
|
|
1240 |
self.assertEqual(names, packs.names()) |
|
3789.1.8
by John Arbash Meinel
Change the api of reload_pack_names(). |
1241 |
self.assertTrue(packs.reload_pack_names()) |
3789.1.2
by John Arbash Meinel
Add RepositoryPackCollection.reload_pack_names() |
1242 |
self.assertEqual(new_names, packs.names()) |
3789.2.19
by John Arbash Meinel
Refactor to make the tests a bit simpler |
1243 |
self.assertEqual({revs[-1]: (revs[-2],)}, r.get_parent_map([revs[-1]])) |
3789.1.8
by John Arbash Meinel
Change the api of reload_pack_names(). |
1244 |
self.assertFalse(packs.reload_pack_names()) |
3789.1.2
by John Arbash Meinel
Add RepositoryPackCollection.reload_pack_names() |
1245 |
|
4634.126.1
by John Arbash Meinel
(jam) Fix bug #507566, concurrent autopacking correctness. |
1246 |
def test_reload_pack_names_preserves_pending(self): |
1247 |
# TODO: Update this to also test for pending-deleted names
|
|
1248 |
tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True) |
|
1249 |
# We will add one pack (via start_write_group + insert_record_stream),
|
|
1250 |
# and remove another pack (via _remove_pack_from_memory)
|
|
1251 |
orig_names = packs.names() |
|
1252 |
orig_at_load = packs._packs_at_load |
|
1253 |
to_remove_name = iter(orig_names).next() |
|
1254 |
r.start_write_group() |
|
1255 |
self.addCleanup(r.abort_write_group) |
|
1256 |
r.texts.insert_record_stream([versionedfile.FulltextContentFactory( |
|
1257 |
('text', 'rev'), (), None, 'content\n')]) |
|
1258 |
new_pack = packs._new_pack |
|
1259 |
self.assertTrue(new_pack.data_inserted()) |
|
1260 |
new_pack.finish() |
|
1261 |
packs.allocate(new_pack) |
|
1262 |
packs._new_pack = None |
|
1263 |
removed_pack = packs.get_pack_by_name(to_remove_name) |
|
1264 |
packs._remove_pack_from_memory(removed_pack) |
|
1265 |
names = packs.names() |
|
4634.127.3
by John Arbash Meinel
Add code so we don't try to obsolete files someone else has 'claimed'. |
1266 |
all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names() |
4634.126.1
by John Arbash Meinel
(jam) Fix bug #507566, concurrent autopacking correctness. |
1267 |
new_names = set([x[0][0] for x in new_nodes]) |
1268 |
self.assertEqual(names, sorted([x[0][0] for x in all_nodes])) |
|
1269 |
self.assertEqual(set(names) - set(orig_names), new_names) |
|
1270 |
self.assertEqual(set([new_pack.name]), new_names) |
|
1271 |
self.assertEqual([to_remove_name], |
|
1272 |
sorted([x[0][0] for x in deleted_nodes])) |
|
1273 |
packs.reload_pack_names() |
|
1274 |
reloaded_names = packs.names() |
|
1275 |
self.assertEqual(orig_at_load, packs._packs_at_load) |
|
1276 |
self.assertEqual(names, reloaded_names) |
|
4634.127.3
by John Arbash Meinel
Add code so we don't try to obsolete files someone else has 'claimed'. |
1277 |
all_nodes, deleted_nodes, new_nodes, _ = packs._diff_pack_names() |
4634.126.1
by John Arbash Meinel
(jam) Fix bug #507566, concurrent autopacking correctness. |
1278 |
new_names = set([x[0][0] for x in new_nodes]) |
1279 |
self.assertEqual(names, sorted([x[0][0] for x in all_nodes])) |
|
1280 |
self.assertEqual(set(names) - set(orig_names), new_names) |
|
1281 |
self.assertEqual(set([new_pack.name]), new_names) |
|
1282 |
self.assertEqual([to_remove_name], |
|
1283 |
sorted([x[0][0] for x in deleted_nodes])) |
|
1284 |
||
4634.127.5
by John Arbash Meinel
Possible fix for making sure packs triggering autopacking get cleaned up. |
1285 |
def test_autopack_obsoletes_new_pack(self): |
1286 |
tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True) |
|
1287 |
packs._max_pack_count = lambda x: 1 |
|
1288 |
packs.pack_distribution = lambda x: [10] |
|
1289 |
r.start_write_group() |
|
1290 |
r.revisions.insert_record_stream([versionedfile.FulltextContentFactory( |
|
1291 |
('bogus-rev',), (), None, 'bogus-content\n')]) |
|
1292 |
# This should trigger an autopack, which will combine everything into a
|
|
1293 |
# single pack file.
|
|
1294 |
new_names = r.commit_write_group() |
|
1295 |
names = packs.names() |
|
1296 |
self.assertEqual(1, len(names)) |
|
1297 |
self.assertEqual([names[0] + '.pack'], |
|
1298 |
packs._pack_transport.list_dir('.')) |
|
1299 |
||
3789.2.20
by John Arbash Meinel
The autopack code can now trigger itself to retry when _copy_revision_texts fails. |
1300 |
def test_autopack_reloads_and_stops(self): |
1301 |
tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True) |
|
1302 |
# After we have determined what needs to be autopacked, trigger a
|
|
1303 |
# full-pack via the other repo, which will cause us to re-evaluate and
|
|
1304 |
# decide we don't need to do anything
|
|
1305 |
orig_execute = packs._execute_pack_operations |
|
1306 |
def _munged_execute_pack_ops(*args, **kwargs): |
|
1307 |
tree.branch.repository.pack() |
|
1308 |
return orig_execute(*args, **kwargs) |
|
1309 |
packs._execute_pack_operations = _munged_execute_pack_ops |
|
1310 |
packs._max_pack_count = lambda x: 1 |
|
1311 |
packs.pack_distribution = lambda x: [10] |
|
1312 |
self.assertFalse(packs.autopack()) |
|
1313 |
self.assertEqual(1, len(packs.names())) |
|
1314 |
self.assertEqual(tree.branch.repository._pack_collection.names(), |
|
1315 |
packs.names()) |
|
1316 |
||
4634.127.1
by John Arbash Meinel
Partial fix for bug #507557. |
1317 |
def test__save_pack_names(self): |
1318 |
tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True) |
|
1319 |
names = packs.names() |
|
1320 |
pack = packs.get_pack_by_name(names[0]) |
|
1321 |
packs._remove_pack_from_memory(pack) |
|
1322 |
packs._save_pack_names(obsolete_packs=[pack]) |
|
1323 |
cur_packs = packs._pack_transport.list_dir('.') |
|
1324 |
self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs)) |
|
1325 |
# obsolete_packs will also have stuff like .rix and .iix present.
|
|
1326 |
obsolete_packs = packs.transport.list_dir('obsolete_packs') |
|
1327 |
obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs]) |
|
1328 |
self.assertEqual([pack.name], sorted(obsolete_names)) |
|
1329 |
||
1330 |
def test__save_pack_names_already_obsoleted(self): |
|
1331 |
tree, r, packs, revs = self.make_packs_and_alt_repo(write_lock=True) |
|
1332 |
names = packs.names() |
|
1333 |
pack = packs.get_pack_by_name(names[0]) |
|
1334 |
packs._remove_pack_from_memory(pack) |
|
1335 |
# We are going to simulate a concurrent autopack by manually obsoleting
|
|
1336 |
# the pack directly.
|
|
1337 |
packs._obsolete_packs([pack]) |
|
1338 |
packs._save_pack_names(clear_obsolete_packs=True, |
|
1339 |
obsolete_packs=[pack]) |
|
1340 |
cur_packs = packs._pack_transport.list_dir('.') |
|
1341 |
self.assertEqual([n + '.pack' for n in names[1:]], sorted(cur_packs)) |
|
1342 |
# Note that while we set clear_obsolete_packs=True, it should not
|
|
1343 |
# delete a pack file that we have also scheduled for obsoletion.
|
|
1344 |
obsolete_packs = packs.transport.list_dir('obsolete_packs') |
|
1345 |
obsolete_names = set([osutils.splitext(n)[0] for n in obsolete_packs]) |
|
1346 |
self.assertEqual([pack.name], sorted(obsolete_names)) |
|
1347 |
||
4634.127.3
by John Arbash Meinel
Add code so we don't try to obsolete files someone else has 'claimed'. |
1348 |
|
2592.3.173
by Robert Collins
Basic implementation of all_packs. |
1349 |
|
1350 |
class TestPack(TestCaseWithTransport): |
|
1351 |
"""Tests for the Pack object."""
|
|
1352 |
||
1353 |
def assertCurrentlyEqual(self, left, right): |
|
1354 |
self.assertTrue(left == right) |
|
1355 |
self.assertTrue(right == left) |
|
1356 |
self.assertFalse(left != right) |
|
1357 |
self.assertFalse(right != left) |
|
1358 |
||
1359 |
def assertCurrentlyNotEqual(self, left, right): |
|
1360 |
self.assertFalse(left == right) |
|
1361 |
self.assertFalse(right == left) |
|
1362 |
self.assertTrue(left != right) |
|
1363 |
self.assertTrue(right != left) |
|
1364 |
||
1365 |
def test___eq____ne__(self): |
|
2592.3.191
by Robert Collins
Give Pack responsibility for index naming, and two concrete classes - NewPack for new packs and ExistingPack for packs we read from disk. |
1366 |
left = pack_repo.ExistingPack('', '', '', '', '', '') |
1367 |
right = pack_repo.ExistingPack('', '', '', '', '', '') |
|
2592.3.173
by Robert Collins
Basic implementation of all_packs. |
1368 |
self.assertCurrentlyEqual(left, right) |
1369 |
# change all attributes and ensure equality changes as we do.
|
|
1370 |
left.revision_index = 'a' |
|
1371 |
self.assertCurrentlyNotEqual(left, right) |
|
1372 |
right.revision_index = 'a' |
|
1373 |
self.assertCurrentlyEqual(left, right) |
|
1374 |
left.inventory_index = 'a' |
|
1375 |
self.assertCurrentlyNotEqual(left, right) |
|
1376 |
right.inventory_index = 'a' |
|
1377 |
self.assertCurrentlyEqual(left, right) |
|
1378 |
left.text_index = 'a' |
|
1379 |
self.assertCurrentlyNotEqual(left, right) |
|
1380 |
right.text_index = 'a' |
|
1381 |
self.assertCurrentlyEqual(left, right) |
|
1382 |
left.signature_index = 'a' |
|
1383 |
self.assertCurrentlyNotEqual(left, right) |
|
1384 |
right.signature_index = 'a' |
|
1385 |
self.assertCurrentlyEqual(left, right) |
|
1386 |
left.name = 'a' |
|
1387 |
self.assertCurrentlyNotEqual(left, right) |
|
1388 |
right.name = 'a' |
|
1389 |
self.assertCurrentlyEqual(left, right) |
|
1390 |
left.transport = 'a' |
|
1391 |
self.assertCurrentlyNotEqual(left, right) |
|
1392 |
right.transport = 'a' |
|
1393 |
self.assertCurrentlyEqual(left, right) |
|
2592.3.179
by Robert Collins
Generate the revision_index_map for packing during the core operation, from the pack objects. |
1394 |
|
1395 |
def test_file_name(self): |
|
2592.3.191
by Robert Collins
Give Pack responsibility for index naming, and two concrete classes - NewPack for new packs and ExistingPack for packs we read from disk. |
1396 |
pack = pack_repo.ExistingPack('', 'a_name', '', '', '', '') |
2592.3.179
by Robert Collins
Generate the revision_index_map for packing during the core operation, from the pack objects. |
1397 |
self.assertEqual('a_name.pack', pack.file_name()) |
2592.3.192
by Robert Collins
Move new revision index management to NewPack. |
1398 |
|
1399 |
||
1400 |
class TestNewPack(TestCaseWithTransport): |
|
1401 |
"""Tests for pack_repo.NewPack."""
|
|
1402 |
||
2592.3.193
by Robert Collins
Move hash tracking of new packs into NewPack. |
1403 |
def test_new_instance_attributes(self): |
2592.3.194
by Robert Collins
Output the revision index from NewPack.finish |
1404 |
upload_transport = self.get_transport('upload') |
1405 |
pack_transport = self.get_transport('pack') |
|
1406 |
index_transport = self.get_transport('index') |
|
1407 |
upload_transport.mkdir('.') |
|
4241.6.8
by Robert Collins, John Arbash Meinel, Ian Clatworthy, Vincent Ladeuil
Add --development6-rich-root, disabling the legacy and unneeded development2 format, and activating the tests for CHK features disabled pending this format. (Robert Collins, John Arbash Meinel, Ian Clatworthy, Vincent Ladeuil) |
1408 |
collection = pack_repo.RepositoryPackCollection( |
1409 |
repo=None, |
|
3830.3.1
by Martin Pool
NewPack should be constructed from the PackCollection, rather than attributes of it |
1410 |
transport=self.get_transport('.'), |
1411 |
index_transport=index_transport, |
|
1412 |
upload_transport=upload_transport, |
|
1413 |
pack_transport=pack_transport, |
|
1414 |
index_builder_class=BTreeBuilder, |
|
4241.6.8
by Robert Collins, John Arbash Meinel, Ian Clatworthy, Vincent Ladeuil
Add --development6-rich-root, disabling the legacy and unneeded development2 format, and activating the tests for CHK features disabled pending this format. (Robert Collins, John Arbash Meinel, Ian Clatworthy, Vincent Ladeuil) |
1415 |
index_class=BTreeGraphIndex, |
1416 |
use_chk_index=False) |
|
3830.3.1
by Martin Pool
NewPack should be constructed from the PackCollection, rather than attributes of it |
1417 |
pack = pack_repo.NewPack(collection) |
4857.2.1
by John Arbash Meinel
2 test_repository tests weren't adding cleanups when opening files. |
1418 |
self.addCleanup(pack.abort) # Make sure the write stream gets closed |
3735.1.1
by Robert Collins
Add development2 formats using BTree indices. |
1419 |
self.assertIsInstance(pack.revision_index, BTreeBuilder) |
1420 |
self.assertIsInstance(pack.inventory_index, BTreeBuilder) |
|
2929.3.5
by Vincent Ladeuil
New files, same warnings, same fixes. |
1421 |
self.assertIsInstance(pack._hash, type(osutils.md5())) |
2592.3.194
by Robert Collins
Output the revision index from NewPack.finish |
1422 |
self.assertTrue(pack.upload_transport is upload_transport) |
1423 |
self.assertTrue(pack.index_transport is index_transport) |
|
1424 |
self.assertTrue(pack.pack_transport is pack_transport) |
|
1425 |
self.assertEqual(None, pack.index_sizes) |
|
1426 |
self.assertEqual(20, len(pack.random_name)) |
|
1427 |
self.assertIsInstance(pack.random_name, str) |
|
1428 |
self.assertIsInstance(pack.start_time, float) |
|
2951.1.2
by Robert Collins
Partial refactoring of pack_repo to create a Packer object for packing. |
1429 |
|
1430 |
||
1431 |
class TestPacker(TestCaseWithTransport): |
|
1432 |
"""Tests for the packs repository Packer class."""
|
|
2951.1.10
by Robert Collins
Peer review feedback with Ian. |
1433 |
|
3824.2.4
by John Arbash Meinel
Add a test that ensures the pack ordering changes as part of calling .pack() |
1434 |
def test_pack_optimizes_pack_order(self): |
4617.8.1
by Robert Collins
Lock down another test assuming the default was a PackRepository. |
1435 |
builder = self.make_branch_builder('.', format="1.9") |
3824.2.4
by John Arbash Meinel
Add a test that ensures the pack ordering changes as part of calling .pack() |
1436 |
builder.start_series() |
1437 |
builder.build_snapshot('A', None, [ |
|
1438 |
('add', ('', 'root-id', 'directory', None)), |
|
1439 |
('add', ('f', 'f-id', 'file', 'content\n'))]) |
|
1440 |
builder.build_snapshot('B', ['A'], |
|
1441 |
[('modify', ('f-id', 'new-content\n'))]) |
|
1442 |
builder.build_snapshot('C', ['B'], |
|
1443 |
[('modify', ('f-id', 'third-content\n'))]) |
|
1444 |
builder.build_snapshot('D', ['C'], |
|
1445 |
[('modify', ('f-id', 'fourth-content\n'))]) |
|
1446 |
b = builder.get_branch() |
|
1447 |
b.lock_read() |
|
1448 |
builder.finish_series() |
|
1449 |
self.addCleanup(b.unlock) |
|
1450 |
# At this point, we should have 4 pack files available
|
|
1451 |
# Because of how they were built, they correspond to
|
|
1452 |
# ['D', 'C', 'B', 'A']
|
|
1453 |
packs = b.repository._pack_collection.packs |
|
5757.7.5
by Jelmer Vernooij
Fix imports in tests. |
1454 |
packer = knitpack_repo.KnitPacker(b.repository._pack_collection, |
3824.2.4
by John Arbash Meinel
Add a test that ensures the pack ordering changes as part of calling .pack() |
1455 |
packs, 'testing', |
1456 |
revision_ids=['B', 'C']) |
|
1457 |
# Now, when we are copying the B & C revisions, their pack files should
|
|
1458 |
# be moved to the front of the stack
|
|
3824.2.5
by Andrew Bennetts
Minor tweaks to comments etc. |
1459 |
# The new ordering moves B & C to the front of the .packs attribute,
|
1460 |
# and leaves the others in the original order.
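# With the original ['D', 'C', 'B', 'A'] ordering, that works out to [C, B, D, A].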
|
|
3824.2.4
by John Arbash Meinel
Add a test that ensures the pack ordering changes as part of calling .pack() |
1461 |
new_packs = [packs[1], packs[2], packs[0], packs[3]] |
1462 |
new_pack = packer.pack() |
|
1463 |
self.assertEqual(new_packs, packer.packs) |
|
3146.6.1
by Aaron Bentley
InterDifferingSerializer shows a progress bar |
1464 |
|
1465 |
||
3777.5.4
by John Arbash Meinel
OptimisingPacker now sets the optimize flags for the indexes being built. |
1466 |
class TestOptimisingPacker(TestCaseWithTransport): |
1467 |
"""Tests for the OptimisingPacker class."""
|
|
1468 |
||
1469 |
def get_pack_collection(self): |
|
1470 |
repo = self.make_repository('.') |
|
1471 |
return repo._pack_collection |
|
1472 |
||
1473 |
def test_open_pack_will_optimise(self): |
|
5757.7.3
by Jelmer Vernooij
Move more knitpack-specific functionality out of Packer. |
1474 |
packer = knitpack_repo.OptimisingKnitPacker(self.get_pack_collection(), |
3777.5.4
by John Arbash Meinel
OptimisingPacker now sets the optimize flags for the indexes being built. |
1475 |
[], '.test') |
1476 |
new_pack = packer.open_pack() |
|
4857.2.1
by John Arbash Meinel
2 test_repository tests weren't adding cleanups when opening files. |
1477 |
self.addCleanup(new_pack.abort) # ensure cleanup |
3777.5.4
by John Arbash Meinel
OptimisingPacker now sets the optimize flags for the indexes being built. |
1478 |
self.assertIsInstance(new_pack, pack_repo.NewPack) |
1479 |
self.assertTrue(new_pack.revision_index._optimize_for_size) |
|
1480 |
self.assertTrue(new_pack.inventory_index._optimize_for_size) |
|
1481 |
self.assertTrue(new_pack.text_index._optimize_for_size) |
|
1482 |
self.assertTrue(new_pack.signature_index._optimize_for_size) |
|
4462.2.6
by Robert Collins
Cause StreamSink to partially pack repositories after cross format fetches when beneficial. |
1483 |
|
1484 |
||
4634.170.1
by John Arbash Meinel
Fix bug #437003. Autopacking should not fail for an |
1485 |
class TestGCCHKPacker(TestCaseWithTransport): |
1486 |
||
1487 |
def make_abc_branch(self): |
|
1488 |
builder = self.make_branch_builder('source') |
|
1489 |
builder.start_series() |
|
1490 |
builder.build_snapshot('A', None, [ |
|
1491 |
('add', ('', 'root-id', 'directory', None)), |
|
1492 |
('add', ('file', 'file-id', 'file', 'content\n')), |
|
1493 |
])
|
|
1494 |
builder.build_snapshot('B', ['A'], [ |
|
1495 |
('add', ('dir', 'dir-id', 'directory', None))]) |
|
1496 |
builder.build_snapshot('C', ['B'], [ |
|
1497 |
('modify', ('file-id', 'new content\n'))]) |
|
1498 |
builder.finish_series() |
|
1499 |
return builder.get_branch() |
|
1500 |
||
1501 |
def make_branch_with_disjoint_inventory_and_revision(self): |
|
1502 |
"""a repo with separate packs for a revisions Revision and Inventory.
|
|
1503 |
||
1504 |
There will be one pack file that holds the Revision content, and one
|
|
1505 |
for the Inventory content.
|
|
1506 |
||
1507 |
:return: (repository,
|
|
1508 |
pack_name_with_rev_A_Revision,
|
|
1509 |
pack_name_with_rev_A_Inventory,
|
|
1510 |
pack_name_with_rev_C_content)
|
|
1511 |
"""
|
|
1512 |
b_source = self.make_abc_branch() |
|
1513 |
b_base = b_source.bzrdir.sprout('base', revision_id='A').open_branch() |
|
1514 |
b_stacked = b_base.bzrdir.sprout('stacked', stacked=True).open_branch() |
|
1515 |
b_stacked.lock_write() |
|
1516 |
self.addCleanup(b_stacked.unlock) |
|
1517 |
b_stacked.fetch(b_source, 'B') |
|
1518 |
# Now re-open the stacked repo directly (no fallbacks) so that we can
|
|
1519 |
# fill in the A rev.
|
|
1520 |
repo_not_stacked = b_stacked.bzrdir.open_repository() |
|
1521 |
repo_not_stacked.lock_write() |
|
1522 |
self.addCleanup(repo_not_stacked.unlock) |
|
1523 |
# Now we should have a pack file with A's inventory, but not its
|
|
1524 |
# Revision
|
|
1525 |
self.assertEqual([('A',), ('B',)], |
|
1526 |
sorted(repo_not_stacked.inventories.keys())) |
|
1527 |
self.assertEqual([('B',)], |
|
1528 |
sorted(repo_not_stacked.revisions.keys())) |
|
1529 |
stacked_pack_names = repo_not_stacked._pack_collection.names() |
|
1530 |
# We have a couple of names here; figure out which one has A's inventory
|
|
1531 |
for name in stacked_pack_names: |
|
1532 |
pack = repo_not_stacked._pack_collection.get_pack_by_name(name) |
|
1533 |
keys = [n[1] for n in pack.inventory_index.iter_all_entries()] |
|
1534 |
if ('A',) in keys: |
|
1535 |
inv_a_pack_name = name |
|
1536 |
break
|
|
1537 |
else: |
|
1538 |
self.fail('Could not find pack containing A\'s inventory') |
|
1539 |
repo_not_stacked.fetch(b_source.repository, 'A') |
|
1540 |
self.assertEqual([('A',), ('B',)], |
|
1541 |
sorted(repo_not_stacked.revisions.keys())) |
|
1542 |
new_pack_names = set(repo_not_stacked._pack_collection.names()) |
|
1543 |
rev_a_pack_names = new_pack_names.difference(stacked_pack_names) |
|
1544 |
self.assertEqual(1, len(rev_a_pack_names)) |
|
1545 |
rev_a_pack_name = list(rev_a_pack_names)[0] |
|
1546 |
# Now fetch 'C', so we have a couple of pack files to join
|
|
1547 |
repo_not_stacked.fetch(b_source.repository, 'C') |
|
1548 |
rev_c_pack_names = set(repo_not_stacked._pack_collection.names()) |
|
1549 |
rev_c_pack_names = rev_c_pack_names.difference(new_pack_names) |
|
1550 |
self.assertEqual(1, len(rev_c_pack_names)) |
|
1551 |
rev_c_pack_name = list(rev_c_pack_names)[0] |
|
1552 |
return (repo_not_stacked, rev_a_pack_name, inv_a_pack_name, |
|
1553 |
rev_c_pack_name) |
|
1554 |
||
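The pack names returned by this helper can be mapped back to Pack objects to confirm the disjoint layout. The following sketch is illustrative only; it assumes the per-pack revision_index can be queried the same way the helper already queries inventory_index:

    def _show_disjoint_packs(self):
        # Illustrative sketch: 'A's Revision text and 'A's Inventory really
        # do live in two different pack files.
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        rev_a_pack = repo._pack_collection.get_pack_by_name(rev_a_pack_name)
        inv_a_pack = repo._pack_collection.get_pack_by_name(inv_a_pack_name)
        rev_keys = [n[1] for n in rev_a_pack.revision_index.iter_all_entries()]
        inv_keys = [n[1] for n in inv_a_pack.inventory_index.iter_all_entries()]
        self.assertTrue(('A',) in rev_keys)
        self.assertTrue(('A',) in inv_keys)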
1555 |
def test_pack_with_distant_inventories(self): |
|
1556 |
# See https://bugs.launchpad.net/bzr/+bug/437003
|
|
1557 |
# When repacking, it is possible to have an inventory in a different
|
|
1558 |
# pack file than the associated revision. An autopack can then come
|
|
1559 |
# along, and miss that inventory, and complain.
|
|
1560 |
(repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name |
|
1561 |
) = self.make_branch_with_disjoint_inventory_and_revision() |
|
1562 |
a_pack = repo._pack_collection.get_pack_by_name(rev_a_pack_name) |
|
1563 |
c_pack = repo._pack_collection.get_pack_by_name(rev_c_pack_name) |
|
1564 |
packer = groupcompress_repo.GCCHKPacker(repo._pack_collection, |
|
1565 |
[a_pack, c_pack], '.test-pack') |
|
1566 |
# This would raise ValueError in bug #437003, but should not raise an
|
|
1567 |
# error once fixed.
|
|
1568 |
packer.pack() |
|
1569 |
||
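For contrast, a full repack through the public Repository.pack() API considers every pack file at once, so it cannot miss the distant inventory. A hypothetical variant of the test above, not part of the original suite:

    def _sketch_full_pack_with_distant_inventories(self):
        # Hypothetical variant: Repository.pack() repacks all pack files, so
        # 'A's Revision and Inventory necessarily end up together again.
        (repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name
         ) = self.make_branch_with_disjoint_inventory_and_revision()
        repo.pack()
        self.assertEqual([('A',), ('B',), ('C',)],
                         sorted(repo.revisions.keys()))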
1570 |
def test_pack_with_missing_inventory(self): |
|
1571 |
# Similar to test_pack_with_distant_inventories, but this time, we force
|
|
1572 |
# the A inventory to actually be gone from the repository.
|
|
1573 |
(repo, rev_a_pack_name, inv_a_pack_name, rev_c_pack_name |
|
1574 |
) = self.make_branch_with_disjoint_inventory_and_revision() |
|
1575 |
inv_a_pack = repo._pack_collection.get_pack_by_name(inv_a_pack_name) |
|
1576 |
repo._pack_collection._remove_pack_from_memory(inv_a_pack) |
|
1577 |
packer = groupcompress_repo.GCCHKPacker(repo._pack_collection, |
|
1578 |
repo._pack_collection.all_packs(), '.test-pack') |
|
1579 |
e = self.assertRaises(ValueError, packer.pack) |
|
1580 |
packer.new_pack.abort() |
|
4634.170.2
by John Arbash Meinel
Loosen the match a bit. Newer versions have StaticTuple. |
1581 |
self.assertContainsRe(str(e), |
1582 |
r"We are missing inventories for revisions: .*'A'") |
|
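The loosened regular expression (revision 4634.170.2) matches whether the missing keys render as plain tuples or as StaticTuple instances in newer bzrlib builds. A standalone illustration; the set([...]) wrapper around the keys is assumed for the example, only the message prefix and the .*'A' tail come from the test:

import re

pattern = r"We are missing inventories for revisions: .*'A'"
for keys_repr in ("set([('A',)])", "set([StaticTuple('A',)])"):
    # Both repr styles still contain the quoted revision id, so both match.
    message = "We are missing inventories for revisions: %s" % keys_repr
    assert re.search(pattern, message) is not None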
4634.170.1
by John Arbash Meinel
Fix bug #437003. Autopacking should not fail for an |
1583 |
|
1584 |
||
4431.3.7
by Jonathan Lange
Cherrypick bzr.dev 4470, resolving conflicts. |
1585 |
class TestCrossFormatPacks(TestCaseWithTransport): |
1586 |
||
1587 |
def log_pack(self, hint=None): |
|
1588 |
self.calls.append(('pack', hint)) |
|
1589 |
self.orig_pack(hint=hint) |
|
1590 |
if self.expect_hint: |
|
1591 |
self.assertTrue(hint) |
|
1592 |
||
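log_pack works by shadowing the target repository's bound pack method with an instance attribute, recording each call and then delegating to the saved original. The same interception pattern, reduced to plain Python with a stand-in class (all names here are illustrative):

class FakeRepo(object):
    def pack(self, hint=None):
        return 'packed with %r' % (hint,)

calls = []
repo = FakeRepo()
orig_pack = repo.pack                 # keep the bound original
def log_pack(hint=None):
    calls.append(('pack', hint))      # record the call ...
    return orig_pack(hint=hint)       # ... then delegate to the real pack()
repo.pack = log_pack                  # instance attribute shadows the method

repo.pack(hint=['pack-name'])
assert calls == [('pack', ['pack-name'])]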
1593 |
def run_stream(self, src_fmt, target_fmt, expect_pack_called): |
|
1594 |
self.expect_hint = expect_pack_called |
|
1595 |
self.calls = [] |
|
1596 |
source_tree = self.make_branch_and_tree('src', format=src_fmt) |
|
1597 |
source_tree.lock_write() |
|
1598 |
self.addCleanup(source_tree.unlock) |
|
1599 |
tip = source_tree.commit('foo') |
|
1600 |
target = self.make_repository('target', format=target_fmt) |
|
1601 |
target.lock_write() |
|
1602 |
self.addCleanup(target.unlock) |
|
1603 |
source = source_tree.branch.repository._get_source(target._format) |
|
1604 |
self.orig_pack = target.pack |
|
1605 |
target.pack = self.log_pack |
|
1606 |
search = target.search_missing_revision_ids( |
|
5539.2.11
by Andrew Bennetts
Fix deprecation warning from test suite. |
1607 |
source_tree.branch.repository, revision_ids=[tip]) |
4431.3.7
by Jonathan Lange
Cherrypick bzr.dev 4470, resolving conflicts. |
1608 |
stream = source.get_stream(search) |
1609 |
from_format = source_tree.branch.repository._format |
|
1610 |
sink = target._get_sink() |
|
1611 |
sink.insert_stream(stream, from_format, []) |
|
1612 |
if expect_pack_called: |
|
1613 |
self.assertLength(1, self.calls) |
|
1614 |
else: |
|
1615 |
self.assertLength(0, self.calls) |
|
1616 |
||
1617 |
def run_fetch(self, src_fmt, target_fmt, expect_pack_called): |
|
1618 |
self.expect_hint = expect_pack_called |
|
1619 |
self.calls = [] |
|
1620 |
source_tree = self.make_branch_and_tree('src', format=src_fmt) |
|
1621 |
source_tree.lock_write() |
|
1622 |
self.addCleanup(source_tree.unlock) |
|
1623 |
tip = source_tree.commit('foo') |
|
1624 |
target = self.make_repository('target', format=target_fmt) |
|
1625 |
target.lock_write() |
|
1626 |
self.addCleanup(target.unlock) |
|
1627 |
source = source_tree.branch.repository |
|
1628 |
self.orig_pack = target.pack |
|
1629 |
target.pack = self.log_pack |
|
1630 |
target.fetch(source) |
|
1631 |
if expect_pack_called: |
|
1632 |
self.assertLength(1, self.calls) |
|
1633 |
else: |
|
1634 |
self.assertLength(0, self.calls) |
|
1635 |
||
1636 |
def test_sink_format_hint_no(self): |
|
1637 |
# When the target format says packing makes no difference, pack is not
|
|
1638 |
# called.
|
|
1639 |
self.run_stream('1.9', 'rich-root-pack', False) |
|
1640 |
||
1641 |
def test_sink_format_hint_yes(self): |
|
1642 |
# When the target format says packing makes a difference, pack is
|
|
1643 |
# called.
|
|
1644 |
self.run_stream('1.9', '2a', True) |
|
1645 |
||
1646 |
def test_sink_format_same_no(self): |
|
1647 |
# When the formats are the same, pack is not called.
|
|
1648 |
self.run_stream('2a', '2a', False) |
|
1649 |
||
1650 |
def test_IDS_format_hint_no(self): |
|
1651 |
# When the target format says packing makes no difference, pack is not
|
|
1652 |
# called.
|
|
1653 |
self.run_fetch('1.9', 'rich-root-pack', False) |
|
1654 |
||
1655 |
def test_IDS_format_hint_yes(self): |
|
1656 |
# When the target format says packing makes a difference, pack is
|
|
1657 |
# called.
|
|
1658 |
self.run_fetch('1.9', '2a', True) |
|
1659 |
||
1660 |
def test_IDS_format_same_no(self): |
|
1661 |
# When the formats are the same, pack is not called.
|
|
1662 |
self.run_fetch('2a', '2a', False) |
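The matrix could be extended symmetrically on the knit-pack side; the expected value in this hypothetical extra case is an assumption that mirrors the '2a'/'2a' cases rather than anything asserted by the original suite:

    def test_IDS_knit_format_same_no(self):
        # Hypothetical extra case: fetching between two identical '1.9'
        # repositories is assumed to need no post-fetch pack either.
        self.run_fetch('1.9', '1.9', False)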