[...]
     def __init__(self, upload_transport, index_transport, pack_transport,
-        upload_suffix=''):
+        upload_suffix='', file_mode=None):
         """Create a NewPack instance.

         :param upload_transport: A writable transport for the pack to be
[...]
             upload_transport.clone('../packs').
         :param upload_suffix: An optional suffix to be given to any temporary
             files created during the pack creation. e.g '.autopack'
+        :param file_mode: An optional file mode to create the new files with.
         """
         # The relative locations of the packs are constrained, but all are
         # passed in because the caller has them, so as to avoid object churn.
[...]
         self.index_transport = index_transport
         # where is the pack renamed to when it is finished?
         self.pack_transport = pack_transport
+        # What file mode to upload the pack and indices with.
+        self._file_mode = file_mode
         # tracks the content written to the .pack file.
         self._hash = md5.new()
         # a four-tuple with the length in bytes of the indices, once the pack
[...]
         self.start_time = time.time()
         # open an output stream for the data added to the pack.
         self.write_stream = self.upload_transport.open_write_stream(
-            self.random_name)
+            self.random_name, mode=self._file_mode)
         if 'pack' in debug.debug_flags:
             mutter('%s: create_pack: pack stream open: %s%s t+%6.3fs',
                 time.ctime(), self.upload_transport.base, self.random_name,
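Note: the new file_mode argument is threaded through to every file the pack
creates. A minimal sketch of the intended use, assuming upload_t, index_t and
pack_t are writable transports (hypothetical names) and deriving the mode
roughly the way bzrlib's LockableFiles does (an approximation, not the exact
bzrlib code):

    import os, stat

    # Take the control directory's permission bits and strip the
    # execute/setuid bits to get a mode suitable for plain files.
    dir_mode = stat.S_IMODE(os.stat('.bzr/repository').st_mode)
    file_mode = dir_mode & ~07111

    pack = NewPack(upload_t, index_t, pack_t,
        upload_suffix='.autopack', file_mode=file_mode)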
[...]
         index_name = self.index_name(index_type, self.name)
         self.index_sizes[self.index_offset(index_type)] = \
-            self.index_transport.put_file(index_name, index.finish())
+            self.index_transport.put_file(index_name, index.finish(),
+            mode=self._file_mode)
         if 'pack' in debug.debug_flags:
             # XXX: size might be interesting?
             mutter('%s: create_pack: wrote %s index: %s%s t+%6.3fs',
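Note: put_file already returned the byte count (that is what feeds
index_sizes); the change only adds the chmod. A tiny usage sketch, where
index_transport and builder are stand-ins and the name and mode are
illustrative:

    # finish() yields a file-like object; with mode set, the written
    # file is chmodded to match.
    size = index_transport.put_file('example.rix', builder.finish(),
        mode=0644)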
[...]
         """Open a pack for the pack we are creating."""
         return NewPack(self._pack_collection._upload_transport,
             self._pack_collection._index_transport,
-            self._pack_collection._pack_transport, upload_suffix=self.suffix)
+            self._pack_collection._pack_transport, upload_suffix=self.suffix,
+            file_mode=self._pack_collection.repo.control_files._file_mode)

     def _create_pack_from_packs(self):
         self.pb.update("Opening pack", 0, 5)
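Note: control_files is the repository's LockableFiles instance; its private
_file_mode attribute caches the permissions computed when the control
directory was opened (None when the transport cannot report modes). Roughly,
for a hypothetical repo and packer:

    mode = repo.control_files._file_mode  # e.g. 0644, or None
    new_pack = packer.open_pack()         # the new pack inherits that mode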
[...]
         """Copy knit nodes between packs.
[...]
         :param output_lines: Return lines present in the copied data as
-            an iterator.
+            an iterator of line,version_id.
         """
         pb = ui.ui_factory.nested_progress_bar()
[...]
             for (names, read_func), (_1, _2, (key, eol_flag, references)) in \
                 izip(reader.iter_records(), pack_readv_requests):
                 raw_data = read_func(None)
+                version_id = key[-1]
                 if output_lines:
                     # read the entire thing
-                    content, _ = knit_data._parse_record(key[-1], raw_data)
+                    content, _ = knit_data._parse_record(version_id, raw_data)
                     if len(references[-1]) == 0:
                         line_iterator = factory.get_fulltext_content(content)
                     else:
                         line_iterator = factory.get_linedelta_content(content)
                     for line in line_iterator:
-                        yield line
+                        yield line, version_id
                 else:
                     # check the header only
-                    df, _ = knit_data._parse_record_header(key[-1], raw_data)
+                    df, _ = knit_data._parse_record_header(version_id, raw_data)
                     df.close()
                 pos, size = writer.add_bytes_record(raw_data, names)
                 write_index.add_node(key, eol_flag + "%d %d" % (pos, size), references)
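Note: with output_lines set, the generator now yields (line, version_id)
pairs instead of bare lines, so callers can attribute each copied line to
its version directly. A consuming-side sketch (the argument names are
placeholders, not the full signature):

    text_lines = {}
    for line, version_id in packer._copy_nodes_graph(
            index_map, writer, write_index, output_lines=True):
        # group copied lines by the version they came from
        text_lines.setdefault(version_id, []).append(line)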
[...]
         for key, value in disk_nodes:
             builder.add_node(key, value)
-        self.transport.put_file('pack-names', builder.finish())
+        self.transport.put_file('pack-names', builder.finish(),
+            mode=self.repo.control_files._file_mode)
         # move the baseline forward
         self._packs_at_load = disk_nodes
         # now clear out the obsolete packs directory
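Note: builder here is a GraphIndexBuilder; finish() returns an in-memory
file object ready for put_file. A self-contained sketch of the same pattern
(the key and value below are fabricated):

    from bzrlib.index import GraphIndexBuilder

    builder = GraphIndexBuilder()
    builder.add_node(('pack-0123',), '10 20 30 40')  # made-up sizes
    stream = builder.finish()  # file-like view of the serialised index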
[...]
         if not self.repo.is_write_locked():
             raise errors.NotWriteLocked(self)
         self._new_pack = NewPack(self._upload_transport, self._index_transport,
-            self._pack_transport, upload_suffix='.pack')
+            self._pack_transport, upload_suffix='.pack',
+            file_mode=self.repo.control_files._file_mode)
         # allow writing: queue writes to a new index
         self.revision_index.add_writable_index(self._new_pack.revision_index,
             self._new_pack)
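Note: _start_write_group runs only under a write lock, hence the
NotWriteLocked check above. The usual calling pattern, for a hypothetical
repo:

    repo.lock_write()
    try:
        repo.start_write_group()      # ends up in _start_write_group
        # ... insert texts, inventories, revisions ...
        repo.commit_write_group()
    finally:
        repo.unlock()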
[...]
     _serializer = xml5.serializer_v5

     def _get_matching_bzrdir(self):
-        return bzrdir.format_registry.make_bzrdir('knitpack-experimental')
+        return bzrdir.format_registry.make_bzrdir('pack-0.92')

     def _ignore_setting_bzrdir(self, format):
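Note: the registry key moves from the experimental name to the release
name. Selecting the format afterwards looks like:

    from bzrlib import bzrdir

    # the stable registry name replaces 'knitpack-experimental'
    format = bzrdir.format_registry.make_bzrdir('pack-0.92')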
[...]
     def _get_matching_bzrdir(self):
         return bzrdir.format_registry.make_bzrdir(
-            'knitpack-subtree-experimental')
+            'pack-0.92-subtree')

     def _ignore_setting_bzrdir(self, format):
[...]
     def get_format_description(self):
         """See RepositoryFormat.get_format_description()."""
         return "Packs containing knits with subtree support\n"

+
+class RepositoryFormatKnitPack4(RepositoryFormatPack):
+    """A rich-root, no subtrees parameterised Pack repository.
+
+    This repository format uses the xml6 serializer to get:
+     - support for recording full info about the tree root
+
+    This format was introduced in 1.0.
+    """
+
+    repository_class = KnitPackRepository
+    _commit_builder_class = PackRootCommitBuilder
+    rich_root_data = True
+    supports_tree_reference = False
+    _serializer = xml6.serializer_v6
+
+    def _get_matching_bzrdir(self):
+        return bzrdir.format_registry.make_bzrdir(
+            'rich-root-pack')
+
+    def _ignore_setting_bzrdir(self, format):
+        pass
+
+    _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
+
+    def check_conversion_target(self, target_format):
+        if not target_format.rich_root_data:
+            raise errors.BadConversionTarget(
+                'Does not support rich root data.', target_format)
+
+    def get_format_string(self):
+        """See RepositoryFormat.get_format_string()."""
+        return ("Bazaar pack repository format 1 with rich root"
+                " (needs bzr 1.0)\n")
+
+    def get_format_description(self):
+        """See RepositoryFormat.get_format_description()."""
+        return "Packs containing knits with rich root support\n"