         :return: True if packing took place.
         """
         # XXX: Should not be needed when the management of indices is sane.
-        total_revisions = self.revision_index.combined_index.key_count()
-        total_packs = len(self._names)
+        total_revisions = self._local_revision_index().key_count()
+        total_packs = len(list(collection for collection, sizes in
+            self._names.values() if collection is self))
         if self._max_pack_count(total_revisions) >= total_packs:
             return False
         # XXX: the following may want to be a class, to pack with a given
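
Note: throughout this patch the shape of self._names changes from {name: index_sizes} to {name: (owning_collection, index_sizes)}. A minimal sketch of the new layout and of the filtering idiom used for total_packs (the names, sizes and collection objects below are made up):

    # Hypothetical stand-ins: 'local' plays the role of self, 'fallback' the
    # pack collection of a stacked-on repository.
    local, fallback = object(), object()

    # New layout: pack name -> (owning collection, index sizes).
    names = {
        'aaaa': (local, (100, 200, 300, 10)),
        'bbbb': (fallback, (50, 60, 70, 5)),
    }

    # Same filtering idiom as the patch uses for total_packs.
    total_packs = len([collection for collection, sizes in names.values()
                       if collection is local])
    assert total_packs == 1
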
         for revision_count, packs in pack_operations:
             self._obsolete_packs(packs)

+    def _local_revision_index(self):
+        """Return a combined index for all the local packs only."""
+        index = CombinedGraphIndex([])
+        for name, (collection, sizes) in self._names.items():
+            if collection is not self:
+                continue
+            index.insert_index(0, self.get_pack_by_name(name).revision_index)
+        return index
+
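
The new _local_revision_index combines only the revision indices of packs owned by this collection, so key_count() on it sees local revisions only. A rough plain-Python model of that behaviour (no bzrlib types; revision_keys_by_pack stands in for each pack's revision index):

    def count_local_revisions(names, revision_keys_by_pack, me):
        # names: pack name -> (collection, sizes), as in self._names
        # revision_keys_by_pack: pack name -> iterable of revision keys, a
        # stand-in for each pack's .revision_index contents
        keys = set()
        for name, (collection, sizes) in names.items():
            if collection is not me:
                continue
            keys.update(revision_keys_by_pack[name])
        return len(keys)

    me, other = object(), object()
    names = {'aaaa': (me, ()), 'bbbb': (other, ())}
    keys = {'aaaa': ['rev-1', 'rev-2'], 'bbbb': ['rev-3']}
    assert count_local_revisions(names, keys, me) == 2
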
     def lock_names(self):
         """Acquire the mutex around the pack-names index.
     def pack(self):
         """Pack the pack collection totally."""
         self.ensure_loaded()
-        total_packs = len(self._names)
+        total_packs = len(list(collection for collection, sizes in
+            self._names.values() if collection is self))
         if total_packs < 2:
             # This is arguably wrong because we might not be optimal, but for
             # now lets leave it in. (e.g. reconcile -> one pack. But not
             # normal operation)
             return
-        total_revisions = self.revision_index.combined_index.key_count()
+        total_revisions = self._local_revision_index().key_count()
         # XXX: the following may want to be a class, to pack with a given
         # policy.
         mutter('Packing repository %s, which has %d pack files, '
             'containing %d revisions into 1 packs.', self, total_packs,
             total_revisions)
         # determine which packs need changing
         pack_distribution = [1]
         pack_operations = [[0, []]]
         for pack in self.all_packs():
+            if self._names[pack.name][0] is not self:
+                continue
             pack_operations[-1][0] += pack.get_revision_count()
             pack_operations[-1][1].append(pack)
         self._execute_pack_operations(pack_operations, OptimisingPacker)
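
pack_operations is a list of [revision_count, [packs]] buckets; with the new guard, packs owned by a fallback collection never enter a bucket, so a total repack rewrites local packs only. A sketch with made-up packs:

    # Hypothetical packs: (name, revision count, owned by this collection?)
    packs = [('aaaa', 10, True), ('bbbb', 7, False), ('cccc', 3, True)]

    pack_operations = [[0, []]]
    for name, rev_count, is_local in packs:
        if not is_local:
            # mirrors: if self._names[pack.name][0] is not self: continue
            continue
        pack_operations[-1][0] += rev_count
        pack_operations[-1][1].append(name)

    assert pack_operations == [[13, ['aaaa', 'cccc']]]
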
             raise errors.ObjectNotLocked(self.repo)
         if self._names is None:
             self._names = {}
+            # Get fallback repository packs.
+            # TODO: we really should try local packs first and thus order the
+            # indices appropriately.
+            self._names.update(self.fallback_packs_details())
+            # Now the local packs.
             self._packs_at_load = set()
             for index, key, value in self._iter_disk_pack_index():
                 name = key[0]
-                self._names[name] = self._parse_index_sizes(value)
+                self._names[name] = (self, self._parse_index_sizes(value))
                 self._packs_at_load.add((key, value))
             # populate all the metadata.
             self.all_packs()
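
For reference, this is roughly what one row of the pack-names index turns into after this loop (made-up data): _packs_at_load keeps the raw (key, value) pairs for the later two-way diff in _save_pack_names, while _names is keyed by the bare name and now also records the owning collection.

    local = object()
    # The index key is a 1-tuple; the value is the space-separated sizes string.
    index_key, index_value = ('aaaa',), '100 200 300 10'

    packs_at_load = set([(index_key, index_value)])
    names = {index_key[0]: (local, (100, 200, 300, 10))}
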
+    def fallback_packs_details(self):
+        """Return a dict of name -> (collection, index sizes) tuples."""
+        result = {}
+        for repo in self.repo._fallback_repositories:
+            collection = repo._pack_collection
+            for index, key, value in collection._iter_disk_pack_index():
+                result[key[0]] = (collection, self._parse_index_sizes(value))
+        return result
+
     def _parse_index_sizes(self, value):
         """Parse a string of index sizes."""
         return tuple([int(digits) for digits in value.split(' ')])
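
A worked example of the size-string format stored in the pack-names index (values made up):

    def parse_index_sizes(value):
        # mirrors _parse_index_sizes above
        return tuple([int(digits) for digits in value.split(' ')])

    # One size per index file of the pack (.rix, .iix, .tix, .six); which
    # position belongs to which suffix is defined by self._suffix_offsets.
    assert parse_index_sizes('100 200 300 10') == (100, 200, 300, 10)
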
     def get_pack_by_name(self, name):
         """Get a Pack object by name.

+        If previously accessed, this returns from the self._packs_by_name cache.
+
         :param name: The name of the pack - e.g. '123456'
         :return: A Pack object.
         """
         try:
             return self._packs_by_name[name]
         except KeyError:
+            collection = self._names[name][0]
             rev_index = self._make_index(name, '.rix')
             inv_index = self._make_index(name, '.iix')
             txt_index = self._make_index(name, '.tix')
             sig_index = self._make_index(name, '.six')
-            result = ExistingPack(self._pack_transport, name, rev_index,
+            result = ExistingPack(collection._pack_transport, name, rev_index,
                 inv_index, txt_index, sig_index)
             self.add_pack_to_memory(result)
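
The point of threading the owning collection through get_pack_by_name and _make_index is that a fallback pack's files are opened via the fallback collection's transports, not via self's. A minimal sketch of that lookup pattern (FakeCollection and the transport strings are stand-ins):

    class FakeCollection(object):
        # Stands in for a RepositoryPackCollection; only the attribute that
        # get_pack_by_name/_make_index need is modelled here.
        def __init__(self, index_transport):
            self._index_transport = index_transport

    local = FakeCollection('local-indices')
    fallback = FakeCollection('fallback-indices')
    names = {'aaaa': (local, (1, 2, 3, 4)), 'bbbb': (fallback, (5, 6, 7, 8))}

    # The pattern used above: resolve the owning collection first, then open
    # the pack's files through its transports.
    collection = names['bbbb'][0]
    assert collection._index_transport == 'fallback-indices'
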
         if a_new_pack.name in self._names:
             raise errors.BzrError(
                 'Pack %r already exists in %s' % (a_new_pack.name, self))
-        self._names[a_new_pack.name] = tuple(a_new_pack.index_sizes)
+        self._names[a_new_pack.name] = self, tuple(a_new_pack.index_sizes)
         self.add_pack_to_memory(a_new_pack)

     def _iter_disk_pack_index(self):
                 ).iter_all_entries()

     def _make_index(self, name, suffix):
+        collection = self._names[name][0]
         size_offset = self._suffix_offsets[suffix]
         index_name = name + suffix
-        index_size = self._names[name][size_offset]
+        index_size = self._names[name][1][size_offset]
         return GraphIndex(
-            self._index_transport, index_name, index_size)
+            collection._index_transport, index_name, index_size)
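
The extra [1] in _make_index follows directly from the new value shape; a small equivalence check (made-up sizes and offset):

    # Before: _names[name] was just the sizes tuple, so sizes[size_offset]
    # worked directly. Now the sizes sit in slot 1 of (collection, sizes).
    local = object()
    names_old = {'aaaa': (100, 200, 300, 10)}
    names_new = {'aaaa': (local, (100, 200, 300, 10))}

    size_offset = 2  # whichever offset _suffix_offsets maps the suffix to
    assert names_old['aaaa'][size_offset] == names_new['aaaa'][1][size_offset]
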
     def _max_pack_count(self, total_revisions):
         """Return the maximum number of packs to use for total revisions.

                 disk_nodes.add((key, value))
             # do a two-way diff against our original content
             current_nodes = set()
-            for name, sizes in self._names.iteritems():
+            for name, (collection, sizes) in self._names.iteritems():
+                if collection is not self:
+                    continue
                 current_nodes.add(
                     ((name, ), ' '.join(str(size) for size in sizes)))
             deleted_nodes = self._packs_at_load - current_nodes

             self._unlock_names()
         # synchronise the memory packs list with what we just wrote:
-        new_names = dict(disk_nodes)
+        new_names = self.fallback_packs_details()
+        for key, value in disk_nodes:
+            new_names[key[0]] = self, self._parse_index_sizes(value)
         # drop no longer present nodes
         for pack in self.all_packs():
-            if (pack.name,) not in new_names:
+            if pack.name not in new_names:
                 self._remove_pack_from_memory(pack)
         # add new nodes/refresh existing ones
-        for key, value in disk_nodes:
-            name = key[0]
-            sizes = self._parse_index_sizes(value)
+        for name, (collection, sizes) in new_names.iteritems():
             if name in self._names:
-                if sizes != self._names[name]:
+                if sizes != self._names[name][1]:
                     # the pack for name has had its indices replaced - rare but
                     # important to handle. XXX: probably can never happen today
                     # because the three-way merge code above does not handle it
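
The synchronisation now rebuilds new_names keyed by bare pack names, seeded from the fallback details and refreshed from disk_nodes, so the 'pack.name not in new_names' test works for both local and fallback packs. A sketch of the resulting dict (made-up data):

    local, fallback = object(), object()

    disk_nodes = set([(('aaaa',), '100 200 300 10')])
    new_names = {'ffff': (fallback, (11, 22, 33, 4))}   # from fallback_packs_details()
    for key, value in disk_nodes:
        new_names[key[0]] = local, tuple(int(d) for d in value.split(' '))

    assert sorted(new_names) == ['aaaa', 'ffff']
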
+    def _add_fallback_repository_check(self, repository):
+        """Check that this repository can fallback to repository safely.
+
+        :param repository: A repository to fallback to.
+        :return: True if the repositories can stack ok.
+        """
+        return (InterRepository._same_model(self, repository) and
+            self._format.__class__ == repository._format.__class__)
+
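
A toy illustration of what the stacking check accepts and rejects (the format classes below are stand-ins, and InterRepository._same_model is reduced to a flag): same model plus the exact same format class is required, so a subclassed format does not pass.

    class FormatA(object):
        pass

    class FormatB(FormatA):
        pass

    def can_stack(format1, format2, same_model=True):
        # mirrors the return expression above, with _same_model stubbed out
        return same_model and format1.__class__ == format2.__class__

    assert can_stack(FormatA(), FormatA())
    assert not can_stack(FormatA(), FormatB())
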
     def _find_inconsistent_revision_parents(self):
         """Find revisions with incorrectly cached parents.

             "pack-0.92-subtree\n")
+
+
+class RepositoryFormatPackDevelopment1(RepositoryFormatPackDevelopment0):
+    """A no-subtrees development repository.
+
+    This format should be retained until the second release after bzr 1.2.
+
+    Supports external lookups, which results in non-truncated ghosts after
+    reconcile compared to pack-0.92 formats.
+    """
+
+    supports_external_lookups = True
+
+    def _get_matching_bzrdir(self):
+        return bzrdir.format_registry.make_bzrdir('development1')
+
+    def _ignore_setting_bzrdir(self, format):
+        pass
+
+    _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
+
+    def get_format_string(self):
+        """See RepositoryFormat.get_format_string()."""
+        return "Bazaar development format 1 (needs bzr.dev from before 1.3)\n"
+
+    def get_format_description(self):
+        """See RepositoryFormat.get_format_description()."""
+        return ("Development repository format, currently the same as "
+            "pack-0.92 with external reference support.\n")
+
+    def check_conversion_target(self, target_format):
+        pass
+
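
For trying the new format out: it is reached through the bzrdir format registry under the name used by _get_matching_bzrdir above (a sketch, assuming the 'development1' registry entry is added elsewhere in this change):

    from bzrlib import bzrdir

    # Assumes the 'development1' registry entry exists; _get_matching_bzrdir
    # above looks the bzrdir format up by that name.
    fmt = bzrdir.format_registry.make_bzrdir('development1')
    print(fmt.repository_format.get_format_string())
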
+
+class RepositoryFormatPackDevelopment1Subtree(RepositoryFormatPackDevelopment0Subtree):
+    """A subtrees development repository.
+
+    This format should be retained until the second release after bzr 1.2.
+
+    Supports external lookups, which results in non-truncated ghosts after
+    reconcile compared to pack-0.92 formats.
+    """
+
+    supports_external_lookups = True
+
+    def _get_matching_bzrdir(self):
+        return bzrdir.format_registry.make_bzrdir(
+            'development1-subtree')
+
+    def _ignore_setting_bzrdir(self, format):
+        pass
+
+    _matchingbzrdir = property(_get_matching_bzrdir, _ignore_setting_bzrdir)
+
+    def check_conversion_target(self, target_format):
+        if not target_format.rich_root_data:
+            raise errors.BadConversionTarget(
+                'Does not support rich root data.', target_format)
+        if not getattr(target_format, 'supports_tree_reference', False):
+            raise errors.BadConversionTarget(
+                'Does not support nested trees', target_format)
+
+    def get_format_string(self):
+        """See RepositoryFormat.get_format_string()."""
+        return ("Bazaar development format 1 with subtree support "
+            "(needs bzr.dev from before 1.3)\n")
+
+    def get_format_description(self):
+        """See RepositoryFormat.get_format_description()."""
+        return ("Development repository format, currently the same as "
+            "pack-0.92-subtree with external reference support.\n")
"pack-0.92-subtree with external reference support.\n")