 old   new
 494   494     ie.executable = content_summary[2]
 495   495     file_obj, stat_value = tree.get_file_with_stat(ie.file_id, path)
 497        -  lines = file_obj.readlines()
       497  +  text = file_obj.read()
 501   501     ie.text_sha1, ie.text_size = self._add_text_to_weave(
 502        -  ie.file_id, lines, heads, nostore_sha)
       502  +  ie.file_id, text, heads, nostore_sha)
 503   503     # Let the caller know we generated a stat fingerprint.
 504   504     fingerprint = (ie.text_sha1, stat_value)
 505   505     except errors.ExistingContent:
 518   518     ie.revision = parent_entry.revision
 519   519     return self._get_delta(ie, basis_inv, path), False, None
 521        -  self._add_text_to_weave(ie.file_id, lines, heads, None)
       520  +  self._add_text_to_weave(ie.file_id, '', heads, None)
 522   521     elif kind == 'symlink':
 523   522     current_link_target = content_summary[3]
 532   531     ie.symlink_target = parent_entry.symlink_target
 533   532     return self._get_delta(ie, basis_inv, path), False, None
 534   533     ie.symlink_target = current_link_target
 536        -  self._add_text_to_weave(ie.file_id, lines, heads, None)
       534  +  self._add_text_to_weave(ie.file_id, '', heads, None)
 537   535     elif kind == 'tree-reference':
 539   537     if content_summary[3] != parent_entry.reference_revision:
 544   542     ie.revision = parent_entry.revision
 545   543     return self._get_delta(ie, basis_inv, path), False, None
 546   544     ie.reference_revision = content_summary[3]
 548        -  self._add_text_to_weave(ie.file_id, lines, heads, None)
       545  +  self._add_text_to_weave(ie.file_id, '', heads, None)
 550   547     raise NotImplementedError('unknown kind')
 551   548     ie.revision = self._new_revision_id

 745   742     entry.executable = True
 747   744     entry.executable = False
 748        -  if (carry_over_possible and
       745  +  if (carry_over_possible and
 749   746     parent_entry.executable == entry.executable):
 750   747     # Check the file length, content hash after reading
 754   751     nostore_sha = None
 755   752     file_obj, stat_value = tree.get_file_with_stat(file_id, change[1][1])
 757        -  lines = file_obj.readlines()
       754  +  text = file_obj.read()
 761   758     entry.text_sha1, entry.text_size = self._add_text_to_weave(
 762        -  file_id, lines, heads, nostore_sha)
       759  +  file_id, text, heads, nostore_sha)
 763   760     yield file_id, change[1][1], (entry.text_sha1, stat_value)
 764   761     except errors.ExistingContent:
 765   762     # No content change against a carry_over parent
 774   771     parent_entry.symlink_target == entry.symlink_target):
 775   772     carried_over = True
 777        -  self._add_text_to_weave(change[0], [], heads, None)
       774  +  self._add_text_to_weave(change[0], '', heads, None)
 778   775     elif kind == 'directory':
 779   776     if carry_over_possible:
 780   777     carried_over = True
 782   779     # Nothing to set on the entry.
 783   780     # XXX: split into the Root and nonRoot versions.
 784   781     if change[1][1] != '' or self.repository.supports_rich_root():
 785        -  self._add_text_to_weave(change[0], [], heads, None)
       782  +  self._add_text_to_weave(change[0], '', heads, None)
 786   783     elif kind == 'tree-reference':
 787   784     if not self.repository._format.supports_tree_reference:
 788   785     # This isn't quite sane as an error, but we shouldn't
 797   794     parent_entry.reference_revision == reference_revision):
 798   795     carried_over = True
 800        -  self._add_text_to_weave(change[0], [], heads, None)
       797  +  self._add_text_to_weave(change[0], '', heads, None)
 802   799     raise AssertionError('unknown kind %r' % kind)
 803   800     if not carried_over:
 818   815     self._require_root_change(tree)
 819   816     self.basis_delta_revision = basis_revision_id

 821        -  def _add_text_to_weave(self, file_id, new_lines, parents, nostore_sha):
 822        -  # Note: as we read the content directly from the tree, we know its not
 823        -  # been turned into unicode or badly split - but a broken tree
 824        -  # implementation could give us bad output from readlines() so this is
 825        -  # not a guarantee of safety. What would be better is always checking
 826        -  # the content during test suite execution. RBC 20070912
 827        -  parent_keys = tuple((file_id, parent) for parent in parents)
 828        -  return self.repository.texts.add_lines(
 829        -  (file_id, self._new_revision_id), parent_keys, new_lines,
 830        -  nostore_sha=nostore_sha, random_id=self.random_revid,
 831        -  check_content=False)[0:2]
       818  +  def _add_text_to_weave(self, file_id, new_text, parents, nostore_sha):
       819  +  parent_keys = tuple([(file_id, parent) for parent in parents])
       820  +  return self.repository.texts._add_text(
       821  +  (file_id, self._new_revision_id), parent_keys, new_text,
       822  +  nostore_sha=nostore_sha, random_id=self.random_revid)[0:2]
 834   825     class RootCommitBuilder(CommitBuilder):

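Both the removed texts.add_lines(...) call and its replacement texts._add_text(...) are used the same way by the commit builder: the first two values of the result become text_sha1 and text_size, and errors.ExistingContent is raised (and caught in the hunks above) when the content matches the nostore_sha the caller supplied. A self-contained sketch of that contract, with invented names (add_text_sketch, a plain dict standing in for the text store) rather than bzrlib's actual implementation:

    import hashlib

    class ExistingContent(Exception):
        """Stand-in for bzrlib.errors.ExistingContent."""

    def add_text_sketch(store, key, parent_keys, text, nostore_sha=None):
        # Contract the call sites above rely on: return (sha1, length) of the
        # stored byte string, or raise ExistingContent when the text hashes to
        # the caller-supplied nostore_sha, i.e. nothing new needs storing.
        sha1 = hashlib.sha1(text).hexdigest()
        if nostore_sha is not None and sha1 == nostore_sha:
            raise ExistingContent()
        store[key] = (parent_keys, text)
        return sha1, len(text)

Passing the basis entry's sha1 as nostore_sha is what lets an unchanged file fall into the "No content change against a carry_over parent" branch above without writing a new text.
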
1413  1404     raise errors.BzrError('mismatched lock context %r and '
1414  1405     'write group %r.' %
1415  1406     (self.get_transaction(), self._write_group))
1416        -  self._commit_write_group()
      1407  +  result = self._commit_write_group()
1417  1408     self._write_group = None
1419  1411     def _commit_write_group(self):
1420  1412     """Template method for per-repository write group cleanup.

2427  2419     keys = tsort.topo_sort(parent_map)
2428  2420     return [None] + list(keys)
      2422  +  def pack(self, hint=None):
2431  2423     """Compress the data within the repository.
2433  2425     This operation only makes sense for some repository types. For other
2436  2428     This stub method does not require a lock, but subclasses should use
2437  2429     @needs_write_lock as this is a long running call its reasonable to
2438  2430     implicitly lock for the user.
      2432  +  :param hint: If not supplied, the whole repository is packed.
      2433  +  If supplied, the repository may use the hint parameter as a
      2434  +  hint for the parts of the repository to pack. A hint can be
      2435  +  obtained from the result of commit_write_group(). Out of
      2436  +  date hints are simply ignored, because concurrent operations
      2437  +  can obsolete them rapidly.
2441  2440     def get_transaction(self):

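The hint documented here is produced by commit_write_group() and fed back into pack(); the fetch and stream-insertion hunks further down in this diff do exactly that. A minimal sketch of the calling pattern, assuming repo is an already write-locked repository, with an invented helper name and insert callback:

    def insert_and_maybe_pack(repo, insert):
        """Hypothetical helper illustrating the hint-based packing pattern."""
        repo.start_write_group()
        try:
            insert(repo)
        except Exception:
            repo.abort_write_group()
            raise
        # commit_write_group() now returns a pack hint; handing it to pack()
        # lets the repository limit repacking to the newly written data.
        hint = repo.commit_write_group()
        if hint and repo._format.pack_compresses:
            repo.pack(hint=hint)

Because out-of-date hints are simply ignored (per the docstring above), the caller does not need to validate the hint before passing it along.
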
2844  2843     # Does this format have < O(tree_size) delta generation. Used to hint what
2845  2844     # code path for commit, amongst other things.
2846  2845     fast_deltas = None
      2846  +  # Does doing a pack operation compress data? Useful for the pack UI command
      2847  +  # (so if there is one pack, the operation can still proceed because it may
      2848  +  # help), and for fetching when data won't have come from the same
      2850  +  pack_compresses = False
2848  2852     def __str__(self):
2849  2853     return "<%s>" % self.__class__.__name__

3675  3679     cache = lru_cache.LRUCache(100)
3676  3680     cache[basis_id] = basis_tree
3677  3681     del basis_tree # We don't want to hang on to it here
3678  3683     for offset in range(0, len(revision_ids), batch_size):
3679  3684     self.target.start_write_group()
3686  3691     self.target.abort_write_group()
3689        -  self.target.commit_write_group()
      3694  +  hint = self.target.commit_write_group()
      3697  +  if hints and self.target._format.pack_compresses:
      3698  +  self.target.pack(hint=hints)
3690  3699     pb.update('Transferring revisions', len(revision_ids),
3691  3700     len(revision_ids))

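This loop fetches revisions in batches; the added lines collect the hint returned by each batch's commit_write_group() into hints and issue a single pack(hint=hints) at the end when the target format compresses on pack. A condensed sketch of that shape, with invented helper and callback names; the accumulation step between the two added lines is elided in the hunk, so hints.extend(hint) below is an assumption about how per-batch hints are merged:

    def fetch_in_batches(target, batches, insert_batch):
        """Hypothetical sketch of the batched fetch-then-pack-once pattern."""
        hints = []
        for batch in batches:
            target.start_write_group()
            try:
                insert_batch(target, batch)
            except Exception:
                target.abort_write_group()
                raise
            hint = target.commit_write_group()
            if hint:
                hints.extend(hint)  # assumption: per-batch hints are list-like
        # One pack at the end, only where packing actually compresses data.
        if hints and target._format.pack_compresses:
            target.pack(hint=hints)
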
4034  4043     # missing keys can handle suspending a write group).
4035  4044     write_group_tokens = self.target_repo.suspend_write_group()
4036  4045     return write_group_tokens, missing_keys
4037        -  self.target_repo.commit_write_group()
      4046  +  hint = self.target_repo.commit_write_group()
      4047  +  if (to_serializer != src_serializer and
      4048  +  self.target_repo._format.pack_compresses):
      4049  +  self.target_repo.pack(hint=hint)
4038  4050     return [], set()
4040  4052     def _extract_and_insert_inventories(self, substream, serializer):