        # A counter of how many files have been renamed
        self.rename_count = 0

    def __enter__(self):
        """Support Context Manager API."""
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Support Context Manager API."""
        self.finalize()
    def finalize(self):
        """Release the working tree lock, if held.

        self._new_root = new_roots[0]
        old_new_root = new_roots[0]
        # unversion the new root's directory.
        if self.final_kind(self._new_root) is None:
            file_id = self.final_file_id(old_new_root)
        else:
            file_id = self.final_file_id(self._new_root)
        if old_new_root in self._new_id:
            self.cancel_versioning(old_new_root)
        if (self.tree_file_id(self._new_root) is not None and
                self._new_root not in self._removed_id):
            self.unversion_file(self._new_root)
        if file_id is not None:
            self.version_file(file_id, self._new_root)
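        # The net effect of the block above is that the existing root
        # directory stays in place and takes over the file id chosen for the
        # requested new root, so the new root is expressed as changes to the
        # old root rather than as a wholesale replacement.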

        # Now move children of new root into old root directory.
        # Ensure all children are registered with the transaction, but don't

        return sorted(FinalPaths(self).get_paths(new_ids))

    def _inventory_altered(self):
        """Determine which trans_ids need new Inventory entries.

        A new entry is needed when anything that would be reflected by an
        inventory entry changes, including file name, file_id, parent file_id,
        file kind, and the execute bit.

        Some care is taken to return entries with real changes, not cases
        where the value is deleted and then restored to its original value,
        but some actually unchanged values may be returned.

        :returns: A list of (path, trans_id) for all items requiring an
            inventory change. Ordered by path.
        """
        changed_ids = set()
        # Find entries whose file_ids are new (or changed).
        new_file_id = set(t for t in self._new_id
                          if self._new_id[t] != self.tree_file_id(t))
        for id_set in [self._new_name, self._new_parent, new_file_id,
                       self._new_executability]:
            changed_ids.update(id_set)
        # removing implies a kind change
        changed_kind = set(self._removed_contents)
        changed_kind.intersection_update(self._new_contents)
        # Ignore entries that are already known to have changed, to keep only
        # the truly changed ones.
        changed_kind.difference_update(changed_ids)
        changed_kind = (t for t in changed_kind
                        if self.tree_kind(t) != self.final_kind(t))
        # all kind changes will alter the inventory
        changed_ids.update(changed_kind)
        # To find entries with changed parent_ids, find parents which existed,
        # but changed file_id.
        changed_file_id = set(t for t in new_file_id if t in self._removed_id)
        # Now add all their children to the set.
        for parent_trans_id in new_file_id:
            changed_ids.update(self.iter_tree_children(parent_trans_id))
        return sorted(FinalPaths(self).get_paths(changed_ids))
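    # Illustrative note, not from the original source: the list returned above
    # is path-sorted (path, trans_id) pairs, e.g. roughly
    # [('', 'new-1'), ('dir', 'new-2'), ('dir/file', 'new-3')], so parent
    # directories always precede their children.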

    def final_kind(self, trans_id):
        """Determine the final file kind, after any changes applied.

        child_pb = ui.ui_factory.nested_progress_bar()
        try:
            if precomputed_delta is None:
                child_pb.update(gettext('Apply phase'), 0, 2)
                inventory_delta = self._generate_inventory_delta()
            try:
                child_pb.update(gettext('Apply phase'), 0 + offset, 2 + offset)
                self._apply_removals(mover)
                child_pb.update(gettext('Apply phase'), 1 + offset, 2 + offset)
                modified_paths = self._apply_insertions(mover)
            except:
                mover.rollback()
                raise

        for num, trans_id in enumerate(self._removed_id):
            if (num % 10) == 0:
                child_pb.update(gettext('removing file'), num, total_entries)
            if trans_id == self._new_root:
                file_id = self._tree.get_root_id()

        final_kinds = {}
        for num, (path, trans_id) in enumerate(new_paths):
            if (num % 10) == 0:
                child_pb.update(gettext('adding file'),
                                num + len(self._removed_id), total_entries)
            file_id = new_path_file_ids[trans_id]
            if file_id is None:

        tree_paths.sort(reverse=True)
        child_pb = ui.ui_factory.nested_progress_bar()
        try:
            for num, (path, trans_id) in enumerate(tree_paths):
                # do not attempt to move root into a subdirectory of itself.
                if path == '':
                    continue
                child_pb.update(gettext('removing file'), num, len(tree_paths))
                full_path = self._tree.abspath(path)
                if trans_id in self._removed_contents:
                    delete_path = os.path.join(self._deletiondir, trans_id)

            for num, (path, trans_id) in enumerate(new_paths):
                if (num % 10) == 0:
                    child_pb.update(gettext('adding file'), num, len(new_paths))
                full_path = self._tree.abspath(path)
                if trans_id in self._needs_rename:

    def get_file_verifier(self, file_id, path=None, stat_value=None):
        trans_id = self._transform.trans_id_file_id(file_id)
        kind = self._transform._new_contents.get(trans_id)
        if kind is None:
            return self._transform._tree.get_file_verifier(file_id)
        if kind == 'file':
            fileobj = self.get_file(file_id)
            try:
                return ("SHA1", sha_file(fileobj))
            finally:
                fileobj.close()
    def get_file_sha1(self, file_id, path=None, stat_value=None):
        trans_id = self._transform.trans_id_file_id(file_id)
        kind = self._transform._new_contents.get(trans_id)

            existing_files.update(f[0] for f in files)
        for num, (tree_path, entry) in \
            enumerate(tree.inventory.iter_entries_by_dir()):
            pb.update(gettext("Building tree"), num - len(deferred_contents), total)
            if entry.parent_id is None:
                continue
            reparent = False

            new_desired_files.append((file_id,
                (trans_id, tree_path, text_sha1)))
            continue
        pb.update(gettext('Adding file contents'), count + offset, total)
        if hardlink:
            tt.create_hardlink(accelerator_tree.abspath(accelerator_path),
                               trans_id)

            contents = filtered_output_bytes(contents, filters,
                ContentFilterContext(tree_path, tree))
        tt.create_file(contents, trans_id, sha1=text_sha1)
        pb.update(gettext('Adding file contents'), count + offset, total)


def _reparent_children(tt, old_parent, new_parent):

                basis_tree = working_tree.basis_tree()
                basis_tree.lock_read()
            new_sha1 = target_tree.get_file_sha1(file_id)
            if (basis_tree.has_id(file_id) and
                    new_sha1 == basis_tree.get_file_sha1(file_id)):
                if file_id in merge_modified:
                    del merge_modified[file_id]
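                # A reverted file whose text now matches the basis tree is no
                # longer treated as modified by a merge, so its entry is
                # dropped from the merge-modified map.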

        elif c_type == 'unversioned parent':
            file_id = tt.inactive_file_id(conflict[1])
            # special-case the other tree root (move its children instead)
            if path_tree and path_tree.has_id(file_id):
                if path_tree.path2id('') == file_id:
                    # This is the root entry, skip it

def cook_conflicts(raw_conflicts, tt):
    """Generate a list of cooked conflicts, sorted by file path"""
    conflict_iter = iter_cook_conflicts(raw_conflicts, tt)
    return sorted(conflict_iter, key=conflicts.Conflict.sort_key)
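# Illustrative sketch, not part of the original source: merge and revert code
# is expected to feed the raw conflict tuples recorded against a transform
# through cook_conflicts() to get path-sorted bzrlib.conflicts.Conflict
# objects, roughly:
#
#     raw = resolve_conflicts(tt)   # helper defined elsewhere in this module
#     for cooked in cook_conflicts(raw, tt):
#         print cooked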

def iter_cook_conflicts(raw_conflicts, tt):
    fp = FinalPaths(tt)
    for conflict in raw_conflicts:
        c_type = conflict[0]
        action = conflict[1]
        modified_path = fp.get_path(conflict[2])
        modified_id = tt.final_file_id(conflict[2])
        if len(conflict) == 3:
            yield conflicts.Conflict.factory(
                c_type, action=action, path=modified_path, file_id=modified_id)
        else:
            conflicting_path = fp.get_path(conflict[3])
            conflicting_id = tt.final_file_id(conflict[3])
            yield conflicts.Conflict.factory(
                c_type, action=action, path=modified_path,
                file_id=modified_id,
                conflict_path=conflicting_path,
                conflict_file_id=conflicting_id)

class _FileMover(object):