828
893
self.create_symlink(target, trans_id)
831
def new_orphan(self, trans_id, parent_id):
832
"""Schedule an item to be orphaned.
834
When a directory is about to be removed, its children, if they are not
835
versioned are moved out of the way: they don't have a parent anymore.
837
:param trans_id: The trans_id of the existing item.
838
:param parent_id: The parent trans_id of the item.
840
raise NotImplementedError(self.new_orphan)
842
def _get_potential_orphans(self, dir_id):
843
"""Find the potential orphans in a directory.
845
A directory can't be safely deleted if there are versioned files in it.
846
If all the contained files are unversioned then they can be orphaned.
848
The 'None' return value means that the directory contains at least one
849
versioned file and should not be deleted.
851
:param dir_id: The directory trans id.
853
:return: A list of the orphan trans ids or None if at least one
854
versioned file is present.
857
# Find the potential orphans, stop if one item should be kept
858
for child_tid in self.by_parent()[dir_id]:
859
if child_tid in self._removed_contents:
860
# The child is removed as part of the transform. Since it was
861
# versioned before, it's not an orphan
863
elif self.final_file_id(child_tid) is None:
864
# The child is not versioned
865
orphans.append(child_tid)
867
# We have a versioned file here, searching for orphans is
873
def _affected_ids(self):
874
"""Return the set of transform ids affected by the transform"""
875
trans_ids = set(self._removed_id)
876
trans_ids.update(self._new_id.keys())
877
trans_ids.update(self._removed_contents)
878
trans_ids.update(self._new_contents.keys())
879
trans_ids.update(self._new_executability.keys())
880
trans_ids.update(self._new_name.keys())
881
trans_ids.update(self._new_parent.keys())
884
def _get_file_id_maps(self):
885
"""Return mapping of file_ids to trans_ids in the to and from states"""
886
trans_ids = self._affected_ids()
889
# Build up two dicts: trans_ids associated with file ids in the
890
# FROM state, vs the TO state.
891
for trans_id in trans_ids:
892
from_file_id = self.tree_file_id(trans_id)
893
if from_file_id is not None:
894
from_trans_ids[from_file_id] = trans_id
895
to_file_id = self.final_file_id(trans_id)
896
if to_file_id is not None:
897
to_trans_ids[to_file_id] = trans_id
898
return from_trans_ids, to_trans_ids
900
def _from_file_data(self, from_trans_id, from_versioned, file_id):
901
"""Get data about a file in the from (tree) state
903
Return a (name, parent, kind, executable) tuple
905
from_path = self._tree_id_paths.get(from_trans_id)
907
# get data from working tree if versioned
908
from_entry = self._tree.iter_entries_by_dir([file_id]).next()[1]
909
from_name = from_entry.name
910
from_parent = from_entry.parent_id
913
if from_path is None:
914
# File does not exist in FROM state
918
# File exists, but is not versioned. Have to use path-
920
from_name = os.path.basename(from_path)
921
tree_parent = self.get_tree_parent(from_trans_id)
922
from_parent = self.tree_file_id(tree_parent)
923
if from_path is not None:
924
from_kind, from_executable, from_stats = \
925
self._tree._comparison_data(from_entry, from_path)
928
from_executable = False
929
return from_name, from_parent, from_kind, from_executable
931
def _to_file_data(self, to_trans_id, from_trans_id, from_executable):
932
"""Get data about a file in the to (target) state
934
Return a (name, parent, kind, executable) tuple
936
to_name = self.final_name(to_trans_id)
937
to_kind = self.final_kind(to_trans_id)
938
to_parent = self.final_file_id(self.final_parent(to_trans_id))
939
if to_trans_id in self._new_executability:
940
to_executable = self._new_executability[to_trans_id]
941
elif to_trans_id == from_trans_id:
942
to_executable = from_executable
944
to_executable = False
945
return to_name, to_parent, to_kind, to_executable
947
def iter_changes(self):
948
"""Produce output in the same format as Tree.iter_changes.
950
Will produce nonsensical results if invoked while inventory/filesystem
951
conflicts (as reported by TreeTransform.find_conflicts()) are present.
953
This reads the Transform, but only reproduces changes involving a
954
file_id. Files that are not versioned in either of the FROM or TO
955
states are not reflected.
957
final_paths = FinalPaths(self)
958
from_trans_ids, to_trans_ids = self._get_file_id_maps()
960
# Now iterate through all active file_ids
961
for file_id in set(from_trans_ids.keys() + to_trans_ids.keys()):
963
from_trans_id = from_trans_ids.get(file_id)
964
# find file ids, and determine versioning state
965
if from_trans_id is None:
966
from_versioned = False
967
from_trans_id = to_trans_ids[file_id]
969
from_versioned = True
970
to_trans_id = to_trans_ids.get(file_id)
971
if to_trans_id is None:
973
to_trans_id = from_trans_id
977
from_name, from_parent, from_kind, from_executable = \
978
self._from_file_data(from_trans_id, from_versioned, file_id)
980
to_name, to_parent, to_kind, to_executable = \
981
self._to_file_data(to_trans_id, from_trans_id, from_executable)
983
if not from_versioned:
986
from_path = self._tree_id_paths.get(from_trans_id)
990
to_path = final_paths.get_path(to_trans_id)
991
if from_kind != to_kind:
993
elif to_kind in ('file', 'symlink') and (
994
to_trans_id != from_trans_id or
995
to_trans_id in self._new_contents):
997
if (not modified and from_versioned == to_versioned and
998
from_parent==to_parent and from_name == to_name and
999
from_executable == to_executable):
1001
results.append((file_id, (from_path, to_path), modified,
1002
(from_versioned, to_versioned),
1003
(from_parent, to_parent),
1004
(from_name, to_name),
1005
(from_kind, to_kind),
1006
(from_executable, to_executable)))
1007
return iter(sorted(results, key=lambda x:x[1]))
1009
def get_preview_tree(self):
1010
"""Return a tree representing the result of the transform.
1012
The tree is a snapshot, and altering the TreeTransform will invalidate
1015
return _PreviewTree(self)
1017
def commit(self, branch, message, merge_parents=None, strict=False,
1018
timestamp=None, timezone=None, committer=None, authors=None,
1019
revprops=None, revision_id=None):
1020
"""Commit the result of this TreeTransform to a branch.
1022
:param branch: The branch to commit to.
1023
:param message: The message to attach to the commit.
1024
:param merge_parents: Additional parent revision-ids specified by
1026
:param strict: If True, abort the commit if there are unversioned
1028
:param timestamp: if not None, seconds-since-epoch for the time and
1029
date. (May be a float.)
1030
:param timezone: Optional timezone for timestamp, as an offset in
1032
:param committer: Optional committer in email-id format.
1033
(e.g. "J Random Hacker <jrandom@example.com>")
1034
:param authors: Optional list of authors in email-id format.
1035
:param revprops: Optional dictionary of revision properties.
1036
:param revision_id: Optional revision id. (Specifying a revision-id
1037
may reduce performance for some non-native formats.)
1038
:return: The revision_id of the revision committed.
1040
self._check_malformed()
1042
unversioned = set(self._new_contents).difference(set(self._new_id))
1043
for trans_id in unversioned:
1044
if self.final_file_id(trans_id) is None:
1045
raise errors.StrictCommitFailed()
1047
revno, last_rev_id = branch.last_revision_info()
1048
if last_rev_id == _mod_revision.NULL_REVISION:
1049
if merge_parents is not None:
1050
raise ValueError('Cannot supply merge parents for first'
1054
parent_ids = [last_rev_id]
1055
if merge_parents is not None:
1056
parent_ids.extend(merge_parents)
1057
if self._tree.get_revision_id() != last_rev_id:
1058
raise ValueError('TreeTransform not based on branch basis: %s' %
1059
self._tree.get_revision_id())
1060
revprops = commit.Commit.update_revprops(revprops, branch, authors)
1061
builder = branch.get_commit_builder(parent_ids,
1062
timestamp=timestamp,
1064
committer=committer,
1066
revision_id=revision_id)
1067
preview = self.get_preview_tree()
1068
list(builder.record_iter_changes(preview, last_rev_id,
1069
self.iter_changes()))
1070
builder.finish_inventory()
1071
revision_id = builder.commit(message)
1072
branch.set_last_revision_info(revno + 1, revision_id)
1075
def _text_parent(self, trans_id):
1076
file_id = self.tree_file_id(trans_id)
1078
if file_id is None or self._tree.kind(file_id) != 'file':
1080
except errors.NoSuchFile:
1084
def _get_parents_texts(self, trans_id):
1085
"""Get texts for compression parents of this file."""
1086
file_id = self._text_parent(trans_id)
1089
return (self._tree.get_file_text(file_id),)
1091
def _get_parents_lines(self, trans_id):
1092
"""Get lines for compression parents of this file."""
1093
file_id = self._text_parent(trans_id)
1096
return (self._tree.get_file_lines(file_id),)
1098
def serialize(self, serializer):
1099
"""Serialize this TreeTransform.
1101
:param serializer: A Serialiser like pack.ContainerSerializer.
1103
new_name = dict((k, v.encode('utf-8')) for k, v in
1104
self._new_name.items())
1105
new_executability = dict((k, int(v)) for k, v in
1106
self._new_executability.items())
1107
tree_path_ids = dict((k.encode('utf-8'), v)
1108
for k, v in self._tree_path_ids.items())
1110
'_id_number': self._id_number,
1111
'_new_name': new_name,
1112
'_new_parent': self._new_parent,
1113
'_new_executability': new_executability,
1114
'_new_id': self._new_id,
1115
'_tree_path_ids': tree_path_ids,
1116
'_removed_id': list(self._removed_id),
1117
'_removed_contents': list(self._removed_contents),
1118
'_non_present_ids': self._non_present_ids,
1120
yield serializer.bytes_record(bencode.bencode(attribs),
1122
for trans_id, kind in self._new_contents.items():
1124
lines = osutils.chunks_to_lines(
1125
self._read_file_chunks(trans_id))
1126
parents = self._get_parents_lines(trans_id)
1127
mpdiff = multiparent.MultiParent.from_lines(lines, parents)
1128
content = ''.join(mpdiff.to_patch())
1129
if kind == 'directory':
1131
if kind == 'symlink':
1132
content = self._read_symlink_target(trans_id)
1133
yield serializer.bytes_record(content, ((trans_id, kind),))
1135
def deserialize(self, records):
1136
"""Deserialize a stored TreeTransform.
1138
:param records: An iterable of (names, content) tuples, as per
1139
pack.ContainerPushParser.
1141
names, content = records.next()
1142
attribs = bencode.bdecode(content)
1143
self._id_number = attribs['_id_number']
1144
self._new_name = dict((k, v.decode('utf-8'))
1145
for k, v in attribs['_new_name'].items())
1146
self._new_parent = attribs['_new_parent']
1147
self._new_executability = dict((k, bool(v)) for k, v in
1148
attribs['_new_executability'].items())
1149
self._new_id = attribs['_new_id']
1150
self._r_new_id = dict((v, k) for k, v in self._new_id.items())
1151
self._tree_path_ids = {}
1152
self._tree_id_paths = {}
1153
for bytepath, trans_id in attribs['_tree_path_ids'].items():
1154
path = bytepath.decode('utf-8')
1155
self._tree_path_ids[path] = trans_id
1156
self._tree_id_paths[trans_id] = path
1157
self._removed_id = set(attribs['_removed_id'])
1158
self._removed_contents = set(attribs['_removed_contents'])
1159
self._non_present_ids = attribs['_non_present_ids']
1160
for ((trans_id, kind),), content in records:
1162
mpdiff = multiparent.MultiParent.from_patch(content)
1163
lines = mpdiff.to_lines(self._get_parents_texts(trans_id))
1164
self.create_file(lines, trans_id)
1165
if kind == 'directory':
1166
self.create_directory(trans_id)
1167
if kind == 'symlink':
1168
self.create_symlink(content.decode('utf-8'), trans_id)
1171
class DiskTreeTransform(TreeTransformBase):
1172
"""Tree transform storing its contents on disk."""
1174
def __init__(self, tree, limbodir, pb=None,
1175
case_sensitive=True):
1177
:param tree: The tree that will be transformed, but not necessarily
1179
:param limbodir: A directory where new files can be stored until
1180
they are installed in their proper places
1182
:param case_sensitive: If True, the target of the transform is
1183
case sensitive, not just case preserving.
1185
TreeTransformBase.__init__(self, tree, pb, case_sensitive)
1186
self._limbodir = limbodir
1187
self._deletiondir = None
1188
# A mapping of transform ids to their limbo filename
1189
self._limbo_files = {}
1190
self._possibly_stale_limbo_files = set()
1191
# A mapping of transform ids to a set of the transform ids of children
1192
# that their limbo directory has
1193
self._limbo_children = {}
1194
# Map transform ids to maps of child filename to child transform id
1195
self._limbo_children_names = {}
1196
# List of transform ids that need to be renamed from limbo into place
1197
self._needs_rename = set()
1198
self._creation_mtime = None
1201
"""Release the working tree lock, if held, clean up limbo dir.
1203
This is required if apply has not been invoked, but can be invoked
1206
if self._tree is None:
1209
limbo_paths = self._limbo_files.values() + list(
1210
self._possibly_stale_limbo_files)
1211
limbo_paths = sorted(limbo_paths, reverse=True)
1212
for path in limbo_paths:
1216
if e.errno != errno.ENOENT:
1218
# XXX: warn? perhaps we just got interrupted at an
1219
# inconvenient moment, but perhaps files are disappearing
1222
delete_any(self._limbodir)
1224
# We don't especially care *why* the dir is immortal.
1225
raise ImmortalLimbo(self._limbodir)
1227
if self._deletiondir is not None:
1228
delete_any(self._deletiondir)
1230
raise errors.ImmortalPendingDeletion(self._deletiondir)
1232
TreeTransformBase.finalize(self)
1234
def _limbo_supports_executable(self):
1235
"""Check if the limbo path supports the executable bit."""
1236
# FIXME: Check actual file system capabilities of limbodir
1237
return osutils.supports_executable()
1239
def _limbo_name(self, trans_id):
1240
"""Generate the limbo name of a file"""
1241
limbo_name = self._limbo_files.get(trans_id)
1242
if limbo_name is None:
1243
limbo_name = self._generate_limbo_path(trans_id)
1244
self._limbo_files[trans_id] = limbo_name
1247
def _generate_limbo_path(self, trans_id):
1248
"""Generate a limbo path using the trans_id as the relative path.
1250
This is suitable as a fallback, and when the transform should not be
1251
sensitive to the path encoding of the limbo directory.
1253
self._needs_rename.add(trans_id)
1254
return pathjoin(self._limbodir, trans_id)
1256
def adjust_path(self, name, parent, trans_id):
1257
previous_parent = self._new_parent.get(trans_id)
1258
previous_name = self._new_name.get(trans_id)
1259
TreeTransformBase.adjust_path(self, name, parent, trans_id)
1260
if (trans_id in self._limbo_files and
1261
trans_id not in self._needs_rename):
1262
self._rename_in_limbo([trans_id])
1263
if previous_parent != parent:
1264
self._limbo_children[previous_parent].remove(trans_id)
1265
if previous_parent != parent or previous_name != name:
1266
del self._limbo_children_names[previous_parent][previous_name]
1268
def _rename_in_limbo(self, trans_ids):
1269
"""Fix limbo names so that the right final path is produced.
1271
This means we outsmarted ourselves-- we tried to avoid renaming
1272
these files later by creating them with their final names in their
1273
final parents. But now the previous name or parent is no longer
1274
suitable, so we have to rename them.
1276
Even for trans_ids that have no new contents, we must remove their
1277
entries from _limbo_files, because they are now stale.
1279
for trans_id in trans_ids:
1280
old_path = self._limbo_files[trans_id]
1281
self._possibly_stale_limbo_files.add(old_path)
1282
del self._limbo_files[trans_id]
1283
if trans_id not in self._new_contents:
1285
new_path = self._limbo_name(trans_id)
1286
os.rename(old_path, new_path)
1287
self._possibly_stale_limbo_files.remove(old_path)
1288
for descendant in self._limbo_descendants(trans_id):
1289
desc_path = self._limbo_files[descendant]
1290
desc_path = new_path + desc_path[len(old_path):]
1291
self._limbo_files[descendant] = desc_path
1293
def _limbo_descendants(self, trans_id):
1294
"""Return the set of trans_ids whose limbo paths descend from this."""
1295
descendants = set(self._limbo_children.get(trans_id, []))
1296
for descendant in list(descendants):
1297
descendants.update(self._limbo_descendants(descendant))
1300
def create_file(self, contents, trans_id, mode_id=None, sha1=None):
1301
"""Schedule creation of a new file.
1305
:param contents: an iterator of strings, all of which will be written
1306
to the target destination.
1307
:param trans_id: TreeTransform handle
1308
:param mode_id: If not None, force the mode of the target file to match
1309
the mode of the object referenced by mode_id.
1310
Otherwise, we will try to preserve mode bits of an existing file.
1311
:param sha1: If the sha1 of this content is already known, pass it in.
1312
We can use it to prevent future sha1 computations.
1314
name = self._limbo_name(trans_id)
1315
f = open(name, 'wb')
1317
unique_add(self._new_contents, trans_id, 'file')
1318
f.writelines(contents)
1321
self._set_mtime(name)
1322
self._set_mode(trans_id, mode_id, S_ISREG)
1323
# It is unfortunate we have to use lstat instead of fstat, but we just
1324
# used utime and chmod on the file, so we need the accurate final
1326
if sha1 is not None:
1327
self._observed_sha1s[trans_id] = (sha1, osutils.lstat(name))
1329
def _read_file_chunks(self, trans_id):
1330
cur_file = open(self._limbo_name(trans_id), 'rb')
1332
return cur_file.readlines()
1336
def _read_symlink_target(self, trans_id):
1337
return os.readlink(self._limbo_name(trans_id))
1339
def _set_mtime(self, path):
1340
"""All files that are created get the same mtime.
1342
This time is set by the first object to be created.
1344
if self._creation_mtime is None:
1345
self._creation_mtime = time.time()
1346
os.utime(path, (self._creation_mtime, self._creation_mtime))
1348
def create_hardlink(self, path, trans_id):
1349
"""Schedule creation of a hard link"""
1350
name = self._limbo_name(trans_id)
1354
if e.errno != errno.EPERM:
1356
raise errors.HardLinkNotSupported(path)
1358
unique_add(self._new_contents, trans_id, 'file')
1360
# Clean up the file, it never got registered so
1361
# TreeTransform.finalize() won't clean it up.
1365
def create_directory(self, trans_id):
1366
"""Schedule creation of a new directory.
1368
See also new_directory.
1370
os.mkdir(self._limbo_name(trans_id))
1371
unique_add(self._new_contents, trans_id, 'directory')
1373
def create_symlink(self, target, trans_id):
1374
"""Schedule creation of a new symbolic link.
1376
target is a bytestring.
1377
See also new_symlink.
1380
os.symlink(target, self._limbo_name(trans_id))
1381
unique_add(self._new_contents, trans_id, 'symlink')
1384
path = FinalPaths(self).get_path(trans_id)
1387
raise UnableCreateSymlink(path=path)
1389
def cancel_creation(self, trans_id):
1390
"""Cancel the creation of new file contents."""
1391
del self._new_contents[trans_id]
1392
if trans_id in self._observed_sha1s:
1393
del self._observed_sha1s[trans_id]
1394
children = self._limbo_children.get(trans_id)
1395
# if this is a limbo directory with children, move them before removing
1397
if children is not None:
1398
self._rename_in_limbo(children)
1399
del self._limbo_children[trans_id]
1400
del self._limbo_children_names[trans_id]
1401
delete_any(self._limbo_name(trans_id))
1403
def new_orphan(self, trans_id, parent_id):
1404
conf = self._tree.get_config_stack()
1405
handle_orphan = conf.get('bzr.transform.orphan_policy')
1406
handle_orphan(self, trans_id, parent_id)
1409
class OrphaningError(errors.BzrError):
1411
# Only bugs could lead to such exception being seen by the user
1412
internal_error = True
1413
_fmt = "Error while orphaning %s in %s directory"
1415
def __init__(self, orphan, parent):
1416
errors.BzrError.__init__(self)
1417
self.orphan = orphan
1418
self.parent = parent
1421
class OrphaningForbidden(OrphaningError):
1423
_fmt = "Policy: %s doesn't allow creating orphans."
1425
def __init__(self, policy):
1426
errors.BzrError.__init__(self)
1427
self.policy = policy
1430
def move_orphan(tt, orphan_id, parent_id):
1431
"""See TreeTransformBase.new_orphan.
1433
This creates a new orphan in the `bzr-orphans` dir at the root of the
1436
:param tt: The TreeTransform orphaning `trans_id`.
1438
:param orphan_id: The trans id that should be orphaned.
1440
:param parent_id: The orphan parent trans id.
1442
# Add the orphan dir if it doesn't exist
1443
orphan_dir_basename = 'bzr-orphans'
1444
od_id = tt.trans_id_tree_path(orphan_dir_basename)
1445
if tt.final_kind(od_id) is None:
1446
tt.create_directory(od_id)
1447
parent_path = tt._tree_id_paths[parent_id]
1448
# Find a name that doesn't exist yet in the orphan dir
1449
actual_name = tt.final_name(orphan_id)
1450
new_name = tt._available_backup_name(actual_name, od_id)
1451
tt.adjust_path(new_name, od_id, orphan_id)
1452
trace.warning('%s has been orphaned in %s'
1453
% (joinpath(parent_path, actual_name), orphan_dir_basename))
1456
def refuse_orphan(tt, orphan_id, parent_id):
1457
"""See TreeTransformBase.new_orphan.
1459
This refuses to create orphan, letting the caller handle the conflict.
1461
raise OrphaningForbidden('never')
1464
orphaning_registry = registry.Registry()
1465
orphaning_registry.register(
1466
'conflict', refuse_orphan,
1467
'Leave orphans in place and create a conflict on the directory.')
1468
orphaning_registry.register(
1469
'move', move_orphan,
1470
'Move orphans into the bzr-orphans directory.')
1471
orphaning_registry._set_default_key('conflict')
1474
opt_transform_orphan = _mod_config.RegistryOption(
1475
'bzr.transform.orphan_policy', orphaning_registry,
1476
help='Policy for orphaned files during transform operations.',
1480
class TreeTransform(DiskTreeTransform):
1481
"""Represent a tree transformation.
1483
This object is designed to support incremental generation of the transform,
1486
However, it gives optimum performance when parent directories are created
1487
before their contents. The transform is then able to put child files
1488
directly in their parent directory, avoiding later renames.
1490
It is easy to produce malformed transforms, but they are generally
1491
harmless. Attempting to apply a malformed transform will cause an
1492
exception to be raised before any modifications are made to the tree.
1494
Many kinds of malformed transforms can be corrected with the
1495
resolve_conflicts function. The remaining ones indicate programming error,
1496
such as trying to create a file with no path.
1498
Two sets of file creation methods are supplied. Convenience methods are:
1503
These are composed of the low-level methods:
1505
* create_file or create_directory or create_symlink
1509
Transform/Transaction ids
1510
-------------------------
1511
trans_ids are temporary ids assigned to all files involved in a transform.
1512
It's possible, even common, that not all files in the Tree have trans_ids.
1514
trans_ids are used because filenames and file_ids are not good enough
1515
identifiers; filenames change, and not all files have file_ids. File-ids
1516
are also associated with trans-ids, so that moving a file moves its
1519
trans_ids are only valid for the TreeTransform that generated them.
1523
Limbo is a temporary directory use to hold new versions of files.
1524
Files are added to limbo by create_file, create_directory, create_symlink,
1525
and their convenience variants (new_*). Files may be removed from limbo
1526
using cancel_creation. Files are renamed from limbo into their final
1527
location as part of TreeTransform.apply
1529
Limbo must be cleaned up, by either calling TreeTransform.apply or
1530
calling TreeTransform.finalize.
1532
Files are placed into limbo inside their parent directories, where
1533
possible. This reduces subsequent renames, and makes operations involving
1534
lots of files faster. This optimization is only possible if the parent
1535
directory is created *before* creating any of its children, so avoid
1536
creating children before parents, where possible.
1540
This temporary directory is used by _FileMover for storing files that are
1541
about to be deleted. In case of rollback, the files will be restored.
1542
FileMover does not delete files until it is sure that a rollback will not
1545
def __init__(self, tree, pb=None):
1546
"""Note: a tree_write lock is taken on the tree.
1548
Use TreeTransform.finalize() to release the lock (can be omitted if
1549
TreeTransform.apply() called).
1551
tree.lock_tree_write()
1554
limbodir = urlutils.local_path_from_url(
1555
tree._transport.abspath('limbo'))
1556
osutils.ensure_empty_directory_exists(
1558
errors.ExistingLimbo)
1559
deletiondir = urlutils.local_path_from_url(
1560
tree._transport.abspath('pending-deletion'))
1561
osutils.ensure_empty_directory_exists(
1563
errors.ExistingPendingDeletion)
1568
# Cache of realpath results, to speed up canonical_path
1569
self._realpaths = {}
1570
# Cache of relpath results, to speed up canonical_path
1572
DiskTreeTransform.__init__(self, tree, limbodir, pb,
1573
tree.case_sensitive)
1574
self._deletiondir = deletiondir
1576
def canonical_path(self, path):
1577
"""Get the canonical tree-relative path"""
1578
# don't follow final symlinks
1579
abs = self._tree.abspath(path)
1580
if abs in self._relpaths:
1581
return self._relpaths[abs]
1582
dirname, basename = os.path.split(abs)
1583
if dirname not in self._realpaths:
1584
self._realpaths[dirname] = os.path.realpath(dirname)
1585
dirname = self._realpaths[dirname]
1586
abs = pathjoin(dirname, basename)
1587
if dirname in self._relpaths:
1588
relpath = pathjoin(self._relpaths[dirname], basename)
1589
relpath = relpath.rstrip('/\\')
1591
relpath = self._tree.relpath(abs)
1592
self._relpaths[abs] = relpath
1595
def tree_kind(self, trans_id):
1596
"""Determine the file kind in the working tree.
1598
:returns: The file kind or None if the file does not exist
1600
path = self._tree_id_paths.get(trans_id)
1604
return file_kind(self._tree.abspath(path))
1605
except errors.NoSuchFile:
1608
def _set_mode(self, trans_id, mode_id, typefunc):
1609
"""Set the mode of new file contents.
1610
The mode_id is the existing file to get the mode from (often the same
1611
as trans_id). The operation is only performed if there's a mode match
1612
according to typefunc.
1617
old_path = self._tree_id_paths[mode_id]
1621
mode = os.stat(self._tree.abspath(old_path)).st_mode
1623
if e.errno in (errno.ENOENT, errno.ENOTDIR):
1624
# Either old_path doesn't exist, or the parent of the
1625
# target is not a directory (but will be one eventually)
1626
# Either way, we know it doesn't exist *right now*
1627
# See also bug #248448
1632
osutils.chmod_if_possible(self._limbo_name(trans_id), mode)
1634
def iter_tree_children(self, parent_id):
1635
"""Iterate through the entry's tree children, if any"""
1637
path = self._tree_id_paths[parent_id]
1641
children = os.listdir(self._tree.abspath(path))
1643
if not (osutils._is_error_enotdir(e)
1644
or e.errno in (errno.ENOENT, errno.ESRCH)):
1648
for child in children:
1649
childpath = joinpath(path, child)
1650
if self._tree.is_control_filename(childpath):
1652
yield self.trans_id_tree_path(childpath)
1654
def _generate_limbo_path(self, trans_id):
1655
"""Generate a limbo path using the final path if possible.
1657
This optimizes the performance of applying the tree transform by
1658
avoiding renames. These renames can be avoided only when the parent
1659
directory is already scheduled for creation.
1661
If the final path cannot be used, falls back to using the trans_id as
1664
parent = self._new_parent.get(trans_id)
1665
# if the parent directory is already in limbo (e.g. when building a
1666
# tree), choose a limbo name inside the parent, to reduce further
1668
use_direct_path = False
1669
if self._new_contents.get(parent) == 'directory':
1670
filename = self._new_name.get(trans_id)
1671
if filename is not None:
1672
if parent not in self._limbo_children:
1673
self._limbo_children[parent] = set()
1674
self._limbo_children_names[parent] = {}
1675
use_direct_path = True
1676
# the direct path can only be used if no other file has
1677
# already taken this pathname, i.e. if the name is unused, or
1678
# if it is already associated with this trans_id.
1679
elif self._case_sensitive_target:
1680
if (self._limbo_children_names[parent].get(filename)
1681
in (trans_id, None)):
1682
use_direct_path = True
1684
for l_filename, l_trans_id in\
1685
self._limbo_children_names[parent].iteritems():
1686
if l_trans_id == trans_id:
1688
if l_filename.lower() == filename.lower():
1691
use_direct_path = True
1693
if not use_direct_path:
1694
return DiskTreeTransform._generate_limbo_path(self, trans_id)
1696
limbo_name = pathjoin(self._limbo_files[parent], filename)
1697
self._limbo_children[parent].add(trans_id)
1698
self._limbo_children_names[parent][filename] = trans_id
1702
def apply(self, no_conflicts=False, precomputed_delta=None, _mover=None):
1703
"""Apply all changes to the inventory and filesystem.
1705
If filesystem or inventory conflicts are present, MalformedTransform
1708
If apply succeeds, finalize is not necessary.
1710
:param no_conflicts: if True, the caller guarantees there are no
1711
conflicts, so no check is made.
1712
:param precomputed_delta: An inventory delta to use instead of
1714
:param _mover: Supply an alternate FileMover, for testing
1716
for hook in MutableTree.hooks['pre_transform']:
1717
hook(self._tree, self)
1718
if not no_conflicts:
1719
self._check_malformed()
1720
child_pb = ui.ui_factory.nested_progress_bar()
1722
if precomputed_delta is None:
1723
child_pb.update(gettext('Apply phase'), 0, 2)
1724
inventory_delta = self._generate_inventory_delta()
1727
inventory_delta = precomputed_delta
1730
mover = _FileMover()
1734
child_pb.update(gettext('Apply phase'), 0 + offset, 2 + offset)
1735
self._apply_removals(mover)
1736
child_pb.update(gettext('Apply phase'), 1 + offset, 2 + offset)
1737
modified_paths = self._apply_insertions(mover)
1742
mover.apply_deletions()
1745
if self.final_file_id(self.root) is None:
1746
inventory_delta = [e for e in inventory_delta if e[0] != '']
1747
self._tree.apply_inventory_delta(inventory_delta)
1748
self._apply_observed_sha1s()
1751
return _TransformResults(modified_paths, self.rename_count)
1753
def _generate_inventory_delta(self):
1754
"""Generate an inventory delta for the current transform."""
1755
inventory_delta = []
1756
child_pb = ui.ui_factory.nested_progress_bar()
1757
new_paths = self._inventory_altered()
1758
total_entries = len(new_paths) + len(self._removed_id)
1760
for num, trans_id in enumerate(self._removed_id):
1762
child_pb.update(gettext('removing file'), num, total_entries)
1763
if trans_id == self._new_root:
1764
file_id = self._tree.get_root_id()
1766
file_id = self.tree_file_id(trans_id)
1767
# File-id isn't really being deleted, just moved
1768
if file_id in self._r_new_id:
1770
path = self._tree_id_paths[trans_id]
1771
inventory_delta.append((path, None, file_id, None))
1772
new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
1774
entries = self._tree.iter_entries_by_dir(
1775
new_path_file_ids.values())
1776
old_paths = dict((e.file_id, p) for p, e in entries)
1778
for num, (path, trans_id) in enumerate(new_paths):
1780
child_pb.update(gettext('adding file'),
1781
num + len(self._removed_id), total_entries)
1782
file_id = new_path_file_ids[trans_id]
1786
kind = self.final_kind(trans_id)
1788
kind = self._tree.stored_kind(file_id)
1789
parent_trans_id = self.final_parent(trans_id)
1790
parent_file_id = new_path_file_ids.get(parent_trans_id)
1791
if parent_file_id is None:
1792
parent_file_id = self.final_file_id(parent_trans_id)
1793
if trans_id in self._new_reference_revision:
1794
new_entry = inventory.TreeReference(
1796
self._new_name[trans_id],
1797
self.final_file_id(self._new_parent[trans_id]),
1798
None, self._new_reference_revision[trans_id])
1800
new_entry = inventory.make_entry(kind,
1801
self.final_name(trans_id),
1802
parent_file_id, file_id)
1803
old_path = old_paths.get(new_entry.file_id)
1804
new_executability = self._new_executability.get(trans_id)
1805
if new_executability is not None:
1806
new_entry.executable = new_executability
1807
inventory_delta.append(
1808
(old_path, path, new_entry.file_id, new_entry))
1811
return inventory_delta
1813
def _apply_removals(self, mover):
1814
"""Perform tree operations that remove directory/inventory names.
1816
That is, delete files that are to be deleted, and put any files that
1817
need renaming into limbo. This must be done in strict child-to-parent
1820
If inventory_delta is None, no inventory delta generation is performed.
1822
tree_paths = list(self._tree_path_ids.iteritems())
1823
tree_paths.sort(reverse=True)
1824
child_pb = ui.ui_factory.nested_progress_bar()
1826
for num, (path, trans_id) in enumerate(tree_paths):
1827
# do not attempt to move root into a subdirectory of itself.
1830
child_pb.update(gettext('removing file'), num, len(tree_paths))
1831
full_path = self._tree.abspath(path)
1832
if trans_id in self._removed_contents:
1833
delete_path = os.path.join(self._deletiondir, trans_id)
1834
mover.pre_delete(full_path, delete_path)
1835
elif (trans_id in self._new_name
1836
or trans_id in self._new_parent):
1838
mover.rename(full_path, self._limbo_name(trans_id))
1839
except errors.TransformRenameFailed, e:
1840
if e.errno != errno.ENOENT:
1843
self.rename_count += 1
1847
def _apply_insertions(self, mover):
1848
"""Perform tree operations that insert directory/inventory names.
1850
That is, create any files that need to be created, and restore from
1851
limbo any files that needed renaming. This must be done in strict
1852
parent-to-child order.
1854
If inventory_delta is None, no inventory delta is calculated, and
1855
no list of modified paths is returned.
1857
new_paths = self.new_paths(filesystem_only=True)
1859
new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
1861
child_pb = ui.ui_factory.nested_progress_bar()
1863
for num, (path, trans_id) in enumerate(new_paths):
1865
child_pb.update(gettext('adding file'), num, len(new_paths))
1866
full_path = self._tree.abspath(path)
1867
if trans_id in self._needs_rename:
1869
mover.rename(self._limbo_name(trans_id), full_path)
1870
except errors.TransformRenameFailed, e:
1871
# We may be renaming a dangling inventory id
1872
if e.errno != errno.ENOENT:
1875
self.rename_count += 1
1876
# TODO: if trans_id in self._observed_sha1s, we should
1877
# re-stat the final target, since ctime will be
1878
# updated by the change.
1879
if (trans_id in self._new_contents or
1880
self.path_changed(trans_id)):
1881
if trans_id in self._new_contents:
1882
modified_paths.append(full_path)
1883
if trans_id in self._new_executability:
1884
self._set_executability(path, trans_id)
1885
if trans_id in self._observed_sha1s:
1886
o_sha1, o_st_val = self._observed_sha1s[trans_id]
1887
st = osutils.lstat(full_path)
1888
self._observed_sha1s[trans_id] = (o_sha1, st)
1891
for path, trans_id in new_paths:
1892
# new_paths includes stuff like workingtree conflicts. Only the
1893
# stuff in new_contents actually comes from limbo.
1894
if trans_id in self._limbo_files:
1895
del self._limbo_files[trans_id]
1896
self._new_contents.clear()
1897
return modified_paths
1899
def _apply_observed_sha1s(self):
1900
"""After we have finished renaming everything, update observed sha1s
1902
This has to be done after self._tree.apply_inventory_delta, otherwise
1903
it doesn't know anything about the files we are updating. Also, we want
1904
to do this as late as possible, so that most entries end up cached.
1906
# TODO: this doesn't update the stat information for directories. So
1907
# the first 'bzr status' will still need to rewrite
1908
# .bzr/checkout/dirstate. However, we at least don't need to
1909
# re-read all of the files.
1910
# TODO: If the operation took a while, we could do a time.sleep(3) here
1911
# to allow the clock to tick over and ensure we won't have any
1912
# problems. (we could observe start time, and finish time, and if
1913
# it is less than eg 10% overhead, add a sleep call.)
1914
paths = FinalPaths(self)
1915
for trans_id, observed in self._observed_sha1s.iteritems():
1916
path = paths.get_path(trans_id)
1917
# We could get the file_id, but dirstate prefers to use the path
1918
# anyway, and it is 'cheaper' to determine.
1919
# file_id = self._new_id[trans_id]
1920
self._tree._observed_sha1(None, path, observed)
1923
class TransformPreview(DiskTreeTransform):
1924
"""A TreeTransform for generating preview trees.
1926
Unlike TreeTransform, this version works when the input tree is a
1927
RevisionTree, rather than a WorkingTree. As a result, it tends to ignore
1928
unversioned files in the input tree.
1931
def __init__(self, tree, pb=None, case_sensitive=True):
1933
limbodir = osutils.mkdtemp(prefix='bzr-limbo-')
1934
DiskTreeTransform.__init__(self, tree, limbodir, pb, case_sensitive)
1936
def canonical_path(self, path):
1939
def tree_kind(self, trans_id):
1940
path = self._tree_id_paths.get(trans_id)
1943
kind = self._tree.path_content_summary(path)[0]
1944
if kind == 'missing':
1948
def _set_mode(self, trans_id, mode_id, typefunc):
1949
"""Set the mode of new file contents.
1950
The mode_id is the existing file to get the mode from (often the same
1951
as trans_id). The operation is only performed if there's a mode match
1952
according to typefunc.
1954
# is it ok to ignore this? probably
1957
def iter_tree_children(self, parent_id):
1958
"""Iterate through the entry's tree children, if any"""
1960
path = self._tree_id_paths[parent_id]
1963
file_id = self.tree_file_id(parent_id)
1966
entry = self._tree.iter_entries_by_dir([file_id]).next()[1]
1967
children = getattr(entry, 'children', {})
1968
for child in children:
1969
childpath = joinpath(path, child)
1970
yield self.trans_id_tree_path(childpath)
1972
def new_orphan(self, trans_id, parent_id):
1973
raise NotImplementedError(self.new_orphan)
1976
class _PreviewTree(tree.InventoryTree):
1977
"""Partial implementation of Tree to support show_diff_trees"""
1979
def __init__(self, transform):
1980
self._transform = transform
1981
self._final_paths = FinalPaths(transform)
1982
self.__by_parent = None
1983
self._parent_ids = []
1984
self._all_children_cache = {}
1985
self._path2trans_id_cache = {}
1986
self._final_name_cache = {}
1987
self._iter_changes_cache = dict((c[0], c) for c in
1988
self._transform.iter_changes())
1990
def _content_change(self, file_id):
1991
"""Return True if the content of this file changed"""
1992
changes = self._iter_changes_cache.get(file_id)
1993
# changes[2] is true if the file content changed. See
1994
# InterTree.iter_changes.
1995
return (changes is not None and changes[2])
1997
def _get_repository(self):
1998
repo = getattr(self._transform._tree, '_repository', None)
2000
repo = self._transform._tree.branch.repository
2003
def _iter_parent_trees(self):
2004
for revision_id in self.get_parent_ids():
2006
yield self.revision_tree(revision_id)
2007
except errors.NoSuchRevisionInTree:
2008
yield self._get_repository().revision_tree(revision_id)
2010
def _get_file_revision(self, file_id, vf, tree_revision):
2011
parent_keys = [(file_id, t.get_file_revision(file_id)) for t in
2012
self._iter_parent_trees()]
2013
vf.add_lines((file_id, tree_revision), parent_keys,
2014
self.get_file_lines(file_id))
2015
repo = self._get_repository()
2016
base_vf = repo.texts
2017
if base_vf not in vf.fallback_versionedfiles:
2018
vf.fallback_versionedfiles.append(base_vf)
2019
return tree_revision
2021
def _stat_limbo_file(self, file_id=None, trans_id=None):
2022
if trans_id is None:
2023
trans_id = self._transform.trans_id_file_id(file_id)
2024
name = self._transform._limbo_name(trans_id)
2025
return os.lstat(name)
2028
def _by_parent(self):
2029
if self.__by_parent is None:
2030
self.__by_parent = self._transform.by_parent()
2031
return self.__by_parent
2033
def _comparison_data(self, entry, path):
2034
kind, size, executable, link_or_sha1 = self.path_content_summary(path)
2035
if kind == 'missing':
2039
file_id = self._transform.final_file_id(self._path2trans_id(path))
2040
executable = self.is_executable(file_id, path)
2041
return kind, executable, None
2043
def is_locked(self):
2046
def lock_read(self):
2047
# Perhaps in theory, this should lock the TreeTransform?
2054
@deprecated_method(deprecated_in((2, 5, 0)))
2055
def inventory(self):
2056
"""This Tree does not use inventory as its backing data."""
2057
raise NotImplementedError(_PreviewTree.inventory)
2060
def root_inventory(self):
2061
"""This Tree does not use inventory as its backing data."""
2062
raise NotImplementedError(_PreviewTree.root_inventory)
2064
def get_root_id(self):
2065
return self._transform.final_file_id(self._transform.root)
2067
def all_file_ids(self):
2068
tree_ids = set(self._transform._tree.all_file_ids())
2069
tree_ids.difference_update(self._transform.tree_file_id(t)
2070
for t in self._transform._removed_id)
2071
tree_ids.update(self._transform._new_id.values())
2075
return iter(self.all_file_ids())
2077
def _has_id(self, file_id, fallback_check):
2078
if file_id in self._transform._r_new_id:
2080
elif file_id in set([self._transform.tree_file_id(trans_id) for
2081
trans_id in self._transform._removed_id]):
2084
return fallback_check(file_id)
2086
def has_id(self, file_id):
2087
return self._has_id(file_id, self._transform._tree.has_id)
2089
def has_or_had_id(self, file_id):
2090
return self._has_id(file_id, self._transform._tree.has_or_had_id)
2092
def _path2trans_id(self, path):
2093
# We must not use None here, because that is a valid value to store.
2094
trans_id = self._path2trans_id_cache.get(path, object)
2095
if trans_id is not object:
2097
segments = splitpath(path)
2098
cur_parent = self._transform.root
2099
for cur_segment in segments:
2100
for child in self._all_children(cur_parent):
2101
final_name = self._final_name_cache.get(child)
2102
if final_name is None:
2103
final_name = self._transform.final_name(child)
2104
self._final_name_cache[child] = final_name
2105
if final_name == cur_segment:
2109
self._path2trans_id_cache[path] = None
2111
self._path2trans_id_cache[path] = cur_parent
2114
def path2id(self, path):
2115
if isinstance(path, list):
2118
path = osutils.pathjoin(*path)
2119
return self._transform.final_file_id(self._path2trans_id(path))
2121
def id2path(self, file_id):
2122
trans_id = self._transform.trans_id_file_id(file_id)
2124
return self._final_paths._determine_path(trans_id)
2126
raise errors.NoSuchId(self, file_id)
2128
def _all_children(self, trans_id):
2129
children = self._all_children_cache.get(trans_id)
2130
if children is not None:
2132
children = set(self._transform.iter_tree_children(trans_id))
2133
# children in the _new_parent set are provided by _by_parent.
2134
children.difference_update(self._transform._new_parent.keys())
2135
children.update(self._by_parent.get(trans_id, []))
2136
self._all_children_cache[trans_id] = children
2139
def iter_children(self, file_id):
2140
trans_id = self._transform.trans_id_file_id(file_id)
2141
for child_trans_id in self._all_children(trans_id):
2142
yield self._transform.final_file_id(child_trans_id)
2145
possible_extras = set(self._transform.trans_id_tree_path(p) for p
2146
in self._transform._tree.extras())
2147
possible_extras.update(self._transform._new_contents)
2148
possible_extras.update(self._transform._removed_id)
2149
for trans_id in possible_extras:
2150
if self._transform.final_file_id(trans_id) is None:
2151
yield self._final_paths._determine_path(trans_id)
2153
def _make_inv_entries(self, ordered_entries, specific_file_ids=None,
2154
yield_parents=False):
2155
for trans_id, parent_file_id in ordered_entries:
2156
file_id = self._transform.final_file_id(trans_id)
2159
if (specific_file_ids is not None
2160
and file_id not in specific_file_ids):
2162
kind = self._transform.final_kind(trans_id)
2164
kind = self._transform._tree.stored_kind(file_id)
2165
new_entry = inventory.make_entry(
2167
self._transform.final_name(trans_id),
2168
parent_file_id, file_id)
2169
yield new_entry, trans_id
2171
def _list_files_by_dir(self):
2172
todo = [ROOT_PARENT]
2174
while len(todo) > 0:
2176
parent_file_id = self._transform.final_file_id(parent)
2177
children = list(self._all_children(parent))
2178
paths = dict(zip(children, self._final_paths.get_paths(children)))
2179
children.sort(key=paths.get)
2180
todo.extend(reversed(children))
2181
for trans_id in children:
2182
ordered_ids.append((trans_id, parent_file_id))
2185
def iter_child_entries(self, file_id, path=None):
2186
self.id2path(file_id)
2187
trans_id = self._transform.trans_id_file_id(file_id)
2188
todo = [(child_trans_id, trans_id) for child_trans_id in
2189
self._all_children(trans_id)]
2190
for entry, trans_id in self._make_inv_entries(todo):
2193
def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
2194
# This may not be a maximally efficient implementation, but it is
2195
# reasonably straightforward. An implementation that grafts the
2196
# TreeTransform changes onto the tree's iter_entries_by_dir results
2197
# might be more efficient, but requires tricky inferences about stack
2199
ordered_ids = self._list_files_by_dir()
2200
for entry, trans_id in self._make_inv_entries(ordered_ids,
2201
specific_file_ids, yield_parents=yield_parents):
2202
yield unicode(self._final_paths.get_path(trans_id)), entry
2204
def _iter_entries_for_dir(self, dir_path):
2205
"""Return path, entry for items in a directory without recursing down."""
2206
dir_file_id = self.path2id(dir_path)
2208
for file_id in self.iter_children(dir_file_id):
2209
trans_id = self._transform.trans_id_file_id(file_id)
2210
ordered_ids.append((trans_id, file_id))
2211
for entry, trans_id in self._make_inv_entries(ordered_ids):
2212
yield unicode(self._final_paths.get_path(trans_id)), entry
2214
def list_files(self, include_root=False, from_dir=None, recursive=True):
2215
"""See WorkingTree.list_files."""
2216
# XXX This should behave like WorkingTree.list_files, but is really
2217
# more like RevisionTree.list_files.
2221
prefix = from_dir + '/'
2222
entries = self.iter_entries_by_dir()
2223
for path, entry in entries:
2224
if entry.name == '' and not include_root:
2227
if not path.startswith(prefix):
2229
path = path[len(prefix):]
2230
yield path, 'V', entry.kind, entry.file_id, entry
2232
if from_dir is None and include_root is True:
2233
root_entry = inventory.make_entry('directory', '',
2234
ROOT_PARENT, self.get_root_id())
2235
yield '', 'V', 'directory', root_entry.file_id, root_entry
2236
entries = self._iter_entries_for_dir(from_dir or '')
2237
for path, entry in entries:
2238
yield path, 'V', entry.kind, entry.file_id, entry
2240
def kind(self, file_id):
2241
trans_id = self._transform.trans_id_file_id(file_id)
2242
return self._transform.final_kind(trans_id)
2244
def stored_kind(self, file_id):
2245
trans_id = self._transform.trans_id_file_id(file_id)
2247
return self._transform._new_contents[trans_id]
2249
return self._transform._tree.stored_kind(file_id)
2251
def get_file_mtime(self, file_id, path=None):
2252
"""See Tree.get_file_mtime"""
2253
if not self._content_change(file_id):
2254
return self._transform._tree.get_file_mtime(file_id)
2255
return self._stat_limbo_file(file_id).st_mtime
2257
def _file_size(self, entry, stat_value):
2258
return self.get_file_size(entry.file_id)
2260
def get_file_size(self, file_id):
2261
"""See Tree.get_file_size"""
2262
trans_id = self._transform.trans_id_file_id(file_id)
2263
kind = self._transform.final_kind(trans_id)
2266
if trans_id in self._transform._new_contents:
2267
return self._stat_limbo_file(trans_id=trans_id).st_size
2268
if self.kind(file_id) == 'file':
2269
return self._transform._tree.get_file_size(file_id)
2273
def get_file_verifier(self, file_id, path=None, stat_value=None):
2274
trans_id = self._transform.trans_id_file_id(file_id)
2275
kind = self._transform._new_contents.get(trans_id)
2277
return self._transform._tree.get_file_verifier(file_id)
2279
fileobj = self.get_file(file_id)
2281
return ("SHA1", sha_file(fileobj))
2285
def get_file_sha1(self, file_id, path=None, stat_value=None):
2286
trans_id = self._transform.trans_id_file_id(file_id)
2287
kind = self._transform._new_contents.get(trans_id)
2289
return self._transform._tree.get_file_sha1(file_id)
2291
fileobj = self.get_file(file_id)
2293
return sha_file(fileobj)
2297
def is_executable(self, file_id, path=None):
2300
trans_id = self._transform.trans_id_file_id(file_id)
2302
return self._transform._new_executability[trans_id]
2305
return self._transform._tree.is_executable(file_id, path)
2307
if e.errno == errno.ENOENT:
2310
except errors.NoSuchId:
2313
def has_filename(self, path):
2314
trans_id = self._path2trans_id(path)
2315
if trans_id in self._transform._new_contents:
2317
elif trans_id in self._transform._removed_contents:
2320
return self._transform._tree.has_filename(path)
2322
def path_content_summary(self, path):
2323
trans_id = self._path2trans_id(path)
2324
tt = self._transform
2325
tree_path = tt._tree_id_paths.get(trans_id)
2326
kind = tt._new_contents.get(trans_id)
2328
if tree_path is None or trans_id in tt._removed_contents:
2329
return 'missing', None, None, None
2330
summary = tt._tree.path_content_summary(tree_path)
2331
kind, size, executable, link_or_sha1 = summary
2334
limbo_name = tt._limbo_name(trans_id)
2335
if trans_id in tt._new_reference_revision:
2336
kind = 'tree-reference'
2338
statval = os.lstat(limbo_name)
2339
size = statval.st_size
2340
if not tt._limbo_supports_executable():
2343
executable = statval.st_mode & S_IEXEC
2347
if kind == 'symlink':
2348
link_or_sha1 = os.readlink(limbo_name).decode(osutils._fs_enc)
2349
executable = tt._new_executability.get(trans_id, executable)
2350
return kind, size, executable, link_or_sha1
2352
def iter_changes(self, from_tree, include_unchanged=False,
2353
specific_files=None, pb=None, extra_trees=None,
2354
require_versioned=True, want_unversioned=False):
2355
"""See InterTree.iter_changes.
2357
This has a fast path that is only used when the from_tree matches
2358
the transform tree, and no fancy options are supplied.
2360
if (from_tree is not self._transform._tree or include_unchanged or
2361
specific_files or want_unversioned):
2362
return tree.InterTree(from_tree, self).iter_changes(
2363
include_unchanged=include_unchanged,
2364
specific_files=specific_files,
2366
extra_trees=extra_trees,
2367
require_versioned=require_versioned,
2368
want_unversioned=want_unversioned)
2369
if want_unversioned:
2370
raise ValueError('want_unversioned is not supported')
2371
return self._transform.iter_changes()
2373
def get_file(self, file_id, path=None):
2374
"""See Tree.get_file"""
2375
if not self._content_change(file_id):
2376
return self._transform._tree.get_file(file_id, path)
2377
trans_id = self._transform.trans_id_file_id(file_id)
2378
name = self._transform._limbo_name(trans_id)
2379
return open(name, 'rb')
2381
def get_file_with_stat(self, file_id, path=None):
2382
return self.get_file(file_id, path), None
2384
def annotate_iter(self, file_id,
2385
default_revision=_mod_revision.CURRENT_REVISION):
2386
changes = self._iter_changes_cache.get(file_id)
2390
changed_content, versioned, kind = (changes[2], changes[3],
2394
get_old = (kind[0] == 'file' and versioned[0])
2396
old_annotation = self._transform._tree.annotate_iter(file_id,
2397
default_revision=default_revision)
2401
return old_annotation
2402
if not changed_content:
2403
return old_annotation
2404
# TODO: This is doing something similar to what WT.annotate_iter is
2405
# doing, however it fails slightly because it doesn't know what
2406
# the *other* revision_id is, so it doesn't know how to give the
2407
# other as the origin for some lines, they all get
2408
# 'default_revision'
2409
# It would be nice to be able to use the new Annotator based
2410
# approach, as well.
2411
return annotate.reannotate([old_annotation],
2412
self.get_file(file_id).readlines(),
2415
def get_symlink_target(self, file_id, path=None):
2416
"""See Tree.get_symlink_target"""
2417
if not self._content_change(file_id):
2418
return self._transform._tree.get_symlink_target(file_id)
2419
trans_id = self._transform.trans_id_file_id(file_id)
2420
name = self._transform._limbo_name(trans_id)
2421
return osutils.readlink(name)
2423
def walkdirs(self, prefix=''):
2424
pending = [self._transform.root]
2425
while len(pending) > 0:
2426
parent_id = pending.pop()
2429
prefix = prefix.rstrip('/')
2430
parent_path = self._final_paths.get_path(parent_id)
2431
parent_file_id = self._transform.final_file_id(parent_id)
2432
for child_id in self._all_children(parent_id):
2433
path_from_root = self._final_paths.get_path(child_id)
2434
basename = self._transform.final_name(child_id)
2435
file_id = self._transform.final_file_id(child_id)
2436
kind = self._transform.final_kind(child_id)
2437
if kind is not None:
2438
versioned_kind = kind
2441
versioned_kind = self._transform._tree.stored_kind(file_id)
2442
if versioned_kind == 'directory':
2443
subdirs.append(child_id)
2444
children.append((path_from_root, basename, kind, None,
2445
file_id, versioned_kind))
2447
if parent_path.startswith(prefix):
2448
yield (parent_path, parent_file_id), children
2449
pending.extend(sorted(subdirs, key=self._final_paths.get_path,
2452
def get_parent_ids(self):
2453
return self._parent_ids
2455
def set_parent_ids(self, parent_ids):
2456
self._parent_ids = parent_ids
2458
def get_revision_tree(self, revision_id):
2459
return self._transform._tree.get_revision_tree(revision_id)
2462
896
def joinpath(parent, child):
2463
897
"""Join tree-relative paths, handling the tree root specially"""
2464
898
if parent is None or parent == "":
2494
928
self._known_paths[trans_id] = self._determine_path(trans_id)
2495
929
return self._known_paths[trans_id]
2497
def get_paths(self, trans_ids):
2498
return [(self.get_path(t), t) for t in trans_ids]
2502
931
def topology_sorted_ids(tree):
2503
932
"""Determine the topological order of the ids in a tree"""
2504
933
file_ids = list(tree)
2505
934
file_ids.sort(key=tree.id2path)
2509
def build_tree(tree, wt, accelerator_tree=None, hardlink=False,
2510
delta_from_tree=False):
2511
"""Create working tree for a branch, using a TreeTransform.
2513
This function should be used on empty trees, having a tree root at most.
2514
(see merge and revert functionality for working with existing trees)
2516
Existing files are handled like so:
2518
- Existing bzrdirs take precedence over creating new items. They are
2519
created as '%s.diverted' % name.
2520
- Otherwise, if the content on disk matches the content we are building,
2521
it is silently replaced.
2522
- Otherwise, conflict resolution will move the old file to 'oldname.moved'.
2524
:param tree: The tree to convert wt into a copy of
2525
:param wt: The working tree that files will be placed into
2526
:param accelerator_tree: A tree which can be used for retrieving file
2527
contents more quickly than tree itself, i.e. a workingtree. tree
2528
will be used for cases where accelerator_tree's content is different.
2529
:param hardlink: If true, hard-link files to accelerator_tree, where
2530
possible. accelerator_tree must implement abspath, i.e. be a
2532
:param delta_from_tree: If true, build_tree may use the input Tree to
2533
generate the inventory delta.
2535
wt.lock_tree_write()
2539
if accelerator_tree is not None:
2540
accelerator_tree.lock_read()
2542
return _build_tree(tree, wt, accelerator_tree, hardlink,
2545
if accelerator_tree is not None:
2546
accelerator_tree.unlock()
2553
def _build_tree(tree, wt, accelerator_tree, hardlink, delta_from_tree):
2554
"""See build_tree."""
2555
for num, _unused in enumerate(wt.all_file_ids()):
2556
if num > 0: # more than just a root
2557
raise errors.WorkingTreeAlreadyPopulated(base=wt.basedir)
937
def build_tree(tree, wt):
938
"""Create working tree for a branch, using a Transaction."""
2558
939
file_trans_id = {}
2559
top_pb = ui.ui_factory.nested_progress_bar()
940
top_pb = bzrlib.ui.ui_factory.nested_progress_bar()
2560
941
pp = ProgressPhase("Build phase", 2, top_pb)
2561
if tree.get_root_id() is not None:
2562
# This is kind of a hack: we should be altering the root
2563
# as part of the regular tree shape diff logic.
2564
# The conditional test here is to avoid doing an
2565
# expensive operation (flush) every time the root id
2566
# is set within the tree, nor setting the root and thus
2567
# marking the tree as dirty, because we use two different
2568
# idioms here: tree interfaces and inventory interfaces.
2569
if wt.get_root_id() != tree.get_root_id():
2570
wt.set_root_id(tree.get_root_id())
2572
942
tt = TreeTransform(wt)
2576
file_trans_id[wt.get_root_id()] = \
2577
tt.trans_id_tree_file_id(wt.get_root_id())
2578
pb = ui.ui_factory.nested_progress_bar()
    deferred_contents = []
    total = len(tree.all_file_ids())
    if delta_from_tree:
        precomputed_delta = []
    else:
        precomputed_delta = None
    # Check if tree inventory has content. If so, we populate
    # existing_files with the directory content. If there are no
    # entries we skip populating existing_files as it's not used.
    # This improves performance and avoids unnecessary work on large
    # directory trees. (#501307)
    existing_files = set()
    for dir, files in wt.walkdirs():
        existing_files.update(f[0] for f in files)
    for num, (tree_path, entry) in \
            enumerate(tree.iter_entries_by_dir()):
        pb.update(gettext("Building tree"), num - len(deferred_contents), total)
        if entry.parent_id is None:
            continue
        reparent = False
        file_id = entry.file_id
        if delta_from_tree:
            precomputed_delta.append((None, tree_path, file_id, entry))
        if tree_path in existing_files:
            target_path = wt.abspath(tree_path)
            kind = file_kind(target_path)
            if kind == "directory":
                try:
                    controldir.ControlDir.open(target_path)
                except errors.NotBranchError:
                    pass
            if (file_id not in divert and
                _content_match(tree, entry, file_id, kind,
                               target_path)):
                tt.delete_contents(tt.trans_id_tree_path(tree_path))
                if kind == 'directory':
                    reparent = True
        if entry.parent_id not in file_trans_id:
            raise AssertionError(repr(entry.parent_id))
        parent_id = file_trans_id[entry.parent_id]
        if entry.kind == 'file':
            # We *almost* replicate new_by_entry, so that we can defer
            # getting the file text, and get them all at once.
            trans_id = tt.create_path(entry.name, parent_id)
            file_trans_id[file_id] = trans_id
            tt.version_file(file_id, trans_id)
            executable = tree.is_executable(file_id, tree_path)
            if executable:
                tt.set_executability(executable, trans_id)
            trans_data = (trans_id, tree_path, entry.text_sha1)
            deferred_contents.append((file_id, trans_data))
        else:
            file_trans_id[file_id] = new_by_entry(tt, entry, parent_id,
                                                  tree)
        if reparent:
            new_trans_id = file_trans_id[file_id]
            old_parent = tt.trans_id_tree_path(tree_path)
            _reparent_children(tt, old_parent, new_trans_id)
    offset = num + 1 - len(deferred_contents)
    _create_files(tt, tree, deferred_contents, pb, offset,
                  accelerator_tree, hardlink)
    divert_trans = set(file_trans_id[f] for f in divert)
    resolver = lambda t, c: resolve_checkout(t, c, divert_trans)
    raw_conflicts = resolve_conflicts(tt, pass_func=resolver)
    if len(raw_conflicts) > 0:
        precomputed_delta = None
    conflicts = cook_conflicts(raw_conflicts, tt)
    for conflict in conflicts:
        trace.warning(unicode(conflict))
    try:
        wt.add_conflicts(conflicts)
    except errors.UnsupportedOperation:
        pass
    result = tt.apply(no_conflicts=True,
                      precomputed_delta=precomputed_delta)
    top_pb.finished()
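
# Editorial note (added sketch): each entry queued in deferred_contents above
# has the shape (file_id, (trans_id, tree_path, text_sha1)); _create_files
# below unpacks exactly that shape and fetches all the deferred file texts in
# one batch.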

def _create_files(tt, tree, desired_files, pb, offset, accelerator_tree,
                  hardlink):
    total = len(desired_files) + offset
    wt = tt._tree
    if accelerator_tree is None:
        new_desired_files = desired_files
    else:
        iter = accelerator_tree.iter_changes(tree, include_unchanged=True)
        unchanged = [(f, p[1]) for (f, p, c, v, d, n, k, e)
                     in iter if not (c or e[0] != e[1])]
        if accelerator_tree.supports_content_filtering():
            unchanged = [(f, p) for (f, p) in unchanged
                         if not accelerator_tree.iter_search_rules([p]).next()]
        unchanged = dict(unchanged)
        new_desired_files = []
        count = 0
        for file_id, (trans_id, tree_path, text_sha1) in desired_files:
            accelerator_path = unchanged.get(file_id)
            if accelerator_path is None:
                new_desired_files.append((file_id,
                    (trans_id, tree_path, text_sha1)))
                continue
            pb.update(gettext('Adding file contents'), count + offset, total)
            if hardlink:
                tt.create_hardlink(accelerator_tree.abspath(accelerator_path),
                                   trans_id)
            else:
                contents = accelerator_tree.get_file(file_id, accelerator_path)
                if wt.supports_content_filtering():
                    filters = wt._content_filter_stack(tree_path)
                    contents = filtered_output_bytes(contents, filters,
                        ContentFilterContext(tree_path, tree))
                try:
                    tt.create_file(contents, trans_id, sha1=text_sha1)
                finally:
                    try:
                        contents.close()
                    except AttributeError:
                        # after filtering, contents may no longer be file-like
                        pass
    for count, ((trans_id, tree_path, text_sha1), contents) in enumerate(
            tree.iter_files_bytes(new_desired_files)):
        if wt.supports_content_filtering():
            filters = wt._content_filter_stack(tree_path)
            contents = filtered_output_bytes(contents, filters,
                ContentFilterContext(tree_path, tree))
        tt.create_file(contents, trans_id, sha1=text_sha1)
        pb.update(gettext('Adding file contents'), count + offset, total)
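
# Note (added; describes the behaviour as used above, not the full API):
# tree.iter_files_bytes() is handed (file_id, identifier) pairs and yields
# each identifier back together with the corresponding file content, so the
# loop above recovers its (trans_id, tree_path, text_sha1) identifiers in
# whatever order the texts arrive.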

def _reparent_children(tt, old_parent, new_parent):
    for child in tt.iter_tree_children(old_parent):
        tt.adjust_path(tt.final_name(child), new_parent, child)


def _reparent_transform_children(tt, old_parent, new_parent):
    by_parent = tt.by_parent()
    for child in by_parent[old_parent]:
        tt.adjust_path(tt.final_name(child), new_parent, child)
    return by_parent[old_parent]

def _content_match(tree, entry, file_id, kind, target_path):
    if entry.kind != kind:
        return False
    if entry.kind == "directory":
        return True
    if entry.kind == "file":
        f = file(target_path, 'rb')
        try:
            if tree.get_file_text(file_id) == f.read():
                return True
        finally:
            f.close()
    elif entry.kind == "symlink":
        if tree.get_symlink_target(file_id) == os.readlink(target_path):
            return True
    return False

def resolve_checkout(tt, conflicts, divert):
    new_conflicts = set()
    for c_type, conflict in ((c[0], c) for c in conflicts):
        # Anything but a 'duplicate' would indicate programmer error
        if c_type != 'duplicate':
            raise AssertionError(c_type)
        # Now figure out which is new and which is old
        if tt.new_contents(conflict[1]):
            new_file = conflict[1]
            old_file = conflict[2]
        else:
            new_file = conflict[2]
            old_file = conflict[1]
        # We should only get here if the conflict wasn't completely
        # resolved
        final_parent = tt.final_parent(old_file)
        if new_file in divert:
            new_name = tt.final_name(old_file)+'.diverted'
            tt.adjust_path(new_name, final_parent, new_file)
            new_conflicts.add((c_type, 'Diverted to',
                               new_file, old_file))
        else:
            new_name = tt.final_name(old_file)+'.moved'
            tt.adjust_path(new_name, final_parent, old_file)
            new_conflicts.add((c_type, 'Moved existing file to',
                               old_file, new_file))
    return new_conflicts
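
# Illustrative sketch (added; the file names are assumptions): during checkout
# a colliding item that is an existing bzrdir keeps its place and the freshly
# built item is renamed to e.g. "plugins.diverted", while an ordinary
# colliding file is moved aside as e.g. "README.moved", matching the
# build_tree docstring above.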

def new_by_entry(tt, entry, parent_id, tree):
    """Create a new file according to its inventory entry"""
    tt.set_executability(entry.executable, trans_id)

def find_interesting(working_tree, target_tree, filenames):
    """Find the ids corresponding to specified filenames."""
    trees = (working_tree, target_tree)
    return tree.find_ids_across_trees(filenames, trees)

def change_entry(tt, file_id, working_tree, target_tree,
                 trans_id_file_id, backups, trans_id, by_parent):
    """Replace a file_id's contents with those from a target tree."""
    e_trans_id = trans_id_file_id(file_id)
    entry = target_tree.inventory[file_id]
    has_contents, contents_mod, meta_mod = _entry_changes(file_id, entry,
                                                          working_tree)
    mode_id = e_trans_id
    tt.delete_contents(e_trans_id)
    parent_trans_id = trans_id_file_id(entry.parent_id)
    backup_name = get_backup_name(entry, by_parent,
                                  parent_trans_id, tt)
    tt.adjust_path(backup_name, parent_trans_id, e_trans_id)
    tt.unversion_file(e_trans_id)
    e_trans_id = tt.create_path(entry.name, parent_trans_id)
    tt.version_file(file_id, e_trans_id)
    trans_id[file_id] = e_trans_id
    create_by_entry(tt, entry, target_tree, e_trans_id, mode_id=mode_id)
    create_entry_executability(tt, entry, e_trans_id)
    tt.set_executability(entry.executable, e_trans_id)
    if tt.final_name(e_trans_id) != entry.name:
        parent_id = tt.final_parent(e_trans_id)
        parent_file_id = tt.final_file_id(parent_id)
        if parent_file_id != entry.parent_id:
            parent_trans_id = trans_id_file_id(entry.parent_id)
            tt.adjust_path(entry.name, parent_trans_id, e_trans_id)

def get_backup_name(entry, by_parent, parent_trans_id, tt):
    """Produce a backup-style name that appears to be available"""
    def name_gen():
        counter = 1
        while True:
            yield "%s.~%d~" % (entry.name, counter)
            counter += 1
    for name in name_gen():
        if not tt.has_named_child(by_parent, parent_trans_id, name):
            return name
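
# Illustrative sketch (added): for an entry named "README" the generator above
# proposes "README.~1~", "README.~2~", ... and the first candidate that is not
# already a child of parent_trans_id is returned.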

def _entry_changes(file_id, entry, working_tree):
    """Determine in which ways the inventory entry has changed.

    Returns booleans: has_contents, content_mod, meta_mod
    has_contents means there are currently contents, but they differ
    contents_mod means contents need to be modified
    meta_mod means the metadata needs to be modified
    """
    cur_entry = working_tree.inventory[file_id]
    try:
        working_kind = working_tree.kind(file_id)
        has_contents = True
    except errors.NoSuchFile:
        has_contents = False
    if has_contents is True:
        if entry.kind != working_kind:
            contents_mod, meta_mod = True, False
        else:
            cur_entry._read_tree_state(working_tree.id2path(file_id),
                                       working_tree)
            contents_mod, meta_mod = entry.detect_changes(cur_entry)
            cur_entry._forget_tree_state()
    return has_contents, contents_mod, meta_mod

def revert(working_tree, target_tree, filenames, backups=False,
           pb=None, change_reporter=None):
    """Revert a working tree's contents to those of a target tree."""
    target_tree.lock_read()
    pb = ui.ui_factory.nested_progress_bar()
    interesting_ids = find_interesting(working_tree, target_tree, filenames)
    def interesting(file_id):
        return interesting_ids is None or file_id in interesting_ids
    tt = TreeTransform(working_tree, pb)
    pp = ProgressPhase("Revert phase", 3, pb)
    conflicts, merge_modified = _prepare_revert_transform(
        working_tree, target_tree, tt, filenames, backups, pp)
    change_reporter = delta._ChangeReporter(
        unversioned_filter=working_tree.is_ignored)
    delta.report_changes(tt.iter_changes(), change_reporter)
    merge_modified = working_tree.merge_modified()
    def trans_id_file_id(file_id):
        try:
            return trans_id[file_id]
        except KeyError:
            return tt.trans_id_tree_file_id(file_id)
    pp = ProgressPhase("Revert phase", 4, pb)
    sorted_interesting = [i for i in topology_sorted_ids(target_tree) if
                          interesting(i)]
    child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
    by_parent = tt.by_parent()
    for id_num, file_id in enumerate(sorted_interesting):
        child_pb.update("Reverting file", id_num+1,
                        len(sorted_interesting))
        if file_id not in working_tree.inventory:
            entry = target_tree.inventory[file_id]
            parent_id = trans_id_file_id(entry.parent_id)
            e_trans_id = new_by_entry(tt, entry, parent_id, target_tree)
            trans_id[file_id] = e_trans_id
        else:
            backup_this = backups
            if file_id in merge_modified:
                del merge_modified[file_id]
            change_entry(tt, file_id, working_tree, target_tree,
                         trans_id_file_id, backup_this, trans_id,
                         by_parent)
    wt_interesting = [i for i in working_tree.inventory if interesting(i)]
    child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
    for id_num, file_id in enumerate(wt_interesting):
        child_pb.update("New file check", id_num+1,
                        len(sorted_interesting))
        if file_id not in target_tree:
            trans_id = tt.trans_id_tree_file_id(file_id)
            tt.unversion_file(trans_id)
            if file_id in merge_modified:
                tt.delete_contents(trans_id)
                del merge_modified[file_id]
    child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
    raw_conflicts = resolve_conflicts(tt, child_pb)
    conflicts = cook_conflicts(raw_conflicts, tt)
    for conflict in conflicts:
        trace.warning(unicode(conflict))
    pp.next_phase()
    working_tree.set_merge_modified(merge_modified)
    target_tree.unlock()
    return conflicts
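
# Illustrative usage sketch (added; the objects and path are assumptions):
# revert selected paths in a working tree back to its basis tree.
#
#   basis = working_tree.basis_tree()
#   conflicts = revert(working_tree, basis, ['README'], backups=True)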

def _prepare_revert_transform(working_tree, target_tree, tt, filenames,
                              backups, pp, basis_tree=None,
                              merge_modified=None):
    child_pb = ui.ui_factory.nested_progress_bar()
    if merge_modified is None:
        merge_modified = working_tree.merge_modified()
    merge_modified = _alter_files(working_tree, target_tree, tt,
                                  child_pb, filenames, backups,
                                  merge_modified, basis_tree)
    child_pb = ui.ui_factory.nested_progress_bar()
    raw_conflicts = resolve_conflicts(tt, child_pb,
        lambda t, c: conflict_pass(t, c, target_tree))
    conflicts = cook_conflicts(raw_conflicts, tt)
    return conflicts, merge_modified

def _alter_files(working_tree, target_tree, tt, pb, specific_files,
                 backups, merge_modified, basis_tree=None):
    if basis_tree is not None:
        basis_tree.lock_read()
    # We ask the working_tree for its changes relative to the target, rather
    # than the target changes relative to the working tree. Because WT4 has an
    # optimizer to compare itself to a target, but no optimizer for the
    # other direction.
    change_list = working_tree.iter_changes(target_tree,
        specific_files=specific_files, pb=pb)
    if target_tree.get_root_id() is None:
        skip_root = True
    else:
        skip_root = False
    deferred_files = []
    for id_num, (file_id, path, changed_content, versioned, parent, name,
                 kind, executable) in enumerate(change_list):
        target_path, wt_path = path
        target_versioned, wt_versioned = versioned
        target_parent, wt_parent = parent
        target_name, wt_name = name
        target_kind, wt_kind = kind
        target_executable, wt_executable = executable
        if skip_root and wt_parent is None:
            continue
        trans_id = tt.trans_id_file_id(file_id)
        mode_id = None
        keep_content = False
        if wt_kind == 'file' and (backups or target_kind is None):
            wt_sha1 = working_tree.get_file_sha1(file_id)
            if merge_modified.get(file_id) != wt_sha1:
                # acquire the basis tree lazily to prevent the
                # expense of accessing it when it's not needed ?
                # (Guessing, RBC, 200702)
                if basis_tree is None:
                    basis_tree = working_tree.basis_tree()
                    basis_tree.lock_read()
                if basis_tree.has_id(file_id):
                    if wt_sha1 != basis_tree.get_file_sha1(file_id):
                        keep_content = True
                elif target_kind is None and not target_versioned:
                    keep_content = True
        if wt_kind is not None:
            if not keep_content:
                tt.delete_contents(trans_id)
            elif target_kind is not None:
                parent_trans_id = tt.trans_id_file_id(wt_parent)
                backup_name = tt._available_backup_name(
                    wt_name, parent_trans_id)
                tt.adjust_path(backup_name, parent_trans_id, trans_id)
                new_trans_id = tt.create_path(wt_name, parent_trans_id)
                if wt_versioned and target_versioned:
                    tt.unversion_file(trans_id)
                    tt.version_file(file_id, new_trans_id)
                # New contents should have the same unix perms as old
                # contents
                mode_id = trans_id
                trans_id = new_trans_id
        if target_kind in ('directory', 'tree-reference'):
            tt.create_directory(trans_id)
            if target_kind == 'tree-reference':
                revision = target_tree.get_reference_revision(file_id,
                                                              target_path)
                tt.set_tree_reference(revision, trans_id)
        elif target_kind == 'symlink':
            tt.create_symlink(target_tree.get_symlink_target(file_id),
                              trans_id)
        elif target_kind == 'file':
            deferred_files.append((file_id, (trans_id, mode_id)))
            if basis_tree is None:
                basis_tree = working_tree.basis_tree()
                basis_tree.lock_read()
            new_sha1 = target_tree.get_file_sha1(file_id)
            if (basis_tree.has_id(file_id) and
                new_sha1 == basis_tree.get_file_sha1(file_id)):
                if file_id in merge_modified:
                    del merge_modified[file_id]
            else:
                merge_modified[file_id] = new_sha1
            # preserve the execute bit when backing up
            if keep_content and wt_executable == target_executable:
                tt.set_executability(target_executable, trans_id)
        elif target_kind is not None:
            raise AssertionError(target_kind)
        if not wt_versioned and target_versioned:
            tt.version_file(file_id, trans_id)
        if wt_versioned and not target_versioned:
            tt.unversion_file(trans_id)
        if (target_name is not None and
            (wt_name != target_name or wt_parent != target_parent)):
            if target_name == '' and target_parent is None:
                parent_trans = ROOT_PARENT
            else:
                parent_trans = tt.trans_id_file_id(target_parent)
            if wt_parent is None and wt_versioned:
                tt.adjust_root_path(target_name, parent_trans)
            else:
                tt.adjust_path(target_name, parent_trans, trans_id)
        if wt_executable != target_executable and target_kind == "file":
            tt.set_executability(target_executable, trans_id)
    if working_tree.supports_content_filtering():
        for index, ((trans_id, mode_id), bytes) in enumerate(
                target_tree.iter_files_bytes(deferred_files)):
            file_id = deferred_files[index][0]
            # We're reverting a tree to the target tree so using the
            # target tree to find the file path seems the best choice
            # here IMO - Ian C 27/Oct/2009
            filter_tree_path = target_tree.id2path(file_id)
            filters = working_tree._content_filter_stack(filter_tree_path)
            bytes = filtered_output_bytes(bytes, filters,
                ContentFilterContext(filter_tree_path, working_tree))
            tt.create_file(bytes, trans_id, mode_id)
    else:
        for (trans_id, mode_id), bytes in target_tree.iter_files_bytes(
                deferred_files):
            tt.create_file(bytes, trans_id, mode_id)
    tt.fixup_new_roots()
    if basis_tree is not None:
        basis_tree.unlock()
    return merge_modified

def resolve_conflicts(tt, pb=None, pass_func=None):
    """Make many conflict-resolution attempts, but die if they fail"""
    if pass_func is None:
        pass_func = conflict_pass
    new_conflicts = set()
    pb = ui.ui_factory.nested_progress_bar()
    for n in range(10):
        pb.update(gettext('Resolution pass'), n+1, 10)
        conflicts = tt.find_conflicts()
        if len(conflicts) == 0:
            return new_conflicts
        new_conflicts.update(pass_func(tt, conflicts))
    raise MalformedTransform(conflicts=conflicts)
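
# Illustrative sketch (added): a caller can plug in its own resolution pass,
# as _build_tree does further up with resolve_checkout:
#
#   resolver = lambda t, c: resolve_checkout(t, c, divert_trans)
#   raw_conflicts = resolve_conflicts(tt, pass_func=resolver)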

def conflict_pass(tt, conflicts, path_tree=None):
    """Resolve some classes of conflicts.

    :param tt: The transform to resolve conflicts in
    :param conflicts: The conflicts to resolve
    :param path_tree: A Tree to get supplemental paths from
    """
    new_conflicts = set()
    for c_type, conflict in ((c[0], c) for c in conflicts):
        if c_type == 'duplicate id':