        del self._new_id[trans_id]
        del self._r_new_id[file_id]
    def new_paths(self, filesystem_only=False):
        """Determine the paths of all new and changed files.

        :param filesystem_only: if True, only calculate values for files
            that require renames or execute bit changes.
        """
        new_ids = set()
        if filesystem_only:
            stale_ids = self._needs_rename.difference(self._new_name)
            stale_ids.difference_update(self._new_parent)
            stale_ids.difference_update(self._new_contents)
            stale_ids.difference_update(self._new_id)
            needs_rename = self._needs_rename.difference(stale_ids)
            id_sets = (needs_rename, self._new_executability)
        else:
            id_sets = (self._new_name, self._new_parent, self._new_contents,
                       self._new_id, self._new_executability)
        for id_set in id_sets:
            new_ids.update(id_set)
        return sorted(FinalPaths(self).get_paths(new_ids))
    def _inventory_altered(self):
        """Determine which trans_ids need new Inventory entries.

        An new entry is needed when anything that would be reflected by an
        inventory entry changes, including file name, file_id, parent file_id,
        file kind, and the execute bit.

        Some care is taken to return entries with real changes, not cases
        where the value is deleted and then restored to its original value,
        but some actually unchanged values may be returned.

        :returns: A list of (path, trans_id) for all items requiring an
            inventory change. Ordered by path.
        """
        changed_ids = set()
        # Find entries whose file_ids are new (or changed).
        new_file_id = set(t for t in self._new_id
                          if self._new_id[t] != self.tree_file_id(t))
        for id_set in [self._new_name, self._new_parent, new_file_id,
                       self._new_executability]:
            changed_ids.update(id_set)
        # removing implies a kind change
        changed_kind = set(self._removed_contents)
        changed_kind.intersection_update(self._new_contents)
        # Ignore entries that are already known to have changed.
        changed_kind.difference_update(changed_ids)
        # to keep only the truly changed ones
        changed_kind = (t for t in changed_kind
                        if self.tree_kind(t) != self.final_kind(t))
        # all kind changes will alter the inventory
        changed_ids.update(changed_kind)
        # To find entries with changed parent_ids, find parents which existed,
        # but changed file_id.
        changed_file_id = set(t for t in new_file_id if t in self._removed_id)
        # Now add all their children to the set.
        for parent_trans_id in new_file_id:
            changed_ids.update(self.iter_tree_children(parent_trans_id))
        return sorted(FinalPaths(self).get_paths(changed_ids))

    def tree_kind(self, trans_id):
        """Determine the file kind in the working tree.

        Raises NoSuchFile if the file does not exist
        """
        path = self._tree_id_paths.get(trans_id)
        if path is None:
            raise NoSuchFile(None)
        try:
            return file_kind(self._tree.abspath(path))
        except OSError, e:
            if e.errno != errno.ENOENT:
                raise
            else:
                raise NoSuchFile(path)

    def final_kind(self, trans_id):
        """Determine the final file kind, after any changes applied.

        :return: None if the file does not exist/has no contents. (It is
            conceivable that a path would be created without the corresponding
            contents insertion command)
        """
        if trans_id in self._new_contents:
            return self._new_contents[trans_id]
        elif trans_id in self._removed_contents:
            return None
        else:
            return self.tree_kind(trans_id)
    def get_preview_tree(self):
        """Return a tree representing the result of the transform.

        The tree is a snapshot, and altering the TreeTransform will invalidate
        it.
        """
        return _PreviewTree(self)
    def commit(self, branch, message, merge_parents=None, strict=False,
               timestamp=None, timezone=None, committer=None, authors=None,
               revprops=None, revision_id=None):
        """Commit the result of this TreeTransform to a branch.

        :param branch: The branch to commit to.
        :param message: The message to attach to the commit.
        :param merge_parents: Additional parent revision-ids specified by
            pending merges.
        :param strict: If True, abort the commit if there are unversioned
            files.
        :param timestamp: if not None, seconds-since-epoch for the time and
            date.  (May be a float.)
        :param timezone: Optional timezone for timestamp, as an offset in
            seconds.
        :param committer: Optional committer in email-id format.
            (e.g. "J Random Hacker <jrandom@example.com>")
        :param authors: Optional list of authors in email-id format.
        :param revprops: Optional dictionary of revision properties.
        :param revision_id: Optional revision id.  (Specifying a revision-id
            may reduce performance for some non-native formats.)
        :return: The revision_id of the revision committed.
        """
        self._check_malformed()
        if strict:
            unversioned = set(self._new_contents).difference(set(self._new_id))
            for trans_id in unversioned:
                if self.final_file_id(trans_id) is None:
                    raise errors.StrictCommitFailed()

        revno, last_rev_id = branch.last_revision_info()
        if last_rev_id == _mod_revision.NULL_REVISION:
            if merge_parents is not None:
                raise ValueError('Cannot supply merge parents for first'
                                 ' commit.')
            parent_ids = []
        else:
            parent_ids = [last_rev_id]
            if merge_parents is not None:
                parent_ids.extend(merge_parents)
        if self._tree.get_revision_id() != last_rev_id:
            raise ValueError('TreeTransform not based on branch basis: %s' %
                             self._tree.get_revision_id())
        revprops = commit.Commit.update_revprops(revprops, branch, authors)
        builder = branch.get_commit_builder(parent_ids,
                                            timestamp=timestamp,
                                            timezone=timezone,
                                            committer=committer,
                                            revprops=revprops,
                                            revision_id=revision_id)
        preview = self.get_preview_tree()
        list(builder.record_iter_changes(preview, last_rev_id,
                                         self.iter_changes()))
        builder.finish_inventory()
        revision_id = builder.commit(message)
        branch.set_last_revision_info(revno + 1, revision_id)
        return revision_id
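    # Usage sketch (illustrative only, not called from this module): commit a
    # transform's result straight to a branch.  Assumes `wt` is a write-locked
    # WorkingTree sitting at its branch's basis revision; names are examples.
    #
    #   tt = TreeTransform(wt)
    #   try:
    #       tt.new_file('NEWS', tt.root, ['initial text\n'], 'news-file-id')
    #       rev_id = tt.commit(wt.branch, 'Add NEWS')
    #   finally:
    #       tt.finalize()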
    def _text_parent(self, trans_id):
        file_id = self.tree_file_id(trans_id)
        try:
            if file_id is None or self._tree.kind(file_id) != 'file':
                return None
        except errors.NoSuchFile:
            return None
        return file_id

    def _get_parents_texts(self, trans_id):
        """Get texts for compression parents of this file."""
        file_id = self._text_parent(trans_id)
        if file_id is None:
            return ()
        return (self._tree.get_file_text(file_id),)

    def _get_parents_lines(self, trans_id):
        """Get lines for compression parents of this file."""
        file_id = self._text_parent(trans_id)
        if file_id is None:
            return ()
        return (self._tree.get_file_lines(file_id),)
    def serialize(self, serializer):
        """Serialize this TreeTransform.

        :param serializer: A Serialiser like pack.ContainerSerializer.
        """
        new_name = dict((k, v.encode('utf-8')) for k, v in
                        self._new_name.items())
        new_executability = dict((k, int(v)) for k, v in
                                 self._new_executability.items())
        tree_path_ids = dict((k.encode('utf-8'), v)
                             for k, v in self._tree_path_ids.items())
        attribs = {
            '_id_number': self._id_number,
            '_new_name': new_name,
            '_new_parent': self._new_parent,
            '_new_executability': new_executability,
            '_new_id': self._new_id,
            '_tree_path_ids': tree_path_ids,
            '_removed_id': list(self._removed_id),
            '_removed_contents': list(self._removed_contents),
            '_non_present_ids': self._non_present_ids,
            }
        yield serializer.bytes_record(bencode.bencode(attribs),
                                      (('attribs',),))
        for trans_id, kind in self._new_contents.items():
            if kind == 'file':
                lines = osutils.chunks_to_lines(
                    self._read_file_chunks(trans_id))
                parents = self._get_parents_lines(trans_id)
                mpdiff = multiparent.MultiParent.from_lines(lines, parents)
                content = ''.join(mpdiff.to_patch())
            if kind == 'directory':
                content = ''
            if kind == 'symlink':
                content = self._read_symlink_target(trans_id)
            yield serializer.bytes_record(content, ((trans_id, kind),))
    def deserialize(self, records):
        """Deserialize a stored TreeTransform.

        :param records: An iterable of (names, content) tuples, as per
            pack.ContainerPushParser.
        """
        names, content = records.next()
        attribs = bencode.bdecode(content)
        self._id_number = attribs['_id_number']
        self._new_name = dict((k, v.decode('utf-8'))
                              for k, v in attribs['_new_name'].items())
        self._new_parent = attribs['_new_parent']
        self._new_executability = dict((k, bool(v)) for k, v in
                                       attribs['_new_executability'].items())
        self._new_id = attribs['_new_id']
        self._r_new_id = dict((v, k) for k, v in self._new_id.items())
        self._tree_path_ids = {}
        self._tree_id_paths = {}
        for bytepath, trans_id in attribs['_tree_path_ids'].items():
            path = bytepath.decode('utf-8')
            self._tree_path_ids[path] = trans_id
            self._tree_id_paths[trans_id] = path
        self._removed_id = set(attribs['_removed_id'])
        self._removed_contents = set(attribs['_removed_contents'])
        self._non_present_ids = attribs['_non_present_ids']
        for ((trans_id, kind),), content in records:
            if kind == 'file':
                mpdiff = multiparent.MultiParent.from_patch(content)
                lines = mpdiff.to_lines(self._get_parents_texts(trans_id))
                self.create_file(lines, trans_id)
            if kind == 'directory':
                self.create_directory(trans_id)
            if kind == 'symlink':
                self.create_symlink(content.decode('utf-8'), trans_id)
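    # Round-trip sketch (illustrative): serialize() yields one 'attribs'
    # record followed by one record per new file, directory or symlink, and
    # deserialize() rebuilds the same pending state from those records.
    # `serializer` and `records` are assumed to behave like
    # pack.ContainerSerialiser and pack.ContainerPushParser respectively.
    #
    #   for record_bytes in preview.serialize(serializer):
    #       stream.write(record_bytes)
    #   ...
    #   restored.deserialize(iter(records))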
class DiskTreeTransform(TreeTransformBase):
    """Tree transform storing its contents on disk."""

    def __init__(self, tree, limbodir, pb=None,
                 case_sensitive=True):
        """Constructor.

        :param tree: The tree that will be transformed, but not necessarily
            the output tree.
        :param limbodir: A directory where new files can be stored until
            they are installed in their proper places
        :param pb: ignored
        :param case_sensitive: If True, the target of the transform is
            case sensitive, not just case preserving.
        """
        TreeTransformBase.__init__(self, tree, pb, case_sensitive)
        self._limbodir = limbodir
        self._deletiondir = None
        # A mapping of transform ids to their limbo filename
        self._limbo_files = {}
        self._possibly_stale_limbo_files = set()
        # A mapping of transform ids to a set of the transform ids of children
        # that their limbo directory has
        self._limbo_children = {}
        # Map transform ids to maps of child filename to child transform id
        self._limbo_children_names = {}
        # List of transform ids that need to be renamed from limbo into place
        self._needs_rename = set()
        self._creation_mtime = None
    def finalize(self):
        """Release the working tree lock, if held, clean up limbo dir.

        This is required if apply has not been invoked, but can be invoked
        even after apply.
        """
        if self._tree is None:
            return
        try:
            limbo_paths = self._limbo_files.values() + list(
                self._possibly_stale_limbo_files)
            limbo_paths = sorted(limbo_paths, reverse=True)
            for path in limbo_paths:
                try:
                    delete_any(path)
                except OSError, e:
                    if e.errno != errno.ENOENT:
                        raise
                    # XXX: warn? perhaps we just got interrupted at an
                    # inconvenient moment, but perhaps files are disappearing
                    # from under us?
            try:
                delete_any(self._limbodir)
            except OSError:
                # We don't especially care *why* the dir is immortal.
                raise ImmortalLimbo(self._limbodir)
            try:
                if self._deletiondir is not None:
                    delete_any(self._deletiondir)
            except OSError:
                raise errors.ImmortalPendingDeletion(self._deletiondir)
        finally:
            TreeTransformBase.finalize(self)
    def _limbo_supports_executable(self):
        """Check if the limbo path supports the executable bit."""
        # FIXME: Check actual file system capabilities of limbodir
        return osutils.supports_executable()

    def _limbo_name(self, trans_id):
        """Generate the limbo name of a file"""
        limbo_name = self._limbo_files.get(trans_id)
        if limbo_name is None:
            limbo_name = self._generate_limbo_path(trans_id)
            self._limbo_files[trans_id] = limbo_name
        return limbo_name

    def _generate_limbo_path(self, trans_id):
        """Generate a limbo path using the trans_id as the relative path.

        This is suitable as a fallback, and when the transform should not be
        sensitive to the path encoding of the limbo directory.
        """
        self._needs_rename.add(trans_id)
        return pathjoin(self._limbodir, trans_id)
    def adjust_path(self, name, parent, trans_id):
        previous_parent = self._new_parent.get(trans_id)
        previous_name = self._new_name.get(trans_id)
        TreeTransformBase.adjust_path(self, name, parent, trans_id)
        if (trans_id in self._limbo_files and
            trans_id not in self._needs_rename):
            self._rename_in_limbo([trans_id])
            if previous_parent != parent:
                self._limbo_children[previous_parent].remove(trans_id)
            if previous_parent != parent or previous_name != name:
                del self._limbo_children_names[previous_parent][previous_name]
    def _rename_in_limbo(self, trans_ids):
        """Fix limbo names so that the right final path is produced.

        This means we outsmarted ourselves-- we tried to avoid renaming
        these files later by creating them with their final names in their
        final parents.  But now the previous name or parent is no longer
        suitable, so we have to rename them.

        Even for trans_ids that have no new contents, we must remove their
        entries from _limbo_files, because they are now stale.
        """
        for trans_id in trans_ids:
            old_path = self._limbo_files[trans_id]
            self._possibly_stale_limbo_files.add(old_path)
            del self._limbo_files[trans_id]
            if trans_id not in self._new_contents:
                continue
            new_path = self._limbo_name(trans_id)
            os.rename(old_path, new_path)
            self._possibly_stale_limbo_files.remove(old_path)
            for descendant in self._limbo_descendants(trans_id):
                desc_path = self._limbo_files[descendant]
                desc_path = new_path + desc_path[len(old_path):]
                self._limbo_files[descendant] = desc_path

    def _limbo_descendants(self, trans_id):
        """Return the set of trans_ids whose limbo paths descend from this."""
        descendants = set(self._limbo_children.get(trans_id, []))
        for descendant in list(descendants):
            descendants.update(self._limbo_descendants(descendant))
        return descendants
    def create_file(self, contents, trans_id, mode_id=None, sha1=None):
        """Schedule creation of a new file.

        :param contents: an iterator of strings, all of which will be written
            to the target destination.
        :param trans_id: TreeTransform handle
        :param mode_id: If not None, force the mode of the target file to match
            the mode of the object referenced by mode_id.
            Otherwise, we will try to preserve mode bits of an existing file.
        :param sha1: If the sha1 of this content is already known, pass it in.
            We can use it to prevent future sha1 computations.
        """
        name = self._limbo_name(trans_id)
        f = open(name, 'wb')
        try:
            unique_add(self._new_contents, trans_id, 'file')
            f.writelines(contents)
        finally:
            f.close()
        self._set_mtime(name)
        self._set_mode(trans_id, mode_id, S_ISREG)
        # It is unfortunate we have to use lstat instead of fstat, but we just
        # used utime and chmod on the file, so we need the accurate final
        # stat value.
        if sha1 is not None:
            self._observed_sha1s[trans_id] = (sha1, osutils.lstat(name))
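    # Calling-pattern sketch (illustrative): new content is written into a
    # limbo file now and only moved to its final path by apply().  The names
    # below are examples, not part of this module.
    #
    #   trans_id = tt.create_path('hello.txt', tt.root)
    #   tt.create_file(['hello world\n'], trans_id)
    #   tt.version_file('hello-file-id', trans_id)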
    def _read_file_chunks(self, trans_id):
        cur_file = open(self._limbo_name(trans_id), 'rb')
        try:
            return cur_file.readlines()
        finally:
            cur_file.close()

    def _read_symlink_target(self, trans_id):
        return os.readlink(self._limbo_name(trans_id))

    def _set_mtime(self, path):
        """All files that are created get the same mtime.

        This time is set by the first object to be created.
        """
        if self._creation_mtime is None:
            self._creation_mtime = time.time()
        os.utime(path, (self._creation_mtime, self._creation_mtime))
    def create_hardlink(self, path, trans_id):
        """Schedule creation of a hard link"""
        name = self._limbo_name(trans_id)
        try:
            os.link(path, name)
        except OSError, e:
            if e.errno != errno.EPERM:
                raise
            raise errors.HardLinkNotSupported(path)
        try:
            unique_add(self._new_contents, trans_id, 'file')
        except:
            # Clean up the file, it never got registered so
            # TreeTransform.finalize() won't clean it up.
            os.unlink(name)
            raise
    def create_directory(self, trans_id):
        """Schedule creation of a new directory.

        See also new_directory.
        """
        os.mkdir(self._limbo_name(trans_id))
        unique_add(self._new_contents, trans_id, 'directory')

    def create_symlink(self, target, trans_id):
        """Schedule creation of a new symbolic link.

        target is a bytestring.
        See also new_symlink.
        """
        if has_symlinks():
            os.symlink(target, self._limbo_name(trans_id))
            unique_add(self._new_contents, trans_id, 'symlink')
        else:
            try:
                path = FinalPaths(self).get_path(trans_id)
            except KeyError:
                path = None
            raise UnableCreateSymlink(path=path)
    def cancel_creation(self, trans_id):
        """Cancel the creation of new file contents."""
        del self._new_contents[trans_id]
        if trans_id in self._observed_sha1s:
            del self._observed_sha1s[trans_id]
        children = self._limbo_children.get(trans_id)
        # if this is a limbo directory with children, move them before removing
        # the directory
        if children is not None:
            self._rename_in_limbo(children)
            del self._limbo_children[trans_id]
            del self._limbo_children_names[trans_id]
        delete_any(self._limbo_name(trans_id))

    def new_orphan(self, trans_id, parent_id):
        conf = self._tree.get_config_stack()
        handle_orphan = conf.get('bzr.transform.orphan_policy')
        handle_orphan(self, trans_id, parent_id)
class OrphaningError(errors.BzrError):

    # Only bugs could lead to such exception being seen by the user
    internal_error = True
    _fmt = "Error while orphaning %s in %s directory"

    def __init__(self, orphan, parent):
        errors.BzrError.__init__(self)
        self.orphan = orphan
        self.parent = parent


class OrphaningForbidden(OrphaningError):

    _fmt = "Policy: %s doesn't allow creating orphans."

    def __init__(self, policy):
        errors.BzrError.__init__(self)
        self.policy = policy
def move_orphan(tt, orphan_id, parent_id):
    """See TreeTransformBase.new_orphan.

    This creates a new orphan in the `bzr-orphans` dir at the root of the
    `TreeTransform`.

    :param tt: The TreeTransform orphaning `trans_id`.

    :param orphan_id: The trans id that should be orphaned.

    :param parent_id: The orphan parent trans id.
    """
    # Add the orphan dir if it doesn't exist
    orphan_dir_basename = 'bzr-orphans'
    od_id = tt.trans_id_tree_path(orphan_dir_basename)
    if tt.final_kind(od_id) is None:
        tt.create_directory(od_id)
    parent_path = tt._tree_id_paths[parent_id]
    # Find a name that doesn't exist yet in the orphan dir
    actual_name = tt.final_name(orphan_id)
    new_name = tt._available_backup_name(actual_name, od_id)
    tt.adjust_path(new_name, od_id, orphan_id)
    trace.warning('%s has been orphaned in %s'
                  % (joinpath(parent_path, actual_name), orphan_dir_basename))


def refuse_orphan(tt, orphan_id, parent_id):
    """See TreeTransformBase.new_orphan.

    This refuses to create orphan, letting the caller handle the conflict.
    """
    raise OrphaningForbidden('never')


orphaning_registry = registry.Registry()
orphaning_registry.register(
    'conflict', refuse_orphan,
    'Leave orphans in place and create a conflict on the directory.')
orphaning_registry.register(
    'move', move_orphan,
    'Move orphans into the bzr-orphans directory.')
orphaning_registry._set_default_key('conflict')


opt_transform_orphan = _mod_config.RegistryOption(
    'bzr.transform.orphan_policy', orphaning_registry,
    help='Policy for orphaned files during transform operations.',
    invalid='warning')
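

# Illustrative sketch (not used by this module): how DiskTreeTransform
# .new_orphan resolves the orphan handler.  The option name and registry above
# are real; this helper itself is hypothetical.
def _demo_orphan_policy(tree):
    """Return the orphan handler a transform on `tree` would use (sketch)."""
    conf = tree.get_config_stack()
    # Resolves to move_orphan or refuse_orphan via orphaning_registry;
    # 'conflict' (refuse_orphan) is the default key.
    return conf.get('bzr.transform.orphan_policy')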


class TreeTransform(DiskTreeTransform):
    """Represent a tree transformation.

    This object is designed to support incremental generation of the transform,
    in any order.
    """
    def __init__(self, tree, pb=None):
        tree.lock_tree_write()
        try:
            limbodir = urlutils.local_path_from_url(
                tree._transport.abspath('limbo'))
            osutils.ensure_empty_directory_exists(
                limbodir,
                errors.ExistingLimbo)
            deletiondir = urlutils.local_path_from_url(
                tree._transport.abspath('pending-deletion'))
            osutils.ensure_empty_directory_exists(
                deletiondir,
                errors.ExistingPendingDeletion)
        except:
            tree.unlock()
            raise

        # Cache of realpath results, to speed up canonical_path
        self._realpaths = {}
        # Cache of relpath results, to speed up canonical_path
        self._relpaths = {}
        DiskTreeTransform.__init__(self, tree, limbodir, pb,
                                   tree.case_sensitive)
        self._deletiondir = deletiondir
    def canonical_path(self, path):
        """Get the canonical tree-relative path"""
        # don't follow final symlinks
        abs = self._tree.abspath(path)
        if abs in self._relpaths:
            return self._relpaths[abs]
        dirname, basename = os.path.split(abs)
        if dirname not in self._realpaths:
            self._realpaths[dirname] = os.path.realpath(dirname)
        dirname = self._realpaths[dirname]
        abs = pathjoin(dirname, basename)
        if dirname in self._relpaths:
            relpath = pathjoin(self._relpaths[dirname], basename)
            relpath = relpath.rstrip('/\\')
        else:
            relpath = self._tree.relpath(abs)
        self._relpaths[abs] = relpath
        return relpath
    def tree_kind(self, trans_id):
        """Determine the file kind in the working tree.

        :returns: The file kind or None if the file does not exist
        """
        path = self._tree_id_paths.get(trans_id)
        if path is None:
            return None
        try:
            return file_kind(self._tree.abspath(path))
        except errors.NoSuchFile:
            return None
    def _set_mode(self, trans_id, mode_id, typefunc):
        """Set the mode of new file contents.
        The mode_id is the existing file to get the mode from (often the same
        as trans_id).  The operation is only performed if there's a mode match
        according to typefunc.
        """
        if mode_id is None:
            mode_id = trans_id
        try:
            old_path = self._tree_id_paths[mode_id]
        except KeyError:
            return
        try:
            mode = os.stat(self._tree.abspath(old_path)).st_mode
        except OSError, e:
            if e.errno in (errno.ENOENT, errno.ENOTDIR):
                # Either old_path doesn't exist, or the parent of the
                # target is not a directory (but will be one eventually)
                # Either way, we know it doesn't exist *right now*
                # See also bug #248448
                return
            else:
                raise
        if typefunc(mode):
            osutils.chmod_if_possible(self._limbo_name(trans_id), mode)
    def iter_tree_children(self, parent_id):
        """Iterate through the entry's tree children, if any"""
        try:
            path = self._tree_id_paths[parent_id]
        except KeyError:
            return
        try:
            children = os.listdir(self._tree.abspath(path))
        except OSError, e:
            if not (osutils._is_error_enotdir(e)
                    or e.errno in (errno.ENOENT, errno.ESRCH)):
                raise
            return

        for child in children:
            childpath = joinpath(path, child)
            if self._tree.is_control_filename(childpath):
                continue
            yield self.trans_id_tree_path(childpath)
    def _generate_limbo_path(self, trans_id):
        """Generate a limbo path using the final path if possible.

        This optimizes the performance of applying the tree transform by
        avoiding renames.  These renames can be avoided only when the parent
        directory is already scheduled for creation.

        If the final path cannot be used, falls back to using the trans_id as
        the relpath.
        """
        parent = self._new_parent.get(trans_id)
        # if the parent directory is already in limbo (e.g. when building a
        # tree), choose a limbo name inside the parent, to reduce further
        # renames.
        use_direct_path = False
        if self._new_contents.get(parent) == 'directory':
            filename = self._new_name.get(trans_id)
            if filename is not None:
                if parent not in self._limbo_children:
                    self._limbo_children[parent] = set()
                    self._limbo_children_names[parent] = {}
                    use_direct_path = True
                # the direct path can only be used if no other file has
                # already taken this pathname, i.e. if the name is unused, or
                # if it is already associated with this trans_id.
                elif self._case_sensitive_target:
                    if (self._limbo_children_names[parent].get(filename)
                        in (trans_id, None)):
                        use_direct_path = True
                else:
                    for l_filename, l_trans_id in\
                        self._limbo_children_names[parent].iteritems():
                        if l_trans_id == trans_id:
                            continue
                        if l_filename.lower() == filename.lower():
                            break
                    else:
                        use_direct_path = True
        if not use_direct_path:
            return DiskTreeTransform._generate_limbo_path(self, trans_id)

        limbo_name = pathjoin(self._limbo_files[parent], filename)
        self._limbo_children[parent].add(trans_id)
        self._limbo_children_names[parent][filename] = trans_id
        return limbo_name
    def apply(self, no_conflicts=False, precomputed_delta=None, _mover=None):
        """Apply all changes to the inventory and filesystem.

        If filesystem or inventory conflicts are present, MalformedTransform
        will be raised.
        """
        if not no_conflicts:
            self._check_malformed()
        child_pb = ui.ui_factory.nested_progress_bar()
        try:
            if precomputed_delta is None:
                inventory_delta = self._generate_inventory_delta()
            else:
                inventory_delta = precomputed_delta
            if _mover is None:
                mover = _FileMover()
            else:
                mover = _mover
            try:
                self._apply_removals(mover)
                modified_paths = self._apply_insertions(mover)
            except:
                mover.rollback()
                raise
            else:
                mover.apply_deletions()
        finally:
            child_pb.finished()
        if self.final_file_id(self.root) is None:
            inventory_delta = [e for e in inventory_delta if e[0] != '']
        self._tree.apply_inventory_delta(inventory_delta)
        self._apply_observed_sha1s()
        self._done = True
        self.finalize()
        return _TransformResults(modified_paths, self.rename_count)
1753
def _generate_inventory_delta(self):
1754
"""Generate an inventory delta for the current transform."""
1755
inventory_delta = []
1756
child_pb = ui.ui_factory.nested_progress_bar()
1757
new_paths = self._inventory_altered()
1758
total_entries = len(new_paths) + len(self._removed_id)
1760
for num, trans_id in enumerate(self._removed_id):
1762
child_pb.update(gettext('removing file'), num, total_entries)
1763
if trans_id == self._new_root:
1764
file_id = self._tree.get_root_id()
1766
file_id = self.tree_file_id(trans_id)
1767
# File-id isn't really being deleted, just moved
1768
if file_id in self._r_new_id:
1770
path = self._tree_id_paths[trans_id]
1771
inventory_delta.append((path, None, file_id, None))
1772
new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
1774
entries = self._tree.iter_entries_by_dir(
1775
new_path_file_ids.values())
1776
old_paths = dict((e.file_id, p) for p, e in entries)
1778
for num, (path, trans_id) in enumerate(new_paths):
1780
child_pb.update(gettext('adding file'),
1781
num + len(self._removed_id), total_entries)
1782
file_id = new_path_file_ids[trans_id]
1786
kind = self.final_kind(trans_id)
1788
kind = self._tree.stored_kind(file_id)
1789
parent_trans_id = self.final_parent(trans_id)
1790
parent_file_id = new_path_file_ids.get(parent_trans_id)
1791
if parent_file_id is None:
1792
parent_file_id = self.final_file_id(parent_trans_id)
1793
if trans_id in self._new_reference_revision:
1794
new_entry = inventory.TreeReference(
1796
self._new_name[trans_id],
1797
self.final_file_id(self._new_parent[trans_id]),
1798
None, self._new_reference_revision[trans_id])
1800
new_entry = inventory.make_entry(kind,
1801
self.final_name(trans_id),
1802
parent_file_id, file_id)
1803
old_path = old_paths.get(new_entry.file_id)
1804
new_executability = self._new_executability.get(trans_id)
1805
if new_executability is not None:
1806
new_entry.executable = new_executability
1807
inventory_delta.append(
1808
(old_path, path, new_entry.file_id, new_entry))
1811
return inventory_delta
1813
def _apply_removals(self, mover):
1198
def _apply_removals(self, inv, inventory_delta, mover):
1814
1199
"""Perform tree operations that remove directory/inventory names.
1816
1201
That is, delete files that are to be deleted, and put any files that
1817
1202
need renaming into limbo. This must be done in strict child-to-parent
1820
If inventory_delta is None, no inventory delta generation is performed.
1822
1205
tree_paths = list(self._tree_path_ids.iteritems())
1823
1206
tree_paths.sort(reverse=True)
1824
child_pb = ui.ui_factory.nested_progress_bar()
1207
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1826
for num, (path, trans_id) in enumerate(tree_paths):
1827
# do not attempt to move root into a subdirectory of itself.
1830
child_pb.update(gettext('removing file'), num, len(tree_paths))
1209
for num, data in enumerate(tree_paths):
1210
path, trans_id = data
1211
child_pb.update('removing file', num, len(tree_paths))
1831
1212
full_path = self._tree.abspath(path)
1832
1213
if trans_id in self._removed_contents:
1833
delete_path = os.path.join(self._deletiondir, trans_id)
1834
mover.pre_delete(full_path, delete_path)
1835
elif (trans_id in self._new_name
1836
or trans_id in self._new_parent):
1214
mover.pre_delete(full_path, os.path.join(self._deletiondir,
1216
elif trans_id in self._new_name or trans_id in \
1838
1219
mover.rename(full_path, self._limbo_name(trans_id))
1839
except errors.TransformRenameFailed, e:
1840
1221
if e.errno != errno.ENOENT:
1843
1224
self.rename_count += 1
1225
if trans_id in self._removed_id:
1226
if trans_id == self._new_root:
1227
file_id = self._tree.get_root_id()
1229
file_id = self.tree_file_id(trans_id)
1230
if file_id is not None:
1231
inventory_delta.append((path, None, file_id, None))
1845
1233
child_pb.finished()
1847
def _apply_insertions(self, mover):
1235
def _apply_insertions(self, inv, inventory_delta, mover):
1848
1236
"""Perform tree operations that insert directory/inventory names.
1850
1238
That is, create any files that need to be created, and restore from
1851
1239
limbo any files that needed renaming. This must be done in strict
1852
1240
parent-to-child order.
1854
If inventory_delta is None, no inventory delta is calculated, and
1855
no list of modified paths is returned.
1857
new_paths = self.new_paths(filesystem_only=True)
1242
new_paths = self.new_paths()
1858
1243
modified_paths = []
1859
new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
1861
child_pb = ui.ui_factory.nested_progress_bar()
1244
child_pb = bzrlib.ui.ui_factory.nested_progress_bar()
1863
1247
for num, (path, trans_id) in enumerate(new_paths):
1865
child_pb.update(gettext('adding file'), num, len(new_paths))
1866
full_path = self._tree.abspath(path)
1867
if trans_id in self._needs_rename:
1869
mover.rename(self._limbo_name(trans_id), full_path)
1870
except errors.TransformRenameFailed, e:
1871
# We may be renaming a dangling inventory id
1872
if e.errno != errno.ENOENT:
1875
self.rename_count += 1
1876
# TODO: if trans_id in self._observed_sha1s, we should
1877
# re-stat the final target, since ctime will be
1878
# updated by the change.
1879
if (trans_id in self._new_contents or
1880
self.path_changed(trans_id)):
1249
child_pb.update('adding file', num, len(new_paths))
1251
kind = self._new_contents[trans_id]
1253
kind = contents = None
1254
if trans_id in self._new_contents or \
1255
self.path_changed(trans_id):
1256
full_path = self._tree.abspath(path)
1257
if trans_id in self._needs_rename:
1259
mover.rename(self._limbo_name(trans_id), full_path)
1261
# We may be renaming a dangling inventory id
1262
if e.errno != errno.ENOENT:
1265
self.rename_count += 1
1881
1266
if trans_id in self._new_contents:
1882
1267
modified_paths.append(full_path)
1268
completed_new.append(trans_id)
1270
if trans_id in self._new_id:
1272
kind = file_kind(self._tree.abspath(path))
1273
if trans_id in self._new_reference_revision:
1274
new_entry = inventory.TreeReference(
1275
self._new_id[trans_id],
1276
self._new_name[trans_id],
1277
self.final_file_id(self._new_parent[trans_id]),
1278
None, self._new_reference_revision[trans_id])
1280
new_entry = inventory.make_entry(kind,
1281
self.final_name(trans_id),
1282
self.final_file_id(self.final_parent(trans_id)),
1283
self._new_id[trans_id])
1285
if trans_id in self._new_name or trans_id in\
1286
self._new_parent or\
1287
trans_id in self._new_executability:
1288
file_id = self.final_file_id(trans_id)
1289
if file_id is not None:
1290
entry = inv[file_id]
1291
new_entry = entry.copy()
1293
if trans_id in self._new_name or trans_id in\
1295
if new_entry is not None:
1296
new_entry.name = self.final_name(trans_id)
1297
parent = self.final_parent(trans_id)
1298
parent_id = self.final_file_id(parent)
1299
new_entry.parent_id = parent_id
1883
1301
if trans_id in self._new_executability:
1884
self._set_executability(path, trans_id)
1885
if trans_id in self._observed_sha1s:
1886
o_sha1, o_st_val = self._observed_sha1s[trans_id]
1887
st = osutils.lstat(full_path)
1888
self._observed_sha1s[trans_id] = (o_sha1, st)
1302
self._set_executability(path, new_entry, trans_id)
1303
if new_entry is not None:
1304
if new_entry.file_id in inv:
1305
old_path = inv.id2path(new_entry.file_id)
1308
inventory_delta.append((old_path, path,
1890
1312
child_pb.finished()
1891
for path, trans_id in new_paths:
1892
# new_paths includes stuff like workingtree conflicts. Only the
1893
# stuff in new_contents actually comes from limbo.
1894
if trans_id in self._limbo_files:
1895
del self._limbo_files[trans_id]
1896
self._new_contents.clear()
1313
for trans_id in completed_new:
1314
del self._new_contents[trans_id]
1897
1315
return modified_paths
    def _apply_observed_sha1s(self):
        """After we have finished renaming everything, update observed sha1s

        This has to be done after self._tree.apply_inventory_delta, otherwise
        it doesn't know anything about the files we are updating. Also, we want
        to do this as late as possible, so that most entries end up cached.
        """
        # TODO: this doesn't update the stat information for directories. So
        #       the first 'bzr status' will still need to rewrite
        #       .bzr/checkout/dirstate. However, we at least don't need to
        #       re-read all of the files.
        # TODO: If the operation took a while, we could do a time.sleep(3) here
        #       to allow the clock to tick over and ensure we won't have any
        #       problems. (we could observe start time, and finish time, and if
        #       it is less than eg 10% overhead, add a sleep call.)
        paths = FinalPaths(self)
        for trans_id, observed in self._observed_sha1s.iteritems():
            path = paths.get_path(trans_id)
            # We could get the file_id, but dirstate prefers to use the path
            # anyway, and it is 'cheaper' to determine.
            # file_id = self._new_id[trans_id]
            self._tree._observed_sha1(None, path, observed)
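    # Sketch (illustrative): the (sha1, stat) pairs recorded by create_file()
    # are handed to the dirstate here, so the next `bzr status` does not need
    # to re-read and re-hash files this transform just wrote.
    #
    #   tt.create_file(chunks, trans_id, sha1=osutils.sha_strings(chunks))
    #   tt.apply()   # _apply_observed_sha1s() then primes the stat/sha1 cache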


class TransformPreview(DiskTreeTransform):
    """A TreeTransform for generating preview trees.

    Unlike TreeTransform, this version works when the input tree is a
    """

    def iter_tree_children(self, parent_id):
        """Iterate through the entry's tree children, if any"""
        try:
            path = self._tree_id_paths[parent_id]
        except KeyError:
            return
        file_id = self.tree_file_id(parent_id)
        if file_id is None:
            return
        entry = self._tree.iter_entries_by_dir([file_id]).next()[1]
        children = getattr(entry, 'children', {})
        for child in children:
            childpath = joinpath(path, child)
            yield self.trans_id_tree_path(childpath)

    def new_orphan(self, trans_id, parent_id):
        raise NotImplementedError(self.new_orphan)
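    # Preview sketch (illustrative): a TransformPreview never touches the
    # source tree, so its result can be diffed without applying anything.
    # Assumes `basis` is a read-locked tree containing 'news-file-id' and that
    # show_diff_trees comes from bzrlib.diff.
    #
    #   preview = TransformPreview(basis)
    #   trans_id = preview.trans_id_file_id('news-file-id')
    #   preview.delete_contents(trans_id)
    #   preview.create_file(['replacement text\n'], trans_id)
    #   show_diff_trees(basis, preview.get_preview_tree(), sys.stdout)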


class _PreviewTree(tree.InventoryTree):
    """Partial implementation of Tree to support show_diff_trees"""

    def __init__(self, transform):
        self._transform = transform
        self._final_paths = FinalPaths(transform)
        self.__by_parent = None
        self._parent_ids = []
        self._all_children_cache = {}
        self._path2trans_id_cache = {}
        self._final_name_cache = {}
        self._iter_changes_cache = dict((c[0], c) for c in
                                        self._transform.iter_changes())
    def _content_change(self, file_id):
        """Return True if the content of this file changed"""
        changes = self._iter_changes_cache.get(file_id)
        # changes[2] is true if the file content changed.  See
        # InterTree.iter_changes.
        return (changes is not None and changes[2])

    def _get_repository(self):
        repo = getattr(self._transform._tree, '_repository', None)
        if repo is None:
            repo = self._transform._tree.branch.repository
        return repo

    def _iter_parent_trees(self):
        for revision_id in self.get_parent_ids():
            try:
                yield self.revision_tree(revision_id)
            except errors.NoSuchRevisionInTree:
                yield self._get_repository().revision_tree(revision_id)
2010
def _get_file_revision(self, file_id, vf, tree_revision):
2011
parent_keys = [(file_id, t.get_file_revision(file_id)) for t in
2012
self._iter_parent_trees()]
2013
vf.add_lines((file_id, tree_revision), parent_keys,
2014
self.get_file_lines(file_id))
2015
repo = self._get_repository()
2016
base_vf = repo.texts
2017
if base_vf not in vf.fallback_versionedfiles:
2018
vf.fallback_versionedfiles.append(base_vf)
2019
return tree_revision
2021
def _stat_limbo_file(self, file_id=None, trans_id=None):
2022
if trans_id is None:
2023
trans_id = self._transform.trans_id_file_id(file_id)
2024
name = self._transform._limbo_name(trans_id)
2025
return os.lstat(name)
2028
def _by_parent(self):
2029
if self.__by_parent is None:
2030
self.__by_parent = self._transform.by_parent()
2031
return self.__by_parent
2033
def _comparison_data(self, entry, path):
2034
kind, size, executable, link_or_sha1 = self.path_content_summary(path)
2035
if kind == 'missing':
2039
file_id = self._transform.final_file_id(self._path2trans_id(path))
2040
executable = self.is_executable(file_id, path)
2041
return kind, executable, None
2043
def is_locked(self):
2046
1368
def lock_read(self):
2047
1369
# Perhaps in theory, this should lock the TreeTransform?
2050
1372
def unlock(self):
2054
@deprecated_method(deprecated_in((2, 5, 0)))
2055
def inventory(self):
2056
"""This Tree does not use inventory as its backing data."""
2057
raise NotImplementedError(_PreviewTree.inventory)
2060
def root_inventory(self):
2061
"""This Tree does not use inventory as its backing data."""
2062
raise NotImplementedError(_PreviewTree.root_inventory)
2064
def get_root_id(self):
2065
return self._transform.final_file_id(self._transform.root)
2067
def all_file_ids(self):
2068
tree_ids = set(self._transform._tree.all_file_ids())
2069
tree_ids.difference_update(self._transform.tree_file_id(t)
2070
for t in self._transform._removed_id)
2071
tree_ids.update(self._transform._new_id.values())
2075
return iter(self.all_file_ids())
2077
def _has_id(self, file_id, fallback_check):
2078
if file_id in self._transform._r_new_id:
2080
elif file_id in set([self._transform.tree_file_id(trans_id) for
2081
trans_id in self._transform._removed_id]):
2084
return fallback_check(file_id)
2086
def has_id(self, file_id):
2087
return self._has_id(file_id, self._transform._tree.has_id)
2089
def has_or_had_id(self, file_id):
2090
return self._has_id(file_id, self._transform._tree.has_or_had_id)
2092
def _path2trans_id(self, path):
2093
# We must not use None here, because that is a valid value to store.
2094
trans_id = self._path2trans_id_cache.get(path, object)
2095
if trans_id is not object:
2097
segments = splitpath(path)
2098
cur_parent = self._transform.root
2099
for cur_segment in segments:
2100
for child in self._all_children(cur_parent):
2101
final_name = self._final_name_cache.get(child)
2102
if final_name is None:
2103
final_name = self._transform.final_name(child)
2104
self._final_name_cache[child] = final_name
2105
if final_name == cur_segment:
2109
self._path2trans_id_cache[path] = None
2111
self._path2trans_id_cache[path] = cur_parent
2114
def path2id(self, path):
2115
if isinstance(path, list):
2118
path = osutils.pathjoin(*path)
2119
return self._transform.final_file_id(self._path2trans_id(path))
2121
def id2path(self, file_id):
2122
trans_id = self._transform.trans_id_file_id(file_id)
2124
return self._final_paths._determine_path(trans_id)
2126
raise errors.NoSuchId(self, file_id)
2128
def _all_children(self, trans_id):
2129
children = self._all_children_cache.get(trans_id)
2130
if children is not None:
2132
children = set(self._transform.iter_tree_children(trans_id))
2133
# children in the _new_parent set are provided by _by_parent.
2134
children.difference_update(self._transform._new_parent.keys())
2135
children.update(self._by_parent.get(trans_id, []))
2136
self._all_children_cache[trans_id] = children
2139
def iter_children(self, file_id):
2140
trans_id = self._transform.trans_id_file_id(file_id)
2141
for child_trans_id in self._all_children(trans_id):
2142
yield self._transform.final_file_id(child_trans_id)
2145
possible_extras = set(self._transform.trans_id_tree_path(p) for p
2146
in self._transform._tree.extras())
2147
possible_extras.update(self._transform._new_contents)
2148
possible_extras.update(self._transform._removed_id)
2149
for trans_id in possible_extras:
2150
if self._transform.final_file_id(trans_id) is None:
2151
yield self._final_paths._determine_path(trans_id)
2153
def _make_inv_entries(self, ordered_entries, specific_file_ids=None,
2154
yield_parents=False):
2155
for trans_id, parent_file_id in ordered_entries:
2156
file_id = self._transform.final_file_id(trans_id)
2159
if (specific_file_ids is not None
2160
and file_id not in specific_file_ids):
2162
kind = self._transform.final_kind(trans_id)
2164
kind = self._transform._tree.stored_kind(file_id)
2165
new_entry = inventory.make_entry(
2167
self._transform.final_name(trans_id),
2168
parent_file_id, file_id)
2169
yield new_entry, trans_id
2171
def _list_files_by_dir(self):
2172
todo = [ROOT_PARENT]
2174
while len(todo) > 0:
2176
parent_file_id = self._transform.final_file_id(parent)
2177
children = list(self._all_children(parent))
2178
paths = dict(zip(children, self._final_paths.get_paths(children)))
2179
children.sort(key=paths.get)
2180
todo.extend(reversed(children))
2181
for trans_id in children:
2182
ordered_ids.append((trans_id, parent_file_id))
2185
def iter_child_entries(self, file_id, path=None):
2186
self.id2path(file_id)
2187
trans_id = self._transform.trans_id_file_id(file_id)
2188
todo = [(child_trans_id, trans_id) for child_trans_id in
2189
self._all_children(trans_id)]
2190
for entry, trans_id in self._make_inv_entries(todo):
2193
def iter_entries_by_dir(self, specific_file_ids=None, yield_parents=False):
2194
# This may not be a maximally efficient implementation, but it is
2195
# reasonably straightforward. An implementation that grafts the
2196
# TreeTransform changes onto the tree's iter_entries_by_dir results
2197
# might be more efficient, but requires tricky inferences about stack
2199
ordered_ids = self._list_files_by_dir()
2200
for entry, trans_id in self._make_inv_entries(ordered_ids,
2201
specific_file_ids, yield_parents=yield_parents):
2202
yield unicode(self._final_paths.get_path(trans_id)), entry
2204
def _iter_entries_for_dir(self, dir_path):
2205
"""Return path, entry for items in a directory without recursing down."""
2206
dir_file_id = self.path2id(dir_path)
2208
for file_id in self.iter_children(dir_file_id):
2209
trans_id = self._transform.trans_id_file_id(file_id)
2210
ordered_ids.append((trans_id, file_id))
2211
for entry, trans_id in self._make_inv_entries(ordered_ids):
2212
yield unicode(self._final_paths.get_path(trans_id)), entry
2214
def list_files(self, include_root=False, from_dir=None, recursive=True):
2215
"""See WorkingTree.list_files."""
2216
# XXX This should behave like WorkingTree.list_files, but is really
2217
# more like RevisionTree.list_files.
2221
prefix = from_dir + '/'
2222
entries = self.iter_entries_by_dir()
2223
for path, entry in entries:
2224
if entry.name == '' and not include_root:
2227
if not path.startswith(prefix):
2229
path = path[len(prefix):]
2230
yield path, 'V', entry.kind, entry.file_id, entry
2232
if from_dir is None and include_root is True:
2233
root_entry = inventory.make_entry('directory', '',
2234
ROOT_PARENT, self.get_root_id())
2235
yield '', 'V', 'directory', root_entry.file_id, root_entry
2236
entries = self._iter_entries_for_dir(from_dir or '')
2237
for path, entry in entries:
2238
yield path, 'V', entry.kind, entry.file_id, entry
1375
def _iter_changes(self, from_tree, include_unchanged=False,
1376
specific_files=None, pb=None, extra_trees=None,
1377
require_versioned=True, want_unversioned=False):
1378
"""See InterTree._iter_changes.
1380
This implementation does not support include_unchanged, specific_files,
1381
or want_unversioned. extra_trees, require_versioned, and pb are
1384
if from_tree is not self._transform._tree:
1385
raise ValueError('from_tree must be transform source tree.')
1386
if include_unchanged:
1387
raise ValueError('include_unchanged is not supported')
1388
if specific_files is not None:
1389
raise ValueError('specific_files is not supported')
1390
if want_unversioned:
1391
raise ValueError('want_unversioned is not supported')
1392
return self._transform._iter_changes()
2240
1394
def kind(self, file_id):
2241
1395
trans_id = self._transform.trans_id_file_id(file_id)
2242
1396
return self._transform.final_kind(trans_id)
2244
def stored_kind(self, file_id):
2245
trans_id = self._transform.trans_id_file_id(file_id)
2247
return self._transform._new_contents[trans_id]
2249
return self._transform._tree.stored_kind(file_id)
2251
1398
def get_file_mtime(self, file_id, path=None):
2252
1399
"""See Tree.get_file_mtime"""
2253
if not self._content_change(file_id):
2254
return self._transform._tree.get_file_mtime(file_id)
2255
return self._stat_limbo_file(file_id).st_mtime
2257
def _file_size(self, entry, stat_value):
2258
return self.get_file_size(entry.file_id)
2260
def get_file_size(self, file_id):
2261
"""See Tree.get_file_size"""
2262
trans_id = self._transform.trans_id_file_id(file_id)
2263
kind = self._transform.final_kind(trans_id)
2266
if trans_id in self._transform._new_contents:
2267
return self._stat_limbo_file(trans_id=trans_id).st_size
2268
if self.kind(file_id) == 'file':
2269
return self._transform._tree.get_file_size(file_id)
2273
def get_file_verifier(self, file_id, path=None, stat_value=None):
2274
trans_id = self._transform.trans_id_file_id(file_id)
2275
kind = self._transform._new_contents.get(trans_id)
2277
return self._transform._tree.get_file_verifier(file_id)
2279
fileobj = self.get_file(file_id)
2281
return ("SHA1", sha_file(fileobj))
2285
def get_file_sha1(self, file_id, path=None, stat_value=None):
2286
trans_id = self._transform.trans_id_file_id(file_id)
2287
kind = self._transform._new_contents.get(trans_id)
2289
return self._transform._tree.get_file_sha1(file_id)
2291
fileobj = self.get_file(file_id)
2293
return sha_file(fileobj)
2297
def is_executable(self, file_id, path=None):
2300
trans_id = self._transform.trans_id_file_id(file_id)
2302
return self._transform._new_executability[trans_id]
2305
return self._transform._tree.is_executable(file_id, path)
2307
if e.errno == errno.ENOENT:
2310
except errors.NoSuchId:
2313
def has_filename(self, path):
2314
trans_id = self._path2trans_id(path)
2315
if trans_id in self._transform._new_contents:
2317
elif trans_id in self._transform._removed_contents:
2320
return self._transform._tree.has_filename(path)
2322
def path_content_summary(self, path):
2323
trans_id = self._path2trans_id(path)
2324
tt = self._transform
2325
tree_path = tt._tree_id_paths.get(trans_id)
2326
kind = tt._new_contents.get(trans_id)
2328
if tree_path is None or trans_id in tt._removed_contents:
2329
return 'missing', None, None, None
2330
summary = tt._tree.path_content_summary(tree_path)
2331
kind, size, executable, link_or_sha1 = summary
2334
limbo_name = tt._limbo_name(trans_id)
2335
if trans_id in tt._new_reference_revision:
2336
kind = 'tree-reference'
2338
statval = os.lstat(limbo_name)
2339
size = statval.st_size
2340
if not tt._limbo_supports_executable():
2343
executable = statval.st_mode & S_IEXEC
2347
if kind == 'symlink':
2348
link_or_sha1 = os.readlink(limbo_name).decode(osutils._fs_enc)
2349
executable = tt._new_executability.get(trans_id, executable)
2350
return kind, size, executable, link_or_sha1
    def iter_changes(self, from_tree, include_unchanged=False,
                     specific_files=None, pb=None, extra_trees=None,
                     require_versioned=True, want_unversioned=False):
        """See InterTree.iter_changes.

        This has a fast path that is only used when the from_tree matches
        the transform tree, and no fancy options are supplied.
        """
        if (from_tree is not self._transform._tree or include_unchanged or
            specific_files or want_unversioned):
            return tree.InterTree(from_tree, self).iter_changes(
                include_unchanged=include_unchanged,
                specific_files=specific_files,
                pb=pb,
                extra_trees=extra_trees,
                require_versioned=require_versioned,
                want_unversioned=want_unversioned)
        if want_unversioned:
            raise ValueError('want_unversioned is not supported')
        return self._transform.iter_changes()
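    # Fast-path note (illustrative): comparing the transform's own source tree
    # against this preview short-circuits to self._transform.iter_changes();
    # any other tree or option combination uses the generic InterTree path.
    #
    #   for change in preview_tree.iter_changes(tt._tree):
    #       ...  # only entries altered by the transform are yielded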
2373
def get_file(self, file_id, path=None):
1400
trans_id = self._transform.trans_id_file_id(file_id)
1401
name = self._transform._limbo_name(trans_id)
1402
return os.stat(name).st_mtime
1404
def get_file(self, file_id):
2374
1405
"""See Tree.get_file"""
2375
if not self._content_change(file_id):
2376
return self._transform._tree.get_file(file_id, path)
2377
1406
trans_id = self._transform.trans_id_file_id(file_id)
2378
1407
name = self._transform._limbo_name(trans_id)
2379
1408
return open(name, 'rb')
2381
def get_file_with_stat(self, file_id, path=None):
2382
return self.get_file(file_id, path), None
2384
def annotate_iter(self, file_id,
2385
default_revision=_mod_revision.CURRENT_REVISION):
2386
changes = self._iter_changes_cache.get(file_id)
2390
changed_content, versioned, kind = (changes[2], changes[3],
2394
get_old = (kind[0] == 'file' and versioned[0])
2396
old_annotation = self._transform._tree.annotate_iter(file_id,
2397
default_revision=default_revision)
2401
return old_annotation
2402
if not changed_content:
2403
return old_annotation
2404
# TODO: This is doing something similar to what WT.annotate_iter is
2405
# doing, however it fails slightly because it doesn't know what
2406
# the *other* revision_id is, so it doesn't know how to give the
2407
# other as the origin for some lines, they all get
2408
# 'default_revision'
2409
# It would be nice to be able to use the new Annotator based
2410
# approach, as well.
2411
return annotate.reannotate([old_annotation],
2412
self.get_file(file_id).readlines(),
2415
def get_symlink_target(self, file_id, path=None):
2416
"""See Tree.get_symlink_target"""
2417
if not self._content_change(file_id):
2418
return self._transform._tree.get_symlink_target(file_id)
2419
trans_id = self._transform.trans_id_file_id(file_id)
2420
name = self._transform._limbo_name(trans_id)
2421
return osutils.readlink(name)
2423
def walkdirs(self, prefix=''):
2424
pending = [self._transform.root]
2425
while len(pending) > 0:
2426
parent_id = pending.pop()
2429
prefix = prefix.rstrip('/')
2430
parent_path = self._final_paths.get_path(parent_id)
2431
parent_file_id = self._transform.final_file_id(parent_id)
2432
for child_id in self._all_children(parent_id):
2433
path_from_root = self._final_paths.get_path(child_id)
2434
basename = self._transform.final_name(child_id)
2435
file_id = self._transform.final_file_id(child_id)
2436
kind = self._transform.final_kind(child_id)
2437
if kind is not None:
2438
versioned_kind = kind
2441
versioned_kind = self._transform._tree.stored_kind(file_id)
2442
if versioned_kind == 'directory':
2443
subdirs.append(child_id)
2444
children.append((path_from_root, basename, kind, None,
2445
file_id, versioned_kind))
2447
if parent_path.startswith(prefix):
2448
yield (parent_path, parent_file_id), children
2449
pending.extend(sorted(subdirs, key=self._final_paths.get_path,
2452
def get_parent_ids(self):
2453
return self._parent_ids
2455
def set_parent_ids(self, parent_ids):
2456
self._parent_ids = parent_ids
2458
def get_revision_tree(self, revision_id):
2459
return self._transform._tree.get_revision_tree(revision_id)
1410
def paths2ids(self, specific_files, trees=None, require_versioned=False):
1411
"""See Tree.paths2ids"""
2462
1415
def joinpath(parent, child):
2637
1568
new_trans_id = file_trans_id[file_id]
2638
1569
old_parent = tt.trans_id_tree_path(tree_path)
2639
1570
_reparent_children(tt, old_parent, new_trans_id)
2640
offset = num + 1 - len(deferred_contents)
2641
_create_files(tt, tree, deferred_contents, pb, offset,
2642
accelerator_tree, hardlink)
1571
for num, (trans_id, bytes) in enumerate(
1572
_iter_files_bytes_accelerated(tree, accelerator_tree,
1573
deferred_contents)):
1574
tt.create_file(bytes, trans_id)
1575
pb.update('Adding file contents',
1576
(num + len(tree.inventory) - len(deferred_contents)),
1577
len(tree.inventory))
2645
1580
pp.next_phase()
2646
1581
divert_trans = set(file_trans_id[f] for f in divert)
2647
1582
resolver = lambda t, c: resolve_checkout(t, c, divert_trans)
2648
1583
raw_conflicts = resolve_conflicts(tt, pass_func=resolver)
2649
if len(raw_conflicts) > 0:
2650
precomputed_delta = None
2651
1584
conflicts = cook_conflicts(raw_conflicts, tt)
2652
1585
for conflict in conflicts:
2653
trace.warning(unicode(conflict))
2655
1588
wt.add_conflicts(conflicts)
2656
1589
except errors.UnsupportedOperation:
2658
result = tt.apply(no_conflicts=True,
2659
precomputed_delta=precomputed_delta)
1591
result = tt.apply(no_conflicts=True)
2662
1594
top_pb.finished()
2666
def _create_files(tt, tree, desired_files, pb, offset, accelerator_tree,
2668
total = len(desired_files) + offset
1598
def _iter_files_bytes_accelerated(tree, accelerator_tree, desired_files):
2670
1599
if accelerator_tree is None:
2671
1600
new_desired_files = desired_files
2673
iter = accelerator_tree.iter_changes(tree, include_unchanged=True)
2674
unchanged = [(f, p[1]) for (f, p, c, v, d, n, k, e)
2675
in iter if not (c or e[0] != e[1])]
2676
if accelerator_tree.supports_content_filtering():
2677
unchanged = [(f, p) for (f, p) in unchanged
2678
if not accelerator_tree.iter_search_rules([p]).next()]
2679
unchanged = dict(unchanged)
1602
iter = accelerator_tree._iter_changes(tree, include_unchanged=True)
1603
unchanged = dict((f, p[1]) for (f, p, c, v, d, n, k, e)
2680
1605
new_desired_files = []
2682
for file_id, (trans_id, tree_path, text_sha1) in desired_files:
1606
for file_id, identifier in desired_files:
2683
1607
accelerator_path = unchanged.get(file_id)
2684
1608
if accelerator_path is None:
2685
new_desired_files.append((file_id,
2686
(trans_id, tree_path, text_sha1)))
1609
new_desired_files.append((file_id, identifier))
2688
pb.update(gettext('Adding file contents'), count + offset, total)
2690
tt.create_hardlink(accelerator_tree.abspath(accelerator_path),
2693
contents = accelerator_tree.get_file(file_id, accelerator_path)
2694
if wt.supports_content_filtering():
2695
filters = wt._content_filter_stack(tree_path)
2696
contents = filtered_output_bytes(contents, filters,
2697
ContentFilterContext(tree_path, tree))
2699
tt.create_file(contents, trans_id, sha1=text_sha1)
2703
except AttributeError:
2704
# after filtering, contents may no longer be file-like
2708
for count, ((trans_id, tree_path, text_sha1), contents) in enumerate(
2709
tree.iter_files_bytes(new_desired_files)):
2710
if wt.supports_content_filtering():
2711
filters = wt._content_filter_stack(tree_path)
2712
contents = filtered_output_bytes(contents, filters,
2713
ContentFilterContext(tree_path, tree))
2714
tt.create_file(contents, trans_id, sha1=text_sha1)
2715
pb.update(gettext('Adding file contents'), count + offset, total)
1611
contents = accelerator_tree.get_file(file_id, accelerator_path)
1614
contents_bytes = (contents.read(),)
1617
yield identifier, contents_bytes
1618
for result in tree.iter_files_bytes(new_desired_files):
2718
1622
def _reparent_children(tt, old_parent, new_parent):
2719
1623
for child in tt.iter_tree_children(old_parent):
2720
1624
tt.adjust_path(tt.final_name(child), new_parent, child)
2723
1626
def _reparent_transform_children(tt, old_parent, new_parent):
2724
1627
by_parent = tt.by_parent()
2725
1628
for child in by_parent[old_parent]:
2726
1629
tt.adjust_path(tt.final_name(child), new_parent, child)
2727
return by_parent[old_parent]
2730
1631
def _content_match(tree, entry, file_id, kind, target_path):
2731
1632
if entry.kind != kind:
2832
1710
tt.set_executability(entry.executable, trans_id)
1713
@deprecated_function(zero_fifteen)
1714
def find_interesting(working_tree, target_tree, filenames):
1715
"""Find the ids corresponding to specified filenames.
1717
Deprecated: Please use tree1.paths2ids(filenames, [tree2]).
1719
working_tree.lock_read()
1721
target_tree.lock_read()
1723
return working_tree.paths2ids(filenames, [target_tree])
1725
target_tree.unlock()
1727
working_tree.unlock()


@deprecated_function(zero_ninety)
def change_entry(tt, file_id, working_tree, target_tree,
                 trans_id_file_id, backups, trans_id, by_parent):
    """Replace a file_id's contents with those from a target tree."""
    if file_id is None and target_tree is None:
        # skip the logic altogether in the deprecation test
        return
    e_trans_id = trans_id_file_id(file_id)
    entry = target_tree.inventory[file_id]
    has_contents, contents_mod, meta_mod = _entry_changes(file_id, entry,
                                                          working_tree)
    if contents_mod:
        mode_id = e_trans_id
        if has_contents:
            if not backups:
                tt.delete_contents(e_trans_id)
            else:
                parent_trans_id = trans_id_file_id(entry.parent_id)
                backup_name = get_backup_name(entry, by_parent,
                                              parent_trans_id, tt)
                tt.adjust_path(backup_name, parent_trans_id, e_trans_id)
                tt.unversion_file(e_trans_id)
                e_trans_id = tt.create_path(entry.name, parent_trans_id)
                tt.version_file(file_id, e_trans_id)
                trans_id[file_id] = e_trans_id
        create_by_entry(tt, entry, target_tree, e_trans_id, mode_id=mode_id)
        create_entry_executability(tt, entry, e_trans_id)
    elif meta_mod:
        tt.set_executability(entry.executable, e_trans_id)
    if tt.final_name(e_trans_id) != entry.name:
        adjust_path = True
    else:
        parent_id = tt.final_parent(e_trans_id)
        parent_file_id = tt.final_file_id(parent_id)
        adjust_path = (parent_file_id != entry.parent_id)
    if adjust_path:
        parent_trans_id = trans_id_file_id(entry.parent_id)
        tt.adjust_path(entry.name, parent_trans_id, e_trans_id)


def get_backup_name(entry, by_parent, parent_trans_id, tt):
    return _get_backup_name(entry.name, by_parent, parent_trans_id, tt)


def _get_backup_name(name, by_parent, parent_trans_id, tt):
    """Produce a backup-style name that appears to be available"""
    def name_gen():
        counter = 1
        while True:
            yield "%s.~%d~" % (name, counter)
            counter += 1

    for new_name in name_gen():
        if not tt.has_named_child(by_parent, parent_trans_id, new_name):
            return new_name


def _entry_changes(file_id, entry, working_tree):
    """Determine in which ways the inventory entry has changed.

    Returns booleans: has_contents, contents_mod, meta_mod
    has_contents means there are currently contents, but they differ
    contents_mod means contents need to be modified
    meta_mod means the metadata needs to be modified
    """
    cur_entry = working_tree.inventory[file_id]
    try:
        working_kind = working_tree.kind(file_id)
        has_contents = True
    except NoSuchFile:
        has_contents = False
        contents_mod = True
        meta_mod = False
    if has_contents is True:
        if entry.kind != working_kind:
            contents_mod, meta_mod = True, False
        else:
            cur_entry._read_tree_state(working_tree.id2path(file_id),
                                       working_tree)
            contents_mod, meta_mod = entry.detect_changes(cur_entry)
            cur_entry._forget_tree_state()
    return has_contents, contents_mod, meta_mod
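

# Illustrative sketch, not part of bzrlib: the backup names generated above
# follow the "<name>.~<counter>~" pattern, counting up until an unused name
# is found.  A standalone equivalent, with a plain set standing in for
# tt.has_named_child(), might look like this (the helper name is
# hypothetical):
def example_backup_name(name, taken_names):
    counter = 1
    while True:
        candidate = "%s.~%d~" % (name, counter)
        if candidate not in taken_names:
            return candidate
        counter += 1

# example_backup_name('foo', set(['foo.~1~'])) returns 'foo.~2~'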


def revert(working_tree, target_tree, filenames, backups=False,
           pb=None, change_reporter=None):
    """Revert a working tree's contents to those of a target tree."""
    target_tree.lock_read()
    pb = ui.ui_factory.nested_progress_bar()
    tt = TreeTransform(working_tree, pb)
    try:
        pp = ProgressPhase("Revert phase", 3, pb)
        conflicts, merge_modified = _prepare_revert_transform(
            working_tree, target_tree, tt, filenames, backups, pp)
        if change_reporter:
            change_reporter = delta._ChangeReporter(
                unversioned_filter=working_tree.is_ignored)
            delta.report_changes(tt.iter_changes(), change_reporter)
        for conflict in conflicts:
            trace.warning(unicode(conflict))
        pp.next_phase()
        tt.apply()
        working_tree.set_merge_modified(merge_modified)
    finally:
        target_tree.unlock()
        tt.finalize()
        pb.finished()
    return conflicts
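

# Illustrative sketch, not part of bzrlib: reverting selected files of a
# working tree back to its basis tree.  'wt' is assumed to be an open
# WorkingTree; backups=True keeps locally modified copies around as
# "<name>.~N~" files.  The helper name is hypothetical.
def example_revert_readme(wt):
    return revert(wt, wt.basis_tree(), ['README'], backups=True)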


                    # Continuation of _alter_files(): this hunk sits inside
                    # the per-change loop, where the wt_* and target_* names
                    # are the working-tree and target-tree sides of a change.
                    if basis_tree is None:
                        basis_tree = working_tree.basis_tree()
                        basis_tree.lock_read()
                    if basis_tree.has_id(file_id):
                        if wt_sha1 != basis_tree.get_file_sha1(file_id):
                            keep_content = True
                    elif target_kind is None and not target_versioned:
                        keep_content = True
                if wt_kind is not None:
                    if not keep_content:
                        tt.delete_contents(trans_id)
                    elif target_kind is not None:
                        parent_trans_id = tt.trans_id_file_id(wt_parent)
                        backup_name = tt._available_backup_name(
                            wt_name, parent_trans_id)
                        tt.adjust_path(backup_name, parent_trans_id, trans_id)
                        new_trans_id = tt.create_path(wt_name, parent_trans_id)
                        if wt_versioned and target_versioned:
                            tt.unversion_file(trans_id)
                            tt.version_file(file_id, new_trans_id)
                        # New contents should have the same unix perms as old
                        # contents
                        mode_id = trans_id
                        trans_id = new_trans_id
                if target_kind in ('directory', 'tree-reference'):
                    tt.create_directory(trans_id)
                    if target_kind == 'tree-reference':
                        revision = target_tree.get_reference_revision(
                            file_id, target_path)
                        tt.set_tree_reference(revision, trans_id)
                elif target_kind == 'symlink':
                    tt.create_symlink(target_tree.get_symlink_target(file_id),
                                      trans_id)
                elif target_kind == 'file':
                    deferred_files.append((file_id, (trans_id, mode_id)))
                    if basis_tree is None:
                        basis_tree = working_tree.basis_tree()
                        basis_tree.lock_read()
                    new_sha1 = target_tree.get_file_sha1(file_id)
                    if (basis_tree.has_id(file_id) and
                        new_sha1 == basis_tree.get_file_sha1(file_id)):
                        if file_id in merge_modified:
                            del merge_modified[file_id]
                    else:
                        merge_modified[file_id] = new_sha1

                    # preserve the execute bit when backing up
                    if keep_content and wt_executable == target_executable:
                        tt.set_executability(target_executable, trans_id)
                elif target_kind is not None:
                    raise AssertionError(target_kind)
            if not wt_versioned and target_versioned:
                tt.version_file(file_id, trans_id)
            if wt_versioned and not target_versioned:
                tt.unversion_file(trans_id)
            if (target_name is not None and
                (wt_name != target_name or wt_parent != target_parent)):
                if target_name == '' and target_parent is None:
                    parent_trans = ROOT_PARENT
                else:
                    parent_trans = tt.trans_id_file_id(target_parent)
                if wt_parent is None and wt_versioned:
                    tt.adjust_root_path(target_name, parent_trans)
                else:
                    tt.adjust_path(target_name, parent_trans, trans_id)
            if wt_executable != target_executable and target_kind == "file":
                tt.set_executability(target_executable, trans_id)
        if working_tree.supports_content_filtering():
            for index, ((trans_id, mode_id), bytes) in enumerate(
                    target_tree.iter_files_bytes(deferred_files)):
                file_id = deferred_files[index][0]
                # We're reverting a tree to the target tree so using the
                # target tree to find the file path seems the best choice
                # here IMO - Ian C 27/Oct/2009
                filter_tree_path = target_tree.id2path(file_id)
                filters = working_tree._content_filter_stack(filter_tree_path)
                bytes = filtered_output_bytes(bytes, filters,
                    ContentFilterContext(filter_tree_path, working_tree))
                tt.create_file(bytes, trans_id, mode_id)
        else:
            for (trans_id, mode_id), bytes in target_tree.iter_files_bytes(
                    deferred_files):
                tt.create_file(bytes, trans_id, mode_id)
        tt.fixup_new_roots()
    finally:
        if basis_tree is not None:
            basis_tree.unlock()
    return merge_modified


def resolve_conflicts(tt, pb=None, pass_func=None):
    """Make many conflict-resolution attempts, but die if they fail"""
    if pass_func is None:
        pass_func = conflict_pass
    new_conflicts = set()
    pb = ui.ui_factory.nested_progress_bar()
    try:
        for n in range(10):
            pb.update(gettext('Resolution pass'), n+1, 10)
            conflicts = tt.find_conflicts()
            if len(conflicts) == 0:
                return new_conflicts
            new_conflicts.update(pass_func(tt, conflicts))
        raise MalformedTransform(conflicts=conflicts)
    finally:
        pb.finished()
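

# Illustrative sketch, not part of bzrlib: resolve_conflicts() accepts a
# custom pass function.  The lambda mirrors how older revert code wired
# conflict_pass() to a specific target tree; 'tt' is assumed to be an
# existing TreeTransform and the helper name is hypothetical.
def example_resolve_against(tt, target_tree):
    return resolve_conflicts(
        tt, pass_func=lambda t, c: conflict_pass(t, c, target_tree))

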
def conflict_pass(tt, conflicts, path_tree=None):