    def get_preview_tree(self):
        """Return a tree representing the result of the transform.

        The tree is a snapshot, and altering the TreeTransform will invalidate
        it.

        This tree only supports the subset of Tree functionality required
        by show_diff_trees.  It must only be compared to tt._tree.
        """
        return _PreviewTree(self)
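    # Illustrative sketch (not part of the original module): a preview tree is
    # typically diffed against the transform's base tree without touching the
    # working tree on disk.  `wt` and the use of sys.stdout are assumptions.
    #
    #   from bzrlib.diff import show_diff_trees
    #   tt = TransformPreview(wt)
    #   try:
    #       tt.new_file('hello.txt', tt.root, ['hello world\n'])
    #       show_diff_trees(tt._tree, tt.get_preview_tree(), sys.stdout)
    #   finally:
    #       tt.finalize()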
    def commit(self, branch, message, merge_parents=None, strict=False,
               timestamp=None, timezone=None, committer=None, authors=None,
               revprops=None, revision_id=None):
        """Commit the result of this TreeTransform to a branch.

        :param branch: The branch to commit to.
        :param message: The message to attach to the commit.
        :param merge_parents: Additional parent revision-ids specified by
            pending merges.
        :param strict: If True, abort the commit if there are unversioned
            files.
        :param timestamp: if not None, seconds-since-epoch for the time and
            date. (May be a float.)
        :param timezone: Optional timezone for timestamp, as an offset in
            seconds.
        :param committer: Optional committer in email-id format.
            (e.g. "J Random Hacker <jrandom@example.com>")
        :param authors: Optional list of authors in email-id format.
        :param revprops: Optional dictionary of revision properties.
        :param revision_id: Optional revision id. (Specifying a revision-id
            may reduce performance for some non-native formats.)
        :return: The revision_id of the revision committed.
        """
        self._check_malformed()
        if strict:
            unversioned = set(self._new_contents).difference(set(self._new_id))
            for trans_id in unversioned:
                if self.final_file_id(trans_id) is None:
                    raise errors.StrictCommitFailed()

        revno, last_rev_id = branch.last_revision_info()
        if last_rev_id == _mod_revision.NULL_REVISION:
            if merge_parents is not None:
                raise ValueError('Cannot supply merge parents for first'
                                 ' commit.')
            parent_ids = []
        else:
            parent_ids = [last_rev_id]
            if merge_parents is not None:
                parent_ids.extend(merge_parents)
        if self._tree.get_revision_id() != last_rev_id:
            raise ValueError('TreeTransform not based on branch basis: %s' %
                             self._tree.get_revision_id())
        revprops = commit.Commit.update_revprops(revprops, branch, authors)
        builder = branch.get_commit_builder(parent_ids,
                                            timestamp=timestamp,
                                            timezone=timezone,
                                            committer=committer,
                                            revprops=revprops,
                                            revision_id=revision_id)
        preview = self.get_preview_tree()
        list(builder.record_iter_changes(preview, last_rev_id,
                                         self.iter_changes()))
        builder.finish_inventory()
        revision_id = builder.commit(message)
        branch.set_last_revision_info(revno + 1, revision_id)
        return revision_id
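    # Illustrative sketch (not part of the original module): commit() expects
    # the transform to be built on the branch's current basis, so a preview
    # transform over branch.basis_tree() is the usual caller.  `branch` is an
    # assumed, already-existing Branch object.
    #
    #   tt = TransformPreview(branch.basis_tree())
    #   try:
    #       tt.new_file('NEWS', tt.root, ['initial NEWS entry\n'])
    #       new_rev_id = tt.commit(branch, 'add NEWS')
    #   finally:
    #       tt.finalize()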
    def _text_parent(self, trans_id):
        file_id = self.tree_file_id(trans_id)
        try:
            if file_id is None or self._tree.kind(file_id) != 'file':
                return None
        except errors.NoSuchFile:
            return None
        return file_id

    def _get_parents_texts(self, trans_id):
        """Get texts for compression parents of this file."""
        file_id = self._text_parent(trans_id)
        if file_id is None:
            return ()
        return (self._tree.get_file_text(file_id),)

    def _get_parents_lines(self, trans_id):
        """Get lines for compression parents of this file."""
        file_id = self._text_parent(trans_id)
        if file_id is None:
            return ()
        return (self._tree.get_file_lines(file_id),)
    def serialize(self, serializer):
        """Serialize this TreeTransform.

        :param serializer: A Serialiser like pack.ContainerSerializer.
        """
        new_name = dict((k, v.encode('utf-8')) for k, v in
                        self._new_name.items())
        new_executability = dict((k, int(v)) for k, v in
                                 self._new_executability.items())
        tree_path_ids = dict((k.encode('utf-8'), v)
                             for k, v in self._tree_path_ids.items())
        attribs = {
            '_id_number': self._id_number,
            '_new_name': new_name,
            '_new_parent': self._new_parent,
            '_new_executability': new_executability,
            '_new_id': self._new_id,
            '_tree_path_ids': tree_path_ids,
            '_removed_id': list(self._removed_id),
            '_removed_contents': list(self._removed_contents),
            '_non_present_ids': self._non_present_ids,
            }
        yield serializer.bytes_record(bencode.bencode(attribs),
                                      (('attribs',),))
        for trans_id, kind in self._new_contents.items():
            if kind == 'file':
                lines = osutils.chunks_to_lines(
                    self._read_file_chunks(trans_id))
                parents = self._get_parents_lines(trans_id)
                mpdiff = multiparent.MultiParent.from_lines(lines, parents)
                content = ''.join(mpdiff.to_patch())
            if kind == 'directory':
                content = ''
            if kind == 'symlink':
                content = self._read_symlink_target(trans_id)
            yield serializer.bytes_record(content, ((trans_id, kind),))
    def deserialize(self, records):
        """Deserialize a stored TreeTransform.

        :param records: An iterable of (names, content) tuples, as per
            pack.ContainerPushParser.
        """
        names, content = records.next()
        attribs = bencode.bdecode(content)
        self._id_number = attribs['_id_number']
        self._new_name = dict((k, v.decode('utf-8'))
                              for k, v in attribs['_new_name'].items())
        self._new_parent = attribs['_new_parent']
        self._new_executability = dict((k, bool(v)) for k, v in
            attribs['_new_executability'].items())
        self._new_id = attribs['_new_id']
        self._r_new_id = dict((v, k) for k, v in self._new_id.items())
        self._tree_path_ids = {}
        self._tree_id_paths = {}
        for bytepath, trans_id in attribs['_tree_path_ids'].items():
            path = bytepath.decode('utf-8')
            self._tree_path_ids[path] = trans_id
            self._tree_id_paths[trans_id] = path
        self._removed_id = set(attribs['_removed_id'])
        self._removed_contents = set(attribs['_removed_contents'])
        self._non_present_ids = attribs['_non_present_ids']
        for ((trans_id, kind),), content in records:
            if kind == 'file':
                mpdiff = multiparent.MultiParent.from_patch(content)
                lines = mpdiff.to_lines(self._get_parents_texts(trans_id))
                self.create_file(lines, trans_id)
            if kind == 'directory':
                self.create_directory(trans_id)
            if kind == 'symlink':
                self.create_symlink(content.decode('utf-8'), trans_id)
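    # Illustrative sketch (not part of the original module): serialize() and
    # deserialize() form the round trip used when a pending transform must be
    # stored (e.g. by shelving).  The serializer class spelling and file name
    # below are assumptions, not requirements of this API.
    #
    #   serializer = pack.ContainerSerialiser()
    #   out = open('pending-transform.pack', 'wb')
    #   try:
    #       out.write(serializer.begin())
    #       for record_bytes in tt.serialize(serializer):
    #           out.write(record_bytes)
    #       out.write(serializer.end())
    #   finally:
    #       out.close()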

class DiskTreeTransform(TreeTransformBase):
    """Tree transform storing its contents on disk."""

    def __init__(self, tree, limbodir, pb=None,
                 case_sensitive=True):
        """Constructor.

        :param tree: The tree that will be transformed, but not necessarily
            the output tree.
        :param limbodir: A directory where new files can be stored until
            they are installed in their proper places
        :param pb: ignored
        :param case_sensitive: If True, the target of the transform is
            case sensitive, not just case preserving.
        """
        TreeTransformBase.__init__(self, tree, pb, case_sensitive)
        self._limbodir = limbodir
        self._deletiondir = None
        # A mapping of transform ids to their limbo filename
        self._limbo_files = {}
        self._possibly_stale_limbo_files = set()
        # A mapping of transform ids to a set of the transform ids of children
        # that their limbo directory has
        self._limbo_children = {}
        # Map transform ids to maps of child filename to child transform id
        self._limbo_children_names = {}
        # List of transform ids that need to be renamed from limbo into place
        self._needs_rename = set()
        self._creation_mtime = None
    def finalize(self):
        """Release the working tree lock, if held, clean up limbo dir.

        This is required if apply has not been invoked, but can be invoked
        even after apply.
        """
        if self._tree is None:
            return
        try:
            limbo_paths = self._limbo_files.values() + list(
                self._possibly_stale_limbo_files)
            limbo_paths = sorted(limbo_paths, reverse=True)
            for path in limbo_paths:
                try:
                    delete_any(path)
                except OSError, e:
                    if e.errno != errno.ENOENT:
                        raise
                    # XXX: warn? perhaps we just got interrupted at an
                    # inconvenient moment, but perhaps files are disappearing
                    # from the limbo directory for another reason.
            try:
                delete_any(self._limbodir)
            except OSError:
                # We don't especially care *why* the dir is immortal.
                raise ImmortalLimbo(self._limbodir)
            try:
                if self._deletiondir is not None:
                    delete_any(self._deletiondir)
            except OSError:
                raise errors.ImmortalPendingDeletion(self._deletiondir)
        finally:
            TreeTransformBase.finalize(self)
    def _limbo_supports_executable(self):
        """Check if the limbo path supports the executable bit."""
        # FIXME: Check actual file system capabilities of limbodir
        return osutils.supports_executable()

    def _limbo_name(self, trans_id):
        """Generate the limbo name of a file"""
        limbo_name = self._limbo_files.get(trans_id)
        if limbo_name is None:
            limbo_name = self._generate_limbo_path(trans_id)
            self._limbo_files[trans_id] = limbo_name
        return limbo_name

    def _generate_limbo_path(self, trans_id):
        """Generate a limbo path using the trans_id as the relative path.

        This is suitable as a fallback, and when the transform should not be
        sensitive to the path encoding of the limbo directory.
        """
        self._needs_rename.add(trans_id)
        return pathjoin(self._limbodir, trans_id)
    def adjust_path(self, name, parent, trans_id):
        previous_parent = self._new_parent.get(trans_id)
        previous_name = self._new_name.get(trans_id)
        TreeTransformBase.adjust_path(self, name, parent, trans_id)
        if (trans_id in self._limbo_files and
            trans_id not in self._needs_rename):
            self._rename_in_limbo([trans_id])
            if previous_parent != parent:
                self._limbo_children[previous_parent].remove(trans_id)
            if previous_parent != parent or previous_name != name:
                del self._limbo_children_names[previous_parent][previous_name]
    def _rename_in_limbo(self, trans_ids):
        """Fix limbo names so that the right final path is produced.

        This means we outsmarted ourselves-- we tried to avoid renaming
        these files later by creating them with their final names in their
        final parents.  But now the previous name or parent is no longer
        suitable, so we have to rename them.

        Even for trans_ids that have no new contents, we must remove their
        entries from _limbo_files, because they are now stale.
        """
        for trans_id in trans_ids:
            old_path = self._limbo_files[trans_id]
            self._possibly_stale_limbo_files.add(old_path)
            del self._limbo_files[trans_id]
            if trans_id not in self._new_contents:
                continue
            new_path = self._limbo_name(trans_id)
            os.rename(old_path, new_path)
            self._possibly_stale_limbo_files.remove(old_path)
            for descendant in self._limbo_descendants(trans_id):
                desc_path = self._limbo_files[descendant]
                desc_path = new_path + desc_path[len(old_path):]
                self._limbo_files[descendant] = desc_path

    def _limbo_descendants(self, trans_id):
        """Return the set of trans_ids whose limbo paths descend from this."""
        descendants = set(self._limbo_children.get(trans_id, []))
        for descendant in list(descendants):
            descendants.update(self._limbo_descendants(descendant))
        return descendants
    def create_file(self, contents, trans_id, mode_id=None, sha1=None):
        """Schedule creation of a new file.

        :param contents: an iterator of strings, all of which will be written
            to the target destination.
        :param trans_id: TreeTransform handle
        :param mode_id: If not None, force the mode of the target file to match
            the mode of the object referenced by mode_id.
            Otherwise, we will try to preserve mode bits of an existing file.
        :param sha1: If the sha1 of this content is already known, pass it in.
            We can use it to prevent future sha1 computations.
        """
        name = self._limbo_name(trans_id)
        f = open(name, 'wb')
        try:
            unique_add(self._new_contents, trans_id, 'file')
            f.writelines(contents)
        finally:
            f.close()
        self._set_mtime(name)
        self._set_mode(trans_id, mode_id, S_ISREG)
        # It is unfortunate we have to use lstat instead of fstat, but we just
        # used utime and chmod on the file, so we need the accurate final
        # value.
        if sha1 is not None:
            self._observed_sha1s[trans_id] = (sha1, osutils.lstat(name))
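    # Illustrative sketch (not part of the original module): callers normally
    # reach create_file() through new_file(), which allocates the trans_id and
    # versions the file in one step.  `wt` is an assumed WorkingTree.
    #
    #   tt = TreeTransform(wt)
    #   try:
    #       tt.new_file('notes.txt', tt.root, ['first line\n'],
    #                   file_id='notes-file-id')
    #       tt.apply()
    #   finally:
    #       tt.finalize()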
    def _read_file_chunks(self, trans_id):
        cur_file = open(self._limbo_name(trans_id), 'rb')
        try:
            return cur_file.readlines()
        finally:
            cur_file.close()

    def _read_symlink_target(self, trans_id):
        return os.readlink(self._limbo_name(trans_id))

    def _set_mtime(self, path):
        """All files that are created get the same mtime.

        This time is set by the first object to be created.
        """
        if self._creation_mtime is None:
            self._creation_mtime = time.time()
        os.utime(path, (self._creation_mtime, self._creation_mtime))
    def create_hardlink(self, path, trans_id):
        """Schedule creation of a hard link"""
        name = self._limbo_name(trans_id)
        try:
            os.link(path, name)
        except OSError, e:
            if e.errno != errno.EPERM:
                raise
            raise errors.HardLinkNotSupported(path)
        try:
            unique_add(self._new_contents, trans_id, 'file')
        except:
            # Clean up the file, it never got registered so
            # TreeTransform.finalize() won't clean it up.
            os.unlink(name)
            raise
    def create_directory(self, trans_id):
        """Schedule creation of a new directory.

        See also new_directory.
        """
        os.mkdir(self._limbo_name(trans_id))
        unique_add(self._new_contents, trans_id, 'directory')

    def create_symlink(self, target, trans_id):
        """Schedule creation of a new symbolic link.

        target is a bytestring.
        See also new_symlink.
        """
        if has_symlinks():
            os.symlink(target, self._limbo_name(trans_id))
            unique_add(self._new_contents, trans_id, 'symlink')
        else:
            try:
                path = FinalPaths(self).get_path(trans_id)
            except KeyError:
                path = None
            raise UnableCreateSymlink(path=path)
    def cancel_creation(self, trans_id):
        """Cancel the creation of new file contents."""
        del self._new_contents[trans_id]
        if trans_id in self._observed_sha1s:
            del self._observed_sha1s[trans_id]
        children = self._limbo_children.get(trans_id)
        # if this is a limbo directory with children, move them before removing
        # the directory
        if children is not None:
            self._rename_in_limbo(children)
            del self._limbo_children[trans_id]
            del self._limbo_children_names[trans_id]
        delete_any(self._limbo_name(trans_id))

    def new_orphan(self, trans_id, parent_id):
        conf = self._tree.get_config_stack()
        handle_orphan = conf.get('bzr.transform.orphan_policy')
        handle_orphan(self, trans_id, parent_id)

class OrphaningError(errors.BzrError):

    # Only bugs could lead to such exception being seen by the user
    internal_error = True
    _fmt = "Error while orphaning %s in %s directory"

    def __init__(self, orphan, parent):
        errors.BzrError.__init__(self)
        self.orphan = orphan
        self.parent = parent


class OrphaningForbidden(OrphaningError):

    _fmt = "Policy: %s doesn't allow creating orphans."

    def __init__(self, policy):
        errors.BzrError.__init__(self)
        self.policy = policy

def move_orphan(tt, orphan_id, parent_id):
    """See TreeTransformBase.new_orphan.

    This creates a new orphan in the `bzr-orphans` dir at the root of the
    `TreeTransform`.

    :param tt: The TreeTransform orphaning `trans_id`.

    :param orphan_id: The trans id that should be orphaned.

    :param parent_id: The orphan parent trans id.
    """
    # Add the orphan dir if it doesn't exist
    orphan_dir_basename = 'bzr-orphans'
    od_id = tt.trans_id_tree_path(orphan_dir_basename)
    if tt.final_kind(od_id) is None:
        tt.create_directory(od_id)
    parent_path = tt._tree_id_paths[parent_id]
    # Find a name that doesn't exist yet in the orphan dir
    actual_name = tt.final_name(orphan_id)
    new_name = tt._available_backup_name(actual_name, od_id)
    tt.adjust_path(new_name, od_id, orphan_id)
    trace.warning('%s has been orphaned in %s'
                  % (joinpath(parent_path, actual_name), orphan_dir_basename))


def refuse_orphan(tt, orphan_id, parent_id):
    """See TreeTransformBase.new_orphan.

    This refuses to create orphan, letting the caller handle the conflict.
    """
    raise OrphaningForbidden('never')

orphaning_registry = registry.Registry()
orphaning_registry.register(
    'conflict', refuse_orphan,
    'Leave orphans in place and create a conflict on the directory.')
orphaning_registry.register(
    'move', move_orphan,
    'Move orphans into the bzr-orphans directory.')
orphaning_registry._set_default_key('conflict')


opt_transform_orphan = _mod_config.RegistryOption(
    'bzr.transform.orphan_policy', orphaning_registry,
    help='Policy for orphaned files during transform operations.',
    invalid='warning')
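
# Illustrative sketch (not part of the original module): the handler is chosen
# through the configuration stack, so a bazaar.conf or branch.conf entry picks
# one of the policies registered above, e.g.:
#
#   [DEFAULT]
#   bzr.transform.orphan_policy = move
#
# With 'move', unversioned files left under a directory being deleted are
# parked in 'bzr-orphans' instead of producing a conflict (the default).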

class TreeTransform(DiskTreeTransform):
    """Represent a tree transformation.

    This object is designed to support incremental generation of the transform,
    in any order.
    """

    def __init__(self, tree, pb=None):
        """Note: a tree_write lock is taken on the tree.

        Use TreeTransform.finalize() to release the lock (can be omitted if
        TreeTransform.apply() called).
        """
        tree.lock_tree_write()
        try:
            limbodir = urlutils.local_path_from_url(
                tree._transport.abspath('limbo'))
            osutils.ensure_empty_directory_exists(
                limbodir,
                errors.ExistingLimbo)
            deletiondir = urlutils.local_path_from_url(
                tree._transport.abspath('pending-deletion'))
            osutils.ensure_empty_directory_exists(
                deletiondir,
                errors.ExistingPendingDeletion)
        except:
            tree.unlock()
            raise

        # Cache of realpath results, to speed up canonical_path
        self._realpaths = {}
        # Cache of relpath results, to speed up canonical_path
        self._relpaths = {}
        DiskTreeTransform.__init__(self, tree, limbodir, pb,
                                   tree.case_sensitive)
        self._deletiondir = deletiondir
    def canonical_path(self, path):
        """Get the canonical tree-relative path"""
        # don't follow final symlinks
        abs = self._tree.abspath(path)
        if abs in self._relpaths:
            return self._relpaths[abs]
        dirname, basename = os.path.split(abs)
        if dirname not in self._realpaths:
            self._realpaths[dirname] = os.path.realpath(dirname)
        dirname = self._realpaths[dirname]
        abs = pathjoin(dirname, basename)
        if dirname in self._relpaths:
            relpath = pathjoin(self._relpaths[dirname], basename)
            relpath = relpath.rstrip('/\\')
        else:
            relpath = self._tree.relpath(abs)
        self._relpaths[abs] = relpath
        return relpath
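    # Illustrative sketch (not part of the original module): the parent
    # directory is resolved through realpath while the final component is kept
    # as given, so a path that passes through a symlinked directory comes back
    # in tree-relative form.  The layout below is assumed.
    #
    #   # inside the tree, 'build' is a symlink to 'target/debug'
    #   tt.canonical_path('build/out.txt')   # -> 'target/debug/out.txt'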
    def tree_kind(self, trans_id):
        """Determine the file kind in the working tree.

        :returns: The file kind or None if the file does not exist
        """
        path = self._tree_id_paths.get(trans_id)
        if path is None:
            return None
        try:
            return file_kind(self._tree.abspath(path))
        except errors.NoSuchFile:
            return None

    def _set_mode(self, trans_id, mode_id, typefunc):
        """Set the mode of new file contents.
        The mode_id is the existing file to get the mode from (often the same
        as trans_id).  The operation is only performed if there's a mode match
        according to typefunc.
        """
        if mode_id is None:
            mode_id = trans_id
        try:
            old_path = self._tree_id_paths[mode_id]
        except KeyError:
            return
        try:
            mode = os.stat(self._tree.abspath(old_path)).st_mode
        except OSError, e:
            if e.errno in (errno.ENOENT, errno.ENOTDIR):
                # Either old_path doesn't exist, or the parent of the
                # target is not a directory (but will be one eventually)
                # Either way, we know it doesn't exist *right now*
                # See also bug #248448
                return
            else:
                raise
        if typefunc(mode):
            osutils.chmod_if_possible(self._limbo_name(trans_id), mode)
    def iter_tree_children(self, parent_id):
        """Iterate through the entry's tree children, if any"""
        try:
            path = self._tree_id_paths[parent_id]
        except KeyError:
            return
        try:
            children = os.listdir(self._tree.abspath(path))
        except OSError, e:
            if not (osutils._is_error_enotdir(e)
                    or e.errno in (errno.ENOENT, errno.ESRCH)):
                raise
            return

        for child in children:
            childpath = joinpath(path, child)
            if self._tree.is_control_filename(childpath):
                continue
            yield self.trans_id_tree_path(childpath)
    def _generate_limbo_path(self, trans_id):
        """Generate a limbo path using the final path if possible.

        This optimizes the performance of applying the tree transform by
        avoiding renames.  These renames can be avoided only when the parent
        directory is already scheduled for creation.

        If the final path cannot be used, falls back to using the trans_id as
        the relpath.
        """
        parent = self._new_parent.get(trans_id)
        # if the parent directory is already in limbo (e.g. when building a
        # tree), choose a limbo name inside the parent, to reduce further
        # renames.
        use_direct_path = False
        if self._new_contents.get(parent) == 'directory':
            filename = self._new_name.get(trans_id)
            if filename is not None:
                if parent not in self._limbo_children:
                    self._limbo_children[parent] = set()
                    self._limbo_children_names[parent] = {}
                    use_direct_path = True
                # the direct path can only be used if no other file has
                # already taken this pathname, i.e. if the name is unused, or
                # if it is already associated with this trans_id.
                elif self._case_sensitive_target:
                    if (self._limbo_children_names[parent].get(filename)
                        in (trans_id, None)):
                        use_direct_path = True
                else:
                    for l_filename, l_trans_id in\
                        self._limbo_children_names[parent].iteritems():
                        if l_trans_id == trans_id:
                            continue
                        if l_filename.lower() == filename.lower():
                            break
                    else:
                        use_direct_path = True

        if not use_direct_path:
            return DiskTreeTransform._generate_limbo_path(self, trans_id)

        limbo_name = pathjoin(self._limbo_files[parent], filename)
        self._limbo_children[parent].add(trans_id)
        self._limbo_children_names[parent][filename] = trans_id
        return limbo_name
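    # Illustrative sketch (not part of the original module): when a directory
    # and its children are created in the same transform, the children are
    # written directly inside the directory's limbo path, so apply() only has
    # to rename the directory itself.  `wt` is an assumed WorkingTree.
    #
    #   tt = TreeTransform(wt)
    #   try:
    #       pkg_id = tt.new_directory('pkg', tt.root)
    #       tt.new_file('__init__.py', pkg_id, [''])   # no extra rename needed
    #       tt.apply()
    #   finally:
    #       tt.finalize()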
    def apply(self, no_conflicts=False, precomputed_delta=None, _mover=None):
        """Apply all changes to the inventory and filesystem."""

    def _apply_insertions(self, mover):
        """Perform tree operations that insert directory/inventory names.

        That is, create any files that need to be created, and restore from
        limbo any files that needed renaming.  This must be done in strict
        parent-to-child order.

        If inventory_delta is None, no inventory delta is calculated, and
        no list of modified paths is returned.
        """
        new_paths = self.new_paths(filesystem_only=True)
        modified_paths = []
        new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
                                 new_paths)
        child_pb = ui.ui_factory.nested_progress_bar()
        try:
            for num, (path, trans_id) in enumerate(new_paths):
                if (num % 10) == 0:
                    child_pb.update(gettext('adding file'), num, len(new_paths))
                full_path = self._tree.abspath(path)
                if trans_id in self._needs_rename:
                    try:
                        mover.rename(self._limbo_name(trans_id), full_path)
                    except errors.TransformRenameFailed, e:
                        # We may be renaming a dangling inventory id
                        if e.errno != errno.ENOENT:
                            raise
                    else:
                        self.rename_count += 1
                    # TODO: if trans_id in self._observed_sha1s, we should
                    #       re-stat the final target, since ctime will be
                    #       updated by the change.
                if (trans_id in self._new_contents or
                    self.path_changed(trans_id)):
                    if trans_id in self._new_contents:
                        modified_paths.append(full_path)
                if trans_id in self._new_executability:
                    self._set_executability(path, trans_id)
                if trans_id in self._observed_sha1s:
                    o_sha1, o_st_val = self._observed_sha1s[trans_id]
                    st = osutils.lstat(full_path)
                    self._observed_sha1s[trans_id] = (o_sha1, st)
        finally:
            child_pb.finished()
        for path, trans_id in new_paths:
            # new_paths includes stuff like workingtree conflicts. Only the
            # stuff in new_contents actually comes from limbo.
            if trans_id in self._limbo_files:
                del self._limbo_files[trans_id]
        self._new_contents.clear()
        return modified_paths
    def _apply_observed_sha1s(self):
        """After we have finished renaming everything, update observed sha1s

        This has to be done after self._tree.apply_inventory_delta, otherwise
        it doesn't know anything about the files we are updating. Also, we want
        to do this as late as possible, so that most entries end up cached.
        """
        # TODO: this doesn't update the stat information for directories. So
        #       the first 'bzr status' will still need to rewrite
        #       .bzr/checkout/dirstate. However, we at least don't need to
        #       re-read all of the files.
        # TODO: If the operation took a while, we could do a time.sleep(3) here
        #       to allow the clock to tick over and ensure we won't have any
        #       problems. (we could observe start time, and finish time, and if
        #       it is less than eg 10% overhead, add a sleep call.)
        paths = FinalPaths(self)
        for trans_id, observed in self._observed_sha1s.iteritems():
            path = paths.get_path(trans_id)
            # We could get the file_id, but dirstate prefers to use the path
            # anyway, and it is 'cheaper' to determine.
            # file_id = self._new_id[trans_id]
            self._tree._observed_sha1(None, path, observed)

class TransformPreview(DiskTreeTransform):
    """A TreeTransform for generating preview trees.

    Unlike TreeTransform, this version works when the input tree is a
    MemoryTree or some other tree that cannot be manipulated normally.
    """

def _create_files(tt, tree, desired_files, pb, offset, accelerator_tree,
                  hardlink):
    total = len(desired_files) + offset
    wt = tt._tree
    if accelerator_tree is None:
        new_desired_files = desired_files
    else:
        iter = accelerator_tree.iter_changes(tree, include_unchanged=True)
        unchanged = [(f, p[1]) for (f, p, c, v, d, n, k, e)
                     in iter if not (c or e[0] != e[1])]
        if accelerator_tree.supports_content_filtering():
            unchanged = [(f, p) for (f, p) in unchanged
                         if not accelerator_tree.iter_search_rules([p]).next()]
        unchanged = dict(unchanged)
        new_desired_files = []
        count = 0
        for file_id, (trans_id, tree_path, text_sha1) in desired_files:
            accelerator_path = unchanged.get(file_id)
            if accelerator_path is None:
                new_desired_files.append((file_id,
                    (trans_id, tree_path, text_sha1)))
                continue
            pb.update(gettext('Adding file contents'), count + offset, total)
            if hardlink:
                tt.create_hardlink(accelerator_tree.abspath(accelerator_path),
                                   trans_id)
            else:
                contents = accelerator_tree.get_file(file_id, accelerator_path)
                if wt.supports_content_filtering():
                    filters = wt._content_filter_stack(tree_path)
                    contents = filtered_output_bytes(contents, filters,
                        ContentFilterContext(tree_path, tree))
                try:
                    tt.create_file(contents, trans_id, sha1=text_sha1)
                finally:
                    try:
                        contents.close()
                    except AttributeError:
                        # after filtering, contents may no longer be file-like
                        pass
            count += 1
        offset += count
    for count, ((trans_id, tree_path, text_sha1), contents) in enumerate(
            tree.iter_files_bytes(new_desired_files)):
        if wt.supports_content_filtering():
            filters = wt._content_filter_stack(tree_path)
            contents = filtered_output_bytes(contents, filters,
                ContentFilterContext(tree_path, tree))
        tt.create_file(contents, trans_id, sha1=text_sha1)
        pb.update(gettext('Adding file contents'), count + offset, total)

def _reparent_children(tt, old_parent, new_parent):
    for child in tt.iter_tree_children(old_parent):
        tt.adjust_path(tt.final_name(child), new_parent, child)


def _reparent_transform_children(tt, old_parent, new_parent):
    by_parent = tt.by_parent()
    for child in by_parent[old_parent]:
        tt.adjust_path(tt.final_name(child), new_parent, child)
    return by_parent[old_parent]

def _content_match(tree, entry, file_id, kind, target_path):
    if entry.kind != kind:
        return False
    if entry.kind == "directory":
        return True
    if entry.kind == "file":
        f = file(target_path, 'rb')
        try:
            if tree.get_file_text(file_id) == f.read():
                return True
        finally:
            f.close()
    elif entry.kind == "symlink":
        if tree.get_symlink_target(file_id) == os.readlink(target_path):
            return True
    return False
                        if basis_tree is None:
                            basis_tree = working_tree.basis_tree()
                            basis_tree.lock_read()
                        if basis_tree.has_id(file_id):
                            if wt_sha1 != basis_tree.get_file_sha1(file_id):
                                keep_content = True
                        elif target_kind is None and not target_versioned:
                            keep_content = True
                if wt_kind is not None:
                    if not keep_content:
                        tt.delete_contents(trans_id)
                    elif target_kind is not None:
                        parent_trans_id = tt.trans_id_file_id(wt_parent)
                        backup_name = tt._available_backup_name(
                            wt_name, parent_trans_id)
                        tt.adjust_path(backup_name, parent_trans_id, trans_id)
                        new_trans_id = tt.create_path(wt_name, parent_trans_id)
                        if wt_versioned and target_versioned:
                            tt.unversion_file(trans_id)
                            tt.version_file(file_id, new_trans_id)
                        # New contents should have the same unix perms as old
                        # contents
                        mode_id = trans_id
                        trans_id = new_trans_id
                if target_kind in ('directory', 'tree-reference'):
                    tt.create_directory(trans_id)
                    if target_kind == 'tree-reference':
                        revision = target_tree.get_reference_revision(file_id,
                                                                      target_path)
                        tt.set_tree_reference(revision, trans_id)
                elif target_kind == 'symlink':
                    tt.create_symlink(target_tree.get_symlink_target(file_id),
                                      trans_id)
                elif target_kind == 'file':
                    deferred_files.append((file_id, (trans_id, mode_id)))
                    if basis_tree is None:
                        basis_tree = working_tree.basis_tree()
                        basis_tree.lock_read()
                    new_sha1 = target_tree.get_file_sha1(file_id)
                    if (basis_tree.has_id(file_id) and
                        new_sha1 == basis_tree.get_file_sha1(file_id)):
                        if file_id in merge_modified:
                            del merge_modified[file_id]
                    else:
                        merge_modified[file_id] = new_sha1

                    # preserve the execute bit when backing up
                    if keep_content and wt_executable == target_executable:
                        tt.set_executability(target_executable, trans_id)
                elif target_kind is not None:
                    raise AssertionError(target_kind)
            if not wt_versioned and target_versioned:
                tt.version_file(file_id, trans_id)
            if wt_versioned and not target_versioned:
                tt.unversion_file(trans_id)
            if (target_name is not None and
                (wt_name != target_name or wt_parent != target_parent)):
                if target_name == '' and target_parent is None:
                    parent_trans = ROOT_PARENT
                else:
                    parent_trans = tt.trans_id_file_id(target_parent)
                if wt_parent is None and wt_versioned:
                    tt.adjust_root_path(target_name, parent_trans)
                else:
                    tt.adjust_path(target_name, parent_trans, trans_id)
            if wt_executable != target_executable and target_kind == "file":
                tt.set_executability(target_executable, trans_id)
        if working_tree.supports_content_filtering():
            for index, ((trans_id, mode_id), bytes) in enumerate(
                target_tree.iter_files_bytes(deferred_files)):
                file_id = deferred_files[index][0]
                # We're reverting a tree to the target tree so using the
                # target tree to find the file path seems the best choice
                # here IMO - Ian C 27/Oct/2009
                filter_tree_path = target_tree.id2path(file_id)
                filters = working_tree._content_filter_stack(filter_tree_path)
                bytes = filtered_output_bytes(bytes, filters,
                    ContentFilterContext(filter_tree_path, working_tree))
                tt.create_file(bytes, trans_id, mode_id)
        else:
            for (trans_id, mode_id), bytes in target_tree.iter_files_bytes(
                deferred_files):
                tt.create_file(bytes, trans_id, mode_id)
        tt.fixup_new_roots()
    finally:
        if basis_tree is not None:
            basis_tree.unlock()
    return merge_modified

def resolve_conflicts(tt, pb=None, pass_func=None):
    """Make many conflict-resolution attempts, but die if they fail"""
    if pass_func is None:
        pass_func = conflict_pass
    new_conflicts = set()
    pb = ui.ui_factory.nested_progress_bar()
    try:
        for n in range(10):
            pb.update(gettext('Resolution pass'), n+1, 10)
            conflicts = tt.find_conflicts()
            if len(conflicts) == 0:
                return new_conflicts
            new_conflicts.update(pass_func(tt, conflicts))
        raise MalformedTransform(conflicts=conflicts)
    finally:
        pb.finished()
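
# Illustrative sketch (not part of the original module): callers usually run
# resolve_conflicts() on a fully built transform and apply it afterwards; after
# ten passes without reaching zero conflicts it raises MalformedTransform.
# `tt` is an assumed, already-populated TreeTransform.
#
#   new_conflicts = resolve_conflicts(tt)
#   tt.apply()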

def conflict_pass(tt, conflicts, path_tree=None):