# Copyright (C) 2005, 2006, 2007, 2008 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

def commit(branch, message,
           allow_pointless=True):
    """Commit working copy as a new revision.

    The basic approach is to add all the file texts into the
    store, then the inventory, then make a new revision pointing
    to that inventory and store that.

    This is not quite safe if the working copy changes during the
    commit; for the moment that is simply not allowed.  A better
    approach is to make a temporary copy of the files before
    computing their hashes, and then add those hashes in turn to
    the inventory.  This should mean at least that there are no
    broken hash pointers.  There is no way we can get a snapshot
    of the whole directory at an instant.  This would also have to
    be robust against files disappearing, moving, etc.  So the
    whole thing is a bit hard.

    This raises PointlessCommit if there are no changes, no new merges,
    and allow_pointless is false.

    timestamp -- if not None, seconds-since-epoch for a
        postdated/predated commit.

    specific_files -- If true, commit only those files.

    rev_id -- If set, use this as the new revision id.
        Useful for test or import commands that need to tightly
        control what revisions are assigned.  If you duplicate
        a revision id that exists elsewhere it is your own fault.
        If null (default), a time/random revision id is generated.
    """

# The newly committed revision is going to have a shape corresponding
# to that of the working tree.  Files that are not in the
# working tree and that were in the predecessor are reported as
# removed --- this can include files that were either removed from the
# inventory or deleted in the working tree.  If they were only
# deleted from disk, they are removed from the working inventory.

# We then consider the remaining entries, which will be in the new
# version.  Directory entries are simply copied across.  File entries
# must be checked to see if a new version of the file should be
# recorded.  For each parent revision tree, we check to see what
# version of the file was present.  If the file was present in at
# least one tree, and if it was the same version in all the trees,
# then we can just refer to that version.  Otherwise, a new version
# representing the merger of the file versions must be added.
# (An illustrative sketch of this selection rule follows the imports
# below.)

# TODO: Update hashcache before and after - or does the WorkingTree

# TODO: Rather than mashing together the ancestry and storing it back,
# perhaps the weave should have a single method which does it all in one
# go, avoiding a lot of redundant work.

# TODO: Perhaps give a warning if one of the revisions marked as
# merged is already in the ancestry, and then don't record it as a

# TODO: If the file is newly merged but unchanged from the version it
# merges from, then it should still be reported as newly added
# relative to the basis revision.

# TODO: Change the parameter 'rev_id' to 'revision_id' to be consistent with
# the rest of the code; add a deprecation of the old name.

from cStringIO import StringIO

import bzrlib
from bzrlib import (
    debug,
    errors,
    revision,
    trace,
    tree,
    ui,
    xml_serializer,
    )
from bzrlib.branch import Branch
from bzrlib.errors import (BzrError, PointlessCommit,
                           ConflictsInTree,
                           StrictCommitFailed
                           )
from bzrlib.osutils import (get_user_encoding,
                            kind_marker, isdir, isfile, is_inside_any,
                            is_inside_or_parent_of_any,
                            minimum_path_selection,
                            quotefn, sha_file, split_lines,
                            splitpath,
                            )
from bzrlib.testament import Testament
from bzrlib.trace import mutter, note, warning, is_quiet
from bzrlib.inventory import Inventory, InventoryEntry, make_entry
from bzrlib import symbol_versioning
from bzrlib.symbol_versioning import (deprecated_passed,
                                      )
from bzrlib.workingtree import WorkingTree
from bzrlib.urlutils import unescape_for_display
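

# ---------------------------------------------------------------------------
# Illustrative sketch (not part of bzrlib): the per-file version selection
# rule described in the comments near the top of this module.  A file that is
# present with the same version in every parent tree that has it can simply
# be carried over; anything else needs a new version recorded.  The helper
# below models that decision on plain dictionaries mapping file ids to
# version ids; the names and data shapes are assumptions for illustration
# only.
# ---------------------------------------------------------------------------

def _sketch_select_file_version(file_id, parent_versions):
    """Return (carry_over, version) for a file across its parent trees.

    :param parent_versions: list of dicts, one per parent tree, mapping
        file_id -> version_id (a file missing from a tree is simply absent
        from that dict).
    :return: (True, version_id) when one identical version exists in every
        tree that has the file; (False, None) when a new merged version
        must be recorded.
    """
    seen = set()
    for versions in parent_versions:
        if file_id in versions:
            seen.add(versions[file_id])
    if len(seen) == 1:
        # Present in at least one tree and identical everywhere it appears:
        # the existing version can be referred to directly.
        return True, seen.pop()
    # Either absent from all parents (a new file) or present with differing
    # versions (a merge): a new version must be added.
    return False, None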


class NullCommitReporter(object):
    """I report on progress of a commit."""

    def started(self, revno, revid, location=None):
        if location is None:
            symbol_versioning.warn("As of bzr 1.0 you must pass a location "
                                   "to started.", DeprecationWarning)

    def snapshot_change(self, change, path):
        pass

    def completed(self, revno, rev_id):
        pass

    def deleted(self, path):
        pass

    def escaped(self, escape_count, message):
        pass

    def missing(self, path):
        pass

    def renamed(self, change, old_path, new_path):
        pass

    def is_verbose(self):
        return False


class ReportCommitToLog(NullCommitReporter):

    def _note(self, format, *args):
        """Subclasses may choose to override this method."""
        note(format, *args)

    def snapshot_change(self, change, path):
        if path == '' and change in ('added', 'modified'):
            return
        self._note("%s %s", change, path)

    def started(self, revno, rev_id, location=None):
        if location is not None:
            location = ' to: ' + unescape_for_display(location, 'utf-8')
        else:
            # When started was added, location was only made optional by
            # accident.  Matt Nordhoff 20071129
            symbol_versioning.warn("As of bzr 1.0 you must pass a location "
                                   "to started.", DeprecationWarning)
            location = ''
        self._note('Committing%s', location)

    def completed(self, revno, rev_id):
        self._note('Committed revision %d.', revno)

    def deleted(self, path):
        self._note('deleted %s', path)

    def escaped(self, escape_count, message):
        self._note("replaced %d control characters in message", escape_count)

    def missing(self, path):
        self._note('missing %s', path)

    def renamed(self, change, old_path, new_path):
        self._note('%s %s => %s', change, old_path, new_path)

    def is_verbose(self):
        return True
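

# ---------------------------------------------------------------------------
# Illustrative sketch (not part of bzrlib): the reporter interface above is a
# set of notification callbacks, so a caller can supply its own implementation
# through the 'reporter' argument documented on Commit.commit().  The class
# below is an assumption-level example, not an existing bzrlib class: it
# collects deleted and missing paths while reusing the silent defaults of
# NullCommitReporter.
# ---------------------------------------------------------------------------

class _SketchCountingReporter(NullCommitReporter):
    """Collect deleted and missing paths instead of printing them."""

    def __init__(self):
        self.deleted_paths = []
        self.missing_paths = []

    def deleted(self, path):
        self.deleted_paths.append(path)

    def missing(self, path):
        self.missing_paths.append(path)

    def completed(self, revno, rev_id):
        note('Committed revision %d (%d deletions).',
             revno, len(self.deleted_paths))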


class Commit(object):
    """Task of committing a new revision.

    This is a MethodObject: it accumulates state as the commit is
    prepared, and then it is discarded.  It doesn't represent
    historical revisions, just the act of recording a new one.

        Modified to hold a list of files that have been deleted from
        the working directory; these should be removed from the
        working inventory.
    """
    from bzrlib.osutils import local_time_offset, username
    from bzrlib.branch import gen_file_id
    from bzrlib.errors import BzrError, PointlessCommit
    from bzrlib.revision import Revision, RevisionReference
    from bzrlib.trace import mutter, note
    from bzrlib.xml import serializer_v4

    # First walk over the working inventory; and both update that
    # and also build a new revision inventory.  The revision
    # inventory needs to hold the text-id, sha1 and size of the
    # actual file versions committed in the revision.  (These are
    # not present in the working inventory.)  We also need to
    # detect missing/deleted files, and remove them from the
    # working inventory.

    work_tree = branch.working_tree()
    work_inv = work_tree.inventory
    basis = branch.basis_tree()
    basis_inv = basis.inventory

    # note('looking for changes...')
    # print 'looking for changes...'
    # disabled; should be done at a higher level

    pending_merges = branch.pending_merges()

    missing_ids, new_inv, any_changes = \
        _gather_commit(branch,

    if not (any_changes or allow_pointless or pending_merges):
        """Create a Commit object.

        :param reporter: the default reporter to use or None to decide later
        """
        self.reporter = reporter

               allow_pointless=True,
               message_callback=None,
               possible_master_transports=None):
        """Commit working copy as a new revision.

        :param message: the commit message (it or message_callback is required)
        :param timestamp: if not None, seconds-since-epoch for a
            postdated/predated commit.
        :param specific_files: If true, commit only those files.
        :param rev_id: If set, use this as the new revision id.
            Useful for test or import commands that need to tightly
            control what revisions are assigned.  If you duplicate
            a revision id that exists elsewhere it is your own fault.
            If null (default), a time/random revision id is generated.
        :param allow_pointless: If true (default), commit even if nothing
            has changed and no merges are recorded.
        :param strict: If true, don't allow a commit if the working tree
            contains unknown files.
        :param revprops: Properties for new revision
        :param local: Perform a local only commit.
        :param reporter: the reporter to use or None for the default
        :param verbose: if True and the reporter is not None, report everything
        :param recursive: If set to 'down', commit in any subtrees that have
            pending changes of any sort during this commit.
        :param exclude: None or a list of relative paths to exclude from the
            commit.  Pending changes to excluded files will be ignored by the
            commit.
        """
        mutter('preparing to commit')

        if working_tree is None:
            raise BzrError("working_tree must be passed into commit().")
        self.work_tree = working_tree
        self.branch = self.work_tree.branch
        if getattr(self.work_tree, 'requires_rich_root', lambda: False)():
            if not self.branch.repository.supports_rich_root():
                raise errors.RootNotRich()
        if message_callback is None:
            if message is not None:
                if isinstance(message, str):
                    message = message.decode(get_user_encoding())
                message_callback = lambda x: message
            else:
                raise BzrError("The message or message_callback keyword"
                               " parameter is required for commit().")

        self.bound_branch = None
        self.any_entries_deleted = False
        if exclude is not None:
            self.exclude = sorted(
                minimum_path_selection(exclude))

        self.master_branch = None
        self.master_locked = False
        self.recursive = recursive
        if specific_files is not None:
            self.specific_files = sorted(
                minimum_path_selection(specific_files))
        else:
            self.specific_files = None

        self.allow_pointless = allow_pointless
        self.revprops = revprops
        self.message_callback = message_callback
        self.timestamp = timestamp
        self.timezone = timezone
        self.committer = committer
        self.verbose = verbose

        self.work_tree.lock_write()
        self.parents = self.work_tree.get_parent_ids()
        # We can use record_iter_changes IFF iter_changes is compatible with
        # the command line parameters, and the repository has fast delta
        # generation.  See bug 347649.
        self.use_record_iter_changes = (
            not self.specific_files and
            not self.branch.repository._format.supports_tree_reference and
            (self.branch.repository._format.fast_deltas or
             len(self.parents) < 2))
        self.pb = bzrlib.ui.ui_factory.nested_progress_bar()
        self.basis_revid = self.work_tree.last_revision()
        self.basis_tree = self.work_tree.basis_tree()
        self.basis_tree.lock_read()

        # Cannot commit with conflicts present.
        if len(self.work_tree.conflicts()) > 0:
            raise ConflictsInTree

        # Set up the bound branch variables as needed.
        self._check_bound_branch(possible_master_transports)

        # Check that the working tree is up to date.
        old_revno, new_revno = self._check_out_of_date_tree()

        # Complete configuration setup.
        if reporter is not None:
            self.reporter = reporter
        elif self.reporter is None:
            self.reporter = self._select_reporter()
        if self.config is None:
            self.config = self.branch.get_config()

        self._set_specific_file_ids()

        # Set up the progress bar.  As the number of files that need to be
        # committed is unknown, progress is reported as stages.
        # We keep track of entries separately though and include that
        # information in the progress bar during the relevant stages.
        self.pb_stage_name = ""
        self.pb_stage_count = 0
        self.pb_stage_total = 5
        if self.bound_branch:
            self.pb_stage_total += 1
        self.pb.show_pct = False
        self.pb.show_spinner = False
        self.pb.show_eta = False
        self.pb.show_count = True
        self.pb.show_bar = True

        self._gather_parents()
        # After a merge, a selected file commit is not supported.
        # See 'bzr help merge' for an explanation as to why.
        if len(self.parents) > 1 and self.specific_files:
            raise errors.CannotCommitSelectedFileMerge(self.specific_files)
        # Excludes are a form of selected file commit.
        if len(self.parents) > 1 and self.exclude:
            raise errors.CannotCommitSelectedFileMerge(self.exclude)

        # Collect the changes.
        self._set_progress_stage("Collecting changes", counter=True)
        self.builder = self.branch.get_commit_builder(self.parents,
            self.config, timestamp, timezone, committer, revprops, rev_id)

        self.builder.will_record_deletes()
        # Find the location being committed to.
        if self.bound_branch:
            master_location = self.master_branch.base
        else:
            master_location = self.branch.base

        # Report the start of the commit.
        self.reporter.started(new_revno, self.rev_id, master_location)

        self._update_builder_with_changes()
        self._check_pointless()

        # TODO: Now the new inventory is known, check for conflicts.
        # ADHB 2006-08-08: If this is done, populate_new_inv should not add
        # weave lines, because nothing should be recorded until it is known
        # that commit will succeed.
        self._set_progress_stage("Saving data locally")
        self.builder.finish_inventory()

        # Prompt the user for a commit message if none provided.
        message = message_callback(self)
        self.message = message
        self._escape_commit_message()

        # Add revision data to the local branch.
        self.rev_id = self.builder.commit(self.message)

        mutter("aborting commit write group because of exception:")
        trace.log_exception_quietly()
        note("aborting commit write group: %r" % (e,))

        self._process_pre_hooks(old_revno, new_revno)

        # Upload revision data to the master.
        # This will propagate merged revisions too if needed.
        if self.bound_branch:
            self._set_progress_stage("Uploading data to master branch")
            # 'commit' to the master first so a timeout here causes the
            # local branch to be out of date.
            self.master_branch.import_last_revision_info(
                self.branch.repository, new_revno, self.rev_id)

        # and now do the commit locally.
        self.branch.set_last_revision_info(new_revno, self.rev_id)

        # Make the working tree up to date with the branch.
        self._set_progress_stage("Updating the working tree")
        self.work_tree.update_basis_by_delta(self.rev_id,
            self.builder.get_basis_delta())
        self.reporter.completed(new_revno, self.rev_id)
        self._process_post_hooks(old_revno, new_revno)

    def _select_reporter(self):
        """Select the CommitReporter to use."""
        if is_quiet():
            return NullCommitReporter()
        return ReportCommitToLog()

    def _check_pointless(self):
        if self.allow_pointless:
            return
        # A merge with no effect on files.
        if len(self.parents) > 1:
            return
        # TODO: we could simplify this by using self.builder.basis_delta.

        # The initial commit adds a root directory, but this in itself is not
        # a worthwhile commit.
        if (self.basis_revid == revision.NULL_REVISION and
            ((self.builder.new_inventory is not None and
              len(self.builder.new_inventory) == 1) or
             len(self.builder._basis_delta) == 1)):
            raise PointlessCommit()
    for file_id in missing_ids:
        # Any files that have been deleted are now removed from the
        # working inventory.  Files that were not selected for commit
        # are left as they were in the working inventory and omitted
        # from the revision inventory.

        # have to do this later so we don't mess up the iterator.
        # since parents may be removed before their children we

        # FIXME: There's probably a better way to do this; perhaps
        # the workingtree should know how to filter itself.
        if work_inv.has_id(file_id):
            del work_inv[file_id]

    rev_id = _gen_revision_id(branch, time.time())

    inv_tmp = tempfile.TemporaryFile()
    serializer_v4.write_inventory(new_inv, inv_tmp)
    branch.inventory_store.add(inv_tmp, inv_id)
    mutter('new inventory_id is {%s}' % inv_id)

    # We could also just sha hash the inv_tmp file
    # however, in the case that branch.inventory_store.add()
    # ever actually does anything special
    inv_sha1 = branch.get_inventory_sha1(inv_id)

    branch._write_inventory(work_inv)

    if timestamp == None:
        timestamp = time.time()

    if committer == None:
        committer = username(branch)

    if timezone == None:
        timezone = local_time_offset()

    mutter("building commit log message")
    rev = Revision(timestamp=timestamp,
                   inventory_sha1=inv_sha1,

    precursor_id = branch.last_patch()
    precursor_sha1 = branch.get_revision_sha1(precursor_id)
    rev.parents.append(RevisionReference(precursor_id, precursor_sha1))
    for merge_rev in pending_merges:
        rev.parents.append(RevisionReference(merge_rev))

    rev_tmp = tempfile.TemporaryFile()
    serializer_v4.write_revision(rev, rev_tmp)
    branch.revision_store.add(rev_tmp, rev_id)
    mutter("new revision_id is {%s}" % rev_id)

    ## XXX: Everything up to here can simply be orphaned if we abort
    ## the commit; it will leave junk files behind but that doesn't

    ## TODO: Read back the just-generated changeset, and make sure it
    ## applies and recreates the right state.

    ## TODO: Also calculate and store the inventory SHA1
    mutter("committing patch r%d" % (branch.revno() + 1))

    branch.append_revision(rev_id)

    branch.set_pending_merges([])

    # disabled; should go through logging
    # note("committed r%d" % branch.revno())
    # print ("committed r%d" % branch.revno())


def _gen_revision_id(branch, when):
    """Return new revision-id."""
    from binascii import hexlify
    from bzrlib.osutils import rand_bytes, compact_date, user_email

    s = '%s-%s-' % (user_email(branch), compact_date(when))
    s += hexlify(rand_bytes(8))
    return s
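

# ---------------------------------------------------------------------------
# Illustrative sketch (not part of bzrlib): _gen_revision_id above builds ids
# of the general shape "<user email>-<compact date>-<16 hex chars>".  The
# exact output of compact_date() and rand_bytes() is an implementation detail
# of bzrlib.osutils; the stdlib-only helper below merely imitates that shape
# so it is easy to see, e.g. something like
# 'jrandom@example.com-20080101123456-9f86d081884c7d65'.
# ---------------------------------------------------------------------------

def _sketch_revision_id(email, when):
    """Return a revision-id-shaped string for illustration only."""
    import binascii
    import os
    import time
    compact = time.strftime('%Y%m%d%H%M%S', time.gmtime(when))
    return '%s-%s-%s' % (email, compact, binascii.hexlify(os.urandom(8)))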


def _gather_commit(branch, work_tree, work_inv, basis_inv, specific_files,
    """Build inventory preparatory to commit.

    Returns missing_ids, new_inv, any_changes.

    This adds any changed files into the text store, and sets their
    text-id, sha and size in the returned inventory appropriately.

    missing_ids
        Modified to hold a list of files that have been deleted from
        the working directory; these should be removed from the
        working inventory.
    """
    from bzrlib.inventory import Inventory
    from bzrlib.osutils import isdir, isfile, sha_string, quotefn, \
         local_time_offset, username, kind_marker, is_inside_any
    from bzrlib.branch import gen_file_id
    from bzrlib.errors import BzrError
    from bzrlib.revision import Revision
    from bzrlib.trace import mutter, note

    inv = Inventory(work_inv.root.file_id)

    for path, entry in work_inv.iter_entries():
        ## TODO: Check that the file kind has not changed from the previous
        ## revision of this file (if any).

        p = branch.abspath(path)
        file_id = entry.file_id
        mutter('commit prep file %s, id %r ' % (p, file_id))

        if specific_files and not is_inside_any(specific_files, path):
            mutter('  skipping file excluded from commit')
            if basis_inv.has_id(file_id):
                # carry over with previous state
                inv.add(basis_inv[file_id].copy())
            # omit this from committed inventory

        if not work_tree.has_id(file_id):
            print('deleted %s%s' % (path, kind_marker(entry.kind)))
            mutter("    file is missing, removing from inventory")
            missing_ids.append(file_id)

        # this is present in the new inventory; may be new, modified or
        # unchanged
        old_ie = basis_inv.has_id(file_id) and basis_inv[file_id]

        old_kind = old_ie.kind
        if old_kind != entry.kind:
            raise BzrError("entry %r changed kind from %r to %r"
                           % (file_id, old_kind, entry.kind))

        if entry.kind == 'directory':
            if not isdir(p):
                raise BzrError("%s is entered as directory but not a directory"
        elif entry.kind == 'file':
            if not isfile(p):
                raise BzrError("%s is entered as file but is not a file" % quotefn(p))

        new_sha1 = work_tree.get_file_sha1(file_id)
        if (old_ie
            and old_ie.text_sha1 == new_sha1):
            ## assert content == basis.get_file(file_id).read()
            entry.text_id = old_ie.text_id
            entry.text_sha1 = new_sha1
            entry.text_size = old_ie.text_size
            mutter('    unchanged from previous text_id {%s}' %
        else:
            content = file(p, 'rb').read()

            # calculate the sha again, just in case the file contents
            # changed since we updated the cache
            entry.text_sha1 = sha_string(content)
            entry.text_size = len(content)

            entry.text_id = gen_file_id(entry.name)
            branch.text_store.add(content, entry.text_id)
            mutter('    stored with text_id {%s}' % entry.text_id)

        marked = path + kind_marker(entry.kind)
        if not old_ie:
            print 'added', marked
        elif old_ie == entry:
            pass
        elif (old_ie.name == entry.name
              and old_ie.parent_id == entry.parent_id):
            print 'modified', marked
        else:
            print 'renamed', marked

    return missing_ids, inv, any_changes
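
# Note on the reporting above: the old-style commit classifies each surviving
# entry by comparing it with its basis entry (old_ie).  No basis entry means
# 'added'; an identical entry is unchanged and not reported; the same name
# and parent but different content is 'modified'; anything else is 'renamed'.
# Deleted files are reported separately as they are removed from the
# inventory.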
        if self.builder.any_changes():
            return
        raise PointlessCommit()

    def _check_bound_branch(self, possible_master_transports=None):
        """Check to see if the local branch is bound.

        If it is bound, then most of the commit will actually be
        done using the remote branch as the target branch.
        Only at the end will the local branch be updated.
        """
        if self.local and not self.branch.get_bound_location():
            raise errors.LocalRequiresBoundBranch()

        self.master_branch = self.branch.get_master_branch(
            possible_master_transports)

        if not self.master_branch:
            # make this branch the reference branch for out of date checks.
            self.master_branch = self.branch
            return

        # If the master branch is bound, we must fail.
        master_bound_location = self.master_branch.get_bound_location()
        if master_bound_location:
            raise errors.CommitToDoubleBoundBranch(self.branch,
                    self.master_branch, master_bound_location)

        # TODO: jam 20051230 We could automatically push local
        #       commits to the remote branch if they would fit.
        #       But for now, just require remote to be identical

        # Make sure the local branch is identical to the master.
        master_info = self.master_branch.last_revision_info()
        local_info = self.branch.last_revision_info()
        if local_info != master_info:
            raise errors.BoundBranchOutOfDate(self.branch,

        # Now things are ready to change the master branch.
        self.bound_branch = self.branch
        self.master_branch.lock_write()
        self.master_locked = True

    def _check_out_of_date_tree(self):
        """Check that the working tree is up to date.

        :return: old_revision_number, new_revision_number tuple
        """
        try:
            first_tree_parent = self.work_tree.get_parent_ids()[0]
        except IndexError:
            # if there are no parents, treat our parent as 'None'
            # this is so that we still consider the master branch
            # - in a checkout scenario the tree may have no
            # parents but the branch may do.
            first_tree_parent = bzrlib.revision.NULL_REVISION
        old_revno, master_last = self.master_branch.last_revision_info()
        if master_last != first_tree_parent:
            if master_last != bzrlib.revision.NULL_REVISION:
                raise errors.OutOfDateTree(self.work_tree)
        if self.branch.repository.has_revision(first_tree_parent):
            new_revno = old_revno + 1
        # ghost parents never appear in revision history.
        return old_revno, new_revno

    def _process_pre_hooks(self, old_revno, new_revno):
        """Process any registered pre commit hooks."""
        self._set_progress_stage("Running pre_commit hooks")
        self._process_hooks("pre_commit", old_revno, new_revno)

    def _process_post_hooks(self, old_revno, new_revno):
        """Process any registered post commit hooks."""
        # Process the post commit hooks, if any.
        self._set_progress_stage("Running post_commit hooks")
        # old style commit hooks - should be deprecated? (obsoleted in
        if self.config.post_commit() is not None:
            hooks = self.config.post_commit().split(' ')
            # this would be nicer with twisted.python.reflect.namedAny
            for hook in hooks:
                result = eval(hook + '(branch, rev_id)',
                              {'branch': self.branch,
                               'rev_id': self.rev_id})
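        # Note on the old-style hooks just above: the 'post_commit'
        # configuration value is split on spaces, so it can name several
        # Python functions by dotted path, and each one is invoked as
        # function(branch, rev_id) with the names supplied to eval().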
        # process new style post commit hooks
        self._process_hooks("post_commit", old_revno, new_revno)

    def _process_hooks(self, hook_name, old_revno, new_revno):
        if not Branch.hooks[hook_name]:
            return

        # new style commit hooks:
        if not self.bound_branch:
            hook_master = self.branch
        else:
            hook_master = self.master_branch
            hook_local = self.branch
        # With bound branches, when the master is behind the local branch,
        # the 'old_revno' and old_revid values here are incorrect.
        # XXX: FIXME ^. RBC 20060206
        if self.parents:
            old_revid = self.parents[0]
        else:
            old_revid = bzrlib.revision.NULL_REVISION

        if hook_name == "pre_commit":
            future_tree = self.builder.revision_tree()
            tree_delta = future_tree.changes_from(self.basis_tree,

        for hook in Branch.hooks[hook_name]:
            # show the running hook in the progress bar.  As hooks may
            # end up doing nothing (e.g. because they are not configured by
            # the user) this is still showing progress, not showing overall
            # actions - it's up to each plugin to show a UI if it wants to
            # (such as 'Emailing diff to foo@example.com').
            self.pb_stage_name = "Running %s hooks [%s]" % \
                (hook_name, Branch.hooks.get_hook_name(hook))
            self._emit_progress()
            if 'hooks' in debug.debug_flags:
                mutter("Invoking commit hook: %r", hook)
            if hook_name == "post_commit":
                hook(hook_local, hook_master, old_revno, old_revid, new_revno,
                     self.rev_id)
            elif hook_name == "pre_commit":
                hook(hook_local, hook_master,
                     old_revno, old_revid, new_revno, self.rev_id,
                     tree_delta, future_tree)

        """Cleanup any open locks, progress bars etc."""
        cleanups = [self._cleanup_bound_branch,
                    self.basis_tree.unlock,
                    self.work_tree.unlock,
        found_exception = None
        for cleanup in cleanups:
            # we want every cleanup to run no matter what.
            # so we have a catchall here, but we will raise the
            # last encountered exception up the stack: and
            # typically this will be useful enough.
        if found_exception is not None:
            # don't do a plain raise, because the last exception may have been
            # trashed, e is our sure-to-work exception even though it loses the
            # full traceback.  XXX: RBC 20060421 perhaps we could check the
            # exc_info and if it's the same one do a plain raise otherwise
            # 'raise e' as we do now.

    def _cleanup_bound_branch(self):
        """Executed at the end of a try/finally to cleanup a bound branch.

        If the branch wasn't bound, this is a no-op.
        If it was, it resets self.branch to the local branch, instead
        of the master.
        """
        if not self.bound_branch:
            return
        if self.master_locked:
            self.master_branch.unlock()

    def _escape_commit_message(self):
        """Replace xml-incompatible control characters."""
        # FIXME: RBC 20060419 this should be done by the revision
        # serialiser not by commit. Then we can also add an unescaper
        # in the deserializer and start roundtripping revision messages
        # precisely. See repository_implementations/test_repository.py
        self.message, escape_count = xml_serializer.escape_invalid_chars(
            self.message)
        self.reporter.escaped(escape_count, self.message)

    def _gather_parents(self):
        """Record the parents of a merge for merge detection."""
        # TODO: Make sure that this list doesn't contain duplicate
        # entries and the order is preserved when doing this.
        if self.use_record_iter_changes:
            return
        self.basis_inv = self.basis_tree.inventory
        self.parent_invs = [self.basis_inv]
        for revision in self.parents[1:]:
            if self.branch.repository.has_revision(revision):
                mutter('commit parent revision {%s}', revision)
                inventory = self.branch.repository.get_inventory(revision)
                self.parent_invs.append(inventory)
            else:
                mutter('commit parent ghost revision {%s}', revision)

    def _update_builder_with_changes(self):
        """Update the commit builder with the data about what has changed.
        """
        exclude = self.exclude
        specific_files = self.specific_files or []
        mutter("Selecting files for commit with filter %s", specific_files)

        if self.use_record_iter_changes:
            iter_changes = self.work_tree.iter_changes(self.basis_tree)
            iter_changes = self._filter_iter_changes(iter_changes)
            for file_id, path, fs_hash in self.builder.record_iter_changes(
                self.work_tree, self.basis_revid, iter_changes):
                self.work_tree._observed_sha1(file_id, path, fs_hash)
        else:
            # Build the new inventory
            self._populate_from_inventory()
            self._record_unselected()
            self._report_and_accumulate_deletes()

    def _filter_iter_changes(self, iter_changes):
        """Process iter_changes.

        This method reports on the changes in iter_changes to the user, and
        converts 'missing' entries in the iter_changes iterator to 'deleted'
        entries. 'missing' entries have their

        :param iter_changes: An iter_changes to process.
        :return: A generator of changes.
        """
        reporter = self.reporter
        report_changes = reporter.is_verbose()
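        # Layout of each change tuple, as assumed by the indexing below (see
        # Tree.iter_changes for the authoritative definition):
        #   (file_id, (old_path, new_path), changed_content, versioned,
        #    parent, name, kind, executable)
        # where versioned, parent, name, kind and executable are themselves
        # (old, new) pairs; hence change[1][1] is the new path and
        # change[3][1] the new versioned flag.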
        for change in iter_changes:
            old_path = change[1][0]
            new_path = change[1][1]
            versioned = change[3][1]
            versioned = change[3][1]
            if kind is None and versioned:
                reporter.missing(new_path)
                deleted_ids.append(change[0])
                # Reset the new path (None) and new versioned flag (False)
                change = (change[0], (change[1][0], None), change[2],
                    (change[3][0], False)) + change[4:]
            elif kind == 'tree-reference':
                if self.recursive == 'down':
                    self._commit_nested_tree(change[0], change[1][1])
            if change[3][0] or change[3][1]:
                if new_path is None:
                    reporter.deleted(old_path)
                elif old_path is None:
                    reporter.snapshot_change('added', new_path)
                elif old_path != new_path:
                    reporter.renamed('renamed', old_path, new_path)
                elif (new_path or
                      self.work_tree.branch.repository._format.rich_root_data):
                    # Don't report on changes to '' in non rich root
                    reporter.snapshot_change('modified', new_path)
                self._next_progress_entry()
        # Unversion IDs that were found to be deleted
        self.work_tree.unversion(deleted_ids)

    def _record_unselected(self):
        # If specific files are selected, then all un-selected files must be
        # recorded in their previous state.  For more details, see
        # https://lists.ubuntu.com/archives/bazaar/2007q3/028476.html.
        if self.specific_files or self.exclude:
            specific_files = self.specific_files or []
            for path, old_ie in self.basis_inv.iter_entries():
                if old_ie.file_id in self.builder.new_inventory:
                    # already added - skip.
                    continue
                if (is_inside_any(specific_files, path)
                    and not is_inside_any(self.exclude, path)):
                    # was inside the selected path, and not excluded - if not
                    # present it has been deleted so skip.
                    continue
                # From here down it was either not selected, or was excluded:
                # We preserve the entry unaltered.
                ie = old_ie.copy()
                # Note: specific file commits after a merge are currently
                # prohibited. This test is for sanity/safety in case it's
                # required after that changes.
                if len(self.parents) > 1:
                    ie.revision = None
                self.builder.record_entry_contents(ie, self.parent_invs, path,
                    self.basis_tree, None)

    def _report_and_accumulate_deletes(self):
        if (isinstance(self.basis_inv, Inventory)
            and isinstance(self.builder.new_inventory, Inventory)):
            # the older Inventory classes provide a _byid dict, and building a
            # set from the keys of this dict is substantially faster than even
            # getting a set of ids from the inventory
            #
            # <lifeless> set(dict) is roughly the same speed as
            # set(iter(dict)) and both are significantly slower than
            # set(dict.keys())
            deleted_ids = set(self.basis_inv._byid.keys()) - \
               set(self.builder.new_inventory._byid.keys())
        else:
            deleted_ids = set(self.basis_inv) - set(self.builder.new_inventory)
        if deleted_ids:
            self.any_entries_deleted = True
            deleted = [(self.basis_tree.id2path(file_id), file_id)
                for file_id in deleted_ids]
            # XXX: this is not quite directory-order sorting
            for path, file_id in deleted:
                self.builder.record_delete(path, file_id)
                self.reporter.deleted(path)

    def _check_strict(self):
        # XXX: when we use iter_changes this would likely be faster if
        # iter_changes would check for us (even in the presence of
        # raise an exception as soon as we find a single unknown.
        for unknown in self.work_tree.unknowns():
            raise StrictCommitFailed()

    def _populate_from_inventory(self):
        """Populate the CommitBuilder by walking the working tree inventory."""
        # Build the revision inventory.
        #
        # This starts by creating a new empty inventory. Depending on
        # which files are selected for commit, and what is present in the
        # current tree, the new inventory is populated. inventory entries
        # which are candidates for modification have their revision set to
        # None; inventory entries that are carried over untouched have their
        # revision set to their prior value.
        #
        # ESEPARATIONOFCONCERNS: this function is diffing and using the diff
        # results to create a new inventory at the same time, which results
        # in bugs like #46635.  Any reason not to use/enhance Tree.changes_from?
        specific_files = self.specific_files
        exclude = self.exclude
        report_changes = self.reporter.is_verbose()

        # A tree of paths that have been deleted.  E.g. if foo/bar has been
        # deleted, then we have {'foo':{'bar':{}}}
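        # Worked example of the structure above: deleting 'doc' and
        # 'doc/notes.txt' leaves deleted_paths == {'doc': {'notes.txt': {}}}.
        # An entry whose leading path segments all resolve through this dict
        # (for example anything under 'doc/') has a deleted parent and is not
        # added to the new inventory.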
        # XXX: Note that entries may have the wrong kind because the entry does
        # not reflect the status on disk.
        work_inv = self.work_tree.inventory
        # NB: entries will include entries within the excluded ids/paths
        # because iter_entries_by_dir has no 'exclude' facility today.
        entries = work_inv.iter_entries_by_dir(
            specific_file_ids=self.specific_file_ids, yield_parents=True)
        for path, existing_ie in entries:
            file_id = existing_ie.file_id
            name = existing_ie.name
            parent_id = existing_ie.parent_id
            kind = existing_ie.kind
            # Skip files that have been deleted from the working tree.
            # The deleted path ids are also recorded so they can be explicitly
            path_segments = splitpath(path)
            deleted_dict = deleted_paths
            for segment in path_segments:
                deleted_dict = deleted_dict.get(segment, None)
                # We either took a path not present in the dict
                # (deleted_dict was None), or we've reached an empty
                # child dir in the dict, so are now a sub-path.
            if deleted_dict is not None:
                # the path has a deleted parent, do not add it.
                continue
            if exclude and is_inside_any(exclude, path):
                # Skip excluded paths. Excluded paths are processed by
                # _update_builder_with_changes.
                continue
            content_summary = self.work_tree.path_content_summary(path)
            # Note that when a filter of specific files is given, we must only
            # skip/record deleted files matching that filter.
            if not specific_files or is_inside_any(specific_files, path):
                if content_summary[0] == 'missing':
                    if not deleted_paths:
                        # path won't have been split yet.
                        path_segments = splitpath(path)
                    deleted_dict = deleted_paths
                    for segment in path_segments:
                        deleted_dict = deleted_dict.setdefault(segment, {})
                    self.reporter.missing(path)
                    self._next_progress_entry()
                    deleted_ids.append(file_id)

            # TODO: have the builder do the nested commit just-in-time IF and
            if content_summary[0] == 'tree-reference':
                # enforce repository nested tree policy.
                if (not self.work_tree.supports_tree_reference() or
                    # repository does not support it either.
                    not self.branch.repository._format.supports_tree_reference):
                    content_summary = ('directory',) + content_summary[1:]
            kind = content_summary[0]
            # TODO: specific_files filtering before nested tree processing
            if kind == 'tree-reference':
                if self.recursive == 'down':
                    nested_revision_id = self._commit_nested_tree(
                        file_id, path)
                    content_summary = content_summary[:3] + (
                        nested_revision_id,)
                else:
                    content_summary = content_summary[:3] + (
                        self.work_tree.get_reference_revision(file_id),)

            # Record an entry for this item.
            # Note: I don't particularly want to have the existing_ie
            # parameter but the test suite currently (28-Jun-07) breaks
            # without it thanks to a unicode normalisation issue. :-(
            definitely_changed = kind != existing_ie.kind
            self._record_entry(path, file_id, specific_files, kind, name,
                parent_id, definitely_changed, existing_ie, report_changes,
                content_summary)

        # Unversion IDs that were found to be deleted
        self.work_tree.unversion(deleted_ids)

    def _commit_nested_tree(self, file_id, path):
        "Commit a nested tree."
        sub_tree = self.work_tree.get_nested_tree(file_id, path)
        # FIXME: be more comprehensive here:
        # this works when both trees are in --trees repository,
        # but when both are bound to a different repository,
        # it fails; a better way of approaching this is to
        # finally implement the explicit-caches approach designed
        # a while back - RBC 20070306.
        if sub_tree.branch.repository.has_same_location(
            self.work_tree.branch.repository):
            sub_tree.branch.repository = \
                self.work_tree.branch.repository
        try:
            return sub_tree.commit(message=None, revprops=self.revprops,
                recursive=self.recursive,
                message_callback=self.message_callback,
                timestamp=self.timestamp, timezone=self.timezone,
                committer=self.committer,
                allow_pointless=self.allow_pointless,
                strict=self.strict, verbose=self.verbose,
                local=self.local, reporter=self.reporter)
        except errors.PointlessCommit:
            return self.work_tree.get_reference_revision(file_id)

    def _record_entry(self, path, file_id, specific_files, kind, name,
            parent_id, definitely_changed, existing_ie, report_changes,
            content_summary):
        "Record the new inventory entry for a path if any."
        # mutter('check %s {%s}', path, file_id)
        # mutter('%s selected for commit', path)
        if definitely_changed or existing_ie is None:
            ie = make_entry(kind, name, parent_id, file_id)
        else:
            ie = existing_ie.copy()
        # For carried over entries we don't care about the fs hash - the repo
        # isn't generating a sha, so we're not saving computation time.
        _, _, fs_hash = self.builder.record_entry_contents(
            ie, self.parent_invs, path, self.work_tree, content_summary)
        self._report_change(ie, path)
        self.work_tree._observed_sha1(ie.file_id, path, fs_hash)

    def _report_change(self, ie, path):
        """Report a change to the user.

        The change that has occurred is described relative to the basis
        inventory.
        """
        if (self.basis_inv.has_id(ie.file_id)):
            basis_ie = self.basis_inv[ie.file_id]
        change = ie.describe_change(basis_ie, ie)
        if change in (InventoryEntry.RENAMED,
            InventoryEntry.MODIFIED_AND_RENAMED):
            old_path = self.basis_inv.id2path(ie.file_id)
            self.reporter.renamed(change, old_path, path)
            self._next_progress_entry()
        if change == 'unchanged':
            return
        self.reporter.snapshot_change(change, path)
        self._next_progress_entry()

    def _set_progress_stage(self, name, counter=False):
        """Set the progress stage and emit an update to the progress bar."""
        self.pb_stage_name = name
        self.pb_stage_count += 1
        if counter:
            self.pb_entries_count = 0
        else:
            self.pb_entries_count = None
        self._emit_progress()

    def _next_progress_entry(self):
        """Emit an update to the progress bar and increment the entry count."""
        self.pb_entries_count += 1
        self._emit_progress()

    def _emit_progress(self):
        if self.pb_entries_count is not None:
            text = "%s [%d] - Stage" % (self.pb_stage_name,
                self.pb_entries_count)
        else:
            text = "%s - Stage" % (self.pb_stage_name, )
        self.pb.update(text, self.pb_stage_count, self.pb_stage_total)

    def _set_specific_file_ids(self):
        """populate self.specific_file_ids if we will use it."""
        if not self.use_record_iter_changes:
            # If provided, ensure the specified files are versioned.
            if self.specific_files is not None:
                # Note: This routine is being called because it raises
                # PathNotVersionedError as a side effect of finding the IDs. We
                # later use the ids we found as input to the working tree
                # inventory iterator, so we only consider those ids rather than
                # examining the whole tree again.
                # XXX: Don't we have filter_unversioned to do this more
                self.specific_file_ids = tree.find_ids_across_trees(
                    self.specific_files, [self.basis_tree, self.work_tree])
            else:
                self.specific_file_ids = None
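

# ---------------------------------------------------------------------------
# Illustrative sketch (not part of bzrlib): the usual way to drive the Commit
# machinery above is through a working tree rather than by instantiating
# Commit directly.  The helper below is an assumption-level usage example:
# the keyword arguments mirror the ones documented on Commit.commit() and are
# forwarded by WorkingTree.commit(); the message is made up.
# ---------------------------------------------------------------------------

def _sketch_commit_usage():
    """Commit all pending changes in the current working tree."""
    wt = WorkingTree.open_containing('.')[0]
    return wt.commit(message='example commit message',
                     allow_pointless=False,
                     reporter=ReportCommitToLog())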