# Copyright (C) 2005, 2006, 2007, 2008 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

# FIXME: "bzr commit doc/format" commits doc/format.txt!

def commit(branch, message,
           allow_pointless=True):
    """Commit working copy as a new revision.

    The basic approach is to add all the file texts into the
    store, then the inventory, then make a new revision pointing
    to that inventory and store that.

    This is not quite safe if the working copy changes during the
    commit; for the moment that is simply not allowed. A better
    approach is to make a temporary copy of the files before
    computing their hashes, and then add those hashes in turn to
    the inventory. This should mean at least that there are no
    broken hash pointers. There is no way we can get a snapshot
    of the whole directory at an instant. This would also have to
    be robust against files disappearing, moving, etc. So the
    whole thing is a bit hard.

    This raises PointlessCommit if there are no changes, no new merges,
    and allow_pointless is false.

    timestamp -- if not None, seconds-since-epoch for a
         postdated/predated commit.

    specific_files
        If true, commit only those files.

    rev_id
        If set, use this as the new revision id.
        Useful for test or import commands that need to tightly
        control what revisions are assigned. If you duplicate
        a revision id that exists elsewhere it is your own fault.
        If null (default), a time/random revision id is generated.
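
    A minimal illustrative call, assuming an open Branch object (the
    remaining keyword parameters keep their defaults):

        commit(branch, 'fix the frobnicator', allow_pointless=False)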

# The newly committed revision is going to have a shape corresponding
# to that of the working tree.  Files that are not in the
# working tree and that were in the predecessor are reported as
# removed --- this can include files that were either removed from the
# inventory or deleted in the working tree.  If they were only
# deleted from disk, they are removed from the working inventory.
#
# We then consider the remaining entries, which will be in the new
# version.  Directory entries are simply copied across.  File entries
# must be checked to see if a new version of the file should be
# recorded.  For each parent revision tree, we check to see what
# version of the file was present.  If the file was present in at
# least one tree, and if it was the same version in all the trees,
# then we can just refer to that version.  Otherwise, a new version
# representing the merger of the file versions must be added.
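
# The following is an illustrative sketch only (it is not used elsewhere in
# this module) of the per-file rule described above: a file entry can simply
# refer to an existing version when every parent tree holds that same
# version; otherwise a new version representing the merge must be recorded.
# The helper name and its argument are assumptions made for the sketch.
def _example_choose_file_version(parent_file_revisions):
    """Return the version to refer to, or None if a new one must be recorded.

    parent_file_revisions holds, for each parent revision tree, the revision
    id of the file in that tree, or None where that tree lacks the file.
    """
    versions = set(parent_file_revisions)
    if len(versions) == 1 and None not in versions:
        # The same version is present in every parent tree: just refer to it.
        return parent_file_revisions[0]
    # Absent somewhere, or the trees disagree: a new version is needed.
    return None
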

# TODO: Update hashcache before and after - or does the WorkingTree

# TODO: Rather than mashing together the ancestry and storing it back,
# perhaps the weave should have a single method which does it all in one
# go, avoiding a lot of redundant work.

# TODO: Perhaps give a warning if one of the revisions marked as
# merged is already in the ancestry, and then don't record it as a

# TODO: If the file is newly merged but unchanged from the version it
# merges from, then it should still be reported as newly added
# relative to the basis revision.

# TODO: Change the parameter 'rev_id' to 'revision_id' to be consistent with
# the rest of the code; add a deprecation of the old name.
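
# A minimal usage sketch, not executed by this module: it shows how the
# Commit object defined below is normally driven, mirroring the parameters
# documented on Commit.commit().  The helper name, the tree_path argument
# and the WorkingTree.open() call are illustrative assumptions only.
def _example_commit_usage(tree_path='.'):
    from bzrlib.workingtree import WorkingTree
    tree = WorkingTree.open(tree_path)
    # PointlessCommit is raised if nothing has changed and allow_pointless
    # is False, as described in the docstrings in this module.
    return Commit().commit(message='example commit message',
                           working_tree=tree,
                           allow_pointless=False)
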

from cStringIO import StringIO

import bzrlib
from bzrlib import (debug,
                    errors,
                    revision,
                    trace,
                    tree,
                    ui,
                    )
from bzrlib.branch import Branch
from bzrlib.errors import (BzrError, PointlessCommit,
                           ConflictsInTree,
                           StrictCommitFailed
                           )
from bzrlib.osutils import (get_user_encoding,
                            kind_marker, isdir, isfile, is_inside_any,
                            is_inside_or_parent_of_any,
                            minimum_path_selection,
                            quotefn, sha_file, split_lines,
                            splitpath,
                            )
from bzrlib.testament import Testament
from bzrlib.trace import mutter, note, warning, is_quiet
from bzrlib.inventory import Inventory, InventoryEntry, make_entry
from bzrlib import symbol_versioning
from bzrlib.symbol_versioning import (deprecated_passed,
                                      )
from bzrlib.workingtree import WorkingTree
from bzrlib.urlutils import unescape_for_display


class NullCommitReporter(object):
    """I report on progress of a commit."""

    def started(self, revno, revid, location=None):
        symbol_versioning.warn("As of bzr 1.0 you must pass a location "
                               "to started.", DeprecationWarning,

    def snapshot_change(self, change, path):

    def completed(self, revno, rev_id):

    def deleted(self, path):

    def missing(self, path):

    def renamed(self, change, old_path, new_path):

    def is_verbose(self):


class ReportCommitToLog(NullCommitReporter):

    def _note(self, format, *args):
        """Subclasses may choose to override this method."""

    def snapshot_change(self, change, path):
        if path == '' and change in ('added', 'modified'):
        self._note("%s %s", change, path)

    def started(self, revno, rev_id, location=None):
        if location is not None:
            location = ' to: ' + unescape_for_display(location, 'utf-8')
        # When started was added, location was only made optional by
        # accident.  Matt Nordhoff 20071129
        symbol_versioning.warn("As of bzr 1.0 you must pass a location "
                               "to started.", DeprecationWarning,
        self._note('Committing%s', location)

    def completed(self, revno, rev_id):
        self._note('Committed revision %d.', revno)

    def deleted(self, path):
        self._note('deleted %s', path)

    def missing(self, path):
        self._note('missing %s', path)

    def renamed(self, change, old_path, new_path):
        self._note('%s %s => %s', change, old_path, new_path)

    def is_verbose(self):


class Commit(object):
    """Task of committing a new revision.

    This is a MethodObject: it accumulates state as the commit is
    prepared, and then it is discarded.  It doesn't represent
    historical revisions, just the act of recording a new one.

            Modified to hold a list of files that have been deleted from
            the working directory; these should be removed from the

    import time, tempfile

    from bzrlib.osutils import local_time_offset, username
    from bzrlib.branch import gen_file_id
    from bzrlib.errors import BzrError, PointlessCommit
    from bzrlib.revision import Revision, RevisionReference
    from bzrlib.trace import mutter, note
    from bzrlib.xml import pack_xml

    # First walk over the working inventory; and both update that
    # and also build a new revision inventory.  The revision
    # inventory needs to hold the text-id, sha1 and size of the
    # actual file versions committed in the revision.  (These are
    # not present in the working inventory.)  We also need to
    # detect missing/deleted files, and remove them from the

    work_tree = branch.working_tree()
    work_inv = work_tree.inventory
    basis = branch.basis_tree()
    basis_inv = basis.inventory

    note('looking for changes...')

    pending_merges = branch.pending_merges()

    missing_ids, new_inv, any_changes = \
        _gather_commit(branch,
    if not (any_changes or allow_pointless or pending_merges):

        """Create a Commit object.

        :param reporter: the default reporter to use or None to decide later
        self.reporter = reporter

                allow_pointless=True,
                message_callback=None,
                possible_master_transports=None):
        """Commit working copy as a new revision.

        :param message: the commit message (it or message_callback is required)
        :param message_callback: A callback: message = message_callback(cmt_obj)

        :param timestamp: if not None, seconds-since-epoch for a
            postdated/predated commit.

        :param specific_files: If not None, commit only those files. An empty
            list means 'commit no files'.

        :param rev_id: If set, use this as the new revision id.
            Useful for test or import commands that need to tightly
            control what revisions are assigned. If you duplicate
            a revision id that exists elsewhere it is your own fault.
            If null (default), a time/random revision id is generated.

        :param allow_pointless: If true (default), commit even if nothing
            has changed and no merges are recorded.

        :param strict: If true, don't allow a commit if the working tree
            contains unknown files.

        :param revprops: Properties for new revision
        :param local: Perform a local only commit.
        :param reporter: the reporter to use or None for the default
        :param verbose: if True and the reporter is not None, report everything
        :param recursive: If set to 'down', commit in any subtrees that have
            pending changes of any sort during this commit.
        :param exclude: None or a list of relative paths to exclude from the
            commit. Pending changes to excluded files will be ignored by the

        mutter('preparing to commit')

        if working_tree is None:
            raise BzrError("working_tree must be passed into commit().")

        self.work_tree = working_tree
        self.branch = self.work_tree.branch
        if getattr(self.work_tree, 'requires_rich_root', lambda: False)():
            if not self.branch.repository.supports_rich_root():
                raise errors.RootNotRich()
        if message_callback is None:
            if message is not None:
                if isinstance(message, str):
                    message = message.decode(get_user_encoding())
                message_callback = lambda x: message
            else:
                raise BzrError("The message or message_callback keyword"
                               " parameter is required for commit().")

        self.bound_branch = None
        self.any_entries_deleted = False
        if exclude is not None:
            self.exclude = sorted(
                minimum_path_selection(exclude))
        self.master_branch = None
        self.master_locked = False
        self.recursive = recursive
        # self.specific_files is None to indicate no filter, or any iterable to
        # indicate a filter - [] means no files at all, as per iter_changes.
        if specific_files is not None:
            self.specific_files = sorted(
                minimum_path_selection(specific_files))
        else:
            self.specific_files = None

        self.allow_pointless = allow_pointless
        self.revprops = revprops
        self.message_callback = message_callback
        self.timestamp = timestamp
        self.timezone = timezone
        self.committer = committer
        self.verbose = verbose

        self.work_tree.lock_write()
        self.parents = self.work_tree.get_parent_ids()
        # We can use record_iter_changes IFF iter_changes is compatible with
        # the command line parameters, and the repository has fast delta
        # generation. See bug 347649.
        self.use_record_iter_changes = (
            not self.branch.repository._format.supports_tree_reference and
            (self.branch.repository._format.fast_deltas or
             len(self.parents) < 2))
        self.pb = bzrlib.ui.ui_factory.nested_progress_bar()
        self.basis_revid = self.work_tree.last_revision()
        self.basis_tree = self.work_tree.basis_tree()
        self.basis_tree.lock_read()
        # Cannot commit with conflicts present.
        if len(self.work_tree.conflicts()) > 0:
            raise ConflictsInTree

        # Setup the bound branch variables as needed.
        self._check_bound_branch(possible_master_transports)

        # Check that the working tree is up to date
        old_revno, new_revno = self._check_out_of_date_tree()

        # Complete configuration setup
        if reporter is not None:
            self.reporter = reporter
        elif self.reporter is None:
            self.reporter = self._select_reporter()
        if self.config is None:
            self.config = self.branch.get_config()

        self._set_specific_file_ids()

        # Setup the progress bar. As the number of files that need to be
        # committed is unknown, progress is reported as stages.
        # We keep track of entries separately though and include that
        # information in the progress bar during the relevant stages.
        self.pb_stage_name = ""
        self.pb_stage_count = 0
        self.pb_stage_total = 5
        if self.bound_branch:
            self.pb_stage_total += 1
        self.pb.show_pct = False
        self.pb.show_spinner = False
        self.pb.show_eta = False
        self.pb.show_count = True
        self.pb.show_bar = True

        self._gather_parents()
        # After a merge, a selected file commit is not supported.
        # See 'bzr help merge' for an explanation as to why.
        if len(self.parents) > 1 and self.specific_files is not None:
            raise errors.CannotCommitSelectedFileMerge(self.specific_files)
        # Excludes are a form of selected file commit.
        if len(self.parents) > 1 and self.exclude:
            raise errors.CannotCommitSelectedFileMerge(self.exclude)

        # Collect the changes
        self._set_progress_stage("Collecting changes", counter=True)
        self.builder = self.branch.get_commit_builder(self.parents,
            self.config, timestamp, timezone, committer, revprops, rev_id)
        self.builder.will_record_deletes()

        # find the location being committed to
        if self.bound_branch:
            master_location = self.master_branch.base
        else:
            master_location = self.branch.base

        # report the start of the commit
        self.reporter.started(new_revno, self.rev_id, master_location)

        self._update_builder_with_changes()
        self._check_pointless()

        # TODO: Now the new inventory is known, check for conflicts.
        # ADHB 2006-08-08: If this is done, populate_new_inv should not add
        # weave lines, because nothing should be recorded until it is known
        # that commit will succeed.

        self._set_progress_stage("Saving data locally")
        self.builder.finish_inventory()

        # Prompt the user for a commit message if none provided
        message = message_callback(self)
        self.message = message

        # Add revision data to the local branch
        self.rev_id = self.builder.commit(self.message)
        mutter("aborting commit write group because of exception:")
        trace.log_exception_quietly()
        note("aborting commit write group: %r" % (e,))

        self._process_pre_hooks(old_revno, new_revno)

        # Upload revision data to the master.
        # this will propagate merged revisions too if needed.
        if self.bound_branch:
            self._set_progress_stage("Uploading data to master branch")
            # 'commit' to the master first so a timeout here causes the
            # local branch to be out of date
            self.master_branch.import_last_revision_info(
                self.branch.repository, new_revno, self.rev_id)

        # and now do the commit locally.
        self.branch.set_last_revision_info(new_revno, self.rev_id)

        # Make the working tree be up to date with the branch. This
        # includes automatic changes scheduled to be made to the tree, such
        # as updating its basis and unversioning paths that were missing.
        self.work_tree.unversion(self.deleted_ids)
        self._set_progress_stage("Updating the working tree")
        self.work_tree.update_basis_by_delta(self.rev_id,
            self.builder.get_basis_delta())
        self.reporter.completed(new_revno, self.rev_id)
        self._process_post_hooks(old_revno, new_revno)

    def _select_reporter(self):
        """Select the CommitReporter to use."""
        # Quiet runs get no reporting.
        if is_quiet():
            return NullCommitReporter()
        return ReportCommitToLog()

    def _check_pointless(self):
        if self.allow_pointless:
            return
        # A merge with no effect on files
        if len(self.parents) > 1:
            return
        # TODO: we could simplify this by using self.builder.basis_delta.

        # The initial commit adds a root directory, but this in itself is not
        # a worthwhile commit.
        if (self.basis_revid == revision.NULL_REVISION and
            ((self.builder.new_inventory is not None and
              len(self.builder.new_inventory) == 1) or
             len(self.builder._basis_delta) == 1)):
            raise PointlessCommit()

    for file_id in missing_ids:
        # Any files that have been deleted are now removed from the
        # working inventory.  Files that were not selected for commit
        # are left as they were in the working inventory and omitted
        # from the revision inventory.

        # have to do this later so we don't mess up the iterator.
        # since parents may be removed before their children we

        # FIXME: There's probably a better way to do this; perhaps
        # the working tree should know how to filter itself.
        if work_inv.has_id(file_id):
            del work_inv[file_id]

    rev_id = _gen_revision_id(branch, time.time())

    inv_tmp = tempfile.TemporaryFile()
    pack_xml(new_inv, inv_tmp)
    branch.inventory_store.add(inv_tmp, inv_id)
    mutter('new inventory_id is {%s}' % inv_id)

    # We could also just sha hash the inv_tmp file
    # however, in the case that branch.inventory_store.add()
    # ever actually does anything special
    inv_sha1 = branch.get_inventory_sha1(inv_id)

    branch._write_inventory(work_inv)

    if timestamp is None:
        timestamp = time.time()

    if committer is None:
        committer = username(branch)

    if timezone is None:
        timezone = local_time_offset()

    mutter("building commit log message")
    rev = Revision(timestamp=timestamp,
                   inventory_sha1=inv_sha1,

    precursor_id = branch.last_patch()
    if precursor_id:
        precursor_sha1 = branch.get_revision_sha1(precursor_id)
        rev.parents.append(RevisionReference(precursor_id, precursor_sha1))
    for merge_rev in pending_merges:
        rev.parents.append(RevisionReference(merge_rev))

    rev_tmp = tempfile.TemporaryFile()
    pack_xml(rev, rev_tmp)
    branch.revision_store.add(rev_tmp, rev_id)
    mutter("new revision_id is {%s}" % rev_id)

    ## XXX: Everything up to here can simply be orphaned if we abort
    ## the commit; it will leave junk files behind but that doesn't

    ## TODO: Read back the just-generated changeset, and make sure it
    ## applies and recreates the right state.

    ## TODO: Also calculate and store the inventory SHA1
    mutter("committing patch r%d" % (branch.revno() + 1))

    branch.append_revision(rev_id)

    branch.set_pending_merges([])

    note("committed r%d" % branch.revno())


def _gen_revision_id(branch, when):
    """Return new revision-id."""
    from binascii import hexlify
    from bzrlib.osutils import rand_bytes, compact_date, user_email

    s = '%s-%s-' % (user_email(branch), compact_date(when))
    s += hexlify(rand_bytes(8))
    return s


def _gather_commit(branch, work_tree, work_inv, basis_inv, specific_files,
    """Build inventory preparatory to commit.

    Returns missing_ids, new_inv, any_changes.

    This adds any changed files into the text store, and sets their
    text-id, sha1 and size in the returned inventory appropriately.

        Modified to hold a list of files that have been deleted from
        the working directory; these should be removed from the

    from bzrlib.inventory import Inventory
    from bzrlib.osutils import isdir, isfile, sha_string, quotefn, \
         local_time_offset, username, kind_marker, is_inside_any

    from bzrlib.branch import gen_file_id
    from bzrlib.errors import BzrError
    from bzrlib.revision import Revision
    from bzrlib.trace import mutter, note

    inv = Inventory(work_inv.root.file_id)

    for path, entry in work_inv.iter_entries():
        ## TODO: Check that the file kind has not changed from the previous
        ## revision of this file (if any).

        p = branch.abspath(path)
        file_id = entry.file_id
        mutter('commit prep file %s, id %r ' % (p, file_id))

        if specific_files and not is_inside_any(specific_files, path):
            mutter(' skipping file excluded from commit')
            if basis_inv.has_id(file_id):
                # carry over with previous state
                inv.add(basis_inv[file_id].copy())
            # omit this from committed inventory

        if not work_tree.has_id(file_id):
            print('deleted %s%s' % (path, kind_marker(entry.kind)))
            mutter(" file is missing, removing from inventory")
            missing_ids.append(file_id)

        # this is present in the new inventory; may be new, modified or
        old_ie = basis_inv.has_id(file_id) and basis_inv[file_id]
            old_kind = old_ie.kind
            if old_kind != entry.kind:
                raise BzrError("entry %r changed kind from %r to %r"
                               % (file_id, old_kind, entry.kind))

        if entry.kind == 'directory':
                raise BzrError("%s is entered as directory but not a directory"
        elif entry.kind == 'file':
                raise BzrError("%s is entered as file but is not a file" % quotefn(p))

            new_sha1 = work_tree.get_file_sha1(file_id)
                and old_ie.text_sha1 == new_sha1):
                ## assert content == basis.get_file(file_id).read()
                entry.text_id = old_ie.text_id
                entry.text_sha1 = new_sha1
                entry.text_size = old_ie.text_size
                mutter(' unchanged from previous text_id {%s}' %

                content = file(p, 'rb').read()

                # calculate the sha again, just in case the file contents
                # changed since we updated the cache
                entry.text_sha1 = sha_string(content)
                entry.text_size = len(content)

                entry.text_id = gen_file_id(entry.name)
                branch.text_store.add(content, entry.text_id)
                mutter(' stored with text_id {%s}' % entry.text_id)

            marked = path + kind_marker(entry.kind)
                print 'added', marked
            elif old_ie == entry:
            elif (old_ie.name == entry.name
                  and old_ie.parent_id == entry.parent_id):
                print 'modified', marked
                print 'renamed', marked

    return missing_ids, inv, any_changes

        if self.builder.any_changes():
            return
        raise PointlessCommit()

    def _check_bound_branch(self, possible_master_transports=None):
        """Check to see if the local branch is bound.

        If it is bound, then most of the commit will actually be
        done using the remote branch as the target branch.
        Only at the end will the local branch be updated.
        """
        if self.local and not self.branch.get_bound_location():
            raise errors.LocalRequiresBoundBranch()

        self.master_branch = self.branch.get_master_branch(
            possible_master_transports)

        if not self.master_branch:
            # make this branch the reference branch for out of date checks.
            self.master_branch = self.branch

        # If the master branch is bound, we must fail
        master_bound_location = self.master_branch.get_bound_location()
        if master_bound_location:
            raise errors.CommitToDoubleBoundBranch(self.branch,
                self.master_branch, master_bound_location)

        # TODO: jam 20051230 We could automatically push local
        #       commits to the remote branch if they would fit.
        #       But for now, just require remote to be identical

        # Make sure the local branch is identical to the master
        master_info = self.master_branch.last_revision_info()
        local_info = self.branch.last_revision_info()
        if local_info != master_info:
            raise errors.BoundBranchOutOfDate(self.branch,

        # Now things are ready to change the master branch
        self.bound_branch = self.branch
        self.master_branch.lock_write()
        self.master_locked = True

    def _check_out_of_date_tree(self):
        """Check that the working tree is up to date.

        :return: old_revision_number, new_revision_number tuple
        """
        try:
            first_tree_parent = self.work_tree.get_parent_ids()[0]
        except IndexError:
            # if there are no parents, treat our parent as 'None'
            # this is so that we still consider the master branch
            # - in a checkout scenario the tree may have no
            # parents but the branch may do.
            first_tree_parent = bzrlib.revision.NULL_REVISION
        old_revno, master_last = self.master_branch.last_revision_info()
        if master_last != first_tree_parent:
            if master_last != bzrlib.revision.NULL_REVISION:
                raise errors.OutOfDateTree(self.work_tree)
        if self.branch.repository.has_revision(first_tree_parent):
            new_revno = old_revno + 1
        # ghost parents never appear in revision history.
        return old_revno, new_revno

    def _process_pre_hooks(self, old_revno, new_revno):
        """Process any registered pre commit hooks."""
        self._set_progress_stage("Running pre_commit hooks")
        self._process_hooks("pre_commit", old_revno, new_revno)

    def _process_post_hooks(self, old_revno, new_revno):
        """Process any registered post commit hooks."""
        # Process the post commit hooks, if any
        self._set_progress_stage("Running post_commit hooks")
        # old style commit hooks - should be deprecated ? (obsoleted in
        if self.config.post_commit() is not None:
            hooks = self.config.post_commit().split(' ')
            # this would be nicer with twisted.python.reflect.namedAny
            for hook in hooks:
                result = eval(hook + '(branch, rev_id)',
                              {'branch':self.branch,
                               'rev_id':self.rev_id})
        # process new style post commit hooks
        self._process_hooks("post_commit", old_revno, new_revno)

    def _process_hooks(self, hook_name, old_revno, new_revno):
        if not Branch.hooks[hook_name]:
            return

        # new style commit hooks:
        if not self.bound_branch:
            hook_master = self.branch
        else:
            hook_master = self.master_branch
            hook_local = self.branch
        # With bound branches, when the master is behind the local branch,
        # the 'old_revno' and old_revid values here are incorrect.
        # XXX: FIXME ^. RBC 20060206
        if self.parents:
            old_revid = self.parents[0]
        else:
            old_revid = bzrlib.revision.NULL_REVISION

        if hook_name == "pre_commit":
            future_tree = self.builder.revision_tree()
            tree_delta = future_tree.changes_from(self.basis_tree,

        for hook in Branch.hooks[hook_name]:
            # show the running hook in the progress bar. As hooks may
            # end up doing nothing (e.g. because they are not configured by
            # the user) this is still showing progress, not showing overall
            # actions - it's up to each plugin to show a UI if it wants to
            # (such as 'Emailing diff to foo@example.com').
            self.pb_stage_name = "Running %s hooks [%s]" % \
                (hook_name, Branch.hooks.get_hook_name(hook))
            self._emit_progress()
            if 'hooks' in debug.debug_flags:
                mutter("Invoking commit hook: %r", hook)
            if hook_name == "post_commit":
                hook(hook_local, hook_master, old_revno, old_revid, new_revno,
            elif hook_name == "pre_commit":
                hook(hook_local, hook_master,
                     old_revno, old_revid, new_revno, self.rev_id,
                     tree_delta, future_tree)

        """Cleanup any open locks, progress bars etc."""
        cleanups = [self._cleanup_bound_branch,
                    self.basis_tree.unlock,
                    self.work_tree.unlock,
        found_exception = None
        for cleanup in cleanups:
            # we want every cleanup to run no matter what.
            # so we have a catchall here, but we will raise the
            # last encountered exception up the stack: and
            # typically this will be useful enough.
        if found_exception is not None:
            # don't do a plain raise, because the last exception may have been
            # trashed, e is our sure-to-work exception even though it loses the
            # full traceback. XXX: RBC 20060421 perhaps we could check the
            # exc_info and if it's the same one do a plain raise otherwise
            # 'raise e' as we do now.

    def _cleanup_bound_branch(self):
        """Executed at the end of a try/finally to cleanup a bound branch.

        If the branch wasn't bound, this is a no-op.
        If it was, it resets self.branch to the local branch, instead
        if not self.bound_branch:
            return
        if self.master_locked:
            self.master_branch.unlock()

    def _gather_parents(self):
        """Record the parents of a merge for merge detection."""
        # TODO: Make sure that this list doesn't contain duplicate
        # entries and the order is preserved when doing this.
        if self.use_record_iter_changes:
            return
        self.basis_inv = self.basis_tree.inventory
        self.parent_invs = [self.basis_inv]
        for revision in self.parents[1:]:
            if self.branch.repository.has_revision(revision):
                mutter('commit parent revision {%s}', revision)
                inventory = self.branch.repository.get_inventory(revision)
                self.parent_invs.append(inventory)
            else:
                mutter('commit parent ghost revision {%s}', revision)

    def _update_builder_with_changes(self):
        """Update the commit builder with the data about what has changed."""
        exclude = self.exclude
        specific_files = self.specific_files
        mutter("Selecting files for commit with filter %s", specific_files)

        if self.use_record_iter_changes:
            iter_changes = self.work_tree.iter_changes(self.basis_tree,
                specific_files=specific_files)
            iter_changes = self._filter_iter_changes(iter_changes)
            for file_id, path, fs_hash in self.builder.record_iter_changes(
                self.work_tree, self.basis_revid, iter_changes):
                self.work_tree._observed_sha1(file_id, path, fs_hash)
        else:
            # Build the new inventory
            self._populate_from_inventory()
            self._record_unselected()
            self._report_and_accumulate_deletes()

    def _filter_iter_changes(self, iter_changes):
        """Process iter_changes.

        This method reports on the changes in iter_changes to the user, and
        converts 'missing' entries in the iter_changes iterator to 'deleted'
        entries. 'missing' entries have their new path and new versioned flag
        cleared.

        :param iter_changes: An iter_changes to process.
        :return: A generator of changes.
        """
        reporter = self.reporter
        report_changes = reporter.is_verbose()
        deleted_ids = []
        for change in iter_changes:
            old_path = change[1][0]
            new_path = change[1][1]
            versioned = change[3][1]
            versioned = change[3][1]
            if kind is None and versioned:
                reporter.missing(new_path)
                deleted_ids.append(change[0])
                # Reset the new path (None) and new versioned flag (False)
                change = (change[0], (change[1][0], None), change[2],
                    (change[3][0], False)) + change[4:]
            elif kind == 'tree-reference':
                if self.recursive == 'down':
                    self._commit_nested_tree(change[0], change[1][1])
            if change[3][0] or change[3][1]:
                reporter.deleted(old_path)
            elif old_path is None:
                reporter.snapshot_change('added', new_path)
            elif old_path != new_path:
                reporter.renamed('renamed', old_path, new_path)
                    self.work_tree.branch.repository._format.rich_root_data):
                # Don't report on changes to '' in non rich root
                reporter.snapshot_change('modified', new_path)
            self._next_progress_entry()
        # Unversion IDs that were found to be deleted
        self.deleted_ids = deleted_ids

    def _record_unselected(self):
        # If specific files are selected, then all un-selected files must be
        # recorded in their previous state. For more details, see
        # https://lists.ubuntu.com/archives/bazaar/2007q3/028476.html.
        if self.specific_files or self.exclude:
            specific_files = self.specific_files or []
            for path, old_ie in self.basis_inv.iter_entries():
                if old_ie.file_id in self.builder.new_inventory:
                    # already added - skip.
                    continue
                if (is_inside_any(specific_files, path)
                    and not is_inside_any(self.exclude, path)):
                    # was inside the selected path, and not excluded - if not
                    # present it has been deleted so skip.
                    continue
                # From here down it was either not selected, or was excluded:
                # We preserve the entry unaltered.
                ie = old_ie.copy()
                # Note: specific file commits after a merge are currently
                # prohibited. This test is for sanity/safety in case it's
                # required after that changes.
                if len(self.parents) > 1:
                self.builder.record_entry_contents(ie, self.parent_invs, path,
                    self.basis_tree, None)

    def _report_and_accumulate_deletes(self):
        if (isinstance(self.basis_inv, Inventory)
            and isinstance(self.builder.new_inventory, Inventory)):
            # the older Inventory classes provide a _byid dict, and building a
            # set from the keys of this dict is substantially faster than even
            # getting a set of ids from the inventory
            #
            # <lifeless> set(dict) is roughly the same speed as
            # set(iter(dict)) and both are significantly slower than
            deleted_ids = set(self.basis_inv._byid.keys()) - \
                set(self.builder.new_inventory._byid.keys())
        else:
            deleted_ids = set(self.basis_inv) - set(self.builder.new_inventory)
        self.any_entries_deleted = True
        deleted = [(self.basis_tree.id2path(file_id), file_id)
                   for file_id in deleted_ids]
        # XXX: this is not quite directory-order sorting
        for path, file_id in deleted:
            self.builder.record_delete(path, file_id)
            self.reporter.deleted(path)

    def _check_strict(self):
        # XXX: when we use iter_changes this would likely be faster if
        # iter_changes would check for us (even in the presence of
        # raise an exception as soon as we find a single unknown.
        for unknown in self.work_tree.unknowns():
            raise StrictCommitFailed()

    def _populate_from_inventory(self):
        """Populate the CommitBuilder by walking the working tree inventory."""
        # Build the revision inventory.
        #
        # This starts by creating a new empty inventory. Depending on
        # which files are selected for commit, and what is present in the
        # current tree, the new inventory is populated. Inventory entries
        # which are candidates for modification have their revision set to
        # None; inventory entries that are carried over untouched have their
        # revision set to their prior value.
        #
        # ESEPARATIONOFCONCERNS: this function is diffing and using the diff
        # results to create a new inventory at the same time, which results
        # in bugs like #46635. Any reason not to use/enhance Tree.changes_from?

        specific_files = self.specific_files
        exclude = self.exclude
        report_changes = self.reporter.is_verbose()
        deleted_ids = []
        # A tree of paths that have been deleted. E.g. if foo/bar has been
        # deleted, then we have {'foo':{'bar':{}}}
        deleted_paths = {}
        # XXX: Note that entries may have the wrong kind because the entry does
        # not reflect the status on disk.
        work_inv = self.work_tree.inventory
        # NB: entries will include entries within the excluded ids/paths
        # because iter_entries_by_dir has no 'exclude' facility today.
        entries = work_inv.iter_entries_by_dir(
            specific_file_ids=self.specific_file_ids, yield_parents=True)

        for path, existing_ie in entries:
            file_id = existing_ie.file_id
            name = existing_ie.name
            parent_id = existing_ie.parent_id
            kind = existing_ie.kind
            # Skip files that have been deleted from the working tree.
            # The deleted path ids are also recorded so they can be explicitly
            path_segments = splitpath(path)
            deleted_dict = deleted_paths
            for segment in path_segments:
                deleted_dict = deleted_dict.get(segment, None)
                # We either took a path not present in the dict
                # (deleted_dict was None), or we've reached an empty
                # child dir in the dict, so are now a sub-path.
            if deleted_dict is not None:
                # the path has a deleted parent, do not add it.
                continue
            if exclude and is_inside_any(exclude, path):
                # Skip excluded paths. Excluded paths are processed by
                # _update_builder_with_changes.
                continue
            content_summary = self.work_tree.path_content_summary(path)
            kind = content_summary[0]
            # Note that when a filter of specific files is given, we must only
            # skip/record deleted files matching that filter.
            if not specific_files or is_inside_any(specific_files, path):
                if kind == 'missing':
                    if not deleted_paths:
                        # path won't have been split yet.
                        path_segments = splitpath(path)
                    deleted_dict = deleted_paths
                    for segment in path_segments:
                        deleted_dict = deleted_dict.setdefault(segment, {})
                    self.reporter.missing(path)
                    self._next_progress_entry()
                    deleted_ids.append(file_id)

            # TODO: have the builder do the nested commit just-in-time IF and
            if kind == 'tree-reference':
                # enforce repository nested tree policy.
                if (not self.work_tree.supports_tree_reference() or
                    # repository does not support it either.
                    not self.branch.repository._format.supports_tree_reference):
                    content_summary = (kind, None, None, None)
                elif self.recursive == 'down':
                    nested_revision_id = self._commit_nested_tree(
                    content_summary = (kind, None, None, nested_revision_id)
                else:
                    nested_revision_id = self.work_tree.get_reference_revision(file_id)
                    content_summary = (kind, None, None, nested_revision_id)

            # Record an entry for this item
            # Note: I don't particularly want to have the existing_ie
            # parameter but the test suite currently (28-Jun-07) breaks
            # without it thanks to a unicode normalisation issue. :-(
            definitely_changed = kind != existing_ie.kind
            self._record_entry(path, file_id, specific_files, kind, name,
                parent_id, definitely_changed, existing_ie, report_changes,

        # Unversion IDs that were found to be deleted
        self.deleted_ids = deleted_ids

    def _commit_nested_tree(self, file_id, path):
        "Commit a nested tree."
        sub_tree = self.work_tree.get_nested_tree(file_id, path)
        # FIXME: be more comprehensive here:
        # this works when both trees are in --trees repository,
        # but when both are bound to a different repository,
        # it fails; a better way of approaching this is to
        # finally implement the explicit-caches approach design
        # a while back - RBC 20070306.
        if sub_tree.branch.repository.has_same_location(
            self.work_tree.branch.repository):
            sub_tree.branch.repository = \
                self.work_tree.branch.repository
        try:
            return sub_tree.commit(message=None, revprops=self.revprops,
                recursive=self.recursive,
                message_callback=self.message_callback,
                timestamp=self.timestamp, timezone=self.timezone,
                committer=self.committer,
                allow_pointless=self.allow_pointless,
                strict=self.strict, verbose=self.verbose,
                local=self.local, reporter=self.reporter)
        except errors.PointlessCommit:
            return self.work_tree.get_reference_revision(file_id)

    def _record_entry(self, path, file_id, specific_files, kind, name,
        parent_id, definitely_changed, existing_ie, report_changes,
        content_summary):
        "Record the new inventory entry for a path if any."
        # mutter('check %s {%s}', path, file_id)
        # mutter('%s selected for commit', path)
        if definitely_changed or existing_ie is None:
            ie = make_entry(kind, name, parent_id, file_id)
        else:
            ie = existing_ie.copy()
        # For carried over entries we don't care about the fs hash - the repo
        # isn't generating a sha, so we're not saving computation time.
        _, _, fs_hash = self.builder.record_entry_contents(
            ie, self.parent_invs, path, self.work_tree, content_summary)
        self._report_change(ie, path)
        self.work_tree._observed_sha1(ie.file_id, path, fs_hash)

    def _report_change(self, ie, path):
        """Report a change to the user.

        The change that has occurred is described relative to the basis
        if (self.basis_inv.has_id(ie.file_id)):
            basis_ie = self.basis_inv[ie.file_id]
        else:
            basis_ie = None
        change = ie.describe_change(basis_ie, ie)
        if change in (InventoryEntry.RENAMED,
            InventoryEntry.MODIFIED_AND_RENAMED):
            old_path = self.basis_inv.id2path(ie.file_id)
            self.reporter.renamed(change, old_path, path)
            self._next_progress_entry()
        else:
            if change == 'unchanged':
                return
            self.reporter.snapshot_change(change, path)
            self._next_progress_entry()

    def _set_progress_stage(self, name, counter=False):
        """Set the progress stage and emit an update to the progress bar."""
        self.pb_stage_name = name
        self.pb_stage_count += 1
        if counter:
            self.pb_entries_count = 0
        else:
            self.pb_entries_count = None
        self._emit_progress()

    def _next_progress_entry(self):
        """Emit an update to the progress bar and increment the entry count."""
        self.pb_entries_count += 1
        self._emit_progress()

    def _emit_progress(self):
        if self.pb_entries_count is not None:
            text = "%s [%d] - Stage" % (self.pb_stage_name,
                self.pb_entries_count)
        else:
            text = "%s - Stage" % (self.pb_stage_name, )
        self.pb.update(text, self.pb_stage_count, self.pb_stage_total)

    def _set_specific_file_ids(self):
        """Populate self.specific_file_ids if we will use it."""
        if not self.use_record_iter_changes:
            # If provided, ensure the specified files are versioned
            if self.specific_files is not None:
                # Note: This routine is being called because it raises
                # PathNotVersionedError as a side effect of finding the IDs. We
                # later use the ids we found as input to the working tree
                # inventory iterator, so we only consider those ids rather than
                # examining the whole tree again.
                # XXX: Don't we have filter_unversioned to do this more
                self.specific_file_ids = tree.find_ids_across_trees(
                    self.specific_files, [self.basis_tree, self.work_tree])
            else:
                self.specific_file_ids = None