# Copyright (C) 2005, 2006 Canonical Ltd

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# XXX: Can we do any better about making interrupted commits change

# XXX: If we merged two versions of a file then we still need to
# create a new version representing that merge, even if it didn't
# change from the parent.

# TODO: Read back the just-generated changeset, and make sure it
# applies and recreates the right state.

# TODO: Separate 'prepare' phase where we find a list of potentially
# committed files. We can then pause the commit to prompt for a
# commit message, knowing the summary will be the same as what's
# actually used for the commit. (But perhaps simpler to simply get
# the tree status, then use that for a selective commit?)

# The newly committed revision is going to have a shape corresponding
# to that of the working inventory. Files that are not in the
# working tree and that were in the predecessor are reported as
# removed --- this can include files that were either removed from the
# inventory or deleted in the working tree. If they were only
# deleted from disk, they are removed from the working inventory.
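
# A small standalone illustration of that classification (assumed helper and
# names for illustration only, not code from this module):
def _deletion_status(in_basis, in_working_inv, exists_on_disk):
    """Classify a file from the predecessor revision for the new commit."""
    if not in_basis:
        return 'not a predecessor file'
    if not in_working_inv:
        return 'removed (was removed from the inventory)'
    if not exists_on_disk:
        # only deleted from disk: drop it from the working inventory too,
        # and report it as removed.
        return 'removed (deleted in the working tree)'
    return 'still present'
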
# We then consider the remaining entries, which will be in the new
# version. Directory entries are simply copied across. File entries
# must be checked to see if a new version of the file should be
# recorded. For each parent revision inventory, we check to see what
# version of the file was present. If the file was present in at
# least one tree, and if it was the same version in all the trees,
# then we can just refer to that version. Otherwise, a new version
# representing the merger of the file versions must be added.
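
# An illustrative sketch of that decision (a standalone helper with assumed
# names, not code from this module):
def _version_to_reuse(file_id, parent_inventories):
    """Return the parent file version to refer to, or None if a new
    version representing the merge must be created."""
    versions = set()
    for inv in parent_inventories:
        if file_id in inv:
            versions.add(inv[file_id].revision)
    if len(versions) == 1:
        # present in at least one parent tree, and the same version in all
        # of the trees that have it: just refer to that version.
        return versions.pop()
    # absent from every parent, or different versions in different parents:
    # a new version is needed.
    return None
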
# TODO: Update hashcache before and after - or does the WorkingTree

# TODO: Rather than mashing together the ancestry and storing it back,
# perhaps the weave should have a single method which does it all in one
# go, avoiding a lot of redundant work.

# TODO: Perhaps give a warning if one of the revisions marked as
# merged is already in the ancestry, and then don't record it as a

# TODO: If the file is newly merged but unchanged from the version it
# merges from, then it should still be reported as newly added
# relative to the basis revision.

# TODO: Do checks that the tree can be committed *before* running the
# editor; this should include checks for a pointless commit and for
# unknown or missing files.

# TODO: If commit fails, leave the message in a file somewhere.
from binascii import hexlify
from cStringIO import StringIO
# Standard-library modules used further down in this file.
import os
import re
import sys
import time
import tempfile
import warnings

import bzrlib
import bzrlib.ui
from bzrlib.osutils import (local_time_offset, username,
                            rand_bytes, compact_date, user_email,
                            kind_marker, is_inside_any, quotefn,
                            sha_string, sha_file, isdir, isfile)
from bzrlib.branch import gen_file_id
from bzrlib.errors import BzrError, PointlessCommit
from bzrlib.revision import Revision, RevisionReference
from bzrlib.trace import mutter, note
import bzrlib.errors as errors
from bzrlib.errors import (BzrError, PointlessCommit,
                           BzrCheckError,
                           ConflictsInTree,
                           StrictCommitFailed)
from bzrlib.osutils import (kind_marker, isdir, isfile, is_inside_any,
                            is_inside_or_parent_of_any,
                            quotefn, sha_file, split_lines)
from bzrlib.testament import Testament
from bzrlib.trace import mutter, note, warning
from bzrlib.xml5 import serializer_v5
from bzrlib.inventory import Inventory
from bzrlib.delta import compare_trees
from bzrlib.weave import Weave
from bzrlib.weavefile import read_weave, write_weave_v5
from bzrlib.atomicfile import AtomicFile
from bzrlib.inventory import Inventory, ROOT_ID, InventoryEntry
from bzrlib.symbol_versioning import (deprecated_passed,
                                      deprecated_function,
                                      DEPRECATED_PARAMETER,
                                      zero_seven)
from bzrlib.workingtree import WorkingTree
@deprecated_function(zero_seven)
def commit(*args, **kwargs):
    """Commit a new revision to a branch.
    """

    def __init__(self,
        if reporter is not None:
            self.reporter = reporter
        else:
            self.reporter = NullCommitReporter()
        if config is not None:

    def commit(self,
               branch=DEPRECATED_PARAMETER, message=None,
               specific_files=None,
               allow_pointless=True,
"""Commit working copy as a new revision.
119
The basic approach is to add all the file texts into the
120
store, then the inventory, then make a new revision pointing
121
to that inventory and store that.
123
This is not quite safe if the working copy changes during the
124
commit; for the moment that is simply not allowed. A better
125
approach is to make a temporary copy of the files before
126
computing their hashes, and then add those hashes in turn to
127
the inventory. This should mean at least that there are no
128
broken hash pointers. There is no way we can get a snapshot
129
of the whole directory at an instant. This would also have to
130
be robust against files disappearing, moving, etc. So the
131
whole thing is a bit hard.
133
This raises PointlessCommit if there are no changes, no new merges,
134
and allow_pointless is false.
196
branch -- the deprecated branch to commit to. New callers should pass in
199
message -- the commit message, a mandatory parameter
136
201
timestamp -- if not None, seconds-since-epoch for a
137
202
postdated/predated commit.
140
If true, commit only those files.
204
specific_files -- If true, commit only those files.
143
If set, use this as the new revision id.
206
rev_id -- If set, use this as the new revision id.
144
207
Useful for test or import commands that need to tightly
145
208
control what revisions are assigned. If you duplicate
146
209
a revision id that exists elsewhere it is your own fault.
147
210
If null (default), a time/random revision id is generated.
212
allow_pointless -- If true (default), commit even if nothing
213
has changed and no merges are recorded.
215
strict -- If true, don't allow a commit if the working tree
216
contains unknown files.
218
revprops -- Properties for new revision
219
:param local: Perform a local only commit.
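
        Example (an illustrative sketch, not taken from this module; it
        assumes an existing working tree in the current directory):

            from bzrlib.workingtree import WorkingTree
            wt = WorkingTree.open('.')
            Commit().commit(working_tree=wt, message='fix the frobnicator',
                            allow_pointless=False)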
        self.branch.lock_write()
        mutter('preparing to commit')

        if deprecated_passed(branch):
            warnings.warn("Commit.commit (branch, ...): The branch parameter is "
                 "deprecated as of bzr 0.8. Please use working_tree= instead.",
                 DeprecationWarning, stacklevel=2)
            self.work_tree = self.branch.bzrdir.open_workingtree()
        elif working_tree is None:
            raise BzrError("One of branch and working_tree must be passed into commit().")
        else:
            self.work_tree = working_tree
            self.branch = self.work_tree.branch
        if message is None:
            raise BzrError("The message keyword parameter is required for commit().")

        self.bound_branch = None
        self.master_branch = None
        self.master_locked = False
        self.specific_files = specific_files
        self.allow_pointless = allow_pointless

        if timestamp is None:
            self.timestamp = time.time()
        else:
            self.timestamp = long(timestamp)

        if committer is None:
            self.committer = username(self.branch)
        else:
            assert isinstance(committer, basestring), type(committer)
            self.committer = committer

        if timezone is None:
            self.timezone = local_time_offset()
        else:
            self.timezone = int(timezone)

        assert isinstance(message, basestring), type(message)
        self.message = message

        if reporter is None and self.reporter is None:
            self.reporter = NullCommitReporter()
        elif reporter is not None:
            self.reporter = reporter

        self.work_tree.lock_write()
        self.pb = bzrlib.ui.ui_factory.nested_progress_bar()
        # First walk over the working inventory: update it and also build
        # a new revision inventory as we go. The revision inventory needs
        # to hold the text-id, sha1 and size of the actual file versions
        # committed in the revision. (These are not present in the working
        # inventory.) We also need to detect missing/deleted files, and
        # remove them from the working inventory.
        self.work_tree = self.branch.working_tree()
        # Cannot commit with conflicts present.
        if len(self.work_tree.conflicts()) > 0:
            raise ConflictsInTree

        # setup the bound branch variables as needed.
        self._check_bound_branch()

        # check for out of date working trees
        # if we are bound, then self.branch is the master branch and this
        # test is thus all we need.
        if self.work_tree.last_revision() != self.master_branch.last_revision():
            raise errors.OutOfDateTree(self.work_tree)

        if strict:
            # raise an exception as soon as we find a single unknown.
            for unknown in self.work_tree.unknowns():
                raise StrictCommitFailed()

        if self.config is None:
            self.config = self.branch.get_config()

        if isinstance(message, str):
            message = message.decode(bzrlib.user_encoding)
        assert isinstance(message, unicode), type(message)
        self.message = message
        self._escape_commit_message()
        self.work_inv = self.work_tree.inventory
        self.basis_tree = self.work_tree.basis_tree()
        self.basis_inv = self.basis_tree.inventory

        # one to finish, one for rev and inventory, and one for each
        # inventory entry, and the same for the new inventory.
        # note that this estimate is too long when we do a partial tree
        # commit which excludes some new files from being considered.
        # The estimate is corrected when we populate the new inv.
        self.pb_total = len(self.work_inv) + 5
        self.pending_merges = self.branch.pending_merges()
        if self.pending_merges:
            raise NotImplementedError("sorry, can't commit merges to the weave format yet")

        self._gather_parents()
        if len(self.parents) > 1 and self.specific_files:
            raise NotImplementedError('selected-file commit of merges is not supported yet: files %r',
                                      self.specific_files)
        self._check_parents_present()
        self.builder = self.branch.get_commit_builder(self.parents,
            self.config, timestamp, timezone, committer, revprops, rev_id)

        if self.rev_id is None:
            self.rev_id = _gen_revision_id(self.branch, time.time())

        # TODO: update hashcache
        self.delta = compare_trees(self.basis_tree, self.work_tree,
                                   specific_files=self.specific_files)

        if not (self.delta.has_changed()
                or self.allow_pointless
                or self.pending_merges):
        self._remove_deleted()
        self._populate_new_inv()
        self._report_deletes()

        if not (self.allow_pointless
                or len(self.parents) > 1
                or self.builder.new_inventory != self.basis_inv):
            raise PointlessCommit()
        self.new_inv = self.basis_inv.copy()

        self.delta.show(sys.stdout)

        self._remove_deleted()

        self.branch._write_inventory(self.work_inv)
        self._record_inventory()

        self._make_revision()
        note('committed r%d {%s}', (self.branch.revno() + 1),
             self.rev_id)
        self._emit_progress_update()
        # TODO: Now the new inventory is known, check for conflicts and prompt the
        # user for a commit message.
        self.builder.finish_inventory()
        self._emit_progress_update()
        self.rev_id = self.builder.commit(self.message)
        self._emit_progress_update()
        # revision data is in the local branch now.

        # upload revision data to the master.
        # this will propagate merged revisions too if needed.
        if self.bound_branch:
            self.master_branch.repository.fetch(self.branch.repository,
                                                revision_id=self.rev_id)
            # now the master has the revision data
            # 'commit' to the master first so a timeout here causes the local
            # branch to be out of date
            self.master_branch.append_revision(self.rev_id)

        # and now do the commit locally.
        self.branch.append_revision(self.rev_id)
        self.branch.set_pending_merges([])

        self.work_tree.set_pending_merges([])
        self.work_tree.set_last_revision(self.rev_id)
        # now the work tree is up to date with the branch

        self.reporter.completed(self.branch.revno(), self.rev_id)
        if self.config.post_commit() is not None:
            hooks = self.config.post_commit().split(' ')
            # this would be nicer with twisted.python.reflect.namedAny
            for hook in hooks:
                result = eval(hook + '(branch, rev_id)',
                              {'branch':self.branch,
                               'rev_id':self.rev_id})
        self._emit_progress_update()
    def _record_inventory(self):
        inv_tmp = StringIO()
        serializer_v5.write_inventory(self.new_inv, inv_tmp)
        self.inv_sha1 = sha_string(inv_tmp.getvalue())
        self.branch.inventory_store.add(inv_tmp, self.rev_id)

    def _make_revision(self):
        """Record a new revision object for this commit."""
        self.rev = Revision(timestamp=self.timestamp,
                            timezone=self.timezone,
                            committer=self.committer,
                            message=self.message,
                            inventory_sha1=self.inv_sha1,
                            revision_id=self.rev_id)

        self.rev.parents = []
        precursor_id = self.branch.last_patch()
        if precursor_id:
            self.rev.parents.append(RevisionReference(precursor_id))
        for merge_rev in self.pending_merges:
            self.rev.parents.append(RevisionReference(merge_rev))

        rev_tmp = tempfile.TemporaryFile()
        serializer_v5.write_revision(self.rev, rev_tmp)
        self.branch.revision_store.add(rev_tmp, self.rev_id)
        mutter('new revision_id is {%s}', self.rev_id)
    def _check_bound_branch(self):
        """Check to see if the local branch is bound.

        If it is bound, then most of the commit will actually be
        done using the remote branch as the target branch.
        Only at the end will the local branch be updated.
        """
        if self.local and not self.branch.get_bound_location():
            raise errors.LocalRequiresBoundBranch()

        self.master_branch = self.branch.get_master_branch()
        if not self.master_branch:
            # make this branch the reference branch for out of date checks.
            self.master_branch = self.branch
            return

        # If the master branch is bound, we must fail
        master_bound_location = self.master_branch.get_bound_location()
        if master_bound_location:
            raise errors.CommitToDoubleBoundBranch(self.branch,
                    self.master_branch, master_bound_location)

        # TODO: jam 20051230 We could automatically push local
        #       commits to the remote branch if they would fit.
        #       But for now, just require remote to be identical
        # Make sure the local branch is identical to the master
        master_rh = self.master_branch.revision_history()
        local_rh = self.branch.revision_history()
        if local_rh != master_rh:
            raise errors.BoundBranchOutOfDate(self.branch,
                    self.master_branch)

        # Now things are ready to change the master branch
        self.bound_branch = self.branch
        self.master_branch.lock_write()
        self.master_locked = True
        """Cleanup any open locks, progress bars etc."""
        cleanups = [self._cleanup_bound_branch,
                    self.work_tree.unlock,
        found_exception = None
        for cleanup in cleanups:
                # we want every cleanup to run no matter what.
                # so we have a catchall here, but we will raise the
                # last encountered exception up the stack: and
                # typically this will be useful enough.
        if found_exception is not None:
            # don't do a plain raise, because the last exception may have been
            # trashed, e is our sure-to-work exception even though it loses the
            # full traceback. XXX: RBC 20060421 perhaps we could check the
            # exc_info and if it's the same one do a plain raise otherwise
            # 'raise e' as we do now.
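
        # The try/except inside the loop above is only partially shown here.
        # A minimal standalone sketch of the intended pattern (assumed, not
        # copied from bzrlib): run every cleanup, remember the last failure,
        # and re-raise it only after all cleanups have had their chance.
        def _run_all_cleanups_then_reraise(cleanups):
            found_exception = None
            for cleanup in cleanups:
                try:
                    cleanup()
                except Exception, e:
                    # every cleanup must get its chance to run
                    found_exception = e
            if found_exception is not None:
                raise found_exception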
    def _cleanup_bound_branch(self):
        """Executed at the end of a try/finally to cleanup a bound branch.

        If the branch wasn't bound, this is a no-op.
        If it was, it resets self.branch to the local branch, instead
        """
        if not self.bound_branch:
            return
        if self.master_locked:
            self.master_branch.unlock()
    def _escape_commit_message(self):
        """Replace xml-incompatible control characters."""
        # FIXME: RBC 20060419 this should be done by the revision
        # serialiser not by commit. Then we can also add an unescaper
        # in the deserializer and start roundtripping revision messages
        # precisely. See repository_implementations/test_repository.py

        # Python strings can include characters that can't be
        # represented in well-formed XML; escape characters that
        # aren't listed in the XML specification
        # (http://www.w3.org/TR/REC-xml/#NT-Char).
        self.message, escape_count = re.subn(
            u'[^\x09\x0A\x0D\u0020-\uD7FF\uE000-\uFFFD]+',
            lambda match: match.group(0).encode('unicode_escape'),
            self.message)
        self.reporter.escaped(escape_count, self.message)
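
        # A standalone illustration of the escaping above (assumed example,
        # not part of bzrlib): runs of characters outside the XML range are
        # replaced by their backslash-escaped form.
        #
        #   >>> re.subn(u'[^\x09\x0A\x0D\u0020-\uD7FF\uE000-\uFFFD]+',
        #   ...         lambda m: m.group(0).encode('unicode_escape'),
        #   ...         u'fix tab handling\x07')
        #   (u'fix tab handling\\x07', 1)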
    def _gather_parents(self):
        """Record the parents of a merge for merge detection."""
        # TODO: Make sure that this list doesn't contain duplicate
        # entries and the order is preserved when doing this.
        self.parents = self.work_tree.get_parent_ids()
        self.parent_invs = []
        for revision in self.parents:
            if self.branch.repository.has_revision(revision):
                inventory = self.branch.repository.get_inventory(revision)
                self.parent_invs.append(inventory)

    def _check_parents_present(self):
        for parent_id in self.parents:
            mutter('commit parent revision {%s}', parent_id)
            if not self.branch.repository.has_revision(parent_id):
                if parent_id == self.branch.last_revision():
                    warning("parent is missing %r", parent_id)
                    raise BzrCheckError("branch %s is missing revision {%s}"
                            % (self.branch, parent_id))
    def _remove_deleted(self):
        """Remove deleted files from the working and stored inventories."""
        for path, id, kind in self.delta.removed:
            if self.work_inv.has_id(id):
                del self.work_inv[id]
            if self.new_inv.has_id(id):
    def _store_files(self):
        """Store new texts of modified/added files."""
        # We must make sure that directories are added before anything
        # inside them is added. The files within the delta report are
        # sorted by path so we know the directory will come before its
        for path, file_id, kind in self.delta.added:
            ie = self.work_inv[file_id].copy()
            self._store_file_text(file_id)

        for path, file_id, kind in self.delta.modified:
            self._store_file_text(file_id)

        for old_path, new_path, file_id, kind, text_modified in self.delta.renamed:
            if not text_modified:
                continue
            self._store_file_text(file_id)
    def _store_file_text(self, file_id):
        """Store updated text for one modified or added file."""
        note('store new text for {%s} in revision {%s}',
             file_id, self.rev_id)
        new_lines = self.work_tree.get_file(file_id).readlines()
        if file_id in self.new_inv:     # was in basis inventory
            ie = self.new_inv[file_id]
            assert ie.file_id == file_id
            assert file_id in self.basis_inv
            assert self.basis_inv[file_id].kind == 'file'
            old_version = self.basis_inv[file_id].text_version
            file_parents = [old_version]
        else:                           # new in this revision
            ie = self.work_inv[file_id].copy()
            assert file_id not in self.basis_inv
            assert ie.kind == 'file'
            file_parents = []
        self._add_text_to_weave(file_id, new_lines, file_parents)

        # make a new inventory entry for this file, using whatever
        # it had in the working copy, plus details on the new text
        ie.text_sha1 = _sha_strings(new_lines)
        ie.text_size = sum(map(len, new_lines))
        ie.text_version = self.rev_id
        ie.entry_version = self.rev_id
    def _add_text_to_weave(self, file_id, new_lines, parents):
        weave_fn = self.branch.controlfilename(['weaves', file_id + '.weave'])
        if os.path.exists(weave_fn):
            w = read_weave(file(weave_fn, 'rb'))
        else:
            w = Weave()

        # XXX: Should set the appropriate parents by looking for this file_id
        # in all revision parents
        parent_idxs = map(w.lookup, parents)
        w.add(self.rev_id, parent_idxs, new_lines)

        af = AtomicFile(weave_fn)
        write_weave_v5(w, af)
def _gen_revision_id(branch, when):
    """Return new revision-id."""
    s = '%s-%s-' % (user_email(branch), compact_date(when))
    s += hexlify(rand_bytes(8))
    return s
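
# For illustration only (assumed values, not real output): the generated id
# has the shape '<email>-<compact date>-<16 hex digits>', for example
# 'user@example.com-20060414013559-0123456789abcdef'.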

def _sha_strings(strings):
    """Return the sha-1 of concatenation of strings"""
    map(s.update, strings)
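
# A minimal sketch of what this helper is presumably meant to do in full (the
# accumulator and return statement are not shown above; use of the 'sha'
# module is an assumption):
#
#   import sha
#   def _sha_strings(strings):
#       s = sha.new()
#       map(s.update, strings)
#       return s.hexdigest()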
        """Remove deleted files from the working inventories.

        This is done prior to taking the working inventory as the
        basis for the new committed inventory.

        This returns true if any files *that existed in the basis
        inventory* were deleted. Files that were added and deleted in
        the working copy don't matter.
        """
        specific = self.specific_files
        deleted_ids = []
        for path, ie in self.work_inv.iter_entries():
            if specific and not is_inside_any(specific, path):
                continue
            if not self.work_tree.has_filename(path):
                self.reporter.missing(path)
                deleted_ids.append((path, ie.file_id))

        deleted_ids.sort(reverse=True)
        for path, file_id in deleted_ids:
            del self.work_inv[file_id]
        self.work_tree._write_inventory(self.work_inv)
    def _populate_new_inv(self):
        """Build revision inventory.

        This creates a new empty inventory. Depending on
        which files are selected for commit, and what is present in the
        current tree, the new inventory is populated. Inventory entries
        which are candidates for modification have their revision set to
        None; inventory entries that are carried over untouched have their
        revision set to their prior value.
        """
        # ESEPARATIONOFCONCERNS: this function is diffing and using the diff
        # results to create a new inventory at the same time, which results
        # in bugs like #46635. Any reason not to use/enhance Tree.changes_from?
        mutter("Selecting files for commit with filter %s", self.specific_files)
        # at this point we don't copy the root entry:
        entries = self.work_inv.iter_entries()
        self._emit_progress_update()
        for path, new_ie in entries:
            self._emit_progress_update()
            file_id = new_ie.file_id
            # mutter('check %s {%s}', path, file_id)
            if (not self.specific_files or
                is_inside_or_parent_of_any(self.specific_files, path)):
                # mutter('%s selected for commit', path)
                ie = new_ie.copy()
                ie.revision = None
            else:
                # mutter('%s not selected for commit', path)
                if self.basis_inv.has_id(file_id):
                    ie = self.basis_inv[file_id].copy()
                else:
                    # this entry is new and not being committed
                    continue
            self.builder.record_entry_contents(ie, self.parent_invs,
                path, self.work_tree)
            # describe the nature of the change that has occurred relative to
            # the basis inventory.
            if (self.basis_inv.has_id(ie.file_id)):
                basis_ie = self.basis_inv[ie.file_id]
            else:
                basis_ie = None
            change = ie.describe_change(basis_ie, ie)
            if change in (InventoryEntry.RENAMED,
                InventoryEntry.MODIFIED_AND_RENAMED):
                old_path = self.basis_inv.id2path(ie.file_id)
                self.reporter.renamed(change, old_path, path)
            else:
                self.reporter.snapshot_change(change, path)

        if not self.specific_files:
            return

        # ignore removals that don't match filespec
        for path, new_ie in self.basis_inv.iter_entries():
            if new_ie.file_id in self.work_inv:
                continue
            if is_inside_any(self.specific_files, path):
                continue
            self.builder.record_entry_contents(ie, self.parent_invs, path,
    def _emit_progress_update(self):
        """Emit an update to the progress bar."""
        self.pb.update("Committing", self.pb_count, self.pb_total)

    def _report_deletes(self):
        for path, ie in self.basis_inv.iter_entries():
            if ie.file_id not in self.builder.new_inventory:
                self.reporter.deleted(path)