# Copyright (C) 2005 Canonical Ltd

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
## XXX: If we merged two versions of a file then we still need to
## create a new version representing that merge, even if it didn't
## change from the parent.

## TODO: Read back the just-generated changeset, and make sure it
## applies and recreates the right state.

# XXX: Can we do any better about making interrupted commits change
# nothing?  Perhaps the best approach is to integrate commit of
# AtomicFiles with releasing the lock on the Branch.
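
# Illustrative sketch (not part of this module): the usual way to make an
# interrupted write change nothing is to write into a temporary file and
# rename it into place only once the data is complete, which is roughly the
# idea behind AtomicFile.  The helper below shows that pattern with plain
# stdlib calls; its name and signature are hypothetical.
def _example_atomic_write(path, text):
    import os
    tmp_path = path + '.tmp'
    f = open(tmp_path, 'wb')
    try:
        f.write(text)
    finally:
        f.close()
    # os.rename is atomic on POSIX, so readers either see the old file or
    # the complete new one, never a half-written version.
    os.rename(tmp_path, path)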

# TODO: Separate 'prepare' phase where we find a list of potentially
# committed files.  We can then pause the commit to prompt for a
# commit message, knowing the summary will be the same as what's
# actually used for the commit.  (But perhaps simpler to simply get
# the tree status, then use that for a selective commit?)

# The newly committed revision is going to have a shape corresponding
# to that of the working inventory.  Files that are not in the
# working tree and that were in the predecessor are reported as
# removed --- this can include files that were either removed from the
# inventory or deleted in the working tree.  If they were only
# deleted from disk, they are removed from the working inventory.

# We then consider the remaining entries, which will be in the new
# version.  Directory entries are simply copied across.  File entries
# must be checked to see if a new version of the file should be
# recorded.  For each parent revision inventory, we check to see what
# version of the file was present.  If the file was present in at
# least one tree, and if it was the same version in all the trees,
# then we can just refer to that version.  Otherwise, a new version
# representing the merger of the file versions must be added.
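
# Illustrative sketch (not used by the code below): the rule just described,
# written out as a small helper.  parent_versions holds the version of one
# file as recorded in each parent revision inventory, with None standing for
# "not present in that tree"; new_version_id is the version that would be
# created for this commit.  The name and signature are hypothetical.
def _example_choose_file_version(parent_versions, new_version_id):
    present = [v for v in parent_versions if v is not None]
    if present and len(set(present)) == 1:
        # the same version in every tree that has the file: just refer to it
        return present[0]
    # absent everywhere, or the parents disagree: a new version representing
    # the merge must be recorded
    return new_version_id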

# TODO: Update hashcache before and after - or does the WorkingTree
# look after that?

# TODO: Rather than mashing together the ancestry and storing it back,
# perhaps the weave should have single method which does it all in one
# go, avoiding a lot of redundant work.

# TODO: Perhaps give a warning if one of the revisions marked as
# merged is already in the ancestry, and then don't record it as a
# distinct parent.

# TODO: If the file is newly merged but unchanged from the version it
# merges from, then it should still be reported as newly added
# relative to the basis revision.

import re
import sha
import sys
import tempfile
import time

from binascii import hexlify
from cStringIO import StringIO

from bzrlib.osutils import (local_time_offset, username,
                            rand_bytes, compact_date, user_email,
                            kind_marker, is_inside_any, quotefn,
                            sha_string, sha_strings, sha_file, isdir, isfile,
                            split_lines)
from bzrlib.branch import gen_file_id, INVENTORY_FILEID, ANCESTRY_FILEID
from bzrlib.errors import (BzrError, PointlessCommit,
                           HistoryMissing, ConflictsInTree)
from bzrlib.revision import Revision, RevisionReference
from bzrlib.trace import mutter, note, warning
from bzrlib.xml5 import serializer_v5
from bzrlib.inventory import Inventory, ROOT_ID
from bzrlib.delta import compare_trees
from bzrlib.weave import Weave
from bzrlib.weavefile import read_weave, write_weave_v5
from bzrlib.atomicfile import AtomicFile


def commit(*args, **kwargs):
    """New code should use the Commit class instead."""
    ## XXX: Remove this in favor of Branch.commit?
    Commit().commit(*args, **kwargs)
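
# Example of the calling convention (a sketch, only for illustration): new
# code constructs a Commit object and calls its commit() method directly
# rather than going through the wrapper above.  The branch argument and the
# message text are hypothetical placeholders.
def _example_commit_usage(branch):
    Commit().commit(branch, "fix the frobnicator",
                    specific_files=None,
                    allow_pointless=False)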


class NullCommitReporter(object):
    """I report on progress of a commit."""

    def added(self, path):
        pass

    def removed(self, path):
        pass

    def renamed(self, old_path, new_path):
        pass

    def snapshot_change(self, change, path):
        pass

    def completed(self, revno, rev_id):
        pass

    def deleted(self, file_id):
        pass

    def escaped(self, escape_count, message):
        pass

    def missing(self, path):
        pass


class ReportCommitToLog(NullCommitReporter):

    def added(self, path):
        note('added %s', path)

    def removed(self, path):
        note('removed %s', path)

    def renamed(self, old_path, new_path):
        note('renamed %s => %s', old_path, new_path)

    def snapshot_change(self, change, path):
        note("%s %s", change, path)

    def completed(self, revno, rev_id):
        note('committed r%d {%s}', revno, rev_id)

    def deleted(self, file_id):
        note('deleted %s', file_id)

    def escaped(self, escape_count, message):
        note("replaced %d control characters in message", escape_count)

    def missing(self, path):
        note('missing %s', path)
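
# A sketch of a custom reporter: any object that provides the same methods
# as NullCommitReporter can be substituted to collect the progress of a
# commit instead of writing it to the log.  How the reporter is handed to
# Commit is not shown in the lines recovered here, so treat the wiring as an
# assumption; the class itself is only an illustration.
class _ExampleListReporter(NullCommitReporter):
    """Accumulate progress messages in a list instead of noting them."""

    def __init__(self):
        self.messages = []

    def snapshot_change(self, change, path):
        self.messages.append('%s %s' % (change, path))

    def deleted(self, file_id):
        self.messages.append('deleted %s' % file_id)

    def missing(self, path):
        self.messages.append('missing %s' % path)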


class Commit(object):
    """Task of committing a new revision."""

    def commit(self,
               branch, message,
               timestamp=None,
               specific_files=None,
               rev_id=None,
               allow_pointless=True):
        """Commit working copy as a new revision.

        The basic approach is to add all the file texts into the
        store, then the inventory, then make a new revision pointing
        to that inventory and store that.

        This is not quite safe if the working copy changes during the
        commit; for the moment that is simply not allowed.  A better
        approach is to make a temporary copy of the files before
        computing their hashes, and then add those hashes in turn to
        the inventory.  This should mean at least that there are no
        broken hash pointers.  There is no way we can get a snapshot
        of the whole directory at an instant.  This would also have to
        be robust against files disappearing, moving, etc.  So the
        whole thing is a bit hard.

        This raises PointlessCommit if there are no changes, no new merges,
        and allow_pointless is false.

        timestamp -- if not None, seconds-since-epoch for a
             postdated/predated commit.

        specific_files -- If true, commit only those files.

        rev_id -- If set, use this as the new revision id.
            Useful for test or import commands that need to tightly
            control what revisions are assigned.  If you duplicate
            a revision id that exists elsewhere it is your own fault.
            If null (default), a time/random revision id is generated.

        allow_pointless -- If true (default), commit even if nothing
            has changed and no merges are recorded.
        """
        mutter('preparing to commit')

        self.branch = branch
        self.weave_store = branch.weave_store
        self.rev_id = rev_id
        self.specific_files = specific_files
        self.allow_pointless = allow_pointless

        if timestamp is None:
            self.timestamp = time.time()
        else:
            self.timestamp = long(timestamp)

        assert isinstance(message, basestring), type(message)
        self.message = message
        self._escape_commit_message()

        self.branch.lock_write()

        # First walk over the working inventory; and both update that
        # and also build a new revision inventory.  The revision
        # inventory needs to hold the text-id, sha1 and size of the
        # actual file versions committed in the revision.  (These are
        # not present in the working inventory.)  We also need to
        # detect missing/deleted files, and remove them from the
        # working inventory.

        self.work_tree = self.branch.working_tree()
        self.work_inv = self.work_tree.inventory
        self.basis_tree = self.branch.basis_tree()
        self.basis_inv = self.basis_tree.inventory

        self._gather_parents()
        if self.rev_id is None:
            self.rev_id = _gen_revision_id(self.branch, time.time())

        # TODO: update hashcache

        if len(self.parents) > 1 and self.specific_files:
            raise NotImplementedError('selected-file commit of merges is not supported yet')
        self._check_parents_present()

        self._remove_deleted()
        self._populate_new_inv()
        self._store_snapshot()
        self._report_deletes()

        if not (self.allow_pointless
                or len(self.parents) > 1
                or self.new_inv != self.basis_inv):
            raise PointlessCommit()

        if len(list(self.work_tree.iter_conflicts())) > 0:
            raise ConflictsInTree

        self._record_inventory()
        self._make_revision()
        self.reporter.completed(self.branch.revno() + 1, self.rev_id)
        self.branch.append_revision(self.rev_id)
        self.branch.set_pending_merges([])

        self.branch.unlock()

    def _record_inventory(self):
        """Store the inventory for the new revision."""
        inv_text = serializer_v5.write_inventory_to_string(self.new_inv)
        self.inv_sha1 = sha_string(inv_text)
        s = self.branch.control_weaves
        s.add_text('inventory', self.rev_id,
                   split_lines(inv_text), self.present_parents,
                   self.branch.get_transaction())

    def _record_ancestry(self):
        """Append merged revision ancestry to the ancestry file."""
        if len(self.parents) > 1:
            raise NotImplementedError("sorry, can't commit merges yet")
        w = self.branch.weave_store.get_weave_or_empty(ANCESTRY_FILEID)
        lines = w.get(w.lookup(self.parents[0]))
        lines.append(self.rev_id + '\n')
        parent_idxs = map(w.lookup, self.parents)
        w.add(self.rev_id, parent_idxs, lines)
        self.branch.weave_store.put_weave(ANCESTRY_FILEID, w)

    def _escape_commit_message(self):
        """Replace xml-incompatible control characters."""
        # Python strings can include characters that can't be
        # represented in well-formed XML; escape characters that
        # aren't listed in the XML specification
        # (http://www.w3.org/TR/REC-xml/#NT-Char).
        if isinstance(self.message, unicode):
            char_pattern = u'[^\x09\x0A\x0D\u0020-\uD7FF\uE000-\uFFFD]'
        else:
            # Use a regular 'str' as pattern to avoid having re.subn
            # return 'unicode' results.
            char_pattern = '[^\x09\x0A\x0D\x20-\xFF]'
        self.message, escape_count = re.subn(
            char_pattern,
            lambda match: match.group(0).encode('unicode_escape'),
            self.message)
        if escape_count:
            self.reporter.escaped(escape_count, self.message)
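
    # A sketch of the effect of _escape_commit_message (commented out so it
    # stays illustrative): a control character that XML cannot carry, such
    # as a form feed, is rewritten as its escaped spelling and counted, so
    # the reporter can warn about the substitution.
    #
    #     msg, count = re.subn(u'[^\x09\x0A\x0D\u0020-\uD7FF\uE000-\uFFFD]',
    #                          lambda m: m.group(0).encode('unicode_escape'),
    #                          u'bad \x0c character')
    #     # msg == u'bad \\x0c character', count == 1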

    def _gather_parents(self):
        """Record the parents of a merge for merge detection."""
        pending_merges = self.branch.pending_merges()
        self.parents = []
        self.parent_invs = []
        self.present_parents = []
        precursor_id = self.branch.last_revision()
        if precursor_id:
            self.parents.append(precursor_id)
        self.parents += pending_merges
        for revision in self.parents:
            if self.branch.has_revision(revision):
                self.parent_invs.append(self.branch.get_inventory(revision))
                self.present_parents.append(revision)

    def _check_parents_present(self):
        for parent_id in self.parents:
            mutter('commit parent revision {%s}', parent_id)
            if not self.branch.has_revision(parent_id):
                if parent_id == self.branch.last_revision():
                    warning("parent is missing %r", parent_id)
                    raise HistoryMissing(self.branch, 'revision', parent_id)
                else:
                    mutter("commit will ghost revision %r", parent_id)

    def _make_revision(self):
        """Record a new revision object for this commit."""
        self.rev = Revision(timestamp=self.timestamp,
                            message=self.message,
                            inventory_sha1=self.inv_sha1,
                            revision_id=self.rev_id)
        self.rev.parent_ids = self.parents
        rev_tmp = tempfile.TemporaryFile()
        serializer_v5.write_revision(self.rev, rev_tmp)
        rev_tmp.seek(0)
        self.branch.revision_store.add(rev_tmp, self.rev_id)
        mutter('new revision_id is {%s}', self.rev_id)

    def _remove_deleted(self):
        """Remove deleted files from the working inventories.

        This is done prior to taking the working inventory as the
        basis for the new committed inventory.

        This returns true if any files
        *that existed in the basis inventory* were deleted.
        Files that were added and deleted
        in the working copy don't matter.
        """
        specific = self.specific_files
        deleted_ids = []
        for path, ie in self.work_inv.iter_entries():
            if specific and not is_inside_any(specific, path):
                continue
            if not self.work_tree.has_filename(path):
                self.reporter.missing(path)
                deleted_ids.append((path, ie.file_id))
        if deleted_ids:
            deleted_ids.sort(reverse=True)
            for path, file_id in deleted_ids:
                del self.work_inv[file_id]
            self.branch._write_inventory(self.work_inv)
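
    # Note on the reverse sort above (illustrative comment only): sorting the
    # (path, file_id) pairs in reverse path order means entries inside a
    # directory are deleted from the inventory before the directory itself,
    # e.g. 'a/b/c' is removed before 'a/b', which is removed before 'a'.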

    def _store_snapshot(self):
        """Pass over inventory and record a snapshot.

        Entries get a new revision when they are modified in
        any way, which includes a merge with a new set of
        parents that have the same entry.
        """
        # XXX: Need to think more here about when the user has
        # made a specific decision on a particular value -- c.f.
        # mark-merge.
        for path, ie in self.new_inv.iter_entries():
            previous_entries = ie.find_previous_heads(
                self.parent_invs,
                self.weave_store.get_weave_or_empty(ie.file_id,
                    self.branch.get_transaction()))
            if ie.revision is None:
                change = ie.snapshot(self.rev_id, path, previous_entries,
                                     self.work_tree, self.weave_store,
                                     self.branch.get_transaction())
            else:
                change = "unchanged"
            self.reporter.snapshot_change(change, path)

    def _store_files(self):
        """Store new texts of modified/added files."""
        # We must make sure that directories are added before anything
        # inside them is added.  The files within the delta report are
        # sorted by path so we know the directory will come before its
        # contents.
        for path, file_id, kind in self.delta.added:
            if kind != 'file':
                ie = self.work_inv[file_id].copy()
                self.new_inv.add(ie)
            else:
                self._store_file_text(file_id)

        for path, file_id, kind in self.delta.modified:
            if kind != 'file':
                continue
            self._store_file_text(file_id)

        for old_path, new_path, file_id, kind, text_modified in self.delta.renamed:
            if kind != 'file':
                continue
            if not text_modified:
                continue
            self._store_file_text(file_id)

    def _store_file_text(self, file_id):
        """Store updated text for one modified or added file."""
        note('store new text for {%s} in revision {%s}',
             file_id, self.rev_id)
        new_lines = self.work_tree.get_file(file_id).readlines()
        if file_id in self.new_inv:     # was in basis inventory
            ie = self.new_inv[file_id]
            assert ie.file_id == file_id
            assert file_id in self.basis_inv
            assert self.basis_inv[file_id].kind == 'file'
            old_version = self.basis_inv[file_id].text_version
            file_parents = [old_version]
        else:                           # new in this revision
            ie = self.work_inv[file_id].copy()
            self.new_inv.add(ie)
            assert file_id not in self.basis_inv
            file_parents = []
            assert ie.kind == 'file'
        self._add_text_to_weave(file_id, new_lines, file_parents)

        # make a new inventory entry for this file, using whatever
        # it had in the working copy, plus details on the new text
        ie.text_sha1 = _sha_strings(new_lines)
        ie.text_size = sum(map(len, new_lines))
        ie.text_version = self.rev_id
        ie.entry_version = self.rev_id

    def _add_text_to_weave(self, file_id, new_lines, parents):
        if file_id.startswith('__'):
            raise ValueError('illegal file-id %r for text file' % file_id)
        self.branch.weave_store.add_text(file_id, self.rev_id, new_lines, parents)

    def _populate_new_inv(self):
        """Build revision inventory.

        This creates a new empty inventory. Depending on
        which files are selected for commit, and what is present in the
        current tree, the new inventory is populated. inventory entries
        which are candidates for modification have their revision set to
        None; inventory entries that are carried over untouched have their
        revision set to their prior value.
        """
        mutter("Selecting files for commit with filter %s", self.specific_files)
        self.new_inv = Inventory()
        for path, new_ie in self.work_inv.iter_entries():
            file_id = new_ie.file_id
            mutter('check %s {%s}', path, new_ie.file_id)
            if self.specific_files:
                if not is_inside_any(self.specific_files, path):
                    mutter('%s not selected for commit', path)
                    self._carry_entry(file_id)
                    continue
                else:
                    # this is selected, ensure its parents are too.
                    parent_id = new_ie.parent_id
                    while parent_id != ROOT_ID:
                        if not self.new_inv.has_id(parent_id):
                            ie = self._select_entry(self.work_inv[parent_id])
                            mutter('%s selected for commit because of %s',
                                   self.new_inv.id2path(parent_id), path)

                        ie = self.new_inv[parent_id]
                        if ie.revision is not None:
                            ie.revision = None
                            mutter('%s selected for commit because of %s',
                                   self.new_inv.id2path(parent_id), path)
                        parent_id = ie.parent_id
            mutter('%s selected for commit', path)
            self._select_entry(new_ie)
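
    # Selection sketch (commented illustration only): with
    # specific_files=['src'], a change to 'src/foo.c' is selected because
    # is_inside_any(['src'], 'src/foo.c') holds, while 'README' is carried
    # over from the basis inventory untouched via _carry_entry.  This assumes
    # is_inside_any() tests whether a path lies inside any of the given
    # directories, as its use above suggests.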

    def _select_entry(self, new_ie):
        """Make new_ie be considered for committing."""
        ie = new_ie.copy()
        ie.revision = None
        self.new_inv.add(ie)
        return ie

    def _carry_entry(self, file_id):
        """Carry the file unchanged from the basis revision."""
        if self.basis_inv.has_id(file_id):
            self.new_inv.add(self.basis_inv[file_id].copy())

    def _report_deletes(self):
        for file_id in self.basis_inv:
            if file_id not in self.new_inv:
                self.reporter.deleted(self.basis_inv.id2path(file_id))


def _gen_revision_id(branch, when):
    """Return new revision-id."""
    s = '%s-%s-' % (user_email(branch), compact_date(when))
    s += hexlify(rand_bytes(8))
    return s
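
# For illustration (an assumption about the exact shape): with compact_date
# producing a YYYYMMDDHHMMSS stamp, the generated id looks something like
# 'jrandom@example.com-20050923120136-0e1f2a3b4c5d6e7f' -- the committer's
# email, the compacted time, and 16 random hex digits.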


def _sha_strings(strings):
    """Return the sha-1 of concatenation of strings"""
    s = sha.new()
    map(s.update, strings)
    return s.hexdigest()
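
# Usage sketch: _sha_strings(['a\n', 'b\n']) is equivalent to hashing the
# concatenated text 'a\nb\n' in one go; feeding the hash object line by line
# just avoids building the joined string in memory.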