# Copyright (C) 2005 Canonical Ltd

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307  USA

## XXX: Can we do any better about making interrupted commits change
## nothing?

## XXX: If we merged two versions of a file then we still need to
## create a new version representing that merge, even if it didn't
## change from the parent.

## TODO: Read back the just-generated changeset, and make sure it
## applies and recreates the right state.

import sha
import sys
import tempfile
import time

from binascii import hexlify
from cStringIO import StringIO

from bzrlib.osutils import (local_time_offset, username,
                            rand_bytes, compact_date, user_email,
                            kind_marker, is_inside_any, quotefn,
                            sha_string, sha_file, isdir, isfile)
from bzrlib.branch import gen_file_id, INVENTORY_FILEID, ANCESTRY_FILEID
from bzrlib.errors import BzrError, PointlessCommit
from bzrlib.revision import Revision, RevisionReference
from bzrlib.trace import mutter, note
from bzrlib.xml5 import serializer_v5
from bzrlib.inventory import Inventory
from bzrlib.delta import compare_trees
from bzrlib.weave import Weave
from bzrlib.weavefile import read_weave, write_weave_v5
from bzrlib.atomicfile import AtomicFile


def commit(*args, **kwargs):
    """Commit a new revision to a branch.

    Function-style interface for convenience of old callers.

    New code should use the Commit class instead.
    """
    Commit().commit(*args, **kwargs)


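# Example usage (sketch only; ``branch`` stands for an already-opened Branch
# object, and the keyword names follow Commit.commit() below):
#
#     commit(branch, 'fix the frobnicator')
#     Commit().commit(branch, 'fix the frobnicator', rev_id='my-test-rev-1')

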
class NullCommitReporter(object):
    """I report on progress of a commit."""

    def added(self, path):
        pass

    def removed(self, path):
        pass

    def renamed(self, old_path, new_path):
        pass


class ReportCommitToLog(NullCommitReporter):
    def added(self, path):
        note('added %s', path)

    def removed(self, path):
        note('removed %s', path)

    def renamed(self, old_path, new_path):
        note('renamed %s => %s', old_path, new_path)


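# A reporter can be handed to Commit (see __init__ below); ReportCommitToLog
# writes a line to the log for each action.  Sketch:
#
#     Commit(reporter=ReportCommitToLog()).commit(branch, 'message')

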
class Commit(object):
    """Task of committing a new revision.

    This is a MethodObject: it accumulates state as the commit is
    prepared, and then it is discarded.  It doesn't represent
    historical revisions, just the act of recording a new one.

    It also accumulates a list of files that have been deleted from
    the working directory; these should be removed from the
    working inventory.
    """
    def __init__(self, reporter=None):
        if reporter is not None:
            self.reporter = reporter
        else:
            self.reporter = NullCommitReporter()

    def commit(self,
               branch, message,
               timestamp=None,
               timezone=None,
               committer=None,
               specific_files=None,
               rev_id=None,
               allow_pointless=True):
        """Commit working copy as a new revision.

        The basic approach is to add all the file texts into the
        store, then the inventory, then make a new revision pointing
        to that inventory and store that.

        This is not quite safe if the working copy changes during the
        commit; for the moment that is simply not allowed.  A better
        approach is to make a temporary copy of the files before
        computing their hashes, and then add those hashes in turn to
        the inventory.  This should mean at least that there are no
        broken hash pointers.  There is no way we can get a snapshot
        of the whole directory at an instant.  This would also have to
        be robust against files disappearing, moving, etc.  So the
        whole thing is a bit hard.

        This raises PointlessCommit if there are no changes, no new merges,
        and allow_pointless is false.

        timestamp -- if not None, seconds-since-epoch for a
            postdated/predated commit.

        specific_files -- if given, commit only those files.

        rev_id -- if set, use this as the new revision id.
            Useful for test or import commands that need to tightly
            control what revisions are assigned.  If you duplicate
            a revision id that exists elsewhere it is your own fault.
            If null (default), a time/random revision id is generated.
        """
        self.branch = branch
        self.branch.lock_write()
        self.rev_id = rev_id
        self.specific_files = specific_files
        self.allow_pointless = allow_pointless

        if timestamp is None:
            self.timestamp = time.time()
        else:
            self.timestamp = long(timestamp)

        if committer is None:
            self.committer = username(self.branch)
        else:
            assert isinstance(committer, basestring), type(committer)
            self.committer = committer

        if timezone is None:
            self.timezone = local_time_offset()
        else:
            self.timezone = int(timezone)

        assert isinstance(message, basestring), type(message)
        self.message = message

        try:
            # First walk over the working inventory; and both update that
            # and also build a new revision inventory.  The revision
            # inventory needs to hold the text-id, sha1 and size of the
            # actual file versions committed in the revision.  (These are
            # not present in the working inventory.)  We also need to
            # detect missing/deleted files, and remove them from the
            # working inventory.

            self.work_tree = self.branch.working_tree()
            self.work_inv = self.work_tree.inventory
            self.basis_tree = self.branch.basis_tree()
            self.basis_inv = self.basis_tree.inventory

            self._gather_parents()

            if self.rev_id is None:
                self.rev_id = _gen_revision_id(self.branch, time.time())

            # TODO: update hashcache
            self.delta = compare_trees(self.basis_tree, self.work_tree,
                                       specific_files=self.specific_files)

            if not (self.delta.has_changed()
                    or self.allow_pointless
                    or len(self.parents) != 1):
                raise PointlessCommit()

            self.new_inv = self.basis_inv.copy()

            self.delta.show(sys.stdout)

            self._remove_deleted()
            self._store_files()

            self.branch._write_inventory(self.work_inv)
            self._record_inventory()

            self._record_ancestry()
            self._make_revision()
            note('committed r%d {%s}', (self.branch.revno() + 1),
                 self.rev_id)
            self.branch.append_revision(self.rev_id)
            self.branch.set_pending_merges([])
        finally:
            self.branch.unlock()

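    # Illustrative use of the above (sketch, not exercised here): commits
    # that change nothing are rejected unless allow_pointless is true.
    #
    #     try:
    #         Commit().commit(branch, 'empty commit', allow_pointless=False)
    #     except PointlessCommit:
    #         pass    # no changes and no pending merges
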
    def _record_inventory(self):
        """Store the inventory for the new revision."""
        inv_tmp = StringIO()
        serializer_v5.write_inventory(self.new_inv, inv_tmp)
        self.inv_sha1 = sha_string(inv_tmp.getvalue())
        # rewind so readlines() sees the serialized text from the start
        inv_tmp.seek(0)
        inv_lines = inv_tmp.readlines()
        self.branch.weave_store.add_text(INVENTORY_FILEID, self.rev_id,
                                         inv_lines, self.parents)

    def _record_ancestry(self):
        """Append merged revision ancestry to the ancestry file."""
        if len(self.parents) > 1:
            raise NotImplementedError("sorry, can't commit merges yet")
        w = self.branch.weave_store.get_weave_or_empty(ANCESTRY_FILEID)
        if self.parents:
            lines = w.get(w.lookup(self.parents[0]))
        else:
            # first commit on this branch: no existing ancestry
            lines = []
        lines.append(self.rev_id + '\n')
        parent_idxs = map(w.lookup, self.parents)
        w.add(self.rev_id, parent_idxs, lines)
        self.branch.weave_store.put_weave(ANCESTRY_FILEID, w)

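    # The ancestry text for a revision is therefore one revision-id per
    # line, ending with the new revision's own id, e.g. (illustrative):
    #
    #     first-rev-id
    #     second-rev-id
    #     this-rev-id
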
    def _gather_parents(self):
        """Collect the parent revision ids for this commit."""
        pending_merges = self.branch.pending_merges()
        if pending_merges:
            raise NotImplementedError("sorry, can't commit merges to the weave format yet")
        self.parents = []
        precursor_id = self.branch.last_patch()
        if precursor_id:
            self.parents.append(precursor_id)
        self.parents += pending_merges

    def _make_revision(self):
        """Record a new revision object for this commit."""
        self.rev = Revision(timestamp=self.timestamp,
                            timezone=self.timezone,
                            committer=self.committer,
                            message=self.message,
                            inventory_sha1=self.inv_sha1,
                            revision_id=self.rev_id)
        self.rev.parents = map(RevisionReference, self.parents)
        rev_tmp = tempfile.TemporaryFile()
        serializer_v5.write_revision(self.rev, rev_tmp)
        # rewind so the store reads the serialized revision from the start
        rev_tmp.seek(0)
        self.branch.revision_store.add(rev_tmp, self.rev_id)
        mutter('new revision_id is {%s}', self.rev_id)

    def _remove_deleted(self):
        """Remove deleted files from the working and stored inventories."""
        for path, id, kind in self.delta.removed:
            if self.work_inv.has_id(id):
                del self.work_inv[id]
            if self.new_inv.has_id(id):
                del self.new_inv[id]

    def _store_files(self):
        """Store new texts of modified/added files."""
        # We must make sure that directories are added before anything
        # inside them is added.  The files within the delta report are
        # sorted by path so we know the directory will come before its
        # contents.
        for path, file_id, kind in self.delta.added:
            if kind != 'file':
                # directories and other non-files just get a copied entry
                ie = self.work_inv[file_id].copy()
                self.new_inv.add(ie)
            else:
                self._store_file_text(file_id)

        for path, file_id, kind in self.delta.modified:
            if kind != 'file':
                continue
            self._store_file_text(file_id)

        for old_path, new_path, file_id, kind, text_modified in self.delta.renamed:
            if kind != 'file':
                continue
            if not text_modified:
                continue
            self._store_file_text(file_id)

    def _store_file_text(self, file_id):
        """Store updated text for one modified or added file."""
        note('store new text for {%s} in revision {%s}',
             file_id, self.rev_id)
        new_lines = self.work_tree.get_file(file_id).readlines()
        if file_id in self.new_inv:     # was in basis inventory
            ie = self.new_inv[file_id]
            assert ie.file_id == file_id
            assert file_id in self.basis_inv
            assert self.basis_inv[file_id].kind == 'file'
            old_version = self.basis_inv[file_id].text_version
            file_parents = [old_version]
        else:                           # new in this revision
            ie = self.work_inv[file_id].copy()
            self.new_inv.add(ie)
            assert file_id not in self.basis_inv
            file_parents = []
        assert ie.kind == 'file'
        self._add_text_to_weave(file_id, new_lines, file_parents)

        # make a new inventory entry for this file, using whatever
        # it had in the working copy, plus details on the new text
        ie.text_sha1 = _sha_strings(new_lines)
        ie.text_size = sum(map(len, new_lines))
        ie.text_version = self.rev_id
        ie.entry_version = self.rev_id

    def _add_text_to_weave(self, file_id, new_lines, parents):
        # file-ids beginning with '__' are reserved for internal weaves
        # such as the inventory and ancestry files
        if file_id.startswith('__'):
            raise ValueError('illegal file-id %r for text file' % file_id)
        self.branch.weave_store.add_text(file_id, self.rev_id, new_lines, parents)


def _gen_revision_id(branch, when):
    """Return new revision-id."""
    s = '%s-%s-' % (user_email(branch), compact_date(when))
    s += hexlify(rand_bytes(8))
    return s


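# The generated id is the committer's email, a compact date, and eight
# random bytes in hex, e.g. (illustrative only):
#
#     jrandom@example.com-20050923044500-0123456789abcdef

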
def _sha_strings(strings):
    """Return the sha-1 of concatenation of strings"""
    s = sha.new()
    map(s.update, strings)
    return s.hexdigest()
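
# For example (illustrative), _sha_strings(['foo', 'bar']) gives the same
# digest as sha.new('foobar').hexdigest().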