# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA


## XXX: Can we do any better about making interrupted commits change
## nothing?

## XXX: If we merged two versions of a file then we still need to
## create a new version representing that merge, even if it didn't
## change from the parent.

## TODO: Read back the just-generated changeset, and make sure it
## applies and recreates the right state.

import sys
import time
import tempfile

from binascii import hexlify
from cStringIO import StringIO

from bzrlib.osutils import (local_time_offset, username,
                            rand_bytes, compact_date, user_email,
                            kind_marker, is_inside_any, quotefn,
                            sha_string, sha_strings, sha_file, isdir, isfile)
from bzrlib.branch import gen_file_id, INVENTORY_FILEID, ANCESTRY_FILEID
from bzrlib.errors import BzrError, PointlessCommit
from bzrlib.revision import Revision, RevisionReference
from bzrlib.trace import mutter, note
from bzrlib.xml5 import serializer_v5
from bzrlib.inventory import Inventory
from bzrlib.delta import compare_trees
from bzrlib.weave import Weave
from bzrlib.weavefile import read_weave, write_weave_v5
from bzrlib.atomicfile import AtomicFile


def commit(*args, **kwargs):
    """Commit a new revision to a branch.

    Function-style interface for convenience of old callers.

    New code should use the Commit class instead.
    """
    Commit().commit(*args, **kwargs)
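

# A minimal usage sketch (illustrative, not part of the original module):
# ``b`` is assumed to be an already-opened bzrlib.branch.Branch whose
# working tree has uncommitted changes.
#
#     commit(b, 'initial import')                # function-style, old callers
#     Commit().commit(b, 'fix typo in README',   # preferred class interface
#                     specific_files=['README'])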


class NullCommitReporter(object):
    """I report on progress of a commit."""

    def added(self, path):
        pass

    def removed(self, path):
        pass

    def renamed(self, old_path, new_path):
        pass


class ReportCommitToLog(NullCommitReporter):
    def added(self, path):
        note('added %s', path)

    def removed(self, path):
        note('removed %s', path)

    def renamed(self, old_path, new_path):
        note('renamed %s => %s', old_path, new_path)
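

# Sketch of selecting a reporter (hypothetical call, using only names defined
# in this module): pass ReportCommitToLog to get progress messages through
# bzrlib.trace.note, or omit the argument for the silent default.
#
#     Commit(reporter=ReportCommitToLog()).commit(b, 'rename data files')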


class Commit(object):
    """Task of committing a new revision.

    This is a MethodObject: it accumulates state as the commit is
    prepared, and then it is discarded.  It doesn't represent
    historical revisions, just the act of recording a new one.

        missing_ids
            Modified to hold a list of files that have been deleted from
            the working directory; these should be removed from the
            working inventory.
    """
    def __init__(self, reporter=None):
        if reporter is not None:
            self.reporter = reporter
        else:
            self.reporter = NullCommitReporter()


# Older function-style implementation of commit, superseded by the Commit
# class above.
def commit(branch, message, timestamp=None, timezone=None,
           committer=None, specific_files=None):
    """Commit working copy as a new revision.

    The basic approach is to add all the file texts into the
    store, then the inventory, then make a new revision pointing
    to that inventory and store that.

    This is not quite safe if the working copy changes during the
    commit; for the moment that is simply not allowed.  A better
    approach is to make a temporary copy of the files before
    computing their hashes, and then add those hashes in turn to
    the inventory.  This should mean at least that there are no
    broken hash pointers.  There is no way we can get a snapshot
    of the whole directory at an instant.  This would also have to
    be robust against files disappearing, moving, etc.  So the
    whole thing is a bit hard.

    timestamp -- if not None, seconds-since-epoch for a
         postdated/predated commit.

    specific_files
        If true, commit only those files.
    """
    import os, time, tempfile

    from inventory import Inventory
    from osutils import isdir, isfile, sha_string, quotefn, \
         local_time_offset, username, kind_marker, is_inside_any
    from branch import gen_file_id
    from errors import BzrError
    from revision import Revision
    from trace import mutter, note

    branch._need_writelock()

    ## TODO: Show branch names

    # TODO: Don't commit if there are no changes, unless forced?

    # First walk over the working inventory; and both update that
    # and also build a new revision inventory.  The revision
    # inventory needs to hold the text-id, sha1 and size of the
    # actual file versions committed in the revision.  (These are
    # not present in the working inventory.)  We also need to
    # detect missing/deleted files, and remove them from the
    # working inventory.

    work_tree = branch.working_tree()
    work_inv = work_tree.inventory
    inv = Inventory()
    basis = branch.basis_tree()
    basis_inv = basis.inventory
    missing_ids = []

    print 'looking for changes...'
    for path, entry in work_inv.iter_entries():
        ## TODO: Cope with files that have gone missing.

        ## TODO: Check that the file kind has not changed from the previous
        ## revision of this file (if any).

        p = branch.abspath(path)
        file_id = entry.file_id
        mutter('commit prep file %s, id %r ' % (p, file_id))

        if specific_files and not is_inside_any(specific_files, path):
            if basis_inv.has_id(file_id):
                # carry over with previous state
                inv.add(basis_inv[file_id].copy())
            else:
                # omit this from committed inventory
                pass
            continue

        if not work_tree.has_id(file_id):
            note('deleted %s%s' % (path, kind_marker(entry.kind)))
            mutter("    file is missing, removing from inventory")
            missing_ids.append(file_id)
            continue

        if basis_inv.has_id(file_id):
            old_kind = basis_inv[file_id].kind
            if old_kind != entry.kind:
                raise BzrError("entry %r changed kind from %r to %r"
                               % (file_id, old_kind, entry.kind))

        if entry.kind == 'directory':
            if not isdir(p):
                raise BzrError("%s is entered as directory but not a directory"
                               % quotefn(p))
        elif entry.kind == 'file':
            if not isfile(p):
                raise BzrError("%s is entered as file but is not a file" % quotefn(p))

            new_sha1 = work_tree.get_file_sha1(file_id)

            old_ie = basis_inv.has_id(file_id) and basis_inv[file_id]
            if (old_ie
                    and old_ie.text_sha1 == new_sha1):
                ## assert content == basis.get_file(file_id).read()
                entry.text_id = old_ie.text_id
                entry.text_sha1 = new_sha1
                entry.text_size = old_ie.text_size
                mutter('    unchanged from previous text_id {%s}' %
                       entry.text_id)
            else:
                content = file(p, 'rb').read()

                entry.text_sha1 = sha_string(content)
                entry.text_size = len(content)

                entry.text_id = gen_file_id(entry.name)
                branch.text_store.add(content, entry.text_id)
                mutter('    stored with text_id {%s}' % entry.text_id)

                if not old_ie:
                    note('added %s' % path)
                elif (old_ie.name == entry.name
                      and old_ie.parent_id == entry.parent_id):
                    note('modified %s' % path)
                else:
                    note('renamed %s' % path)

    for file_id in missing_ids:
        # Any files that have been deleted are now removed from the
        # working inventory.  Files that were not selected for commit
        # are left as they were in the working inventory and omitted
        # from the revision inventory.


    ## The remaining methods belong to the Commit class defined above.

    def commit(self, branch, message,
               timestamp=None,
               timezone=None,
               committer=None,
               specific_files=None,
               rev_id=None,
               allow_pointless=True):
        """Commit working copy as a new revision.

        The basic approach is to add all the file texts into the
        store, then the inventory, then make a new revision pointing
        to that inventory and store that.

        This is not quite safe if the working copy changes during the
        commit; for the moment that is simply not allowed.  A better
        approach is to make a temporary copy of the files before
        computing their hashes, and then add those hashes in turn to
        the inventory.  This should mean at least that there are no
        broken hash pointers.  There is no way we can get a snapshot
        of the whole directory at an instant.  This would also have to
        be robust against files disappearing, moving, etc.  So the
        whole thing is a bit hard.

        This raises PointlessCommit if there are no changes, no new merges,
        and allow_pointless is false.

        timestamp -- if not None, seconds-since-epoch for a
             postdated/predated commit.

        specific_files
            If true, commit only those files.

        rev_id
            If set, use this as the new revision id.
            Useful for test or import commands that need to tightly
            control what revisions are assigned.  If you duplicate
            a revision id that exists elsewhere it is your own fault.
            If null (default), a time/random revision id is generated.
        """
        self.branch = branch
        self.branch.lock_write()
        self.rev_id = rev_id
        self.specific_files = specific_files
        self.allow_pointless = allow_pointless

        if timestamp is None:
            self.timestamp = time.time()
        else:
            self.timestamp = long(timestamp)

        if committer is None:
            self.committer = username(self.branch)
        else:
            assert isinstance(committer, basestring), type(committer)
            self.committer = committer

        if timezone is None:
            self.timezone = local_time_offset()
        else:
            self.timezone = int(timezone)

        assert isinstance(message, basestring), type(message)
        self.message = message

        # First walk over the working inventory; and both update that
        # and also build a new revision inventory.  The revision
        # inventory needs to hold the text-id, sha1 and size of the
        # actual file versions committed in the revision.  (These are
        # not present in the working inventory.)  We also need to
        # detect missing/deleted files, and remove them from the
        # working inventory.

        self.work_tree = self.branch.working_tree()
        self.work_inv = self.work_tree.inventory
        self.basis_tree = self.branch.basis_tree()
        self.basis_inv = self.basis_tree.inventory

        self._gather_parents()

        if self.rev_id is None:
            self.rev_id = _gen_revision_id(self.branch, time.time())

        # TODO: update hashcache
        self.delta = compare_trees(self.basis_tree, self.work_tree,
                                   specific_files=self.specific_files)

        if not (self.delta.has_changed()
                or self.allow_pointless
                or len(self.parents) != 1):
            raise PointlessCommit()

        self.new_inv = self.basis_inv.copy()

        self.delta.show(sys.stdout)

        self._remove_deleted()
        self._store_files()

        self.branch._write_inventory(self.work_inv)
        self._record_inventory()
        self._record_ancestry()

        self._make_revision()
        note('committed r%d {%s}', (self.branch.revno() + 1),
             self.rev_id)
        self.branch.append_revision(self.rev_id)
        self.branch.set_pending_merges([])
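
    # Sketch of how a caller might treat "nothing to commit" as non-fatal
    # (hypothetical usage, relying only on names defined in this module):
    #
    #     try:
    #         Commit().commit(b, 'auto snapshot', allow_pointless=False)
    #     except PointlessCommit:
    #         pass    # no changes and no new merges, so nothing was recorded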

    def _record_inventory(self):
        """Store the inventory for the new revision."""
        inv_tmp = StringIO()
        serializer_v5.write_inventory(self.new_inv, inv_tmp)
        inv_tmp.seek(0)
        self.inv_sha1 = sha_string(inv_tmp.getvalue())
        inv_lines = inv_tmp.readlines()
        self.branch.weave_store.add_text(INVENTORY_FILEID, self.rev_id,
                                         inv_lines, self.parents)

    def _record_ancestry(self):
        """Append merged revision ancestry to the ancestry file."""
        if len(self.parents) > 1:
            raise NotImplementedError("sorry, can't commit merges yet")
        w = self.branch.weave_store.get_weave_or_empty(ANCESTRY_FILEID)
        if self.parents:
            lines = w.get(w.lookup(self.parents[0]))
        else:
            lines = []
        lines.append(self.rev_id + '\n')
        parent_idxs = map(w.lookup, self.parents)
        w.add(self.rev_id, parent_idxs, lines)
        self.branch.weave_store.put_weave(ANCESTRY_FILEID, w)
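
    # For a linear history the ancestry text stored for each revision is just
    # the parent's ancestry with the new revision id appended, one id per
    # line; illustrative sketch with made-up ids:
    #
    #     rev-id-1
    #     rev-id-2
    #     rev-id-3        <- line appended by this commit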

    def _gather_parents(self):
        pending_merges = self.branch.pending_merges()
        if pending_merges:
            raise NotImplementedError("sorry, can't commit merges to the weave format yet")
        self.parents = []
        precursor_id = self.branch.last_patch()
        if precursor_id:
            self.parents.append(precursor_id)
        self.parents += pending_merges

    def _make_revision(self):
        """Record a new revision object for this commit."""
        self.rev = Revision(timestamp=self.timestamp,
                            timezone=self.timezone,
                            committer=self.committer,
                            message=self.message,
                            inventory_sha1=self.inv_sha1,
                            revision_id=self.rev_id)
        self.rev.parents = map(RevisionReference, self.parents)
        rev_tmp = tempfile.TemporaryFile()
        serializer_v5.write_revision(self.rev, rev_tmp)
        rev_tmp.seek(0)
        self.branch.revision_store.add(rev_tmp, self.rev_id)
        mutter('new revision_id is {%s}', self.rev_id)

    def _remove_deleted(self):
        """Remove deleted files from the working and stored inventories."""
        for path, id, kind in self.delta.removed:
            if self.work_inv.has_id(id):
                del self.work_inv[id]
            if self.new_inv.has_id(id):
                del self.new_inv[id]

    def _store_files(self):
        """Store new texts of modified/added files."""
        # We must make sure that directories are added before anything
        # inside them is added.  The files within the delta report are
        # sorted by path so we know the directory will come before its
        # contents.
        for path, file_id, kind in self.delta.added:
            if kind != 'file':
                ie = self.work_inv[file_id].copy()
                self.new_inv.add(ie)
            else:
                self._store_file_text(file_id)

        for path, file_id, kind in self.delta.modified:
            if kind != 'file':
                continue
            self._store_file_text(file_id)

        for old_path, new_path, file_id, kind, text_modified in self.delta.renamed:
            if kind != 'file':
                continue
            if not text_modified:
                continue
            self._store_file_text(file_id)

    def _store_file_text(self, file_id):
        """Store updated text for one modified or added file."""
        note('store new text for {%s} in revision {%s}',
             file_id, self.rev_id)
        new_lines = self.work_tree.get_file(file_id).readlines()
        if file_id in self.new_inv:     # was in basis inventory
            ie = self.new_inv[file_id]
            assert ie.file_id == file_id
            assert file_id in self.basis_inv
            assert self.basis_inv[file_id].kind == 'file'
            old_version = self.basis_inv[file_id].text_version
            file_parents = [old_version]
        else:                           # new in this revision
            ie = self.work_inv[file_id].copy()
            self.new_inv.add(ie)
            assert file_id not in self.basis_inv
            file_parents = []
        assert ie.kind == 'file'
        self._add_text_to_weave(file_id, new_lines, file_parents)
        # make a new inventory entry for this file, using whatever
        # it had in the working copy, plus details on the new text
        ie.text_sha1 = sha_strings(new_lines)
        ie.text_size = sum(map(len, new_lines))
        ie.text_version = self.rev_id
        ie.entry_version = self.rev_id

    def _add_text_to_weave(self, file_id, new_lines, parents):
        if file_id.startswith('__'):
            raise ValueError('illegal file-id %r for text file' % file_id)
        self.branch.weave_store.add_text(file_id, self.rev_id, new_lines, parents)


def _gen_revision_id(branch, when):
    """Return new revision-id."""
    s = '%s-%s-' % (user_email(branch), compact_date(when))
    s += hexlify(rand_bytes(8))
    return s


## Note: the code from here down is the continuation of the older
## function-style commit() above -- the body of its "for file_id in
## missing_ids" loop, followed by the old store-based inventory and
## revision writing.

        # have to do this later so we don't mess up the iterator.
        # since parents may be removed before their children we

        # FIXME: There's probably a better way to do this; perhaps
        # the workingtree should know how to filter itself.
        if work_inv.has_id(file_id):
            del work_inv[file_id]

    rev_id = _gen_revision_id(time.time())

    inv_tmp = tempfile.TemporaryFile()
    inv.write_xml(inv_tmp)
    inv_tmp.seek(0)
    branch.inventory_store.add(inv_tmp, inv_id)
    mutter('new inventory_id is {%s}' % inv_id)

    branch._write_inventory(work_inv)

    if timestamp is None:
        timestamp = time.time()

    if committer is None:
        committer = username()

    if timezone is None:
        timezone = local_time_offset()

    mutter("building commit log message")
    rev = Revision(timestamp=timestamp,
                   timezone=timezone,
                   committer=committer,
                   precursor = branch.last_patch(),
                   message=message,
                   inventory_id=inv_id,
                   revision_id=rev_id)

    rev_tmp = tempfile.TemporaryFile()
    rev.write_xml(rev_tmp)
    rev_tmp.seek(0)
    branch.revision_store.add(rev_tmp, rev_id)
    mutter("new revision_id is {%s}" % rev_id)

    ## XXX: Everything up to here can simply be orphaned if we abort
    ## the commit; it will leave junk files behind but that doesn't
    ## matter.

    ## TODO: Read back the just-generated changeset, and make sure it
    ## applies and recreates the right state.

    ## TODO: Also calculate and store the inventory SHA1
    mutter("committing patch r%d" % (branch.revno() + 1))

    branch.append_revision(rev_id)

    note("committed r%d" % branch.revno())


# Older revision-id generator (takes no branch argument); superseded by the
# _gen_revision_id(branch, when) defined above.
def _gen_revision_id(when):
    """Return new revision-id."""
    from binascii import hexlify
    from osutils import rand_bytes, compact_date, user_email

    s = '%s-%s-' % (user_email(), compact_date(when))
    s += hexlify(rand_bytes(8))
    return s
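
# For illustration only (made-up values): with user_email() returning
# 'mbp@sourcefrog.net' and compact_date() giving a YYYYMMDDhhmmss string,
# a generated revision id looks roughly like
#
#     mbp@sourcefrog.net-20050923042119-1234567890abcdef
#
# i.e. email, compact date, then 16 hex digits from rand_bytes(8).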