15
15
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
18
## XXX: Can we do any better about making interrupted commits change
21
## XXX: If we merged two versions of a file then we still need to
22
## create a new version representing that merge, even if it didn't
23
## change from the parent.
25
## TODO: Read back the just-generated changeset, and make sure it
26
## applies and recreates the right state.
37
from binascii import hexlify
38
from cStringIO import StringIO
40
from bzrlib.osutils import (local_time_offset, username,
41
rand_bytes, compact_date, user_email,
42
kind_marker, is_inside_any, quotefn,
43
sha_string, sha_file, isdir, isfile)
44
from bzrlib.branch import gen_file_id
45
from bzrlib.errors import BzrError, PointlessCommit
46
from bzrlib.revision import Revision, RevisionReference
47
from bzrlib.trace import mutter, note
48
from bzrlib.xml5 import serializer_v5
49
from bzrlib.inventory import Inventory
50
from bzrlib.delta import compare_trees
51
from bzrlib.weave import Weave
52
from bzrlib.weavefile import read_weave, write_weave_v5
53
from bzrlib.atomicfile import AtomicFile
56
class NullCommitReporter(object):
    """I report on progress of a commit.

    Null-object base class: every callback is a silent no-op, so a
    Commit can always call its reporter without checking for None.
    Subclasses (e.g. ReportCommitToLog) override to emit messages.
    NOTE(review): method bodies were lost in extraction; reconstructed
    as no-ops to match the overriding subclass below — confirm.
    """

    def added(self, path):
        pass

    def removed(self, path):
        pass

    def renamed(self, old_path, new_path):
        pass
68
class ReportCommitToLog(NullCommitReporter):
    """Reporter that records each commit action via bzrlib.trace.note."""

    def added(self, path):
        note('added %s', path)

    def removed(self, path):
        note('removed %s', path)

    def renamed(self, old_path, new_path):
        note('renamed %s => %s', old_path, new_path)
80
"""Task of committing a new revision.
82
This is a MethodObject: it accumulates state as the commit is
83
prepared, and then it is discarded. It doesn't represent
84
historical revisions, just the act of recording a new one.
87
Modified to hold a list of files that have been deleted from
88
the working directory; these should be removed from the
19
def commit(branch, message,
26
"""Commit working copy as a new revision.
28
The basic approach is to add all the file texts into the
29
store, then the inventory, then make a new revision pointing
30
to that inventory and store that.
32
This is not quite safe if the working copy changes during the
33
commit; for the moment that is simply not allowed. A better
34
approach is to make a temporary copy of the files before
35
computing their hashes, and then add those hashes in turn to
36
the inventory. This should mean at least that there are no
37
broken hash pointers. There is no way we can get a snapshot
38
of the whole directory at an instant. This would also have to
39
be robust against files disappearing, moving, etc. So the
40
whole thing is a bit hard.
42
timestamp -- if not None, seconds-since-epoch for a
43
postdated/predated commit.
46
If true, commit only those files.
49
If set, use this as the new revision id.
50
Useful for test or import commands that need to tightly
51
control what revisions are assigned. If you duplicate
52
a revision id that exists elsewhere it is your own fault.
53
If null (default), a time/random revision id is generated.
93
if reporter is not None:
94
self.reporter = reporter
96
self.reporter = NullCommitReporter()
106
allow_pointless=True):
107
"""Commit working copy as a new revision.
109
The basic approach is to add all the file texts into the
110
store, then the inventory, then make a new revision pointing
111
to that inventory and store that.
113
This is not quite safe if the working copy changes during the
114
commit; for the moment that is simply not allowed. A better
115
approach is to make a temporary copy of the files before
116
computing their hashes, and then add those hashes in turn to
117
the inventory. This should mean at least that there are no
118
broken hash pointers. There is no way we can get a snapshot
119
of the whole directory at an instant. This would also have to
120
be robust against files disappearing, moving, etc. So the
121
whole thing is a bit hard.
123
This raises PointlessCommit if there are no changes, no new merges,
124
and allow_pointless is false.
126
timestamp -- if not None, seconds-since-epoch for a
127
postdated/predated commit.
130
If true, commit only those files.
133
If set, use this as the new revision id.
134
Useful for test or import commands that need to tightly
135
control what revisions are assigned. If you duplicate
136
a revision id that exists elsewhere it is your own fault.
137
If null (default), a time/random revision id is generated.
141
self.branch.lock_write()
143
self.specific_files = specific_files
144
self.allow_pointless = allow_pointless
146
if timestamp is None:
147
self.timestamp = time.time()
149
self.timestamp = long(timestamp)
151
if committer is None:
152
self.committer = username(self.branch)
154
assert isinstance(committer, basestring), type(committer)
155
self.committer = committer
158
self.timezone = local_time_offset()
160
self.timezone = int(timezone)
162
assert isinstance(message, basestring), type(message)
163
self.message = message
166
# First walk over the working inventory; and both update that
167
# and also build a new revision inventory. The revision
168
# inventory needs to hold the text-id, sha1 and size of the
169
# actual file versions committed in the revision. (These are
170
# not present in the working inventory.) We also need to
171
# detect missing/deleted files, and remove them from the
174
self.work_tree = self.branch.working_tree()
175
self.work_inv = self.work_tree.inventory
176
self.basis_tree = self.branch.basis_tree()
177
self.basis_inv = self.basis_tree.inventory
179
self.pending_merges = self.branch.pending_merges()
180
if self.pending_merges:
181
raise NotImplementedError("sorry, can't commit merges to the weave format yet")
183
if self.rev_id is None:
184
self.rev_id = _gen_revision_id(self.branch, time.time())
186
# todo: update hashcache
187
self.delta = compare_trees(self.basis_tree, self.work_tree,
188
specific_files=self.specific_files)
190
if not (self.delta.has_changed()
191
or self.allow_pointless
192
or self.pending_merges):
193
raise PointlessCommit()
195
self.new_inv = self.basis_inv.copy()
197
self.delta.show(sys.stdout)
199
self._remove_deleted()
202
self.branch._write_inventory(self.work_inv)
203
self._record_inventory()
205
self._make_revision()
206
note('committted r%d {%s}', (self.branch.revno() + 1),
208
self.branch.append_revision(self.rev_id)
209
self.branch.set_pending_merges([])
214
def _record_inventory(self):
    """Serialize the new inventory, record its SHA-1, and store it.

    Writes self.new_inv as XML into an in-memory buffer, saves the
    hash for later inclusion in the Revision object, then files the
    text in the branch's inventory store under the new revision id.
    NOTE(review): the buffer-creation and rewind lines were lost in
    extraction; StringIO()/seek(0) reconstructed — confirm against
    upstream history.
    """
    inv_tmp = StringIO()
    serializer_v5.write_inventory(self.new_inv, inv_tmp)
    # hash the exact bytes we are about to store
    self.inv_sha1 = sha_string(inv_tmp.getvalue())
    # rewind so the store reads the buffer from the beginning
    inv_tmp.seek(0)
    self.branch.inventory_store.add(inv_tmp, self.rev_id)
222
def _make_revision(self):
    """Record a new revision object for this commit.

    Builds the Revision with the commit metadata accumulated on self,
    links it to its precursor and any pending merges, serializes it,
    and adds it to the branch's revision store.
    """
    self.rev = Revision(timestamp=self.timestamp,
                        timezone=self.timezone,
                        committer=self.committer,
                        message=self.message,
                        inventory_sha1=self.inv_sha1,
                        revision_id=self.rev_id)
    self.rev.parents = []
    precursor_id = self.branch.last_patch()
    if precursor_id:
        self.rev.parents.append(RevisionReference(precursor_id))
    for merge_rev in self.pending_merges:
        # fixed: original read `rev.parents` — `rev` is undefined here,
        # which would raise NameError once merges are supported
        self.rev.parents.append(RevisionReference(merge_rev))

    rev_tmp = StringIO()
    serializer_v5.write_revision(self.rev, rev_tmp)
    # rewind so the store reads the serialized text from the start
    rev_tmp.seek(0)
    self.branch.revision_store.add(rev_tmp, self.rev_id)
    mutter('new revision_id is {%s}', self.rev_id)
245
def _remove_deleted(self):
246
"""Remove deleted files from the working and stored inventories."""
247
for path, id, kind in self.delta.removed:
248
if self.work_inv.has_id(id):
249
del self.work_inv[id]
250
if self.new_inv.has_id(id):
255
def _store_files(self):
256
"""Store new texts of modified/added files."""
257
for path, id, kind in self.delta.modified:
260
self._store_file_text(id)
262
for path, id, kind in self.delta.added:
265
self._store_file_text(id)
267
for old_path, new_path, id, kind, text_modified in self.delta.renamed:
270
if not text_modified:
272
self._store_file_text(id)
275
def _store_file_text(self, file_id):
    """Store updated text for one modified or added file.

    Reads the working-tree text, appends it to the file's weave with
    the correct parent version, and updates the inventory entry with
    the new sha1/size/version.  NOTE(review): two lines of the else
    branch were lost in extraction; `self.new_inv.add(ie)` and
    `file_parents = []` reconstructed — confirm against upstream.
    """
    note('store new text for {%s} in revision {%s}',
         file_id, self.rev_id)
    new_lines = self.work_tree.get_file(file_id).readlines()
    if file_id in self.new_inv:     # was in basis inventory
        ie = self.new_inv[file_id]
        assert ie.file_id == file_id
        assert file_id in self.basis_inv
        assert self.basis_inv[file_id].kind == 'file'
        old_version = self.basis_inv[file_id].text_version
        file_parents = [old_version]
    else:                           # new in this revision
        ie = self.work_inv[file_id].copy()
        self.new_inv.add(ie)
        assert file_id not in self.basis_inv
        # first version of this file: no text parents
        file_parents = []
        assert ie.kind == 'file'
    self._add_text_to_weave(file_id, new_lines, file_parents)

    # make a new inventory entry for this file, using whatever
    # it had in the working copy, plus details on the new text
    ie.text_sha1 = _sha_strings(new_lines)
    ie.text_size = sum(map(len, new_lines))
    ie.text_version = self.rev_id
    ie.entry_version = self.rev_id
302
def _add_text_to_weave(self, file_id, new_lines, parents):
    """Append new_lines to this file's weave as revision self.rev_id.

    Loads the existing weave from the branch control directory (or
    starts a fresh one for a new file) and rewrites it atomically.
    NOTE(review): the else-branch and the AtomicFile commit/close tail
    were lost in extraction; reconstructed — confirm against upstream.
    """
    weave_fn = self.branch.controlfilename(['weaves', file_id + '.weave'])
    if os.path.exists(weave_fn):
        w = read_weave(file(weave_fn, 'rb'))
    else:
        w = Weave()
    # XXX: Should set the appropriate parents by looking for this file_id
    # in all revision parents
    parent_idxs = map(w.lookup, parents)
    w.add(self.rev_id, parent_idxs, new_lines)
    af = AtomicFile(weave_fn)
    try:
        write_weave_v5(w, af)
        af.commit()
    finally:
        # on error the AtomicFile is abandoned, leaving the old weave intact
        af.close()
320
def _gen_revision_id(branch, when):
    """Return a new revision id for *branch*.

    The id combines the committer's email, a compact timestamp for
    *when* (seconds since epoch), and 8 random bytes hex-encoded, so
    ids sort roughly by time and are globally unique.
    (This span also contained dead residue of a superseded one-argument
    version; only the current two-argument form is kept.)
    """
    s = '%s-%s-' % (user_email(branch), compact_date(when))
    s += hexlify(rand_bytes(8))
    return s
327
def _sha_strings(strings):
    """Return the sha-1 of the concatenation of strings.

    Equivalent to feeding each string into one hash object in order;
    implemented via sha_string (bzrlib.osutils, imported above), whose
    creation/return lines were lost in extraction.
    """
    return sha_string(''.join(strings))
181
def _gather_commit(branch, work_tree, work_inv, basis_inv, specific_files,
183
"""Build inventory preparatory to commit.
185
This adds any changed files into the text store, and sets their
186
text-id, sha and size in the returned inventory appropriately.
189
Modified to hold a list of files that have been deleted from
190
the working directory; these should be removed from the
193
from bzrlib.inventory import Inventory
194
from osutils import isdir, isfile, sha_string, quotefn, \
195
local_time_offset, username, kind_marker, is_inside_any
197
from branch import gen_file_id
198
from errors import BzrError
199
from revision import Revision
200
from bzrlib.trace import mutter, note
205
for path, entry in work_inv.iter_entries():
206
## TODO: Check that the file kind has not changed from the previous
207
## revision of this file (if any).
209
p = branch.abspath(path)
210
file_id = entry.file_id
211
mutter('commit prep file %s, id %r ' % (p, file_id))
213
if specific_files and not is_inside_any(specific_files, path):
214
if basis_inv.has_id(file_id):
215
# carry over with previous state
216
inv.add(basis_inv[file_id].copy())
218
# omit this from committed inventory
222
if not work_tree.has_id(file_id):
224
print('deleted %s%s' % (path, kind_marker(entry.kind)))
225
mutter(" file is missing, removing from inventory")
226
missing_ids.append(file_id)
229
# this is present in the new inventory; may be new, modified or
231
old_ie = basis_inv.has_id(file_id) and basis_inv[file_id]
237
old_kind = old_ie.kind
238
if old_kind != entry.kind:
239
raise BzrError("entry %r changed kind from %r to %r"
240
% (file_id, old_kind, entry.kind))
242
if entry.kind == 'directory':
244
raise BzrError("%s is entered as directory but not a directory"
246
elif entry.kind == 'file':
248
raise BzrError("%s is entered as file but is not a file" % quotefn(p))
250
new_sha1 = work_tree.get_file_sha1(file_id)
253
and old_ie.text_sha1 == new_sha1):
254
## assert content == basis.get_file(file_id).read()
255
entry.text_id = old_ie.text_id
256
entry.text_sha1 = new_sha1
257
entry.text_size = old_ie.text_size
258
mutter(' unchanged from previous text_id {%s}' %
261
content = file(p, 'rb').read()
263
# calculate the sha again, just in case the file contents
264
# changed since we updated the cache
265
entry.text_sha1 = sha_string(content)
266
entry.text_size = len(content)
268
entry.text_id = gen_file_id(entry.name)
269
branch.text_store.add(content, entry.text_id)
270
mutter(' stored with text_id {%s}' % entry.text_id)
273
marked = path + kind_marker(entry.kind)
275
print 'added', marked
276
elif old_ie == entry:
278
elif (old_ie.name == entry.name
279
and old_ie.parent_id == entry.parent_id):
280
print 'modified', marked
282
print 'renamed', marked
284
return missing_ids, inv