15
15
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
18
## XXX: Can we do any better about making interrupted commits change nothing?
21
## XXX: If we merged two versions of a file then we still need to
22
## create a new version representing that merge, even if it didn't
23
## change from the parent.
25
## TODO: Read back the just-generated changeset, and make sure it
26
## applies and recreates the right state.
37
from binascii import hexlify
38
from cStringIO import StringIO
40
from bzrlib.osutils import (local_time_offset, username,
41
rand_bytes, compact_date, user_email,
42
kind_marker, is_inside_any, quotefn,
43
sha_string, sha_strings, sha_file, isdir, isfile)
44
from bzrlib.branch import gen_file_id, INVENTORY_FILEID, ANCESTRY_FILEID
45
from bzrlib.errors import BzrError, PointlessCommit
46
from bzrlib.revision import Revision, RevisionReference
47
from bzrlib.trace import mutter, note
48
from bzrlib.xml5 import serializer_v5
49
from bzrlib.inventory import Inventory
50
from bzrlib.delta import compare_trees
51
from bzrlib.weave import Weave
52
from bzrlib.weavefile import read_weave, write_weave_v5
53
from bzrlib.atomicfile import AtomicFile
56
def commit(*args, **kwargs):
    """Commit a new revision to a branch.

    This is a function-style entry point kept for the convenience of
    old callers; it simply delegates to the Commit class.

    New code should use the Commit class instead.
    """
    Commit().commit(*args, **kwargs)
66
class NullCommitReporter(object):
    """Commit progress reporter that discards every notification.

    Serves as the do-nothing default and as the base class for real
    reporters such as ReportCommitToLog.
    """

    def added(self, path):
        """Ignore notification that *path* was added."""

    def removed(self, path):
        """Ignore notification that *path* was removed."""

    def renamed(self, old_path, new_path):
        """Ignore notification that *old_path* moved to *new_path*."""
78
class ReportCommitToLog(NullCommitReporter):
    """Commit progress reporter that writes each event to the bzr log."""

    def added(self, path):
        """Log that *path* was newly added."""
        note('added %s', path)

    def removed(self, path):
        """Log that *path* was removed."""
        note('removed %s', path)

    def renamed(self, old_path, new_path):
        """Log that *old_path* was renamed to *new_path*."""
        note('renamed %s => %s', old_path, new_path)
90
"""Task of committing a new revision.
92
This is a MethodObject: it accumulates state as the commit is
93
prepared, and then it is discarded. It doesn't represent
94
historical revisions, just the act of recording a new one.
97
Modified to hold a list of files that have been deleted from
98
the working directory; these should be removed from the
103
if reporter is not None:
104
self.reporter = reporter
106
self.reporter = NullCommitReporter()
116
allow_pointless=True):
117
"""Commit working copy as a new revision.
119
The basic approach is to add all the file texts into the
120
store, then the inventory, then make a new revision pointing
121
to that inventory and store that.
123
This is not quite safe if the working copy changes during the
124
commit; for the moment that is simply not allowed. A better
125
approach is to make a temporary copy of the files before
126
computing their hashes, and then add those hashes in turn to
127
the inventory. This should mean at least that there are no
128
broken hash pointers. There is no way we can get a snapshot
129
of the whole directory at an instant. This would also have to
130
be robust against files disappearing, moving, etc. So the
131
whole thing is a bit hard.
133
This raises PointlessCommit if there are no changes, no new merges,
134
and allow_pointless is false.
136
timestamp -- if not None, seconds-since-epoch for a
137
postdated/predated commit.
140
If true, commit only those files.
143
If set, use this as the new revision id.
144
Useful for test or import commands that need to tightly
145
control what revisions are assigned. If you duplicate
146
a revision id that exists elsewhere it is your own fault.
147
If null (default), a time/random revision id is generated.
151
self.branch.lock_write()
153
self.specific_files = specific_files
154
self.allow_pointless = allow_pointless
156
if timestamp is None:
157
self.timestamp = time.time()
159
self.timestamp = long(timestamp)
161
if committer is None:
162
self.committer = username(self.branch)
164
assert isinstance(committer, basestring), type(committer)
165
self.committer = committer
168
self.timezone = local_time_offset()
170
self.timezone = int(timezone)
172
assert isinstance(message, basestring), type(message)
173
self.message = message
176
# First walk over the working inventory; and both update that
177
# and also build a new revision inventory. The revision
178
# inventory needs to hold the text-id, sha1 and size of the
179
# actual file versions committed in the revision. (These are
180
# not present in the working inventory.) We also need to
181
# detect missing/deleted files, and remove them from the
184
self.work_tree = self.branch.working_tree()
185
self.work_inv = self.work_tree.inventory
186
self.basis_tree = self.branch.basis_tree()
187
self.basis_inv = self.basis_tree.inventory
189
self._gather_parents()
191
if self.rev_id is None:
192
self.rev_id = _gen_revision_id(self.branch, time.time())
194
# TODO: update hashcache
195
self.delta = compare_trees(self.basis_tree, self.work_tree,
196
specific_files=self.specific_files)
198
if not (self.delta.has_changed()
199
or self.allow_pointless
200
or len(self.parents) != 1):
201
raise PointlessCommit()
203
self.new_inv = self.basis_inv.copy()
205
self.delta.show(sys.stdout)
207
self._remove_deleted()
210
self.branch._write_inventory(self.work_inv)
211
self._record_inventory()
212
self._record_ancestry()
214
self._make_revision()
215
note('committted r%d {%s}', (self.branch.revno() + 1),
217
self.branch.append_revision(self.rev_id)
218
self.branch.set_pending_merges([])
223
def _record_inventory(self):
    """Store the inventory for the new revision.

    Serializes the new inventory, records its SHA-1 for the revision
    object, and adds the text to the inventory weave.
    """
    inv_tmp = StringIO()
    serializer_v5.write_inventory(self.new_inv, inv_tmp)
    # NOTE(review): rewind so that readlines() below sees the whole
    # serialized text from the start; getvalue() is position-independent.
    inv_tmp.seek(0)
    self.inv_sha1 = sha_string(inv_tmp.getvalue())
    inv_lines = inv_tmp.readlines()
    self.branch.weave_store.add_text(INVENTORY_FILEID, self.rev_id,
                                     inv_lines, self.parents)
234
def _record_ancestry(self):
    """Append merged revision ancestry to the ancestry file.

    The ancestry weave accumulates, for each revision, the list of all
    revision ids in its history; this adds a new entry for the revision
    being committed.
    """
    if len(self.parents) > 1:
        raise NotImplementedError("sorry, can't commit merges yet")
    w = self.branch.weave_store.get_weave_or_empty(ANCESTRY_FILEID)
    # NOTE(review): first commits have no parents, so start the
    # ancestry from scratch in that case — confirm against history.
    if self.parents:
        lines = w.get(w.lookup(self.parents[0]))
    else:
        lines = []
    lines.append(self.rev_id + '\n')
    parent_idxs = map(w.lookup, self.parents)
    w.add(self.rev_id, parent_idxs, lines)
    self.branch.weave_store.put_weave(ANCESTRY_FILEID, w)
249
def _gather_parents(self):
250
pending_merges = self.branch.pending_merges()
252
raise NotImplementedError("sorry, can't commit merges to the weave format yet")
254
precursor_id = self.branch.last_patch()
256
self.parents.append(precursor_id)
257
self.parents += pending_merges
260
def _make_revision(self):
    """Record a new revision object for this commit.

    Builds the Revision from the accumulated commit state, serializes
    it, and stores it under the new revision id.
    """
    self.rev = Revision(timestamp=self.timestamp,
                        timezone=self.timezone,
                        committer=self.committer,
                        message=self.message,
                        inventory_sha1=self.inv_sha1,
                        revision_id=self.rev_id)
    self.rev.parents = map(RevisionReference, self.parents)
    rev_tmp = tempfile.TemporaryFile()
    serializer_v5.write_revision(self.rev, rev_tmp)
    # Rewind before handing the file to the store, which reads it back.
    rev_tmp.seek(0)
    self.branch.revision_store.add(rev_tmp, self.rev_id)
    mutter('new revision_id is {%s}', self.rev_id)
276
def _remove_deleted(self):
277
"""Remove deleted files from the working and stored inventories."""
278
for path, id, kind in self.delta.removed:
279
if self.work_inv.has_id(id):
280
del self.work_inv[id]
281
if self.new_inv.has_id(id):
286
def _store_files(self):
287
"""Store new texts of modified/added files."""
288
# We must make sure that directories are added before anything
289
# inside them is added. the files within the delta report are
290
# sorted by path so we know the directory will come before its
292
for path, file_id, kind in self.delta.added:
294
ie = self.work_inv[file_id].copy()
297
self._store_file_text(file_id)
299
for path, file_id, kind in self.delta.modified:
302
self._store_file_text(file_id)
304
for old_path, new_path, file_id, kind, text_modified in self.delta.renamed:
307
if not text_modified:
309
self._store_file_text(file_id)
312
def _store_file_text(self, file_id):
    """Store updated text for one modified or added file.

    Adds the working-tree text to the file's weave and refreshes the
    new inventory entry with the text's sha1, size and version.
    """
    note('store new text for {%s} in revision {%s}',
         file_id, self.rev_id)
    new_lines = self.work_tree.get_file(file_id).readlines()
    if file_id in self.new_inv:
        # Present in the basis inventory: the previous text version is
        # the single parent of the new text.
        ie = self.new_inv[file_id]
        assert ie.file_id == file_id
        assert file_id in self.basis_inv
        assert self.basis_inv[file_id].kind == 'file'
        old_version = self.basis_inv[file_id].text_version
        file_parents = [old_version]
    else:
        # New in this revision: copy the entry from the working
        # inventory and start the weave with no parents.
        ie = self.work_inv[file_id].copy()
        self.new_inv.add(ie)
        assert file_id not in self.basis_inv
        file_parents = []
        assert ie.kind == 'file'
    self._add_text_to_weave(file_id, new_lines, file_parents)
    # make a new inventory entry for this file, using whatever
    # it had in the working copy, plus details on the new text
    ie.text_sha1 = sha_strings(new_lines)
    ie.text_size = sum(map(len, new_lines))
    ie.text_version = self.rev_id
    ie.entry_version = self.rev_id
339
def _add_text_to_weave(self, file_id, new_lines, parents):
340
if file_id.startswith('__'):
341
raise ValueError('illegal file-id %r for text file' % file_id)
342
self.branch.weave_store.add_text(file_id, self.rev_id, new_lines, parents)
345
def _gen_revision_id(branch, when):
19
def commit(branch, message,
26
"""Commit working copy as a new revision.
28
The basic approach is to add all the file texts into the
29
store, then the inventory, then make a new revision pointing
30
to that inventory and store that.
32
This is not quite safe if the working copy changes during the
33
commit; for the moment that is simply not allowed. A better
34
approach is to make a temporary copy of the files before
35
computing their hashes, and then add those hashes in turn to
36
the inventory. This should mean at least that there are no
37
broken hash pointers. There is no way we can get a snapshot
38
of the whole directory at an instant. This would also have to
39
be robust against files disappearing, moving, etc. So the
40
whole thing is a bit hard.
42
timestamp -- if not None, seconds-since-epoch for a
43
postdated/predated commit.
46
If true, commit only those files.
49
If set, use this as the new revision id.
50
Useful for test or import commands that need to tightly
51
control what revisions are assigned. If you duplicate
52
a revision id that exists elsewhere it is your own fault.
53
If null (default), a time/random revision id is generated.
56
import os, time, tempfile
58
from inventory import Inventory
59
from osutils import isdir, isfile, sha_string, quotefn, \
60
local_time_offset, username, kind_marker, is_inside_any
62
from branch import gen_file_id
63
from errors import BzrError
64
from revision import Revision
65
from trace import mutter, note
67
branch._need_writelock()
69
# First walk over the working inventory; and both update that
70
# and also build a new revision inventory. The revision
71
# inventory needs to hold the text-id, sha1 and size of the
72
# actual file versions committed in the revision. (These are
73
# not present in the working inventory.) We also need to
74
# detect missing/deleted files, and remove them from the
77
work_tree = branch.working_tree()
78
work_inv = work_tree.inventory
80
basis = branch.basis_tree()
81
basis_inv = basis.inventory
85
note('looking for changes...')
87
for path, entry in work_inv.iter_entries():
88
## TODO: Check that the file kind has not changed from the previous
89
## revision of this file (if any).
93
p = branch.abspath(path)
94
file_id = entry.file_id
95
mutter('commit prep file %s, id %r ' % (p, file_id))
97
if specific_files and not is_inside_any(specific_files, path):
98
if basis_inv.has_id(file_id):
99
# carry over with previous state
100
inv.add(basis_inv[file_id].copy())
102
# omit this from committed inventory
106
if not work_tree.has_id(file_id):
108
print('deleted %s%s' % (path, kind_marker(entry.kind)))
109
mutter(" file is missing, removing from inventory")
110
missing_ids.append(file_id)
115
if basis_inv.has_id(file_id):
116
old_kind = basis_inv[file_id].kind
117
if old_kind != entry.kind:
118
raise BzrError("entry %r changed kind from %r to %r"
119
% (file_id, old_kind, entry.kind))
121
if entry.kind == 'directory':
123
raise BzrError("%s is entered as directory but not a directory"
125
elif entry.kind == 'file':
127
raise BzrError("%s is entered as file but is not a file" % quotefn(p))
129
new_sha1 = work_tree.get_file_sha1(file_id)
131
old_ie = basis_inv.has_id(file_id) and basis_inv[file_id]
133
and old_ie.text_sha1 == new_sha1):
134
## assert content == basis.get_file(file_id).read()
135
entry.text_id = old_ie.text_id
136
entry.text_sha1 = new_sha1
137
entry.text_size = old_ie.text_size
138
mutter(' unchanged from previous text_id {%s}' %
141
content = file(p, 'rb').read()
143
# calculate the sha again, just in case the file contents
144
# changed since we updated the cache
145
entry.text_sha1 = sha_string(content)
146
entry.text_size = len(content)
148
entry.text_id = gen_file_id(entry.name)
149
branch.text_store.add(content, entry.text_id)
150
mutter(' stored with text_id {%s}' % entry.text_id)
153
print('added %s' % path)
154
elif (old_ie.name == entry.name
155
and old_ie.parent_id == entry.parent_id):
156
print('modified %s' % path)
158
print('renamed %s' % path)
161
for file_id in missing_ids:
162
# Any files that have been deleted are now removed from the
163
# working inventory. Files that were not selected for commit
164
# are left as they were in the working inventory and ommitted
165
# from the revision inventory.
167
# have to do this later so we don't mess up the iterator.
168
# since parents may be removed before their children we
171
# FIXME: There's probably a better way to do this; perhaps
172
# the workingtree should know how to filter itbranch.
173
if work_inv.has_id(file_id):
174
del work_inv[file_id]
178
rev_id = _gen_revision_id(time.time())
181
inv_tmp = tempfile.TemporaryFile()
182
inv.write_xml(inv_tmp)
184
branch.inventory_store.add(inv_tmp, inv_id)
185
mutter('new inventory_id is {%s}' % inv_id)
187
branch._write_inventory(work_inv)
189
if timestamp == None:
190
timestamp = time.time()
192
if committer == None:
193
committer = username()
196
timezone = local_time_offset()
198
mutter("building commit log message")
199
rev = Revision(timestamp=timestamp,
202
precursor = branch.last_patch(),
207
rev_tmp = tempfile.TemporaryFile()
208
rev.write_xml(rev_tmp)
210
branch.revision_store.add(rev_tmp, rev_id)
211
mutter("new revision_id is {%s}" % rev_id)
213
## XXX: Everything up to here can simply be orphaned if we abort
214
## the commit; it will leave junk files behind but that doesn't
217
## TODO: Read back the just-generated changeset, and make sure it
218
## applies and recreates the right state.
220
## TODO: Also calculate and store the inventory SHA1
221
mutter("committing patch r%d" % (branch.revno() + 1))
223
branch.append_revision(rev_id)
226
note("commited r%d" % branch.revno())
230
def _gen_revision_id(when):
    """Return new revision-id.

    The id combines the committer's email, the compact commit date and
    eight random bytes rendered as hex.
    """
    from binascii import hexlify
    from osutils import rand_bytes, compact_date, user_email

    s = '%s-%s-' % (user_email(), compact_date(when))
    s += hexlify(rand_bytes(8))
    return s