        TODO: Adding a directory should optionally recurse down and
              add all non-ignored children. Perhaps do that in a
              higher-level method.

        >>> b = ScratchBranch(files=['foo'])
        >>> 'foo' in b.unknowns()
        True
        >>> b.add(['foo'])
        >>> 'foo' in b.unknowns()
        False
        >>> bool(b.inventory.path2id('foo'))
        True

        >>> b.add(['foo'])
        Traceback (most recent call last):
        BzrError: ('foo is already versioned', [])

        >>> b.add(['nothere'])
        Traceback (most recent call last):
        BzrError: ('cannot add: not a regular file or directory: nothere', [])
        """
        self._need_writelock()

        # TODO: Re-adding a file that is removed in the working copy
        # should probably put it back with the previous ID.
        if isinstance(files, types.StringTypes):
            assert(ids is None or isinstance(ids, types.StringTypes))
            files = [files]
            ids = [ids]

        if ids is None:
            ids = [None] * len(files)
        else:
            assert(len(ids) == len(files))

        inv = self.read_working_inventory()
        for f, file_id in zip(files, ids):
            if is_control_file(f):
                bailout("cannot add control file %s" % quotefn(f))
    def unknowns(self):
        return self.working_tree().unknowns()
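# --- Hedged sketch, not part of the original code above ---
# The TODO inside add() notes that re-adding a file removed from the working
# copy "should probably put it back with the previous ID".  One way that
# could work is to look the path up in the basis tree's inventory and reuse
# any id found there.  The helper name `_previous_file_id` is an assumption
# for illustration; basis_tree() and path2id() appear elsewhere in this file.
def _previous_file_id(branch, path):
    # id the path had in the last-committed tree, or None if never versioned
    basis_inv = branch.basis_tree().inventory
    return basis_inv.path2id(path)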
    def commit(self, message, timestamp=None, timezone=None,
               committer=None):
        """Commit working copy as a new revision.

        The basic approach is to add all the file texts into the
        store, then the inventory, then make a new revision pointing
        to that inventory and store that.

        This is not quite safe if the working copy changes during the
        commit; for the moment that is simply not allowed. A better
        approach is to make a temporary copy of the files before
        computing their hashes, and then add those hashes in turn to
        the inventory. This should mean at least that there are no
        broken hash pointers. There is no way we can get a snapshot
        of the whole directory at an instant. This would also have to
        be robust against files disappearing, moving, etc. So the
        whole thing is a bit hard.

        timestamp -- if not None, seconds-since-epoch for a
            postdated/predated commit.
        """
        self._need_writelock()

        ## TODO: Show branch names

        # TODO: Don't commit if there are no changes, unless forced?

        # First walk over the working inventory; and both update that
        # and also build a new revision inventory. The revision
        # inventory needs to hold the text-id, sha1 and size of the
        # actual file versions committed in the revision. (These are
        # not present in the working inventory.) We also need to
        # detect missing/deleted files, and remove them from the
        # working inventory.
        work_inv = self.read_working_inventory()
        inv = Inventory()
        basis = self.basis_tree()
        basis_inv = basis.inventory
        missing_ids = []
        for path, entry in work_inv.iter_entries():
            ## TODO: Cope with files that have gone missing.

            ## TODO: Check that the file kind has not changed from the previous
            ## revision of this file (if any).

            p = self.abspath(path)
            file_id = entry.file_id
            mutter('commit prep file %s, id %r ' % (p, file_id))

            if not os.path.exists(p):
                mutter(" file is missing, removing from inventory")
                show_status('D', entry.kind, quotefn(path))
                missing_ids.append(file_id)
                continue
            # TODO: Handle files that have been deleted

            # TODO: Maybe a special case for empty files? Seems a
            # waste to store them many times.

            inv.add(entry)

            if basis_inv.has_id(file_id):
                old_kind = basis_inv[file_id].kind
                if old_kind != entry.kind:
                    bailout("entry %r changed kind from %r to %r"
                            % (file_id, old_kind, entry.kind))

            if entry.kind == 'directory':
                if not os.path.isdir(p):
                    bailout("%s is entered as directory but not a directory" % quotefn(p))
            elif entry.kind == 'file':
                if not os.path.isfile(p):
                    bailout("%s is entered as file but is not a file" % quotefn(p))
            content = file(p, 'rb').read()

            entry.text_sha1 = sha_string(content)
            entry.text_size = len(content)

            old_ie = basis_inv.has_id(file_id) and basis_inv[file_id]
            if (old_ie
                and (old_ie.text_size == entry.text_size)
                and (old_ie.text_sha1 == entry.text_sha1)):
                ## assert content == basis.get_file(file_id).read()
                entry.text_id = basis_inv[file_id].text_id
                mutter(' unchanged from previous text_id {%s}' %
                       entry.text_id)
            else:
                entry.text_id = gen_file_id(entry.name)
                self.text_store.add(content, entry.text_id)
                mutter(' stored with text_id {%s}' % entry.text_id)
                # status letter for the change: added, modified in place,
                # or renamed/moved
                if not old_ie:
                    state = 'A'
                elif (old_ie.name == entry.name
                      and old_ie.parent_id == entry.parent_id):
                    state = 'M'
                else:
                    state = 'R'

                show_status(state, entry.kind, quotefn(path))
        for file_id in missing_ids:
            # have to do this later so we don't mess up the iterator.
            # since parents may be removed before their children we
            # have to check they are still present.
            # FIXME: There's probably a better way to do this; perhaps
            # the workingtree should know how to filter itself.
            if work_inv.has_id(file_id):
                del work_inv[file_id]
        inv_id = rev_id = _gen_revision_id(time.time())

        inv_tmp = tempfile.TemporaryFile()
        inv.write_xml(inv_tmp)
        inv_tmp.seek(0)
        self.inventory_store.add(inv_tmp, inv_id)
        mutter('new inventory_id is {%s}' % inv_id)

        self._write_inventory(work_inv)
        if timestamp == None:
            timestamp = time.time()

        if committer == None:
            committer = username()

        if timezone == None:
            timezone = local_time_offset()

        mutter("building commit log message")
        rev = Revision(timestamp=timestamp,
                       timezone=timezone,
                       committer=committer,
                       precursor=self.last_patch(),
                       message=message,
                       inventory_id=inv_id,
                       revision_id=rev_id)
        rev_tmp = tempfile.TemporaryFile()
        rev.write_xml(rev_tmp)
        rev_tmp.seek(0)
        self.revision_store.add(rev_tmp, rev_id)
        mutter("new revision_id is {%s}" % rev_id)
        ## XXX: Everything up to here can simply be orphaned if we abort
        ## the commit; it will leave junk files behind but that doesn't
        ## matter.

        ## TODO: Read back the just-generated changeset, and make sure it
        ## applies and recreates the right state.

        ## TODO: Also calculate and store the inventory SHA1

        mutter("committing patch r%d" % (self.revno() + 1))

        self.append_revision(rev_id)

        note("committed r%d" % self.revno())
    def append_revision(self, revision_id):
        mutter("add {%s} to revision-history" % revision_id)
        rev_history = self.revision_history()
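# --- Hedged usage sketch, not part of the original code above ---
# End-to-end illustration of the methods excerpted in this section, in the
# spirit of the ScratchBranch doctests in add(): version a file and commit
# it.  The import path `bzrlib.branch` is an assumption.
def _demo():
    from bzrlib.branch import ScratchBranch
    b = ScratchBranch(files=['foo'])
    assert 'foo' in b.unknowns()   # present on disk but not yet versioned
    b.add(['foo'])                 # mark it for the next commit
    b.commit('add foo')            # store text, inventory and revision
    print 'now at revision %d' % b.revno()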