            working inventory.
    """

    def __init__(self,
                 reporter=None,
                 config=None):
        """Create a Commit object.

        :param reporter: the default reporter to use or None to decide later
        """
        self.reporter = reporter
        self.config = config
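
    # Example (informal sketch, not from the original source; "wt" stands for
    # an already-open working tree): a caller that wants quiet output can pass
    # an explicit reporter up front, e.g.
    #
    #   Commit(reporter=NullCommitReporter()).commit(message="tweak",
    #                                                working_tree=wt)
    #
    # When no reporter is supplied here or to commit(), one is chosen later by
    # _commit() via _select_reporter().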

    def commit(self,
               message=None,
               timestamp=None,
               timezone=None,
               committer=None,
               specific_files=None,
               rev_id=None,
               allow_pointless=True,
               strict=False,
               verbose=False,
               revprops=None,
               working_tree=None,
               local=False,
               reporter=None,
               config=None,
               message_callback=None,
               recursive='down',
               exclude=None,
               possible_master_transports=None):
        """Commit working copy as a new revision.

        :param message: the commit message (it or message_callback is required)
        :param message_callback: A callback: message = message_callback(cmt_obj)

        :param timestamp: if not None, seconds-since-epoch for a
            postdated/predated commit.

        :param specific_files: If not None, commit only those files. An empty
            list means 'commit no files'.

        :param rev_id: If set, use this as the new revision id.
            Useful for test or import commands that need to tightly
            control what revisions are assigned.  If you duplicate
            a revision id that exists elsewhere it is your own fault.
            If null (default), a time/random revision id is generated.

        :param allow_pointless: If true (default), commit even if nothing
            has changed and no merges are recorded.

        :param strict: If true, don't allow a commit if the working tree
            contains unknown files.

        :param revprops: Properties for new revision
        :param local: Perform a local only commit.
        :param reporter: the reporter to use or None for the default
        :param verbose: if True and the reporter is not None, report everything
        :param recursive: If set to 'down', commit in any subtrees that have
            pending changes of any sort during this commit.
        :param exclude: None or a list of relative paths to exclude from the
            commit. Pending changes to excluded files will be ignored by the
            commit.
        """
        operation = OperationWithCleanups(self._commit)
        self.revprops = revprops or {}
        # XXX: Can be set on __init__ or passed in - this is a bit ugly.
        self.config = config or self.config
        return operation.run(
               message=message,
               timestamp=timestamp,
               timezone=timezone,
               committer=committer,
               specific_files=specific_files,
               rev_id=rev_id,
               allow_pointless=allow_pointless,
               strict=strict,
               verbose=verbose,
               working_tree=working_tree,
               local=local,
               reporter=reporter,
               message_callback=message_callback,
               recursive=recursive,
               exclude=exclude,
               possible_master_transports=possible_master_transports)
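
    # Example usage (hypothetical sketch; the file names below are
    # illustrative, not part of this module): most callers reach this code
    # through WorkingTree.commit(), which forwards its keyword arguments
    # here, e.g.
    #
    #   from bzrlib import workingtree
    #   wt = workingtree.WorkingTree.open('.')
    #   new_rev_id = wt.commit(message="fix the frobnicator",
    #                          specific_files=['src/frob.py'],
    #                          allow_pointless=False)
    #
    # which ends up calling Commit().commit(working_tree=wt, message=..., ...).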

    def _commit(self, operation, message, timestamp, timezone, committer,
            specific_files, rev_id, allow_pointless, strict, verbose,
            working_tree, local, reporter, message_callback, recursive,
            exclude, possible_master_transports):
        mutter('preparing to commit')

        if working_tree is None:
            raise BzrError("working_tree must be passed into commit().")
        self.work_tree = working_tree
        self.branch = self.work_tree.branch
        if getattr(self.work_tree, 'requires_rich_root', lambda: False)():
            if not self.branch.repository.supports_rich_root():
                raise errors.RootNotRich()
        if message_callback is None:
            if message is not None:
                if isinstance(message, str):
                    message = message.decode(get_user_encoding())
                message_callback = lambda x: message
            else:
                raise BzrError("The message or message_callback keyword"
                               " parameter is required for commit().")

        self.bound_branch = None
        self.any_entries_deleted = False
        if exclude is not None:
            self.exclude = sorted(
                minimum_path_selection(exclude))
        else:
            self.exclude = []
        self.local = local
        self.master_branch = None
        self.recursive = recursive
        self.rev_id = None
        # self.specific_files is None to indicate no filter, or any iterable to
        # indicate a filter - [] means no files at all, as per iter_changes.
        if specific_files is not None:
            self.specific_files = sorted(
                minimum_path_selection(specific_files))
        else:
            self.specific_files = None

        self.allow_pointless = allow_pointless
        self.message_callback = message_callback
        self.timestamp = timestamp
        self.timezone = timezone
        self.committer = committer
        self.strict = strict
        self.verbose = verbose

        self.work_tree.lock_write()
        operation.add_cleanup(self.work_tree.unlock)
        self.parents = self.work_tree.get_parent_ids()
        # We can use record_iter_changes IFF iter_changes is compatible with
        # the command line parameters, and the repository has fast delta
        # generation. See bug 347649.
        self.use_record_iter_changes = (
            not self.exclude and
            not self.branch.repository._format.supports_tree_reference and
            (self.branch.repository._format.fast_deltas or
             len(self.parents) < 2))
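        # Informal reading of the condition above (added commentary, not from
        # the original source): the fast iter_changes path is only taken when
        # no excludes were given, the repository format cannot hold tree
        # references, and either deltas are cheap for this format or this is
        # not a merge commit (fewer than two parents).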
        self.pb = bzrlib.ui.ui_factory.nested_progress_bar()
        operation.add_cleanup(self.pb.finished)
        self.basis_revid = self.work_tree.last_revision()
        self.basis_tree = self.work_tree.basis_tree()
        self.basis_tree.lock_read()
        operation.add_cleanup(self.basis_tree.unlock)
        # Cannot commit with conflicts present.
        if len(self.work_tree.conflicts()) > 0:
            raise ConflictsInTree

        # Setup the bound branch variables as needed.
        self._check_bound_branch(operation, possible_master_transports)

        # Check that the working tree is up to date
        old_revno, new_revno = self._check_out_of_date_tree()

        # Complete configuration setup
        if reporter is not None:
            self.reporter = reporter
        elif self.reporter is None:
            self.reporter = self._select_reporter()
        if self.config is None:
            self.config = self.branch.get_config()

        self._set_specific_file_ids()
        # Setup the progress bar. As the number of files that need to be
        # committed is unknown, progress is reported as stages.
        # We keep track of entries separately though and include that
        # information in the progress bar during the relevant stages.
        self.pb_stage_name = ""
        self.pb_stage_count = 0
        self.pb_stage_total = 5
        if self.bound_branch:
            self.pb_stage_total += 1
        self.pb.show_pct = False
        self.pb.show_spinner = False
        self.pb.show_eta = False
        self.pb.show_count = True
        self.pb.show_bar = True

        self._gather_parents()
        # After a merge, a selected file commit is not supported.
        # See 'bzr help merge' for an explanation as to why.
        if len(self.parents) > 1 and self.specific_files is not None:
            raise errors.CannotCommitSelectedFileMerge(self.specific_files)
        # Excludes are a form of selected file commit.
        if len(self.parents) > 1 and self.exclude:
            raise errors.CannotCommitSelectedFileMerge(self.exclude)

        # Collect the changes
        self._set_progress_stage("Collecting changes", counter=True)
        self.builder = self.branch.get_commit_builder(self.parents,
            self.config, timestamp, timezone, committer, self.revprops, rev_id)

        try:
            self.builder.will_record_deletes()
            # find the location being committed to
            if self.bound_branch:
                master_location = self.master_branch.base
            else:
                master_location = self.branch.base

            # report the start of the commit
            self.reporter.started(new_revno, self.rev_id, master_location)

            self._update_builder_with_changes()
            self._check_pointless()

            # TODO: Now the new inventory is known, check for conflicts.
            # ADHB 2006-08-08: If this is done, populate_new_inv should not add
            # weave lines, because nothing should be recorded until it is known
            # that commit will succeed.
            self._set_progress_stage("Saving data locally")
            self.builder.finish_inventory()

            # Prompt the user for a commit message if none provided
            message = message_callback(self)
            self.message = message

            # Add revision data to the local branch
            self.rev_id = self.builder.commit(self.message)
        except Exception, e:
            mutter("aborting commit write group because of exception:")
            trace.log_exception_quietly()
            note("aborting commit write group: %r" % (e,))
            self.builder.abort()
            raise

        self._process_pre_hooks(old_revno, new_revno)

        # Upload revision data to the master.
        # this will propagate merged revisions too if needed.
        if self.bound_branch:
            self._set_progress_stage("Uploading data to master branch")
            # 'commit' to the master first so a timeout here causes the
            # local branch to be out of date
            self.master_branch.import_last_revision_info(
                self.branch.repository, new_revno, self.rev_id)

        # and now do the commit locally.
        self.branch.set_last_revision_info(new_revno, self.rev_id)

        # Make the working tree be up to date with the branch. This
        # includes automatic changes scheduled to be made to the tree, such
        # as updating its basis and unversioning paths that were missing.
        self.work_tree.unversion(self.deleted_ids)
        self._set_progress_stage("Updating the working tree")
        self.work_tree.update_basis_by_delta(self.rev_id,
            self.builder.get_basis_delta())
        self.reporter.completed(new_revno, self.rev_id)
        self._process_post_hooks(old_revno, new_revno)
        return self.rev_id

    def _select_reporter(self):
        """Select the CommitReporter to use."""
        if is_quiet():
            return NullCommitReporter()
        return ReportCommitToLog()

    def _check_pointless(self):
        if self.allow_pointless:
            return
        # A merge with no effect on files
        if len(self.parents) > 1:
            return
        # TODO: we could simplify this by using self.builder.basis_delta.

        # The initial commit adds a root directory, but this in itself is not
        # a worthwhile commit.
        if (self.basis_revid == revision.NULL_REVISION and
            ((self.builder.new_inventory is not None and
             len(self.builder.new_inventory) == 1) or
            len(self.builder._basis_delta) == 1)):
            raise PointlessCommit()
        if self.builder.any_changes():
            return
        raise PointlessCommit()
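
    # Worked example (informal, added commentary): on a brand-new branch the
    # first commit only introduces the root directory entry, so basis_revid is
    # NULL_REVISION and the new inventory/delta has length 1; that combination
    # alone is treated as pointless and raises PointlessCommit unless
    # allow_pointless was requested.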

    def _check_bound_branch(self, operation, possible_master_transports=None):
        """Check to see if the local branch is bound.

        If it is bound, then most of the commit will actually be
        done using the remote branch as the target branch.
        Only at the end will the local branch be updated.
        """
        if self.local and not self.branch.get_bound_location():
            raise errors.LocalRequiresBoundBranch()

        if not self.local:
            self.master_branch = self.branch.get_master_branch(
                possible_master_transports)

        if not self.master_branch:
            # make this branch the reference branch for out of date checks.
            self.master_branch = self.branch
            return

        # If the master branch is bound, we must fail
        master_bound_location = self.master_branch.get_bound_location()
        if master_bound_location:
            raise errors.CommitToDoubleBoundBranch(self.branch,
                    self.master_branch, master_bound_location)

        # TODO: jam 20051230 We could automatically push local
        #       commits to the remote branch if they would fit.
        #       But for now, just require remote to be identical
        #       to local.

        # Make sure the local branch is identical to the master
        master_info = self.master_branch.last_revision_info()
        local_info = self.branch.last_revision_info()
        if local_info != master_info:
            raise errors.BoundBranchOutOfDate(self.branch,
                    self.master_branch)

        # Now things are ready to change the master branch
        self.bound_branch = self.branch
        self.master_branch.lock_write()
        operation.add_cleanup(self.master_branch.unlock)

    def _check_out_of_date_tree(self):
        """Check that the working tree is up to date.

        :return: old_revision_number, new_revision_number tuple
        """
        try:
            first_tree_parent = self.work_tree.get_parent_ids()[0]
        except IndexError:
            # if there are no parents, treat our parent as 'None'
            # this is so that we still consider the master branch
            # - in a checkout scenario the tree may have no
            # parents but the branch may do.
            first_tree_parent = bzrlib.revision.NULL_REVISION
        old_revno, master_last = self.master_branch.last_revision_info()
        if master_last != first_tree_parent:
            if master_last != bzrlib.revision.NULL_REVISION:
                raise errors.OutOfDateTree(self.work_tree)
        if self.branch.repository.has_revision(first_tree_parent):
            new_revno = old_revno + 1
        else:
            # ghost parents never appear in revision history.
            new_revno = 1
        return old_revno, new_revno
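
    # Worked example (informal, added commentary): if the master branch is at
    # revno 5 with tip R and the tree's first parent is also R, the tree is
    # current and this returns (5, 6), so the commit being built becomes
    # revision 6. If the tree's parent is older than the master tip,
    # OutOfDateTree is raised and the tree has to be updated first.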

    def _process_pre_hooks(self, old_revno, new_revno):
        """Process any registered pre commit hooks."""
        self._set_progress_stage("Running pre_commit hooks")
        self._process_hooks("pre_commit", old_revno, new_revno)

    def _process_post_hooks(self, old_revno, new_revno):
        """Process any registered post commit hooks."""
        # Process the post commit hooks, if any
        self._set_progress_stage("Running post_commit hooks")
        # old style commit hooks - should be deprecated ? (obsoleted in
        if self.config.post_commit() is not None:
            hooks = self.config.post_commit().split(' ')
            # this would be nicer with twisted.python.reflect.namedAny
            for hook in hooks:
                result = eval(hook + '(branch, rev_id)',
                              {'branch':self.branch,
                               'rev_id':self.rev_id})
        # process new style post commit hooks
        self._process_hooks("post_commit", old_revno, new_revno)

    def _process_hooks(self, hook_name, old_revno, new_revno):
        if not Branch.hooks[hook_name]:
            return

        # new style commit hooks:
        if not self.bound_branch:
            hook_master = self.branch
            hook_local = None
        else:
            hook_master = self.master_branch
            hook_local = self.branch
        # With bound branches, when the master is behind the local branch,
        # the 'old_revno' and old_revid values here are incorrect.
        # XXX: FIXME ^. RBC 20060206
        if self.parents:
            old_revid = self.parents[0]
        else:
            old_revid = bzrlib.revision.NULL_REVISION

        if hook_name == "pre_commit":
            future_tree = self.builder.revision_tree()
            tree_delta = future_tree.changes_from(self.basis_tree,
                include_root=True)

        for hook in Branch.hooks[hook_name]:
            # show the running hook in the progress bar. As hooks may
            # end up doing nothing (e.g. because they are not configured by
            # the user) this is still showing progress, not showing overall
            # actions - it's up to each plugin to show a UI if it wants to
            # (such as 'Emailing diff to foo@example.com').
            self.pb_stage_name = "Running %s hooks [%s]" % \
                (hook_name, Branch.hooks.get_hook_name(hook))
            self._emit_progress()
            if 'hooks' in debug.debug_flags:
                mutter("Invoking commit hook: %r", hook)
            if hook_name == "post_commit":
                hook(hook_local, hook_master, old_revno, old_revid, new_revno,
                     self.rev_id)
            elif hook_name == "pre_commit":
                hook(hook_local, hook_master,
                     old_revno, old_revid, new_revno, self.rev_id,
                     tree_delta, future_tree)
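
    # Example (sketch, assuming the standard Branch.hooks registry; the hook
    # function below is hypothetical): plugins normally register a new-style
    # hook, dispatched above, rather than the config-based post_commit
    # command, e.g.
    #
    #   def announce_commit(local, master, old_revno, old_revid,
    #                       new_revno, new_revid):
    #       note('committed %s as revno %d', new_revid, new_revno)
    #
    #   Branch.hooks.install_named_hook('post_commit', announce_commit,
    #                                   'announce commits')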

    def _gather_parents(self):
        """Record the parents of a merge for merge detection."""
        # TODO: Make sure that this list doesn't contain duplicate
        # entries and the order is preserved when doing this.
        if self.use_record_iter_changes:
            return
        self.basis_inv = self.basis_tree.inventory
        self.parent_invs = [self.basis_inv]
        for revision in self.parents[1:]:
            if self.branch.repository.has_revision(revision):
                mutter('commit parent revision {%s}', revision)
                inventory = self.branch.repository.get_inventory(revision)
                self.parent_invs.append(inventory)
            else:
                mutter('commit parent ghost revision {%s}', revision)

    def _update_builder_with_changes(self):
        """Update the commit builder with the data about what has changed.
        """
        exclude = self.exclude
        specific_files = self.specific_files
        mutter("Selecting files for commit with filter %s", specific_files)

        self._check_strict()
        if self.use_record_iter_changes:
            iter_changes = self.work_tree.iter_changes(self.basis_tree,
                specific_files=specific_files)
            iter_changes = self._filter_iter_changes(iter_changes)
            for file_id, path, fs_hash in self.builder.record_iter_changes(
                self.work_tree, self.basis_revid, iter_changes):
                self.work_tree._observed_sha1(file_id, path, fs_hash)
        else:
            # Build the new inventory
            self._populate_from_inventory()
            self._record_unselected()
            self._report_and_accumulate_deletes()

    def _filter_iter_changes(self, iter_changes):
        """Process iter_changes.

        This method reports on the changes in iter_changes to the user, and
        converts 'missing' entries in the iter_changes iterator to 'deleted'
        entries. 'missing' entries have their new path set to None and their
        new versioned flag set to False.

        :param iter_changes: An iter_changes to process.
        :return: A generator of changes.
        """
        reporter = self.reporter
        report_changes = reporter.is_verbose()
        deleted_ids = []
        for change in iter_changes:
            if report_changes:
                old_path = change[1][0]
                new_path = change[1][1]
                versioned = change[3][1]
            kind = change[6][1]
            versioned = change[3][1]
            if kind is None and versioned:
                # 'missing' path
                if report_changes:
                    reporter.missing(new_path)
                deleted_ids.append(change[0])
                # Reset the new path (None) and new versioned flag (False)
                change = (change[0], (change[1][0], None), change[2],
                    (change[3][0], False)) + change[4:]
            elif kind == 'tree-reference':
                if self.recursive == 'down':
                    self._commit_nested_tree(change[0], change[1][1])
            if change[3][0] or change[3][1]:
                yield change
                if report_changes:
                    if new_path is None:
                        reporter.deleted(old_path)
                    elif old_path is None:
                        reporter.snapshot_change('added', new_path)
                    elif old_path != new_path:
                        reporter.renamed('renamed', old_path, new_path)
                    else:
                        if (new_path or
                            self.work_tree.branch.repository._format.rich_root_data):
                            # Don't report on changes to '' in non rich root
                            # repositories.
                            reporter.snapshot_change('modified', new_path)
                    self._next_progress_entry()
        # Unversion IDs that were found to be deleted
        self.deleted_ids = deleted_ids
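
    # For reference (summarised from the indices used above; see the
    # iter_changes docstring for the authoritative description): each change
    # tuple has the shape
    #
    #   (file_id, (old_path, new_path), changed_content,
    #    (old_versioned, new_versioned), (old_parent_id, new_parent_id),
    #    (old_name, new_name), (old_kind, new_kind),
    #    (old_executable, new_executable))
    #
    # which is why change[0] is the file id, change[1][1] the new path,
    # change[3][1] the new versioned flag and change[6][1] the new kind.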

    def _record_unselected(self):
        # If specific files are selected, then all un-selected files must be
        # recorded in their previous state. For more details, see
        # https://lists.ubuntu.com/archives/bazaar/2007q3/028476.html.
        if self.specific_files or self.exclude:
            specific_files = self.specific_files or []
            for path, old_ie in self.basis_inv.iter_entries():
                if old_ie.file_id in self.builder.new_inventory:
                    # already added - skip.
                    continue
                if (is_inside_any(specific_files, path)
                    and not is_inside_any(self.exclude, path)):
                    # was inside the selected path, and not excluded - if not
                    # present it has been deleted so skip.
                    continue
                # From here down it was either not selected, or was excluded:
                # We preserve the entry unaltered.
                ie = old_ie.copy()
                # Note: specific file commits after a merge are currently
                # prohibited. This test is for sanity/safety in case it's
                # required after that changes.
                if len(self.parents) > 1:
                    ie.revision = None
                self.builder.record_entry_contents(ie, self.parent_invs, path,
                    self.basis_tree, None)

    def _report_and_accumulate_deletes(self):
        if (isinstance(self.basis_inv, Inventory)
            and isinstance(self.builder.new_inventory, Inventory)):
            # the older Inventory classes provide a _byid dict, and building a
            # set from the keys of this dict is substantially faster than even
            # getting a set of ids from the inventory
            #
            # <lifeless> set(dict) is roughly the same speed as
            # set(iter(dict)) and both are significantly slower than
            # set(dict.keys())
            deleted_ids = set(self.basis_inv._byid.keys()) - \
               set(self.builder.new_inventory._byid.keys())
        else:
            deleted_ids = set(self.basis_inv) - set(self.builder.new_inventory)
        if deleted_ids:
            self.any_entries_deleted = True
            deleted = [(self.basis_tree.id2path(file_id), file_id)
                for file_id in deleted_ids]
            deleted.sort()
            # XXX: this is not quite directory-order sorting
            for path, file_id in deleted:
                self.builder.record_delete(path, file_id)
                self.reporter.deleted(path)

    def _check_strict(self):
        # XXX: when we use iter_changes this would likely be faster if
        # iter_changes would check for us (even in the presence of
        # selected_files).
        if self.strict:
            # raise an exception as soon as we find a single unknown.
            for unknown in self.work_tree.unknowns():
                raise StrictCommitFailed()
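
    # Example (informal, added commentary): "bzr commit --strict" sets
    # strict=True, so a single unknown (unversioned) file in the working tree
    # is enough to abort the commit with StrictCommitFailed before any data
    # is recorded.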

    def _populate_from_inventory(self):
        """Populate the CommitBuilder by walking the working tree inventory."""
        # Build the revision inventory.
        #
        # This starts by creating a new empty inventory. Depending on
        # which files are selected for commit, and what is present in the
        # current tree, the new inventory is populated. inventory entries
        # which are candidates for modification have their revision set to
        # None; inventory entries that are carried over untouched have their
        # revision set to their prior value.
        #
        # ESEPARATIONOFCONCERNS: this function is diffing and using the diff
        # results to create a new inventory at the same time, which results
        # in bugs like #46635. Any reason not to use/enhance Tree.changes_from?

        specific_files = self.specific_files
        exclude = self.exclude
        report_changes = self.reporter.is_verbose()
        deleted_ids = []
        # A tree of paths that have been deleted. E.g. if foo/bar has been
        # deleted, then we have {'foo':{'bar':{}}}
        deleted_paths = {}
        # XXX: Note that entries may have the wrong kind because the entry does
        # not reflect the status on disk.
        work_inv = self.work_tree.inventory
        # NB: entries will include entries within the excluded ids/paths
        # because iter_entries_by_dir has no 'exclude' facility today.
        entries = work_inv.iter_entries_by_dir(
            specific_file_ids=self.specific_file_ids, yield_parents=True)
        for path, existing_ie in entries:
            file_id = existing_ie.file_id
            name = existing_ie.name
            parent_id = existing_ie.parent_id
            kind = existing_ie.kind
            # Skip files that have been deleted from the working tree.
            # The deleted path ids are also recorded so they can be explicitly
            # unversioned later.
            if deleted_paths:
                path_segments = splitpath(path)
                deleted_dict = deleted_paths
                for segment in path_segments:
                    deleted_dict = deleted_dict.get(segment, None)
                    if not deleted_dict:
                        # We either took a path not present in the dict
                        # (deleted_dict was None), or we've reached an empty
                        # child dir in the dict, so are now a sub-path.
                        break
                else:
                    deleted_dict = None
                if deleted_dict is not None:
                    # the path has a deleted parent, do not add it.
                    continue
            if exclude and is_inside_any(exclude, path):
                # Skip excluded paths. Excluded paths are processed by
                # _update_builder_with_changes.
                continue
            content_summary = self.work_tree.path_content_summary(path)
            kind = content_summary[0]
            # Note that when a filter of specific files is given, we must only
            # skip/record deleted files matching that filter.
            if not specific_files or is_inside_any(specific_files, path):
                if kind == 'missing':
                    if not deleted_paths:
                        # path won't have been split yet.
                        path_segments = splitpath(path)
                    deleted_dict = deleted_paths
                    for segment in path_segments:
                        deleted_dict = deleted_dict.setdefault(segment, {})
                    self.reporter.missing(path)
                    self._next_progress_entry()
                    deleted_ids.append(file_id)
                    continue
            # TODO: have the builder do the nested commit just-in-time IF and
            # only if needed.
            if kind == 'tree-reference':
                # enforce repository nested tree policy.
                if (not self.work_tree.supports_tree_reference() or
                    # repository does not support it either.
                    not self.branch.repository._format.supports_tree_reference):
                    kind = 'directory'
                    content_summary = (kind, None, None, None)
                elif self.recursive == 'down':
                    nested_revision_id = self._commit_nested_tree(
                        file_id, path)
                    content_summary = (kind, None, None, nested_revision_id)
                else:
                    nested_revision_id = self.work_tree.get_reference_revision(file_id)
                    content_summary = (kind, None, None, nested_revision_id)

            # Record an entry for this item
            # Note: I don't particularly want to have the existing_ie
            # parameter but the test suite currently (28-Jun-07) breaks
            # without it thanks to a unicode normalisation issue. :-(
            definitely_changed = kind != existing_ie.kind
            self._record_entry(path, file_id, specific_files, kind, name,
                parent_id, definitely_changed, existing_ie, report_changes,
                content_summary)

        # Unversion IDs that were found to be deleted
        self.deleted_ids = deleted_ids

    def _commit_nested_tree(self, file_id, path):
        "Commit a nested tree."
        sub_tree = self.work_tree.get_nested_tree(file_id, path)
        # FIXME: be more comprehensive here:
        # this works when both trees are in --trees repository,
        # but when both are bound to a different repository,
        # it fails; a better way of approaching this is to
        # finally implement the explicit-caches approach design
        # a while back - RBC 20070306.
        if sub_tree.branch.repository.has_same_location(
            self.work_tree.branch.repository):
            sub_tree.branch.repository = \
                self.work_tree.branch.repository
        try:
            return sub_tree.commit(message=None, revprops=self.revprops,
                recursive=self.recursive,
                message_callback=self.message_callback,
                timestamp=self.timestamp, timezone=self.timezone,
                committer=self.committer,
                allow_pointless=self.allow_pointless,
                strict=self.strict, verbose=self.verbose,
                local=self.local, reporter=self.reporter)
        except errors.PointlessCommit:
            return self.work_tree.get_reference_revision(file_id)

    def _record_entry(self, path, file_id, specific_files, kind, name,
        parent_id, definitely_changed, existing_ie, report_changes,
        content_summary):
        "Record the new inventory entry for a path if any."
        # mutter('check %s {%s}', path, file_id)
        # mutter('%s selected for commit', path)
        if definitely_changed or existing_ie is None:
            ie = make_entry(kind, name, parent_id, file_id)
        else:
            ie = existing_ie.copy()
            ie.revision = None
        # For carried over entries we don't care about the fs hash - the repo
        # isn't generating a sha, so we're not saving computation time.
        _, _, fs_hash = self.builder.record_entry_contents(
            ie, self.parent_invs, path, self.work_tree, content_summary)
        if report_changes:
            self._report_change(ie, path)
        if fs_hash:
            self.work_tree._observed_sha1(ie.file_id, path, fs_hash)
        return ie

    def _report_change(self, ie, path):
        """Report a change to the user.

        The change that has occurred is described relative to the basis
        inventory.
        """
        if (self.basis_inv.has_id(ie.file_id)):
            basis_ie = self.basis_inv[ie.file_id]
        else:
            basis_ie = None
        change = ie.describe_change(basis_ie, ie)
        if change in (InventoryEntry.RENAMED,
            InventoryEntry.MODIFIED_AND_RENAMED):
            old_path = self.basis_inv.id2path(ie.file_id)
            self.reporter.renamed(change, old_path, path)
            self._next_progress_entry()
        else:
            if change == 'unchanged':
                return
            self.reporter.snapshot_change(change, path)
            self._next_progress_entry()

    def _set_progress_stage(self, name, counter=False):
        """Set the progress stage and emit an update to the progress bar."""
        self.pb_stage_name = name
        self.pb_stage_count += 1
        if counter:
            self.pb_entries_count = 0
        else:
            self.pb_entries_count = None
        self._emit_progress()

    def _next_progress_entry(self):
        """Emit an update to the progress bar and increment the entry count."""
        self.pb_entries_count += 1
        self._emit_progress()

    def _emit_progress(self):
        if self.pb_entries_count is not None:
            text = "%s [%d] - Stage" % (self.pb_stage_name,
                self.pb_entries_count)
        else:
            text = "%s - Stage" % (self.pb_stage_name, )
        self.pb.update(text, self.pb_stage_count, self.pb_stage_total)
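
    # Example (informal, added commentary): during the "Collecting changes"
    # stage with three entries processed, pb_stage_count 1 and pb_stage_total
    # 5, the call above renders roughly "Collecting changes [3] - Stage" with
    # the bar at 1 of 5.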

    def _set_specific_file_ids(self):
        """populate self.specific_file_ids if we will use it."""
        if not self.use_record_iter_changes:
            # If provided, ensure the specified files are versioned
            if self.specific_files is not None:
                # Note: This routine is being called because it raises
                # PathNotVersionedError as a side effect of finding the IDs. We
                # later use the ids we found as input to the working tree
                # inventory iterator, so we only consider those ids rather than
                # examining the whole tree again.
                # XXX: Dont we have filter_unversioned to do this more
                # cheaply?
                self.specific_file_ids = tree.find_ids_across_trees(
                    self.specific_files, [self.basis_tree, self.work_tree])
            else:
                self.specific_file_ids = None