        working inventory.
    """

    def __init__(self,
                 reporter=None,
                 config=None):
        """Create a Commit object.

        :param reporter: the default reporter to use or None to decide later
        """
        self.reporter = reporter
        self.config = config
    @staticmethod
    def update_revprops(revprops, branch, authors=None, author=None,
                        local=False, possible_master_transports=None):
        if possible_master_transports is None:
            possible_master_transports = []
        if 'branch-nick' not in revprops:
            revprops['branch-nick'] = branch._get_nick(
                local,
                possible_master_transports)
        if authors is not None:
            if author is not None:
                raise AssertionError('Specifying both author and authors '
                        'is not allowed. Specify just authors instead')
            if 'author' in revprops or 'authors' in revprops:
                # XXX: maybe we should just accept one of them?
                raise AssertionError('author property given twice')
            for individual in authors:
                if '\n' in individual:
                    raise AssertionError('\\n is not a valid character '
                            'in an author identity')
            revprops['authors'] = '\n'.join(authors)
        if author is not None:
            symbol_versioning.warn('The parameter author was deprecated'
                    ' in version 1.13. Use authors instead',
                    DeprecationWarning)
            if 'author' in revprops or 'authors' in revprops:
                # XXX: maybe we should just accept one of them?
                raise AssertionError('author property given twice')
            if '\n' in author:
                raise AssertionError('\\n is not a valid character '
                        'in an author identity')
            revprops['authors'] = author
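    # Illustrative sketch (not part of the original source): update_revprops
    # fills in the revision properties that will be attached to the new
    # revision, mutating the dict it is given, e.g.
    #   revprops = {}
    #   Commit.update_revprops(revprops, branch,
    #       authors=['Jane Doe <jane@example.com>'])
    # after which revprops carries 'branch-nick' and 'authors' entries.
    # The branch object and author identity above are assumed placeholders.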
    def commit(self,
               message=None,
               timestamp=None,
               timezone=None,
               committer=None,
               specific_files=None,
               rev_id=None,
               allow_pointless=True,
               strict=False,
               verbose=False,
               revprops=None,
               working_tree=None,
               local=False,
               reporter=None,
               config=None,
               message_callback=None,
               recursive='down',
               exclude=None,
               possible_master_transports=None,
               lossy=False):
        """Commit working copy as a new revision.

        :param message: the commit message (it or message_callback is required)
        :param message_callback: A callback: message = message_callback(cmt_obj)

        :param timestamp: if not None, seconds-since-epoch for a
            postdated/predated commit.

        :param specific_files: If not None, commit only those files. An empty
            list means 'commit no files'.

        :param rev_id: If set, use this as the new revision id.
            Useful for test or import commands that need to tightly
            control what revisions are assigned.  If you duplicate
            a revision id that exists elsewhere it is your own fault.
            If null (default), a time/random revision id is generated.

        :param allow_pointless: If true (default), commit even if nothing
            has changed and no merges are recorded.

        :param strict: If true, don't allow a commit if the working tree
            contains unknown files.

        :param revprops: Properties for new revision
        :param local: Perform a local only commit.
        :param reporter: the reporter to use or None for the default
        :param verbose: if True and the reporter is not None, report everything
        :param recursive: If set to 'down', commit in any subtrees that have
            pending changes of any sort during this commit.
        :param exclude: None or a list of relative paths to exclude from the
            commit. Pending changes to excluded files will be ignored by the
            commit.
        :param lossy: When committing to a foreign VCS, ignore any
            data that can not be natively represented.
        """
        operation = OperationWithCleanups(self._commit)
        self.revprops = revprops or {}
        # XXX: Can be set on __init__ or passed in - this is a bit ugly.
        self.config = config or self.config
        return operation.run(
            message=message,
            timestamp=timestamp,
            timezone=timezone,
            committer=committer,
            specific_files=specific_files,
            rev_id=rev_id,
            allow_pointless=allow_pointless,
            strict=strict,
            verbose=verbose,
            working_tree=working_tree,
            local=local,
            reporter=reporter,
            message_callback=message_callback,
            recursive=recursive,
            exclude=exclude,
            possible_master_transports=possible_master_transports,
            lossy=lossy)
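    # Usage sketch (assumption, not taken from the original source): callers
    # normally reach this via WorkingTree.commit(), which forwards to a
    # Commit instance, roughly:
    #   Commit(reporter=NullCommitReporter()).commit(
    #       message="fix the frobnicator", working_tree=tree,
    #       allow_pointless=False)
    # 'tree' stands in for an existing WorkingTree; the message and reporter
    # choice are illustrative only.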
    def _commit(self, operation, message, timestamp, timezone, committer,
            specific_files, rev_id, allow_pointless, strict, verbose,
            working_tree, local, reporter, message_callback, recursive,
            exclude, possible_master_transports, lossy):
        mutter('preparing to commit')

        if working_tree is None:
            raise BzrError("working_tree must be passed into commit().")
        else:
            self.work_tree = working_tree
            self.branch = self.work_tree.branch
            if getattr(self.work_tree, 'requires_rich_root', lambda: False)():
                if not self.branch.repository.supports_rich_root():
                    raise errors.RootNotRich()
        if message_callback is None:
            if message is not None:
                if isinstance(message, str):
                    message = message.decode(get_user_encoding())
                message_callback = lambda x: message
            else:
                raise BzrError("The message or message_callback keyword"
                               " parameter is required for commit().")

        self.bound_branch = None
        self.any_entries_deleted = False
        if exclude is not None:
            self.exclude = sorted(
                minimum_path_selection(exclude))
        else:
            self.exclude = []
        self.local = local
        self.master_branch = None
        self.recursive = recursive
        self.rev_id = None
        # self.specific_files is None to indicate no filter, or any iterable to
        # indicate a filter - [] means no files at all, as per iter_changes.
        if specific_files is not None:
            self.specific_files = sorted(
                minimum_path_selection(specific_files))
        else:
            self.specific_files = None
        self.allow_pointless = allow_pointless
        self.message_callback = message_callback
        self.timestamp = timestamp
        self.timezone = timezone
        self.committer = committer
        self.strict = strict
        self.verbose = verbose

        self.work_tree.lock_write()
        operation.add_cleanup(self.work_tree.unlock)
        self.parents = self.work_tree.get_parent_ids()
        # We can use record_iter_changes IFF iter_changes is compatible with
        # the command line parameters, and the repository has fast delta
        # generation. See bug 347649.
        self.use_record_iter_changes = (
            not self.exclude and
            not self.branch.repository._format.supports_tree_reference and
            (self.branch.repository._format.fast_deltas or
             len(self.parents) < 2))
        self.pb = ui.ui_factory.nested_progress_bar()
        operation.add_cleanup(self.pb.finished)
        self.basis_revid = self.work_tree.last_revision()
        self.basis_tree = self.work_tree.basis_tree()
        self.basis_tree.lock_read()
        operation.add_cleanup(self.basis_tree.unlock)
        # Cannot commit with conflicts present.
        if len(self.work_tree.conflicts()) > 0:
            raise ConflictsInTree
        # Setup the bound branch variables as needed.
        self._check_bound_branch(operation, possible_master_transports)

        # Check that the working tree is up to date
        old_revno, new_revno = self._check_out_of_date_tree()

        # Complete configuration setup
        if reporter is not None:
            self.reporter = reporter
        elif self.reporter is None:
            self.reporter = self._select_reporter()
        if self.config is None:
            self.config = self.branch.get_config()

        self._set_specific_file_ids()
        # Setup the progress bar. As the number of files that need to be
        # committed is unknown, progress is reported as stages.
        # We keep track of entries separately though and include that
        # information in the progress bar during the relevant stages.
        self.pb_stage_name = ""
        self.pb_stage_count = 0
        self.pb_stage_total = 5
        if self.bound_branch:
            # 2 extra stages: "Uploading data to master branch" and "Merging
            # tags to master branch"
            self.pb_stage_total += 2
        self.pb.show_pct = False
        self.pb.show_spinner = False
        self.pb.show_eta = False
        self.pb.show_count = True
        self.pb.show_bar = True
        self._gather_parents()
        # After a merge, a selected file commit is not supported.
        # See 'bzr help merge' for an explanation as to why.
        if len(self.parents) > 1 and self.specific_files is not None:
            raise errors.CannotCommitSelectedFileMerge(self.specific_files)
        # Excludes are a form of selected file commit.
        if len(self.parents) > 1 and self.exclude:
            raise errors.CannotCommitSelectedFileMerge(self.exclude)
        # Collect the changes
        self._set_progress_stage("Collecting changes", counter=True)
        self.builder = self.branch.get_commit_builder(self.parents,
            self.config, timestamp, timezone, committer, self.revprops,
            rev_id, lossy=lossy)
        if not self.builder.supports_record_entry_contents and self.exclude:
            self.builder.abort()
            raise errors.ExcludesUnsupported(self.branch.repository)

        try:
            self.builder.will_record_deletes()
            # find the location being committed to
            if self.bound_branch:
                master_location = self.master_branch.base
            else:
                master_location = self.branch.base

            # report the start of the commit
            self.reporter.started(new_revno, self.rev_id, master_location)

            self._update_builder_with_changes()
            self._check_pointless()

            # TODO: Now the new inventory is known, check for conflicts.
            # ADHB 2006-08-08: If this is done, populate_new_inv should not add
            # weave lines, because nothing should be recorded until it is known
            # that commit will succeed.
            self._set_progress_stage("Saving data locally")
            self.builder.finish_inventory()

            # Prompt the user for a commit message if none provided
            message = message_callback(self)
            self.message = message

            # Add revision data to the local branch
            self.rev_id = self.builder.commit(self.message)
        except Exception:
            mutter("aborting commit write group because of exception:")
            trace.log_exception_quietly()
            self.builder.abort()
            raise
        self._process_pre_hooks(old_revno, new_revno)

        # Upload revision data to the master.
        # this will propagate merged revisions too if needed.
        if self.bound_branch:
            self._set_progress_stage("Uploading data to master branch")
            # 'commit' to the master first so a timeout here causes the
            # local branch to be out of date
            (new_revno, self.rev_id) = self.master_branch.import_last_revision_info_and_tags(
                self.branch, new_revno, self.rev_id, lossy=lossy)
            if lossy:
                self.branch.fetch(self.master_branch, self.rev_id)

        # and now do the commit locally.
        self.branch.set_last_revision_info(new_revno, self.rev_id)

        # Merge local tags to remote
        if self.bound_branch:
            self._set_progress_stage("Merging tags to master branch")
            tag_updates, tag_conflicts = self.branch.tags.merge_to(
                self.master_branch.tags)
            if tag_conflicts:
                warning_lines = ['    ' + name for name, _, _ in tag_conflicts]
                note("Conflicting tags in bound branch:\n" +
                    "\n".join(warning_lines))

        # Make the working tree be up to date with the branch. This
        # includes automatic changes scheduled to be made to the tree, such
        # as updating its basis and unversioning paths that were missing.
        self.work_tree.unversion(self.deleted_ids)
        self._set_progress_stage("Updating the working tree")
        self.work_tree.update_basis_by_delta(self.rev_id,
            self.builder.get_basis_delta())
        self.reporter.completed(new_revno, self.rev_id)
        self._process_post_hooks(old_revno, new_revno)
        return self.rev_id
    def _select_reporter(self):
        """Select the CommitReporter to use."""
        if is_quiet():
            return NullCommitReporter()
        return ReportCommitToLog()
    def _check_pointless(self):
        if self.allow_pointless:
            return
        # A merge with no effect on files
        if len(self.parents) > 1:
            return
        if self.builder.any_changes():
            return
        raise PointlessCommit()
    def _check_bound_branch(self, operation, possible_master_transports=None):
        """Check to see if the local branch is bound.

        If it is bound, then most of the commit will actually be
        done using the remote branch as the target branch.
        Only at the end will the local branch be updated.
        """
        if self.local and not self.branch.get_bound_location():
            raise errors.LocalRequiresBoundBranch()

        if not self.local:
            self.master_branch = self.branch.get_master_branch(
                possible_master_transports)

        if not self.master_branch:
            # make this branch the reference branch for out of date checks.
            self.master_branch = self.branch
            return

        # If the master branch is bound, we must fail
        master_bound_location = self.master_branch.get_bound_location()
        if master_bound_location:
            raise errors.CommitToDoubleBoundBranch(self.branch,
                    self.master_branch, master_bound_location)

        # TODO: jam 20051230 We could automatically push local
        #       commits to the remote branch if they would fit.
        #       But for now, just require remote to be identical
        #       to local.

        # Make sure the local branch is identical to the master
        master_info = self.master_branch.last_revision_info()
        local_info = self.branch.last_revision_info()
        if local_info != master_info:
            raise errors.BoundBranchOutOfDate(self.branch,
                    self.master_branch)

        # Now things are ready to change the master branch
        # store a reference
        self.bound_branch = self.branch
        self.master_branch.lock_write()
        operation.add_cleanup(self.master_branch.unlock)
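    # Note (editorial, assumed example): the bound-branch case corresponds to
    # a checkout created with 'bzr checkout URL' - self.branch is the local
    # branch and self.master_branch is the branch at URL. _commit() above
    # uploads the revision to the master first and only afterwards updates
    # the local branch, so a failure part-way leaves the local branch out of
    # date rather than ahead of its master.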
    def _check_out_of_date_tree(self):
        """Check that the working tree is up to date.

        :return: old_revision_number, new_revision_number tuple
        """
        try:
            first_tree_parent = self.work_tree.get_parent_ids()[0]
        except IndexError:
            # if there are no parents, treat our parent as 'None'
            # this is so that we still consider the master branch
            # - in a checkout scenario the tree may have no
            # parents but the branch may do.
            first_tree_parent = bzrlib.revision.NULL_REVISION
        old_revno, master_last = self.master_branch.last_revision_info()
        if master_last != first_tree_parent:
            if master_last != bzrlib.revision.NULL_REVISION:
                raise errors.OutOfDateTree(self.work_tree)
        if self.branch.repository.has_revision(first_tree_parent):
            new_revno = old_revno + 1
        else:
            # ghost parents never appear in revision history.
            new_revno = 1
        return old_revno, new_revno
    def _process_pre_hooks(self, old_revno, new_revno):
        """Process any registered pre commit hooks."""
        self._set_progress_stage("Running pre_commit hooks")
        self._process_hooks("pre_commit", old_revno, new_revno)
    def _process_post_hooks(self, old_revno, new_revno):
        """Process any registered post commit hooks."""
        # Process the post commit hooks, if any
        self._set_progress_stage("Running post_commit hooks")
        # old style commit hooks - should be deprecated ? (obsoleted in
        # 0.15)
        if self.config.post_commit() is not None:
            hooks = self.config.post_commit().split(' ')
            # this would be nicer with twisted.python.reflect.namedAny
            for hook in hooks:
                result = eval(hook + '(branch, rev_id)',
                              {'branch': self.branch,
                               'rev_id': self.rev_id})
        # process new style post commit hooks
        self._process_hooks("post_commit", old_revno, new_revno)
    def _process_hooks(self, hook_name, old_revno, new_revno):
        if not Branch.hooks[hook_name]:
            return

        # new style commit hooks:
        if not self.bound_branch:
            hook_master = self.branch
            hook_local = None
        else:
            hook_master = self.master_branch
            hook_local = self.branch
        # With bound branches, when the master is behind the local branch,
        # the 'old_revno' and old_revid values here are incorrect.
        # XXX: FIXME ^. RBC 20060206
        if self.parents:
            old_revid = self.parents[0]
        else:
            old_revid = bzrlib.revision.NULL_REVISION

        if hook_name == "pre_commit":
            future_tree = self.builder.revision_tree()
            tree_delta = future_tree.changes_from(self.basis_tree,
                include_root=True)

        for hook in Branch.hooks[hook_name]:
            # show the running hook in the progress bar. As hooks may
            # end up doing nothing (e.g. because they are not configured by
            # the user) this is still showing progress, not showing overall
            # actions - it is up to each plugin to show a UI if it wants to
            # (such as 'Emailing diff to foo@example.com').
            self.pb_stage_name = "Running %s hooks [%s]" % \
                (hook_name, Branch.hooks.get_hook_name(hook))
            self._emit_progress()
            if 'hooks' in debug.debug_flags:
                mutter("Invoking commit hook: %r", hook)
            if hook_name == "post_commit":
                hook(hook_local, hook_master, old_revno, old_revid, new_revno,
                     self.rev_id)
            elif hook_name == "pre_commit":
                hook(hook_local, hook_master,
                     old_revno, old_revid, new_revno, self.rev_id,
                     tree_delta, future_tree)
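    # Sketch of how a plugin would attach to these hooks (assumed usage based
    # on the standard Branch.hooks registry, not something defined in this
    # file):
    #   def announce(local, master, old_revno, old_revid, new_revno, rev_id):
    #       note('committed %s as r%d', rev_id, new_revno)
    #   Branch.hooks.install_named_hook('post_commit', announce, 'announce')
    # The callable signature mirrors the post_commit invocation above;
    # pre_commit hooks additionally receive tree_delta and future_tree.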
    def _gather_parents(self):
        """Record the parents of a merge for merge detection."""
        # TODO: Make sure that this list doesn't contain duplicate
        # entries and the order is preserved when doing this.
        if self.use_record_iter_changes:
            return
        self.basis_inv = self.basis_tree.inventory
        self.parent_invs = [self.basis_inv]
        for revision in self.parents[1:]:
            if self.branch.repository.has_revision(revision):
                mutter('commit parent revision {%s}', revision)
                inventory = self.branch.repository.get_inventory(revision)
                self.parent_invs.append(inventory)
            else:
                mutter('commit parent ghost revision {%s}', revision)
    def _update_builder_with_changes(self):
        """Update the commit builder with the data about what has changed.
        """
        exclude = self.exclude
        specific_files = self.specific_files
        mutter("Selecting files for commit with filter %s", specific_files)

        self._check_strict()
        if self.use_record_iter_changes:
            iter_changes = self.work_tree.iter_changes(self.basis_tree,
                specific_files=specific_files)
            iter_changes = self._filter_iter_changes(iter_changes)
            for file_id, path, fs_hash in self.builder.record_iter_changes(
                self.work_tree, self.basis_revid, iter_changes):
                self.work_tree._observed_sha1(file_id, path, fs_hash)
        else:
            # Build the new inventory
            self._populate_from_inventory()
            self._record_unselected()
            self._report_and_accumulate_deletes()
    def _filter_iter_changes(self, iter_changes):
        """Process iter_changes.

        This method reports on the changes in iter_changes to the user, and
        converts 'missing' entries in the iter_changes iterator to 'deleted'
        entries. 'missing' entries have their new path set to None and their
        new versioned flag cleared.

        :param iter_changes: An iter_changes to process.
        :return: A generator of changes.
        """
        reporter = self.reporter
        report_changes = reporter.is_verbose()
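        # The tuples yielded by iter_changes are, in this code base,
        # (file_id, (old_path, new_path), changed_content, versioned, parent,
        #  name, kind, executable), with the last five fields being
        # (old, new) pairs; the index accesses below rely on that layout.
        # (Descriptive comment added here; layout assumed from Tree.iter_changes.)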
        deleted_ids = []
        for change in iter_changes:
            if report_changes:
                old_path = change[1][0]
                new_path = change[1][1]
                versioned = change[3][1]
            kind = change[6][1]
            versioned = change[3][1]
            if kind is None and versioned:
                # 'missing' path
                if report_changes:
                    reporter.missing(new_path)
                deleted_ids.append(change[0])
                # Reset the new path (None) and new versioned flag (False)
                change = (change[0], (change[1][0], None), change[2],
                    (change[3][0], False)) + change[4:]
                new_path = change[1][1]
                versioned = False
            elif kind == 'tree-reference':
                if self.recursive == 'down':
                    self._commit_nested_tree(change[0], change[1][1])
            if change[3][0] or change[3][1]:
                yield change
                if report_changes:
                    if new_path is None:
                        reporter.deleted(old_path)
                    elif old_path is None:
                        reporter.snapshot_change('added', new_path)
                    elif old_path != new_path:
                        reporter.renamed('renamed', old_path, new_path)
                    else:
                        if (new_path or
                            self.work_tree.branch.repository._format.rich_root_data):
                            # Don't report on changes to '' in non rich root
                            # repositories.
                            reporter.snapshot_change('modified', new_path)
            self._next_progress_entry()
        # Unversion IDs that were found to be deleted
        self.deleted_ids = deleted_ids
    def _record_unselected(self):
        # If specific files are selected, then all un-selected files must be
        # recorded in their previous state. For more details, see
        # https://lists.ubuntu.com/archives/bazaar/2007q3/028476.html.
        if self.specific_files or self.exclude:
            specific_files = self.specific_files or []
            for path, old_ie in self.basis_inv.iter_entries():
                if self.builder.new_inventory.has_id(old_ie.file_id):
                    # already added - skip.
                    continue
                if (is_inside_any(specific_files, path)
                    and not is_inside_any(self.exclude, path)):
                    # was inside the selected path, and not excluded - if not
                    # present it has been deleted so skip.
                    continue
                # From here down it was either not selected, or was excluded:
                # We preserve the entry unaltered.
                ie = old_ie.copy()
                # Note: specific file commits after a merge are currently
                # prohibited. This test is for sanity/safety in case it's
                # required after that changes.
                if len(self.parents) > 1:
                    ie.revision = None
                self.builder.record_entry_contents(ie, self.parent_invs, path,
                    self.basis_tree, None)
    def _report_and_accumulate_deletes(self):
        if (isinstance(self.basis_inv, Inventory)
            and isinstance(self.builder.new_inventory, Inventory)):
            # the older Inventory classes provide a _byid dict, and building a
            # set from the keys of this dict is substantially faster than even
            # getting a set of ids from the inventory
            #
            # <lifeless> set(dict) is roughly the same speed as
            # set(iter(dict)) and both are significantly slower than
            # set(dict.keys())
            deleted_ids = set(self.basis_inv._byid.keys()) - \
               set(self.builder.new_inventory._byid.keys())
        else:
            deleted_ids = set(self.basis_inv) - set(self.builder.new_inventory)
        if deleted_ids:
            self.any_entries_deleted = True
            deleted = [(self.basis_tree.id2path(file_id), file_id)
                for file_id in deleted_ids]
            deleted.sort()
            # XXX: this is not quite directory-order sorting
            for path, file_id in deleted:
                self.builder.record_delete(path, file_id)
                self.reporter.deleted(path)
    def _check_strict(self):
        # XXX: when we use iter_changes this would likely be faster if
        # iter_changes would check for us (even in the presence of
        # selected_files).
        if self.strict:
            # raise an exception as soon as we find a single unknown.
            for unknown in self.work_tree.unknowns():
                raise StrictCommitFailed()
    def _populate_from_inventory(self):
        """Populate the CommitBuilder by walking the working tree inventory."""
        # Build the revision inventory.
        #
        # This starts by creating a new empty inventory. Depending on
        # which files are selected for commit, and what is present in the
        # current tree, the new inventory is populated. inventory entries
        # which are candidates for modification have their revision set to
        # None; inventory entries that are carried over untouched have their
        # revision set to their prior value.
        #
        # ESEPARATIONOFCONCERNS: this function is diffing and using the diff
        # results to create a new inventory at the same time, which results
        # in bugs like #46635. Any reason not to use/enhance Tree.changes_from?

        specific_files = self.specific_files
        exclude = self.exclude
        report_changes = self.reporter.is_verbose()
        deleted_ids = []
        # A tree of paths that have been deleted. E.g. if foo/bar has been
        # deleted, then we have {'foo':{'bar':{}}}
        deleted_paths = {}
        # XXX: Note that entries may have the wrong kind because the entry does
        # not reflect the status on disk.
        work_inv = self.work_tree.inventory
        # NB: entries will include entries within the excluded ids/paths
        # because iter_entries_by_dir has no 'exclude' facility today.
        entries = work_inv.iter_entries_by_dir(
            specific_file_ids=self.specific_file_ids, yield_parents=True)
        for path, existing_ie in entries:
            file_id = existing_ie.file_id
            name = existing_ie.name
            parent_id = existing_ie.parent_id
            kind = existing_ie.kind
            # Skip files that have been deleted from the working tree.
            # The deleted path ids are also recorded so they can be explicitly
            # unversioned later.
            if deleted_paths:
                path_segments = splitpath(path)
                deleted_dict = deleted_paths
                for segment in path_segments:
                    deleted_dict = deleted_dict.get(segment, None)
                    if not deleted_dict:
                        # We either took a path not present in the dict
                        # (deleted_dict was None), or we've reached an empty
                        # child dir in the dict, so are now a sub-path.
                        break
                else:
                    deleted_dict = None
                if deleted_dict is not None:
                    # the path has a deleted parent, do not add it.
                    continue
            if exclude and is_inside_any(exclude, path):
                # Skip excluded paths. Excluded paths are processed by
                # _update_builder_with_changes.
                continue
            content_summary = self.work_tree.path_content_summary(path)
            kind = content_summary[0]
            # Note that when a filter of specific files is given, we must only
            # skip/record deleted files matching that filter.
            if not specific_files or is_inside_any(specific_files, path):
                if kind == 'missing':
                    if not deleted_paths:
                        # path won't have been split yet.
                        path_segments = splitpath(path)
                    deleted_dict = deleted_paths
                    for segment in path_segments:
                        deleted_dict = deleted_dict.setdefault(segment, {})
                    self.reporter.missing(path)
                    self._next_progress_entry()
                    deleted_ids.append(file_id)
                    continue
            # TODO: have the builder do the nested commit just-in-time IF and
            # only if needed.
            if kind == 'tree-reference':
                # enforce repository nested tree policy.
                if (not self.work_tree.supports_tree_reference() or
                    # repository does not support it either.
                    not self.branch.repository._format.supports_tree_reference):
                    kind = 'directory'
                    content_summary = (kind, None, None, None)
                elif self.recursive == 'down':
                    nested_revision_id = self._commit_nested_tree(
                        file_id, path)
                    content_summary = (kind, None, None, nested_revision_id)
                else:
                    nested_revision_id = self.work_tree.get_reference_revision(file_id)
                    content_summary = (kind, None, None, nested_revision_id)

            # Record an entry for this item
            # Note: I don't particularly want to have the existing_ie
            # parameter but the test suite currently (28-Jun-07) breaks
            # without it thanks to a unicode normalisation issue. :-(
            definitely_changed = kind != existing_ie.kind
            self._record_entry(path, file_id, specific_files, kind, name,
                parent_id, definitely_changed, existing_ie, report_changes,
                content_summary)
        # Unversion IDs that were found to be deleted
        self.deleted_ids = deleted_ids
    def _commit_nested_tree(self, file_id, path):
        "Commit a nested tree."
        sub_tree = self.work_tree.get_nested_tree(file_id, path)
        # FIXME: be more comprehensive here:
        # this works when both trees are in --trees repository,
        # but when both are bound to a different repository,
        # it fails; a better way of approaching this is to
        # finally implement the explicit-caches approach design
        # a while back - RBC 20070306.
        if sub_tree.branch.repository.has_same_location(
            self.work_tree.branch.repository):
            sub_tree.branch.repository = \
                self.work_tree.branch.repository
        try:
            return sub_tree.commit(message=None, revprops=self.revprops,
                recursive=self.recursive,
                message_callback=self.message_callback,
                timestamp=self.timestamp, timezone=self.timezone,
                committer=self.committer,
                allow_pointless=self.allow_pointless,
                strict=self.strict, verbose=self.verbose,
                local=self.local, reporter=self.reporter)
        except errors.PointlessCommit:
            return self.work_tree.get_reference_revision(file_id)
    def _record_entry(self, path, file_id, specific_files, kind, name,
        parent_id, definitely_changed, existing_ie, report_changes,
        content_summary):
        "Record the new inventory entry for a path if any."
        # mutter('check %s {%s}', path, file_id)
        # mutter('%s selected for commit', path)
        if definitely_changed or existing_ie is None:
            ie = make_entry(kind, name, parent_id, file_id)
        else:
            ie = existing_ie.copy()
            ie.revision = None
        # For carried over entries we don't care about the fs hash - the repo
        # isn't generating a sha, so we're not saving computation time.
        _, _, fs_hash = self.builder.record_entry_contents(
            ie, self.parent_invs, path, self.work_tree, content_summary)
        if report_changes:
            self._report_change(ie, path)
        if fs_hash:
            self.work_tree._observed_sha1(ie.file_id, path, fs_hash)
        return ie
    def _report_change(self, ie, path):
        """Report a change to the user.

        The change that has occurred is described relative to the basis
        inventory.
        """
        if (self.basis_inv.has_id(ie.file_id)):
            basis_ie = self.basis_inv[ie.file_id]
        else:
            basis_ie = None
        change = ie.describe_change(basis_ie, ie)
        if change in (InventoryEntry.RENAMED,
            InventoryEntry.MODIFIED_AND_RENAMED):
            old_path = self.basis_inv.id2path(ie.file_id)
            self.reporter.renamed(change, old_path, path)
            self._next_progress_entry()
        else:
            if change == 'unchanged':
                return
            self.reporter.snapshot_change(change, path)
            self._next_progress_entry()
    def _set_progress_stage(self, name, counter=False):
        """Set the progress stage and emit an update to the progress bar."""
        self.pb_stage_name = name
        self.pb_stage_count += 1
        if counter:
            self.pb_entries_count = 0
        else:
            self.pb_entries_count = None
        self._emit_progress()
    def _next_progress_entry(self):
        """Emit an update to the progress bar and increment the entry count."""
        self.pb_entries_count += 1
        self._emit_progress()

    def _emit_progress(self):
        if self.pb_entries_count is not None:
            text = "%s [%d] - Stage" % (self.pb_stage_name,
                self.pb_entries_count)
        else:
            text = "%s - Stage" % (self.pb_stage_name, )
        self.pb.update(text, self.pb_stage_count, self.pb_stage_total)
    def _set_specific_file_ids(self):
        """Populate self.specific_file_ids if we will use it."""
        if not self.use_record_iter_changes:
            # If provided, ensure the specified files are versioned
            if self.specific_files is not None:
                # Note: This routine is being called because it raises
                # PathNotVersionedError as a side effect of finding the IDs. We
                # later use the ids we found as input to the working tree
                # inventory iterator, so we only consider those ids rather than
                # examining the whole tree again.
                # XXX: Dont we have filter_unversioned to do this more
                # cheaply?
                self.specific_file_ids = tree.find_ids_across_trees(
                    self.specific_files, [self.basis_tree, self.work_tree])
            else:
                self.specific_file_ids = None