# Copyright (C) 2005-2010 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

# FIXME: This refactoring of the workingtree code doesn't seem to keep
# the WorkingTree's copy of the inventory in sync with the branch. The
# branch modifies its working inventory when it does a commit to make
# missing files permanently removed.

# TODO: Maybe also keep the full path of the entry, and the children?
# But those depend on its position within a particular inventory, and
# it would be nice not to need to hold the backpointer here.

# This should really be an id randomly assigned when the tree is
# created, but it's not for now.
from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """
import collections
import copy

from bzrlib import (
    chk_map,
    errors,
    generate_ids,
    osutils,
    )
""")
from bzrlib.errors import (
    BzrCheckError,
    BzrError,
    )
from bzrlib.trace import mutter
from bzrlib.static_tuple import StaticTuple
class InventoryEntry(object):
"""Description of a versioned file.

An InventoryEntry has the following fields, which are also
present in the XML inventory-entry element:

name
    (within the parent directory)

parent_id
    file_id of the parent directory, or ROOT_ID

revision
    the revision_id in which this variation of this file was
    introduced.

executable
    Indicates that this file should be executable on systems
    which support it.

text_sha1
    sha-1 of the text of the file

text_size
    size in bytes of the text of the file

(reading a version 4 tree created a text_id field.)

>>> i.add(InventoryDirectory('123', 'src', ROOT_ID))
InventoryDirectory('123', 'src', parent_id='TREE_ROOT', revision=None)
>>> i.add(InventoryFile('2323', 'hello.c', parent_id='123'))
InventoryFile('2323', 'hello.c', parent_id='123', sha1=None, len=None, revision=None)
>>> shouldbe = {0: '', 1: 'src', 2: 'src/hello.c'}
>>> for ix, j in enumerate(i.iter_entries()):
... print (j[0] == shouldbe[ix], j[1])
(True, InventoryDirectory('TREE_ROOT', u'', parent_id=None, revision=None))
(True, InventoryDirectory('123', 'src', parent_id='TREE_ROOT', revision=None))
(True, InventoryFile('2323', 'hello.c', parent_id='123', sha1=None, len=None, revision=None))
>>> i.add(InventoryFile('2324', 'bye.c', '123'))
InventoryFile('2324', 'bye.c', parent_id='123', sha1=None, len=None, revision=None)
>>> i.add(InventoryDirectory('2325', 'wibble', '123'))
InventoryDirectory('2325', 'wibble', parent_id='123', revision=None)
>>> i.path2id('src/wibble')
'2325'
>>> i.add(InventoryFile('2326', 'wibble.c', '2325'))
InventoryFile('2326', 'wibble.c', parent_id='2325', sha1=None, len=None, revision=None)
>>> i['2326']
InventoryFile('2326', 'wibble.c', parent_id='2325', sha1=None, len=None, revision=None)
>>> for path, entry in i.iter_entries():
>>> i.id2path('2326')
'src/wibble/wibble.c'
123
# Constants returned by describe_change()
125
# TODO: These should probably move to some kind of FileChangeDescription
126
# class; that's like what's inside a TreeDelta but we want to be able to
127
# generate them just for one file at a time.
129
MODIFIED_AND_RENAMED = 'modified and renamed'
131
__slots__ = ['file_id', 'revision', 'parent_id', 'name']
133
# Attributes that all InventoryEntry instances are expected to have, but
134
# that don't vary for all kinds of entry. (e.g. symlink_target is only
135
# relevant to InventoryLink, so there's no reason to make every
136
# InventoryFile instance allocate space to hold a value for it.)
137
# Attributes that only vary for files: executable, text_sha1, text_size,
143
# Attributes that only vary for symlinks: symlink_target
144
symlink_target = None
145
# Attributes that only vary for tree-references: reference_revision
146
reference_revision = None
149
def detect_changes(self, old_entry):
150
"""Return a (text_modified, meta_modified) from this to old_entry.
152
_read_tree_state must have been called on self and old_entry prior to
153
calling detect_changes.
157
def _diff(self, text_diff, from_label, tree, to_label, to_entry, to_tree,
158
output_to, reverse=False):
159
"""Perform a diff between two entries of the same kind."""
161
def parent_candidates(self, previous_inventories):
162
"""Find possible per-file graph parents.
164
This is currently defined by:
 - Select the last changed revision in the parent inventory.
 - To deal with a short-lived bug in bzr 0.8's development, two entries
   that have the same last-changed revision but different 'x' bit settings
   are reconciled by clearing the executable flag on both.
170
# revision:ie mapping for each ie found in previous_inventories.
172
# identify candidate head revision ids.
173
for inv in previous_inventories:
174
if self.file_id in inv:
175
ie = inv[self.file_id]
176
if ie.revision in candidates:
177
# same revision value in two different inventories:
178
# correct possible inconsistencies:
179
# * there was a bug in revision updates with 'x' bit
182
if candidates[ie.revision].executable != ie.executable:
183
candidates[ie.revision].executable = False
184
ie.executable = False
185
except AttributeError:
188
# add this revision as a candidate.
189
candidates[ie.revision] = ie
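# Illustrative sketch (not from the original module): parent_candidates()
# returns a {revision_id: entry} map with one representative entry per
# distinct last-changed revision seen across the given inventories.
#
#     inv1, inv2 = Inventory(), Inventory()
#     f1 = InventoryFile('f-id', 'a.c', ROOT_ID); f1.revision = 'rev-1'
#     f2 = InventoryFile('f-id', 'a.c', ROOT_ID); f2.revision = 'rev-1'
#     inv1.add(f1); inv2.add(f2)
#     ie = InventoryFile('f-id', 'a.c', ROOT_ID)
#     ie.parent_candidates([inv1, inv2])   # -> {'rev-1': f1}, duplicates collapse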
"""Return true if the object this entry represents has textual data.
195
Note that textual data includes binary content.
197
Also note that all entries get weave files created for them.
198
This attribute is primarily used when upgrading from old trees that
199
did not have the weave index for all inventory entries.
203
def __init__(self, file_id, name, parent_id):
204
"""Create an InventoryEntry
206
The filename must be a single component, relative to the
207
parent directory; it cannot be a whole path or relative name.
209
>>> e = InventoryFile('123', 'hello.c', ROOT_ID)
214
>>> e = InventoryFile('123', 'src/hello.c', ROOT_ID)
215
Traceback (most recent call last):
216
InvalidEntryName: Invalid entry name: src/hello.c
218
if '/' in name or '\\' in name:
219
raise errors.InvalidEntryName(name=name)
220
self.file_id = file_id
223
self.parent_id = parent_id
225
def kind_character(self):
226
"""Return a short kind indicator useful for appending to names."""
227
raise BzrError('unknown kind %r' % self.kind)
229
known_kinds = ('file', 'directory', 'symlink')
231
def sorted_children(self):
232
return sorted(self.children.items())
235
def versionable_kind(kind):
236
return (kind in ('file', 'directory', 'symlink', 'tree-reference'))
238
def check(self, checker, rev_id, inv):
239
"""Check this inventory entry is intact.
241
This is a template method, override _check for kind specific
244
:param checker: Check object providing context for the checks;
245
can be used to find out what parts of the repository have already
247
:param rev_id: Revision id from which this InventoryEntry was loaded.
248
Not necessarily the last-changed revision for this file.
249
:param inv: Inventory from which the entry was loaded.
251
if self.parent_id is not None:
252
if not inv.has_id(self.parent_id):
253
raise BzrCheckError('missing parent {%s} in inventory for revision {%s}'
254
% (self.parent_id, rev_id))
255
checker._add_entry_to_text_key_references(inv, self)
256
self._check(checker, rev_id)
258
def _check(self, checker, rev_id):
259
"""Check this inventory entry for kind specific errors."""
260
checker._report_items.append(
261
'unknown entry kind %r in revision {%s}' % (self.kind, rev_id))
264
"""Clone this inventory entry."""
265
raise NotImplementedError
268
def describe_change(old_entry, new_entry):
269
"""Describe the change between old_entry and this.
271
This smells of being an InterInventoryEntry situation, but as its
272
the first one, we're making it a static method for now.
274
An entry with a different parent, or different name is considered
275
to be renamed. Reparenting is an internal detail.
276
Note that renaming the parent does not trigger a rename for the
279
# TODO: Perhaps return an object rather than just a string
280
if old_entry is new_entry:
281
# also the case of both being None
283
elif old_entry is None:
285
elif new_entry is None:
287
if old_entry.kind != new_entry.kind:
289
text_modified, meta_modified = new_entry.detect_changes(old_entry)
290
if text_modified or meta_modified:
294
# TODO 20060511 (mbp, rbc) factor out 'detect_rename' here.
295
if old_entry.parent_id != new_entry.parent_id:
297
elif old_entry.name != new_entry.name:
301
if renamed and not modified:
302
return InventoryEntry.RENAMED
303
if modified and not renamed:
305
if modified and renamed:
306
return InventoryEntry.MODIFIED_AND_RENAMED
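# Illustrative sketch (not from the original module): describe_change()
# classifying a pure rename (same file_id and content, different name).
#
#     old = InventoryFile('f-id', 'foo.c', ROOT_ID)
#     new = InventoryFile('f-id', 'bar.c', ROOT_ID)
#     InventoryEntry.describe_change(old, new)   # -> InventoryEntry.RENAMED
#     InventoryEntry.describe_change(old, old)   # same object: reported as unchanged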
return ("%s(%r, %r, parent_id=%r, revision=%r)"
311
% (self.__class__.__name__,
317
def __eq__(self, other):
319
# For the case when objects are cached
321
if not isinstance(other, InventoryEntry):
322
return NotImplemented
324
return ((self.file_id == other.file_id)
325
and (self.name == other.name)
326
and (other.symlink_target == self.symlink_target)
327
and (self.text_sha1 == other.text_sha1)
328
and (self.text_size == other.text_size)
329
and (self.text_id == other.text_id)
330
and (self.parent_id == other.parent_id)
331
and (self.kind == other.kind)
332
and (self.revision == other.revision)
333
and (self.executable == other.executable)
334
and (self.reference_revision == other.reference_revision)
337
def __ne__(self, other):
338
return not (self == other)
341
raise ValueError('not hashable')
343
def _unchanged(self, previous_ie):
344
"""Has this entry changed relative to previous_ie.
346
This method should be overridden in child classes.
349
# different inv parent
350
if previous_ie.parent_id != self.parent_id:
353
elif previous_ie.name != self.name:
355
elif previous_ie.kind != self.kind:
359
def _read_tree_state(self, path, work_tree):
360
"""Populate fields in the inventory entry from the given tree.
362
Note that this should be modified to be a noop on virtual trees
363
as all entries created there are prepopulated.
365
# TODO: Rather than running this manually, we should check the
366
# working sha1 and other expensive properties when they're
367
# first requested, or preload them if they're already known
368
pass # nothing to do by default
370
def _forget_tree_state(self):
374
class InventoryDirectory(InventoryEntry):
375
"""A directory in an inventory."""
377
__slots__ = ['children']
381
def _check(self, checker, rev_id):
382
"""See InventoryEntry._check"""
383
# In non rich root repositories we do not expect a file graph for the
385
if self.name == '' and not checker.rich_roots:
387
# Directories are stored as an empty file, but the file should exist
388
# to provide a per-fileid log. The hash of every directory content is
389
# "da..." below (the sha1sum of '').
390
checker.add_pending_item(rev_id,
391
('texts', self.file_id, self.revision), 'text',
392
'da39a3ee5e6b4b0d3255bfef95601890afd80709')
395
other = InventoryDirectory(self.file_id, self.name, self.parent_id)
396
other.revision = self.revision
397
# note that children are *not* copied; they're pulled across when
401
def __init__(self, file_id, name, parent_id):
402
super(InventoryDirectory, self).__init__(file_id, name, parent_id)
405
def kind_character(self):
406
"""See InventoryEntry.kind_character."""
410
class InventoryFile(InventoryEntry):
411
"""A file in an inventory."""
413
__slots__ = ['text_sha1', 'text_size', 'text_id', 'executable']
417
def __init__(self, file_id, name, parent_id):
418
super(InventoryFile, self).__init__(file_id, name, parent_id)
419
self.text_sha1 = None
420
self.text_size = None
422
self.executable = False
424
def _check(self, checker, tree_revision_id):
425
"""See InventoryEntry._check"""
426
# TODO: check size too.
427
checker.add_pending_item(tree_revision_id,
428
('texts', self.file_id, self.revision), 'text',
430
if self.text_size is None:
431
checker._report_items.append(
432
'fileid {%s} in {%s} has None for text_size' % (self.file_id,
436
other = InventoryFile(self.file_id, self.name, self.parent_id)
437
other.executable = self.executable
438
other.text_id = self.text_id
439
other.text_sha1 = self.text_sha1
440
other.text_size = self.text_size
441
other.revision = self.revision
444
def detect_changes(self, old_entry):
445
"""See InventoryEntry.detect_changes."""
446
text_modified = (self.text_sha1 != old_entry.text_sha1)
447
meta_modified = (self.executable != old_entry.executable)
448
return text_modified, meta_modified
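# Illustrative sketch (not from the original module): detect_changes()
# compares content (sha1) and metadata (executable bit) separately.
#
#     old = InventoryFile('f-id', 'hello.c', ROOT_ID); old.text_sha1 = 'aaa'
#     new = InventoryFile('f-id', 'hello.c', ROOT_ID); new.text_sha1 = 'bbb'
#     new.executable = True
#     new.detect_changes(old)   # -> (True, True): text and 'x' bit both changed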
def _diff(self, text_diff, from_label, tree, to_label, to_entry, to_tree,
451
output_to, reverse=False):
452
"""See InventoryEntry._diff."""
453
from bzrlib.diff import DiffText
454
from_file_id = self.file_id
456
to_file_id = to_entry.file_id
460
to_file_id, from_file_id = from_file_id, to_file_id
461
tree, to_tree = to_tree, tree
462
from_label, to_label = to_label, from_label
463
differ = DiffText(tree, to_tree, output_to, 'utf-8', '', '',
465
return differ.diff_text(from_file_id, to_file_id, from_label, to_label)
468
"""See InventoryEntry.has_text."""
471
def kind_character(self):
472
"""See InventoryEntry.kind_character."""
475
def _read_tree_state(self, path, work_tree):
476
"""See InventoryEntry._read_tree_state."""
477
self.text_sha1 = work_tree.get_file_sha1(self.file_id, path=path)
478
# FIXME: 20050930 probe for the text size when getting sha1
479
# in _read_tree_state
480
self.executable = work_tree.is_executable(self.file_id, path=path)
483
return ("%s(%r, %r, parent_id=%r, sha1=%r, len=%s, revision=%s)"
484
% (self.__class__.__name__,
492
def _forget_tree_state(self):
493
self.text_sha1 = None
495
def _unchanged(self, previous_ie):
496
"""See InventoryEntry._unchanged."""
497
compatible = super(InventoryFile, self)._unchanged(previous_ie)
498
if self.text_sha1 != previous_ie.text_sha1:
501
# FIXME: 20050930 probe for the text size when getting sha1
502
# in _read_tree_state
503
self.text_size = previous_ie.text_size
504
if self.executable != previous_ie.executable:
509
class InventoryLink(InventoryEntry):
510
"""A file in an inventory."""
512
__slots__ = ['symlink_target']
516
def __init__(self, file_id, name, parent_id):
517
super(InventoryLink, self).__init__(file_id, name, parent_id)
518
self.symlink_target = None
520
def _check(self, checker, tree_revision_id):
521
"""See InventoryEntry._check"""
522
if self.symlink_target is None:
523
checker._report_items.append(
524
'symlink {%s} has no target in revision {%s}'
525
% (self.file_id, tree_revision_id))
526
# Symlinks are stored as ''
527
checker.add_pending_item(tree_revision_id,
528
('texts', self.file_id, self.revision), 'text',
529
'da39a3ee5e6b4b0d3255bfef95601890afd80709')
532
other = InventoryLink(self.file_id, self.name, self.parent_id)
533
other.symlink_target = self.symlink_target
534
other.revision = self.revision
537
def detect_changes(self, old_entry):
538
"""See InventoryEntry.detect_changes."""
539
# FIXME: which _modified field should we use ? RBC 20051003
540
text_modified = (self.symlink_target != old_entry.symlink_target)
542
mutter(" symlink target changed")
543
meta_modified = False
544
return text_modified, meta_modified
546
def _diff(self, text_diff, from_label, tree, to_label, to_entry, to_tree,
547
output_to, reverse=False):
548
"""See InventoryEntry._diff."""
549
from bzrlib.diff import DiffSymlink
550
old_target = self.symlink_target
551
if to_entry is not None:
552
new_target = to_entry.symlink_target
561
new_target, old_target = old_target, new_target
562
differ = DiffSymlink(old_tree, new_tree, output_to)
563
return differ.diff_symlink(old_target, new_target)
565
def kind_character(self):
566
"""See InventoryEntry.kind_character."""
569
def _read_tree_state(self, path, work_tree):
570
"""See InventoryEntry._read_tree_state."""
571
self.symlink_target = work_tree.get_symlink_target(self.file_id)
573
def _forget_tree_state(self):
574
self.symlink_target = None
576
def _unchanged(self, previous_ie):
577
"""See InventoryEntry._unchanged."""
578
compatible = super(InventoryLink, self)._unchanged(previous_ie)
579
if self.symlink_target != previous_ie.symlink_target:
584
class TreeReference(InventoryEntry):
586
__slots__ = ['reference_revision']
588
kind = 'tree-reference'
590
def __init__(self, file_id, name, parent_id, revision=None,
591
reference_revision=None):
592
InventoryEntry.__init__(self, file_id, name, parent_id)
593
self.revision = revision
594
self.reference_revision = reference_revision
597
return TreeReference(self.file_id, self.name, self.parent_id,
598
self.revision, self.reference_revision)
600
def _read_tree_state(self, path, work_tree):
601
"""Populate fields in the inventory entry from the given tree.
603
self.reference_revision = work_tree.get_reference_revision(
606
def _forget_tree_state(self):
607
self.reference_revision = None
609
def _unchanged(self, previous_ie):
610
"""See InventoryEntry._unchanged."""
611
compatible = super(TreeReference, self)._unchanged(previous_ie)
612
if self.reference_revision != previous_ie.reference_revision:
617
class CommonInventory(object):
618
"""Basic inventory logic, defined in terms of primitives like has_id.
620
An inventory is the metadata about the contents of a tree.
622
This is broadly a map from file_id to entries such as directories, files,
623
symlinks and tree references. Each entry maintains its own metadata like
624
SHA1 and length for files, or children for a directory.
626
Entries can be looked up either by path or by file_id.
628
InventoryEntry objects must not be modified after they are
629
inserted, other than through the Inventory API.
632
def __contains__(self, file_id):
633
"""True if this entry contains a file with given id.
635
>>> inv = Inventory()
636
>>> inv.add(InventoryFile('123', 'foo.c', ROOT_ID))
637
InventoryFile('123', 'foo.c', parent_id='TREE_ROOT', sha1=None, len=None, revision=None)
643
Note that this method along with __iter__ are not encouraged for use as
644
they are less clear than specific query methods - they may be removed
647
return self.has_id(file_id)
649
def has_filename(self, filename):
650
return bool(self.path2id(filename))
652
def id2path(self, file_id):
653
"""Return as a string the path to file_id.
656
>>> e = i.add(InventoryDirectory('src-id', 'src', ROOT_ID))
657
>>> e = i.add(InventoryFile('foo-id', 'foo.c', parent_id='src-id'))
658
>>> print i.id2path('foo-id')
661
:raises NoSuchId: If file_id is not present in the inventory.
663
# get all names, skipping root
664
return '/'.join(reversed(
665
[parent.name for parent in
666
self._iter_file_id_parents(file_id)][:-1]))
668
def iter_entries(self, from_dir=None, recursive=True):
669
"""Return (path, entry) pairs, in order by name.
671
:param from_dir: if None, start from the root,
672
otherwise start from this directory (either file-id or entry)
673
:param recursive: recurse into directories or not
676
if self.root is None:
680
elif isinstance(from_dir, basestring):
681
from_dir = self[from_dir]
683
# unrolling the recursive calls changed the time from
684
# 440ms/663ms (inline/total) to 116ms/116ms
685
children = from_dir.children.items()
688
for name, ie in children:
691
children = collections.deque(children)
692
stack = [(u'', children)]
694
from_dir_relpath, children = stack[-1]
697
name, ie = children.popleft()
699
# we know that from_dir_relpath never ends in a slash
700
# and 'f' doesn't begin with one, we can do a string op, rather
701
# than the checks of pathjoin(), though this means that all paths
703
path = from_dir_relpath + '/' + name
707
if ie.kind != 'directory':
710
# But do this child first
711
new_children = ie.children.items()
713
new_children = collections.deque(new_children)
714
stack.append((path, new_children))
715
# Break out of inner loop, so that we start outer loop with child
718
# if we finished all children, pop it off the stack
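# Illustrative sketch (not from the original module): iter_entries() walks
# depth-first, yielding the root first and then children sorted by name.
#
#     inv = Inventory()
#     inv.add(InventoryDirectory('a-id', 'a', ROOT_ID))
#     inv.add(InventoryFile('b-id', 'b.txt', 'a-id'))
#     inv.add(InventoryFile('z-id', 'z.txt', ROOT_ID))
#     [path for path, ie in inv.iter_entries()]
#     # -> paths '', 'a', 'a/b.txt', 'z.txt' in that order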
def iter_entries_by_dir(self, from_dir=None, specific_file_ids=None,
722
yield_parents=False):
723
"""Iterate over the entries in a directory first order.
725
This returns all entries for a directory before returning
726
the entries for children of a directory. This is not
727
lexicographically sorted order, and is a hybrid between
728
depth-first and breadth-first.
730
:param yield_parents: If True, yield the parents from the root leading
731
down to specific_file_ids that have been requested. This has no
732
impact if specific_file_ids is None.
733
:return: This yields (path, entry) pairs
735
if specific_file_ids and not isinstance(specific_file_ids, set):
736
specific_file_ids = set(specific_file_ids)
737
# TODO? Perhaps this should return the from_dir so that the root is
738
# yielded? or maybe an option?
740
if self.root is None:
742
# Optimize a common case
743
if (not yield_parents and specific_file_ids is not None and
744
len(specific_file_ids) == 1):
745
file_id = list(specific_file_ids)[0]
747
yield self.id2path(file_id), self[file_id]
750
if (specific_file_ids is None or yield_parents or
751
self.root.file_id in specific_file_ids):
753
elif isinstance(from_dir, basestring):
754
from_dir = self[from_dir]
756
if specific_file_ids is not None:
757
# TODO: jam 20070302 This could really be done as a loop rather
758
# than a bunch of recursive calls.
761
def add_ancestors(file_id):
762
if file_id not in byid:
764
parent_id = byid[file_id].parent_id
765
if parent_id is None:
767
if parent_id not in parents:
768
parents.add(parent_id)
769
add_ancestors(parent_id)
770
for file_id in specific_file_ids:
771
add_ancestors(file_id)
775
stack = [(u'', from_dir)]
777
cur_relpath, cur_dir = stack.pop()
780
for child_name, child_ie in sorted(cur_dir.children.iteritems()):
782
child_relpath = cur_relpath + child_name
784
if (specific_file_ids is None or
785
child_ie.file_id in specific_file_ids or
786
(yield_parents and child_ie.file_id in parents)):
787
yield child_relpath, child_ie
789
if child_ie.kind == 'directory':
790
if parents is None or child_ie.file_id in parents:
791
child_dirs.append((child_relpath+'/', child_ie))
792
stack.extend(reversed(child_dirs))
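# Illustrative sketch (not from the original module): unlike iter_entries(),
# iter_entries_by_dir() yields every entry of a directory before descending
# into any of its subdirectories. For the inventory from the sketch above:
#
#     [path for path, ie in inv.iter_entries_by_dir()]
#     # iter_entries():        '', 'a', 'a/b.txt', 'z.txt'
#     # iter_entries_by_dir(): '', 'a', 'z.txt', 'a/b.txt'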
def _make_delta(self, old):
795
"""Make an inventory delta from two inventories."""
798
adds = new_ids - old_ids
799
deletes = old_ids - new_ids
800
common = old_ids.intersection(new_ids)
802
for file_id in deletes:
803
delta.append((old.id2path(file_id), None, file_id, None))
805
delta.append((None, self.id2path(file_id), file_id, self[file_id]))
806
for file_id in common:
807
if old[file_id] != self[file_id]:
808
delta.append((old.id2path(file_id), self.id2path(file_id),
809
file_id, self[file_id]))
812
def _get_mutable_inventory(self):
813
"""Returns a mutable copy of the object.
815
Some inventories are immutable, yet working trees, for example, need
to mutate existing inventories instead of creating a new one.
818
raise NotImplementedError(self._get_mutable_inventory)
820
def make_entry(self, kind, name, parent_id, file_id=None):
821
"""Simple thunk to bzrlib.inventory.make_entry."""
822
return make_entry(kind, name, parent_id, file_id)
825
"""Return list of (path, ie) for all entries except the root.
827
This may be faster than iter_entries.
830
def descend(dir_ie, dir_path):
831
kids = dir_ie.children.items()
833
for name, ie in kids:
834
child_path = osutils.pathjoin(dir_path, name)
835
accum.append((child_path, ie))
836
if ie.kind == 'directory':
837
descend(ie, child_path)
839
descend(self.root, u'')
842
def directories(self):
843
"""Return (path, entry) pairs for all directories, including the root.
846
def descend(parent_ie, parent_path):
847
accum.append((parent_path, parent_ie))
849
kids = [(ie.name, ie) for ie in parent_ie.children.itervalues() if ie.kind == 'directory']
852
for name, child_ie in kids:
853
child_path = osutils.pathjoin(parent_path, name)
854
descend(child_ie, child_path)
855
descend(self.root, u'')
858
def path2id(self, relpath):
859
"""Walk down through directories to return entry of last component.
861
:param relpath: may be either a list of path components, or a single
862
string, in which case it is automatically split.
864
This returns the entry of the last component in the path,
865
which may be either a file or a directory.
867
Returns None IFF the path is not found.
869
if isinstance(relpath, basestring):
870
names = osutils.splitpath(relpath)
876
except errors.NoSuchId:
877
# root doesn't exist yet so nothing else can
883
children = getattr(parent, 'children', None)
892
return parent.file_id
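# Illustrative sketch (not from the original module): path2id() accepts either
# a relative path string or a pre-split list of components, and returns None
# for paths that are not versioned.
#
#     inv = Inventory()
#     inv.add(InventoryDirectory('src-id', 'src', ROOT_ID))
#     inv.add(InventoryFile('hello-id', 'hello.c', 'src-id'))
#     inv.path2id('src/hello.c')        # -> 'hello-id'
#     inv.path2id(['src', 'hello.c'])   # -> 'hello-id'
#     inv.path2id('src/missing.c')      # -> None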
def filter(self, specific_fileids):
895
"""Get an inventory view filtered against a set of file-ids.
897
Children of directories and parents are included.
899
The result may or may not reference the underlying inventory
900
so it should be treated as immutable.
902
interesting_parents = set()
903
for fileid in specific_fileids:
905
interesting_parents.update(self.get_idpath(fileid))
906
except errors.NoSuchId:
907
# This fileid is not in the inventory - that's ok
909
entries = self.iter_entries()
910
if self.root is None:
911
return Inventory(root_id=None)
912
other = Inventory(entries.next()[1].file_id)
913
other.root.revision = self.root.revision
914
other.revision_id = self.revision_id
915
directories_to_expand = set()
916
for path, entry in entries:
917
file_id = entry.file_id
918
if (file_id in specific_fileids
919
or entry.parent_id in directories_to_expand):
920
if entry.kind == 'directory':
921
directories_to_expand.add(file_id)
922
elif file_id not in interesting_parents:
924
other.add(entry.copy())
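# Illustrative sketch (not from the original module): filtering on a single
# file keeps that file plus every directory on the path down to it, while
# unrelated entries are dropped; filtering on a directory also keeps its
# children. Continuing the inventory built in the path2id sketch above:
#
#     sub = inv.filter(['hello-id'])
#     sub.id2path('hello-id')   # -> 'src/hello.c' ('src-id' is kept as a parent)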
def get_idpath(self, file_id):
928
"""Return a list of file_ids for the path to an entry.
930
The list contains one element for each directory followed by
931
the id of the file itself. So the length of the returned list
932
is equal to the depth of the file in the tree, counting the
933
root directory as depth 1.
936
for parent in self._iter_file_id_parents(file_id):
937
p.insert(0, parent.file_id)
941
class Inventory(CommonInventory):
942
"""Mutable dict based in-memory inventory.
944
We never store the full path to a file, because renaming a directory
945
implicitly moves all of its contents. This class internally maintains a
946
lookup tree that allows the children under a directory to be
949
>>> inv = Inventory()
950
>>> inv.add(InventoryFile('123-123', 'hello.c', ROOT_ID))
951
InventoryFile('123-123', 'hello.c', parent_id='TREE_ROOT', sha1=None, len=None, revision=None)
952
>>> inv['123-123'].name
955
Id's may be looked up from paths:
957
>>> inv.path2id('hello.c')
962
There are iterators over the contents:
964
>>> [entry[0] for entry in inv.iter_entries()]
968
def __init__(self, root_id=ROOT_ID, revision_id=None):
969
"""Create or read an inventory.
971
If a working directory is specified, the inventory is read
972
from there. If the file is specified, read from that. If not,
973
the inventory is created empty.
975
The inventory is created with a default root directory, with
978
if root_id is not None:
979
self._set_root(InventoryDirectory(root_id, u'', None))
983
self.revision_id = revision_id
986
# More than one page of output is no longer useful for debugging
989
contents = repr(self._byid)
990
if len(contents) > max_len:
991
contents = contents[:(max_len-len(closing))] + closing
992
return "<Inventory object at %x, contents=%r>" % (id(self), contents)
994
def apply_delta(self, delta):
995
"""Apply a delta to this inventory.
997
See the inventory developers documentation for the theory behind
1000
If delta application fails the inventory is left in an indeterminate
1001
state and must not be used.
1003
:param delta: A list of changes to apply. After all the changes are
1004
applied the final inventory must be internally consistent, but it
1005
is ok to supply changes which, if only half-applied would have an
1006
invalid result - such as supplying two changes which rename two
1007
files, 'A' and 'B' with each other : [('A', 'B', 'A-id', a_entry),
1008
('B', 'A', 'B-id', b_entry)].
1010
Each change is a tuple, of the form (old_path, new_path, file_id,
1013
When new_path is None, the change indicates the removal of an entry
1014
from the inventory and new_entry will be ignored (using None is
1015
appropriate). If new_path is not None, then new_entry must be an
1016
InventoryEntry instance, which will be incorporated into the
1017
inventory (and replace any existing entry with the same file id).
1019
When old_path is None, the change indicates the addition of
1020
a new entry to the inventory.
1022
When neither new_path nor old_path are None, the change is a
1023
modification to an entry, such as a rename, reparent, kind change
1026
The children attribute of new_entry is ignored. This is because
1027
this method preserves children automatically across alterations to
1028
the parent of the children, and cases where the parent id of a
1029
child is changing require the child to be passed in as a separate
1030
change regardless. E.g. in the recursive deletion of a directory -
1031
the directory's children must be included in the delta, or the
1032
final inventory will be invalid.
1034
Note that a file_id must only appear once within a given delta.
1035
An AssertionError is raised otherwise.
1037
# Check that the delta is legal. It would be nice if this could be
1038
# done within the loops below but it's safer to validate the delta
1039
# before starting to mutate the inventory, as there isn't a rollback
1041
list(_check_delta_unique_ids(_check_delta_unique_new_paths(
1042
_check_delta_unique_old_paths(_check_delta_ids_match_entry(
1043
_check_delta_ids_are_valid(
1044
_check_delta_new_path_entry_both_or_None(
1048
# Remove all affected items which were in the original inventory,
1049
# starting with the longest paths, thus ensuring parents are examined
1050
# after their children, which means that everything we examine has no
1051
# modified children remaining by the time we examine it.
1052
for old_path, file_id in sorted(((op, f) for op, np, f, e in delta
1053
if op is not None), reverse=True):
1054
# Preserve unaltered children of file_id for later reinsertion.
1055
file_id_children = getattr(self[file_id], 'children', {})
1056
if len(file_id_children):
1057
children[file_id] = file_id_children
1058
if self.id2path(file_id) != old_path:
1059
raise errors.InconsistentDelta(old_path, file_id,
1060
"Entry was at wrong other path %r." % self.id2path(file_id))
1061
# Remove file_id and the unaltered children. If file_id is not
1062
# being deleted it will be reinserted back later.
1063
self.remove_recursive_id(file_id)
1064
# Insert all affected which should be in the new inventory, reattaching
1065
# their children if they had any. This is done from shortest path to
1066
# longest, ensuring that items which were modified and whose parents in
1067
# the resulting inventory were also modified, are inserted after their
1069
for new_path, f, new_entry in sorted((np, f, e) for op, np, f, e in
1070
delta if np is not None):
1071
if new_entry.kind == 'directory':
1072
# Pop the child, to allow detection of children whose
# parents were deleted and which were not reattached to a new
1075
replacement = InventoryDirectory(new_entry.file_id,
1076
new_entry.name, new_entry.parent_id)
1077
replacement.revision = new_entry.revision
1078
replacement.children = children.pop(replacement.file_id, {})
1079
new_entry = replacement
1082
except errors.DuplicateFileId:
1083
raise errors.InconsistentDelta(new_path, new_entry.file_id,
1084
"New id is already present in target.")
1085
except AttributeError:
1086
raise errors.InconsistentDelta(new_path, new_entry.file_id,
1087
"Parent is not a directory.")
1088
if self.id2path(new_entry.file_id) != new_path:
1089
raise errors.InconsistentDelta(new_path, new_entry.file_id,
1090
"New path is not consistent with parent path.")
1092
# Get the parent id that was deleted
1093
parent_id, children = children.popitem()
1094
raise errors.InconsistentDelta("<deleted>", parent_id,
1095
"The file id was deleted but its children were not deleted.")
def create_by_apply_delta(self, inventory_delta, new_revision_id,
1098
propagate_caches=False):
1099
"""See CHKInventory.create_by_apply_delta()"""
1100
new_inv = self.copy()
1101
new_inv.apply_delta(inventory_delta)
1102
new_inv.revision_id = new_revision_id
1105
def _set_root(self, ie):
1107
self._byid = {self.root.file_id: self.root}
1110
# TODO: jam 20051218 Should copy also copy the revision_id?
1111
entries = self.iter_entries()
1112
if self.root is None:
1113
return Inventory(root_id=None)
1114
other = Inventory(entries.next()[1].file_id)
1115
other.root.revision = self.root.revision
1116
# copy recursively so we know directories will be added before
1117
# their children. There are more efficient ways than this...
1118
for path, entry in entries:
1119
other.add(entry.copy())
1122
def _get_mutable_inventory(self):
1123
"""See CommonInventory._get_mutable_inventory."""
1124
return copy.deepcopy(self)
1127
"""Iterate over all file-ids."""
1128
return iter(self._byid)
1130
def iter_just_entries(self):
1131
"""Iterate over all entries.
1133
Unlike iter_entries(), just the entries are returned (not (path, ie))
1134
and the order of entries is undefined.
1136
XXX: We may not want to merge this into bzr.dev.
1138
if self.root is None:
1140
for _, ie in self._byid.iteritems():
1144
"""Returns number of entries."""
1145
return len(self._byid)
1147
def __getitem__(self, file_id):
1148
"""Return the entry for given file_id.
1150
>>> inv = Inventory()
1151
>>> inv.add(InventoryFile('123123', 'hello.c', ROOT_ID))
1152
InventoryFile('123123', 'hello.c', parent_id='TREE_ROOT', sha1=None, len=None, revision=None)
1153
>>> inv['123123'].name
1157
return self._byid[file_id]
1159
# really we're passing an inventory, not a tree...
1160
raise errors.NoSuchId(self, file_id)
1162
def get_file_kind(self, file_id):
1163
return self._byid[file_id].kind
1165
def get_child(self, parent_id, filename):
1166
return self[parent_id].children.get(filename)
1168
def _add_child(self, entry):
1169
"""Add an entry to the inventory, without adding it to its parent"""
1170
if entry.file_id in self._byid:
1171
raise BzrError("inventory already contains entry with id {%s}" %
1173
self._byid[entry.file_id] = entry
1174
for child in getattr(entry, 'children', {}).itervalues():
1175
self._add_child(child)
1178
def add(self, entry):
1179
"""Add entry to inventory.
1183
if entry.file_id in self._byid:
1184
raise errors.DuplicateFileId(entry.file_id,
1185
self._byid[entry.file_id])
1186
if entry.parent_id is None:
1190
parent = self._byid[entry.parent_id]
1192
raise errors.InconsistentDelta("<unknown>", entry.parent_id,
1193
"Parent not in inventory.")
1194
if entry.name in parent.children:
1195
raise errors.InconsistentDelta(
1196
self.id2path(parent.children[entry.name].file_id),
1198
"Path already versioned")
1199
parent.children[entry.name] = entry
1200
return self._add_child(entry)
1202
def add_path(self, relpath, kind, file_id=None, parent_id=None):
1203
"""Add entry from a path.
1205
The immediate parent must already be versioned.
1207
Returns the new entry object."""
1209
parts = osutils.splitpath(relpath)
1213
file_id = generate_ids.gen_root_id()
1214
self.root = InventoryDirectory(file_id, '', None)
1215
self._byid = {self.root.file_id: self.root}
1218
parent_path = parts[:-1]
1219
parent_id = self.path2id(parent_path)
1220
if parent_id is None:
1221
raise errors.NotVersionedError(path=parent_path)
1222
ie = make_entry(kind, parts[-1], parent_id, file_id)
1225
def __delitem__(self, file_id):
1226
"""Remove entry by id.
1228
>>> inv = Inventory()
1229
>>> inv.add(InventoryFile('123', 'foo.c', ROOT_ID))
1230
InventoryFile('123', 'foo.c', parent_id='TREE_ROOT', sha1=None, len=None, revision=None)
1238
del self._byid[file_id]
1239
if ie.parent_id is not None:
1240
del self[ie.parent_id].children[ie.name]
1242
def __eq__(self, other):
1243
"""Compare two sets by comparing their contents.
1245
>>> i1 = Inventory()
1246
>>> i2 = Inventory()
1249
>>> i1.add(InventoryFile('123', 'foo', ROOT_ID))
1250
InventoryFile('123', 'foo', parent_id='TREE_ROOT', sha1=None, len=None, revision=None)
1253
>>> i2.add(InventoryFile('123', 'foo', ROOT_ID))
1254
InventoryFile('123', 'foo', parent_id='TREE_ROOT', sha1=None, len=None, revision=None)
1258
if not isinstance(other, Inventory):
1259
return NotImplemented
1261
return self._byid == other._byid
1263
def __ne__(self, other):
1264
return not self.__eq__(other)
1267
raise ValueError('not hashable')
1269
def _iter_file_id_parents(self, file_id):
1270
"""Yield the parents of file_id up to the root."""
1271
while file_id is not None:
1273
ie = self._byid[file_id]
1275
raise errors.NoSuchId(tree=None, file_id=file_id)
1277
file_id = ie.parent_id
1279
def has_id(self, file_id):
1280
return (file_id in self._byid)
1282
def _make_delta(self, old):
1283
"""Make an inventory delta from two inventories."""
1284
old_getter = getattr(old, '_byid', old)
1285
new_getter = self._byid
1286
old_ids = set(old_getter)
1287
new_ids = set(new_getter)
1288
adds = new_ids - old_ids
1289
deletes = old_ids - new_ids
1290
if not adds and not deletes:
1293
common = old_ids.intersection(new_ids)
1295
for file_id in deletes:
1296
delta.append((old.id2path(file_id), None, file_id, None))
1297
for file_id in adds:
1298
delta.append((None, self.id2path(file_id), file_id, self[file_id]))
1299
for file_id in common:
1300
new_ie = new_getter[file_id]
1301
old_ie = old_getter[file_id]
1302
# If xml_serializer returns the cached InventoryEntries (rather
1303
# than always doing .copy()), inlining the 'is' check saves 2.7M
1304
# calls to __eq__. Under lsprof this saves 20s => 6s.
1305
# It is a minor improvement without lsprof.
1306
if old_ie is new_ie or old_ie == new_ie:
1309
delta.append((old.id2path(file_id), self.id2path(file_id),
1313
def remove_recursive_id(self, file_id):
1314
"""Remove file_id, and children, from the inventory.
1316
:param file_id: A file_id to remove.
1318
to_find_delete = [self._byid[file_id]]
1320
while to_find_delete:
1321
ie = to_find_delete.pop()
1322
to_delete.append(ie.file_id)
1323
if ie.kind == 'directory':
1324
to_find_delete.extend(ie.children.values())
1325
for file_id in reversed(to_delete):
1327
del self._byid[file_id]
1328
if ie.parent_id is not None:
1329
del self[ie.parent_id].children[ie.name]
1333
def rename(self, file_id, new_parent_id, new_name):
1334
"""Move a file within the inventory.
1336
This can change either the name, or the parent, or both.
1338
This does not move the working file.
1340
new_name = ensure_normalized_name(new_name)
1341
if not is_valid_name(new_name):
1342
raise BzrError("not an acceptable filename: %r" % new_name)
1344
new_parent = self._byid[new_parent_id]
1345
if new_name in new_parent.children:
1346
raise BzrError("%r already exists in %r" % (new_name, self.id2path(new_parent_id)))
1348
new_parent_idpath = self.get_idpath(new_parent_id)
1349
if file_id in new_parent_idpath:
1350
raise BzrError("cannot move directory %r into a subdirectory of itself, %r"
1351
% (self.id2path(file_id), self.id2path(new_parent_id)))
1353
file_ie = self._byid[file_id]
1354
old_parent = self._byid[file_ie.parent_id]
1356
# TODO: Don't leave things messed up if this fails
1358
del old_parent.children[file_ie.name]
1359
new_parent.children[new_name] = file_ie
1361
file_ie.name = new_name
1362
file_ie.parent_id = new_parent_id
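# Illustrative sketch (not from the original module): rename() only updates the
# in-memory inventory; it does not touch the working file. Hypothetical ids,
# assuming 'lib-id' names an existing directory 'lib':
#
#     inv.rename('hello-id', 'lib-id', 'greeting.c')
#     inv.id2path('hello-id')   # -> 'lib/greeting.c'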
def is_root(self, file_id):
1365
return self.root is not None and file_id == self.root.file_id
1368
class CHKInventory(CommonInventory):
1369
"""An inventory persisted in a CHK store.
1371
By design, a CHKInventory is immutable so many of the methods
1372
supported by Inventory - add, rename, apply_delta, etc - are *not*
1373
supported. To create a new CHKInventory, use create_by_apply_delta()
1374
or from_inventory(), say.
1376
Internally, a CHKInventory has one or two CHKMaps:
1378
* id_to_entry - a map from (file_id,) => InventoryEntry as bytes
1379
* parent_id_basename_to_file_id - a map from (parent_id, basename_utf8)
1382
The second map is optional and not present in early CHKRepository formats.
1384
No caching is performed: every method call or item access will perform
1385
requests to the storage layer. As such, keep references to objects you
1389
def __init__(self, search_key_name):
1390
CommonInventory.__init__(self)
1391
self._fileid_to_entry_cache = {}
1392
self._path_to_fileid_cache = {}
1393
self._search_key_name = search_key_name
1396
def __eq__(self, other):
1397
"""Compare two sets by comparing their contents."""
1398
if not isinstance(other, CHKInventory):
1399
return NotImplemented
1401
this_key = self.id_to_entry.key()
1402
other_key = other.id_to_entry.key()
1403
this_pid_key = self.parent_id_basename_to_file_id.key()
1404
other_pid_key = other.parent_id_basename_to_file_id.key()
1405
if None in (this_key, this_pid_key, other_key, other_pid_key):
1407
return this_key == other_key and this_pid_key == other_pid_key
1409
def _entry_to_bytes(self, entry):
1410
"""Serialise entry as a single bytestring.
1412
:param Entry: An inventory entry.
1413
:return: A bytestring for the entry.
1416
ENTRY ::= FILE | DIR | SYMLINK | TREE
1417
FILE ::= "file: " COMMON SEP SHA SEP SIZE SEP EXECUTABLE
1418
DIR ::= "dir: " COMMON
1419
SYMLINK ::= "symlink: " COMMON SEP TARGET_UTF8
1420
TREE ::= "tree: " COMMON REFERENCE_REVISION
1421
COMMON ::= FILE_ID SEP PARENT_ID SEP NAME_UTF8 SEP REVISION
1424
if entry.parent_id is not None:
1425
parent_str = entry.parent_id
1428
name_str = entry.name.encode("utf8")
1429
if entry.kind == 'file':
1430
if entry.executable:
1434
return "file: %s\n%s\n%s\n%s\n%s\n%d\n%s" % (
1435
entry.file_id, parent_str, name_str, entry.revision,
1436
entry.text_sha1, entry.text_size, exec_str)
1437
elif entry.kind == 'directory':
1438
return "dir: %s\n%s\n%s\n%s" % (
1439
entry.file_id, parent_str, name_str, entry.revision)
1440
elif entry.kind == 'symlink':
1441
return "symlink: %s\n%s\n%s\n%s\n%s" % (
1442
entry.file_id, parent_str, name_str, entry.revision,
1443
entry.symlink_target.encode("utf8"))
1444
elif entry.kind == 'tree-reference':
1445
return "tree: %s\n%s\n%s\n%s\n%s" % (
1446
entry.file_id, parent_str, name_str, entry.revision,
1447
entry.reference_revision)
1449
raise ValueError("unknown kind %r" % entry.kind)
def _expand_fileids_to_parents_and_children(self, file_ids):
1452
"""Give a more wholistic view starting with the given file_ids.
1454
For any file_id which maps to a directory, we will include all children
1455
of that directory. We will also include all directories which are
1456
parents of the given file_ids, but we will not include their children.
1463
fringle # fringle-id
1467
if given [foo-id] we will include
1468
TREE_ROOT as interesting parents
1470
foo-id, baz-id, frob-id, fringle-id
1474
# TODO: Pre-pass over the list of fileids to see if anything is already
1475
# deserialized in self._fileid_to_entry_cache
1477
directories_to_expand = set()
1478
children_of_parent_id = {}
1479
# It is okay if some of the fileids are missing
1480
for entry in self._getitems(file_ids):
1481
if entry.kind == 'directory':
1482
directories_to_expand.add(entry.file_id)
1483
interesting.add(entry.parent_id)
1484
children_of_parent_id.setdefault(entry.parent_id, []
1485
).append(entry.file_id)
1487
# Now, interesting has all of the direct parents, but not the
1488
# parents of those parents. It also may have some duplicates with
1490
remaining_parents = interesting.difference(file_ids)
1491
# When we hit the TREE_ROOT, we'll get an interesting parent of None,
1492
# but we don't actually want to recurse into that
1493
interesting.add(None) # this will auto-filter it in the loop
1494
remaining_parents.discard(None)
1495
while remaining_parents:
1496
next_parents = set()
1497
for entry in self._getitems(remaining_parents):
1498
next_parents.add(entry.parent_id)
1499
children_of_parent_id.setdefault(entry.parent_id, []
1500
).append(entry.file_id)
1501
# Remove any search tips we've already processed
1502
remaining_parents = next_parents.difference(interesting)
1503
interesting.update(remaining_parents)
1504
# We should probably also .difference(directories_to_expand)
1505
interesting.update(file_ids)
1506
interesting.discard(None)
1507
while directories_to_expand:
1508
# Expand directories by looking in the
1509
# parent_id_basename_to_file_id map
1510
keys = [StaticTuple(f,).intern() for f in directories_to_expand]
1511
directories_to_expand = set()
1512
items = self.parent_id_basename_to_file_id.iteritems(keys)
1513
next_file_ids = set([item[1] for item in items])
1514
next_file_ids = next_file_ids.difference(interesting)
1515
interesting.update(next_file_ids)
1516
for entry in self._getitems(next_file_ids):
1517
if entry.kind == 'directory':
1518
directories_to_expand.add(entry.file_id)
1519
children_of_parent_id.setdefault(entry.parent_id, []
1520
).append(entry.file_id)
1521
return interesting, children_of_parent_id
1523
def filter(self, specific_fileids):
1524
"""Get an inventory view filtered against a set of file-ids.
1526
Children of directories and parents are included.
1528
The result may or may not reference the underlying inventory
1529
so it should be treated as immutable.
1532
parent_to_children) = self._expand_fileids_to_parents_and_children(
1534
# There is some overlap here, but we assume that all interesting items
1535
# are in the _fileid_to_entry_cache because we had to read them to
1536
# determine if they were a dir we wanted to recurse, or just a file
1537
# This should give us all the entries we'll want to add, so start
1539
other = Inventory(self.root_id)
1540
other.root.revision = self.root.revision
1541
other.revision_id = self.revision_id
1542
if not interesting or not parent_to_children:
1543
# empty filter, or filtering entries that don't exist
1544
# (if even 1 existed, then we would have populated
1545
# parent_to_children with at least the tree root.)
1547
cache = self._fileid_to_entry_cache
1548
remaining_children = collections.deque(parent_to_children[self.root_id])
1549
while remaining_children:
1550
file_id = remaining_children.popleft()
1552
if ie.kind == 'directory':
1553
ie = ie.copy() # We create a copy to depopulate the .children attribute
1554
# TODO: depending on the uses of 'other' we should probably always
# '.copy()' to prevent someone from mutating other and
# invalidating our internal cache
1558
if file_id in parent_to_children:
1559
remaining_children.extend(parent_to_children[file_id])
1563
def _bytes_to_utf8name_key(bytes):
1564
"""Get the file_id, revision_id key out of bytes."""
1565
# We don't normally care about name, except for times when we want
1566
# to filter out empty names because of non rich-root...
1567
sections = bytes.split('\n')
1568
kind, file_id = sections[0].split(': ')
1569
return (sections[2], intern(file_id), intern(sections[3]))
1571
def _bytes_to_entry(self, bytes):
1572
"""Deserialise a serialised entry."""
1573
sections = bytes.split('\n')
1574
if sections[0].startswith("file: "):
1575
result = InventoryFile(sections[0][6:],
1576
sections[2].decode('utf8'),
1578
result.text_sha1 = sections[4]
1579
result.text_size = int(sections[5])
1580
result.executable = sections[6] == "Y"
1581
elif sections[0].startswith("dir: "):
1582
result = CHKInventoryDirectory(sections[0][5:],
1583
sections[2].decode('utf8'),
1585
elif sections[0].startswith("symlink: "):
1586
result = InventoryLink(sections[0][9:],
1587
sections[2].decode('utf8'),
1589
result.symlink_target = sections[4].decode('utf8')
1590
elif sections[0].startswith("tree: "):
1591
result = TreeReference(sections[0][6:],
1592
sections[2].decode('utf8'),
1594
result.reference_revision = sections[4]
1596
raise ValueError("Not a serialised entry %r" % bytes)
1597
result.file_id = intern(result.file_id)
1598
result.revision = intern(sections[3])
1599
if result.parent_id == '':
1600
result.parent_id = None
1601
self._fileid_to_entry_cache[result.file_id] = result
1604
def _get_mutable_inventory(self):
1605
"""See CommonInventory._get_mutable_inventory."""
1606
entries = self.iter_entries()
1607
inv = Inventory(None, self.revision_id)
1608
for path, inv_entry in entries:
1609
inv.add(inv_entry.copy())
1612
def create_by_apply_delta(self, inventory_delta, new_revision_id,
1613
propagate_caches=False):
1614
"""Create a new CHKInventory by applying inventory_delta to this one.
1616
See the inventory developers documentation for the theory behind
1619
:param inventory_delta: The inventory delta to apply. See
1620
Inventory.apply_delta for details.
1621
:param new_revision_id: The revision id of the resulting CHKInventory.
1622
:param propagate_caches: If True, the caches for this inventory are
1623
copied to and updated for the result.
1624
:return: The new CHKInventory.
1626
split = osutils.split
1627
result = CHKInventory(self._search_key_name)
1628
if propagate_caches:
1629
# Just propagate the path-to-fileid cache for now
1630
result._path_to_fileid_cache = dict(self._path_to_fileid_cache.iteritems())
1631
search_key_func = chk_map.search_key_registry.get(self._search_key_name)
1632
self.id_to_entry._ensure_root()
1633
maximum_size = self.id_to_entry._root_node.maximum_size
1634
result.revision_id = new_revision_id
1635
result.id_to_entry = chk_map.CHKMap(
1636
self.id_to_entry._store,
1637
self.id_to_entry.key(),
1638
search_key_func=search_key_func)
1639
result.id_to_entry._ensure_root()
1640
result.id_to_entry._root_node.set_maximum_size(maximum_size)
1641
# Change to apply to the parent_id_basename delta. The dict maps
1642
# (parent_id, basename) -> (old_key, new_value). We use a dict because
1643
# when a path has its id replaced (e.g. the root is changed, or someone
1644
# does bzr mv a b, bzr mv c a, we should output a single change to this
1645
# map rather than two.
1646
parent_id_basename_delta = {}
1647
if self.parent_id_basename_to_file_id is not None:
1648
result.parent_id_basename_to_file_id = chk_map.CHKMap(
1649
self.parent_id_basename_to_file_id._store,
1650
self.parent_id_basename_to_file_id.key(),
1651
search_key_func=search_key_func)
1652
result.parent_id_basename_to_file_id._ensure_root()
1653
self.parent_id_basename_to_file_id._ensure_root()
1654
result_p_id_root = result.parent_id_basename_to_file_id._root_node
1655
p_id_root = self.parent_id_basename_to_file_id._root_node
1656
result_p_id_root.set_maximum_size(p_id_root.maximum_size)
1657
result_p_id_root._key_width = p_id_root._key_width
1659
result.parent_id_basename_to_file_id = None
1660
result.root_id = self.root_id
1661
id_to_entry_delta = []
1662
# inventory_delta is only traversed once, so we just update the
1664
# Check for repeated file ids
1665
inventory_delta = _check_delta_unique_ids(inventory_delta)
1666
# Repeated old paths
1667
inventory_delta = _check_delta_unique_old_paths(inventory_delta)
1668
# Check for repeated new paths
1669
inventory_delta = _check_delta_unique_new_paths(inventory_delta)
1670
# Check for entries that don't match the fileid
1671
inventory_delta = _check_delta_ids_match_entry(inventory_delta)
1672
# Check for nonsense fileids
1673
inventory_delta = _check_delta_ids_are_valid(inventory_delta)
1674
# Check for new_path <-> entry consistency
1675
inventory_delta = _check_delta_new_path_entry_both_or_None(
1677
# All changed entries need to have their parents be directories and be
1678
# at the right path. This set contains (path, id) tuples.
1680
# When we delete an item, all the children of it must be either deleted
1681
# or altered in their own right. As we batch process the change via
1682
# CHKMap.apply_delta, we build a set of things to use to validate the
1686
for old_path, new_path, file_id, entry in inventory_delta:
1689
result.root_id = file_id
1690
if new_path is None:
1695
if propagate_caches:
1697
del result._path_to_fileid_cache[old_path]
1700
deletes.add(file_id)
1702
new_key = StaticTuple(file_id,)
1703
new_value = result._entry_to_bytes(entry)
1704
# Update caches. It's worth doing this whether
1705
# we're propagating the old caches or not.
1706
result._path_to_fileid_cache[new_path] = file_id
1707
parents.add((split(new_path)[0], entry.parent_id))
1708
if old_path is None:
1711
old_key = StaticTuple(file_id,)
1712
if self.id2path(file_id) != old_path:
1713
raise errors.InconsistentDelta(old_path, file_id,
1714
"Entry was at wrong other path %r." %
1715
self.id2path(file_id))
1716
altered.add(file_id)
1717
id_to_entry_delta.append(StaticTuple(old_key, new_key, new_value))
1718
if result.parent_id_basename_to_file_id is not None:
1719
# parent_id, basename changes
1720
if old_path is None:
1723
old_entry = self[file_id]
1724
old_key = self._parent_id_basename_key(old_entry)
1725
if new_path is None:
1729
new_key = self._parent_id_basename_key(entry)
1731
# If the two keys are the same, the value will be unchanged
1732
# as its always the file id for this entry.
1733
if old_key != new_key:
1734
# Transform a change into explicit delete/add preserving
1735
# a possible match on the key from a different file id.
1736
if old_key is not None:
1737
parent_id_basename_delta.setdefault(
1738
old_key, [None, None])[0] = old_key
1739
if new_key is not None:
1740
parent_id_basename_delta.setdefault(
1741
new_key, [None, None])[1] = new_value
1742
# validate that deletes are complete.
1743
for file_id in deletes:
1744
entry = self[file_id]
1745
if entry.kind != 'directory':
1747
# This loop could potentially be better by using the id_basename
1748
# map to just get the child file ids.
1749
for child in entry.children.values():
1750
if child.file_id not in altered:
1751
raise errors.InconsistentDelta(self.id2path(child.file_id),
1752
child.file_id, "Child not deleted or reparented when "
1754
result.id_to_entry.apply_delta(id_to_entry_delta)
1755
if parent_id_basename_delta:
1756
# Transform the parent_id_basename delta data into a linear delta
1757
# with only one record for a given key. Optimally this would allow
1758
# re-keying, but its simpler to just output that as a delete+add
1759
# to spend less time calculating the delta.
1761
for key, (old_key, value) in parent_id_basename_delta.iteritems():
1762
if value is not None:
1763
delta_list.append((old_key, key, value))
1765
delta_list.append((old_key, None, None))
1766
result.parent_id_basename_to_file_id.apply_delta(delta_list)
1767
parents.discard(('', None))
1768
for parent_path, parent in parents:
1770
if result[parent].kind != 'directory':
1771
raise errors.InconsistentDelta(result.id2path(parent), parent,
1772
'Not a directory, but given children')
1773
except errors.NoSuchId:
1774
raise errors.InconsistentDelta("<unknown>", parent,
1775
"Parent is not present in resulting inventory.")
1776
if result.path2id(parent_path) != parent:
1777
raise errors.InconsistentDelta(parent_path, parent,
1778
"Parent has wrong path %r." % result.path2id(parent_path))

    @classmethod
    def deserialise(klass, chk_store, bytes, expected_revision_id):
        """Deserialise a CHKInventory.

        :param chk_store: A CHK capable VersionedFiles instance.
        :param bytes: The serialised bytes.
        :param expected_revision_id: The revision ID we think this inventory is
            for.
        :return: A CHKInventory
        """
        lines = bytes.split('\n')
        if lines[-1] != '':
            raise AssertionError('bytes to deserialize must end with an eol')
        lines.pop()
        if lines[0] != 'chkinventory:':
            raise ValueError("not a serialised CHKInventory: %r" % bytes)
        info = {}
        allowed_keys = frozenset(['root_id', 'revision_id', 'search_key_name',
                                  'parent_id_basename_to_file_id',
                                  'id_to_entry'])
        for line in lines[1:]:
            key, value = line.split(': ', 1)
            if key not in allowed_keys:
                raise errors.BzrError('Unknown key in inventory: %r\n%r'
                                      % (key, bytes))
            if key in info:
                raise errors.BzrError('Duplicate key in inventory: %r\n%r'
                                      % (key, bytes))
            info[key] = value
        revision_id = intern(info['revision_id'])
        root_id = intern(info['root_id'])
        search_key_name = intern(info.get('search_key_name', 'plain'))
        parent_id_basename_to_file_id = intern(info.get(
            'parent_id_basename_to_file_id', None))
        if not parent_id_basename_to_file_id.startswith('sha1:'):
            raise ValueError('parent_id_basename_to_file_id should be a sha1'
                             ' key not %r' % (parent_id_basename_to_file_id,))
        id_to_entry = info['id_to_entry']
        if not id_to_entry.startswith('sha1:'):
            raise ValueError('id_to_entry should be a sha1'
                             ' key not %r' % (id_to_entry,))

        result = CHKInventory(search_key_name)
        result.revision_id = revision_id
        result.root_id = root_id
        search_key_func = chk_map.search_key_registry.get(
            result._search_key_name)
        if parent_id_basename_to_file_id is not None:
            result.parent_id_basename_to_file_id = chk_map.CHKMap(
                chk_store, StaticTuple(parent_id_basename_to_file_id,),
                search_key_func=search_key_func)
        else:
            result.parent_id_basename_to_file_id = None

        result.id_to_entry = chk_map.CHKMap(chk_store,
                                            StaticTuple(id_to_entry,),
                                            search_key_func=search_key_func)
        if (result.revision_id,) != expected_revision_id:
            raise ValueError("Mismatched revision id and expected: %r, %r" %
                (result.revision_id, expected_revision_id))
        return result
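
    # Illustrative sketch (not from the original source): deserialise() is the
    # inverse of to_lines() further down.  `chk_store` and `inv` are assumed
    # placeholders for a CHK-capable VersionedFiles store and an existing
    # CHKInventory; note that expected_revision_id is passed as a key tuple.
    #
    #   bytes = ''.join(inv.to_lines())
    #   copy = CHKInventory.deserialise(chk_store, bytes, (inv.revision_id,))
    #   assert copy.revision_id == inv.revision_id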

    @classmethod
    def from_inventory(klass, chk_store, inventory, maximum_size=0, search_key_name='plain'):
        """Create a CHKInventory from an existing inventory.

        The content of inventory is copied into the chk_store, and a
        CHKInventory referencing that is returned.

        :param chk_store: A CHK capable VersionedFiles instance.
        :param inventory: The inventory to copy.
        :param maximum_size: The CHKMap node size limit.
        :param search_key_name: The identifier for the search key function
        """
        result = klass(search_key_name)
        result.revision_id = inventory.revision_id
        result.root_id = inventory.root.file_id

        entry_to_bytes = result._entry_to_bytes
        parent_id_basename_key = result._parent_id_basename_key
        id_to_entry_dict = {}
        parent_id_basename_dict = {}
        for path, entry in inventory.iter_entries():
            key = StaticTuple(entry.file_id,).intern()
            id_to_entry_dict[key] = entry_to_bytes(entry)
            p_id_key = parent_id_basename_key(entry)
            parent_id_basename_dict[p_id_key] = entry.file_id

        result._populate_from_dicts(chk_store, id_to_entry_dict,
            parent_id_basename_dict, maximum_size=maximum_size)
        return result
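
    # Illustrative sketch (not from the original source): converting a plain
    # in-memory Inventory into a CHKInventory.  `chk_store` and `plain_inv`
    # are assumed placeholders; maximum_size bounds the CHKMap node size and
    # search_key_name defaults to 'plain'.
    #
    #   chk_inv = CHKInventory.from_inventory(chk_store, plain_inv,
    #       maximum_size=4096)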

    def _populate_from_dicts(self, chk_store, id_to_entry_dict,
                             parent_id_basename_dict, maximum_size):
        search_key_func = chk_map.search_key_registry.get(self._search_key_name)
        root_key = chk_map.CHKMap.from_dict(chk_store, id_to_entry_dict,
                   maximum_size=maximum_size, key_width=1,
                   search_key_func=search_key_func)
        self.id_to_entry = chk_map.CHKMap(chk_store, root_key,
                                          search_key_func)
        root_key = chk_map.CHKMap.from_dict(chk_store,
                   parent_id_basename_dict,
                   maximum_size=maximum_size, key_width=2,
                   search_key_func=search_key_func)
        self.parent_id_basename_to_file_id = chk_map.CHKMap(chk_store,
            root_key, search_key_func)

    def _parent_id_basename_key(self, entry):
        """Create a key for an entry in a parent_id_basename_to_file_id index."""
        if entry.parent_id is not None:
            parent_id = entry.parent_id
        else:
            parent_id = ''
        return StaticTuple(parent_id, entry.name.encode('utf8')).intern()
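
    # Illustrative sketch (not from the original source): the keys built above
    # are (parent_directory_id, basename_utf8) pairs; the root entry, having
    # no parent, uses '' for its parent_id.  The directory id below is a
    # made-up placeholder.
    #
    #   StaticTuple('src-dir-id', 'hello.c')   # a file inside a directory
    #   StaticTuple('', '')                    # the tree root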

    def __getitem__(self, file_id):
        """map a single file_id -> InventoryEntry."""
        if file_id is None:
            raise errors.NoSuchId(self, file_id)
        result = self._fileid_to_entry_cache.get(file_id, None)
        if result is not None:
            return result
        try:
            return self._bytes_to_entry(
                self.id_to_entry.iteritems([StaticTuple(file_id,)]).next()[1])
        except StopIteration:
            # really we're passing an inventory, not a tree...
            raise errors.NoSuchId(self, file_id)

    def _getitems(self, file_ids):
        """Similar to __getitem__, but lets you query for multiple.

        The returned order is undefined. Currently, if an item doesn't
        exist, it isn't included in the output.
        """
        result = []
        remaining = []
        for file_id in file_ids:
            entry = self._fileid_to_entry_cache.get(file_id, None)
            if entry is None:
                remaining.append(file_id)
            else:
                result.append(entry)
        file_keys = [StaticTuple(f,).intern() for f in remaining]
        for file_key, value in self.id_to_entry.iteritems(file_keys):
            entry = self._bytes_to_entry(value)
            result.append(entry)
            self._fileid_to_entry_cache[entry.file_id] = entry
        return result
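
    # Illustrative sketch (not from the original source): looking entries up
    # by file id, singly or in bulk; `inv` and the ids are placeholders.
    # Cached entries are served without touching the id_to_entry map.
    #
    #   entry = inv['file-id-1']                  # raises NoSuchId if absent
    #   entries = inv._getitems(['file-id-1', 'file-id-2'])  # missing ids skipped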

    def has_id(self, file_id):
        # Perhaps have an explicit 'contains' method on CHKMap ?
        if self._fileid_to_entry_cache.get(file_id, None) is not None:
            return True
        return len(list(
            self.id_to_entry.iteritems([StaticTuple(file_id,)]))) == 1

    def is_root(self, file_id):
        return file_id == self.root_id

    def _iter_file_id_parents(self, file_id):
        """Yield the parents of file_id up to the root."""
        while file_id is not None:
            try:
                ie = self[file_id]
            except KeyError:
                raise errors.NoSuchId(tree=self, file_id=file_id)
            yield ie
            file_id = ie.parent_id

    def __iter__(self):
        """Iterate over all file-ids."""
        for key, _ in self.id_to_entry.iteritems():
            yield key[0]

    def iter_just_entries(self):
        """Iterate over all entries.

        Unlike iter_entries(), just the entries are returned (not (path, ie))
        and the order of entries is undefined.

        XXX: We may not want to merge this into bzr.dev.
        """
        for key, entry in self.id_to_entry.iteritems():
            file_id = key[0]
            ie = self._fileid_to_entry_cache.get(file_id, None)
            if ie is None:
                ie = self._bytes_to_entry(entry)
                self._fileid_to_entry_cache[file_id] = ie
            yield ie

    def iter_changes(self, basis):
        """Generate a Tree.iter_changes change list between this and basis.

        :param basis: Another CHKInventory.
        :return: An iterator over the changes between self and basis, as per
            tree.iter_changes().
        """
        # We want: (file_id, (path_in_source, path_in_target),
        #          changed_content, versioned, parent, name, kind,
        #          executable)
        for key, basis_value, self_value in \
            self.id_to_entry.iter_changes(basis.id_to_entry):
            file_id = key[0]
            if basis_value is not None:
                basis_entry = basis._bytes_to_entry(basis_value)
                path_in_source = basis.id2path(file_id)
                basis_parent = basis_entry.parent_id
                basis_name = basis_entry.name
                basis_executable = basis_entry.executable
            else:
                path_in_source = None
                basis_parent = None
                basis_name = None
                basis_executable = None
            if self_value is not None:
                self_entry = self._bytes_to_entry(self_value)
                path_in_target = self.id2path(file_id)
                self_parent = self_entry.parent_id
                self_name = self_entry.name
                self_executable = self_entry.executable
            else:
                path_in_target = None
                self_parent = None
                self_name = None
                self_executable = None
            if basis_value is None:
                # add
                kind = (None, self_entry.kind)
                versioned = (False, True)
            elif self_value is None:
                # delete
                kind = (basis_entry.kind, None)
                versioned = (True, False)
            else:
                kind = (basis_entry.kind, self_entry.kind)
                versioned = (True, True)
            changed_content = False
            if kind[0] != kind[1]:
                changed_content = True
            elif kind[0] == 'file':
                if (self_entry.text_size != basis_entry.text_size or
                    self_entry.text_sha1 != basis_entry.text_sha1):
                    changed_content = True
            elif kind[0] == 'symlink':
                if self_entry.symlink_target != basis_entry.symlink_target:
                    changed_content = True
            elif kind[0] == 'tree-reference':
                if (self_entry.reference_revision !=
                    basis_entry.reference_revision):
                    changed_content = True
            parent = (basis_parent, self_parent)
            name = (basis_name, self_name)
            executable = (basis_executable, self_executable)
            if (not changed_content
                and parent[0] == parent[1]
                and name[0] == name[1]
                and executable[0] == executable[1]):
                # Could happen when only the revision changed for a directory
                # for instance.
                continue
            yield (file_id, (path_in_source, path_in_target), changed_content,
                versioned, parent, name, kind, executable)
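
    # Illustrative sketch (not from the original source): each change yielded
    # above is the usual Tree.iter_changes 8-tuple.  Placeholder values:
    #
    #   ('file-id-1',                  # file_id
    #    ('old/path', 'new/path'),     # (path_in_source, path_in_target)
    #    True,                         # changed_content
    #    (True, True),                 # versioned in (basis, self)
    #    ('old-parent', 'new-parent'), # parent ids
    #    ('old.txt', 'new.txt'),       # names
    #    ('file', 'file'),             # kinds
    #    (False, False))               # executable bits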

    def __len__(self):
        """Return the number of entries in the inventory."""
        return len(self.id_to_entry)

    def _make_delta(self, old):
        """Make an inventory delta from two inventories."""
        if type(old) != CHKInventory:
            return CommonInventory._make_delta(self, old)
        delta = []
        for key, old_value, self_value in \
            self.id_to_entry.iter_changes(old.id_to_entry):
            file_id = key[0]
            if old_value is not None:
                old_path = old.id2path(file_id)
            else:
                old_path = None
            if self_value is not None:
                entry = self._bytes_to_entry(self_value)
                self._fileid_to_entry_cache[file_id] = entry
                new_path = self.id2path(file_id)
            else:
                entry = None
                new_path = None
            delta.append((old_path, new_path, file_id, entry))
        return delta

    def path2id(self, relpath):
        """See CommonInventory.path2id()."""
        # TODO: perhaps support negative hits?
        result = self._path_to_fileid_cache.get(relpath, None)
        if result is not None:
            return result
        if isinstance(relpath, basestring):
            names = osutils.splitpath(relpath)
        else:
            names = relpath
        current_id = self.root_id
        if current_id is None:
            return None
        parent_id_index = self.parent_id_basename_to_file_id
        cur_path = None
        for basename in names:
            if cur_path is None:
                cur_path = basename
            else:
                cur_path = cur_path + '/' + basename
            basename_utf8 = basename.encode('utf8')
            file_id = self._path_to_fileid_cache.get(cur_path, None)
            if file_id is None:
                key_filter = [StaticTuple(current_id, basename_utf8)]
                items = parent_id_index.iteritems(key_filter)
                for (parent_id, name_utf8), file_id in items:
                    if parent_id != current_id or name_utf8 != basename_utf8:
                        raise errors.BzrError("corrupt inventory lookup! "
                            "%r %r %r %r" % (parent_id, current_id, name_utf8,
                            basename_utf8))
                if file_id is None:
                    return None
                else:
                    self._path_to_fileid_cache[cur_path] = file_id
            current_id = file_id
        return current_id
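
    # Illustrative sketch (not from the original source): path2id() resolves
    # one path component at a time via the (parent_id, basename) index, so
    # 'src/hello.c' costs two index lookups on a cold cache.  `inv` and the
    # returned id are placeholders.
    #
    #   inv.path2id('src/hello.c')        # -> 'hello-file-id' or None
    #   inv.path2id(['src', 'hello.c'])   # a pre-split path works too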

    def to_lines(self):
        """Serialise the inventory to lines."""
        lines = ["chkinventory:\n"]
        if self._search_key_name != 'plain':
            # custom ordering grouping things that don't change together
            lines.append('search_key_name: %s\n' % (self._search_key_name,))
            lines.append("root_id: %s\n" % self.root_id)
            lines.append('parent_id_basename_to_file_id: %s\n' %
                (self.parent_id_basename_to_file_id.key()[0],))
            lines.append("revision_id: %s\n" % self.revision_id)
            lines.append("id_to_entry: %s\n" % (self.id_to_entry.key()[0],))
        else:
            lines.append("revision_id: %s\n" % self.revision_id)
            lines.append("root_id: %s\n" % self.root_id)
            if self.parent_id_basename_to_file_id is not None:
                lines.append('parent_id_basename_to_file_id: %s\n' %
                    (self.parent_id_basename_to_file_id.key()[0],))
            lines.append("id_to_entry: %s\n" % (self.id_to_entry.key()[0],))
        return lines

    @property
    def root(self):
        """Get the root entry."""
        return self[self.root_id]
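
    # Illustrative sketch (not from the original source): with the default
    # 'plain' search key, the serialised form built by to_lines() looks like
    # the following (the sha1 values and ids are placeholders):
    #
    #   chkinventory:
    #   revision_id: some-revision-id
    #   root_id: TREE_ROOT
    #   parent_id_basename_to_file_id: sha1:1234...
    #   id_to_entry: sha1:abcd...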


class CHKInventoryDirectory(InventoryDirectory):
    """A directory in an inventory."""

    __slots__ = ['_children', '_chk_inventory']

    def __init__(self, file_id, name, parent_id, chk_inventory):
        # Don't call InventoryDirectory.__init__ - it isn't right for this
        # class.
        InventoryEntry.__init__(self, file_id, name, parent_id)
        self._children = None
        self._chk_inventory = chk_inventory

    @property
    def children(self):
        """Access the list of children of this directory.

        With a parent_id_basename_to_file_id index this loads all of the
        children; without one it would have to load the entire id_to_entry
        map, which is bad. A more sophisticated proxy object might be nice,
        to allow partial loading of children as well when specific names
        are accessed. (So path traversal can be written in the obvious way
        but not examine siblings.)
        """
        if self._children is not None:
            return self._children
        # No longer supported
        if self._chk_inventory.parent_id_basename_to_file_id is None:
            raise AssertionError("Inventories without"
                " parent_id_basename_to_file_id are no longer supported")
        result = {}
        # XXX: Todo - use proxy objects for the children rather than loading
        # all when the attribute is referenced.
        parent_id_index = self._chk_inventory.parent_id_basename_to_file_id
        child_keys = set()
        for (parent_id, name_utf8), file_id in parent_id_index.iteritems(
            key_filter=[StaticTuple(self.file_id,)]):
            child_keys.add(StaticTuple(file_id,))
        cached = set()
        for file_id_key in child_keys:
            entry = self._chk_inventory._fileid_to_entry_cache.get(
                file_id_key[0], None)
            if entry is not None:
                result[entry.name] = entry
                cached.add(file_id_key)
        child_keys.difference_update(cached)
        # populate; todo: do by name
        id_to_entry = self._chk_inventory.id_to_entry
        for file_id_key, bytes in id_to_entry.iteritems(child_keys):
            entry = self._chk_inventory._bytes_to_entry(bytes)
            result[entry.name] = entry
            self._chk_inventory._fileid_to_entry_cache[file_id_key[0]] = entry
        self._children = result
        return result


entry_factory = {
    'directory': InventoryDirectory,
    'file': InventoryFile,
    'symlink': InventoryLink,
    'tree-reference': TreeReference
}


def make_entry(kind, name, parent_id, file_id=None):
    """Create an inventory entry.

    :param kind: the type of inventory entry to create.
    :param name: the basename of the entry.
    :param parent_id: the parent_id of the entry.
    :param file_id: the file_id to use. If None, one will be created.
    """
    if file_id is None:
        file_id = generate_ids.gen_file_id(name)
    name = ensure_normalized_name(name)
    try:
        factory = entry_factory[kind]
    except KeyError:
        raise errors.BadFileKindError(name, kind)
    return factory(file_id, name, parent_id)
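
# Illustrative sketch (not from the original source): make_entry() picks the
# right InventoryEntry subclass from entry_factory and generates a file id
# when none is supplied.  The parent id below is a made-up placeholder.
#
#   ie = make_entry('file', 'hello.c', 'src-dir-id')
#   isinstance(ie, InventoryFile)            # True
#   make_entry('fifo', 'x', 'src-dir-id')    # raises BadFileKindError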


def ensure_normalized_name(name):
    """Normalize name.

    :raises InvalidNormalization: When name is not normalized, and cannot be
        accessed on this platform by the normalized path.
    :return: The NFC normalised version of name.
    """
    #------- This has been copied to bzrlib.dirstate.DirState.add, please
    # keep them synchronised.
    # we don't import normalized_filename directly because we want to be
    # able to change the implementation at runtime for tests.
    norm_name, can_access = osutils.normalized_filename(name)
    if norm_name != name:
        if can_access:
            return norm_name
        else:
            # TODO: jam 20060701 This would probably be more useful
            #       if the error was raised with the full path
            raise errors.InvalidNormalization(name)
    return name


_NAME_RE = None


def is_valid_name(name):
    global _NAME_RE
    if _NAME_RE is None:
        _NAME_RE = re.compile(r'^[^/\\]+$')

    return bool(_NAME_RE.match(name))


def _check_delta_unique_ids(delta):
    """Decorate a delta and check that the file ids in it are unique.

    :return: A generator over delta.
    """
    ids = set()
    for item in delta:
        length = len(ids) + 1
        ids.add(item[2])
        if len(ids) != length:
            raise errors.InconsistentDelta(item[0] or item[1], item[2],
                "repeated file_id")
        yield item
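
# Illustrative sketch (not from the original source): the _check_delta_*
# helpers are generators intended to be stacked around a delta so each item
# is validated lazily as it is consumed; `delta` below is a placeholder.
#
#   checked = _check_delta_unique_ids(_check_delta_unique_new_paths(delta))
#   for item in checked:
#       pass  # InconsistentDelta is raised on the offending item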


def _check_delta_unique_new_paths(delta):
    """Decorate a delta and check that the new paths in it are unique.

    :return: A generator over delta.
    """
    paths = set()
    for item in delta:
        length = len(paths) + 1
        path = item[1]
        if path is not None:
            paths.add(path)
            if len(paths) != length:
                raise errors.InconsistentDelta(path, item[2], "repeated path")
        yield item


def _check_delta_unique_old_paths(delta):
    """Decorate a delta and check that the old paths in it are unique.

    :return: A generator over delta.
    """
    paths = set()
    for item in delta:
        length = len(paths) + 1
        path = item[0]
        if path is not None:
            paths.add(path)
            if len(paths) != length:
                raise errors.InconsistentDelta(path, item[2], "repeated path")
        yield item


def _check_delta_ids_are_valid(delta):
    """Decorate a delta and check that the ids in it are valid.

    :return: A generator over delta.
    """
    for item in delta:
        entry = item[3]
        if item[2] is None:
            raise errors.InconsistentDelta(item[0] or item[1], item[2],
                "entry with file_id None %r" % entry)
        if type(item[2]) != str:
            raise errors.InconsistentDelta(item[0] or item[1], item[2],
                "entry with non bytes file_id %r" % entry)
        yield item


def _check_delta_ids_match_entry(delta):
    """Decorate a delta and check that the ids in it match the entry.file_id.

    :return: A generator over delta.
    """
    for item in delta:
        entry = item[3]
        if entry is not None:
            if entry.file_id != item[2]:
                raise errors.InconsistentDelta(item[0] or item[1], item[2],
                    "mismatched id with %r" % entry)
        yield item


def _check_delta_new_path_entry_both_or_None(delta):
    """Decorate a delta and check that the new_path and entry are paired.

    :return: A generator over delta.
    """
    for item in delta:
        new_path = item[1]
        entry = item[3]
        if new_path is None and entry is not None:
            raise errors.InconsistentDelta(item[0], item[1],
                "Entry with no new_path")
        if new_path is not None and entry is None:
            raise errors.InconsistentDelta(new_path, item[1],
                "new_path with no entry")